From 14d8ed99e64b1115c9a92b6776837bfed86a2494 Mon Sep 17 00:00:00 2001 From: mbentle6 <68927153+mbentle6@users.noreply.github.com> Date: Mon, 5 Oct 2020 18:44:02 -0700 Subject: [PATCH] Update HW_4.ipynb Fixed typo in Problem 5, fixed language in Problem 6 and added the "Z" variable for clarification --- lesson_4_Training/HW_4.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lesson_4_Training/HW_4.ipynb b/lesson_4_Training/HW_4.ipynb index 420b1ce..79d5a86 100644 --- a/lesson_4_Training/HW_4.ipynb +++ b/lesson_4_Training/HW_4.ipynb @@ -198,7 +198,7 @@ "id": "QhSnTaNuzOd4" }, "source": [ - "Create create Y, X, and Z samples from problem 1 with 100,000,000 observations (100 million). Measure how long does it take to estimate linear regression of Y on X and Z using: Stochastic Gradient Descent, Batch Gradient Descent, Linear Regression from sklearn, and Linear regression estimated using matrix multiplication $$\\hat{\\theta} = (\\pmb{X}^T \\cdot \\pmb{X})^{-1} \\cdot \\pmb{X}^T \\cdot \\pmb{y}$$, and Linear regression estimated preudo-inverse. For Batch Gradient Descent use 1000 iterations and eta = 0.01. For Stochastic Gradient Descent use 5 iterations and eta = 0.01. Use SGDRegressior imported from sklearn.model.\n", + "Create Y, X, and Z samples from problem 1 with 100,000,000 observations (100 million). Measure how long it takes to estimate linear regression of Y on X and Z using: Stochastic Gradient Descent, Batch Gradient Descent, Linear Regression from sklearn, and Linear regression estimated using matrix multiplication $$\\hat{\\theta} = (\\pmb{X}^T \\cdot \\pmb{X})^{-1} \\cdot \\pmb{X}^T \\cdot \\pmb{y}$$, and Linear regression estimated using the pseudo-inverse. For Batch Gradient Descent use 1000 iterations and eta = 0.01. For Stochastic Gradient Descent use 5 iterations and eta = 0.01. 
Use SGDRegressor imported from sklearn.linear_model.\n", "\n", "For each estimation import **MSE** and **time**" ] @@ -288,7 +288,7 @@ }, "source": [ "# Problem 6\n", - "Recreate a X,Y data from problem 1 using 500 observations. Create a 25-degree polynomial for X and scale the data using standard scaler. Estimate the regression model predicting Y via Ridge regression. Calculate MSE (average 'neg_mean_squared_error') using cross-validation with cv = 3. Find optimal alpha by looping from 0.0001 to 1 using step size 0.0001." + "Recreate the X, Y, and Z data from problem 1 using 500 observations. Create a 25-degree polynomial for X and scale the data using standard scaler. Estimate the regression model predicting Y via Ridge regression. Calculate MSE (average 'neg_mean_squared_error') using cross-validation with cv = 3. Find optimal alpha by looping from 0.0001 to 1 using step size 0.0001." ] }, {