diff --git a/ex5/ex5.m b/ex5/ex5.m
index 8ca6c23..9a8698c 100644
--- a/ex5/ex5.m
+++ b/ex5/ex5.m
@@ -3,7 +3,7 @@
 %
 %  Instructions
 %  ------------
-% 
+%
 %  This file contains code that helps you get started on the
 %  exercise. You will need to complete the following functions:
 %
@@ -19,7 +19,7 @@ clear ; close all; clc
 
 %% =========== Part 1: Loading and Visualizing Data =============
-%  We start the exercise by first loading and visualizing the dataset. 
+%  We start the exercise by first loading and visualizing the dataset.
 %  The following code will load the dataset into your environment and plot
 %  the data.
 %
@@ -27,7 +27,7 @@ clear ; close all; clc
 % Load Training Data
 fprintf('Loading and Visualizing Data ...\n')
 
-%  Load from ex5data1: 
+%  Load from ex5data1:
 %  You will have X, y, Xval, yval, Xtest, ytest in your environment
 load ('ex5data1.mat');
 
@@ -43,8 +43,8 @@ fprintf('Program paused. Press enter to continue.\n');
 pause;
 
 %% =========== Part 2: Regularized Linear Regression Cost =============
-%  You should now implement the cost function for regularized linear 
-%  regression. 
+%  You should now implement the cost function for regularized linear
+%  regression.
 %
 
 theta = [1 ; 1];
@@ -57,7 +57,7 @@ fprintf('Program paused. Press enter to continue.\n');
 pause;
 
 %% =========== Part 3: Regularized Linear Regression Gradient =============
-%  You should now implement the gradient for regularized linear 
+%  You should now implement the gradient for regularized linear
 %  regression.
 %
 
@@ -74,10 +74,10 @@ pause;
 
 %% =========== Part 4: Train Linear Regression =============
 %  Once you have implemented the cost and gradient correctly, the
-%  trainLinearReg function will use your cost function to train 
+%  trainLinearReg function will use your cost function to train
 %  regularized linear regression.
-% 
-%  Write Up Note: The data is non-linear, so this will not give a great 
+%
+%  Write Up Note: The data is non-linear, so this will not give a great
 %  fit.
 %
 
@@ -98,10 +98,10 @@ pause;
 
 %% =========== Part 5: Learning Curve for Linear Regression =============
-%  Next, you should implement the learningCurve function. 
+%  Next, you should implement the learningCurve function.
 %
 %  Write Up Note: Since the model is underfitting the data, we expect to
-%  see a graph with "high bias" -- slide 8 in ML-advice.pdf 
+%  see a graph with "high bias" -- slide 8 in ML-advice.pdf
 %
 
 lambda = 0;
 
@@ -159,7 +159,7 @@ pause;
 
 %% =========== Part 7: Learning Curve for Polynomial Regression =============
 %  Now, you will get to experiment with polynomial regression with multiple
-%  values of lambda. The code below runs polynomial regression with 
+%  values of lambda. The code below runs polynomial regression with
 %  lambda = 0. You should try running the code with different values of
 %  lambda to see how the fit and learning curve change.
 %
@@ -196,7 +196,7 @@ fprintf('Program paused. Press enter to continue.\n');
 pause;
 
 %% =========== Part 8: Validation for Selecting Lambda =============
-%  You will now implement validationCurve to test various values of 
+%  You will now implement validationCurve to test various values of
 %  lambda on a validation set. You will then use this to select the
 %  "best" lambda value.
 %
@@ -218,3 +218,15 @@ end
 
 fprintf('Program paused. Press enter to continue.\n');
 pause;
+
+%% =========== Part 9: Computing Test Set Error =============
+%  Select the lambda with the lowest cross validation error, retrain on
+%  the full training set with that lambda, and measure the error on the
+%  test set with lambda = 0 (the reported error itself is unregularized).
+%
+[~, best_i] = min(error_val);
+lambda_best = lambda_vec(best_i);
+theta_best = trainLinearReg(X_poly, y, lambda_best);
+error_test = linearRegCostFunction(X_poly_test, ytest, theta_best, 0);
+fprintf('Test set error for best lambda = %f: %f\n', ...
+        lambda_best, error_test);
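
Note on the new Part 9: the test-error computation relies on linearRegCostFunction
returning the regularized cost J, so passing lambda = 0 reduces it to the plain
average squared error, which is the right metric to report on the test set. As a
minimal sketch of that cost under the usual conventions (the name
linearRegCostFunctionSketch is hypothetical; the exercise's actual
linearRegCostFunction.m also returns the gradient and may differ in detail):

function J = linearRegCostFunctionSketch(X, y, theta, lambda)
  % Minimal sketch of the regularized linear regression cost, assuming X
  % already includes the bias column and theta(1) is not regularized.
  m = length(y);                 % number of examples
  h = X * theta;                 % predictions for all examples
  J = (1 / (2 * m)) * sum((h - y) .^ 2) ...
      + (lambda / (2 * m)) * sum(theta(2:end) .^ 2);
end

With lambda = 0 the penalty term vanishes, so error_test above is simply the
unregularized average squared error of theta_best on the polynomial test set.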