diff --git a/ex1/computeCostMulti.m b/ex1/computeCostMulti.m
index d9a3d68..5f59913 100644
--- a/ex1/computeCostMulti.m
+++ b/ex1/computeCostMulti.m
@@ -6,7 +6,7 @@ function J = computeCostMulti(X, y, theta)
 % Initialize some useful values
 m = length(y); % number of training examples
 
-% You need to return the following variables correctly 
+% You need to return the following variables correctly
 J = 0;
 
 % ====================== YOUR CODE HERE ======================
@@ -15,8 +15,6 @@ J = 0;
 
 
 
-
-
 % =========================================================================
 
 end
diff --git a/ex1/gradientDescentMulti.m b/ex1/gradientDescentMulti.m
index e5e1b7e..c98315b 100644
--- a/ex1/gradientDescentMulti.m
+++ b/ex1/gradientDescentMulti.m
@@ -11,7 +11,7 @@ for iter = 1:num_iters
 
     % ====================== YOUR CODE HERE ======================
     % Instructions: Perform a single gradient step on the parameter vector
-    %               theta. 
+    %               theta.
     %
     % Hint: While debugging, it can be useful to print out the values
     %       of the cost function (computeCostMulti) and gradient here.
@@ -19,17 +19,9 @@ for iter = 1:num_iters
 
 
 
-
-
-
-
-
-
-
-
     % ============================================================
 
-    % Save the cost J in every iteration 
+    % Save the cost J in every iteration
     J_history(iter) = computeCostMulti(X, y, theta);
 
 end
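The hunks above only clean up whitespace in the exercise skeletons; the "YOUR CODE HERE" blocks still need an implementation. As a rough sketch (not part of this diff), one common vectorized way to fill them in, assuming X already includes the column of ones for the intercept term, is:

% computeCostMulti.m -- squared-error cost J = (1/(2m)) * (X*theta - y)' * (X*theta - y)
errors = X * theta - y;              % m x 1 vector of prediction errors
J = (errors' * errors) / (2 * m);    % scalar cost

% gradientDescentMulti.m -- a single gradient step inside the iter loop
theta = theta - (alpha / m) * (X' * (X * theta - y));   % simultaneous update of all parameters

With an update like this in place, J_history(iter) should decrease on every iteration for a sufficiently small alpha, which is a quick sanity check while debugging.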