From e38033c00d57fa3ee7f69086d8cf6c56877a7738 Mon Sep 17 00:00:00 2001
From: neingeist
Date: Thu, 2 Oct 2014 22:32:37 +0200
Subject: [PATCH] Clean up whitespace

---
 ex1/computeCostMulti.m     |  4 +---
 ex1/gradientDescentMulti.m | 12 ++----------
 2 files changed, 3 insertions(+), 13 deletions(-)

diff --git a/ex1/computeCostMulti.m b/ex1/computeCostMulti.m
index d9a3d68..5f59913 100644
--- a/ex1/computeCostMulti.m
+++ b/ex1/computeCostMulti.m
@@ -6,7 +6,7 @@ function J = computeCostMulti(X, y, theta)
 % Initialize some useful values
 m = length(y); % number of training examples
 
-% You need to return the following variables correctly 
+% You need to return the following variables correctly
 J = 0;
 
 % ====================== YOUR CODE HERE ======================
@@ -15,8 +15,6 @@ J = 0;
 
 
 
-
-
 % =========================================================================
 
 end
diff --git a/ex1/gradientDescentMulti.m b/ex1/gradientDescentMulti.m
index e5e1b7e..c98315b 100644
--- a/ex1/gradientDescentMulti.m
+++ b/ex1/gradientDescentMulti.m
@@ -11,7 +11,7 @@ for iter = 1:num_iters
     % ====================== YOUR CODE HERE ======================
     % Instructions: Perform a single gradient step on the parameter vector
-    %               theta. 
+    %               theta.
     %
     % Hint: While debugging, it can be useful to print out the values
     %       of the cost function (computeCostMulti) and gradient here.
     %
@@ -19,17 +19,9 @@ for iter = 1:num_iters
 
 
 
-
-
-
-
-
-
-
-
     % ============================================================
 
-    % Save the cost J in every iteration    
+    % Save the cost J in every iteration
     J_history(iter) = computeCostMulti(X, y, theta);
 
 end
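
For reference, the "YOUR CODE HERE" stubs that this cleanup touches expect the
standard vectorized expressions for multivariate linear regression. A minimal
sketch, assuming the template's usual variable names (X, y, theta, alpha, m)
and not part of the patch itself, might look like:

    % computeCostMulti.m body: vectorized cost
    % J(theta) = 1/(2m) * (X*theta - y)' * (X*theta - y)
    J = (X * theta - y)' * (X * theta - y) / (2 * m);

    % gradientDescentMulti.m loop body: a single gradient step,
    % using the gradient (1/m) * X' * (X*theta - y)
    theta = theta - (alpha / m) * (X' * (X * theta - y));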