
Clean up whitespace

master
neingeist 10 years ago
parent 8559c243c5
commit e38033c00d

@@ -6,7 +6,7 @@ function J = computeCostMulti(X, y, theta)
% Initialize some useful values
m = length(y); % number of training examples
% You need to return the following variables correctly
J = 0;
% ====================== YOUR CODE HERE ======================
@@ -15,8 +15,6 @@ J = 0;
% =========================================================================
end

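For reference, the vectorized cost that the YOUR CODE HERE block in computeCostMulti.m typically computes is sketched below. This is only a sketch, not the committed solution; it assumes X already carries the intercept column of ones, so the hypothesis is simply X * theta.

function J = computeCostMulti(X, y, theta)
% Sketch (assumed implementation): vectorized cost for multivariate
% linear regression, J = 1/(2m) * sum((X*theta - y).^2).
m = length(y);              % number of training examples
errors = X * theta - y;     % prediction error for every example
J = (errors' * errors) / (2 * m);
end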
@@ -11,7 +11,7 @@ for iter = 1:num_iters
% ====================== YOUR CODE HERE ======================
% Instructions: Perform a single gradient step on the parameter vector
%               theta.
%
% Hint: While debugging, it can be useful to print out the values
%       of the cost function (computeCostMulti) and gradient here.
@@ -19,17 +19,9 @@ for iter = 1:num_iters
% ============================================================
% Save the cost J in every iteration
J_history(iter) = computeCostMulti(X, y, theta);
end
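The single gradient step that this hunk asks for is usually the simultaneous vectorized update sketched below. Again this is an assumed structure, not the committed file: it takes alpha and num_iters as arguments and assumes X includes the bias column.

function [theta, J_history] = gradientDescentMulti(X, y, theta, alpha, num_iters)
% Sketch (assumed implementation): simultaneous update of all parameters,
% theta := theta - (alpha/m) * X' * (X*theta - y).
m = length(y);
J_history = zeros(num_iters, 1);
for iter = 1:num_iters
    theta = theta - (alpha / m) * (X' * (X * theta - y));
    % Save the cost J in every iteration
    J_history(iter) = computeCostMulti(X, y, theta);
end
end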