From eccdcc0d8160bc1b3576a24357e197645bf31e66 Mon Sep 17 00:00:00 2001
From: neingeist
Date: Sun, 2 Nov 2014 13:48:34 +0100
Subject: [PATCH] Regularized NN gradient

---
 ex4/nnCostFunction.m | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/ex4/nnCostFunction.m b/ex4/nnCostFunction.m
index 6a5f55c..3e172c7 100644
--- a/ex4/nnCostFunction.m
+++ b/ex4/nnCostFunction.m
@@ -124,12 +124,16 @@ Theta1_grad = D_1;
 
 % Note: Theta1/2 are matrixes here, we want all their rows, but skip their
 % first column (not regularizing the bias term).
-regularization_term = lambda/(2*m) * ...
+J_regularization_term = lambda/(2*m) * ...
   (sum(sum(Theta1(:,2:end).^2)) ...
    + sum(sum(Theta2(:,2:end).^2)));
-assert(size(regularization_term) == [1 1]);
+assert(size(J_regularization_term) == [1 1]);
+J += J_regularization_term;
 
-J += regularization_term;
+Theta2_grad_regularization_term = lambda/m * [zeros(size(Theta2, 1), 1) Theta2(:,2:end)];
+Theta1_grad_regularization_term = lambda/m * [zeros(size(Theta1, 1), 1) Theta1(:,2:end)];
+Theta2_grad += Theta2_grad_regularization_term;
+Theta1_grad += Theta1_grad_regularization_term;
 
 % --------------------------------------------------------------
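
Note: the regularization pattern added by this patch can be tried in isolation with a minimal Octave sketch such as the one below. The Theta1/Theta2 shapes, lambda, and m are arbitrary placeholders, not values from the exercise data.

% Toy sketch of the regularization terms added above; all values are placeholders.
Theta1 = rand(5, 4);   % hidden units x (inputs + 1); column 1 holds the bias weights
Theta2 = rand(3, 6);   % outputs x (hidden units + 1); column 1 holds the bias weights
lambda = 1;
m = 100;

% Cost regularization: squared weights summed, bias columns excluded.
J_regularization_term = lambda/(2*m) * ...
  (sum(sum(Theta1(:,2:end).^2)) + sum(sum(Theta2(:,2:end).^2)));

% Gradient regularization: lambda/m * Theta with the bias column zeroed out,
% so each term has the same shape as the corresponding gradient matrix.
Theta1_grad_regularization_term = lambda/m * [zeros(size(Theta1, 1), 1) Theta1(:,2:end)];
Theta2_grad_regularization_term = lambda/m * [zeros(size(Theta2, 1), 1) Theta2(:,2:end)];

disp(J_regularization_term);                    % a scalar
disp(size(Theta1_grad_regularization_term));    % matches size(Theta1)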