function [all_theta] = oneVsAll(X, y, num_labels, lambda)
% ONEVSALL trains multiple logistic regression classifiers and returns all
% the classifiers in a matrix all_theta, where the i-th row of all_theta
% corresponds to the classifier for label i
%   [all_theta] = ONEVSALL(X, y, num_labels, lambda) trains num_labels
%   logistic regression classifiers and returns each of these classifiers
%   in a matrix all_theta, where the i-th row of all_theta corresponds
%   to the classifier for label i
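%
%   Example usage (a sketch; in the accompanying exercise X holds the
%   hand-written digit images, y holds labels 1..num_labels, and
%   lambda = 0.1):
%       all_theta = oneVsAll(X, y, 10, lambda);
%       % all_theta is then num_labels x (n + 1); row c contains the
%       % parameters of the classifier for class c.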

% Some useful variables
m = size(X, 1);
n = size(X, 2);

% You need to return the following variables correctly
all_theta = zeros(num_labels, n + 1);

% Add ones to the X data matrix
X = [ones(m, 1) X];

% ====================== YOUR CODE HERE ======================
% Instructions: You should complete the following code to train num_labels
%               logistic regression classifiers with regularization
%               parameter lambda.
%
% Hint: theta(:) will return a column vector.
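%       For example, whether theta starts as a row or a column vector,
%       theta(:) reshapes it into a single column, e.g. a 1 x (n + 1)
%       row becomes an (n + 1) x 1 column.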
%
% Hint: You can use y == c to obtain a vector of 1's and 0's that tell you
%       whether the ground truth is true/false for this class.
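%       For example, if y = [1; 2; 3; 1] and c = 1, then
%       (y == c) = [1; 0; 0; 1].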
%
% Note: For this assignment, we recommend using fmincg to optimize the cost
%       function. It is okay to use a for-loop (for c = 1:num_labels) to
%       loop over the different classes.
%
%       fmincg works similarly to fminunc, but is more efficient when we
%       are dealing with a large number of parameters.
%
% Example Code for fmincg:
%
%     % Set Initial theta
%     initial_theta = zeros(n + 1, 1);
%
%     % Set options for fmincg
%     options = optimset('GradObj', 'on', 'MaxIter', 50);
%
%     % Run fmincg to obtain the optimal theta
%     % This function will return theta and the cost
%     [theta] = ...
%         fmincg(@(t)(lrCostFunction(t, X, (y == c), lambda)), ...
%                initial_theta, options);
%

for c = 1:num_labels

    % Train a one-vs-all classifier for this class c
    initial_theta = zeros(n + 1, 1);
    options = optimset('GradObj', 'on', 'MaxIter', 50);

    % Minimize the regularized logistic regression cost for class c,
    % treating (y == c) as the binary labels
    [theta] = fmincg(@(t)(lrCostFunction(t, X, (y == c), lambda)), ...
                     initial_theta, options);

    % Store the learned parameters as row c of all_theta
    all_theta(c, :) = theta';

end

% =========================================================================

end