diff --git a/ex6/dataset3Params.m b/ex6/dataset3Params.m
index 71a24d8..9af429f 100644
--- a/ex6/dataset3Params.m
+++ b/ex6/dataset3Params.m
@@ -2,8 +2,8 @@ function [C, sigma] = dataset3Params(X, y, Xval, yval)
 %EX6PARAMS returns your choice of C and sigma for Part 3 of the exercise
 %where you select the optimal (C, sigma) learning parameters to use for SVM
 %with RBF kernel
-%   [C, sigma] = EX6PARAMS(X, y, Xval, yval) returns your choice of C and
-%   sigma. You should complete this function to return the optimal C and
+%   [C, sigma] = EX6PARAMS(X, y, Xval, yval) returns your choice of C and
+%   sigma. You should complete this function to return the optimal C and
 %   sigma based on a cross-validation set.
 %
@@ -15,19 +15,46 @@ sigma = 0.3;
 % Instructions: Fill in this function to return the optimal C and sigma
 %               learning parameters found using the cross validation set.
 %               You can use svmPredict to predict the labels on the cross
-%               validation set. For example,
+%               validation set. For example,
 %                   predictions = svmPredict(model, Xval);
 %               will return the predictions on the cross validation set.
 %
-%  Note: You can compute the prediction error using
+%  Note: You can compute the prediction error using
 %        mean(double(predictions ~= yval))
 %
-
-
-
-
-
+grid_search = 0;  % set to 1 to re-run the cross-validation grid search
+
+if grid_search
+  % Grid search over all (C, sigma) pairs
+  load ex6data3.mat  % X, y, Xval, yval (redundant when passed as arguments)
+  %grid = [0.01, 0.03];  % smaller grid for quick tests
+  grid = [0.01, 0.03, 0.1, 0.3, 1, 3, 10, 30];  % candidates for both C and sigma
+  results = [];
+  for C = grid
+    for sigma = grid
+
+      fprintf('== C = %.2f, sigma = %.2f\n', C, sigma);
+      model = svmTrain(X, y, C, @(x1, x2) gaussianKernel(x1, x2, sigma));
+      predictions = svmPredict(model, Xval);
+      err = mean(double(predictions ~= yval));  % misclassification rate on the CV set
+      fprintf('error = %.2f\n\n', err);
+
+      results(end + 1, :) = [C, sigma, err];
+
+    end
+  end
+
+  [~, best_i] = min(results(:, 3));  % row with the lowest cross-validation error
+  C = results(best_i, 1);
+  sigma = results(best_i, 2);
+  err = results(best_i, 3);
+  fprintf('Best: C = %.2f, sigma = %.2f with error = %.2f\n', C, sigma, err);
+else
+  % Values found through the grid search above
+  C = 1.00;
+  sigma = 0.10;
+end
 
 % =========================================================================
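
For reference, a minimal sketch of how this function is consumed, mirroring the Part 3 driver in ex6.m; it assumes the exercise's svmTrain, gaussianKernel, and visualizeBoundary helpers are on the Octave/MATLAB path:

    % Pick (C, sigma) on the cross-validation set, then train the final model
    [C, sigma] = dataset3Params(X, y, Xval, yval);
    model = svmTrain(X, y, C, @(x1, x2) gaussianKernel(x1, x2, sigma));
    visualizeBoundary(X, y, model);  % plot the learned decision boundary

With grid_search left at 0, the function simply returns the cached best values (C = 1, sigma = 0.1) instead of retraining 64 models on every run.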