dataset3Params.m

function [C, sigma] = dataset3Params(X, y, Xval, yval)
%DATASET3PARAMS returns your choice of C and sigma for Part 3 of the exercise
%where you select the optimal (C, sigma) learning parameters to use for SVM
%with RBF kernel
%   [C, sigma] = DATASET3PARAMS(X, y, Xval, yval) returns your choice of C and
%   sigma. You should complete this function to return the optimal C and
%   sigma based on a cross-validation set.
%
% You need to return the following variables correctly.
C = 1;
sigma = 0.3;
% ====================== YOUR CODE HERE ======================
% Instructions: Fill in this function to return the optimal C and sigma
%               learning parameters found using the cross validation set.
%               You can use svmPredict to predict the labels on the cross
%               validation set. For example,
%                   predictions = svmPredict(model, Xval);
%               will return the predictions on the cross validation set.
%
%  Note: You can compute the prediction error using
%        mean(double(predictions ~= yval))
%
% Candidate values to try for C and sigma.
C_set = [0.01, 0.03, 0.1, 0.3, 1, 3, 10, 30];
sigma_set = C_set;
n = columns(C_set);

% pred_error is an (n*n)-by-5 matrix (64 rows here) that stores one result
% per (C, sigma) pair; each row contains
%   [i, j, prediction error, C_set(i), sigma_set(j)]
pred_error = zeros(n * n, 5);
k = 1;

% 'x1' and 'x2' are dummy parameters: they are filled in at runtime when
% svmTrain() calls the kernel function. (Taken from the Discussion Forums,
% Week 7 FAQ for programming exercise 6.)
x1 = x2 = 0;
for i = 1:n
  for j = 1:n
    C = C_set(i);
    sigma = sigma_set(j);
    fprintf("run number: C[%d]=%f sigma[%d]=%f ", i, C, j, sigma);

    % Train on the training set, then measure error on the cross-validation set.
    model = svmTrain(X, y, C, @(x1, x2) gaussianKernel(x1, x2, sigma));
    predictions = svmPredict(model, Xval);
    err = mean(double(predictions ~= yval));  % 'err' avoids shadowing the built-in error()
    fprintf(" error = %f\n", err);

    pred_error(k, :) = [i j err C sigma];
    k = k + 1;
  endfor
endfor
% Pick the (C, sigma) pair with the smallest cross-validation error.
[v, idx] = min(pred_error(:, 3));
% pred_error(idx, :)
C = pred_error(idx, 4);
sigma = pred_error(idx, 5);

fprintf("------------------------------------\n");
fprintf("min error is %f when C = %f and sigma = %f\n", pred_error(idx, 3), C, sigma);
% =========================================================================

end
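
% Example usage, a minimal sketch: in the exercise, the driver script calls this
% function on the third dataset and then retrains with the selected parameters.
% The lines below assume, as in the exercise scripts, that ex6data3.mat provides
% X, y, Xval, and yval.
%
%   load('ex6data3.mat');
%   [C, sigma] = dataset3Params(X, y, Xval, yval);
%   model = svmTrain(X, y, C, @(x1, x2) gaussianKernel(x1, x2, sigma));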