% ex5.m — Regularized Linear Regression and Bias vs. Variance
% (line-number gutter from the original 51-line listing removed)
%% =========== Part 2: Regularized Linear Regression Cost =============
%  Evaluate the regularized linear-regression cost and gradient at
%  theta = [1; 1] with lambda = 1 and compare against reference values.
%  Requires linearRegCostFunction.m on the path.

% Load Training Data
fprintf('Loading and Visualizing Data ...\n')

% Load from ex5data1:
% You will have X, y, Xval, yval, Xtest, ytest in your environment
load ('ex5data1.mat');

% m = Number of examples
m = size(X, 1);

% Cost at the test point theta = [1; 1] (bias column prepended to X).
theta = [1 ; 1];
J = linearRegCostFunction([ones(m, 1) X], y, theta, 1);

fprintf(['Cost at theta = [1 ; 1]: %f '...
         '\n(this value should be about 303.993192)\n'], J);

fprintf('Program paused. Press enter to continue.\n');
pause;  % restored: the message above promises a pause, but it was lost in transcription

% Gradient at the same test point, using the same lambda = 1.
theta = [1 ; 1];
[J, grad] = linearRegCostFunction([ones(m, 1) X], y, theta, 1);

fprintf(['Gradient at theta = [1 ; 1]: [%f; %f] '...
         '\n(this value should be about [-15.303016; 598.250744])\n'], ...
        grad(1), grad(2));

fprintf('Program paused. Press enter to continue.\n');
pause;  % restored to match the printed message
%% =========== Learning Curve for Linear Regression =============
%  Compute train/cross-validation error for increasing training-set
%  sizes (lambda = 0), plot both curves, and print a table of values.
%  Requires learningCurve.m on the path; uses X, y, Xval, yval, m
%  loaded earlier in this script.

lambda = 0;
[error_train, error_val] = ...
    learningCurve([ones(m, 1) X], y, ...
                  [ones(size(Xval, 1), 1) Xval], yval, ...
                  lambda);

% Plot both error curves on the same axes.
plot(1:m, error_train, 1:m, error_val);
title('Learning curve for linear regression')
legend('Train', 'Cross Validation')
xlabel('Number of training examples')
ylabel('Error')
axis([0 13 0 150])

% Tabulate the same numbers to the console.
fprintf('# Training Examples\tTrain Error\tCross Validation Error\n');
for i = 1:m
    fprintf(' \t%d\t\t%f\t%f\n', i, error_train(i), error_val(i));
end

fprintf('Program paused. Press enter to continue.\n');
pause;  % restored: the message above promises a pause, but it was lost in transcription
|