@@ -2,7 +2,15 @@
 % You should now implement the cost function for regularized linear
 % regression.
 %
+% Load Training Data
+fprintf('Loading and Visualizing Data ...\n')
+
+% Load from ex5data1:
+% You will have X, y, Xval, yval, Xtest, ytest in your environment
+load ('ex5data1.mat');
+
+% m = Number of examples
+m = size(X, 1);
 theta = [1 ; 1];
 J = linearRegCostFunction([ones(m, 1) X], y, theta, 1);
@@ -19,5 +27,25 @@ fprintf(['Gradient at theta = [1 ; 1]: [%f; %f] '...
 grad(1), grad(2));
 
 fprintf('Program paused. Press enter to continue.\n');
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+lambda = 0;
+[error_train, error_val] = ...
+    learningCurve([ones(m, 1) X], y, ...
+                  [ones(size(Xval, 1), 1) Xval], yval, ...
+                  lambda);
+
+plot(1:m, error_train, 1:m, error_val);
+title('Learning curve for linear regression')
+legend('Train', 'Cross Validation')
+xlabel('Number of training examples')
+ylabel('Error')
+axis([0 13 0 150])
+
+fprintf('# Training Examples\tTrain Error\tCross Validation Error\n');
+for i = 1:m
+  fprintf(' \t%d\t\t%f\t%f\n', i, error_train(i), error_val(i));
+end
 
-
+fprintf('Program paused. Press enter to continue.\n');
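
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Note: linearRegCostFunction is called in the patch above but its body is not
% part of it. The following is only a minimal sketch, assuming the usual
% regularized linear regression cost
%   J = 1/(2m) * sum((X*theta - y).^2) + lambda/(2m) * sum(theta(2:end).^2)
% and its gradient, with the bias term theta(1) left unregularized. The name
% and signature are taken from the call sites above; the body is an assumption.
function [J, grad] = linearRegCostFunction(X, y, theta, lambda)
  m = length(y);              % number of training examples
  h = X * theta;              % predictions (X already carries the bias column)
  J = (1 / (2 * m)) * sum((h - y) .^ 2) ...
      + (lambda / (2 * m)) * sum(theta(2:end) .^ 2);
  grad = (1 / m) * (X' * (h - y));
  grad(2:end) = grad(2:end) + (lambda / m) * theta(2:end);
end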
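%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% learningCurve is likewise only referenced above. A possible sketch, assuming
% it trains on the first i examples for i = 1..m with the given lambda and then
% reports the unregularized (lambda = 0) error on that training subset and on
% the full validation set. The closed-form training step below (regularized
% normal equation via pinv) is an assumption used to keep the sketch
% self-contained; it is not part of the patch.
function [error_train, error_val] = learningCurve(X, y, Xval, yval, lambda)
  m = size(X, 1);
  error_train = zeros(m, 1);
  error_val   = zeros(m, 1);
  for i = 1:m
    Xi = X(1:i, :);
    yi = y(1:i);
    % Assumed training step: regularized normal equation, with the bias
    % column (column 1 of X) left unregularized.
    n = size(Xi, 2);
    L = eye(n);
    L(1, 1) = 0;
    theta = pinv(Xi' * Xi + lambda * L) * (Xi' * yi);
    % Errors are evaluated without regularization.
    error_train(i) = linearRegCostFunction(Xi, yi, theta, 0);
    error_val(i)   = linearRegCostFunction(Xval, yval, theta, 0);
  end
end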