astron committed 7 years ago
parent commit 166ebeb4bb

+ 1 - 30
machine learning/machine-learning-ex5/ex5/learningCurve.m

@@ -22,36 +22,7 @@ error_train = zeros(m, 1);
 error_val   = zeros(m, 1);
 
 % ====================== YOUR CODE HERE ======================
-% Instructions: Fill in this function to return training errors in 
-%               error_train and the cross validation errors in error_val. 
-%               i.e., error_train(i) and 
-%               error_val(i) should give you the errors
-%               obtained after training on i examples.
-%
-% Note: You should evaluate the training error on the first i training
-%       examples (i.e., X(1:i, :) and y(1:i)).
-%
-%       For the cross-validation error, you should instead evaluate on
-%       the _entire_ cross validation set (Xval and yval).
-%
-% Note: If you are using your cost function (linearRegCostFunction)
-%       to compute the training and cross validation error, you should 
-%       call the function with the lambda argument set to 0. 
-%       Do note that you will still need to use lambda when running
-%       the training to obtain the theta parameters.
-%
-% Hint: You can loop over the examples with the following:
-%
-%       for i = 1:m
-%           % Compute train/cross validation errors using training examples 
-%           % X(1:i, :) and y(1:i), storing the result in 
-%           % error_train(i) and error_val(i)
-%           ....
-%           
-%       end
-%
-
-% ---------------------- Sample Solution ----------------------
+% ?????
 
 for i = 1:m
   X_train = X(1:i,:);

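The diff cuts the rewritten loop off after its first line; a minimal sketch of how a learningCurve loop of this shape is typically completed, assuming trainLinearReg and linearRegCostFunction behave as elsewhere in the exercise:

    for i = 1:m
      X_train = X(1:i, :);           % first i training examples only
      y_train = y(1:i);
      % train with the given lambda, but report both errors with lambda = 0
      theta = trainLinearReg(X_train, y_train, lambda);
      error_train(i) = linearRegCostFunction(X_train, y_train, theta, 0);
      error_val(i)   = linearRegCostFunction(Xval, yval, theta, 0);
    end

Per the instructions removed above, the training error uses only the first i examples, while the cross-validation error is always measured on the entire Xval/yval set.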
+ 282 - 5
machine learning/machine-learning-ex5/ex5/myex5.m

@@ -1,7 +1,29 @@
-% =========== Part 2: Regularized Linear Regression Cost =============
-%  You should now implement the cost function for regularized linear 
-%  regression. 
+%% Machine Learning Online Class
+%  Exercise 5 | Regularized Linear Regression and Bias-Variance
+%
+%  Instructions
+%  ------------
+% 
+%  This file contains code that helps you get started on the
+%  exercise. You will need to complete the following functions:
+%
+%     linearRegCostFunction.m
+%     learningCurve.m
+%     validationCurve.m
+%
+%  For this exercise, you will not need to change any code in this file,
+%  or any other files other than those mentioned above.
 %
+
+%% Initialization
+clear ; close all; clc
+
+%% =========== Part 1: Loading and Visualizing Data =============
+%  We start the exercise by first loading and visualizing the dataset. 
+%  The following code will load the dataset into your environment and plot
+%  the data.
+%
+
 % Load Training Data
 fprintf('Loading and Visualizing Data ...\n')
 
@@ -11,6 +33,19 @@ load ('ex5data1.mat');
 
 % m = Number of examples
 m = size(X, 1);
+
+% Plot training data
+plot(X, y, 'rx', 'MarkerSize', 10, 'LineWidth', 1.5);
+xlabel('Change in water level (x)');
+ylabel('Water flowing out of the dam (y)');
+
+fprintf('Program paused. Press enter to continue.\n');
+pause;
+
+%% =========== Part 2: Regularized Linear Regression Cost =============
+%  You should now implement the cost function for regularized linear 
+%  regression. 
+%
+
 theta = [1 ; 1];
 J = linearRegCostFunction([ones(m, 1) X], y, theta, 1);
 
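The value checked here is the usual regularized linear-regression cost, with the bias term theta(1) left out of the penalty; in Octave terms:

    J = (1 / (2 * m)) * sum((X * theta - y) .^ 2) ...
        + (lambda / (2 * m)) * sum(theta(2:end) .^ 2);   % X already carries the ones column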
@@ -19,6 +54,11 @@ fprintf(['Cost at theta = [1 ; 1]: %f '...
 
 fprintf('Program paused. Press enter to continue.\n');
 
+%% =========== Part 3: Regularized Linear Regression Gradient =============
+%  You should now implement the gradient for regularized linear 
+%  regression.
+%
+
 theta = [1 ; 1];
 [J, grad] = linearRegCostFunction([ones(m, 1) X], y, theta, 1);
 
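linearRegCostFunction itself is not shown in this commit; a minimal vectorized sketch consistent with the cost and gradient checks in this script (a sketch, not the author's file):

    function [J, grad] = linearRegCostFunction(X, y, theta, lambda)
      % Regularized linear regression cost and gradient.
      m = length(y);
      h = X * theta;                 % predictions for all examples at once
      reg = theta;
      reg(1) = 0;                    % bias term is not regularized
      J = (1 / (2 * m)) * sum((h - y) .^ 2) + (lambda / (2 * m)) * sum(reg .^ 2);
      grad = (X' * (h - y)) / m + (lambda / m) * reg;
    end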
@@ -27,8 +67,38 @@ fprintf(['Gradient at theta = [1 ; 1]:  [%f; %f] '...
          grad(1), grad(2));
 
 fprintf('Program paused. Press enter to continue.\n');
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+%% =========== Part 4: Train Linear Regression =============
+%  Once you have implemented the cost and gradient correctly, the
+%  trainLinearReg function will use your cost function to train 
+%  regularized linear regression.
+% 
+%  Write Up Note: The data is non-linear, so this will not give a great 
+%                 fit.
+%
+
+%  Train linear regression with lambda = 0
+lambda = 0;
+[theta] = trainLinearReg([ones(m, 1) X], y, lambda);
+
+%  Plot fit over the data
+plot(X, y, 'rx', 'MarkerSize', 10, 'LineWidth', 1.5);
+xlabel('Change in water level (x)');
+ylabel('Water flowing out of the dam (y)');
+hold on;
+plot(X, [ones(m, 1) X]*theta, '--', 'LineWidth', 2)
+hold off;
+
+fprintf('Program paused. Press enter to continue.\n');
+pause;
+
+
+%% =========== Part 5: Learning Curve for Linear Regression =============
+%  Next, you should implement the learningCurve function. 
+%
+%  Write Up Note: Since the model is underfitting the data, we expect to
+%                 see a graph with "high bias" -- Figure 3 in ex5.pdf 
+%
 
 lambda = 0;
 [error_train, error_val] = ...
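trainLinearReg, used throughout this script, is one of the files the exercise provides rather than something changed in this commit; it essentially wraps the cost function in the supplied fmincg optimizer:

    function [theta] = trainLinearReg(X, y, lambda)
      initial_theta = zeros(size(X, 2), 1);                        % start from zeros
      costFunction = @(t) linearRegCostFunction(X, y, t, lambda);  % cost + gradient handle
      options = optimset('MaxIter', 200, 'GradObj', 'on');
      theta = fmincg(costFunction, initial_theta, options);
    end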
@@ -49,3 +119,210 @@ for i = 1:m
 end
 
 fprintf('Program paused. Press enter to continue.\n');
+
+%% =========== Part 6: Feature Mapping for Polynomial Regression =============
+%  One solution to this is to use polynomial regression. You should now
+%  complete polyFeatures to map each example into its powers
+%
+
+p = 8;
+
+% Map X onto Polynomial Features and Normalize
+X_poly = polyFeatures(X, p);
+[X_poly, mu, sigma] = featureNormalize(X_poly);  % Normalize
+X_poly = [ones(m, 1), X_poly];                   % Add Ones
+
+% Map X_poly_test and normalize (using mu and sigma)
+X_poly_test = polyFeatures(Xtest, p);
+X_poly_test = bsxfun(@minus, X_poly_test, mu);
+X_poly_test = bsxfun(@rdivide, X_poly_test, sigma);
+X_poly_test = [ones(size(X_poly_test, 1), 1), X_poly_test];         % Add Ones
+
+% Map X_poly_val and normalize (using mu and sigma)
+X_poly_val = polyFeatures(Xval, p);
+X_poly_val = bsxfun(@minus, X_poly_val, mu);
+X_poly_val = bsxfun(@rdivide, X_poly_val, sigma);
+X_poly_val = [ones(size(X_poly_val, 1), 1), X_poly_val];           % Add Ones
+
+fprintf('Normalized Training Example 1:\n');
+fprintf('  %f  \n', X_poly(1, :));
+
+fprintf('\nProgram paused. Press enter to continue.\n');
+pause;
+
+
+
+%% =========== Part 7: Learning Curve for Polynomial Regression =============
+%  Now, you will get to experiment with polynomial regression with multiple
+%  values of lambda. The code below runs polynomial regression with 
+%  lambda = 0. You should try running the code with different values of
+%  lambda to see how the fit and learning curve change.
+%
+
+lambda = 0;
+[theta] = trainLinearReg(X_poly, y, lambda);
+
+% Plot training data and fit
+figure(1);
+plot(X, y, 'rx', 'MarkerSize', 10, 'LineWidth', 1.5);
+plotFit(min(X), max(X), mu, sigma, theta, p);
+xlabel('Change in water level (x)');
+ylabel('Water flowing out of the dam (y)');
+title (sprintf('Polynomial Regression Fit (lambda = %f)', lambda));
+
+figure(2);
+[error_train, error_val] = ...
+    learningCurve(X_poly, y, X_poly_val, yval, lambda);
+plot(1:m, error_train, 1:m, error_val);
+
+title(sprintf('Polynomial Regression Learning Curve (lambda = %f)', lambda));
+xlabel('Number of training examples')
+ylabel('Error')
+axis([0 13 0 100])
+legend('Train', 'Cross Validation')
+
+fprintf('Polynomial Regression (lambda = %f)\n\n', lambda);
+fprintf('# Training Examples\tTrain Error\tCross Validation Error\n');
+for i = 1:m
+    fprintf('  \t%d\t\t%f\t%f\n', i, error_train(i), error_val(i));
+end
+
+fprintf('Program paused. Press enter to continue.\n');
+pause;
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% lambda = 1100
+
+
+lambda = 1100;
+[theta] = trainLinearReg(X_poly, y, lambda);
+
+% Plot training data and fit
+figure(1);
+plot(X, y, 'rx', 'MarkerSize', 10, 'LineWidth', 1.5);
+plotFit(min(X), max(X), mu, sigma, theta, p);
+xlabel('Change in water level (x)');
+ylabel('Water flowing out of the dam (y)');
+title (sprintf('Polynomial Regression Fit (lambda = %f)', lambda));
+
+figure(2);
+[error_train, error_val] = ...
+    learningCurve(X_poly, y, X_poly_val, yval, lambda);
+plot(1:m, error_train, 1:m, error_val);
+
+title(sprintf('Polynomial Regression Learning Curve (lambda = %f)', lambda));
+xlabel('Number of training examples')
+ylabel('Error')
+axis([0 13 0 100])
+legend('Train', 'Cross Validation')
+
+fprintf('Polynomial Regression (lambda = %f)\n\n', lambda);
+fprintf('# Training Examples\tTrain Error\tCross Validation Error\n');
+for i = 1:m
+    fprintf('  \t%d\t\t%f\t%f\n', i, error_train(i), error_val(i));
+end
+
+fprintf('Program paused. Press enter to continue.\n');
+pause;
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% lambda = 1
+
+
+lambda = 1;
+[theta] = trainLinearReg(X_poly, y, lambda);
+
+% Plot training data and fit
+figure(1);
+plot(X, y, 'rx', 'MarkerSize', 10, 'LineWidth', 1.5);
+plotFit(min(X), max(X), mu, sigma, theta, p);
+xlabel('Change in water level (x)');
+ylabel('Water flowing out of the dam (y)');
+title (sprintf('Polynomial Regression Fit (lambda = %f)', lambda));
+
+figure(2);
+[error_train, error_val] = ...
+    learningCurve(X_poly, y, X_poly_val, yval, lambda);
+plot(1:m, error_train, 1:m, error_val);
+
+title(sprintf('Polynomial Regression Learning Curve (lambda = %f)', lambda));
+xlabel('Number of training examples')
+ylabel('Error')
+axis([0 13 0 100])
+legend('Train', 'Cross Validation')
+
+fprintf('Polynomial Regression (lambda = %f)\n\n', lambda);
+fprintf('# Training Examples\tTrain Error\tCross Validation Error\n');
+for i = 1:m
+    fprintf('  \t%d\t\t%f\t%f\n', i, error_train(i), error_val(i));
+end
+
+fprintf('Program paused. Press enter to continue.\n');
+pause;
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% lambda = 100
+
+
+lambda = 100;
+[theta] = trainLinearReg(X_poly, y, lambda);
+
+% Plot training data and fit
+figure(1);
+plot(X, y, 'rx', 'MarkerSize', 10, 'LineWidth', 1.5);
+plotFit(min(X), max(X), mu, sigma, theta, p);
+xlabel('Change in water level (x)');
+ylabel('Water flowing out of the dam (y)');
+title (sprintf('Polynomial Regression Fit (lambda = %f)', lambda));
+
+figure(2);
+[error_train, error_val] = ...
+    learningCurve(X_poly, y, X_poly_val, yval, lambda);
+plot(1:m, error_train, 1:m, error_val);
+
+title(sprintf('Polynomial Regression Learning Curve (lambda = %f)', lambda));
+xlabel('Number of training examples')
+ylabel('Error')
+axis([0 13 0 100])
+legend('Train', 'Cross Validation')
+
+fprintf('Polynomial Regression (lambda = %f)\n\n', lambda);
+fprintf('# Training Examples\tTrain Error\tCross Validation Error\n');
+for i = 1:m
+    fprintf('  \t%d\t\t%f\t%f\n', i, error_train(i), error_val(i));
+end
+
+fprintf('Program paused. Press enter to continue.\n');
+pause;
+
+
+%% =========== Part 8: Validation for Selecting Lambda =============
+%  You will now implement validationCurve to test various values of 
+%  lambda on a validation set. You will then use this to select the
+%  "best" lambda value.
+%
+
+[lambda_vec, error_train, error_val] = ...
+    validationCurve(X_poly, y, X_poly_val, yval);
+
+close all;
+plot(lambda_vec, error_train, lambda_vec, error_val);
+legend('Train', 'Cross Validation');
+xlabel('lambda');
+ylabel('Error');
+
+fprintf('lambda\t\tTrain Error\tValidation Error\n');
+for i = 1:length(lambda_vec)
+	fprintf(' %f\t%f\t%f\n', ...
+            lambda_vec(i), error_train(i), error_val(i));
+end
+
+fprintf('Program paused. Press enter to continue.\n');
+pause;
+
+validateFinal(X_poly, y, X_poly_test, ytest, 3);

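A note on the Part 6 block above: the test and validation sets are deliberately normalized with the training set's mu and sigma, never with their own statistics. The bsxfun pair is the explicit-broadcast idiom of that Octave era; with Octave's implicit broadcasting the same mapping can be written more directly, e.g.:

    X_poly_test = (polyFeatures(Xtest, p) - mu) ./ sigma;        % same as the bsxfun pair
    X_poly_test = [ones(size(X_poly_test, 1), 1), X_poly_test];  % add the bias column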
+ 6 - 1
machine learning/machine-learning-ex5/ex5/polyFeatures.m

@@ -15,7 +15,12 @@ X_poly = zeros(numel(X), p);
 %
 % 
 
-
+% fill each polynomial feature column directly instead of growing X
+for i = 1:p
+  X_poly(:, i) = X(:, 1) .^ i;
+endfor
 
 
 
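For a column-vector input this just stacks element-wise powers:

    polyFeatures([2; 3], 3)
    % ans =
    %    2    4    8
    %    3    9   27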

+ 1 - 1
machine learning/machine-learning-ex5/ex5/token.mat

@@ -1,4 +1,4 @@
-# Created by Octave 4.4.1, Tue Nov 20 18:37:16 2018 GMT <unknown@ICC-LARRYC-D1>
+# Created by Octave 4.2.2, Tue Nov 20 23:07:16 2018 HKT <astron@astron>
 # name: email
 # type: sq_string
 # elements: 1

+ 19 - 0
machine learning/machine-learning-ex5/ex5/validationCurve.m

@@ -16,6 +16,7 @@ lambda_vec = [0 0.001 0.003 0.01 0.03 0.1 0.3 1 3 10]';
 error_train = zeros(length(lambda_vec), 1);
 error_val = zeros(length(lambda_vec), 1);
 
+
 % ====================== YOUR CODE HERE ======================
 % Instructions: Fill in this function to return training errors in 
 %               error_train and the validation errors in error_val. The 
@@ -39,6 +40,24 @@ error_val = zeros(length(lambda_vec), 1);
 %
 %
 
+for i = 1:length(lambda_vec)
+
+  lambda = lambda_vec(i);
+
+  % lambda is used only to compute theta; for error_train and error_val
+  % it has to be 0, so the reported errors carry no regularization term.
+  % https://www.coursera.org/learn/machine-learning/discussions/weeks/6/threads/xBFU64t2EeWH0w7fQkp2-w
+
+  theta = trainLinearReg(X, y, lambda);
+  error_train(i) = linearRegCostFunction(X, y, theta, 0);
+  error_val(i)   = linearRegCostFunction(Xval, yval, theta, 0);
+
+endfor
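With both curves in hand, the "best" lambda in Part 8 is simply the one minimizing the cross-validation error, e.g.:

    [~, idx] = min(error_val);      % index of the lowest cross-validation error
    lambda_best = lambda_vec(idx);

which lines up with the lambda = 3 passed to validateFinal at the end of myex5.m above.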