ex1_multi.m

%% Machine Learning Online Class
% Exercise 1: Linear regression with multiple variables
%
% Instructions
% ------------
%
% This file contains code that helps you get started on the
% linear regression exercise.
%
% You will need to complete the following functions in this
% exercise:
%
% warmUpExercise.m
% plotData.m
% gradientDescent.m
% computeCost.m
% gradientDescentMulti.m
% computeCostMulti.m
% featureNormalize.m
% normalEqn.m
%
% For this part of the exercise, you will need to change some
% parts of the code below for various experiments (e.g., changing
% learning rates).
%
%% Initialization

%% ================ Part 1: Feature Normalization ================

%% Clear and Close Figures
clear ; close all; clc

fprintf('Loading data ...\n');

%% Load Data
data = load('ex1data2.txt');
X = data(:, 1:2);
y = data(:, 3);
m = length(y);

% Print out some data points
fprintf('First 10 examples from the dataset: \n');
fprintf(' x = [%.0f %.0f], y = %.0f \n', [X(1:10,:) y(1:10,:)]');

%% ================ Plotting ================
fprintf('Plotting Data ...\n')
subplot(2,1,1);
plot(X(:,1), y, 'rx', 'MarkerSize', 10);
ylabel('price');
xlabel('house size in sqft');

subplot(2,1,2);
% Plot before labeling: plot() resets the axes, which would erase
% labels set beforehand.
plot(X(:,2), y, 'bo', 'MarkerSize', 10);
ylabel('price');
xlabel('num bedrooms');
%fprintf('Program paused. Press enter to continue.\n');
%pause;

% Scale features and set them to zero mean
fprintf('Normalizing Features ...\n');

[X, mu, sigma] = featureNormalize(X);

fprintf('First 10 examples after normalization: \n');
fprintf(' x = [%f %f]\n', X(1:10,:)');
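
% For reference, a minimal featureNormalize sketch (assuming the usual
% zero-mean, unit-std scaling; your implementation may differ):
%   mu = mean(X);  sigma = std(X);
%   X_norm = (X - mu) ./ sigma;   % implicit expansion: MATLAB R2016b+/Octave;
%                                 % older versions need bsxfun(@minus, X, mu)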

% Add intercept term to X
X = [ones(m, 1) X];

%% ================ Part 2: Gradient Descent ================

% ====================== YOUR CODE HERE ======================
% Instructions: We have provided you with the following starter
%               code that runs gradient descent with a particular
%               learning rate (alpha).
%
%               Your task is to first make sure that your functions -
%               computeCost and gradientDescent already work with
%               this starter code and support multiple variables.
%
%               After that, try running gradient descent with
%               different values of alpha and see which one gives
%               you the best result.
%
%               Finally, you should complete the code at the end
%               to predict the price of a 1650 sq-ft, 3 br house.
%
% Hint: By using the 'hold on' command, you can plot multiple
%       graphs on the same figure.
%
% Hint: At prediction, make sure you do the same feature normalization.
%
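% For reference, computeCostMulti is typically the vectorized cost
% J(theta) = (1/(2m)) * (X*theta - y)' * (X*theta - y) -- a sketch, not
% necessarily your implementation:
%   errors = X * theta - y;
%   J = (errors' * errors) / (2 * m);
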
fprintf('Running gradient descent ...\n');

% Choose some alpha value
alpha = 0.1;
num_iters = 400;

% Init Theta and Run Gradient Descent
theta = zeros(3, 1);
[theta, J_history] = gradientDescentMulti(X, y, theta, alpha, num_iters);
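
% For reference, the update inside gradientDescentMulti is typically the
% vectorized batch rule below (a sketch under standard assumptions):
%   J_history = zeros(num_iters, 1);
%   for iter = 1:num_iters
%       theta = theta - (alpha / m) * (X' * (X * theta - y));
%       J_history(iter) = computeCostMulti(X, y, theta);
%   end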

% Plot the convergence graph
figure;
plot(1:numel(J_history), J_history, '-b', 'LineWidth', 2);
xlabel('Number of iterations');
ylabel('Cost J');
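
% To compare learning rates (per the alpha hint above), one could overlay
% convergence curves on this figure -- a sketch with illustrative values:
%   hold on;
%   [~, J_slow] = gradientDescentMulti(X, y, zeros(3, 1), 0.03, num_iters);
%   plot(1:numel(J_slow), J_slow, '-r', 'LineWidth', 2);
%   legend('alpha = 0.1', 'alpha = 0.03');
%   hold off;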

% Display gradient descent's result
fprintf('Theta computed from gradient descent: \n');
fprintf(' %f \n', theta);
fprintf('\n');

% Estimate the price of a 1650 sq-ft, 3 br house
% ====================== YOUR CODE HERE ======================
% Recall that the first column of X is all-ones. Thus, it does
% not need to be normalized.
%price = 0; % You should change this

% Normalize the query with the training-set mu and sigma, then prepend 1
X_norm = [1 ([1650 3] - mu) ./ sigma];
price = X_norm * theta;
fprintf(['Predicted price of a 1650 sq-ft, 3 br house ' ...
         '(using gradient descent):\n $%f\n'], price);

X_norm = [1 ([2000 4] - mu) ./ sigma];
price = X_norm * theta;
fprintf(['Predicted price of a 2000 sq-ft, 4 br house ' ...
         '(using gradient descent):\n $%f\n'], price);
% ============================================================

%fprintf('Program paused. Press enter to continue.\n');
%pause;

%% ================ Part 3: Normal Equations ================

fprintf('Solving with normal equations...\n');

% ====================== YOUR CODE HERE ======================
% Instructions: The following code computes the closed form
%               solution for linear regression using the normal
%               equations. You should complete the code in
%               normalEqn.m
%
%               After doing so, you should complete this code
%               to predict the price of a 1650 sq-ft, 3 br house.
%

%% Load Data
data = csvread('ex1data2.txt');
X = data(:, 1:2);
y = data(:, 3);
m = length(y);

% Add intercept term to X
X = [ones(m, 1) X];

% Calculate the parameters from the normal equation
theta = normalEqn(X, y);
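
% For reference, normalEqn typically implements the closed form
% theta = (X'X)^(-1) X'y -- a sketch, using pinv for robustness to a
% singular X'X:
%   theta = pinv(X' * X) * (X' * y);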

% Display normal equation's result
fprintf('Theta computed from the normal equations: \n');
fprintf(' %f \n', theta);
fprintf('\n');

% Estimate the price of a 1650 sq-ft, 3 br house
% ====================== YOUR CODE HERE ======================
% No normalization here: theta was computed on the raw features
price = [1 1650 3] * theta;
% ============================================================

fprintf(['Predicted price of a 1650 sq-ft, 3 br house ' ...
         '(using normal equations):\n $%f\n'], price);
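
% Sanity check: this closed-form prediction should closely match the
% gradient descent estimate above, assuming alpha and num_iters let
% gradient descent converge.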