mypca.m

%% Machine Learning Online Class
% Exercise 7 | Principal Component Analysis and K-Means Clustering
%
% Instructions
% ------------
%
% This file contains code that helps you get started on the
% exercise. You will need to complete the following functions:
%
%    pca.m
%    projectData.m
%    recoverData.m
%    computeCentroids.m
%    findClosestCentroids.m
%    kMeansInitCentroids.m
%
% For this exercise, you will not need to change any code in this file,
% or any other files other than those mentioned above.
%

%% Initialization
clear ; close all; clc
%% ================== Part 1: Load Example Dataset ===================
% We start this exercise by using a small dataset that is easy to
% visualize
%
fprintf('Visualizing example dataset for PCA.\n\n');

% The following command loads the dataset. You should now have the
% variable X in your environment
load ('ex7data1.mat');

% Visualize the example dataset
plot(X(:, 1), X(:, 2), 'bo');
axis([0.5 6.5 2 8]); axis square;

fprintf('Program paused. Press enter to continue.\n');
pause;
%% =============== Part 2: Principal Component Analysis ===============
% You should now implement PCA, a dimension reduction technique. You
% should complete the code in pca.m
%
fprintf('\nRunning PCA on example dataset.\n\n');

% Before running PCA, it is important to first normalize X
[X_norm, mu, sigma] = featureNormalize(X);
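% featureNormalize.m is provided with the exercise; for reference, a minimal
% sketch of the usual zero-mean, unit-standard-deviation scaling it performs
% (an assumption; your provided copy may differ in detail):
%
%   function [X_norm, mu, sigma] = featureNormalize(X)
%     mu = mean(X);                               % per-feature mean
%     X_norm = bsxfun(@minus, X, mu);             % subtract the mean
%     sigma = std(X_norm);                        % per-feature standard deviation
%     X_norm = bsxfun(@rdivide, X_norm, sigma);   % scale to unit std
%   end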

% Run PCA
[U, S] = pca(X_norm);
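% One standard way to complete pca.m: form the covariance matrix of the
% already-normalized data and take its singular value decomposition. A
% minimal sketch of that approach (to be placed in pca.m, not here):
%
%   function [U, S] = pca(X)
%     [m, n] = size(X);
%     Sigma = (1 / m) * (X' * X);   % n x n covariance matrix
%     [U, S, ~] = svd(Sigma);       % columns of U: principal components;
%   end                             % diag(S): the corresponding variances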

% mu, the mean of each feature, was returned by featureNormalize above

% Draw the eigenvectors centered at the mean of the data. These lines show
% the directions of maximum variation in the dataset.
hold on;
drawLine(mu, mu + 1.5 * S(1,1) * U(:,1)', '-k', 'LineWidth', 2);
drawLine(mu, mu + 1.5 * S(2,2) * U(:,2)', '-k', 'LineWidth', 2);
hold off;
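% drawLine is a plotting helper supplied with the exercise; presumably it
% just draws a segment between two points, roughly:
%
%   function drawLine(p1, p2, varargin)
%     plot([p1(1) p2(1)], [p1(2) p2(2)], varargin{:});   % pass styles through
%   end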
fprintf('Top eigenvector: \n');
fprintf(' U(:,1) = %f %f \n', U(1,1), U(2,1));
fprintf('\n(you should expect to see -0.707107 -0.707107)\n');

fprintf('Program paused. Press enter to continue.\n');
pause;
%% =================== Part 3: Dimension Reduction ===================
% You should now implement the projection step to map the data onto the
% first k eigenvectors. The code will then plot the data in this reduced
% dimensional space. This will show you what the data looks like when
% using only the corresponding eigenvectors to reconstruct it.
%
% You should complete the code in projectData.m
%
fprintf('\nDimension reduction on example dataset.\n\n');

% Plot the normalized dataset (returned from featureNormalize)
plot(X_norm(:, 1), X_norm(:, 2), 'bo');
axis([-4 3 -4 3]); axis square;

% Project the data onto K = 1 dimension
K = 1;
Z = projectData(X_norm, U, K);
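% A minimal sketch of projectData.m, assuming the standard projection onto
% the first K columns of U (this belongs in projectData.m, not here):
%
%   function Z = projectData(X, U, K)
%     U_reduce = U(:, 1:K);   % top K principal components
%     Z = X * U_reduce;       % m x K matrix of projected examples
%   end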
fprintf('Projection of the first example: %f\n', Z(1));
fprintf('\n(this value should be about 1.481274)\n\n');

X_rec = recoverData(Z, U, K);
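% A minimal sketch of recoverData.m, assuming the usual approximate
% reconstruction back into the original space (this belongs in recoverData.m):
%
%   function X_rec = recoverData(Z, U, K)
%     X_rec = Z * U(:, 1:K)';   % m x n approximation of the original data
%   end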
fprintf('Approximation of the first example: %f %f\n', X_rec(1, 1), X_rec(1, 2));
fprintf('\n(this value should be about -1.047419 -1.047419)\n\n');

% Draw lines connecting the projected points to the original points
hold on;
plot(X_rec(:, 1), X_rec(:, 2), 'ro');
for i = 1:size(X_norm, 1)
    drawLine(X_norm(i,:), X_rec(i,:), '--k', 'LineWidth', 1);
end
hold off

fprintf('Program paused. Press enter to continue.\n');
pause;