%% Machine Learning Online Class - Exercise 1: Linear Regression

%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  linear exercise. You will need to complete the following functions
%  in this exercise:
%
%     warmUpExercise.m
%     plotData.m
%     gradientDescent.m
%     computeCost.m
%     gradientDescentMulti.m
%     computeCostMulti.m
%     featureNormalize.m
%     normalEqn.m
%
%  For this exercise, you will not need to change any code in this file,
%  or any other files other than those mentioned above.
%
%  x refers to the population size in 10,000s
%  y refers to the profit in $10,000s
%

%% Initialization
clear ; close all; clc

%% ==================== Part 1: Basic Function ====================
% Complete warmUpExercise.m
fprintf('Running warmUpExercise ... \n');
fprintf('5x5 Identity Matrix: \n');
warmUpExercise()

fprintf('Program paused. Press enter to continue.\n');
pause;


%% ======================= Part 2: Plotting =======================
fprintf('Plotting Data ...\n')
data = load('ex1data1.txt');
X = data(:, 1); y = data(:, 2);
m = length(y); % number of training examples

% Plot Data
% Note: You have to complete the code in plotData.m
plotData(X, y);

fprintf('Program paused. Press enter to continue.\n');
pause;

%% =================== Part 3: Cost and Gradient descent ===================

X = [ones(m, 1), data(:,1)]; % Add a column of ones to x
theta = zeros(2, 1); % initialize fitting parameters

% Some gradient descent settings
iterations = 1500;
alpha = 0.01;

fprintf('\nTesting the cost function ...\n')
% compute and display initial cost
J = computeCost(X, y, theta);
fprintf('With theta = [0 ; 0]\nCost computed = %f\n', J);
fprintf('Expected cost value (approx) 32.07\n');

% further testing of the cost function
J = computeCost(X, y, [-1 ; 2]);
fprintf('\nWith theta = [-1 ; 2]\nCost computed = %f\n', J);
fprintf('Expected cost value (approx) 54.24\n');

fprintf('Program paused. Press enter to continue.\n');
pause;

fprintf('\nRunning Gradient Descent ...\n')
% run gradient descent
theta = gradientDescent(X, y, theta, alpha, iterations);

% print theta to screen
fprintf('Theta found by gradient descent:\n');
fprintf('%f\n', theta);
fprintf('Expected theta values (approx)\n');
fprintf(' -3.6303\n  1.1664\n\n');

% Plot the linear fit
hold on; % keep previous plot visible
plot(X(:,2), X*theta, '-')
legend('Training data', 'Linear regression')
hold off % don't overlay any more plots on this figure

% Predict values for population sizes of 35,000 and 70,000
predict1 = [1, 3.5] * theta;
fprintf('For population = 35,000, we predict a profit of %f\n',...
    predict1*10000);
predict2 = [1, 7] * theta;
fprintf('For population = 70,000, we predict a profit of %f\n',...
    predict2*10000);

fprintf('Program paused. Press enter to continue.\n');
pause;
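%% ===== Optional: inline sanity checks (not part of the exercise) =====
% A minimal vectorized sketch, assuming computeCost.m implements the
% standard unregularized squared-error cost
%   J(theta) = 1/(2m) * sum((X*theta - y).^2)
% and gradientDescent.m performs batch updates of the form
%   theta = theta - (alpha/m) * X' * (X*theta - y).
% The inline cost below should match computeCost(X, y, theta) at the
% learned theta; the update is applied to a copy, so theta is unchanged.
J_inline = (1 / (2 * m)) * sum((X * theta - y) .^ 2);
fprintf('Inline vectorized cost at learned theta: %f\n', J_inline);
theta_step = theta - (alpha / m) * (X' * (X * theta - y)); % one descent step, on a copy
fprintf('Theta after one further descent step: %f %f\n', theta_step(1), theta_step(2));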
%% ============= Part 4: Visualizing J(theta_0, theta_1) =============
fprintf('Visualizing J(theta_0, theta_1) ...\n')

% Grid over which we will calculate J
theta0_vals = linspace(-10, 10, 100);
theta1_vals = linspace(-1, 4, 100);

% initialize J_vals to a matrix of 0's
J_vals = zeros(length(theta0_vals), length(theta1_vals));

% Fill out J_vals
for i = 1:length(theta0_vals)
    for j = 1:length(theta1_vals)
        t = [theta0_vals(i); theta1_vals(j)];
        J_vals(i,j) = computeCost(X, y, t);
    end
end

% Because of the way meshgrids work in the surf command, we need to
% transpose J_vals before calling surf, or else the axes will be flipped
J_vals = J_vals';
% Surface plot
figure;
surf(theta0_vals, theta1_vals, J_vals)
xlabel('\theta_0'); ylabel('\theta_1');

% Contour plot
figure;
% Plot J_vals as 20 contours spaced logarithmically between 0.01 and 1000
contour(theta0_vals, theta1_vals, J_vals, logspace(-2, 3, 20))
xlabel('\theta_0'); ylabel('\theta_1');
hold on;
plot(theta(1), theta(2), 'rx', 'MarkerSize', 10, 'LineWidth', 2);
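
%% ===== Optional: closed-form comparison (not part of the exercise) =====
% A minimal sketch, not required by the assignment: for unregularized
% linear regression the optimum is also available in closed form via the
% normal equation, theta = (X'*X) \ (X'*y), the formula normalEqn.m is
% expected to implement for the multivariate exercise. The result should
% be close to the gradient-descent estimate printed in Part 3.
theta_normal = (X' * X) \ (X' * y);
fprintf('\nTheta from the normal equation (for comparison):\n');
fprintf(' %f\n', theta_normal);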