GitHub Repository: hackassin/Coursera-Machine-Learning
Path: blob/master/Week 2/Programming Assignment-1/ex1.m
%% Machine Learning Online Class - Exercise 1: Linear Regression

%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  linear regression exercise. You will need to complete the
%  following functions in this exercise:
%
%     warmUpExercise.m
%     plotData.m
%     gradientDescent.m
%     computeCost.m
%     gradientDescentMulti.m
%     computeCostMulti.m
%     featureNormalize.m
%     normalEqn.m
%
%  For this exercise, you will not need to change any code in this file,
%  or any other files other than those mentioned above.
%
%  x refers to the population size in 10,000s
%  y refers to the profit in $10,000s
%

%% Initialization
clear; close all; clc

%% ==================== Part 1: Basic Function ====================
% Complete warmUpExercise.m
fprintf('Running warmUpExercise ... \n');
fprintf('5x5 Identity Matrix: \n');
warmUpExercise()

fprintf('Program paused. Press enter to continue.\n');
pause;

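% A minimal sketch of what warmUpExercise.m might contain (the assignment's
% own file is the authority; it is kept in a block comment here so ex1.m
% still runs unchanged). The function simply returns the 5x5 identity matrix:
%{
function A = warmUpExercise()
%WARMUPEXERCISE Example function that returns the 5x5 identity matrix
A = eye(5);   % eye(n) builds an n-by-n identity matrix
end
%}
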
%% ======================= Part 2: Plotting =======================
fprintf('Plotting Data ...\n')
data = load('ex1data1.txt');
X = data(:, 1); y = data(:, 2);
m = length(y); % number of training examples

% Plot Data
% Note: You have to complete the code in plotData.m
plotData(X, y);

fprintf('Program paused. Press enter to continue.\n');
pause;

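% A plausible plotData.m, sketched for reference (commented out so this
% script is unaffected); it scatter-plots profit against population with
% the axis labels the exercise text describes:
%{
function plotData(x, y)
%PLOTDATA Plots the data points x and y into a new figure
figure;                                  % open a new figure window
plot(x, y, 'rx', 'MarkerSize', 10);      % red crosses for the data points
ylabel('Profit in $10,000s');            % label the y-axis
xlabel('Population of City in 10,000s'); % label the x-axis
end
%}
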
%% =================== Part 3: Cost and Gradient descent ===================

X = [ones(m, 1), data(:,1)]; % Add a column of ones to x
theta = zeros(2, 1); % initialize fitting parameters

% Some gradient descent settings
iterations = 1500;
alpha = 0.01;

fprintf('\nTesting the cost function ...\n')
% compute and display initial cost
J = computeCost(X, y, theta);
fprintf('With theta = [0 ; 0]\nCost computed = %f\n', J);
fprintf('Expected cost value (approx) 32.07\n');

% further testing of the cost function
J = computeCost(X, y, [-1 ; 2]);
fprintf('\nWith theta = [-1 ; 2]\nCost computed = %f\n', J);
fprintf('Expected cost value (approx) 54.24\n');

fprintf('Program paused. Press enter to continue.\n');
pause;

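% For reference, a vectorized computeCost.m consistent with the expected
% values above (32.07 and 54.24) could look like the sketch below; it
% implements J = 1/(2m) * sum((X*theta - y).^2). Kept as a block comment
% since the real implementation belongs in its own file:
%{
function J = computeCost(X, y, theta)
%COMPUTECOST Compute cost for linear regression
m = length(y);                     % number of training examples
errors = X * theta - y;            % m-by-1 vector of prediction errors
J = (errors' * errors) / (2 * m);  % squared-error cost
end
%}
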
fprintf('\nRunning Gradient Descent ...\n')
% run gradient descent
theta = gradientDescent(X, y, theta, alpha, iterations);

% print theta to screen
fprintf('Theta found by gradient descent:\n');
fprintf('%f\n', theta);
fprintf('Expected theta values (approx)\n');
fprintf(' -3.6303\n 1.1664\n\n');

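% A batch gradient descent sketch consistent with the call above (again a
% block comment; the graded implementation belongs in gradientDescent.m).
% Each step moves theta against the gradient:
% theta = theta - (alpha/m) * X' * (X*theta - y)
%{
function [theta, J_history] = gradientDescent(X, y, theta, alpha, num_iters)
%GRADIENTDESCENT Performs gradient descent to learn theta
m = length(y);                   % number of training examples
J_history = zeros(num_iters, 1); % cost at every iteration, for debugging
for iter = 1:num_iters
    theta = theta - (alpha / m) * (X' * (X * theta - y)); % simultaneous update
    J_history(iter) = computeCost(X, y, theta);           % record the cost
end
end
%}
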
% Plot the linear fit
hold on; % keep previous plot visible
plot(X(:,2), X*theta, '-')
legend('Training data', 'Linear regression')
hold off % don't overlay any more plots on this figure

% Predict values for population sizes of 35,000 and 70,000
predict1 = [1, 3.5] * theta;
fprintf('For population = 35,000, we predict a profit of %f\n',...
    predict1*10000);
predict2 = [1, 7] * theta;
fprintf('For population = 70,000, we predict a profit of %f\n',...
    predict2*10000);

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ============= Part 4: Visualizing J(theta_0, theta_1) =============
fprintf('Visualizing J(theta_0, theta_1) ...\n')

% Grid over which we will calculate J
theta0_vals = linspace(-10, 10, 100);
theta1_vals = linspace(-1, 4, 100);

% initialize J_vals to a matrix of 0's
J_vals = zeros(length(theta0_vals), length(theta1_vals));

% Fill out J_vals
for i = 1:length(theta0_vals)
    for j = 1:length(theta1_vals)
        t = [theta0_vals(i); theta1_vals(j)];
        J_vals(i,j) = computeCost(X, y, t);
    end
end

% Because of the way meshgrids work in the surf command, we need to
% transpose J_vals before calling surf, or else the axes will be flipped
J_vals = J_vals';
% Surface plot
figure;
surf(theta0_vals, theta1_vals, J_vals)
xlabel('\theta_0'); ylabel('\theta_1');

% Contour plot
figure;
% Plot J_vals as 20 contours spaced logarithmically between 0.01 and 1000
contour(theta0_vals, theta1_vals, J_vals, logspace(-2, 3, 20))
xlabel('\theta_0'); ylabel('\theta_1');
hold on;
plot(theta(1), theta(2), 'rx', 'MarkerSize', 10, 'LineWidth', 2);
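
% Optional convergence check (an addition, not part of the original
% assignment script): if gradientDescent also returns J_history as in the
% sketch above, plotting it is a quick way to confirm the cost decreases
% steadily with alpha = 0.01.
%{
[theta, J_history] = gradientDescent(X, y, zeros(2, 1), alpha, iterations);
figure;
plot(1:numel(J_history), J_history, '-b', 'LineWidth', 2);
xlabel('Number of iterations'); ylabel('Cost J');
%}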