GitHub Repository: hackassin/Coursera-Machine-Learning
Path: blob/master/Week 6/Programming Assignment - 5/machine-learning-ex5/ex5/ex5.m

%% Machine Learning Online Class
%  Exercise 5 | Regularized Linear Regression and Bias-Variance
%
%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  exercise. You will need to complete the following functions:
%
%     linearRegCostFunction.m
%     learningCurve.m
%     validationCurve.m
%
%  For this exercise, you will not need to change any code in this file,
%  or any other files other than those mentioned above.
%

%% Initialization
clear ; close all; clc

%% =========== Part 1: Loading and Visualizing Data =============
%  We start the exercise by first loading and visualizing the dataset.
%  The following code will load the dataset into your environment and plot
%  the data.
%

% Load Training Data
fprintf('Loading and Visualizing Data ...\n')

% Load from ex5data1:
% You will have X, y, Xval, yval, Xtest, ytest in your environment
load ('ex5data1.mat');

% m = Number of examples
m = size(X, 1);

% Plot training data
plot(X, y, 'rx', 'MarkerSize', 10, 'LineWidth', 1.5);
xlabel('Change in water level (x)');
ylabel('Water flowing out of the dam (y)');

fprintf('Program paused. Press enter to continue.\n');
pause;

%% =========== Part 2: Regularized Linear Regression Cost =============
%  You should now implement the cost function for regularized linear
%  regression.
%
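
% For reference, a minimal vectorized sketch of the regularized cost that
% your linearRegCostFunction.m could compute (commented out so this driver
% script still runs unchanged; names follow the call below, where X already
% contains the bias column of ones):
%
%   m = length(y);                                    % number of examples
%   h = X * theta;                                    % predictions, m x 1
%   J = (1 / (2 * m)) * sum((h - y) .^ 2) ...         % squared-error term
%       + (lambda / (2 * m)) * sum(theta(2:end) .^ 2);  % no penalty on theta(1)
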
theta = [1 ; 1];
J = linearRegCostFunction([ones(m, 1) X], y, theta, 1);

fprintf(['Cost at theta = [1 ; 1]: %f '...
         '\n(this value should be about 303.993192)\n'], J);

fprintf('Program paused. Press enter to continue.\n');
pause;

%% =========== Part 3: Regularized Linear Regression Gradient =============
%  You should now implement the gradient for regularized linear
%  regression.
%
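
% A matching sketch of the regularized gradient for linearRegCostFunction.m
% (again commented out and illustrative only): the error term applies to
% every component, while the penalty term skips theta(1).
%
%   h = X * theta;
%   grad = (1 / m) * (X' * (h - y));
%   grad(2:end) = grad(2:end) + (lambda / m) * theta(2:end);
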
theta = [1 ; 1];
[J, grad] = linearRegCostFunction([ones(m, 1) X], y, theta, 1);

fprintf(['Gradient at theta = [1 ; 1]: [%f; %f] '...
         '\n(this value should be about [-15.303016; 598.250744])\n'], ...
         grad(1), grad(2));

fprintf('Program paused. Press enter to continue.\n');
pause;


%% =========== Part 4: Train Linear Regression =============
%  Once you have implemented the cost and gradient correctly, the
%  trainLinearReg function will use your cost function to train
%  regularized linear regression.
%
%  Write Up Note: The data is non-linear, so this will not give a great
%  fit.
%
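
% The provided trainLinearReg.m is not a file you edit; in the course
% scaffold it simply hands your cost function to an optimizer, roughly along
% these lines (a commented-out sketch -- the exact optimizer and options are
% assumptions):
%
%   initial_theta = zeros(size(X, 2), 1);
%   costFunction  = @(t) linearRegCostFunction(X, y, t, lambda);
%   options       = optimset('MaxIter', 200, 'GradObj', 'on');
%   theta         = fmincg(costFunction, initial_theta, options);
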
% Train linear regression with lambda = 0
lambda = 0;
[theta] = trainLinearReg([ones(m, 1) X], y, lambda);

% Plot fit over the data
plot(X, y, 'rx', 'MarkerSize', 10, 'LineWidth', 1.5);
xlabel('Change in water level (x)');
ylabel('Water flowing out of the dam (y)');
hold on;
plot(X, [ones(m, 1) X]*theta, '--', 'LineWidth', 2)
hold off;

fprintf('Program paused. Press enter to continue.\n');
pause;


%% =========== Part 5: Learning Curve for Linear Regression =============
%  Next, you should implement the learningCurve function.
%
%  Write Up Note: Since the model is underfitting the data, we expect to
%  see a graph with "high bias" -- Figure 3 in ex5.pdf
%
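
% A hedged sketch of the core loop inside learningCurve.m (commented out;
% indexing and names are illustrative). The i-th entries hold the errors of
% a model trained on the first i examples; the errors themselves are
% evaluated with lambda = 0, even though training uses the given lambda:
%
%   for i = 1:m
%       theta_i        = trainLinearReg(X(1:i, :), y(1:i), lambda);
%       error_train(i) = linearRegCostFunction(X(1:i, :), y(1:i), theta_i, 0);
%       error_val(i)   = linearRegCostFunction(Xval, yval, theta_i, 0);
%   end
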
lambda = 0;
[error_train, error_val] = ...
    learningCurve([ones(m, 1) X], y, ...
                  [ones(size(Xval, 1), 1) Xval], yval, ...
                  lambda);

plot(1:m, error_train, 1:m, error_val);
title('Learning curve for linear regression')
legend('Train', 'Cross Validation')
xlabel('Number of training examples')
ylabel('Error')
axis([0 13 0 150])

fprintf('# Training Examples\tTrain Error\tCross Validation Error\n');
for i = 1:m
    fprintf(' \t%d\t\t%f\t%f\n', i, error_train(i), error_val(i));
end

fprintf('Program paused. Press enter to continue.\n');
pause;

%% =========== Part 6: Feature Mapping for Polynomial Regression =============
%  One solution to this is to use polynomial regression. You should now
%  complete polyFeatures to map each example into its powers
%
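
% A minimal sketch of polyFeatures.m (commented out; assumes X is an m x 1
% column vector): column j of the result is the j-th power of X, i.e.
% x -> [x, x.^2, ..., x.^p].
%
%   X_poly = zeros(numel(X), p);
%   for j = 1:p
%       X_poly(:, j) = X(:) .^ j;
%   end
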
p = 8;

% Map X onto Polynomial Features and Normalize
X_poly = polyFeatures(X, p);
[X_poly, mu, sigma] = featureNormalize(X_poly);  % Normalize
X_poly = [ones(m, 1), X_poly];                   % Add Ones

% Map X_poly_test and normalize (using mu and sigma)
X_poly_test = polyFeatures(Xtest, p);
X_poly_test = bsxfun(@minus, X_poly_test, mu);
X_poly_test = bsxfun(@rdivide, X_poly_test, sigma);
X_poly_test = [ones(size(X_poly_test, 1), 1), X_poly_test];  % Add Ones

% Map X_poly_val and normalize (using mu and sigma)
X_poly_val = polyFeatures(Xval, p);
X_poly_val = bsxfun(@minus, X_poly_val, mu);
X_poly_val = bsxfun(@rdivide, X_poly_val, sigma);
X_poly_val = [ones(size(X_poly_val, 1), 1), X_poly_val];     % Add Ones

fprintf('Normalized Training Example 1:\n');
fprintf(' %f \n', X_poly(1, :));

fprintf('\nProgram paused. Press enter to continue.\n');
pause;


%% =========== Part 7: Learning Curve for Polynomial Regression =============
%  Now, you will get to experiment with polynomial regression with multiple
%  values of lambda. The code below runs polynomial regression with
%  lambda = 0. You should try running the code with different values of
%  lambda to see how the fit and learning curve change.
%
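
% For example, only the assignment just below needs to change to rerun the
% experiment; lambda = 1 should give a much more reasonable fit, while
% lambda = 100 is heavily over-regularized (compare with the figures in
% ex5.pdf):
%
%   lambda = 1;     % or lambda = 100;
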
lambda = 0;
[theta] = trainLinearReg(X_poly, y, lambda);

% Plot training data and fit
figure(1);
plot(X, y, 'rx', 'MarkerSize', 10, 'LineWidth', 1.5);
plotFit(min(X), max(X), mu, sigma, theta, p);
xlabel('Change in water level (x)');
ylabel('Water flowing out of the dam (y)');
title(sprintf('Polynomial Regression Fit (lambda = %f)', lambda));

figure(2);
[error_train, error_val] = ...
    learningCurve(X_poly, y, X_poly_val, yval, lambda);
plot(1:m, error_train, 1:m, error_val);

title(sprintf('Polynomial Regression Learning Curve (lambda = %f)', lambda));
xlabel('Number of training examples')
ylabel('Error')
axis([0 13 0 100])
legend('Train', 'Cross Validation')

fprintf('Polynomial Regression (lambda = %f)\n\n', lambda);
fprintf('# Training Examples\tTrain Error\tCross Validation Error\n');
for i = 1:m
    fprintf(' \t%d\t\t%f\t%f\n', i, error_train(i), error_val(i));
end

fprintf('Program paused. Press enter to continue.\n');
pause;

%% =========== Part 8: Validation for Selecting Lambda =============
%  You will now implement validationCurve to test various values of
%  lambda on a validation set. You will then use this to select the
%  "best" lambda value.
%
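
% A hedged sketch of the core of validationCurve.m (commented out). The
% exercise suggests lambda_vec = [0 0.001 0.003 0.01 0.03 0.1 0.3 1 3 10]';
% each model is trained with its own lambda, but both errors are again
% evaluated with lambda = 0:
%
%   for i = 1:length(lambda_vec)
%       theta_i        = trainLinearReg(X, y, lambda_vec(i));
%       error_train(i) = linearRegCostFunction(X, y, theta_i, 0);
%       error_val(i)   = linearRegCostFunction(Xval, yval, theta_i, 0);
%   end
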
[lambda_vec, error_train, error_val] = ...
    validationCurve(X_poly, y, X_poly_val, yval);

close all;
plot(lambda_vec, error_train, lambda_vec, error_val);
legend('Train', 'Cross Validation');
xlabel('lambda');
ylabel('Error');

fprintf('lambda\t\tTrain Error\tValidation Error\n');
for i = 1:length(lambda_vec)
    fprintf(' %f\t%f\t%f\n', ...
            lambda_vec(i), error_train(i), error_val(i));
end

fprintf('Program paused. Press enter to continue.\n');
pause;