GitHub Repository: hackassin/Coursera-Machine-Learning
Path: blob/master/Week 9/Programming Assignment - 8/ex8/ex8_cofi.m

%% Machine Learning Online Class
%  Exercise 8 | Anomaly Detection and Collaborative Filtering
%
%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  exercise. You will need to complete the following functions:
%
%     estimateGaussian.m
%     selectThreshold.m
%     cofiCostFunc.m
%
%  For this exercise, you will not need to change any code in this file,
%  or any other files other than those mentioned above.
%

%% =============== Part 1: Loading movie ratings dataset ================
%  You will start by loading the movie ratings dataset to understand the
%  structure of the data.
%
fprintf('Loading movie ratings dataset.\n\n');

%  Load data
load('ex8_movies.mat');

%  Y is a 1682x943 matrix, containing ratings (1-5) of 1682 movies given by
%  943 users
%
%  R is a 1682x943 matrix, where R(i,j) = 1 if and only if user j gave a
%  rating to movie i

%  From the matrix, we can compute statistics like the average rating.
fprintf('Average rating for movie 1 (Toy Story): %f / 5\n\n', ...
        mean(Y(1, R(1, :))));

%  We can "visualize" the ratings matrix by plotting it with imagesc
imagesc(Y);
ylabel('Movies');
xlabel('Users');

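%  As an illustrative aside (not part of the assignment), the same masked
%  average extends to every movie at once: sum each movie's rated entries
%  and divide by how many ratings it received (movies with no ratings are
%  guarded against division by zero and end up with 0 here).
avg_per_movie = sum(Y .* R, 2) ./ max(sum(R, 2), 1);
fprintf('Mean of the per-movie average ratings: %f / 5\n', ...
        mean(avg_per_movie(sum(R, 2) > 0)));
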
fprintf('\nProgram paused. Press enter to continue.\n');
pause;

%% ============ Part 2: Collaborative Filtering Cost Function ===========
%  You will now implement the cost function for collaborative filtering.
%  To help you debug your cost function, we have included a set of weights
%  (X, Theta) that we trained on this dataset. Specifically, you should
%  complete the code in cofiCostFunc.m to return J.

%  Load pre-trained weights (X, Theta, num_users, num_movies, num_features)
load('ex8_movieParams.mat');

%  Reduce the data set size so that this runs faster
num_users = 4; num_movies = 5; num_features = 3;
X = X(1:num_movies, 1:num_features);
Theta = Theta(1:num_users, 1:num_features);
Y = Y(1:num_movies, 1:num_users);
R = R(1:num_movies, 1:num_users);

%  Evaluate cost function
J = cofiCostFunc([X(:) ; Theta(:)], Y, R, num_users, num_movies, ...
                 num_features, 0);

fprintf(['Cost at loaded parameters: %f '...
         '\n(this value should be about 22.22)\n'], J);

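%  For reference, a vectorized sketch of the unregularized cost that
%  cofiCostFunc.m is expected to return (assuming the standard formulation
%  from the exercise text); J_check should agree with J printed above:
J_check = (1 / 2) * sum(sum(((X * Theta' - Y) .* R) .^ 2));
fprintf('Vectorized sketch of the same cost: %f\n', J_check);
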
fprintf('\nProgram paused. Press enter to continue.\n');
pause;

%% ============== Part 3: Collaborative Filtering Gradient ==============
%  Once your cost function matches up with ours, you should now implement
%  the collaborative filtering gradient function. Specifically, you should
%  complete the code in cofiCostFunc.m to return the grad argument.
%
fprintf('\nChecking Gradients (without regularization) ... \n');

%  Check gradients by running checkCostFunction
checkCostFunction;
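
%  Sketch of the vectorized gradients that cofiCostFunc.m is expected to
%  return (same assumption as the cost sketch above), computed here on the
%  reduced data set purely for illustration:
err_check = (X * Theta' - Y) .* R;    % masked prediction errors
X_grad_check = err_check * Theta;     % gradient w.r.t. movie features X
Theta_grad_check = err_check' * X;    % gradient w.r.t. user parameters Theta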

fprintf('\nProgram paused. Press enter to continue.\n');
pause;

%% ========= Part 4: Collaborative Filtering Cost Regularization ========
%  Now, you should implement regularization for the cost function for
%  collaborative filtering. You can implement it by adding the cost of
%  regularization to the original cost computation.
%

%  Evaluate cost function
J = cofiCostFunc([X(:) ; Theta(:)], Y, R, num_users, num_movies, ...
                 num_features, 1.5);

fprintf(['Cost at loaded parameters (lambda = 1.5): %f '...
         '\n(this value should be about 31.34)\n'], J);

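%  The regularization term simply adds (lambda / 2) times the sum of all
%  squared parameters to the unregularized cost. A sketch with lambda = 1.5,
%  which should match the value printed above:
lambda_check = 1.5;
J_reg_check = (1 / 2) * sum(sum(((X * Theta' - Y) .* R) .^ 2)) ...
              + (lambda_check / 2) * (sum(Theta(:) .^ 2) + sum(X(:) .^ 2));
fprintf('Vectorized sketch of the regularized cost: %f\n', J_reg_check);
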
fprintf('\nProgram paused. Press enter to continue.\n');
pause;

%% ======= Part 5: Collaborative Filtering Gradient Regularization ======
%  Once your cost matches up with ours, you should proceed to implement
%  regularization for the gradient.
%

fprintf('\nChecking Gradients (with regularization) ... \n');

%  Check gradients by running checkCostFunction
checkCostFunction(1.5);
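
%  With regularization, each gradient gains a lambda-scaled copy of the
%  corresponding parameter matrix. A self-contained sketch on the reduced
%  data set, with lambda fixed at 1.5 to mirror the check above:
lambda_sketch = 1.5;
err_sketch = (X * Theta' - Y) .* R;
X_grad_reg_check = err_sketch * Theta + lambda_sketch * X;
Theta_grad_reg_check = err_sketch' * X + lambda_sketch * Theta;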

fprintf('\nProgram paused. Press enter to continue.\n');
pause;

%% ============== Part 6: Entering ratings for a new user ===============
%  Before training the collaborative filtering model, we will first add
%  ratings that correspond to a new user that we just observed. This part
%  of the code will also allow you to put in your own ratings for the
%  movies in our dataset!
%
movieList = loadMovieList();

%  Initialize my ratings
my_ratings = zeros(1682, 1);

%  Check the file movie_idx.txt for the id of each movie in our dataset
%  For example, Toy Story (1995) has ID 1, so to rate it "4", you can set
my_ratings(1) = 4;

%  Or suppose you did not enjoy Silence of the Lambs (1991), you can set
my_ratings(98) = 2;

%  We have selected a few movies we liked / did not like and the ratings we
%  gave are as follows:
my_ratings(7) = 3;
my_ratings(12) = 5;
my_ratings(54) = 4;
my_ratings(64) = 5;
my_ratings(66) = 3;
my_ratings(69) = 5;
my_ratings(183) = 4;
my_ratings(226) = 5;
my_ratings(355) = 5;

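%  To rate a different title without hard-coding its ID, you can look it up
%  by name in movieList. Illustrative sketch only: the string must match the
%  title exactly as it appears in movieList, otherwise find() returns empty.
idx_example = find(strcmp(movieList, 'Star Wars (1977)'));
if ~isempty(idx_example)
    fprintf('Found ''Star Wars (1977)'' at index %d\n', idx_example);
end
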
fprintf('\n\nNew user ratings:\n');
for i = 1:length(my_ratings)
    if my_ratings(i) > 0
        fprintf('Rated %d for %s\n', my_ratings(i), ...
                movieList{i});
    end
end

fprintf('\nProgram paused. Press enter to continue.\n');
pause;

%% ================== Part 7: Learning Movie Ratings ====================
%  Now, you will train the collaborative filtering model on a movie rating
%  dataset of 1682 movies and 943 users
%

fprintf('\nTraining collaborative filtering...\n');

%  Load data
load('ex8_movies.mat');

%  Y is a 1682x943 matrix, containing ratings (1-5) of 1682 movies by
%  943 users
%
%  R is a 1682x943 matrix, where R(i,j) = 1 if and only if user j gave a
%  rating to movie i

%  Add our own ratings to the data matrix
Y = [my_ratings Y];
R = [(my_ratings ~= 0) R];

%  Normalize Ratings
[Ynorm, Ymean] = normalizeRatings(Y, R);

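%  Sketch of what mean normalization is expected to produce (assuming the
%  per-movie formulation used in this exercise): Ymean(i) is the mean of
%  movie i's observed ratings, and Ynorm subtracts that mean from the rated
%  entries only. Computed into *_check variables purely for illustration
%  (movies with no ratings get a mean of 0 here to avoid dividing by zero):
Ymean_check = sum(Y .* R, 2) ./ max(sum(R, 2), 1);
Ynorm_check = bsxfun(@minus, Y, Ymean_check) .* R;
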
%  Useful Values
num_users = size(Y, 2);
num_movies = size(Y, 1);
num_features = 10;

%  Set Initial Parameters (Theta, X)
X = randn(num_movies, num_features);
Theta = randn(num_users, num_features);

initial_parameters = [X(:); Theta(:)];

%  Set options for fmincg
options = optimset('GradObj', 'on', 'MaxIter', 100);

%  Set Regularization
lambda = 10;
theta = fmincg(@(t)(cofiCostFunc(t, Ynorm, R, num_users, num_movies, ...
                                 num_features, lambda)), ...
               initial_parameters, options);

%  Unfold the returned theta back into X and Theta
X = reshape(theta(1:num_movies*num_features), num_movies, num_features);
Theta = reshape(theta(num_movies*num_features+1:end), ...
                num_users, num_features);

fprintf('Recommender system learning completed.\n');

fprintf('\nProgram paused. Press enter to continue.\n');
pause;

%% ================== Part 8: Recommendation for you ====================
%  After training the model, you can now make recommendations by computing
%  the predictions matrix.
%

p = X * Theta';
my_predictions = p(:, 1) + Ymean;

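%  Column 1 of p corresponds to the new user because my_ratings was
%  prepended to Y in Part 7; predictions for any other user follow the same
%  pattern (user_j below is a hypothetical index, used only as a sketch):
user_j = 2;                            % hypothetical user index
predictions_j = p(:, user_j) + Ymean;  % predicted ratings for that user
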
movieList = loadMovieList();

[r, ix] = sort(my_predictions, 'descend');
fprintf('\nTop recommendations for you:\n');
for i = 1:10
    j = ix(i);
    fprintf('Predicting rating %.1f for movie %s\n', my_predictions(j), ...
            movieList{j});
end

fprintf('\n\nOriginal ratings provided:\n');
for i = 1:length(my_ratings)
    if my_ratings(i) > 0
        fprintf('Rated %d for %s\n', my_ratings(i), ...
                movieList{i});
    end
end