GitHub Repository: hackassin/Coursera-Machine-Learning
Path: blob/master/Week 5/Programming Assignment - 4/machine-learning-ex4/ex4/nnCostFunction.m
function [J grad] = nnCostFunction(nn_params, ...
                                   input_layer_size, ...
                                   hidden_layer_size, ...
                                   num_labels, ...
                                   X, y, lambda)
%NNCOSTFUNCTION Implements the neural network cost function for a two layer
%neural network which performs classification
%   [J grad] = NNCOSTFUNCTION(nn_params, input_layer_size, hidden_layer_size, ...
%   num_labels, X, y, lambda) computes the cost and gradient of the neural
%   network. The parameters for the neural network are "unrolled" into the
%   vector nn_params and need to be converted back into the weight matrices.
%
%   The returned parameter grad should be an "unrolled" vector of the
%   partial derivatives of the neural network.
%

% Reshape nn_params back into the parameters Theta1 and Theta2, the weight matrices
% for our 2 layer neural network
Theta1 = reshape(nn_params(1:hidden_layer_size * (input_layer_size + 1)), ...
                 hidden_layer_size, (input_layer_size + 1));

Theta2 = reshape(nn_params((1 + (hidden_layer_size * (input_layer_size + 1))):end), ...
                 num_labels, (hidden_layer_size + 1));

% Setup some useful variables
m = size(X, 1);

% You need to return the following variables correctly
J = 0;
Theta1_grad = zeros(size(Theta1));
Theta2_grad = zeros(size(Theta2));

% ====================== YOUR CODE HERE ======================
% Instructions: You should complete the code by working through the
%               following parts.
%
% Part 1: Feedforward the neural network and return the cost in the
%         variable J. After implementing Part 1, you can verify that your
%         cost function computation is correct by verifying the cost
%         computed in ex4.m
%
% Part 2: Implement the backpropagation algorithm to compute the gradients
%         Theta1_grad and Theta2_grad. You should return the partial derivatives of
%         the cost function with respect to Theta1 and Theta2 in Theta1_grad and
%         Theta2_grad, respectively. After implementing Part 2, you can check
%         that your implementation is correct by running checkNNGradients
%
% Note: The vector y passed into the function is a vector of labels
%       containing values from 1..K. You need to map this vector into a
%       binary vector of 1's and 0's to be used with the neural network
%       cost function.
%
% Hint: We recommend implementing backpropagation using a for-loop
%       over the training examples if you are implementing it for the
%       first time.
%
% Part 3: Implement regularization with the cost function and gradients.
%
% Hint: You can implement this around the code for
%       backpropagation. That is, you can compute the gradients for
%       the regularization separately and then add them to Theta1_grad
%       and Theta2_grad from Part 2.
%
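% For reference, the full regularized cost implemented below is
%   J = -(1/m) * sum_{i=1..m} sum_{k=1..K} [ y_k(i)*log(h_k(i)) + (1 - y_k(i))*log(1 - h_k(i)) ]
%       + (lambda/(2*m)) * ( sum(Theta1(:,2:end).^2) + sum(Theta2(:,2:end).^2) )
% where h = a3 is the network output and y_k(i) is the one-hot encoding of y(i).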
% Step 1: Feedforward & Cost Function
a1 = [ones(m,1) X];        % input activations with bias column
z2 = a1 * Theta1';
a2 = sigmoid(z2);
a2 = [ones(m,1) a2];       % hidden activations with bias column
z3 = a2 * Theta2';
a3 = sigmoid(z3);          % output activations (the hypothesis h)

% Cost function, accumulated one class (one column of a3) at a time
for k = 1:num_labels
  yCls = (y == k);         % 1 where example i has label k, else 0
  J = J - (1/m)*sum((yCls .* log(a3(:,k))) + ((1-yCls) .* log(1-a3(:,k))));
end
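% A fully vectorized form of the same unregularized cost (a sketch for
% reference only; equivalent to the per-class loop above):
%   I = eye(num_labels);
%   Y = I(y, :);                                         % m x K one-hot labels
%   J = -(1/m) * sum(sum(Y .* log(a3) + (1 - Y) .* log(1 - a3)));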
% Regularizing the cost function: add (lambda/(2*m)) times the sum of
% squared weights, excluding the bias column of each Theta
regTheta1 = Theta1;
regTheta2 = Theta2;
regTheta1(:,1) = 0;        % zero out bias weights so they are not regularized
regTheta2(:,1) = 0;

regSum = sum(sum(regTheta1.^2)) + sum(sum(regTheta2.^2));
J = J + (lambda/(2*m))*regSum;

% Step 2: Backpropagation, looping over the training examples
for t = 1:m
  % Forward pass for example t, with activations as column vectors including bias
  a1 = [1; X(t,:)'];
  z2 = Theta1*a1;
  a2 = [1; sigmoid(z2)];
  z3 = Theta2*a2;
  a3 = sigmoid(z3);

  yi = ([1:num_labels] == y(t))';   % one-hot column vector for label y(t)
  % Output-layer error
  delta3 = a3 - yi;
  % Hidden-layer error (bias column of Theta2 excluded)
  delta2 = (Theta2(:,2:end)'*delta3) .* sigmoidGradient(z2);
  % Accumulate the gradient contributions of this example
  Theta1_grad = Theta1_grad + delta2 * a1';
  Theta2_grad = Theta2_grad + delta3 * a2';
end
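% A vectorized sketch of the same accumulation, for reference only. It assumes
% A1, A2, A3, Z2 denote the batch-oriented matrices from Step 1 (which the loop
% above reuses under the names a1, a2, a3, z2) and Y the m x K one-hot matrix:
%   D3 = A3 - Y;                                            % m x num_labels
%   D2 = (D3 * Theta2(:,2:end)) .* sigmoidGradient(Z2);     % m x hidden_layer_size
%   Theta1_grad = D2' * A1;
%   Theta2_grad = D3' * A2;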
% -------------------------------------------------------------
% Step 3: Average the accumulated gradients and add the regularization term
% (lambda/m)*Theta for every weight except the bias column
Theta1_grad = (1/m) * Theta1_grad + (lambda/m) * [zeros(hidden_layer_size,1) Theta1(:,2:end)];
Theta2_grad = (1/m) * Theta2_grad + (lambda/m) * [zeros(num_labels,1) Theta2(:,2:end)];
% =========================================================================

% Unroll gradients
grad = [Theta1_grad(:) ; Theta2_grad(:)];

end
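% Example of how this function is typically wired into training, sketched from
% the ex4.m driver script (the iteration count is illustrative):
%   options = optimset('MaxIter', 50);
%   costFunction = @(p) nnCostFunction(p, input_layer_size, hidden_layer_size, ...
%                                      num_labels, X, y, lambda);
%   [nn_params, cost] = fmincg(costFunction, initial_nn_params, options);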