GitHub Repository: hackassin/Coursera-Machine-Learning
Path: blob/master/Week 2/Programming Assignment-1/gradientDescent.m
function [theta, J_history] = gradientDescent(X, y, theta, alpha, num_iters)
%GRADIENTDESCENT Performs gradient descent to learn theta
%   theta = GRADIENTDESCENT(X, y, theta, alpha, num_iters) updates theta by
%   taking num_iters gradient steps with learning rate alpha

% Initialize some useful values
m = length(y); % number of training examples
J_history = zeros(num_iters, 1);

for iter = 1:num_iters

    % ====================== YOUR CODE HERE ======================
    % Instructions: Perform a single gradient step on the parameter vector
    %               theta.
    %
    % Hint: While debugging, it can be useful to print out the values
    %       of the cost function (computeCost) and gradient here.
    %
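    % The code below implements the batch gradient descent update rule,
    % applied simultaneously to each parameter theta_j:
    %   theta_j := theta_j - (alpha/m) * sum_i (h_theta(x^(i)) - y^(i)) * x_j^(i)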
    predictions = X * theta;  % hypothesis h_theta(x) for all m examples
    error = predictions - y;  % per-example residuals

    % Update both parameters through temporaries so the second update
    % does not see an already-updated theta(1), then commit the step.
    % (The original file computed temp1/temp2 but never assigned theta,
    % so the parameters were never actually updated.)
    temp1 = theta(1) - (alpha/m) * sum(error .* X(:,1));
    temp2 = theta(2) - (alpha/m) * sum(error .* X(:,2));
    theta = [temp1; temp2];

    % ============================================================
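    % A fully vectorized equivalent (a sketch; it generalizes to any
    % number of features instead of hard-coding two parameters):
    %   theta = theta - (alpha/m) * (X' * error);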

    % Save the cost J in every iteration
    J_history(iter) = computeCost(X, y, theta);
    % fprintf('Compute Cost: J_history(%d) = %f\n', iter, J_history(iter));

end
% Predict profits for populations of 60,000 and 80,000
% (the population feature is in units of 10,000; profit is in $10,000s,
% so multiplying by 10000 converts the prediction to dollars).
predict3 = [1, 6] * theta;
fprintf('For population = 60,000, we predict a profit of %f\n', ...
    predict3 * 10000);
predict4 = [1, 8] * theta;
fprintf('For population = 80,000, we predict a profit of %f\n', ...
    predict4 * 10000);

end
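For reference, a minimal sketch of how this function is typically driven from the assignment's ex1.m script; it assumes the course's ex1data1.txt (column 1: city population in 10,000s, column 2: profit in $10,000s) and the companion computeCost.m are on the path:

data = load('ex1data1.txt');
X = [ones(size(data, 1), 1), data(:, 1)];  % prepend a column of ones for the intercept
y = data(:, 2);
theta = zeros(2, 1);   % initial fitting parameters
alpha = 0.01;          % learning rate
num_iters = 1500;      % number of gradient steps

[theta, J_history] = gradientDescent(X, y, theta, alpha, num_iters);
fprintf('Theta found by gradient descent: %f %f\n', theta(1), theta(2));
plot(1:num_iters, J_history);  % the cost should decrease on every iteration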