GitHub Repository: hackassin/Coursera-Machine-Learning
Path: blob/master/Week 4/Programming Assignment - 3/machine-learning-ex3/ex3/ex3.m

%% Machine Learning Online Class - Exercise 3 | Part 1: One-vs-all

%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  exercise. You will need to complete the following functions
%  in this exercise:
%
%     lrCostFunction.m (logistic regression cost function)
%     oneVsAll.m
%     predictOneVsAll.m
%     predict.m
%
%  For this exercise, you will not need to change any code in this file,
%  or any other files other than those mentioned above.
%

%% Initialization
clear; close all; clc

%% Setup the parameters you will use for this part of the exercise
input_layer_size = 400;   % 20x20 input images of digits
num_labels = 10;          % 10 labels, from 1 to 10
                          % (note that we have mapped "0" to label 10)

%% =========== Part 1: Loading and Visualizing Data =============
%  We start the exercise by first loading and visualizing the dataset.
%  You will be working with a dataset that contains handwritten digits.
%

% Load Training Data
fprintf('Loading and Visualizing Data ...\n')

load('ex3data1.mat'); % training data stored in arrays X, y
m = size(X, 1);

% Randomly select 100 data points to display
rand_indices = randperm(m);
sel = X(rand_indices(1:100), :);

displayData(sel);

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ============ Part 2a: Vectorize Logistic Regression ============
%  In this part of the exercise, you will reuse your logistic regression
%  code from the last exercise. Your task here is to make sure that your
%  regularized logistic regression implementation is vectorized. After
%  that, you will implement one-vs-all classification for the handwritten
%  digit dataset. (A commented reference sketch of a vectorized
%  implementation follows the test case below.)
%

% Test case for lrCostFunction
fprintf('\nTesting lrCostFunction() with regularization');

theta_t = [-2; -1; 1; 2];
X_t = [ones(5,1) reshape(1:15,5,3)/10];
y_t = ([1;0;1;0;1] >= 0.5);
lambda_t = 3;
[J, grad] = lrCostFunction(theta_t, X_t, y_t, lambda_t);

fprintf('\nCost: %f\n', J);
fprintf('Expected cost: 2.534819\n');
fprintf('Gradients:\n');
fprintf(' %f \n', grad);
fprintf('Expected gradients:\n');
fprintf(' 0.146561\n -0.548558\n 0.724722\n 1.398003\n');

fprintf('Program paused. Press enter to continue.\n');
pause;
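
% ------------------------------------------------------------------
% For reference, a minimal sketch of one possible vectorized,
% regularized implementation of lrCostFunction.m. It is kept as a
% comment so it cannot shadow the version you write yourself, and it
% assumes sigmoid.m from the exercise files is on the path.
%
%   function [J, grad] = lrCostFunction(theta, X, y, lambda)
%     m = length(y);                    % number of training examples
%     h = sigmoid(X * theta);           % hypotheses for all examples at once
%     reg = (lambda / (2 * m)) * sum(theta(2:end) .^ 2);
%     J = (1 / m) * (-y' * log(h) - (1 - y)' * log(1 - h)) + reg;
%     grad = (1 / m) * (X' * (h - y));  % unregularized gradient
%     grad(2:end) = grad(2:end) + (lambda / m) * theta(2:end);
%   end
% ------------------------------------------------------------------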

%% ============ Part 2b: One-vs-All Training ============
fprintf('\nTraining One-vs-All Logistic Regression...\n')

lambda = 0.1;
[all_theta] = oneVsAll(X, y, num_labels, lambda);

fprintf('Program paused. Press enter to continue.\n');
pause;
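
% ------------------------------------------------------------------
% For reference, a minimal sketch of one possible oneVsAll.m: it
% trains num_labels regularized logistic regression classifiers, one
% per class, and stacks the learned parameters row by row. Kept as a
% comment so it cannot shadow your own version; it assumes fmincg.m
% from the exercise files is on the path, and MaxIter = 50 is just an
% illustrative choice.
%
%   function [all_theta] = oneVsAll(X, y, num_labels, lambda)
%     [m, n] = size(X);
%     all_theta = zeros(num_labels, n + 1);
%     X = [ones(m, 1) X];               % add the intercept column
%     options = optimset('GradObj', 'on', 'MaxIter', 50);
%     for c = 1:num_labels
%       initial_theta = zeros(n + 1, 1);
%       % train the c-th classifier: class c versus all other classes
%       all_theta(c, :) = fmincg(@(t)(lrCostFunction(t, X, (y == c), lambda)), ...
%                                initial_theta, options)';
%     end
%   end
% ------------------------------------------------------------------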


%% ================ Part 3: Predict for One-Vs-All ================

pred = predictOneVsAll(all_theta, X);
% Print the first few predicted labels as a quick sanity check
fprintf('First 10 predictions: '); fprintf('%d ', pred(1:10)); fprintf('\n');
fprintf('\nTraining Set Accuracy: %f\n', mean(double(pred == y)) * 100);
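
% ------------------------------------------------------------------
% For reference, a minimal sketch of one possible predictOneVsAll.m.
% Kept as a comment so it cannot shadow your own version; it assumes
% sigmoid.m is on the path (and since sigmoid is monotonic, taking
% the max over X * all_theta' directly would yield the same labels).
%
%   function p = predictOneVsAll(all_theta, X)
%     m = size(X, 1);
%     X = [ones(m, 1) X];               % add the intercept column
%     % for each example, pick the class whose classifier scores highest
%     [~, p] = max(sigmoid(X * all_theta'), [], 2);
%   end
% ------------------------------------------------------------------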