Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
hackassin
GitHub Repository: hackassin/Coursera-Machine-Learning
Path: blob/master/Week 7/Programming Assignment - 6/ex6/svmPredict.m
863 views
1
function pred = svmPredict(model, X)
%SVMPREDICT Classify examples with a trained SVM model (see svmTrain).
%   pred = SVMPREDICT(model, X) returns an m x 1 vector of {0, 1}
%   predictions for the m x n example matrix X (one example per row),
%   using a model structure produced by svmTrain.

% A single example may arrive as a column vector; transpose it so that
% examples are always laid out along rows.
if (size(X, 2) == 1)
    X = X';
end

numExamples = size(X, 1);
scores = zeros(numExamples, 1);
pred = zeros(numExamples, 1);

kernelName = func2str(model.kernelFunction);
if strcmp(kernelName, 'linearKernel')
    % Linear kernel: the decision value is just the affine score X*w + b.
    scores = X * model.w + model.b;
elseif strfind(kernelName, 'gaussianKernel')
    % RBF kernel, fully vectorized. Using
    %   ||x - z||^2 = ||x||^2 + ||z||^2 - 2*x'*z,
    % all pairwise squared distances are built in one shot with bsxfun.
    exampleNorms = sum(X .^ 2, 2);
    supportNorms = sum(model.X .^ 2, 2)';
    sqDists = bsxfun(@plus, exampleNorms, ...
                     bsxfun(@plus, supportNorms, -2 * X * model.X'));
    % kernelFunction(1, 0) evaluates the kernel at unit distance; raising
    % it to each squared distance reproduces the Gaussian kernel matrix.
    kernelVals = model.kernelFunction(1, 0) .^ sqDists;
    % Weight each column (support vector) by its label and alpha, then
    % sum across support vectors to get the decision value per example.
    kernelVals = bsxfun(@times, model.y', kernelVals);
    kernelVals = bsxfun(@times, model.alphas', kernelVals);
    scores = sum(kernelVals, 2);
else
    % Arbitrary kernel: fall back to the explicit double loop over
    % examples and support vectors.
    for ex = 1:numExamples
        acc = 0;
        for sv = 1:size(model.X, 1)
            acc = acc + model.alphas(sv) * model.y(sv) * ...
                  model.kernelFunction(X(ex, :)', model.X(sv, :)');
        end
        scores(ex) = acc + model.b;
    end
end

% Threshold the decision values at zero to obtain {0, 1} labels.
pred(scores >= 0) = 1;
pred(scores < 0) = 0;

end