GitHub Repository: debakarr/machinelearning
Path: blob/master/Part 9 - Dimension Reduction/Principal Component Analysis/pca.R

# PCA

# Importing the dataset
dataset = read.csv('Wine.csv')
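
# Optional sanity check (a sketch): this assumes Wine.csv follows the UCI Wine
# layout, with 13 numeric features and the class label Customer_Segment in
# column 14.
str(dataset)
print(table(dataset$Customer_Segment))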

# Splitting the dataset into the Training set and Test set
# install.packages('caTools')
library(caTools)
set.seed(123)
split = sample.split(dataset$Customer_Segment, SplitRatio = 0.8)
training_set = subset(dataset, split == TRUE)
test_set = subset(dataset, split == FALSE)
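
# Optional check (a sketch): sample.split() stratifies on the label vector it is
# given, so the class proportions should be roughly the same in both sets.
print(prop.table(table(training_set$Customer_Segment)))
print(prop.table(table(test_set$Customer_Segment)))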

# Feature Scaling
training_set[-14] = scale(training_set[-14])
test_set[-14] = scale(test_set[-14])
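
# Optional check (a sketch): after scale(), each feature column of the training
# set should have mean ~0 and standard deviation ~1.
print(round(colMeans(training_set[-14]), 3))
print(round(apply(training_set[-14], 2, sd), 3))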

# Applying PCA
# install.packages('caret')
library(caret)
# install.packages('e1071')
library(e1071)
pca = preProcess(x = training_set[-14], method = 'pca', pcaComp = 2)
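
# Optional check (a sketch): preProcess() does not directly report how much
# variance the two retained components keep, so prcomp() from base stats can be
# run on the same scaled features for a quick look at the proportion of
# variance explained.
print(summary(prcomp(training_set[-14])))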

training_set = predict(pca, training_set)
training_set = training_set[c(2, 3, 1)]
test_set = predict(pca, test_set)
test_set = test_set[c(2, 3, 1)]

# Fitting SVM to the Training set
# install.packages('e1071')
library(e1071)
classifier = svm(formula = Customer_Segment ~ .,
                 data = training_set,
                 type = 'C-classification',
                 kernel = 'linear')
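
# Optional (a sketch): printing the fitted model shows the SVM parameters and
# the number of support vectors found by e1071.
print(classifier)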

# Predicting the Test set results
y_pred = predict(classifier, newdata = test_set[-3])

# Making the Confusion Matrix
cm = table(test_set[, 3], y_pred)
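
# Accuracy from the confusion matrix (a sketch): correct predictions sit on the
# diagonal of cm, so overall accuracy is their share of all test cases.
accuracy = sum(diag(cm)) / sum(cm)
print(accuracy)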

# Visualising the Training set results
library(ElemStatLearn)
set = training_set
X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01)
X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01)
grid_set = expand.grid(X1, X2)
colnames(grid_set) = c('PC1', 'PC2')
y_grid = predict(classifier, newdata = grid_set)
plot(set[, -3],
     main = 'SVM (Training set)',
     xlab = 'PC1', ylab = 'PC2',
     xlim = range(X1), ylim = range(X2))
contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE)
points(grid_set, pch = '.', col = ifelse(y_grid == 2, 'deepskyblue',
                                         ifelse(y_grid == 1, 'springgreen3', 'tomato')))
points(set, pch = 21, bg = ifelse(set[, 3] == 2, 'blue3',
                                  ifelse(set[, 3] == 1, 'green4', 'red3')))

# Visualising the Test set results
library(ElemStatLearn)
set = test_set
X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01)
X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01)
grid_set = expand.grid(X1, X2)
colnames(grid_set) = c('PC1', 'PC2')
y_grid = predict(classifier, newdata = grid_set)
plot(set[, -3], main = 'SVM (Test set)',
     xlab = 'PC1', ylab = 'PC2',
     xlim = range(X1), ylim = range(X2))
contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE)
points(grid_set, pch = '.', col = ifelse(y_grid == 2, 'deepskyblue',
                                         ifelse(y_grid == 1, 'springgreen3', 'tomato')))
points(set, pch = 21, bg = ifelse(set[, 3] == 2, 'blue3',
                                  ifelse(set[, 3] == 1, 'green4', 'red3')))
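
# Possible refactor (a sketch): the two visualisation blocks above differ only
# in the data they plot and the title, so they could be wrapped in a helper such
# as this one (plot_decision_boundary is a hypothetical name, not part of the
# original script).
plot_decision_boundary = function(set, classifier, title) {
  X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01)
  X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01)
  grid_set = expand.grid(X1, X2)
  colnames(grid_set) = c('PC1', 'PC2')
  y_grid = predict(classifier, newdata = grid_set)
  plot(set[, -3], main = title,
       xlab = 'PC1', ylab = 'PC2',
       xlim = range(X1), ylim = range(X2))
  contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE)
  points(grid_set, pch = '.', col = ifelse(y_grid == 2, 'deepskyblue',
                                           ifelse(y_grid == 1, 'springgreen3', 'tomato')))
  points(set, pch = 21, bg = ifelse(set[, 3] == 2, 'blue3',
                                    ifelse(set[, 3] == 1, 'green4', 'red3')))
}
# plot_decision_boundary(training_set, classifier, 'SVM (Training set)')
# plot_decision_boundary(test_set, classifier, 'SVM (Test set)')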