Book a Demo!
CoCalc Logo Icon
Store | Features | Docs | Share | Support | News | About | Policies | Sign Up | Sign In
debakarr
GitHub Repository: debakarr/machinelearning
Path: blob/master/Part 9 - Dimension Reduction/Linear Discriminant Analysis/lda.R
1339 views
# LDA

# Importing the dataset (Wine.csv: 13 chemical features + Customer_Segment label)
dataset <- read.csv('Wine.csv')

# Splitting the dataset into the Training set and Test set
# install.packages('caTools')
library(caTools)
set.seed(123)  # fixed seed so the train/test split is reproducible
split <- sample.split(dataset$Customer_Segment, SplitRatio = 0.8)
training_set <- subset(dataset, split == TRUE)
test_set <- subset(dataset, split == FALSE)

# Feature Scaling
# FIX: the test set must be standardised with the TRAINING set's centre and
# scale, not with its own statistics — scaling each set independently leaks
# test-set information and puts the two sets on different scales.
# Column 14 (Customer_Segment) is the class label and is left unscaled.
train_scaled <- scale(training_set[-14])
training_set[-14] <- train_scaled
test_set[-14] <- scale(test_set[-14],
                       center = attr(train_scaled, "scaled:center"),
                       scale = attr(train_scaled, "scaled:scale"))
# Applying LDA
library(MASS)
# FIX: name the fitted model `lda_model` — the original `lda = lda(...)`
# shadowed MASS::lda() with the fit object, a classic R footgun.
lda_model <- lda(formula = Customer_Segment ~ ., data = training_set)

# predict() on an lda fit returns a list coerced here to columns:
#   1 = class, 2-4 = posterior probabilities (3 segments),
#   5-6 = discriminant scores x.LD1 / x.LD2.
# Keep the two discriminants plus the class, class last, so downstream
# code can address the label as column 3.
training_set <- as.data.frame(predict(lda_model, training_set))
training_set <- training_set[c(5, 6, 1)]
test_set <- as.data.frame(predict(lda_model, test_set))
test_set <- test_set[c(5, 6, 1)]
# Fitting SVM to the Training set
# install.packages('e1071')
library(e1071)
# Linear C-classification SVM on the two LDA discriminants; `class` is the
# label column name produced by predict() on the LDA fit above.
classifier <- svm(
  formula = class ~ .,
  data = training_set,
  type = 'C-classification',
  kernel = 'linear'
)
# Predicting the Test set results
# Column 3 holds the true class labels, so drop it before predicting.
y_pred <- predict(classifier, newdata = test_set[-3])

# Making the Confusion Matrix: rows = actual classes, columns = predictions
cm <- table(test_set[, 3], y_pred)
# Visualising the Training set results
# NOTE(review): ElemStatLearn is loaded per the course material but nothing
# from it is referenced below (base graphics only); it was also archived
# from CRAN — confirm it is installed before running.
library(ElemStatLearn)
# Build a fine grid over the LD1/LD2 plane, classify every grid point to
# paint the decision regions, then overlay the actual training points.
pts <- training_set
axis1 <- seq(min(pts[, 1]) - 1, max(pts[, 1]) + 1, by = 0.01)
axis2 <- seq(min(pts[, 2]) - 1, max(pts[, 2]) + 1, by = 0.01)
grid_pts <- expand.grid(axis1, axis2)
colnames(grid_pts) <- c('x.LD1', 'x.LD2')  # must match the SVM's predictor names
grid_pred <- predict(classifier, newdata = grid_pts)
plot(pts[, -3],
     main = 'SVM (Training set)',
     xlab = 'LD1', ylab = 'LD2',
     xlim = range(axis1), ylim = range(axis2))
contour(axis1, axis2,
        matrix(as.numeric(grid_pred), length(axis1), length(axis2)),
        add = TRUE)
points(grid_pts, pch = '.',
       col = ifelse(grid_pred == 2, 'deepskyblue',
                    ifelse(grid_pred == 1, 'springgreen3', 'tomato')))
points(pts, pch = 21,
       bg = ifelse(pts[, 3] == 2, 'blue3',
                   ifelse(pts[, 3] == 1, 'green4', 'red3')))
# Visualising the Test set results
# NOTE(review): ElemStatLearn is loaded per the course material but nothing
# from it is referenced below (base graphics only); it was also archived
# from CRAN — confirm it is installed before running.
library(ElemStatLearn)
# Same decision-region plot as for the training set, drawn over test points.
pts <- test_set
axis1 <- seq(min(pts[, 1]) - 1, max(pts[, 1]) + 1, by = 0.01)
axis2 <- seq(min(pts[, 2]) - 1, max(pts[, 2]) + 1, by = 0.01)
grid_pts <- expand.grid(axis1, axis2)
colnames(grid_pts) <- c('x.LD1', 'x.LD2')  # must match the SVM's predictor names
grid_pred <- predict(classifier, newdata = grid_pts)
plot(pts[, -3],
     main = 'SVM (Test set)',
     xlab = 'LD1', ylab = 'LD2',
     xlim = range(axis1), ylim = range(axis2))
contour(axis1, axis2,
        matrix(as.numeric(grid_pred), length(axis1), length(axis2)),
        add = TRUE)
points(grid_pts, pch = '.',
       col = ifelse(grid_pred == 2, 'deepskyblue',
                    ifelse(grid_pred == 1, 'springgreen3', 'tomato')))
points(pts, pch = 21,
       bg = ifelse(pts[, 3] == 2, 'blue3',
                   ifelse(pts[, 3] == 1, 'green4', 'red3')))