Path: blob/master/Part 10 - Model Selection And Boosting/Grid Search/grid_search.R
# Grid Search

# Importing the dataset
dataset = read.csv('Social_Network_Ads.csv')
dataset = dataset[3:5]

# Encoding the target feature as factor
dataset$Purchased = factor(dataset$Purchased, levels = c(0, 1))

# Splitting the dataset into the Training set and Test set
# install.packages('caTools')
library(caTools)
set.seed(123)
split = sample.split(dataset$Purchased, SplitRatio = 0.75)
training_set = subset(dataset, split == TRUE)
test_set = subset(dataset, split == FALSE)

# Feature Scaling
training_set[-3] = scale(training_set[-3])
test_set[-3] = scale(test_set[-3])

# Fitting Kernel SVM to the Training set
# install.packages('e1071')
library(e1071)
classifier = svm(formula = Purchased ~ .,
                 data = training_set,
                 type = 'C-classification',
                 kernel = 'radial')

# Predicting the Test set results
y_pred = predict(classifier, newdata = test_set[-3])

# Making the Confusion Matrix
cm = table(test_set[, 3], y_pred)

# Applying k-Fold Cross Validation
# install.packages('caret')
library(caret)
folds = createFolds(training_set$Purchased, k = 10)
cv = lapply(folds, function(x) {
  training_fold = training_set[-x, ]
  test_fold = training_set[x, ]
  classifier = svm(formula = Purchased ~ .,
                   data = training_fold,
                   type = 'C-classification',
                   kernel = 'radial')
  y_pred = predict(classifier, newdata = test_fold[-3])
  cm = table(test_fold[, 3], y_pred)
  accuracy = (cm[1,1] + cm[2,2]) / (cm[1,1] + cm[2,2] + cm[1,2] + cm[2,1])
  return(accuracy)
})
accuracy = mean(as.numeric(cv))

# Applying Grid Search to find the best parameters
# install.packages('caret')
library(caret)
classifier = train(form = Purchased ~ ., data = training_set, method = 'svmRadial')
classifier
classifier$bestTune

# Visualising the Training set results
library(ElemStatLearn)
set = training_set
X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01)
X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01)
grid_set = expand.grid(X1, X2)
colnames(grid_set) = c('Age', 'EstimatedSalary')
y_grid = predict(classifier, newdata = grid_set)
plot(set[, -3],
     main = 'Kernel SVM (Training set)',
     xlab = 'Age', ylab = 'Estimated Salary',
     xlim = range(X1), ylim = range(X2))
contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE)
points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato'))
points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3'))

# Visualising the Test set results
library(ElemStatLearn)
set = test_set
X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01)
X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01)
grid_set = expand.grid(X1, X2)
colnames(grid_set) = c('Age', 'EstimatedSalary')
y_grid = predict(classifier, newdata = grid_set)
plot(set[, -3], main = 'Kernel SVM (Test set)',
     xlab = 'Age', ylab = 'Estimated Salary',
     xlim = range(X1), ylim = range(X2))
contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE)
points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato'))
points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3'))
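
# Optional refinement (not in the original script): a minimal sketch of an
# explicit grid search with 10-fold cross-validation via caret's trainControl()
# and tuneGrid, followed by refitting the e1071 SVM with the selected
# parameters. The grid values below are illustrative only. This assumes that
# caret's 'svmRadial' method reports 'sigma' and 'C' in bestTune (kernlab's
# parameterisation) and that kernlab's sigma plays the role of e1071's gamma
# for the radial kernel. caret and e1071 are already loaded above.
search_control = trainControl(method = 'cv', number = 10)
search_grid = expand.grid(sigma = c(0.1, 0.5, 1, 1.5),
                          C = c(0.1, 0.5, 1, 10))
tuned_search = train(form = Purchased ~ .,
                     data = training_set,
                     method = 'svmRadial',
                     trControl = search_control,
                     tuneGrid = search_grid)
tuned_search$bestTune
tuned_classifier = svm(formula = Purchased ~ .,
                       data = training_set,
                       type = 'C-classification',
                       kernel = 'radial',
                       gamma = tuned_search$bestTune$sigma,  # assumed mapping: kernlab sigma -> e1071 gamma
                       cost = tuned_search$bestTune$C)
y_pred_tuned = predict(tuned_classifier, newdata = test_set[-3])
table(test_set[, 3], y_pred_tuned)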