Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
dynamicslab
GitHub Repository: dynamicslab/databook_python
Path: blob/master/CH06/CH06_SEC05_1_DeepCNN.ipynb
597 views
Kernel: Python 3
# Numerical / plotting stack plus Keras for the convolutional network below.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rcParams
from scipy import io
import os
from keras.utils import to_categorical
from keras.models import Sequential
from keras.layers import Dense, Conv2D, Flatten, MaxPool2D
from keras import optimizers

# Global plot styling: bigger font and a large square canvas for the image grid.
rcParams.update({'font.size': 18})
plt.rcParams['figure.figsize'] = [12, 12]
# Load the letter-image training and test sets stored as MATLAB .mat files.
letters_train_mat = io.loadmat(os.path.join('..', 'DATA', 'lettersTrainSet.mat'))
letters_test_mat = io.loadmat(os.path.join('..', 'DATA', 'lettersTestSet.mat'))

XTrain = letters_train_mat['XTrain']        # images in MATLAB order (h, w, ch, batch)
TTrain = letters_train_mat['TTrain_cell']   # letter labels (MATLAB cell array)
XTest = letters_test_mat['XTest']
TTest = letters_test_mat['TTest_cell']

# By default, Keras expects data in form (batch, height, width, channels)
XTrain = np.transpose(XTrain, axes=[3, 0, 1, 2])
XTest = np.transpose(XTest, axes=[3, 0, 1, 2])

# Pick 20 random training images to preview. FIX: the sample count was
# hard-coded as 1500; derive it from the data so a different dataset size
# does not silently sample out of range (or miss samples).
perm = np.random.permutation(XTrain.shape[0])[:20]

fig, axs = plt.subplots(4, 5)
axs = axs.reshape(-1)
for j in range(len(axs)):
    axs[j].imshow(np.squeeze(XTrain[perm[j], :, :, :]), cmap='gray')
    axs[j].axis('off')
Image in a Jupyter notebook
# Map each letter label to an integer class index, then one-hot encode
# the indices for the softmax output layer.
classes = np.unique(TTrain)

y_train_label = np.zeros_like(TTrain)
y_test_label = np.zeros_like(TTest)
for idx, letter in enumerate(classes):
    y_train_label[TTrain == letter] = idx
    y_test_label[TTest == letter] = idx

# Flatten to 1-D vectors of class indices.
y_train_label = y_train_label.ravel()
y_test_label = y_test_label.ravel()

# One-hot encode the integer class indices.
y_train = to_categorical(y_train_label)
y_test = to_categorical(y_test_label)
# Shallow CNN: a single 5x5 convolution with 16 filters, 2x2 max-pooling,
# then a dense softmax classifier over the letter classes.
model = Sequential([
    Conv2D(filters=16, kernel_size=5, activation='relu', input_shape=(28, 28, 1)),
    MaxPool2D(pool_size=2, strides=2),
    Flatten(),
    Dense(len(classes), activation='softmax'),
])

# Plain SGD with momentum; categorical cross-entropy matches the one-hot targets.
model.compile(optimizer=optimizers.SGD(momentum=0.9),
              loss='categorical_crossentropy')

model.fit(XTrain, y_train, epochs=30)
Epoch 1/30 1500/1500 [==============================] - 0s 183us/step - loss: 0.4938 Epoch 2/30 1500/1500 [==============================] - 0s 66us/step - loss: 0.0512 Epoch 3/30 1500/1500 [==============================] - 0s 66us/step - loss: 0.0173 Epoch 4/30 1500/1500 [==============================] - 0s 69us/step - loss: 0.0108 Epoch 5/30 1500/1500 [==============================] - 0s 68us/step - loss: 0.0080 Epoch 6/30 1500/1500 [==============================] - 0s 67us/step - loss: 0.0060 Epoch 7/30 1500/1500 [==============================] - 0s 68us/step - loss: 0.0050 Epoch 8/30 1500/1500 [==============================] - 0s 68us/step - loss: 0.0040 Epoch 9/30 1500/1500 [==============================] - 0s 68us/step - loss: 0.0035 Epoch 10/30 1500/1500 [==============================] - 0s 68us/step - loss: 0.0030 Epoch 11/30 1500/1500 [==============================] - 0s 66us/step - loss: 0.0026 Epoch 12/30 1500/1500 [==============================] - 0s 72us/step - loss: 0.0025 Epoch 13/30 1500/1500 [==============================] - 0s 66us/step - loss: 0.0021 Epoch 14/30 1500/1500 [==============================] - 0s 69us/step - loss: 0.0019 Epoch 15/30 1500/1500 [==============================] - 0s 67us/step - loss: 0.0018 Epoch 16/30 1500/1500 [==============================] - 0s 62us/step - loss: 0.0017 Epoch 17/30 1500/1500 [==============================] - 0s 68us/step - loss: 0.0015 Epoch 18/30 1500/1500 [==============================] - 0s 67us/step - loss: 0.0015 Epoch 19/30 1500/1500 [==============================] - 0s 67us/step - loss: 0.0014 Epoch 20/30 1500/1500 [==============================] - 0s 72us/step - loss: 0.0013 Epoch 21/30 1500/1500 [==============================] - 0s 71us/step - loss: 0.0012 Epoch 22/30 1500/1500 [==============================] - 0s 78us/step - loss: 0.0011 Epoch 23/30 1500/1500 [==============================] - 0s 68us/step - loss: 0.0011 Epoch 24/30 1500/1500 
[==============================] - 0s 70us/step - loss: 0.0010 Epoch 25/30 1500/1500 [==============================] - 0s 68us/step - loss: 9.5643e-04 Epoch 26/30 1500/1500 [==============================] - 0s 66us/step - loss: 9.0637e-04 Epoch 27/30 1500/1500 [==============================] - 0s 70us/step - loss: 8.7863e-04 Epoch 28/30 1500/1500 [==============================] - 0s 75us/step - loss: 8.3013e-04 Epoch 29/30 1500/1500 [==============================] - 0s 75us/step - loss: 7.9356e-04 Epoch 30/30 1500/1500 [==============================] - 0s 75us/step - loss: 7.6706e-04
<keras.callbacks.History at 0x12140b09940>
# Undo the one-hot encoding: the predicted class is the index of the largest
# softmax probability for each test image.
YPredict = model.predict(XTest).argmax(axis=1)

# Fraction of test samples whose predicted index matches the true label.
accuracy = np.mean(YPredict == y_test_label)
print('Accuracy = {}'.format(accuracy))
Accuracy = 0.8853333333333333