Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
deeplearningzerotoall
GitHub Repository: deeplearningzerotoall/PyTorch
Path: blob/master/CNN/lab-10-2-mnist_nn.ipynb
618 views
Kernel: Python 3
# Lab 10 MNIST and softmax import torch import torchvision.datasets as dsets import torchvision.transforms as transforms import random
# Reproducibility: pick the compute device, then fix the RNG seed for
# Python's `random` and for PyTorch (CPU and, when available, every CUDA device).
use_cuda = torch.cuda.is_available()
device = 'cuda' if use_cuda else 'cpu'

random.seed(777)
torch.manual_seed(777)
if use_cuda:
    torch.cuda.manual_seed_all(777)
# Training hyperparameters.
learning_rate = 1e-3    # Adam step size
training_epochs = 15    # full passes over the training set
batch_size = 100        # samples per mini-batch
def _load_mnist(train_split):
    """Load one MNIST split as [0, 1] float tensors, downloading if needed."""
    return dsets.MNIST(root='MNIST_data/',
                       train=train_split,
                       transform=transforms.ToTensor(),
                       download=True)

# MNIST dataset: 60k training / 10k test images of handwritten digits.
mnist_train = _load_mnist(True)
mnist_test = _load_mnist(False)
# Mini-batch loader over the training set. Shuffling re-randomizes sample
# order each epoch; drop_last discards the final partial batch so every
# batch holds exactly `batch_size` samples.
data_loader = torch.utils.data.DataLoader(
    dataset=mnist_train,
    batch_size=batch_size,
    shuffle=True,
    drop_last=True,
)
# Fully-connected layers: 784 inputs (28x28 flattened pixels) -> two hidden
# layers of 256 units -> 10 output logits (one per digit class).
n_inputs, n_hidden, n_classes = 784, 256, 10
linear1 = torch.nn.Linear(n_inputs, n_hidden, bias=True)
linear2 = torch.nn.Linear(n_hidden, n_hidden, bias=True)
linear3 = torch.nn.Linear(n_hidden, n_classes, bias=True)
relu = torch.nn.ReLU()
# Stack the layers into a feed-forward net. The output layer emits raw
# logits — no softmax here, because CrossEntropyLoss applies it internally.
# (The single `relu` instance is stateless, so reusing it twice is safe.)
stack = [linear1, relu, linear2, relu, linear3]
model = torch.nn.Sequential(*stack).to(device)
# Loss and optimizer. CrossEntropyLoss combines log-softmax and NLL loss,
# which is why the model's final layer produces raw logits.
criterion = torch.nn.CrossEntropyLoss().to(device)

# Adam adapts per-parameter learning rates from gradient moments.
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
# Training loop: average the per-batch loss over each epoch and report it.
total_batch = len(data_loader)

for epoch in range(training_epochs):
    avg_cost = 0.0

    for X, Y in data_loader:
        # Flatten each 28x28 image into a 784-vector; labels remain plain
        # class indices (CrossEntropyLoss expects integer targets, not one-hot).
        X = X.view(-1, 28 * 28).to(device)
        Y = Y.to(device)

        optimizer.zero_grad()
        hypothesis = model(X)
        cost = criterion(hypothesis, Y)
        cost.backward()
        optimizer.step()

        # BUG FIX: accumulate the detached Python float via .item().
        # Accumulating `cost` itself (a graph-attached tensor) kept every
        # batch's autograd graph alive for the whole epoch, growing memory.
        avg_cost += cost.item() / total_batch

    print('Epoch:', '%04d' % (epoch + 1), 'cost =', '{:.9f}'.format(avg_cost))

print('Learning finished')
Epoch: 0001 cost = 0.300109118 Epoch: 0002 cost = 0.114128537 Epoch: 0003 cost = 0.074882820 Epoch: 0004 cost = 0.052360620 Epoch: 0005 cost = 0.039674822 Epoch: 0006 cost = 0.030959716 Epoch: 0007 cost = 0.024696343 Epoch: 0008 cost = 0.020191995 Epoch: 0009 cost = 0.017330298 Epoch: 0010 cost = 0.014325669 Epoch: 0011 cost = 0.016393280 Epoch: 0012 cost = 0.011057314 Epoch: 0013 cost = 0.013703486 Epoch: 0014 cost = 0.009413618 Epoch: 0015 cost = 0.009952513 Learning finished
# Evaluate on the held-out test set; no_grad disables autograd bookkeeping.
with torch.no_grad():
    # BUG FIX 1: `test_data`/`test_labels` are deprecated (removed in recent
    # torchvision) — use `.data`/`.targets` instead.
    # BUG FIX 2: `.data` holds raw uint8 pixels in [0, 255], but the model was
    # trained on ToTensor() inputs scaled to [0, 1]; divide by 255 so the test
    # preprocessing matches training.
    X_test = mnist_test.data.view(-1, 28 * 28).float().to(device) / 255.0
    Y_test = mnist_test.targets.to(device)

    prediction = model(X_test)
    correct_prediction = torch.argmax(prediction, 1) == Y_test
    accuracy = correct_prediction.float().mean()
    print('Accuracy:', accuracy.item())

    # Sample one random test image and predict its class.
    r = random.randint(0, len(mnist_test) - 1)
    X_single_data = mnist_test.data[r:r + 1].view(-1, 28 * 28).float().to(device) / 255.0
    Y_single_data = mnist_test.targets[r:r + 1].to(device)

    print('Label: ', Y_single_data.item())
    single_prediction = model(X_single_data)
    print('Prediction: ', torch.argmax(single_prediction, 1).item())
Accuracy: 0.9790999889373779 Label: 8 Prediction: 8