Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
deeplearningzerotoall
GitHub Repository: deeplearningzerotoall/PyTorch
Path: blob/master/CNN/lab-10-1-mnist_softmax.ipynb
618 views
Kernel: Python 3
# Lab 10 MNIST and softmax import torch import torchvision.datasets as dsets import torchvision.transforms as transforms import random
# Choose the compute device, then seed every RNG in use so runs repeat exactly.
if torch.cuda.is_available():
    device = 'cuda'
else:
    device = 'cpu'

torch.manual_seed(777)
random.seed(777)
if device == 'cuda':
    torch.cuda.manual_seed_all(777)
# Training hyper-parameters.
learning_rate = 0.001   # Adam step size
training_epochs = 15    # full passes over the training set
batch_size = 100        # samples per mini-batch
# Download MNIST on first use; ToTensor() converts each image to a
# float tensor with pixel values scaled into [0, 1].
mnist_train = dsets.MNIST(
    root='MNIST_data/',
    train=True,
    transform=transforms.ToTensor(),
    download=True,
)
mnist_test = dsets.MNIST(
    root='MNIST_data/',
    train=False,
    transform=transforms.ToTensor(),
    download=True,
)
# Mini-batch loader: reshuffles every epoch and drops the final
# partial batch so every batch has exactly `batch_size` samples.
data_loader = torch.utils.data.DataLoader(
    dataset=mnist_train,
    batch_size=batch_size,
    shuffle=True,
    drop_last=True,
)
# Softmax-regression model: one linear layer mapping a flattened
# 28 * 28 = 784-pixel image to 10 digit-class logits.
linear = torch.nn.Linear(28 * 28, 10, bias=True).to(device)
# CrossEntropyLoss applies log-softmax internally, so the model
# feeds it raw logits and integer class labels.
criterion = torch.nn.CrossEntropyLoss().to(device)

# Adam updates the single layer's weight matrix and bias vector.
optimizer = torch.optim.Adam(linear.parameters(), lr=learning_rate)
# Train for `training_epochs` epochs of mini-batch gradient descent,
# printing the average loss per epoch.
total_batch = len(data_loader)

for epoch in range(training_epochs):
    avg_cost = 0.0

    for X, Y in data_loader:
        # Flatten each 1x28x28 image into a 784-vector; labels stay as
        # plain class indices (CrossEntropyLoss does not want one-hot).
        X = X.view(-1, 28 * 28).to(device)
        Y = Y.to(device)

        optimizer.zero_grad()
        hypothesis = linear(X)
        cost = criterion(hypothesis, Y)
        cost.backward()
        optimizer.step()

        # .item() extracts a detached Python float. The original
        # accumulated the loss tensor itself, which kept every batch's
        # autograd graph alive for the whole epoch.
        avg_cost += cost.item() / total_batch

    print('Epoch:', '%04d' % (epoch + 1), 'cost =', '{:.9f}'.format(avg_cost))

print('Learning finished')
Epoch: 0001 cost = 0.612622023 Epoch: 0002 cost = 0.343994737 Epoch: 0003 cost = 0.307820767 Epoch: 0004 cost = 0.291483581 Epoch: 0005 cost = 0.281717360 Epoch: 0006 cost = 0.274238169 Epoch: 0007 cost = 0.269270778 Epoch: 0008 cost = 0.265026242 Epoch: 0009 cost = 0.261815339 Epoch: 0010 cost = 0.258980662 Epoch: 0011 cost = 0.256636679 Epoch: 0012 cost = 0.254542708 Epoch: 0013 cost = 0.252477616 Epoch: 0014 cost = 0.251285702 Epoch: 0015 cost = 0.249446675 Learning finished
# Evaluate on the held-out test set; no_grad() disables autograd tracking.
with torch.no_grad():
    # `data` / `targets` replace the deprecated `test_data` / `test_labels`
    # attributes of torchvision's MNIST dataset. The raw `data` tensor holds
    # uint8 pixels in [0, 255]; divide by 255 so test inputs match the
    # [0, 1] scaling that ToTensor() applied to the training inputs —
    # the original fed unscaled pixels, a train/test preprocessing mismatch.
    X_test = mnist_test.data.view(-1, 28 * 28).float().to(device) / 255.0
    Y_test = mnist_test.targets.to(device)

    prediction = linear(X_test)
    correct_prediction = torch.argmax(prediction, 1) == Y_test
    accuracy = correct_prediction.float().mean()
    print('Accuracy:', accuracy.item())

    # Spot-check a single random test image.
    r = random.randint(0, len(mnist_test) - 1)
    X_single_data = mnist_test.data[r:r + 1].view(-1, 28 * 28).float().to(device) / 255.0
    Y_single_data = mnist_test.targets[r:r + 1].to(device)

    print('Label: ', Y_single_data.item())
    single_prediction = linear(X_single_data)
    print('Prediction: ', torch.argmax(single_prediction, 1).item())
Accuracy: 0.9101999998092651 Label: 8 Prediction: 3