Path: blob/master/CNN/lab-10-1-mnist_softmax.py
# Lab 10 MNIST and softmax
import torch
import torchvision.datasets as dsets
import torchvision.transforms as transforms
import random

device = 'cuda' if torch.cuda.is_available() else 'cpu'

# for reproducibility
random.seed(777)
torch.manual_seed(777)
if device == 'cuda':
    torch.cuda.manual_seed_all(777)

# parameters
learning_rate = 0.001
training_epochs = 15
batch_size = 100

# MNIST dataset
mnist_train = dsets.MNIST(root='MNIST_data/',
                          train=True,
                          transform=transforms.ToTensor(),
                          download=True)

mnist_test = dsets.MNIST(root='MNIST_data/',
                         train=False,
                         transform=transforms.ToTensor(),
                         download=True)

# dataset loader
data_loader = torch.utils.data.DataLoader(dataset=mnist_train,
                                          batch_size=batch_size,
                                          shuffle=True,
                                          drop_last=True)

# MNIST data image of shape 28 * 28 = 784
linear = torch.nn.Linear(784, 10, bias=True).to(device)

# define cost/loss & optimizer
criterion = torch.nn.CrossEntropyLoss().to(device)  # Softmax is internally computed.
optimizer = torch.optim.Adam(linear.parameters(), lr=learning_rate)

total_batch = len(data_loader)
for epoch in range(training_epochs):
    avg_cost = 0

    for X, Y in data_loader:
        # reshape input image into [batch_size by 784]
        # label is not one-hot encoded
        X = X.view(-1, 28 * 28).to(device)
        Y = Y.to(device)

        optimizer.zero_grad()
        hypothesis = linear(X)
        cost = criterion(hypothesis, Y)
        cost.backward()
        optimizer.step()

        avg_cost += cost / total_batch

    print('Epoch:', '%04d' % (epoch + 1), 'cost =', '{:.9f}'.format(avg_cost))

print('Learning finished')

# Test the model using test sets
with torch.no_grad():
    X_test = mnist_test.test_data.view(-1, 28 * 28).float().to(device)
    Y_test = mnist_test.test_labels.to(device)

    prediction = linear(X_test)
    correct_prediction = torch.argmax(prediction, 1) == Y_test
    accuracy = correct_prediction.float().mean()
    print('Accuracy:', accuracy.item())

    # Get one and predict
    r = random.randint(0, len(mnist_test) - 1)
    X_single_data = mnist_test.test_data[r:r + 1].view(-1, 28 * 28).float().to(device)
    Y_single_data = mnist_test.test_labels[r:r + 1].to(device)

    print('Label: ', Y_single_data.item())
    single_prediction = linear(X_single_data)
    print('Prediction: ', torch.argmax(single_prediction, 1).item())
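
# --- Added note (not part of the original lab): why there is no softmax layer ---
# torch.nn.CrossEntropyLoss applies log-softmax to the raw logits internally and then
# computes the negative log-likelihood, so `linear` outputs logits directly.
# A minimal sketch of that equivalence; the tensor names below are illustrative only.
import torch.nn.functional as F

_logits = torch.randn(4, 10)              # fake batch of 4 logit vectors
_labels = torch.randint(0, 10, (4,))      # fake integer class labels
_ce = torch.nn.CrossEntropyLoss()(_logits, _labels)
_nll = torch.nn.NLLLoss()(F.log_softmax(_logits, dim=1), _labels)
assert torch.allclose(_ce, _nll)          # both paths give the same loss value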
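
# --- Added sketch (assumption, not part of the original lab): evaluating with the
# same preprocessing as training ---
# `mnist_test.test_data` / `test_labels` are deprecated aliases of `data` / `targets`
# in newer torchvision, and they hold raw uint8 pixels in [0, 255], whereas the training
# loader sees ToTensor() outputs scaled to [0, 1]. One way to evaluate the `linear`
# model defined above with matching preprocessing is to go through a DataLoader as well:
test_loader = torch.utils.data.DataLoader(dataset=mnist_test,
                                          batch_size=batch_size,
                                          shuffle=False)
correct = 0
with torch.no_grad():
    for X, Y in test_loader:
        X = X.view(-1, 28 * 28).to(device)
        Y = Y.to(device)
        correct += (torch.argmax(linear(X), 1) == Y).sum().item()
print('Accuracy (scaled inputs):', correct / len(mnist_test))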