The Effect of Different Numbers of Hidden Units on the Results of a Multi-Class Classification Task
Posted: 2021-11-05
1 Import the required packages
import torch
import torch.nn as nn
import numpy as np
import torchvision
import torchvision.transforms as transforms
import matplotlib.pyplot as plt
2 Download the MNIST dataset and load the data
# Download the MNIST handwritten digit dataset
mnist_train = torchvision.datasets.MNIST(root='../Datasets/MNIST', train=True, download=True, transform=transforms.ToTensor())
mnist_test = torchvision.datasets.MNIST(root='../Datasets/MNIST', train=False, download=True, transform=transforms.ToTensor())

# Load the data
batch_size = 32
train_iter = torch.utils.data.DataLoader(mnist_train, batch_size=batch_size, shuffle=True, num_workers=0)
test_iter = torch.utils.data.DataLoader(mnist_test, batch_size=batch_size, shuffle=False, num_workers=0)
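As a quick sanity check (not in the original post), the dataset sizes and the shape of one sample can be printed; MNIST has 60,000 training and 10,000 test images, each a 1x28x28 tensor after ToTensor.

# Sanity check (illustrative): dataset sizes and the shape of one sample
print(len(mnist_train), len(mnist_test))  # 60000 10000
feature, label = mnist_train[0]
print(feature.shape, label)  # torch.Size([1, 28, 28]) and an integer class label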
3 Define model parameters
# Number of training epochs and the learning rate
num_epochs, lr = 50, 0.01
4 Define the model
class LinearNet(nn.Module):
    def __init__(self, num_inputs, num_outputs, num_hiddens):
        super(LinearNet, self).__init__()
        self.linear1 = nn.Linear(num_inputs, num_hiddens)
        self.relu = nn.ReLU()
        self.linear2 = nn.Linear(num_hiddens, num_outputs)

    def forward(self, x):
        x = self.linear1(x)
        x = self.relu(x)
        # Return raw logits: nn.CrossEntropyLoss applies log-softmax itself, so
        # applying ReLU after the output layer (as the original did) would clip negative logits.
        return self.linear2(x)
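A minimal shape check, assuming an arbitrary hidden width of 256 and a random batch of four flattened images; it only confirms that the network maps 784 inputs to 10 class scores.

demo_net = LinearNet(784, 10, 256)  # 256 hidden units chosen arbitrarily for this check
X = torch.randn(4, 784)             # a random batch of 4 flattened images
print(demo_net(X).shape)            # torch.Size([4, 10])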
5 Define the training function
def train(net, train_iter, test_iter, loss, num_epochs, batch_size, params=None, lr=None, optimizer=None):
    # params and lr are kept for signature compatibility; the optimizer passed in is used directly
    train_ls, test_ls = [], []
    for epoch in range(num_epochs):
        ls, count = 0, 0
        for X, y in train_iter:
            X = X.reshape(-1, num_inputs)  # flatten 1x28x28 images into 784-dim vectors
            l = loss(net(X), y)
            optimizer.zero_grad()
            l.backward()
            optimizer.step()
            ls += l.item() * y.shape[0]  # CrossEntropyLoss returns the batch mean, so scale by batch size
            count += y.shape[0]
        train_ls.append(ls / count)  # average per-sample training loss for this epoch
        ls, count = 0, 0
        with torch.no_grad():  # no gradients needed during evaluation
            for X, y in test_iter:
                X = X.reshape(-1, num_inputs)
                l = loss(net(X), y)
                ls += l.item() * y.shape[0]
                count += y.shape[0]
        test_ls.append(ls / count)
        if (epoch + 1) % 5 == 0:
            print('epoch: %d, train loss: %f, test loss: %f' % (epoch + 1, train_ls[-1], test_ls[-1]))
    return train_ls, test_ls
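The training function tracks only the loss. If classification accuracy is also wanted, a small helper along the following lines could be added; evaluate_accuracy is not part of the original post, and it assumes num_inputs is defined as in step 6 below.

def evaluate_accuracy(net, data_iter):
    # Illustrative helper: fraction of correctly classified samples
    correct, total = 0, 0
    with torch.no_grad():
        for X, y in data_iter:
            X = X.reshape(-1, num_inputs)
            y_hat = net(X).argmax(dim=1)  # predicted class = index of the largest logit
            correct += (y_hat == y).sum().item()
            total += y.shape[0]
    return correct / total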
6 Train the model
different_hiddens = [100, 200, 300, 400, 500, 600, 700]
# Number of neurons in the input and output layers
num_inputs, num_outputs = 784, 10
# Define the loss function
loss = nn.CrossEntropyLoss()

Train_loss, Test_loss = [], []
for cur_hiddens in different_hiddens:
    net = LinearNet(num_inputs, num_outputs, cur_hiddens)
    # Initialize all weights and biases before handing the parameters to the optimizer
    for param in net.parameters():
        nn.init.normal_(param, mean=0, std=0.01)
    # Use the learning rate defined above (the original hard-coded 0.001 here despite setting lr = 0.01)
    optimizer = torch.optim.SGD(net.parameters(), lr=lr)
    train_ls, test_ls = train(net, train_iter, test_iter, loss, num_epochs, batch_size, net.parameters(), lr, optimizer)
    Train_loss.append(train_ls)
    Test_loss.append(test_ls)
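If the evaluate_accuracy helper sketched earlier is defined, two lines like these could go at the end of each loop iteration to compare the final accuracy of each width (a hypothetical addition, not in the original post):

    acc = evaluate_accuracy(net, test_iter)  # hypothetical: final test accuracy for this width
    print(f'hidden units: {cur_hiddens}, test accuracy: {acc:.4f}')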
7 Plot the loss for different numbers of hidden units
x = np.arange(1, num_epochs + 1)  # epoch indices on the horizontal axis
plt.figure(figsize=(10, 8))
for i in range(len(different_hiddens)):
    plt.plot(x, Train_loss[i], label=f'Neurons: {different_hiddens[i]}', linewidth=1.5)
plt.xlabel('epoch')
plt.ylabel('loss')
plt.legend()
plt.title('Train loss vs different hiddens')
plt.show()
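The loop in step 6 also collects Test_loss, which is never plotted above; a companion figure for the test loss would follow the same pattern:

plt.figure(figsize=(10, 8))
for i in range(len(different_hiddens)):
    plt.plot(x, Test_loss[i], label=f'Neurons: {different_hiddens[i]}', linewidth=1.5)
plt.xlabel('epoch')
plt.ylabel('loss')
plt.legend()
plt.title('Test loss vs different hiddens')
plt.show()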
Cultivate the causes, and strive for the results. Author: 希望每天漲粉. When reposting, please credit the original link: https://www.cnblogs.com/BlairGrowing/p/15511129.html