方法一:直接在 epoch 训练循环中累计并求取准确率
简介:此段代码是从 LeNet5 的训练代码中截取的。
def train_model(model, train_loader):
    """Train `model` for 5 epochs on `train_loader`, printing loss and
    running accuracy every 100 batches.

    model        -- a torch.nn.Module classifier; its output is fed to
                    CrossEntropyLoss, so it must return raw class logits.
    train_loader -- iterable of (X_batch, y_batch) mini-batches; also
                    needs `.dataset` and `len()` for progress logging.
    """
    optimizer = torch.optim.Adam(model.parameters())
    loss_func = nn.CrossEntropyLoss()
    EPOCHS = 5
    for epoch in range(EPOCHS):
        correct = 0  # correct predictions accumulated over this epoch
        seen = 0     # samples processed this epoch (handles partial batches)
        for batch_idx, (X_batch, y_batch) in enumerate(train_loader):
            optimizer.zero_grad()
            # .float() casts inputs (e.g. uint8 image tensors) to float32
            # so they match the model's parameter dtype.
            output = model(X_batch.float())
            loss = loss_func(output, y_batch)
            loss.backward()
            optimizer.step()
            # torch.max over dim 1 returns (values, indices); [1] takes the
            # indices, i.e. the predicted class per sample.
            predicted = torch.max(output.data, 1)[1]
            # .item() keeps `correct` a plain int instead of a 0-d tensor.
            correct += (predicted == y_batch).sum().item()
            seen += y_batch.size(0)
            if batch_idx % 100 == 0:
                # BUG FIX: the original accuracy expression
                #   float(correct*100)/float(BATCH_SIZE)*(batch_idx+1)
                # multiplied by (batch_idx+1) instead of dividing by it and
                # relied on a global BATCH_SIZE. `100. * correct / seen` is
                # the correct running accuracy, even with a partial last batch.
                print('Epoch :{}[{}/{}({:.0f}%)]\t Loss:{:.6f}\t Accuracy:{:.3f}'.format(
                    epoch,
                    batch_idx * len(X_batch),
                    len(train_loader.dataset),
                    100. * batch_idx / len(train_loader),
                    loss.data.item(),
                    100. * correct / seen))
if __name__ == '__main__':
    # Entry point: build LeNet-5, show its layer summary, train it on the
    # training loader, then evaluate on the held-out test loader.
    # NOTE(review): LeNet5, train_loader, test_loader, evaluate and
    # BATCH_SIZE are assumed to be defined elsewhere in this file/notebook.
    myModel = LeNet5()
    print(myModel)
    train_model(myModel, train_loader)
    evaluate(myModel, test_loader, BATCH_SIZE)
方法二:将评估指标封装成函数,然后在 epoch 循环中调用该函数
简介:此段代码是从 Titanic(泰坦尼克号)数据分析的训练代码中截取的。

epochs = 10
log_step_freq = 30  # print batch-level log every 30 steps

# Per-epoch history: training and validation loss plus the chosen metric.
# NOTE(review): net, optimizer, loss_func, metric_func, metric_name,
# dl_train and dl_valid are assumed to be defined earlier in the file.
dfhistory = pd.DataFrame(columns=['epoch', 'loss', metric_name, 'val_loss', 'val_' + metric_name])

print('Start Training...')
nowtime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print('========='*8 + '%s'%nowtime)

for epoch in range(1, epochs + 1):
    # 1. Training loop
    net.train()
    loss_sum = 0.0
    metric_sum = 0.0
    step = 1  # fallback so the epoch averages below are safe if dl_train is empty
    for step, (features, labels) in enumerate(dl_train, 1):
        # Zero accumulated gradients before each backward pass.
        optimizer.zero_grad()
        # Forward pass and loss/metric computation.
        predictions = net(features)
        loss = loss_func(predictions, labels)
        metric = metric_func(predictions, labels)
        # Backward pass and parameter update.
        loss.backward()
        optimizer.step()
        # Batch-level logging: accumulate, then print running averages.
        loss_sum += loss.item()
        metric_sum += metric.item()
        if step % log_step_freq == 0:
            print(('[Step = %d] loss: %.3f,' + metric_name+': %.3f %%')%(step,loss_sum/step,100*metric_sum/step))

    # 2. Validation loop
    net.eval()
    val_loss_sum = 0.0
    val_metric_sum = 0.0
    val_step = 1  # fallback, as above, for an empty dl_valid
    for val_step, (features, labels) in enumerate(dl_valid, 1):
        # Disable gradient tracking during evaluation.
        with torch.no_grad():
            pred = net(features)
            val_loss = loss_func(pred, labels)
            # BUG FIX: the original called metric_func(labels, pred),
            # swapping the argument order used in the training loop
            # (predictions first). For asymmetric metrics this silently
            # produced a wrong validation score.
            val_metric = metric_func(pred, labels)
        val_loss_sum += val_loss.item()
        val_metric_sum += val_metric.item()

    # 3. Record epoch-level history and print a summary line.
    info = (epoch, loss_sum/step, 100*metric_sum/step,
            val_loss_sum/val_step, 100*val_metric_sum/val_step)
    dfhistory.loc[epoch-1] = info
    print(('\nEPOCH = %d,loss = %.3f,' + metric_name+\
    '=%.3f %%,val_loss = %.3f'+' val_'+metric_name+'= %.3f %%')%info)
    nowtime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print('\n'+'=========='*8 + '%s'%nowtime)

print('Finishing Training...')