时间20210430
作者:知道许多的橘子
实现:WRN28-4对CIFAR-10数据集的分类
测试集准确度:95.3%
实现框架pytorch
数据增强方法:Normalize+Fix等
训练次数:200
阶段学习率[0-200]:smooth_step(10,40,100,150,epoch_s)
优化器optimizer = torch.optim.SGD(model.parameters(),lr=smooth_step(10,40,100,150,epoch_s), momentum=0.9,weight_decay=1e-5)
如果感觉算力不够用了,或者心疼自己电脑了!
可以用我实验室的算力,试试呢!
害,谁叫我的算力都用不完呢!
支持所有框架!实际上框架都配置好了!
傻瓜式云计算!
Tesla v100 1卡,2卡,4卡,8卡
内存16-128G
cpu:8-24核
想要?加个微信:15615634293
或者点这里,找我!
欢迎打扰!
# 数据见Fcat_20210419
# In[1] 导入所需工具包
from __future__ import division
import os
import torch
import torch.nn as nn
import torchvision
from torchvision import datasets,transforms
import time
from torch.nn import functional as F
from math import floor, ceil
import math
import numpy as np
import sys
sys.path.append(r'/home/megstudio/workspace/')
from FMIX.fmix import sample_and_apply, sample_mask
#import torchvision.transforms as transforms
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
print(device)
import randomfrom torch.autograd import Variable
# In[1] 设置超参数
num_epochs = 200
batch_size = 100
tbatch_size = 100
test_name = '/_W28_04_BJGB3_BLN_S_C10'# In[1] 加载数据
# In[2]#图像预处理变换的定义
transform_train = transforms.Compose([
transforms.RandomCrop(32, padding=4), #在一个随机的位置进行裁剪,32正方形裁剪,每个边框上填充4
transforms.RandomHorizontalFlip(), #以给定的概率随机水平翻转给定的PIL图像,默认值为0.5
transforms.ToTensor(), #将PIL Image或者 ndarray 转换为tensor,并且归一化至[0-1]
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),#用平均值和标准偏差归一化张量图像,
#(M1,…,Mn)和(S1,…,Sn)将标准化输入的每个通道
])
transform_test = transforms.Compose([ #测试集同样进行图像预处理
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
cifar10_train = torchvision.datasets.CIFAR10(root='/home/megstudio/dataset', train=True, download=False, transform=transform_train)
cifar10_test = torchvision.datasets.CIFAR10(root='/home/megstudio/dataset', train=False, download=False, transform=transform_test)train_loader = torch.utils.data.DataLoader(cifar10_train, batch_size=batch_size, shuffle=True, num_workers=2)
test_loader = torch.utils.data.DataLoader(cifar10_test, batch_size=tbatch_size, shuffle=False, num_workers=2)# In[1] 加载模型# Code modified from https://github.com/xternalz/WideResNet-pytorchclass BasicBlock(nn.Module):def __init__(self, in_planes, out_planes, stride, dropRate=0.0):super(BasicBlock, self).__init__()self.bn1 = nn.BatchNorm2d(in_planes)self.relu1 = nn.ReLU(inplace=True)self.conv1 = nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,padding=1, bias=False)self.bn2 = nn.BatchNorm2d(out_planes)self.relu2 = nn.ReLU(inplace=True)self.conv2 = nn.Conv2d(out_planes, out_planes, kernel_size=3, stride=1,padding=1, bias=False)self.droprate = dropRateself.equalInOut = (in_planes == out_planes)self.convShortcut = (not self.equalInOut) and nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride,padding=0, bias=False) or Nonedef forward(self, x):if not self.equalInOut:x = self.relu1(self.bn1(x))else:out = self.relu1(self.bn1(x))out = self.relu2(self.bn2(self.conv1(out if self.equalInOut else x)))if self.droprate > 0:out = F.dropout(out, p=self.droprate, training=self.training)out = self.conv2(out)return torch.add(x if self.equalInOut else self.convShortcut(x), out)class NetworkBlock(nn.Module):def __init__(self, nb_layers, in_planes, out_planes, block, stride, dropRate=0.0):super(NetworkBlock, self).__init__()self.layer = self._make_layer(block, in_planes, out_planes, nb_layers, stride, dropRate)def _make_layer(self, block, in_planes, out_planes, nb_layers, stride, dropRate):layers = []for i in range(nb_layers):layers.append(block(i == 0 and in_planes or out_planes, out_planes, i == 0 and stride or 1, dropRate))return nn.Sequential(*layers)def forward(self, x):return self.layer(x)class WideResNet(nn.Module):def __init__(self, depth, num_classes, widen_factor=1, dropRate=0.0, nc=1):super(WideResNet, self).__init__()nChannels = [16, 16 * widen_factor, 32 * widen_factor, 64 * widen_factor]assert (depth - 4) % 6 == 0, 'depth should be 6n+4'n = (depth - 4) // 6block = BasicBlock# 1st conv before any network blockself.conv1 = nn.Conv2d(nc, nChannels[0], kernel_size=3, stride=1,padding=1, bias=False)# 1st blockself.block1 = NetworkBlock(n, nChannels[0], nChannels[1], block, 1, dropRate)# 2nd blockself.block2 = NetworkBlock(n, nChannels[1], nChannels[2], block, 2, dropRate)# 3rd blockself.block3 = NetworkBlock(n, nChannels[2], nChannels[3], block, 2, dropRate)# global average pooling and classifierself.bn1 = nn.BatchNorm2d(nChannels[3])self.relu = nn.ReLU(inplace=True)self.fc = nn.Linear(nChannels[3], num_classes)self.nChannels = nChannels[3]for m in self.modules():if isinstance(m, nn.Conv2d):n = m.kernel_size[0] * m.kernel_size[1] * m.out_channelsm.weight.data.normal_(0, math.sqrt(2. / n))elif isinstance(m, nn.BatchNorm2d):m.weight.data.fill_(1)m.bias.data.zero_()elif isinstance(m, nn.Linear):m.bias.data.zero_()def forward(self, x):out = self.conv1(x)out = self.block1(out)out = self.block2(out)out = self.block3(out)out = self.relu(self.bn1(out))out = F.avg_pool2d(out, 7)out = out.view(-1, self.nChannels)return self.fc(out)def wrn(**kwargs):"""Constructs a Wide Residual Networks."""model = WideResNet(**kwargs)return modeldef smooth_step(a,b,c,d,x):level_s=0.01level_m=0.1level_n=0.01level_r=0.005if x<=a:return level_sif a<x<=b:return (((x-a)/(b-a))*(level_m-level_s)+level_s)if b<x<=c:return level_mif c<x<=d:return level_nif d<x:return level_r
# In[1] 设置一个通过优化器更新学习率的函数
def update_lr(optimizer, lr):for param_group in optimizer.param_groups:param_group['lr'] = lr
# In[1] 定义测试函数
def test(model,test_loader):model.eval()with torch.no_grad():correct = 0total = 0for images, labels in test_loader:images = images.to(device)labels = labels.to(device)outputs= model(images)_, predicted = torch.max(outputs.data, 1)total += labels.size(0)correct += (predicted == labels).sum().item()acc=100 * correct / totalprint('Accuracy of the model on the test images: {} %'.format(acc))return acc# In[1] 定义模型和损失函数
# =============================================================================
def mkdir(path):folder = os.path.exists(path)if not folder:                   #判断是否存在文件夹如果不存在则创建为文件夹os.makedirs(path)            #makedirs 创建文件时如果路径不存在会创建这个路径print("---  new folder...  ---")print("---  OK  ---")else:print("---  There is this folder!  ---")
path = os.getcwd()
path = path+test_name
print(path)
mkdir(path)             #调用函数
# In[1] 定义模型和损失函数
#$$
# =============================================================================
try:model = torch.load(path+'/model.pkl').to(device)epoch_s = np.load(path+'/learning_rate.npy')#learning_rate *= 3print(epoch_s)train_loss = np.load(path+'/test_acc.npy').tolist()test_acc = np.load(path+'/test_acc.npy').tolist()print("---  There is a model in the folder...  ---")
except:print("---  Create a new model...  ---")epoch_s = 0model = wrn(depth=28, num_classes=10, widen_factor=4, dropRate=0.4, nc=3).to(device)train_loss=[]#准备放误差     test_acc=[]#准备放测试准确率
# =============================================================================
def saveModel(model,epoch,test_acc,train_loss):torch.save(model, path+'/model.pkl')# torch.save(model.state_dict(), 'resnet.ckpt')epoch_save=np.array(epoch)np.save(path+'/learning_rate.npy',epoch_save)test_acc=np.array(test_acc)np.save(path+'/test_acc.npy',test_acc)train_loss=np.array(train_loss)np.save(path+'/train_loss.npy',train_loss)criterion = nn.CrossEntropyLoss()
#optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate, weight_decay=1e-5)
optimizer = torch.optim.SGD(model.parameters(),lr=smooth_step(10,40,100,150,epoch_s), momentum=0.9,weight_decay=1e-5)
#scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer,  milestones = [100, 150], gamma = 0.1, last_epoch=-1)
# In[1] 训练模型更新学习率
total_step = len(train_loader)
#curr_lr = learning_rate
for epoch in range(epoch_s, num_epochs):#scheduler.step()in_epoch = time.time()for i, (images, labels) in enumerate(train_loader):# =============================================================================# FMiX# print(type(images))images, index, lam = sample_and_apply(images, alpha=1, decay_power=3, shape=(32,32))images = images.type(torch.FloatTensor) shuffled_label = labels[index].to(device)images = images.to(device)labels = labels.to(device)# Forward passoutputs = model(images)loss = lam*criterion(outputs, labels) + (1-lam)*criterion(outputs, shuffled_label)# loss = criterion(outputs, labels)# =============================================================================# Backward and optimizeoptimizer.zero_grad()loss.backward(retain_graph=False)optimizer.step()if (i + 1) % 100 == 0:print("Epoch [{}/{}], Step [{}/{}] Loss: {:.4f}".format(epoch + 1, num_epochs, i + 1, total_step, loss.item()))# 记录误差和精度train_loss.append(loss.item())acctemp = test(model, test_loader)test_acc.append(acctemp)# 更新学习率curr_lr = smooth_step(10, 40, 100, 150, epoch)update_lr(optimizer, curr_lr)# 保存模型和一些参数、指标saveModel(model, epoch, test_acc, train_loss)# 记录时间out_epoch = time.time()print(f"use {(out_epoch-in_epoch)//60}min{(out_epoch-in_epoch)%60}s")
#$$
# In[1] 测试训练集
test(model, train_loader)
cuda
/home/megstudio/workspace/__20210430/_W28_04_BJGB3_BLN_S_C10
---  There is this folder!  ---
---  Create a new model...  ---
Epoch [1/200], Step [100/500] Loss: 2.1790
Epoch [1/200], Step [200/500] Loss: 2.1559
Epoch [1/200], Step [300/500] Loss: 2.0387
Epoch [1/200], Step [400/500] Loss: 2.0988
Epoch [1/200], Step [500/500] Loss: 1.8831
Accuracy of the model on the test images: 15.8 %
use 0.0min21.123395442962646s
Epoch [2/200], Step [100/500] Loss: 2.2224
Epoch [2/200], Step [200/500] Loss: 2.1422
Epoch [2/200], Step [300/500] Loss: 1.9208
Epoch [2/200], Step [400/500] Loss: 2.0676
Epoch [2/200], Step [500/500] Loss: 1.8473
Accuracy of the model on the test images: 38.1 %
use 0.0min20.357621431350708s
Epoch [3/200], Step [100/500] Loss: 1.9768
Epoch [3/200], Step [200/500] Loss: 2.0925
Epoch [3/200], Step [300/500] Loss: 1.8357
Epoch [3/200], Step [400/500] Loss: 1.8657
Epoch [3/200], Step [500/500] Loss: 2.0923
Accuracy of the model on the test images: 43.41 %
use 0.0min20.361759662628174s
Epoch [4/200], Step [100/500] Loss: 2.0043
Epoch [4/200], Step [200/500] Loss: 1.9353
Epoch [4/200], Step [300/500] Loss: 1.8797
Epoch [4/200], Step [400/500] Loss: 1.6698
Epoch [4/200], Step [500/500] Loss: 1.5369
Accuracy of the model on the test images: 48.11 %
use 0.0min20.455519914627075s
Epoch [5/200], Step [100/500] Loss: 1.9625
Epoch [5/200], Step [200/500] Loss: 1.8843
Epoch [5/200], Step [300/500] Loss: 2.0839
Epoch [5/200], Step [400/500] Loss: 1.7186
Epoch [5/200], Step [500/500] Loss: 1.9012
Accuracy of the model on the test images: 47.71 %
use 0.0min20.377105951309204s
Epoch [6/200], Step [100/500] Loss: 1.9017
Epoch [6/200], Step [200/500] Loss: 1.8785
Epoch [6/200], Step [300/500] Loss: 1.9705
Epoch [6/200], Step [400/500] Loss: 1.6972
Epoch [6/200], Step [500/500] Loss: 1.3769
Accuracy of the model on the test images: 57.25 %
use 0.0min20.3419349193573s
Epoch [7/200], Step [100/500] Loss: 1.4513
Epoch [7/200], Step [200/500] Loss: 1.5923
Epoch [7/200], Step [300/500] Loss: 1.8236
Epoch [7/200], Step [400/500] Loss: 1.9022
Epoch [7/200], Step [500/500] Loss: 1.9974
Accuracy of the model on the test images: 56.02 %
use 0.0min20.339054822921753s
Epoch [8/200], Step [100/500] Loss: 2.0042
Epoch [8/200], Step [200/500] Loss: 1.3628
Epoch [8/200], Step [300/500] Loss: 1.9355
Epoch [8/200], Step [400/500] Loss: 1.2330
Epoch [8/200], Step [500/500] Loss: 1.4815
Accuracy of the model on the test images: 58.41 %
use 0.0min20.403385639190674s
Epoch [9/200], Step [100/500] Loss: 1.1941
Epoch [9/200], Step [200/500] Loss: 1.1131
Epoch [9/200], Step [300/500] Loss: 1.4155
Epoch [9/200], Step [400/500] Loss: 1.7827
Epoch [9/200], Step [500/500] Loss: 1.3642
Accuracy of the model on the test images: 64.23 %
use 0.0min20.375659942626953s
Epoch [10/200], Step [100/500] Loss: 1.4404
Epoch [10/200], Step [200/500] Loss: 1.7268
Epoch [10/200], Step [300/500] Loss: 1.9433
Epoch [10/200], Step [400/500] Loss: 1.4888
Epoch [10/200], Step [500/500] Loss: 1.1935
Accuracy of the model on the test images: 63.67 %
use 0.0min20.35075068473816s
Epoch [11/200], Step [100/500] Loss: 1.4947
Epoch [11/200], Step [200/500] Loss: 1.7364
Epoch [11/200], Step [300/500] Loss: 1.8127
Epoch [11/200], Step [400/500] Loss: 1.4353
Epoch [11/200], Step [500/500] Loss: 1.7878
Accuracy of the model on the test images: 67.08 %
use 0.0min20.35635018348694s
Epoch [12/200], Step [100/500] Loss: 1.8190
Epoch [12/200], Step [200/500] Loss: 1.7807
Epoch [12/200], Step [300/500] Loss: 1.1506
Epoch [12/200], Step [400/500] Loss: 1.6618
Epoch [12/200], Step [500/500] Loss: 1.4403
Accuracy of the model on the test images: 69.18 %
use 0.0min20.43039631843567s
Epoch [13/200], Step [100/500] Loss: 1.7314
Epoch [13/200], Step [200/500] Loss: 0.9851
Epoch [13/200], Step [300/500] Loss: 1.6270
Epoch [13/200], Step [400/500] Loss: 1.0374
Epoch [13/200], Step [500/500] Loss: 1.2373
Accuracy of the model on the test images: 70.25 %
use 0.0min20.34923028945923s
Epoch [14/200], Step [100/500] Loss: 1.4373
Epoch [14/200], Step [200/500] Loss: 1.0507
Epoch [14/200], Step [300/500] Loss: 1.7159
Epoch [14/200], Step [400/500] Loss: 1.6959
Epoch [14/200], Step [500/500] Loss: 1.6582
Accuracy of the model on the test images: 70.07 %
use 0.0min20.372792959213257s
Epoch [15/200], Step [100/500] Loss: 1.5673
Epoch [15/200], Step [200/500] Loss: 1.6747
Epoch [15/200], Step [300/500] Loss: 1.2791
Epoch [15/200], Step [400/500] Loss: 1.6326
Epoch [15/200], Step [500/500] Loss: 1.1707
Accuracy of the model on the test images: 72.36 %
use 0.0min20.39280652999878s
Epoch [16/200], Step [100/500] Loss: 1.6406
Epoch [16/200], Step [200/500] Loss: 1.4876
Epoch [16/200], Step [300/500] Loss: 1.7292
Epoch [16/200], Step [400/500] Loss: 1.6747
Epoch [16/200], Step [500/500] Loss: 1.4707
Accuracy of the model on the test images: 75.81 %
use 0.0min20.440183401107788s
Epoch [17/200], Step [100/500] Loss: 1.8210
Epoch [17/200], Step [200/500] Loss: 1.2006
Epoch [17/200], Step [300/500] Loss: 1.8593
Epoch [17/200], Step [400/500] Loss: 1.6482
Epoch [17/200], Step [500/500] Loss: 1.7049
Accuracy of the model on the test images: 73.82 %
use 0.0min20.38654136657715s
Epoch [18/200], Step [100/500] Loss: 1.6006
Epoch [18/200], Step [200/500] Loss: 1.4309
Epoch [18/200], Step [300/500] Loss: 1.1157
Epoch [18/200], Step [400/500] Loss: 1.2330
Epoch [18/200], Step [500/500] Loss: 1.5895
Accuracy of the model on the test images: 76.96 %
use 0.0min20.359359979629517s
Epoch [19/200], Step [100/500] Loss: 1.0131
Epoch [19/200], Step [200/500] Loss: 1.5762
Epoch [19/200], Step [300/500] Loss: 0.8004
Epoch [19/200], Step [400/500] Loss: 1.4015
Epoch [19/200], Step [500/500] Loss: 1.8083
Accuracy of the model on the test images: 75.61 %
use 0.0min20.411002159118652s
Epoch [20/200], Step [100/500] Loss: 1.3020
Epoch [20/200], Step [200/500] Loss: 1.5370
Epoch [20/200], Step [300/500] Loss: 1.1880
Epoch [20/200], Step [400/500] Loss: 1.5604
Epoch [20/200], Step [500/500] Loss: 1.4079
Accuracy of the model on the test images: 75.06 %
use 0.0min20.35499906539917s
Epoch [21/200], Step [100/500] Loss: 1.3176
Epoch [21/200], Step [200/500] Loss: 1.5449
Epoch [21/200], Step [300/500] Loss: 1.5561
Epoch [21/200], Step [400/500] Loss: 1.5398
Epoch [21/200], Step [500/500] Loss: 1.0853
Accuracy of the model on the test images: 79.0 %
use 0.0min20.335134744644165s
Epoch [22/200], Step [100/500] Loss: 1.4288
Epoch [22/200], Step [200/500] Loss: 1.5144
Epoch [22/200], Step [300/500] Loss: 1.0419
Epoch [22/200], Step [400/500] Loss: 0.6797
Epoch [22/200], Step [500/500] Loss: 1.6279
Accuracy of the model on the test images: 80.12 %
use 0.0min20.401585578918457s
Epoch [23/200], Step [100/500] Loss: 0.7904
Epoch [23/200], Step [200/500] Loss: 0.9844
Epoch [23/200], Step [300/500] Loss: 1.5441
Epoch [23/200], Step [400/500] Loss: 1.3214
Epoch [23/200], Step [500/500] Loss: 1.5232
Accuracy of the model on the test images: 80.41 %
use 0.0min20.373417377471924s
Epoch [24/200], Step [100/500] Loss: 1.4240
Epoch [24/200], Step [200/500] Loss: 1.4639
Epoch [24/200], Step [300/500] Loss: 1.5579
Epoch [24/200], Step [400/500] Loss: 1.7012
Epoch [24/200], Step [500/500] Loss: 1.1945
Accuracy of the model on the test images: 81.64 %
use 0.0min20.36199688911438s
Epoch [25/200], Step [100/500] Loss: 0.7147
Epoch [25/200], Step [200/500] Loss: 1.7029
Epoch [25/200], Step [300/500] Loss: 0.7409
Epoch [25/200], Step [400/500] Loss: 1.4500
Epoch [25/200], Step [500/500] Loss: 1.2520
Accuracy of the model on the test images: 80.97 %
use 0.0min20.388251781463623s
Epoch [26/200], Step [100/500] Loss: 1.4758
Epoch [26/200], Step [200/500] Loss: 1.3249
Epoch [26/200], Step [300/500] Loss: 0.9811
Epoch [26/200], Step [400/500] Loss: 1.7301
Epoch [26/200], Step [500/500] Loss: 1.6176
Accuracy of the model on the test images: 83.47 %
use 0.0min20.412151336669922s
Epoch [27/200], Step [100/500] Loss: 1.3818
Epoch [27/200], Step [200/500] Loss: 1.2675
Epoch [27/200], Step [300/500] Loss: 1.0948
Epoch [27/200], Step [400/500] Loss: 1.3120
Epoch [27/200], Step [500/500] Loss: 1.4844
Accuracy of the model on the test images: 83.34 %
use 0.0min20.373883962631226s
Epoch [28/200], Step [100/500] Loss: 0.6394
Epoch [28/200], Step [200/500] Loss: 1.4804
Epoch [28/200], Step [300/500] Loss: 1.1436
Epoch [28/200], Step [400/500] Loss: 1.4518
Epoch [28/200], Step [500/500] Loss: 1.5433
Accuracy of the model on the test images: 84.37 %
use 0.0min20.37519359588623s
Epoch [29/200], Step [100/500] Loss: 1.4559
Epoch [29/200], Step [200/500] Loss: 1.6367
Epoch [29/200], Step [300/500] Loss: 1.2694
Epoch [29/200], Step [400/500] Loss: 1.4473
Epoch [29/200], Step [500/500] Loss: 1.1265
Accuracy of the model on the test images: 84.67 %
use 0.0min20.359278917312622s
Epoch [30/200], Step [100/500] Loss: 1.3819
Epoch [30/200], Step [200/500] Loss: 0.5442
Epoch [30/200], Step [300/500] Loss: 1.6324
Epoch [30/200], Step [400/500] Loss: 1.5540
Epoch [30/200], Step [500/500] Loss: 1.3784
Accuracy of the model on the test images: 85.49 %
use 0.0min20.419134616851807s
Epoch [31/200], Step [100/500] Loss: 0.7779
Epoch [31/200], Step [200/500] Loss: 0.9519
Epoch [31/200], Step [300/500] Loss: 1.1823
Epoch [31/200], Step [400/500] Loss: 1.4914
Epoch [31/200], Step [500/500] Loss: 1.0552
Accuracy of the model on the test images: 85.39 %
use 0.0min20.369332313537598s
Epoch [32/200], Step [100/500] Loss: 0.3527
Epoch [32/200], Step [200/500] Loss: 0.5136
Epoch [32/200], Step [300/500] Loss: 1.1930
Epoch [32/200], Step [400/500] Loss: 0.5148
Epoch [32/200], Step [500/500] Loss: 0.6566
Accuracy of the model on the test images: 86.03 %
use 0.0min20.37297773361206s
Epoch [33/200], Step [100/500] Loss: 0.5352
Epoch [33/200], Step [200/500] Loss: 1.0109
Epoch [33/200], Step [300/500] Loss: 0.4268
Epoch [33/200], Step [400/500] Loss: 1.0549
Epoch [33/200], Step [500/500] Loss: 1.2225
Accuracy of the model on the test images: 85.99 %
use 0.0min20.465530157089233s
Epoch [34/200], Step [100/500] Loss: 1.2413
Epoch [34/200], Step [200/500] Loss: 1.3109
Epoch [34/200], Step [300/500] Loss: 1.0909
Epoch [34/200], Step [400/500] Loss: 0.9841
Epoch [34/200], Step [500/500] Loss: 1.1050
Accuracy of the model on the test images: 87.44 %
use 0.0min20.359282970428467s
Epoch [35/200], Step [100/500] Loss: 0.5035
Epoch [35/200], Step [200/500] Loss: 1.1636
Epoch [35/200], Step [300/500] Loss: 1.4892
Epoch [35/200], Step [400/500] Loss: 1.3404
Epoch [35/200], Step [500/500] Loss: 0.9692
Accuracy of the model on the test images: 86.94 %
use 0.0min20.34918475151062s
Epoch [36/200], Step [100/500] Loss: 0.2755
Epoch [36/200], Step [200/500] Loss: 1.3625
Epoch [36/200], Step [300/500] Loss: 0.8354
Epoch [36/200], Step [400/500] Loss: 1.3214
Epoch [36/200], Step [500/500] Loss: 0.5478
Accuracy of the model on the test images: 87.78 %
use 0.0min20.52253794670105s
Epoch [37/200], Step [100/500] Loss: 1.2926
Epoch [37/200], Step [200/500] Loss: 0.9581
Epoch [37/200], Step [300/500] Loss: 0.6451
Epoch [37/200], Step [400/500] Loss: 1.2414
Epoch [37/200], Step [500/500] Loss: 1.0961
Accuracy of the model on the test images: 87.79 %
use 0.0min20.383507251739502s
Epoch [38/200], Step [100/500] Loss: 1.4111
Epoch [38/200], Step [200/500] Loss: 0.4365
Epoch [38/200], Step [300/500] Loss: 1.3412
Epoch [38/200], Step [400/500] Loss: 1.4119
Epoch [38/200], Step [500/500] Loss: 1.4039
Accuracy of the model on the test images: 88.16 %
use 0.0min20.35458493232727s
Epoch [39/200], Step [100/500] Loss: 1.3965
Epoch [39/200], Step [200/500] Loss: 0.3257
Epoch [39/200], Step [300/500] Loss: 1.2711
Epoch [39/200], Step [400/500] Loss: 0.9460
Epoch [39/200], Step [500/500] Loss: 1.3978
Accuracy of the model on the test images: 88.07 %
use 0.0min20.341139316558838s
Epoch [40/200], Step [100/500] Loss: 0.9849
Epoch [40/200], Step [200/500] Loss: 1.1538
Epoch [40/200], Step [300/500] Loss: 1.4707
Epoch [40/200], Step [400/500] Loss: 1.3971
Epoch [40/200], Step [500/500] Loss: 1.4379
Accuracy of the model on the test images: 88.02 %
use 0.0min20.370015144348145s
Epoch [41/200], Step [100/500] Loss: 0.4366
Epoch [41/200], Step [200/500] Loss: 0.9759
Epoch [41/200], Step [300/500] Loss: 1.0825
Epoch [41/200], Step [400/500] Loss: 1.0324
Epoch [41/200], Step [500/500] Loss: 0.4587
Accuracy of the model on the test images: 88.11 %
use 0.0min20.34174919128418s
Epoch [42/200], Step [100/500] Loss: 0.9650
Epoch [42/200], Step [200/500] Loss: 0.4519
Epoch [42/200], Step [300/500] Loss: 1.4696
Epoch [42/200], Step [400/500] Loss: 1.3668
Epoch [42/200], Step [500/500] Loss: 1.0172
Accuracy of the model on the test images: 89.22 %
use 0.0min20.347109079360962s
Epoch [43/200], Step [100/500] Loss: 1.3966
Epoch [43/200], Step [200/500] Loss: 1.0262
Epoch [43/200], Step [300/500] Loss: 1.1712
Epoch [43/200], Step [400/500] Loss: 1.3660
Epoch [43/200], Step [500/500] Loss: 1.3209
Accuracy of the model on the test images: 89.03 %
use 0.0min20.35543131828308s
Epoch [44/200], Step [100/500] Loss: 1.3771
Epoch [44/200], Step [200/500] Loss: 1.2250
Epoch [44/200], Step [300/500] Loss: 0.3338
Epoch [44/200], Step [400/500] Loss: 1.2763
Epoch [44/200], Step [500/500] Loss: 0.9907
Accuracy of the model on the test images: 89.15 %
use 0.0min20.35590386390686s
Epoch [45/200], Step [100/500] Loss: 1.1190
Epoch [45/200], Step [200/500] Loss: 1.3418
Epoch [45/200], Step [300/500] Loss: 0.3929
Epoch [45/200], Step [400/500] Loss: 0.7896
Epoch [45/200], Step [500/500] Loss: 1.0600
Accuracy of the model on the test images: 87.92 %
use 0.0min20.3344624042511s
Epoch [46/200], Step [100/500] Loss: 0.5885
Epoch [46/200], Step [200/500] Loss: 1.2346
Epoch [46/200], Step [300/500] Loss: 0.8508
Epoch [46/200], Step [400/500] Loss: 1.2530
Epoch [46/200], Step [500/500] Loss: 0.4935
Accuracy of the model on the test images: 90.71 %
use 0.0min20.371198892593384s
Epoch [47/200], Step [100/500] Loss: 0.4391
Epoch [47/200], Step [200/500] Loss: 0.7598
Epoch [47/200], Step [300/500] Loss: 1.1973
Epoch [47/200], Step [400/500] Loss: 1.3947
Epoch [47/200], Step [500/500] Loss: 0.6789
Accuracy of the model on the test images: 90.02 %
use 0.0min20.366332054138184s
Epoch [48/200], Step [100/500] Loss: 1.4140
Epoch [48/200], Step [200/500] Loss: 1.2009
Epoch [48/200], Step [300/500] Loss: 1.1860
Epoch [48/200], Step [400/500] Loss: 0.3818
Epoch [48/200], Step [500/500] Loss: 0.8759
Accuracy of the model on the test images: 90.28 %
use 0.0min20.336992502212524s
Epoch [49/200], Step [100/500] Loss: 1.2034
Epoch [49/200], Step [200/500] Loss: 1.0903
Epoch [49/200], Step [300/500] Loss: 0.8990
Epoch [49/200], Step [400/500] Loss: 0.4474
Epoch [49/200], Step [500/500] Loss: 0.8824
Accuracy of the model on the test images: 89.16 %
use 0.0min20.347647428512573s
Epoch [50/200], Step [100/500] Loss: 1.1515
Epoch [50/200], Step [200/500] Loss: 1.2839
Epoch [50/200], Step [300/500] Loss: 1.0883
Epoch [50/200], Step [400/500] Loss: 0.8164
Epoch [50/200], Step [500/500] Loss: 0.9372
Accuracy of the model on the test images: 90.11 %
use 0.0min20.36854887008667s
Epoch [51/200], Step [100/500] Loss: 0.7564
Epoch [51/200], Step [200/500] Loss: 1.0694
Epoch [51/200], Step [300/500] Loss: 0.3627
Epoch [51/200], Step [400/500] Loss: 0.4782
Epoch [51/200], Step [500/500] Loss: 0.9174
Accuracy of the model on the test images: 89.49 %
use 0.0min20.361841440200806s
Epoch [52/200], Step [100/500] Loss: 1.1215
Epoch [52/200], Step [200/500] Loss: 1.0632
Epoch [52/200], Step [300/500] Loss: 0.5354
Epoch [52/200], Step [400/500] Loss: 1.1713
Epoch [52/200], Step [500/500] Loss: 0.3436
Accuracy of the model on the test images: 90.77 %
use 0.0min20.35405945777893s
Epoch [53/200], Step [100/500] Loss: 1.0341
Epoch [53/200], Step [200/500] Loss: 1.2249
Epoch [53/200], Step [300/500] Loss: 0.5486
Epoch [53/200], Step [400/500] Loss: 1.1428
Epoch [53/200], Step [500/500] Loss: 1.1575
Accuracy of the model on the test images: 90.4 %
use 0.0min20.34561586380005s
Epoch [54/200], Step [100/500] Loss: 0.8552
Epoch [54/200], Step [200/500] Loss: 0.5618
Epoch [54/200], Step [300/500] Loss: 1.0586
Epoch [54/200], Step [400/500] Loss: 0.5835
Epoch [54/200], Step [500/500] Loss: 0.4571
Accuracy of the model on the test images: 90.23 %
use 0.0min20.375904083251953s
Epoch [55/200], Step [100/500] Loss: 0.5887
Epoch [55/200], Step [200/500] Loss: 1.2362
Epoch [55/200], Step [300/500] Loss: 1.1553
Epoch [55/200], Step [400/500] Loss: 1.2960
Epoch [55/200], Step [500/500] Loss: 1.2422
Accuracy of the model on the test images: 90.14 %
use 0.0min20.35897397994995s
Epoch [56/200], Step [100/500] Loss: 1.1849
Epoch [56/200], Step [200/500] Loss: 0.8863
Epoch [56/200], Step [300/500] Loss: 0.4298
Epoch [56/200], Step [400/500] Loss: 1.3727
Epoch [56/200], Step [500/500] Loss: 0.1565
Accuracy of the model on the test images: 91.64 %
use 0.0min20.358381986618042s
Epoch [57/200], Step [100/500] Loss: 1.3350
Epoch [57/200], Step [200/500] Loss: 0.7779
Epoch [57/200], Step [300/500] Loss: 0.4617
Epoch [57/200], Step [400/500] Loss: 1.2344
Epoch [57/200], Step [500/500] Loss: 1.3262
Accuracy of the model on the test images: 91.31 %
use 0.0min20.415284633636475s
Epoch [58/200], Step [100/500] Loss: 0.1896
Epoch [58/200], Step [200/500] Loss: 0.6137
Epoch [58/200], Step [300/500] Loss: 0.9304
Epoch [58/200], Step [400/500] Loss: 1.0907
Epoch [58/200], Step [500/500] Loss: 0.8966
Accuracy of the model on the test images: 91.12 %
use 0.0min20.36165738105774s
Epoch [59/200], Step [100/500] Loss: 0.7631
Epoch [59/200], Step [200/500] Loss: 0.5612
Epoch [59/200], Step [300/500] Loss: 1.2073
Epoch [59/200], Step [400/500] Loss: 1.1358
Epoch [59/200], Step [500/500] Loss: 0.9003
Accuracy of the model on the test images: 90.22 %
use 0.0min20.355273485183716s
Epoch [60/200], Step [100/500] Loss: 0.8119
Epoch [60/200], Step [200/500] Loss: 0.8268
Epoch [60/200], Step [300/500] Loss: 1.1098
Epoch [60/200], Step [400/500] Loss: 0.7903
Epoch [60/200], Step [500/500] Loss: 1.2007
Accuracy of the model on the test images: 91.78 %
use 0.0min20.364991903305054s
Epoch [61/200], Step [100/500] Loss: 0.9761
Epoch [61/200], Step [200/500] Loss: 0.4134
Epoch [61/200], Step [300/500] Loss: 1.1588
Epoch [61/200], Step [400/500] Loss: 1.1384
Epoch [61/200], Step [500/500] Loss: 1.0511
Accuracy of the model on the test images: 92.31 %
use 0.0min20.366493940353394s
Epoch [62/200], Step [100/500] Loss: 0.4448
Epoch [62/200], Step [200/500] Loss: 1.1300
Epoch [62/200], Step [300/500] Loss: 1.1617
Epoch [62/200], Step [400/500] Loss: 0.9995
Epoch [62/200], Step [500/500] Loss: 0.5588
Accuracy of the model on the test images: 91.07 %
use 0.0min20.35614776611328s
Epoch [63/200], Step [100/500] Loss: 1.0792
Epoch [63/200], Step [200/500] Loss: 1.1368
Epoch [63/200], Step [300/500] Loss: 0.6460
Epoch [63/200], Step [400/500] Loss: 0.9589
Epoch [63/200], Step [500/500] Loss: 1.3234
Accuracy of the model on the test images: 91.63 %
use 0.0min20.352925300598145s
Epoch [64/200], Step [100/500] Loss: 0.6526
Epoch [64/200], Step [200/500] Loss: 1.2217
Epoch [64/200], Step [300/500] Loss: 1.2415
Epoch [64/200], Step [400/500] Loss: 1.2455
Epoch [64/200], Step [500/500] Loss: 1.0209
Accuracy of the model on the test images: 91.41 %
use 0.0min20.417600870132446s
Epoch [65/200], Step [100/500] Loss: 0.4426
Epoch [65/200], Step [200/500] Loss: 1.2359
Epoch [65/200], Step [300/500] Loss: 0.7901
Epoch [65/200], Step [400/500] Loss: 1.1537
Epoch [65/200], Step [500/500] Loss: 1.1570
Accuracy of the model on the test images: 91.98 %
use 0.0min20.355173349380493s
Epoch [66/200], Step [100/500] Loss: 0.4872
Epoch [66/200], Step [200/500] Loss: 0.6941
Epoch [66/200], Step [300/500] Loss: 1.0854
Epoch [66/200], Step [400/500] Loss: 0.7960
Epoch [66/200], Step [500/500] Loss: 1.2412
Accuracy of the model on the test images: 91.9 %
use 0.0min20.3370361328125s
Epoch [67/200], Step [100/500] Loss: 1.2178
Epoch [67/200], Step [200/500] Loss: 1.0593
Epoch [67/200], Step [300/500] Loss: 1.0975
Epoch [67/200], Step [400/500] Loss: 1.0498
Epoch [67/200], Step [500/500] Loss: 1.1520
Accuracy of the model on the test images: 92.39 %
use 0.0min20.368600130081177s
Epoch [68/200], Step [100/500] Loss: 1.3849
Epoch [68/200], Step [200/500] Loss: 1.1138
Epoch [68/200], Step [300/500] Loss: 1.1563
Epoch [68/200], Step [400/500] Loss: 1.0671
Epoch [68/200], Step [500/500] Loss: 1.3540
Accuracy of the model on the test images: 91.47 %
use 0.0min20.388365983963013s
Epoch [69/200], Step [100/500] Loss: 0.6983
Epoch [69/200], Step [200/500] Loss: 0.8688
Epoch [69/200], Step [300/500] Loss: 0.6467
Epoch [69/200], Step [400/500] Loss: 1.0615
Epoch [69/200], Step [500/500] Loss: 0.9949
Accuracy of the model on the test images: 91.5 %
use 0.0min20.358011722564697s
Epoch [70/200], Step [100/500] Loss: 1.3060
Epoch [70/200], Step [200/500] Loss: 0.2657
Epoch [70/200], Step [300/500] Loss: 0.5019
Epoch [70/200], Step [400/500] Loss: 1.1539
Epoch [70/200], Step [500/500] Loss: 0.2488
Accuracy of the model on the test images: 91.36 %
use 0.0min20.365375995635986s
Epoch [71/200], Step [100/500] Loss: 0.9794
Epoch [71/200], Step [200/500] Loss: 1.1348
Epoch [71/200], Step [300/500] Loss: 1.1828
Epoch [71/200], Step [400/500] Loss: 0.8390
Epoch [71/200], Step [500/500] Loss: 1.2209
Accuracy of the model on the test images: 90.86 %
use 0.0min20.40201234817505s
Epoch [72/200], Step [100/500] Loss: 0.1715
Epoch [72/200], Step [200/500] Loss: 0.4423
Epoch [72/200], Step [300/500] Loss: 1.0107
Epoch [72/200], Step [400/500] Loss: 1.0902
Epoch [72/200], Step [500/500] Loss: 1.1116
Accuracy of the model on the test images: 92.34 %
use 0.0min20.396960020065308s
Epoch [73/200], Step [100/500] Loss: 0.3739
Epoch [73/200], Step [200/500] Loss: 1.1867
Epoch [73/200], Step [300/500] Loss: 0.4535
Epoch [73/200], Step [400/500] Loss: 1.1095
Epoch [73/200], Step [500/500] Loss: 0.1533
Accuracy of the model on the test images: 92.87 %
use 0.0min20.382538557052612s
Epoch [74/200], Step [100/500] Loss: 0.9620
Epoch [74/200], Step [200/500] Loss: 1.1031
Epoch [74/200], Step [300/500] Loss: 0.2878
Epoch [74/200], Step [400/500] Loss: 1.1353
Epoch [74/200], Step [500/500] Loss: 1.0970
Accuracy of the model on the test images: 92.46 %
use 0.0min20.36136531829834s
Epoch [75/200], Step [100/500] Loss: 0.8250
Epoch [75/200], Step [200/500] Loss: 0.9810
Epoch [75/200], Step [300/500] Loss: 0.4182
Epoch [75/200], Step [400/500] Loss: 0.9515
Epoch [75/200], Step [500/500] Loss: 1.1185
Accuracy of the model on the test images: 92.2 %
use 0.0min20.370022773742676s
Epoch [76/200], Step [100/500] Loss: 1.0507
Epoch [76/200], Step [200/500] Loss: 1.2088
Epoch [76/200], Step [300/500] Loss: 0.4608
Epoch [76/200], Step [400/500] Loss: 0.6427
Epoch [76/200], Step [500/500] Loss: 0.3101
Accuracy of the model on the test images: 92.62 %
use 0.0min20.362632989883423s
Epoch [77/200], Step [100/500] Loss: 0.5323
Epoch [77/200], Step [200/500] Loss: 0.2878
Epoch [77/200], Step [300/500] Loss: 0.6650
Epoch [77/200], Step [400/500] Loss: 1.0999
Epoch [77/200], Step [500/500] Loss: 1.1225
Accuracy of the model on the test images: 91.89 %
use 0.0min20.352317571640015s
Epoch [78/200], Step [100/500] Loss: 0.7871
Epoch [78/200], Step [200/500] Loss: 0.7206
Epoch [78/200], Step [300/500] Loss: 0.9529
Epoch [78/200], Step [400/500] Loss: 0.7113
Epoch [78/200], Step [500/500] Loss: 0.6871
Accuracy of the model on the test images: 92.88 %
use 0.0min20.35572576522827s
Epoch [79/200], Step [100/500] Loss: 0.8967
Epoch [79/200], Step [200/500] Loss: 0.7975
Epoch [79/200], Step [300/500] Loss: 0.3521
Epoch [79/200], Step [400/500] Loss: 1.1852
Epoch [79/200], Step [500/500] Loss: 0.3066
Accuracy of the model on the test images: 91.92 %
use 0.0min20.411757230758667s
Epoch [80/200], Step [100/500] Loss: 0.9884
Epoch [80/200], Step [200/500] Loss: 0.7489
Epoch [80/200], Step [300/500] Loss: 1.0278
Epoch [80/200], Step [400/500] Loss: 0.2410
Epoch [80/200], Step [500/500] Loss: 0.3059
Accuracy of the model on the test images: 92.7 %
use 0.0min20.342678785324097s
Epoch [81/200], Step [100/500] Loss: 1.0701
Epoch [81/200], Step [200/500] Loss: 0.8364
Epoch [81/200], Step [300/500] Loss: 1.1095
Epoch [81/200], Step [400/500] Loss: 0.6371
Epoch [81/200], Step [500/500] Loss: 1.0618
Accuracy of the model on the test images: 92.5 %
use 0.0min20.350439310073853s
Epoch [82/200], Step [100/500] Loss: 1.3469
Epoch [82/200], Step [200/500] Loss: 1.1329
Epoch [82/200], Step [300/500] Loss: 0.4903
Epoch [82/200], Step [400/500] Loss: 0.8416
Epoch [82/200], Step [500/500] Loss: 1.1430
Accuracy of the model on the test images: 93.34 %
use 0.0min20.346050262451172s
Epoch [83/200], Step [100/500] Loss: 0.7893
Epoch [83/200], Step [200/500] Loss: 0.6076
Epoch [83/200], Step [300/500] Loss: 0.6015
Epoch [83/200], Step [400/500] Loss: 1.0161
Epoch [83/200], Step [500/500] Loss: 0.9365
Accuracy of the model on the test images: 92.0 %
use 0.0min20.381393909454346s
Epoch [84/200], Step [100/500] Loss: 1.1401
Epoch [84/200], Step [200/500] Loss: 1.0463
Epoch [84/200], Step [300/500] Loss: 0.2169
Epoch [84/200], Step [400/500] Loss: 0.2457
Epoch [84/200], Step [500/500] Loss: 0.9423
Accuracy of the model on the test images: 92.8 %
use 0.0min20.357431411743164s
Epoch [85/200], Step [100/500] Loss: 0.7488
Epoch [85/200], Step [200/500] Loss: 0.7533
Epoch [85/200], Step [300/500] Loss: 0.4501
Epoch [85/200], Step [400/500] Loss: 0.4702
Epoch [85/200], Step [500/500] Loss: 0.8354
Accuracy of the model on the test images: 93.16 %
use 0.0min20.340961694717407s
Epoch [86/200], Step [100/500] Loss: 1.0408
Epoch [86/200], Step [200/500] Loss: 1.0514
Epoch [86/200], Step [300/500] Loss: 1.0801
Epoch [86/200], Step [400/500] Loss: 0.9697
Epoch [86/200], Step [500/500] Loss: 0.8542
Accuracy of the model on the test images: 93.79 %
use 0.0min20.4689519405365s
Epoch [87/200], Step [100/500] Loss: 0.3422
Epoch [87/200], Step [200/500] Loss: 1.0760
Epoch [87/200], Step [300/500] Loss: 1.0802
Epoch [87/200], Step [400/500] Loss: 0.5832
Epoch [87/200], Step [500/500] Loss: 0.8596
Accuracy of the model on the test images: 92.35 %
use 0.0min20.37018585205078s
Epoch [88/200], Step [100/500] Loss: 1.0215
Epoch [88/200], Step [200/500] Loss: 0.5354
Epoch [88/200], Step [300/500] Loss: 1.1278
Epoch [88/200], Step [400/500] Loss: 0.5407
Epoch [88/200], Step [500/500] Loss: 1.0884
Accuracy of the model on the test images: 93.21 %
use 0.0min20.390015602111816s
Epoch [89/200], Step [100/500] Loss: 1.0806
Epoch [89/200], Step [200/500] Loss: 0.9242
Epoch [89/200], Step [300/500] Loss: 0.9808
Epoch [89/200], Step [400/500] Loss: 0.9977
Epoch [89/200], Step [500/500] Loss: 1.0073
Accuracy of the model on the test images: 92.72 %
use 0.0min20.356212854385376s
Epoch [90/200], Step [100/500] Loss: 1.2423
Epoch [90/200], Step [200/500] Loss: 0.5137
Epoch [90/200], Step [300/500] Loss: 0.1287
Epoch [90/200], Step [400/500] Loss: 1.2059
Epoch [90/200], Step [500/500] Loss: 1.1756
Accuracy of the model on the test images: 92.48 %
use 0.0min20.41212034225464s
Epoch [91/200], Step [100/500] Loss: 1.2338
Epoch [91/200], Step [200/500] Loss: 0.6848
Epoch [91/200], Step [300/500] Loss: 1.0647
Epoch [91/200], Step [400/500] Loss: 0.9616
Epoch [91/200], Step [500/500] Loss: 0.9189
Accuracy of the model on the test images: 93.32 %
use 0.0min20.348811388015747s
Epoch [92/200], Step [100/500] Loss: 1.1084
Epoch [92/200], Step [200/500] Loss: 1.0042
Epoch [92/200], Step [300/500] Loss: 0.4030
Epoch [92/200], Step [400/500] Loss: 1.0330
Epoch [92/200], Step [500/500] Loss: 0.9179
Accuracy of the model on the test images: 92.81 %
use 0.0min20.355735301971436s
Epoch [93/200], Step [100/500] Loss: 0.7364
Epoch [93/200], Step [200/500] Loss: 1.0574
Epoch [93/200], Step [300/500] Loss: 0.6714
Epoch [93/200], Step [400/500] Loss: 1.0800
Epoch [93/200], Step [500/500] Loss: 0.7175
Accuracy of the model on the test images: 92.89 %
use 0.0min20.47357749938965s
Epoch [94/200], Step [100/500] Loss: 0.5591
Epoch [94/200], Step [200/500] Loss: 1.0056
Epoch [94/200], Step [300/500] Loss: 1.0477
Epoch [94/200], Step [400/500] Loss: 1.2228
Epoch [94/200], Step [500/500] Loss: 0.8515
Accuracy of the model on the test images: 92.65 %
use 0.0min20.355154514312744s
Epoch [95/200], Step [100/500] Loss: 0.8575
Epoch [95/200], Step [200/500] Loss: 0.9975
Epoch [95/200], Step [300/500] Loss: 1.1158
Epoch [95/200], Step [400/500] Loss: 1.0921
Epoch [95/200], Step [500/500] Loss: 0.9453
Accuracy of the model on the test images: 93.45 %
use 0.0min20.35475468635559s
Epoch [96/200], Step [100/500] Loss: 0.5343
Epoch [96/200], Step [200/500] Loss: 0.4544
Epoch [96/200], Step [300/500] Loss: 0.7710
Epoch [96/200], Step [400/500] Loss: 0.9027
Epoch [96/200], Step [500/500] Loss: 0.7813
Accuracy of the model on the test images: 93.24 %
use 0.0min20.366315126419067s
Epoch [97/200], Step [100/500] Loss: 1.0281
Epoch [97/200], Step [200/500] Loss: 1.0061
Epoch [97/200], Step [300/500] Loss: 1.0532
Epoch [97/200], Step [400/500] Loss: 1.0409
Epoch [97/200], Step [500/500] Loss: 1.0199
Accuracy of the model on the test images: 92.55 %
use 0.0min20.421039819717407s
Epoch [98/200], Step [100/500] Loss: 0.8878
Epoch [98/200], Step [200/500] Loss: 0.1803
Epoch [98/200], Step [300/500] Loss: 0.7777
Epoch [98/200], Step [400/500] Loss: 0.3656
Epoch [98/200], Step [500/500] Loss: 1.1228
Accuracy of the model on the test images: 93.74 %
use 0.0min20.35983943939209s
Epoch [99/200], Step [100/500] Loss: 1.1167
Epoch [99/200], Step [200/500] Loss: 0.9838
Epoch [99/200], Step [300/500] Loss: 0.6263
Epoch [99/200], Step [400/500] Loss: 0.8882
Epoch [99/200], Step [500/500] Loss: 1.0340
Accuracy of the model on the test images: 92.63 %
use 0.0min20.419955015182495s
Epoch [100/200], Step [100/500] Loss: 0.7307
Epoch [100/200], Step [200/500] Loss: 0.7769
Epoch [100/200], Step [300/500] Loss: 0.2171
Epoch [100/200], Step [400/500] Loss: 1.2046
Epoch [100/200], Step [500/500] Loss: 1.0446
Accuracy of the model on the test images: 93.14 %
use 0.0min20.452274560928345s
Epoch [101/200], Step [100/500] Loss: 0.8717
Epoch [101/200], Step [200/500] Loss: 0.9948
Epoch [101/200], Step [300/500] Loss: 1.1716
Epoch [101/200], Step [400/500] Loss: 1.0076
Epoch [101/200], Step [500/500] Loss: 0.7147
Accuracy of the model on the test images: 93.89 %
use 0.0min20.380756378173828s
Epoch [102/200], Step [100/500] Loss: 1.1337
Epoch [102/200], Step [200/500] Loss: 0.4186
Epoch [102/200], Step [300/500] Loss: 0.9568
Epoch [102/200], Step [400/500] Loss: 0.4825
Epoch [102/200], Step [500/500] Loss: 0.3573
Accuracy of the model on the test images: 93.62 %
use 0.0min20.413810968399048s
Epoch [103/200], Step [100/500] Loss: 1.0148
Epoch [103/200], Step [200/500] Loss: 0.6628
Epoch [103/200], Step [300/500] Loss: 1.0269
Epoch [103/200], Step [400/500] Loss: 0.3809
Epoch [103/200], Step [500/500] Loss: 0.7289
Accuracy of the model on the test images: 95.05 %
use 0.0min20.3670597076416s
Epoch [104/200], Step [100/500] Loss: 0.9882
Epoch [104/200], Step [200/500] Loss: 1.0050
Epoch [104/200], Step [300/500] Loss: 0.9322
Epoch [104/200], Step [400/500] Loss: 0.8870
Epoch [104/200], Step [500/500] Loss: 0.2787
Accuracy of the model on the test images: 95.08 %
use 0.0min20.38065481185913s
Epoch [105/200], Step [100/500] Loss: 0.4757
Epoch [105/200], Step [200/500] Loss: 0.7992
Epoch [105/200], Step [300/500] Loss: 0.7278
Epoch [105/200], Step [400/500] Loss: 0.5998
Epoch [105/200], Step [500/500] Loss: 0.7446
Accuracy of the model on the test images: 94.98 %
use 0.0min20.369545459747314s
Epoch [106/200], Step [100/500] Loss: 0.7413
Epoch [106/200], Step [200/500] Loss: 1.0611
Epoch [106/200], Step [300/500] Loss: 0.9262
Epoch [106/200], Step [400/500] Loss: 0.8101
Epoch [106/200], Step [500/500] Loss: 0.7643
Accuracy of the model on the test images: 95.22 %
use 0.0min20.358132123947144s
Epoch [107/200], Step [100/500] Loss: 0.7844
Epoch [107/200], Step [200/500] Loss: 0.0925
Epoch [107/200], Step [300/500] Loss: 0.5614
Epoch [107/200], Step [400/500] Loss: 0.5424
Epoch [107/200], Step [500/500] Loss: 0.7826
Accuracy of the model on the test images: 95.27 %
use 0.0min20.39941668510437s
Epoch [108/200], Step [100/500] Loss: 0.2129
Epoch [108/200], Step [200/500] Loss: 0.3992
Epoch [108/200], Step [300/500] Loss: 0.8792
Epoch [108/200], Step [400/500] Loss: 0.7090
Epoch [108/200], Step [500/500] Loss: 0.6356
Accuracy of the model on the test images: 95.29 %
use 0.0min20.351104259490967s
Epoch [109/200], Step [100/500] Loss: 0.6356
Epoch [109/200], Step [200/500] Loss: 0.8048
Epoch [109/200], Step [300/500] Loss: 0.1197
Epoch [109/200], Step [400/500] Loss: 0.5651
Epoch [109/200], Step [500/500] Loss: 0.8223
Accuracy of the model on the test images: 95.14 %
use 0.0min20.34543776512146s
Epoch [110/200], Step [100/500] Loss: 0.6938
Epoch [110/200], Step [200/500] Loss: 0.4959
Epoch [110/200], Step [300/500] Loss: 0.8740
Epoch [110/200], Step [400/500] Loss: 0.7046
Epoch [110/200], Step [500/500] Loss: 0.7780
Accuracy of the model on the test images: 95.06 %
use 0.0min20.357868432998657s
Epoch [111/200], Step [100/500] Loss: 0.8586
Epoch [111/200], Step [200/500] Loss: 0.9439
Epoch [111/200], Step [300/500] Loss: 0.9383
Epoch [111/200], Step [400/500] Loss: 0.7799
Epoch [111/200], Step [500/500] Loss: 0.7568
Accuracy of the model on the test images: 95.04 %
use 0.0min20.377333164215088s
Epoch [112/200], Step [100/500] Loss: 0.8705
Epoch [112/200], Step [200/500] Loss: 0.4833
Epoch [112/200], Step [300/500] Loss: 0.7744
Epoch [112/200], Step [400/500] Loss: 0.8568
Epoch [112/200], Step [500/500] Loss: 0.6183
Accuracy of the model on the test images: 95.12 %
use 0.0min20.358330488204956s
Epoch [113/200], Step [100/500] Loss: 0.8118
Epoch [113/200], Step [200/500] Loss: 0.2100
Epoch [113/200], Step [300/500] Loss: 0.9770
Epoch [113/200], Step [400/500] Loss: 0.2692
Epoch [113/200], Step [500/500] Loss: 1.0045
Accuracy of the model on the test images: 95.13 %
use 0.0min20.3507342338562s
Epoch [114/200], Step [100/500] Loss: 0.7069
Epoch [114/200], Step [200/500] Loss: 0.8443
Epoch [114/200], Step [300/500] Loss: 0.8858
Epoch [114/200], Step [400/500] Loss: 0.8029
Epoch [114/200], Step [500/500] Loss: 0.9669
Accuracy of the model on the test images: 95.19 %
use 0.0min20.42878007888794s
Epoch [115/200], Step [100/500] Loss: 0.5202
Epoch [115/200], Step [200/500] Loss: 0.0889
Epoch [115/200], Step [300/500] Loss: 0.5234
Epoch [115/200], Step [400/500] Loss: 0.2499
Epoch [115/200], Step [500/500] Loss: 0.2063
Accuracy of the model on the test images: 95.3 %

WRN28-4对CIFAR-10数据集的分类95.3%以上相关推荐

  1. 深度学习入门——利用卷积神经网络训练CIFAR—10数据集

    CIFAR-10数据集简介 CIFAR-10是由Hinton的学生Alex Krizhevsky和Ilya Sutskever整理的一个用于普适物体的小型数据集.它一共包含10个类别的RGB彩色图片: ...

  2. 《pytorch》对CIFAR数据集的分类

    今天复习了下pytorch,又看了下它的一个实例,复习了下对CIFAR数据集的分类. 学习链接:https://zhuanlan.zhihu.com/p/39667289 直接上源码,注释均在其中 i ...

  3. (!详解 Pytorch实战:①)kaggle猫狗数据集二分类:加载(集成/自定义)数据集

    这系列的文章是我对Pytorch入门之后的一个总结,特别是对数据集生成加载这一块加强学习 另外,这里有一些比较常用的数据集,大家可以进行下载: 需要注意的是,本篇文章使用的PyTorch的版本是v0. ...

  4. 动物数据集+动物分类识别训练代码(Pytorch)

    动物数据集+动物分类识别训练代码(Pytorch) 目录 动物数据集+动物分类识别训练代码(Pytorch) 1. 前言 2. Animals-Dataset动物数据集说明 (1)Animals90动 ...

  5. ML之FE之FS:特征工程/数据预处理—特征选择之利用过滤式filter、包装式wrapper、嵌入式Embedded方法(RF/SF)进行特征选择(mushroom蘑菇数据集二分类预测)最全案例应用

    ML之FE之FS:特征工程/数据预处理-特征选择之利用过滤式filter.包装式wrapper.嵌入式Embedded方法(RF/SF)进行特征选择(mushroom蘑菇数据集二分类预测)案例应用 利 ...

  6. 鸢尾花数据集、月亮数据集二分类可视化分析

    鸢尾花数据集.月亮数据集二分类可视化分析 目录 鸢尾花数据集.月亮数据集二分类可视化分析 一.线性LDA算法 1.LDA算法 2.鸢尾花数据集 2.月亮数据集 二.K-Means算法 1.K-Mean ...

  7. paddle2.0高层API实现自定义数据集文本分类中的情感分析任务

    paddle2.0高层API实现自定义数据集文本分类中的情感分析任务 本文包含了: - 自定义文本分类数据集继承 - 文本分类数据处理 - 循环神经网络RNN, LSTM - ·seq2vec· - ...

  8. 机器学习--python代码实现基于Fisher的线性判别(鸢尾花数据集的分类)

    一.线性分类–判断该函数属于哪一类 先上例题,然后我会通过两种方法来判断该函数属于哪一类 1.图解法 定义 对于多类问题:模式有 ω1 ,ω2 , - , ωm 个类别,可分三种情况: 第一种情况:每 ...

  9. pytorch创建自己的数据集(分类任务)

    pytorch创建自己的数据集(分类任务) 转载于:https://www.cnblogs.com/cititude/p/11615158.html

最新文章

  1. 2012年11月14日学习研究报告
  2. 为什么读博士以及有什么意义
  3. 有关信号处理中的 窗函数
  4. 关于Unable to instantiate activity ComponentInfo错误解决方案
  5. Cloud for Customer mobile应用里pass Code登录界面的绘制原理
  6. 企业办公自动化系统_OA系统的核心功能有哪些?分析当下OA系统的缺陷以及相关解决方案...
  7. 深入理解MySQL8.0直方图
  8. 使用DataX同步MaxCompute数据到TableStore(原OTS)优化指南
  9. JDK9.0.4安装配置
  10. 自然语言处理 4.语义分析
  11. Python处理QXDM抓取log
  12. auraservice是什么_AURA安装与配置全解.doc
  13. oracle中db_create_file_dest参数
  14. JDK官方下载(旧版本,以前老版本)
  15. Fresher练打字
  16. 【转】阿里云主机购买使用教程
  17. 通过pytorch建立神经网络模型 分析遗传基因数据
  18. git 怎么切换分支命令_git命令-切换分支
  19. 阿里云 云服务器最新优惠活动
  20. 蓝桥杯单片机12届第二场

热门文章

  1. org.gradle.jvmargs=-Xmx1536m
  2. 威布尔分布的参数估计
  3. 波浪谱荷载的Python实现
  4. winform的Textbox设置只读之后使用ForeColor更改颜色
  5. 【架构师】解决方案架构师常用的5种类型架构图
  6. 操作系统-最近最久未使用置换算法
  7. MAC OS X文件夹和应用程序的本地化名称
  8. 硬刚Doris系列」Apache Doris基本使用和数据模型
  9. Go内建容器——Golang学习笔记3
  10. Vue-Class 与 Style 绑定