Network explanations:

  • ResNet network explained

  • ResNeXt network explained

PyTorch implementation:

import torch.nn as nn
import torch

'''
Corresponds to the residual blocks of the 18- and 34-layer networks.
It must support both the solid-line (identity) shortcut and the
dashed-line (projection) shortcut.
'''
class BasicBlock(nn.Module):
    # expansion records whether the kernel count changes along the main
    # branch: in the 18- and 34-layer nets every conv in a block uses the
    # same number of kernels, while in the 50/101/152-layer nets the third
    # conv uses 4x the kernels of the first two.
    expansion = 1

    # downsample corresponds to the first (dashed-line) residual block of
    # conv3/conv4/conv5; that first block also reduces the feature-map size.
    # **kwargs absorbs the groups/width_per_group arguments that
    # _make_layer passes uniformly to every block type.
    def __init__(self, in_channel, out_channel, stride=1, downsample=None, **kwargs):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_channels=in_channel, out_channels=out_channel,
                               kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(out_channel)
        self.relu = nn.ReLU()
        self.conv2 = nn.Conv2d(in_channels=out_channel, out_channels=out_channel,
                               kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(out_channel)
        self.downsample = downsample

    def forward(self, x):
        identity = x
        # downsample is None for the solid-line (identity) shortcut
        if self.downsample is not None:
            identity = self.downsample(x)

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)

        out += identity
        out = self.relu(out)

        return out


class Bottleneck(nn.Module):
    """
    Note: in the original paper, on the main branch of the dashed-line
    residual block the first 1x1 conv has stride 2 and the second 3x3 conv
    has stride 1. The official PyTorch implementation instead gives the
    first 1x1 conv stride 1 and the 3x3 conv stride 2, which improves top-1
    accuracy by roughly 0.5%.
    See ResNet v1.5: https://ngc.nvidia.com/catalog/model-scripts/nvidia:resnet_50_v1_5_for_pytorch
    """
    # Records the change in kernel count within the block: in the
    # 50/101/152-layer nets the third conv uses 4x the kernels of the
    # first two.
    expansion = 4

    def __init__(self, in_channel, out_channel, stride=1, downsample=None,
                 groups=1, width_per_group=64):
        super(Bottleneck, self).__init__()

        width = int(out_channel * (width_per_group / 64.)) * groups

        self.conv1 = nn.Conv2d(in_channels=in_channel, out_channels=width,
                               kernel_size=1, stride=1, bias=False)  # squeeze channels
        self.bn1 = nn.BatchNorm2d(width)
        # -----------------------------------------
        self.conv2 = nn.Conv2d(in_channels=width, out_channels=width, groups=groups,
                               kernel_size=3, stride=stride, bias=False, padding=1)
        self.bn2 = nn.BatchNorm2d(width)
        # -----------------------------------------
        self.conv3 = nn.Conv2d(in_channels=width, out_channels=out_channel*self.expansion,
                               kernel_size=1, stride=1, bias=False)  # unsqueeze channels
        self.bn3 = nn.BatchNorm2d(out_channel*self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample

    def forward(self, x):
        identity = x
        # downsample is None for the solid-line (identity) shortcut
        if self.downsample is not None:
            identity = self.downsample(x)

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)

        out = self.conv3(out)
        out = self.bn3(out)

        out += identity
        out = self.relu(out)

        return out


class ResNet(nn.Module):

    def __init__(self, block, blocks_num, num_classes=1000, include_top=True,
                 groups=1, width_per_group=64):
        super(ResNet, self).__init__()
        self.include_top = include_top
        self.in_channel = 64

        self.groups = groups
        self.width_per_group = width_per_group

        self.conv1 = nn.Conv2d(3, self.in_channel, kernel_size=7, stride=2,
                               padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(self.in_channel)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        # conv2_x
        self.layer1 = self._make_layer(block, 64, blocks_num[0])
        # conv3_x
        self.layer2 = self._make_layer(block, 128, blocks_num[1], stride=2)
        # conv4_x
        self.layer3 = self._make_layer(block, 256, blocks_num[2], stride=2)
        # conv5_x
        self.layer4 = self._make_layer(block, 512, blocks_num[3], stride=2)
        if self.include_top:
            self.avgpool = nn.AdaptiveAvgPool2d((1, 1))  # output size = (1, 1)
            self.fc = nn.Linear(512 * block.expansion, num_classes)

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')

    # channel: kernel count of the first conv in the stage
    # block_num: number of residual blocks in the stage
    def _make_layer(self, block, channel, block_num, stride=1):
        downsample = None
        # ResNet-18/34 skip this branch in conv2_x (stride 1, channels already
        # match); stride-2 stages and every Bottleneck stage build a
        # projection shortcut here.
        if stride != 1 or self.in_channel != channel * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.in_channel, channel * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(channel * block.expansion))

        layers = []
        # first block of the stage: dashed-line (projection) shortcut
        layers.append(block(self.in_channel,
                            channel,
                            downsample=downsample,
                            stride=stride,
                            groups=self.groups,
                            width_per_group=self.width_per_group))
        self.in_channel = channel * block.expansion

        for _ in range(1, block_num):
            layers.append(block(self.in_channel,
                                channel,
                                groups=self.groups,
                                width_per_group=self.width_per_group))

        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)

        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)

        if self.include_top:
            x = self.avgpool(x)
            x = torch.flatten(x, 1)
            x = self.fc(x)

        return x


def resnet34(num_classes=1000, include_top=True):
    # https://download.pytorch.org/models/resnet34-333f7ec4.pth
    return ResNet(BasicBlock, [3, 4, 6, 3], num_classes=num_classes, include_top=include_top)


def resnet50(num_classes=1000, include_top=True):
    # https://download.pytorch.org/models/resnet50-19c8e357.pth
    return ResNet(Bottleneck, [3, 4, 6, 3], num_classes=num_classes, include_top=include_top)


def resnet101(num_classes=1000, include_top=True):
    # https://download.pytorch.org/models/resnet101-5d3b4d8f.pth
    return ResNet(Bottleneck, [3, 4, 23, 3], num_classes=num_classes, include_top=include_top)


def resnext50_32x4d(num_classes=1000, include_top=True):
    # https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth
    groups = 32
    width_per_group = 4
    return ResNet(Bottleneck, [3, 4, 6, 3],
                  num_classes=num_classes,
                  include_top=include_top,
                  groups=groups,
                  width_per_group=width_per_group)


def resnext101_32x8d(num_classes=1000, include_top=True):
    # https://download.pytorch.org/models/resnext101_32x8d-8ba56ff5.pth
    groups = 32
    width_per_group = 8
    return ResNet(Bottleneck, [3, 4, 23, 3],
                  num_classes=num_classes,
                  include_top=include_top,
                  groups=groups,
                  width_per_group=width_per_group)
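
A minimal usage sketch follows; it is not part of the original post. It builds the models defined above, runs a dummy forward pass, and (as an assumption about how the checkpoint URLs quoted in the comments would be used) loads the official ImageNet weights via torch.hub.load_state_dict_from_url:

import torch
from torch.hub import load_state_dict_from_url

# Build a ResNet-34 with the default 1000-class ImageNet head.
model = resnet34(num_classes=1000)

# Assumption: the parameter names here match the official checkpoint from
# the URL quoted in the resnet34 comment above (shapes only line up when
# num_classes=1000).
state_dict = load_state_dict_from_url(
    "https://download.pytorch.org/models/resnet34-333f7ec4.pth")
model.load_state_dict(state_dict)

model.eval()
x = torch.randn(1, 3, 224, 224)  # dummy ImageNet-sized batch
with torch.no_grad():
    print(model(x).shape)        # torch.Size([1, 1000])

# The ResNeXt factories work the same way; for resnext50_32x4d the grouped
# bottleneck width in conv2_x is int(64 * 4 / 64) * 32 = 128 channels.
cnn = resnext50_32x4d(num_classes=5).eval()  # e.g. a 5-class fine-tuning head
with torch.no_grad():
    print(cnn(x).shape)          # torch.Size([1, 5])

For feature extraction or transfer learning, passing include_top=False skips the adaptive average pool and fully connected head, so forward returns the conv5 feature map instead of class logits.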

