
Dilated Residual Networks



For the complete code, see: https://github.com/fyu/drn

The excerpt below is the DRN backbone's constructor and layer builders. The later levels that a plain ResNet would downsample are instead built with dilated convolutions, and the 'C' and 'D' variants differ in their stem and in the degridding layers appended at the end.
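The key mechanism: a 3x3 convolution with dilation d and padding d covers a (2d+1)x(2d+1) input window yet preserves the spatial size, so later DRN levels can grow their receptive field without downsampling. A quick check with an illustrative snippet (not from the repository):

import torch
import torch.nn as nn

x = torch.randn(1, 16, 28, 28)
for d in (1, 2, 4):
    # padding == dilation keeps a 3x3 kernel size-preserving
    conv = nn.Conv2d(16, 16, kernel_size=3, padding=d, dilation=d, bias=False)
    print(d, conv(x).shape)  # spatial size stays 28x28 for every dilation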

import math
import torch.nn as nn

# In the repository, BatchNorm is an alias for nn.BatchNorm2d, and
# BasicBlock is a dilation-aware ResNet basic block defined earlier in drn.py.
BatchNorm = nn.BatchNorm2d


class DRN(nn.Module):

    def __init__(self, block, layers, num_classes=1000,
                 channels=(16, 32, 64, 128, 256, 512, 512, 512),
                 out_map=False, out_middle=False, pool_size=28, arch='D'):
        super(DRN, self).__init__()
        self.inplanes = channels[0]
        self.out_map = out_map
        self.out_dim = channels[-1]
        self.out_middle = out_middle
        self.arch = arch

        # Stem and levels 1-2: DRN-C uses residual blocks, DRN-D plain convs.
        if arch == 'C':
            self.conv1 = nn.Conv2d(3, channels[0], kernel_size=7, stride=1,
                                   padding=3, bias=False)
            self.bn1 = BatchNorm(channels[0])
            self.relu = nn.ReLU(inplace=True)

            self.layer1 = self._make_layer(
                BasicBlock, channels[0], layers[0], stride=1)
            self.layer2 = self._make_layer(
                BasicBlock, channels[1], layers[1], stride=2)
        elif arch == 'D':
            self.layer0 = nn.Sequential(
                nn.Conv2d(3, channels[0], kernel_size=7, stride=1, padding=3,
                          bias=False),
                BatchNorm(channels[0]),
                nn.ReLU(inplace=True)
            )

            self.layer1 = self._make_conv_layers(
                channels[0], layers[0], stride=1)
            self.layer2 = self._make_conv_layers(
                channels[1], layers[1], stride=2)

        self.layer3 = self._make_layer(block, channels[2], layers[2], stride=2)
        self.layer4 = self._make_layer(block, channels[3], layers[3], stride=2)
        # Levels 5-6 keep the spatial resolution: dilation replaces striding.
        self.layer5 = self._make_layer(block, channels[4], layers[4],
                                       dilation=2, new_level=False)
        self.layer6 = None if layers[5] == 0 else \
            self._make_layer(block, channels[5], layers[5], dilation=4,
                             new_level=False)

        # Levels 7-8 are the degridding layers with decreasing dilation.
        if arch == 'C':
            self.layer7 = None if layers[6] == 0 else \
                self._make_layer(BasicBlock, channels[6], layers[6],
                                 dilation=2, new_level=False, residual=False)
            self.layer8 = None if layers[7] == 0 else \
                self._make_layer(BasicBlock, channels[7], layers[7],
                                 dilation=1, new_level=False, residual=False)
        elif arch == 'D':
            self.layer7 = None if layers[6] == 0 else \
                self._make_conv_layers(channels[6], layers[6], dilation=2)
            self.layer8 = None if layers[7] == 0 else \
                self._make_conv_layers(channels[7], layers[7], dilation=1)

        if num_classes > 0:
            self.avgpool = nn.AvgPool2d(pool_size)
            self.fc = nn.Conv2d(self.out_dim, num_classes, kernel_size=1,
                                stride=1, padding=0, bias=True)

        # He initialization for convolutions, unit scale for BatchNorm.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, BatchNorm):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def _make_layer(self, block, planes, blocks, stride=1, dilation=1,
                    new_level=True, residual=True):
        assert dilation == 1 or dilation % 2 == 0
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                BatchNorm(planes * block.expansion),
            )

        layers = list()
        # When entering a new dilation level, the first block ramps up from
        # half the target dilation to smooth the transition.
        layers.append(block(
            self.inplanes, planes, stride, downsample,
            dilation=(1, 1) if dilation == 1 else (
                dilation // 2 if new_level else dilation, dilation),
            residual=residual))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes, residual=residual,
                                dilation=(dilation, dilation)))

        return nn.Sequential(*layers)

    def _make_conv_layers(self, channels, convs, stride=1, dilation=1):
        # Plain conv-BN-ReLU stacks used by the 'D' variant; padding equals
        # dilation so the 3x3 kernels preserve spatial size.
        modules = []
        for i in range(convs):
            modules.extend([
                nn.Conv2d(self.inplanes, channels, kernel_size=3,
                          stride=stride if i == 0 else 1,
                          padding=dilation, bias=False, dilation=dilation),
                BatchNorm(channels),
                nn.ReLU(inplace=True)])
            self.inplanes = channels
        return nn.Sequential(*modules)
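The excerpt stops at the layer builders; in the repository, the DRN class also defines a forward() method that chains these levels and applies the classifier head. The helper below is a minimal, hypothetical sketch of that flow written against the constructor above, not the repository's actual method, which differs in detail (for example, it can also return the per-level feature maps when out_middle=True).

import torch

def drn_forward(model, x):
    # Hypothetical helper sketching the DRN forward flow; `model` is a
    # DRN instance as defined in the excerpt above.
    if model.arch == 'C':
        x = model.relu(model.bn1(model.conv1(x)))
    elif model.arch == 'D':
        x = model.layer0(x)

    # Levels 1-8; layer6-layer8 may be None depending on `layers`.
    for name in ('layer1', 'layer2', 'layer3', 'layer4',
                 'layer5', 'layer6', 'layer7', 'layer8'):
        layer = getattr(model, name, None)
        if layer is not None:
            x = layer(x)

    if model.out_map:
        # Keep the spatial map: the 1x1 conv yields dense per-location scores.
        return model.fc(x)
    # Image classification: pool to 1x1, classify, flatten.
    x = model.avgpool(x)
    x = model.fc(x)
    return x.view(x.size(0), -1)

# Hypothetical usage (the layer counts match the repository's drn_d_22
# preset; BasicBlock is the block class defined elsewhere in drn.py):
#   model = DRN(BasicBlock, [1, 1, 2, 2, 2, 2, 1, 1], arch='D')
#   logits = drn_forward(model, torch.randn(1, 3, 224, 224))  # (1, 1000)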