def __init__(self, backbone_out_sizes, op_names, num_classes, config, agg_size=64,
             aux_cell=False, sep_repeats=1, agg_concat=False, num_pools=4,
             ctx_cell=ContextualCell_v1, cell_concat=False, **params):
    """Construct MicroDecoder class.

    :param backbone_out_sizes: backbone output channels (one entry per backbone stage)
    :param op_names: list of operation candidate names
    :param num_classes: number of classes
    :param config: (cell_config, conns) pair — cell structure and block connections
    :param agg_size: number of channels in aggregation cells
    :param aux_cell: aux cells
    :param sep_repeats: number of repeats
    :param agg_concat: whether to concat or add agg results
    :param num_pools: number of pools
    :param ctx_cell: ctx module
    :param cell_concat: whether to concat or add cells
    :param params: other parameters
    """
    super(MicroDecoder, self).__init__()
    # Work on a private copy so the caller's list is never mutated.
    # (Previously the entries were overwritten BEFORE the copy was taken,
    # so the caller's list was clobbered; list() also makes the Py2/Py3
    # version branch around .copy() unnecessary.)
    backbone_out_sizes = list(backbone_out_sizes)
    # NOTE: bring all backbone outputs to the same channel count (agg_size).
    adapt = []
    for out_idx, size in enumerate(backbone_out_sizes):
        adapt.append(conv_bn_relu(C_in=size, C_out=agg_size, kernel_size=1,
                                  stride=1, padding=0, affine=True))
        backbone_out_sizes[out_idx] = agg_size
    self.adapt = ProcessList(*adapt)
    cell_config, conns = config
    collect_inds = []
    cells = []
    for block_idx, conn in enumerate(conns):
        for ind in conn:
            if ind in collect_inds:
                # remove from outputs if used by pool cell
                collect_inds.remove(ind)
        ind_1, ind_2 = conn
        cells.append(MergeCell(op_names=op_names, ctx_config=cell_config, conn=conn,
                               inps=(backbone_out_sizes[ind_1],
                                     backbone_out_sizes[ind_2]),
                               agg_size=agg_size, ctx_cell=ctx_cell,
                               repeats=sep_repeats, cell_concat=cell_concat))
        # Only the first merge cell may use concat; later cells always add.
        cell_concat = False
        collect_inds.append(block_idx + num_pools)
        backbone_out_sizes.append(agg_size)  # for description
    self.block = ProcessList(*cells, out_list=conns)
    self.upsample = MicroDecoder_Upsample(collect_inds=collect_inds,
                                          agg_concat=agg_concat)
    # When aggregates are concatenated the classifier input widens by the
    # number of collected outputs; otherwise they are summed into agg_size.
    self.pre_clf = conv_bn_relu(C_in=agg_size * (len(collect_inds) if agg_concat else 1),
                                C_out=agg_size, kernel_size=1, stride=1, padding=0)
    self.conv_clf = conv3x3(inchannel=agg_size, outchannel=num_classes,
                            stride=1, bias=True)
'sep_conv_3x3': lambda C, stride, affine, repeats=1: SeparatedConv(C, C, 3, stride, 1, affine=affine), 'sep_conv_5x5': lambda C, stride, affine, repeats=1: SeparatedConv(C, C, 5, stride, 2, affine=affine), 'sep_conv_7x7': lambda C, stride, affine, repeats=1: SeparatedConv(C, C, 7, stride, 3, affine=affine), 'dil_conv_3x3': lambda C, stride, affine, repeats=1: DilConv(C, C, 3, stride, 2, 2, affine=affine), 'dil_conv_5x5': lambda C, stride, affine, repeats=1: DilConv(C, C, 5, stride, 4, 2, affine=affine), 'conv_7x1_1x7': lambda C, stride, affine, repeats=1: Seq( ops.Relu(inplace=False), ops.Conv2d(C, C, (1, 7), stride=(1, stride), padding=(0, 3), bias=False), ops.Conv2d(C, C, (7, 1), stride=(stride, 1), padding=(3, 0), bias=False), ops.BatchNorm2d(C, affine=affine)), 'conv1x1': lambda C, stride, affine, repeats=1: Seq( conv1X1(C, C, stride=stride), ops.BatchNorm2d(C, affine=affine), ops.Relu(inplace=False)), 'conv3x3': lambda C, stride, affine, repeats=1: Seq( conv3x3(C, C, stride=stride), ops.BatchNorm2d(C, affine=affine), ops.Relu(inplace=False)), 'conv5x5': lambda C, stride, affine, repeats=1: Seq( conv5x5(C, C, stride=stride), ops.BatchNorm2d(C, affine=affine), ops.Relu(inplace=False)), 'conv7x7': lambda C, stride, affine, repeats=1: Seq( conv7x7(C, C, stride=stride), ops.BatchNorm2d(C, affine=affine), ops.Relu(inplace=False)), 'conv3x3_dil2': lambda C, stride, affine, repeats=1: Seq( conv3x3(C, C, stride=stride, dilation=2), ops.BatchNorm2d(C, affine=affine), ops.Relu(inplace=False)), 'conv3x3_dil3': lambda C, stride, affine, repeats=1: Seq(
lambda C, stride, affine, repeats=1: DilConv( C, C, 5, stride, 4, 2, affine=affine), 'conv_7x1_1x7': lambda C, stride, affine, repeats=1: Seq( ops.Relu(inplace=False), ops.Conv2d( C, C, (1, 7), stride=(1, stride), padding=(0, 3), bias=False), ops.Conv2d( C, C, (7, 1), stride=(stride, 1), padding=(3, 0), bias=False), ops.BatchNorm2d(C, affine=affine)), 'conv1x1': lambda C, stride, affine, repeats=1: Seq(conv1X1(C, C, stride=stride), ops.BatchNorm2d(C, affine=affine), ops.Relu(inplace=False)), 'conv3x3': lambda C, stride, affine, repeats=1: Seq(conv3x3(C, C, stride=stride), ops.BatchNorm2d(C, affine=affine), ops.Relu(inplace=False)), 'conv5x5': lambda C, stride, affine, repeats=1: Seq(conv5x5(C, C, stride=stride), ops.BatchNorm2d(C, affine=affine), ops.Relu(inplace=False)), 'conv7x7': lambda C, stride, affine, repeats=1: Seq(conv7x7(C, C, stride=stride), ops.BatchNorm2d(C, affine=affine), ops.Relu(inplace=False)), 'conv3x3_dil2': lambda C, stride, affine, repeats=1: Seq( conv3x3(C, C, stride=stride, dilation=2), ops.BatchNorm2d(C, affine=affine), ops.Relu(inplace=False)), 'conv3x3_dil3':
def __init__(self, block, arch, base_channel, strides=(1, 2, 2, 2),
             dilations=(1, 1, 1, 1), num_classes=1000, groups=1, base_width=64,
             structure='full', Conv2d='Conv2d', norm_layer=None):
    """Construct the ResNet_arch class.

    :param block: BasicBlock or Bottleneck class (its ``expansion`` attribute is read)
    :param arch: code of model — stages separated by '-' with one digit per block;
        a ``2`` marks a channel-expanding block (doubles the stage width)
    :param base_channel: base channel numbers
    :param strides: stride of each stage (immutable default; any indexable works)
    :param dilations: dilation of each stage (immutable default; any indexable works)
    :param num_classes: number of output classes
    :param groups: groups of convolution layer
    :param base_width: base channel numbers per group
    :param structure: structure of the model — 'full', 'drop_last' or 'backbone'
    :param Conv2d: type of conv layer
    :param norm_layer: norm layer spec dict; None means ``{"norm_type": 'BN'}``
        (default kept out of the signature to avoid a shared mutable default)
    :raises ValueError: if ``structure`` is not a known structure name
    """
    # Initialize nn.Module machinery before any attribute assignment.
    super(ResNet_arch, self).__init__()
    # Raise instead of assert: asserts are stripped under ``python -O``.
    if structure not in ('full', 'drop_last', 'backbone'):
        raise ValueError('unknown structure: %s' % repr(structure))
    if norm_layer is None:
        norm_layer = {"norm_type": 'BN'}
    self.structure = structure
    self.num_classes = num_classes
    # e.g. '121-2' -> [[1, 2, 1], [2]]
    self.arch = [[int(a) for a in stage] for stage in arch.split('-')]
    self.base_channel = base_channel
    self.strides = strides
    self.dilations = dilations
    # Stem: two stride-2 3x3 convs (half channels, then full base_channel).
    self.conv1 = conv3x3(3, base_channel // 2, stride=2)
    self.bn1 = build_norm_layer(base_channel // 2, **norm_layer)
    self.relu = nn.ReLU(inplace=False)
    self.conv2 = conv3x3(base_channel // 2, base_channel, stride=2)
    self.bn2 = build_norm_layer(base_channel, **norm_layer)
    self.res_layers = []
    self.block = block
    total_expand = 0
    inplanes = planes = self.base_channel
    self.stage_out_channels = []
    # Loop variable renamed from ``arch`` so it no longer shadows the parameter.
    for i, stage_arch in enumerate(self.arch):
        num_expand = stage_arch.count(2)
        total_expand += num_expand
        stride = self.strides[i]
        res_layer, out_channels = self.make_res_layer(
            self.block, inplanes, planes, stage_arch, groups=groups,
            base_width=base_width, stride=stride, norm_layer=norm_layer,
            Conv2d=Conv2d)
        self.stage_out_channels.append(out_channels)
        # Each expanding block doubles the width for the next stage.
        planes = self.base_channel * 2 ** total_expand
        inplanes = planes * self.block.expansion
        layer_name = 'layer{}'.format(i + 1)
        self.add_module(layer_name, res_layer)
        self.res_layers.append(layer_name)
    self.out_channels = out_channels