Example #1
    def create_block_list(self):
        self.clear_list()

        backbone = self.factory.get_base_model(BackboneName.GhostNet,
                                               self.data_channel)
        base_out_channels = backbone.get_outchannel_list()
        self.add_block_list(BlockType.BaseNet, backbone, base_out_channels[-1])

        avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.add_block_list(LayerType.GlobalAvgPool, avgpool,
                            base_out_channels[-1])

        output_channel = 1280
        layer1 = FcLayer(base_out_channels[-1], output_channel)
        self.add_block_list(layer1.get_name(), layer1, output_channel)

        layer2 = NormalizeLayer(bn_name=NormalizationType.BatchNormalize1d,
                                out_channel=output_channel)
        self.add_block_list(layer2.get_name(), layer2, output_channel)

        layer3 = ActivationLayer(self.activation_name, inplace=False)
        self.add_block_list(layer3.get_name(), layer3, output_channel)

        layer4 = nn.Dropout(0.2)
        self.add_block_list(LayerType.Dropout, layer4, output_channel)

        layer5 = nn.Linear(output_channel, self.class_number)
        self.add_block_list(LayerType.FcLinear, layer5, self.class_number)

        self.create_loss()
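For orientation, a minimal plain-PyTorch sketch of the head assembled above, assuming FcLayer flattens and applies a linear layer and that the wrapper layers reduce to the standard modules named in their arguments:

import torch
import torch.nn as nn

# Illustrative equivalent of the classifier head built above; 960 is an
# assumed backbone output width and class_number an assumed class count.
def ghostnet_head(backbone_channels=960, class_number=1000, hidden=1280):
    return nn.Sequential(
        nn.AdaptiveAvgPool2d((1, 1)),          # GlobalAvgPool
        nn.Flatten(1),                         # FcLayer is assumed to flatten
        nn.Linear(backbone_channels, hidden),  # FcLayer
        nn.BatchNorm1d(hidden),                # NormalizeLayer (BatchNormalize1d)
        nn.ReLU(inplace=False),                # ActivationLayer
        nn.Dropout(0.2),
        nn.Linear(hidden, class_number),       # final FcLinear
    )

logits = ghostnet_head()(torch.randn(2, 960, 7, 7))   # -> shape (2, 1000)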
Example #2
    def create_block_list(self):
        self.clear_list()

        if (self.scale < 0.99) or (self.scale > 1.001):
            up = Upsample(scale_factor=self.scale, mode='nearest')
            self.add_block_list(up.get_name(), up, self.data_channel)

        stage1 = ConvBNActivationBlock(in_channels=self.data_channel,
                                       out_channels=self.out_channels[0],
                                       kernel_size=3,
                                       stride=2,
                                       padding=1,
                                       bias=False,
                                       bnName=self.bn_name,
                                       activationName=self.activation_name)
        self.add_block_list(stage1.get_name(), stage1, self.out_channels[0])

        for index, number in enumerate(self.repeats):
            self.make_layers(number, self.out_channels[index],
                             self.out_channels[index + 1],
                             self.kernel_sizes[index], self.strides[index],
                             self.expands[index])
            activate = ActivationLayer(activation_name=self.activation_name)
            self.add_block_list(activate.get_name(), activate,
                                self.block_out_channels[-1])

        stage9 = ConvBNActivationBlock(in_channels=self.out_channels[7],
                                       out_channels=self.out_channels[8],
                                       kernel_size=1,
                                       stride=1,
                                       padding=0,
                                       bias=False,
                                       bnName=self.bn_name,
                                       activationName=self.activation_name)
        self.add_block_list(stage9.get_name(), stage9, self.out_channels[8])
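ConvBNActivationBlock recurs throughout these examples; under the argument names used here it is assumed to be the usual convolution / normalization / activation triple, roughly:

import torch.nn as nn

# Assumed shape of ConvBNActivationBlock: Conv2d + BatchNorm2d + activation.
def conv_bn_act(in_channels, out_channels, kernel_size,
                stride=1, padding=0, bias=False):
    return nn.Sequential(
        nn.Conv2d(in_channels, out_channels, kernel_size,
                  stride=stride, padding=padding, bias=bias),
        nn.BatchNorm2d(out_channels),
        nn.ReLU(inplace=True),
    )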
Example #3
    def create_block_list(self):
        self.clear_list()

        backbone = PointNet(data_channel=self.data_channel,
                            feature_transform=self.feature_transform,
                            bn_name=self.bn_name,
                            activation_name=self.activation_name)
        base_out_channels = backbone.get_outchannel_list()
        self.add_block_list(BlockType.BaseNet, backbone, base_out_channels[-1])

        input_channel = self.block_out_channels[-1]
        fc1 = FcBNActivationBlock(input_channel,
                                  512,
                                  bnName=self.bn_name,
                                  activationName=self.activation_name)
        self.add_block_list(fc1.get_name(), fc1, 512)

        input_channel = 512
        fc2 = nn.Linear(input_channel, 256)
        self.add_block_list(LayerType.FcLinear, fc2, 256)

        input_channel = 256
        dropout = nn.Dropout(p=0.3)
        self.add_block_list(LayerType.Dropout, dropout, input_channel)

        normalize = NormalizeLayer(self.bn_name, input_channel)
        self.add_block_list(normalize.get_name(), normalize, input_channel)

        activate = ActivationLayer(self.activation_name, inplace=False)
        self.add_block_list(activate.get_name(), activate, input_channel)

        fc3 = nn.Linear(input_channel, self.class_number)
        self.add_block_list(LayerType.FcLinear, fc3, self.class_number)

        self.create_loss()
Example #4
    def __init__(self,
                 in_channels,
                 nclass,
                 ncodes=32,
                 se_loss=True,
                 bn_name=NormalizationType.BatchNormalize2d,
                 activation_name=ActivationType.ReLU):
        super().__init__(EncNetBlockName.EncBlock)
        self.se_loss = se_loss
        self.encoding = nn.Sequential(
            ConvBNActivationBlock(in_channels=in_channels,
                                  out_channels=in_channels,
                                  kernel_size=1,
                                  bias=False,
                                  bnName=bn_name,
                                  activationName=activation_name),
            Encoding(D=in_channels, K=ncodes),
            NormalizeLayer(NormalizationType.BatchNormalize1d, ncodes),
            ActivationLayer(activation_name), MeanLayer(dim=1))
        self.fc = nn.Sequential(nn.Linear(in_channels, in_channels),
                                nn.Sigmoid())

        self.activate = ActivationLayer(activation_name)

        if self.se_loss:
            self.se_layer = nn.Linear(in_channels, nclass)
Example #5
    def __init__(self,
                 in_planes,
                 out_planes,
                 stride=1,
                 bn_name=NormalizationType.BatchNormalize2d,
                 activation_name=ActivationType.ReLU):
        super().__init__(PNASNetBlockName.CellB)
        self.stride = stride
        # Left branch
        self.sep_conv1 = SeparableConv(in_planes,
                                       out_planes,
                                       kernel_size=7,
                                       stride=stride,
                                       bn_name=bn_name)
        self.sep_conv2 = SeparableConv(in_planes,
                                       out_planes,
                                       kernel_size=3,
                                       stride=stride,
                                       bn_name=bn_name)
        # Right branch
        self.sep_conv3 = SeparableConv(in_planes,
                                       out_planes,
                                       kernel_size=5,
                                       stride=stride,
                                       bn_name=bn_name)
        if stride == 2:
            self.conv1 = ConvBNActivationBlock(
                in_channels=in_planes,
                out_channels=out_planes,
                kernel_size=1,
                stride=1,
                padding=0,
                bias=False,
                bnName=bn_name,
                activationName=ActivationType.Linear)

        self.activate1 = ActivationLayer(activation_name, inplace=False)
        self.activate2 = ActivationLayer(activation_name, inplace=False)

        # Reduce channels
        self.conv2 = ConvBNActivationBlock(in_channels=2 * out_planes,
                                           out_channels=out_planes,
                                           kernel_size=1,
                                           stride=1,
                                           padding=0,
                                           bias=False,
                                           bnName=bn_name,
                                           activationName=activation_name)
Example #6
 def __init__(self,
              low_channels,
              high_channels,
              out_channels,
              scale,
              bn_name=NormalizationType.BatchNormalize2d,
              activation_name=ActivationType.ReLU):
     super().__init__(ICNetBlockName.CascadeFeatureFusion)
     self.up = Upsample(scale_factor=scale, mode='bilinear')
     self.conv_low = ConvBNActivationBlock(
         in_channels=low_channels,
         out_channels=out_channels,
         kernel_size=3,
         padding=2,
         dilation=2,
         bias=False,
         bnName=bn_name,
         activationName=ActivationType.Linear)
     self.conv_high = ConvBNActivationBlock(
         in_channels=high_channels,
         out_channels=out_channels,
         kernel_size=1,
         bias=False,
         bnName=bn_name,
         activationName=ActivationType.Linear)
     self.activation = ActivationLayer(activation_name=activation_name,
                                       inplace=False)
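The forward pass is not included in the snippet; for a cascade feature fusion block wired this way it is presumably the standard ICNet combination (an assumption, not the author's code):

# Assumed fusion path: upsample the low-resolution branch, project both
# branches to out_channels, add, then activate.
def cascade_feature_fusion(block, x_low, x_high):
    x_low = block.conv_low(block.up(x_low))
    x_high = block.conv_high(x_high)
    return block.activation(x_low + x_high)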
Example #7
 def __init__(self,
              in_channel,
              reduction=16,
              activate_name=ActivationType.ReLU):
     super().__init__(BlockType.SEBlock)
     # self.avg_pool = GlobalAvgPool2d()
     self.avg_pool = nn.AdaptiveAvgPool2d(1)
     self.fc = nn.Sequential(nn.Linear(in_channel, in_channel // reduction),
                             ActivationLayer(activate_name),
                             nn.Linear(in_channel // reduction, in_channel),
                             nn.Sigmoid())
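A self-contained version of this squeeze-and-excitation block, including the forward pass the snippet omits (the channel-gating behaviour is assumed, following the standard SE design):

import torch
import torch.nn as nn

class SqueezeExcite(nn.Module):
    """Squeeze (global average pool) then excite (two-layer sigmoid gate)."""
    def __init__(self, channels, reduction=16):
        super().__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Sequential(
            nn.Linear(channels, channels // reduction), nn.ReLU(),
            nn.Linear(channels // reduction, channels), nn.Sigmoid())

    def forward(self, x):
        n, c, _, _ = x.shape
        weights = self.fc(self.avg_pool(x).view(n, c)).view(n, c, 1, 1)
        return x * weights                 # channel-wise re-weighting

out = SqueezeExcite(64)(torch.randn(2, 64, 8, 8))   # same shape as the input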
Example #8
    def __init__(self,
                 in_channel,
                 out_channel,
                 stride,
                 bn_name=NormalizationType.BatchNormalize2d,
                 activation_name=ActivationType.ReLU):
        super().__init__(ResNextBlockName.ResNextBottleNeckC)

        C = ResNextBottleNeck.CARDINALITY

        #"""We note that the input/output width of the template is fixed as
        #256-d (Fig. 3), We note that the input/output width of the template
        #is fixed as 256-d (Fig. 3), and all widths are dou- bled each time
        #when the feature map is subsampled (see Table 1)."""
        # number of channels per group
        D = int(ResNextBottleNeck.DEPTH * out_channel /
                ResNextBottleNeck.BASEWIDTH)
        self.split_transforms = nn.Sequential(
            ConvBNActivationBlock(in_channels=in_channel,
                                  out_channels=C * D,
                                  kernel_size=1,
                                  groups=C,
                                  bias=False,
                                  bnName=bn_name,
                                  activationName=activation_name),
            ConvBNActivationBlock(in_channels=C * D,
                                  out_channels=C * D,
                                  kernel_size=3,
                                  stride=stride,
                                  groups=C,
                                  padding=1,
                                  bias=False,
                                  bnName=bn_name,
                                  activationName=activation_name),
            ConvBNActivationBlock(in_channels=C * D,
                                  out_channels=out_channel * 4,
                                  kernel_size=1,
                                  bias=False,
                                  bnName=bn_name,
                                  activationName=ActivationType.Linear))

        self.shortcut = nn.Sequential()
        if stride != 1 or in_channel != out_channel * 4:
            self.shortcut = ConvBNActivationBlock(
                in_channels=in_channel,
                out_channels=out_channel * 4,
                kernel_size=1,
                stride=stride,
                bias=False,
                bnName=bn_name,
                activationName=ActivationType.Linear)

        self.relu = ActivationLayer(activation_name=activation_name,
                                    inplace=False)
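The forward pass is omitted; for a block built this way it is presumably the usual residual sum followed by the activation (an assumption):

# Assumed forward for ResNextBottleNeckC: grouped bottleneck transform plus
# projection (or identity) shortcut, then the activation.
def resnext_bottleneck_forward(block, x):
    return block.relu(block.split_transforms(x) + block.shortcut(x))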
Example #9
    def create_block_list(self):
        self.clear_list()

        layer1 = ConvBNActivationBlock(in_channels=self.data_channel,
                                       out_channels=self.num_init_features,
                                       kernel_size=7,
                                       stride=2,
                                       padding=3,
                                       bnName=self.bnName,
                                       activationName=self.activationName)
        self.add_block_list(layer1.get_name(), layer1, self.num_init_features)

        layer2 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.add_block_list(LayerType.MyMaxPool2d, layer2,
                            self.num_init_features)

        self.in_channels = self.num_init_features
        for index, num_block in enumerate(self.num_blocks):
            self.make_densenet_layer(num_block, self.dilations[index],
                                     self.bn_size, self.growth_rate,
                                     self.drop_rate, self.bnName,
                                     self.activationName)
            self.in_channels = self.block_out_channels[-1]
            if index != len(self.num_blocks) - 1:
                trans = TransitionBlock(in_channel=self.in_channels,
                                        output_channel=self.in_channels // 2,
                                        stride=1,
                                        bnName=self.bnName,
                                        activationName=self.activationName)
                self.add_block_list(trans.get_name(), trans,
                                    self.in_channels // 2)
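                # note: a regular 2x2 average pool (DenseNet transition
                # pooling), registered here under the LayerType.GlobalAvgPool key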
                avg_pool = nn.AvgPool2d(kernel_size=2, stride=2)
                self.add_block_list(LayerType.GlobalAvgPool, avg_pool,
                                    self.block_out_channels[-1])
                self.in_channels = self.block_out_channels[-1]
        layer3 = NormalizeLayer(bn_name=self.bnName,
                                out_channel=self.in_channels)
        self.add_block_list(layer3.get_name(), layer3, self.in_channels)

        layer4 = ActivationLayer(self.activationName, False)
        self.add_block_list(layer4.get_name(), layer4, self.in_channels)
Example #10
    def create_block_list(self):
        self.clear_list()

        backbone = self.factory.get_base_model(BackboneName.Vgg19,
                                               self.data_channel)
        base_out_channels = backbone.get_outchannel_list()
        self.add_block_list(BlockType.BaseNet, backbone, base_out_channels[-1])

        # avgpool = nn.AdaptiveAvgPool2d((7, 7))

        layer1 = FcLayer(base_out_channels[-1], 4096)
        self.add_block_list(layer1.get_name(), layer1, 4096)

        layer2 = ActivationLayer(self.activation_name, inplace=False)
        self.add_block_list(layer2.get_name(), layer2, 4096)

        layer3 = nn.Dropout()
        self.add_block_list(LayerType.Dropout, layer3, 4096)

        layer4 = nn.Linear(4096, 4096)
        self.add_block_list(LayerType.FcLinear, layer4, 4096)

        layer5 = ActivationLayer(self.activation_name, inplace=False)
        self.add_block_list(layer5.get_name(), layer5, 4096)

        layer6 = nn.Dropout()
        self.add_block_list(LayerType.Dropout, layer6, 4096)

        layer7 = nn.Linear(4096, self.class_number)
        self.add_block_list(LayerType.FcLinear, layer7, self.class_number)

        self.create_loss()
Example #11
    def __init__(self,
                 in_planes,
                 out_planes,
                 stages,
                 activation_name=ActivationType.ReLU):
        super().__init__(RefineNetBlockName.RefineNetBlock)
        self.activate = ActivationLayer(activation_name, inplace=False)

        self.mflow = CRPBlock(in_planes, out_planes, stages)
        self.conv1 = nn.Conv2d(out_planes, 256, 1, bias=False)
        self.up = Upsample(scale_factor=2, mode='bilinear')

        self.conv2 = nn.Conv2d(256, 256, 1, bias=False)
Example #12
    def create_block_list(self):
        self.block_out_channels = []
        self.index = 0

        stem = ConvBNActivationBlock(in_channels=self.data_channel,
                                     out_channels=self.first_output,
                                     kernel_size=3,
                                     padding=1,
                                     bias=False,
                                     bnName=self.bn_name,
                                     activationName=ActivationType.Linear)
        self.add_block_list(stem.get_name(), stem, self.first_output)

        for i in range(self.reduction_num):
            self.make_normal(NormalCell, self.repeat_cell_num, self.filters)
            self.filters *= 2
            self.make_reduction(ReductionCell, self.filters)

        self.make_normal(NormalCell, self.repeat_cell_num, self.filters)

        relu = ActivationLayer(activation_name=self.activation_name,
                               inplace=False)
        self.add_block_list(relu.get_name(), relu, self.filters * 6)
Example #13
 def __init__(self, in_channel, kernel_size,
              padding=0, stride=1, dilation=1, bias=False,
              bn_name=NormalizationType.BatchNormalize2d,
              activation_name=ActivationType.ReLU):
     super().__init__(BlockType.DepthwiseConv2dBlock)
     conv = nn.Conv2d(in_channel, in_channel, kernel_size,
                      padding=padding, stride=stride, dilation=dilation,
                      groups=in_channel, bias=bias)
     normal = NormalizeLayer(bn_name, in_channel)
     activation = ActivationLayer(activation_name)
     self.block = nn.Sequential(OrderedDict([
         (LayerType.Convolutional, conv),
         (bn_name, normal),
         (activation_name, activation)
     ]))
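With groups=in_channel the convolution above applies one filter per channel (a depthwise convolution); pairing it with a 1x1 pointwise projection gives the usual depthwise-separable factorization, sketched here with plain modules (an illustration, not the repository's SeparableConv):

import torch.nn as nn

# Depthwise 3x3 (groups = channels) followed by a 1x1 pointwise projection.
def depthwise_separable(in_channels, out_channels):
    return nn.Sequential(
        nn.Conv2d(in_channels, in_channels, 3, padding=1,
                  groups=in_channels, bias=False),
        nn.BatchNorm2d(in_channels), nn.ReLU(),
        nn.Conv2d(in_channels, out_channels, 1, bias=False),
        nn.BatchNorm2d(out_channels), nn.ReLU(),
    )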
Example #14
    def __init__(self,
                 input_channel,
                 bn_name=NormalizationType.BatchNormalize2d,
                 activation_name=ActivationType.ReLU):
        super().__init__(InceptionBlockName.InceptionResNetA)
        self.branch3x3stack = nn.Sequential(
            ConvBNActivationBlock(input_channel,
                                  32,
                                  kernel_size=1,
                                  bnName=bn_name,
                                  activationName=activation_name),
            ConvBNActivationBlock(32,
                                  48,
                                  kernel_size=3,
                                  padding=1,
                                  bnName=bn_name,
                                  activationName=activation_name),
            ConvBNActivationBlock(48,
                                  64,
                                  kernel_size=3,
                                  padding=1,
                                  bnName=bn_name,
                                  activationName=activation_name))

        self.branch3x3 = nn.Sequential(
            ConvBNActivationBlock(input_channel,
                                  32,
                                  kernel_size=1,
                                  bnName=bn_name,
                                  activationName=activation_name),
            ConvBNActivationBlock(32,
                                  32,
                                  kernel_size=3,
                                  padding=1,
                                  bnName=bn_name,
                                  activationName=activation_name))

        self.branch1x1 = ConvBNActivationBlock(input_channel,
                                               32,
                                               kernel_size=1,
                                               bnName=bn_name,
                                               activationName=activation_name)

        self.reduction1x1 = nn.Conv2d(128, 384, kernel_size=1)
        self.shortcut = nn.Conv2d(input_channel, 384, kernel_size=1)
        self.bn = NormalizeLayer(bn_name=bn_name, out_channel=384)
        self.relu = ActivationLayer(activation_name=activation_name,
                                    inplace=False)
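The forward pass is not shown; the channel arithmetic (32 + 32 + 64 = 128 branch channels, expanded to 384 to match the shortcut) suggests the standard Inception-ResNet residual scheme, assumed below:

import torch

# Assumed forward for InceptionResNetA: concatenate the branches, expand with
# reduction1x1, add the shortcut, then normalize and activate.
def inception_resnet_a_forward(block, x):
    branches = torch.cat([block.branch1x1(x),
                          block.branch3x3(x),
                          block.branch3x3stack(x)], dim=1)   # 128 channels
    out = block.reduction1x1(branches) + block.shortcut(x)   # 384 channels
    return block.relu(block.bn(out))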
Example #15
 def create_layer(self, module_def):
     if module_def['type'] == LayerType.MyMaxPool2d:
         kernel_size = int(module_def['size'])
         stride = int(module_def['stride'])
         maxpool = MyMaxPool2d(kernel_size, stride)
         self.addBlockList(LayerType.MyMaxPool2d, maxpool, self.filters)
         self.input_channels = self.filters
     elif module_def['type'] == LayerType.GlobalAvgPool:
         globalAvgPool = GlobalAvgPool2d()
         self.addBlockList(LayerType.GlobalAvgPool, globalAvgPool, self.filters)
         self.input_channels = self.filters
     elif module_def['type'] == LayerType.FcLayer:
         num_output = int(module_def['num_output'])
         self.filters = num_output
         layer = FcLayer(self.input_channels, num_output)
         self.addBlockList(LayerType.FcLayer, layer, num_output)
         self.input_channels = num_output
     elif module_def['type'] == LayerType.Upsample:
         scale = int(module_def['stride'])
         mode = module_def.get('model', 'bilinear')
         upsample = Upsample(scale_factor=scale, mode=mode)
         self.addBlockList(LayerType.Upsample, upsample, self.filters)
         self.input_channels = self.filters
     elif module_def['type'] == LayerType.MultiplyLayer:
         layer = MultiplyLayer(module_def['layers'])
         self.addBlockList(LayerType.MultiplyLayer, layer, self.filters)
         self.input_channels = self.filters
     elif module_def['type'] == LayerType.AddLayer:
         layer = AddLayer(module_def['layers'])
         self.addBlockList(LayerType.AddLayer, layer, self.filters)
         self.input_channels = self.filters
     elif module_def['type'] == LayerType.Dropout:
         probability = float(module_def['probability'])
         layer = nn.Dropout(p=probability, inplace=False)
         self.addBlockList(LayerType.Dropout, layer, self.filters)
         self.input_channels = self.filters
     elif module_def['type'] == LayerType.NormalizeLayer:
         bn_name = module_def['batch_normalize'].strip()
         layer = NormalizeLayer(bn_name, self.filters)
         self.addBlockList(LayerType.NormalizeLayer, layer, self.filters)
         self.input_channels = self.filters
     elif module_def['type'] == LayerType.ActivationLayer:
         activation_name = module_def['activation'].strip()
         layer = ActivationLayer(activation_name, inplace=False)
         self.addBlockList(LayerType.ActivationLayer, layer, self.filters)
         self.input_channels = self.filters
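Each module_def is a flat dict of strings read from a layer config; purely for illustration (these key/value pairs are made up to match the lookups above, and LayerType is assumed to come from the same package as in the snippet):

# Hypothetical module_def entries for the parser above; 'type' carries a
# LayerType constant and the remaining values are strings, hence the
# int()/float() casts in create_layer.
example_defs = [
    {'type': LayerType.MyMaxPool2d, 'size': '3', 'stride': '2'},
    {'type': LayerType.FcLayer, 'num_output': '1024'},
    {'type': LayerType.Dropout, 'probability': '0.5'},
]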
Example #16
    def __init__(self, in_channels, out_channels, stride=1, dilation=1,
                 start_with_relu=True, bn_name=NormalizationType.BatchNormalize2d,
                 activation_name=ActivationType.ReLU):
        super().__init__(XceptionBlockName.BlockA)
        if out_channels != in_channels or stride != 1:
            self.skip = ConvBNActivationBlock(in_channels=in_channels,
                                              out_channels=out_channels,
                                              kernel_size=1,
                                              stride=stride,
                                              bias=False,
                                              bnName=bn_name,
                                              activationName=ActivationType.Linear)
        else:
            self.skip = None

        self.relu = ActivationLayer(activation_name, inplace=False)
        rep = list()
        inter_channels = out_channels // 4

        if start_with_relu:
            rep.append(self.relu)
        rep.append(SeparableConv2dBNActivation(in_channels, inter_channels, 3, 1,
                                               dilation, bn_name=bn_name,
                                               activation_name=activation_name))
        rep.append(NormalizeLayer(bn_name, inter_channels))

        rep.append(self.relu)
        rep.append(SeparableConv2dBNActivation(inter_channels, inter_channels, 3, 1,
                                               dilation, bn_name=bn_name,
                                               activation_name=activation_name))
        rep.append(NormalizeLayer(bn_name, inter_channels))

        if stride != 1:
            rep.append(self.relu)
            rep.append(SeparableConv2dBNActivation(inter_channels, out_channels, 3, stride,
                                                   bn_name=bn_name,
                                                   activation_name=activation_name))
            rep.append(NormalizeLayer(bn_name, out_channels))
        else:
            rep.append(self.relu)
            rep.append(SeparableConv2dBNActivation(inter_channels, out_channels, 3, 1,
                                                   bn_name=bn_name,
                                                   activation_name=activation_name))
            rep.append(NormalizeLayer(bn_name, out_channels))
        self.rep = nn.Sequential(*rep)
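The residual combination is not shown; given the optional projection skip it is presumably (an assumption):

# Assumed forward for BlockA: separable-conv stack plus either the 1x1
# projection skip or an identity skip.
def block_a_forward(block, x):
    skip = block.skip(x) if block.skip is not None else x
    return block.rep(x) + skip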
Example #17
    def __init__(self,
                 in_channels,
                 out_channels,
                 stride,
                 reduction=16,
                 bn_name=NormalizationType.BatchNormalize2d,
                 activation_name=ActivationType.ReLU):
        super().__init__(SeNetBlockName.BottleneckResidualSEBlock)

        self.residual = nn.Sequential(
            ConvBNActivationBlock(in_channels=in_channels,
                                  out_channels=out_channels,
                                  kernel_size=1,
                                  bnName=bn_name,
                                  activationName=activation_name),
            ConvBNActivationBlock(in_channels=out_channels,
                                  out_channels=out_channels,
                                  kernel_size=3,
                                  stride=stride,
                                  padding=1,
                                  bnName=bn_name,
                                  activationName=activation_name),
            ConvBNActivationBlock(in_channels=out_channels,
                                  out_channels=out_channels * self.expansion,
                                  kernel_size=1,
                                  bnName=bn_name,
                                  activationName=activation_name))

        self.se_block = SEBlock(out_channels * self.expansion, reduction)

        self.shortcut = nn.Sequential()
        if stride != 1 or in_channels != out_channels * self.expansion:
            self.shortcut = ConvBNActivationBlock(
                in_channels=in_channels,
                out_channels=out_channels * self.expansion,
                kernel_size=1,
                stride=stride,
                bnName=bn_name,
                activationName=ActivationType.Linear)
        self.relu = ActivationLayer(activation_name=activation_name,
                                    inplace=False)
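The forward pass is omitted; assuming SEBlock returns the rescaled feature map (as in the SE sketch after Example #7), the usual combination would be:

# Assumed forward for BottleneckResidualSEBlock: SE-rescaled residual stack
# plus (projected) shortcut, then the activation.
def se_bottleneck_forward(block, x):
    residual = block.se_block(block.residual(x))
    return block.relu(residual + block.shortcut(x))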
Example #18
 def __init__(self, highter_in_channels, lower_in_channels, out_channels,
              scale_factor=4, bn_name=NormalizationType.BatchNormalize2d,
              activation_name=ActivationType.ReLU):
     super().__init__(FastSCNNBlockName.FeatureFusionBlock)
     self.scale_factor = scale_factor
     self.dwconv = ConvBNActivationBlock(in_channels=lower_in_channels,
                                         out_channels=out_channels,
                                         kernel_size=1,
                                         bnName=bn_name,
                                         activationName=activation_name)
     self.conv_lower_res = ConvBNActivationBlock(in_channels=out_channels,
                                                 out_channels=out_channels,
                                                 kernel_size=1,
                                                 bnName=bn_name,
                                                 activationName=ActivationType.Linear)
     self.conv_higher_res = ConvBNActivationBlock(in_channels=highter_in_channels,
                                                  out_channels=out_channels,
                                                  kernel_size=1,
                                                  bnName=bn_name,
                                                  activationName=ActivationType.Linear)
     self.relu = ActivationLayer(activation_name, inplace=False)
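The fusion itself is not shown; for Fast-SCNN's feature fusion module it is presumably the following, using the stored scale_factor (an assumption):

import torch.nn.functional as F

# Assumed forward for FeatureFusionBlock: upsample the low-resolution branch,
# project both branches to out_channels, add, then activate.
def feature_fusion_forward(block, higher_res, lower_res):
    lower_res = F.interpolate(lower_res, scale_factor=block.scale_factor,
                              mode='bilinear', align_corners=False)
    lower_res = block.conv_lower_res(block.dwconv(lower_res))
    higher_res = block.conv_higher_res(higher_res)
    return block.relu(lower_res + higher_res)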
Example #19
 def __init__(self,
              in_channels,
              middle_channels,
              out_channels,
              activation_name=ActivationType.ReLU,
              is_deconv=True):
     super().__init__(TernausNetBlockName.DecoderBlock)
     if is_deconv:
         self.block = nn.Sequential(
             ConvActivationBlock(in_channels=in_channels,
                                 out_channels=middle_channels,
                                 kernel_size=3,
                                 padding=1,
                                 activationName=activation_name),
             # nn.ConvTranspose2d(middle_channels, out_channels, kernel_size=4, stride=2,
             #                    padding=1),
             nn.ConvTranspose2d(middle_channels,
                                out_channels,
                                kernel_size=3,
                                stride=2,
                                padding=1,
                                output_padding=1),
             ActivationLayer(activation_name),
         )
     else:
         self.block = nn.Sequential(
             Upsample(scale_factor=2, mode='bilinear'),
             ConvActivationBlock(in_channels=in_channels,
                                 out_channels=middle_channels,
                                 kernel_size=3,
                                 padding=1,
                                 activationName=activation_name),
             ConvActivationBlock(in_channels=middle_channels,
                                 out_channels=out_channels,
                                 kernel_size=3,
                                 padding=1,
                                 activationName=activation_name),
         )
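With kernel_size=3, stride=2, padding=1 and output_padding=1, the transposed convolution exactly doubles the spatial size, matching the Upsample(scale_factor=2) branch: out = (in - 1)*2 - 2*1 + (3 - 1) + 1 + 1 = 2*in. A quick check:

import torch
import torch.nn as nn

# Verify the deconv settings used above double the spatial resolution.
deconv = nn.ConvTranspose2d(64, 32, kernel_size=3, stride=2,
                            padding=1, output_padding=1)
print(deconv(torch.randn(1, 64, 16, 16)).shape)   # torch.Size([1, 32, 32, 32])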
Example #20
 def __init__(self, inplanes, planes, stride=1, downsample=None,
              bn_name=NormalizationType.BatchNormalize2d,
              activation_name=ActivationType.ReLU):
     super().__init__(DFNetBlockName.BasicBlock)
     self.conv1 = ConvBNActivationBlock(in_channels=inplanes,
                                        out_channels=planes,
                                        kernel_size=3,
                                        stride=stride,
                                        padding=1,
                                        bias=False,
                                        bnName=bn_name,
                                        activationName=activation_name)
     self.conv2 = ConvBNActivationBlock(in_channels=planes,
                                        out_channels=planes,
                                        kernel_size=3,
                                        stride=1,
                                        padding=1,
                                        bias=False,
                                        bnName=bn_name,
                                        activationName=ActivationType.Linear)
     self.relu = ActivationLayer(activation_name=activation_name)
     self.downsample = downsample
     self.stride = stride
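The forward pass is not shown; given the linear second convolution and the optional downsample module it is presumably the standard basic residual block (an assumption):

# Assumed forward for the DFNet BasicBlock: two 3x3 convolutions, the second
# without activation, plus an (optionally downsampled) identity path.
def basic_block_forward(block, x):
    identity = block.downsample(x) if block.downsample is not None else x
    return block.relu(block.conv2(block.conv1(x)) + identity)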
Example #21
    def __init__(self,
                 input_channel,
                 bn_name=NormalizationType.BatchNormalize2d,
                 activation_name=ActivationType.ReLU):
        # """Figure 19. The schema for 8×8 grid (Inception-ResNet-C)
        # module of the Inception-ResNet-v2 network."""
        super().__init__(InceptionBlockName.InceptionResNetC)
        self.branch3x3 = nn.Sequential(
            ConvBNActivationBlock(input_channel,
                                  192,
                                  kernel_size=1,
                                  bnName=bn_name,
                                  activationName=activation_name),
            ConvBNActivationBlock(192,
                                  224,
                                  kernel_size=(1, 3),
                                  padding=(0, 1),
                                  bnName=bn_name,
                                  activationName=activation_name),
            ConvBNActivationBlock(224,
                                  256,
                                  kernel_size=(3, 1),
                                  padding=(1, 0),
                                  bnName=bn_name,
                                  activationName=activation_name))

        self.branch1x1 = ConvBNActivationBlock(input_channel,
                                               192,
                                               kernel_size=1,
                                               bnName=bn_name,
                                               activationName=activation_name)
        self.reduction1x1 = nn.Conv2d(448, 2048, kernel_size=1)
        self.shortcut = nn.Conv2d(input_channel, 2048, kernel_size=1)

        self.bn = NormalizeLayer(bn_name=bn_name, out_channel=2048)
        self.relu = ActivationLayer(activation_name=activation_name,
                                    inplace=False)
Example #22
    def __init__(self,
                 input_channel,
                 bn_name=NormalizationType.BatchNormalize2d,
                 activation_name=ActivationType.ReLU):
        super().__init__(InceptionBlockName.InceptionResNetB)
        self.branch7x7 = nn.Sequential(
            ConvBNActivationBlock(input_channel,
                                  128,
                                  kernel_size=1,
                                  bnName=bn_name,
                                  activationName=activation_name),
            ConvBNActivationBlock(128,
                                  160,
                                  kernel_size=(1, 7),
                                  padding=(0, 3),
                                  bnName=bn_name,
                                  activationName=activation_name),
            ConvBNActivationBlock(160,
                                  192,
                                  kernel_size=(7, 1),
                                  padding=(3, 0),
                                  bnName=bn_name,
                                  activationName=activation_name))

        self.branch1x1 = ConvBNActivationBlock(input_channel,
                                               192,
                                               kernel_size=1,
                                               bnName=bn_name,
                                               activationName=activation_name)

        self.reduction1x1 = nn.Conv2d(384, 1154, kernel_size=1)
        self.shortcut = nn.Conv2d(input_channel, 1154, kernel_size=1)

        self.bn = NormalizeLayer(bn_name=bn_name, out_channel=1154)
        self.relu = ActivationLayer(activation_name=activation_name,
                                    inplace=False)
Example #23
    def __init__(self,
                 in_channels,
                 channels,
                 stride=1,
                 dilation=1,
                 groups=1,
                 dropout=None,
                 dist_bn=False,
                 bn_name=NormalizationType.BatchNormalize2d,
                 activation_name=ActivationType.ReLU):
        super().__init__(WiderResNetBlockName.IdentityResidualBlock)
        # Check parameters for inconsistencies
        if len(channels) != 2 and len(channels) != 3:
            raise ValueError(
                "channels must contain either two or three values")
        if len(channels) == 2 and groups != 1:
            raise ValueError("groups > 1 are only valid if len(channels) == 3")
        self.dist_bn = dist_bn
        is_bottleneck = len(channels) == 3
        need_proj_conv = stride != 1 or in_channels != channels[-1]

        self.normal = NormalizeLayer(bn_name, in_channels)
        self.activate = ActivationLayer(activation_name, inplace=False)
        if not is_bottleneck:
            layers = [("conv1",
                       ConvBNActivationBlock(in_channels=in_channels,
                                             out_channels=channels[0],
                                             kernel_size=3,
                                             stride=stride,
                                             padding=dilation,
                                             dilation=dilation,
                                             bias=False,
                                             bnName=bn_name,
                                             activationName=activation_name)),
                      ("conv2",
                       nn.Conv2d(channels[0],
                                 channels[1],
                                 kernel_size=3,
                                 stride=1,
                                 padding=dilation,
                                 dilation=dilation,
                                 bias=False))]
            if dropout is not None:
                layers = [layers[0], ("dropout", dropout()), layers[1]]
        else:
            layers = [("conv1",
                       ConvBNActivationBlock(in_channels=in_channels,
                                             out_channels=channels[0],
                                             kernel_size=1,
                                             stride=stride,
                                             padding=0,
                                             bias=False,
                                             bnName=bn_name,
                                             activationName=activation_name)),
                      ("conv2",
                       ConvBNActivationBlock(in_channels=channels[0],
                                             out_channels=channels[1],
                                             kernel_size=3,
                                             stride=1,
                                             padding=dilation,
                                             dilation=dilation,
                                             groups=groups,
                                             bias=False,
                                             bnName=bn_name,
                                             activationName=activation_name)),
                      ("conv3",
                       nn.Conv2d(channels[1],
                                 channels[2],
                                 kernel_size=1,
                                 stride=1,
                                 padding=0,
                                 bias=False))]
            if dropout is not None:
                layers = [
                    layers[0], layers[1], ("dropout", dropout()), layers[2]
                ]

        self.convs = nn.Sequential(OrderedDict(layers))

        self.shortcut = nn.Sequential()
        if need_proj_conv:
            self.shortcut = nn.Conv2d(in_channels,
                                      channels[-1],
                                      kernel_size=1,
                                      stride=stride,
                                      padding=0,
                                      bias=False)
Example #24
    def __init__(self,
                 inp,
                 hidden_dim,
                 oup,
                 kernel_size,
                 stride,
                 use_se,
                 bn_name=NormalizationType.BatchNormalize2d,
                 activation_name=ActivationType.ReLU):
        super().__init__(MobileNetBlockName.InvertedResidual)
        assert stride in [1, 2]

        self.identity = stride == 1 and inp == oup

        if inp == hidden_dim:
            self.conv = nn.Sequential(
                # dw
                ConvBNActivationBlock(in_channels=hidden_dim,
                                      out_channels=hidden_dim,
                                      kernel_size=kernel_size,
                                      stride=stride,
                                      padding=(kernel_size - 1) // 2,
                                      groups=hidden_dim,
                                      bias=False,
                                      bnName=bn_name,
                                      activationName=activation_name),
                # Squeeze-and-Excite
                SEBlock(hidden_dim, reduction=4)
                if use_se else nn.Sequential(),
                # pw-linear
                ConvBNActivationBlock(in_channels=hidden_dim,
                                      out_channels=oup,
                                      kernel_size=1,
                                      stride=1,
                                      padding=0,
                                      bias=False,
                                      bnName=bn_name,
                                      activationName=ActivationType.Linear))
        else:
            self.conv = nn.Sequential(
                # pw
                ConvBNActivationBlock(in_channels=inp,
                                      out_channels=hidden_dim,
                                      kernel_size=1,
                                      stride=1,
                                      padding=0,
                                      bias=False,
                                      bnName=bn_name,
                                      activationName=activation_name),
                # dw
                ConvBNActivationBlock(in_channels=hidden_dim,
                                      out_channels=hidden_dim,
                                      kernel_size=kernel_size,
                                      stride=stride,
                                      padding=(kernel_size - 1) // 2,
                                      groups=hidden_dim,
                                      bias=False,
                                      bnName=bn_name,
                                      activationName=ActivationType.Linear),
                # Squeeze-and-Excite
                SEBlock(hidden_dim, reduction=4)
                if use_se else nn.Sequential(),
                ActivationLayer(activation_name=activation_name,
                                inplace=False),
                # pw-linear
                ConvBNActivationBlock(in_channels=hidden_dim,
                                      out_channels=oup,
                                      kernel_size=1,
                                      stride=1,
                                      padding=0,
                                      bias=False,
                                      bnName=bn_name,
                                      activationName=ActivationType.Linear))
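The forward pass is omitted; self.identity already encodes when the skip connection is valid, so it is presumably (an assumption):

# Assumed forward for the inverted residual: add the skip connection only
# when stride == 1 and the input/output channel counts match.
def inverted_residual_forward(block, x):
    return x + block.conv(x) if block.identity else block.conv(x)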