Example #1
    def __init__(self, vocab, embed_size, kernel_sizes, num_channels,
                 **kwargs):
        super(TextCNN, self).__init__(**kwargs)
        self.embedding = nn.Embedding(len(vocab), embed_size)

        # Embedding layer that is not updated during training
        self.constant_embedding = nn.Embedding(len(vocab), embed_size)
        self.dropout = nn.Dropout(0.5)
        self.decoder = nn.Dense(2)

        # The global pooling layer has no parameters, so a single instance can be shared
        self.pool = nn.GlobalAvgPool1D()
        self.convs = nn.Sequential()  # Create multiple one-dimensional convolutional layers

        for c, k in zip(num_channels, kernel_sizes):
            self.convs.add(nn.Conv1D(c, k, activation="relu"))
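For orientation, a minimal standalone sketch (shapes are illustrative, not from the source) of what the shared pooling instance does to one convolutional branch's output:

import mxnet as mx
from mxnet.gluon import nn

# Hypothetical conv output: batch 4, 100 channels, 50 time steps (NCW layout)
x = mx.nd.random.uniform(shape=(4, 100, 50))
pool = nn.GlobalAvgPool1D()  # parameter-free, so one instance can serve every branch
print(pool(x).shape)         # (4, 100, 1): the temporal axis collapses to length 1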
Example #2
def test_pool():
    layers1d = [
        nn.MaxPool1D(),
        nn.MaxPool1D(3),
        nn.MaxPool1D(3, 2),
        nn.AvgPool1D(),
        nn.AvgPool1D(count_include_pad=False),
        nn.GlobalAvgPool1D(),
        ]
    for layer in layers1d:
        check_layer_forward(layer, (1, 2, 10))


    layers2d = [
        nn.MaxPool2D(),
        nn.MaxPool2D((3, 3)),
        nn.MaxPool2D(3, 2),
        nn.AvgPool2D(),
        nn.AvgPool2D(count_include_pad=False),
        nn.GlobalAvgPool2D(),
        ]
    for layer in layers2d:
        check_layer_forward(layer, (1, 2, 10, 10))

    layers3d = [
        nn.MaxPool3D(),
        nn.MaxPool3D((3, 3, 3)),
        nn.MaxPool3D(3, 2),
        nn.AvgPool3D(),
        nn.AvgPool3D(count_include_pad=False),
        nn.GlobalAvgPool3D(),
        ]
    for layer in layers3d:
        check_layer_forward(layer, (1, 2, 10, 10, 10))

    # test ceil_mode
    x = mx.nd.zeros((2, 2, 10, 10))

    layer = nn.MaxPool2D(3, ceil_mode=False)
    layer.collect_params().initialize()
    assert layer(x).shape == (2, 2, 3, 3)

    layer = nn.MaxPool2D(3, ceil_mode=True)
    layer.collect_params().initialize()
    assert layer(x).shape == (2, 2, 4, 4)
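The two ceil_mode assertions follow from the standard pooling shape rule: with pool size 3 and strides defaulting to the pool size, each axis of length 10 yields floor((10 - 3) / 3) + 1 = 3 outputs, or ceil((10 - 3) / 3) + 1 = 4 with ceil_mode=True. A quick self-contained check of that arithmetic in plain Python:

import math

def pooled_len(n, k, s, ceil_mode=False):
    # Output length of an unpadded pooling window of size k with stride s over n elements
    f = math.ceil if ceil_mode else math.floor
    return f((n - k) / s) + 1

assert pooled_len(10, 3, 3) == 3                   # the (2, 2, 3, 3) case
assert pooled_len(10, 3, 3, ceil_mode=True) == 4   # the (2, 2, 4, 4) case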
Example #3
    def __init__(self, ctx=mx.cpu(), warmup=5, runs=25, inputs=None):
        # Set the default Inputs
        default_parameters = {
            "data": (32, 3, 256),
            "data_initializer": nd.normal,
            "layout": "NCW",
            "run_backward": True,
            "dtype": "float32"
        }

        super().__init__(ctx=ctx,
                         warmup=warmup,
                         runs=runs,
                         default_parameters=default_parameters,
                         custom_parameters=inputs)

        self.data = get_mx_ndarray(ctx=self.ctx,
                                   in_tensor=self.inputs["data"],
                                   dtype=self.inputs["dtype"],
                                   initializer=self.inputs["data_initializer"],
                                   attach_grad=self.inputs["run_backward"])

        self.block = nn.GlobalAvgPool1D(layout=self.inputs["layout"])
        self.block.initialize(ctx=self.ctx)
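A standalone sketch of the operation this benchmark times, one forward/backward pass through the block (shapes mirror the default_parameters above; assumes only mxnet):

import mxnet as mx
from mxnet import nd, autograd
from mxnet.gluon import nn

block = nn.GlobalAvgPool1D(layout="NCW")
block.initialize(ctx=mx.cpu())

data = nd.normal(shape=(32, 3, 256))  # matches the default "data" shape
data.attach_grad()                    # matches run_backward=True
with autograd.record():
    out = block(data)
out.backward()
print(out.shape, data.grad.shape)     # (32, 3, 1) (32, 3, 256)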
Example #4
    def __init__(self, num_classes, pretrained=True):
        super(DLA_IDL, self).__init__()
        model_name = "ResNet50_v2"
        backbone = get_model(model_name, pretrained=pretrained)
        backbone.collect_params().setattr("lr_mult", 0.1)

        #for ind,feat in enumerate(backbone.features):
        #    print(feat.name, ind)
        self.output_inds = [5, 6, 7, 10]
        self.backbone = backbone.features[0:11]

        self.agg_blocks = nn.Sequential()
        self.agg_blocks.add(agg_block(256, 512), agg_block(512, 1024),
                            agg_block(1024, 2048))

        self.output = nn.Sequential()
        self.output.add(
            nn.GlobalAvgPool1D(),
            nn.Dense(num_classes),
        )

        self.agg_blocks.initialize(mx.init.Xavier())
        self.output.initialize(mx.init.Xavier())
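One caveat worth flagging: nn.GlobalAvgPool1D expects 3-D NCW input, so the head above assumes the aggregation blocks emit 3-D tensors; an NCHW ResNet feature map would need its spatial axes flattened first (or nn.GlobalAvgPool2D instead). A minimal sketch of the head in isolation, with illustrative shapes:

import mxnet as mx
from mxnet.gluon import nn

head = nn.Sequential()
head.add(nn.GlobalAvgPool1D(), nn.Dense(10))  # 10 classes, hypothetical
head.initialize(mx.init.Xavier())

feats = mx.nd.random.uniform(shape=(2, 2048, 49))  # e.g. a 7x7 grid flattened to W=49
print(head(feats).shape)                           # (2, 10); Dense flattens the (2, 2048, 1) pool output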
Example #5
 def __init__(self,
              block,
              layers,
              cardinality=1,
              bottleneck_width=64,
              classes=1000,
              dilated=False,
              dilation=1,
              norm_layer=BatchNorm,
              norm_kwargs=None,
              last_gamma=False,
              deep_stem=False,
              stem_width=32,
              avg_down=False,
              final_drop=0.0,
              use_global_stats=False,
              name_prefix='',
              dropblock_prob=0,
              input_size=224,
              use_splat=False,
              radix=2,
              avd=False,
              avd_first=False,
              split_drop_ratio=0,
              in_channels=3):
     self.cardinality = cardinality
     self.bottleneck_width = bottleneck_width
     self.inplanes = stem_width * 2 if deep_stem else 64
     self.radix = radix
     self.split_drop_ratio = split_drop_ratio
     self.avd_first = avd_first
     super(ResNet, self).__init__(prefix=name_prefix)
     norm_kwargs = norm_kwargs if norm_kwargs is not None else {}
     if use_global_stats:
         norm_kwargs['use_global_stats'] = True
     self.norm_kwargs = norm_kwargs
     with self.name_scope():
         if not deep_stem:
             self.conv1 = nn.Conv1D(channels=64,
                                    kernel_size=7,
                                    strides=2,
                                    padding=3,
                                    use_bias=False,
                                    in_channels=in_channels)
         else:
             self.conv1 = nn.HybridSequential(prefix='conv1')
             self.conv1.add(
                 nn.Conv1D(channels=stem_width,
                           kernel_size=3,
                           strides=2,
                           padding=1,
                           use_bias=False,
                           in_channels=in_channels))
             self.conv1.add(
                 norm_layer(in_channels=stem_width, **norm_kwargs))
             self.conv1.add(nn.Activation('relu'))
             self.conv1.add(
                 nn.Conv1D(channels=stem_width,
                           kernel_size=3,
                           strides=1,
                           padding=1,
                           use_bias=False,
                           in_channels=stem_width))
             self.conv1.add(
                 norm_layer(in_channels=stem_width, **norm_kwargs))
             self.conv1.add(nn.Activation('relu'))
             self.conv1.add(
                 nn.Conv1D(channels=stem_width * 2,
                           kernel_size=3,
                           strides=1,
                           padding=1,
                           use_bias=False,
                           in_channels=stem_width))
         input_size = _update_input_size(input_size, 2)
         self.bn1 = norm_layer(
             in_channels=64 if not deep_stem else stem_width * 2,
             **norm_kwargs)
         self.relu = nn.Activation('relu')
         self.maxpool = nn.MaxPool1D(pool_size=3, strides=2, padding=1)
         input_size = _update_input_size(input_size, 2)
         self.layer1 = self._make_layer(1,
                                        block,
                                        64,
                                        layers[0],
                                        avg_down=avg_down,
                                        norm_layer=norm_layer,
                                        last_gamma=last_gamma,
                                        use_splat=use_splat,
                                        avd=avd)
         self.layer2 = self._make_layer(2,
                                        block,
                                        128,
                                        layers[1],
                                        strides=2,
                                        avg_down=avg_down,
                                        norm_layer=norm_layer,
                                        last_gamma=last_gamma,
                                        use_splat=use_splat,
                                        avd=avd)
         input_size = _update_input_size(input_size, 2)
         if dilated or dilation == 4:
             self.layer3 = self._make_layer(3,
                                            block,
                                            256,
                                            layers[2],
                                            strides=1,
                                            dilation=2,
                                            avg_down=avg_down,
                                            norm_layer=norm_layer,
                                            last_gamma=last_gamma,
                                            dropblock_prob=dropblock_prob,
                                            input_size=input_size,
                                            use_splat=use_splat,
                                            avd=avd)
             self.layer4 = self._make_layer(4,
                                            block,
                                            512,
                                            layers[3],
                                            strides=1,
                                            dilation=4,
                                            pre_dilation=2,
                                            avg_down=avg_down,
                                            norm_layer=norm_layer,
                                            last_gamma=last_gamma,
                                            dropblock_prob=dropblock_prob,
                                            input_size=input_size,
                                            use_splat=use_splat,
                                            avd=avd)
         elif dilation == 3:
             # special
             self.layer3 = self._make_layer(3,
                                            block,
                                            256,
                                            layers[2],
                                            strides=1,
                                            dilation=2,
                                            avg_down=avg_down,
                                            norm_layer=norm_layer,
                                            last_gamma=last_gamma,
                                            dropblock_prob=dropblock_prob,
                                            input_size=input_size,
                                            use_splat=use_splat,
                                            avd=avd)
             self.layer4 = self._make_layer(4,
                                            block,
                                            512,
                                            layers[3],
                                            strides=2,
                                            dilation=2,
                                            pre_dilation=2,
                                            avg_down=avg_down,
                                            norm_layer=norm_layer,
                                            last_gamma=last_gamma,
                                            dropblock_prob=dropblock_prob,
                                            input_size=input_size,
                                            use_splat=use_splat,
                                            avd=avd)
         elif dilation == 2:
             self.layer3 = self._make_layer(3,
                                            block,
                                            256,
                                            layers[2],
                                            strides=2,
                                            avg_down=avg_down,
                                            norm_layer=norm_layer,
                                            last_gamma=last_gamma,
                                            dropblock_prob=dropblock_prob,
                                            input_size=input_size,
                                            use_splat=use_splat,
                                            avd=avd)
             self.layer4 = self._make_layer(4,
                                            block,
                                            512,
                                            layers[3],
                                            strides=1,
                                            dilation=2,
                                            avg_down=avg_down,
                                            norm_layer=norm_layer,
                                            last_gamma=last_gamma,
                                            dropblock_prob=dropblock_prob,
                                            input_size=input_size,
                                            use_splat=use_splat,
                                            avd=avd)
         else:
             self.layer3 = self._make_layer(3,
                                            block,
                                            256,
                                            layers[2],
                                            strides=2,
                                            avg_down=avg_down,
                                            norm_layer=norm_layer,
                                            last_gamma=last_gamma,
                                            dropblock_prob=dropblock_prob,
                                            input_size=input_size,
                                            use_splat=use_splat,
                                            avd=avd)
             input_size = _update_input_size(input_size, 2)
             self.layer4 = self._make_layer(4,
                                            block,
                                            512,
                                            layers[3],
                                            strides=2,
                                            avg_down=avg_down,
                                            norm_layer=norm_layer,
                                            last_gamma=last_gamma,
                                            dropblock_prob=dropblock_prob,
                                            input_size=input_size,
                                            use_splat=use_splat,
                                            avd=avd)
             input_size = _update_input_size(input_size, 2)
         self.avgpool = nn.GlobalAvgPool1D()
         self.flat = nn.Flatten()
         self.drop = None
         if final_drop > 0.0:
             self.drop = nn.Dropout(final_drop)
         self.fc = nn.Dense(in_units=512 * block.expansion, units=classes)
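The snippet ends with the constructor; for orientation, a plausible hybrid_forward wiring these attributes together (a sketch of the usual ResNet data flow, not code from the source):

 def hybrid_forward(self, F, x):
     x = self.conv1(x)
     x = self.bn1(x)
     x = self.relu(x)
     x = self.maxpool(x)
     x = self.layer1(x)
     x = self.layer2(x)
     x = self.layer3(x)
     x = self.layer4(x)
     x = self.avgpool(x)   # GlobalAvgPool1D: (N, C, W) -> (N, C, 1)
     x = self.flat(x)      # (N, C, 1) -> (N, C)
     if self.drop is not None:
         x = self.drop(x)
     return self.fc(x)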