Example #1
    def __init__(self, top_down, lateral, post, to_layer, fusion_method=None):
        super(JunctionBlock, self).__init__()

        self.from_layer = {}

        self.to_layer = to_layer
        top_down_ = copy.copy(top_down)
        lateral_ = copy.copy(lateral)
        self.fusion_method = fusion_method

        self.top_down_block = []
        if top_down_:
            self.from_layer['top_down'] = top_down_.pop('from_layer')
            if 'trans' in top_down_:
                self.top_down_block.append(build_module(top_down_['trans']))
            if 'upsample' in top_down_:
                self.top_down_block.append(build_module(top_down_['upsample']))
        self.top_down_block = nn.Sequential(*self.top_down_block)

        if lateral_:
            self.from_layer['lateral'] = lateral_.pop('from_layer')
            if lateral_:
                self.lateral_block = build_module(lateral_)
            else:
                self.lateral_block = nn.Sequential()
        else:
            self.lateral_block = nn.Sequential()

        if post:
            self.post_block = build_module(post)
        else:
            self.post_block = nn.Sequential()
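
A minimal sketch of a config the constructor above could consume. The keys (from_layer, trans, upsample, to_layer, fusion_method) are taken from the snippet; the concrete 'type' values and channel counts are assumptions about whatever registry build_module dispatches on, not part of the example:

# Hypothetical JunctionBlock config; the module 'type' names depend on the registry.
junction_cfg = dict(
    top_down=dict(
        from_layer='c5',                              # popped into self.from_layer['top_down']
        trans=dict(type='ConvModule', in_channels=2048,
                   out_channels=256, kernel_size=1),  # optional transition conv
        upsample=dict(type='Upsample', scale_factor=2, mode='nearest'),
    ),
    lateral=dict(
        from_layer='c4',                              # popped into self.from_layer['lateral']
        type='ConvModule', in_channels=1024, out_channels=256, kernel_size=1,
    ),
    post=None,
    to_layer='p4',
    fusion_method='add',
)
block = JunctionBlock(**junction_cfg)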
Example #2
    def __init__(self, F, input_size, output_size, stn):
        super(TPS_STN, self).__init__()

        self.F = F
        self.input_size = input_size
        self.output_size = output_size

        self.feature_extractor = build_feature_extractor(stn['feature_extractor'])
        self.pool = build_torch_nn(stn['pool'])
        heads = []
        for head in stn['head']:
            heads.append(build_module(head))
        self.heads = nn.Sequential(*heads)

        self.grid_generator = GridGenerator(F, output_size)

        # Init last fc in heads
        last_fc = heads[-1].fc
        last_fc.weight.data.fill_(0)
        """ see RARE paper Fig. 6 (a) """
        ctrl_pts_x = np.linspace(-1.0, 1.0, int(F / 2))
        ctrl_pts_y_top = np.linspace(0.0, -1.0, num=int(F / 2))
        ctrl_pts_y_bottom = np.linspace(1.0, 0.0, num=int(F / 2))
        ctrl_pts_top = np.stack([ctrl_pts_x, ctrl_pts_y_top], axis=1)
        ctrl_pts_bottom = np.stack([ctrl_pts_x, ctrl_pts_y_bottom], axis=1)
        initial_bias = np.concatenate([ctrl_pts_top, ctrl_pts_bottom], axis=0)
        last_fc.bias.data = torch.from_numpy(initial_bias).float().view(-1)
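
The bias initialization above lays the F fiducial points out along the top and bottom image borders (RARE, Fig. 6(a)). A standalone sketch of the same computation, assuming F = 20 as in the RARE paper:

import numpy as np

F = 20
ctrl_pts_x = np.linspace(-1.0, 1.0, F // 2)
ctrl_pts_y_top = np.linspace(0.0, -1.0, num=F // 2)
ctrl_pts_y_bottom = np.linspace(1.0, 0.0, num=F // 2)
ctrl_pts_top = np.stack([ctrl_pts_x, ctrl_pts_y_top], axis=1)        # F/2 points on the top edge
ctrl_pts_bottom = np.stack([ctrl_pts_x, ctrl_pts_y_bottom], axis=1)  # F/2 points on the bottom edge
initial_bias = np.concatenate([ctrl_pts_top, ctrl_pts_bottom], axis=0)
print(initial_bias.shape)  # (20, 2): F (x, y) control points, flattened into the last fc bias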
Example #3
    def __init__(self, layers, zero_init_residual=False,
                 groups=1, width_per_group=64, norm_layer=None):
        super(GResNet, self).__init__()

        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        self._norm_layer = norm_layer

        self.dilation = 1
        self.groups = groups
        self.base_width = width_per_group

        self.layers = nn.ModuleList()
        stage_layers = []
        for layer_name, layer_cfg in layers:
            if layer_name == 'conv':
                layer = build_module(layer_cfg)
                self.inplanes = layer_cfg['out_channels']
            elif layer_name == 'pool':
                layer = build_torch_nn(layer_cfg)
            elif layer_name == 'block':
                layer = self._make_layer(**layer_cfg)
            else:
                raise ValueError('Unknown layer name {}'.format(layer_name))
            stride = layer_cfg.get('stride', 1)
            max_stride = stride if isinstance(stride, int) else max(stride)
            if max_stride > 1:
                self.layers.append(nn.Sequential(*stage_layers))
                stage_layers = []
            stage_layers.append(layer)
        self.layers.append(nn.Sequential(*stage_layers))

        logger.info('GResNet init weights')
        init_weights(self.modules())
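
The loop above groups consecutive layers into stages: any layer whose stride exceeds 1 first closes the stage collected so far into an nn.Sequential and then opens the next one. A hypothetical layers spec illustrating the grouping; the 'type' names and the keyword arguments of _make_layer are assumptions about the surrounding codebase:

# Hypothetical layer spec; with these strides the loop yields three stages:
#   stage 0: [conv]          (stride-1 layers collected so far)
#   stage 1: [pool, block]   (the stride-2 pool flushes stage 0, then joins stage 1)
#   stage 2: [block]         (the stride-2 block flushes stage 1, then forms stage 2)
layers = [
    ('conv',  dict(type='ConvModule', in_channels=3, out_channels=64,
                   kernel_size=3, stride=1, padding=1)),
    ('pool',  dict(type='MaxPool2d', kernel_size=2, stride=2)),
    ('block', dict(block=BasicBlock, planes=64, blocks=2, stride=1)),
    ('block', dict(block=BasicBlock, planes=128, blocks=2, stride=2)),
]
model = GResNet(layers)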
Example #4
    def __init__(self, cfg):
        super(SPN, self).__init__()
        self.body = build_feature_extractor(cfg['feature_extractor'])
        self.pool = build_torch_nn(cfg['pool'])
        heads = []
        for head in cfg['head']:
            heads.append(build_module(head))
        self.head = nn.Sequential(*heads)
Example #5
    def __init__(self,
                 layers,
                 zero_init_residual=False,
                 groups=1,
                 width_per_group=64,
                 norm_layer=None):
        super(GResNet, self).__init__()

        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        self._norm_layer = norm_layer

        self.dilation = 1
        self.groups = groups
        self.base_width = width_per_group

        self.layers = nn.ModuleList()
        stage_layers = []
        for layer_name, layer_cfg in layers:
            if layer_name == 'conv':
                layer = build_module(layer_cfg)
                self.inplanes = layer_cfg['out_channels']
            elif layer_name == 'pool':
                layer = build_torch_nn(layer_cfg)
            elif layer_name == 'block':
                layer = self._make_layer(**layer_cfg)
            else:
                raise ValueError('Unknown layer name {}'.format(layer_name))
            stride = layer_cfg.get('stride', 1)
            max_stride = stride if isinstance(stride, int) else max(stride)
            if max_stride > 1:
                self.layers.append(nn.Sequential(*stage_layers))
                stage_layers = []
            stage_layers.append(layer)
        self.layers.append(nn.Sequential(*stage_layers))

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight,
                                        mode='fan_in',
                                        nonlinearity='leaky_relu')
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)

        # Zero-initialize the last BN in each residual branch,
        # so that the residual branch starts with zeros, and each residual block behaves like an identity.
        # This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, Bottleneck):
                    nn.init.constant_(m.bn3.weight, 0)
                elif isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)
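
A minimal, runnable illustration of why zeroing the last BN weight makes the residual branch contribute nothing at initialization (toy shapes chosen for the demo; not part of the snippet above):

import torch
import torch.nn as nn

# Toy residual branch ending in a zero-initialized BatchNorm, as done above.
conv = nn.Conv2d(8, 8, kernel_size=3, padding=1, bias=False)
bn = nn.BatchNorm2d(8)
nn.init.constant_(bn.weight, 0)
nn.init.constant_(bn.bias, 0)

x = torch.randn(1, 8, 16, 16)
residual = bn(conv(x))
print(torch.allclose(residual, torch.zeros_like(residual)))  # True: the residual branch adds nothing at init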
Example #6
    def __init__(self,
                 feat,
                 hidden,
                 fusion_method='add',
                 post=None,
                 post_activation='softmax'):
        super(CellAttentionBlock, self).__init__()

        feat_ = feat.copy()
        self.feat_from = feat_.pop('from_layer')
        self.feat_block = build_module(feat_)
        self.hidden_block = build_module(hidden)

        self.fusion_method = fusion_method
        self.activate = post_activation

        if post is not None:
            self.post_block = build_module(post)
        else:
            self.post_block = nn.Sequential()
Example #7
    def __init__(self,
                 from_layer,
                 generator,
                 ):
        super(Head, self).__init__()

        self.from_layer = from_layer
        self.generator = build_module(generator)

        logger.info('Head init weights')
        init_weights(self.modules())
Example #8
    def __init__(
        self,
        method,
        from_layers,
        feat_strides,
        in_channels_list,
        out_channels_list,
        upsample,
        conv_cfg=dict(type='Conv'),
        norm_cfg=dict(type='BN'),
        activation='relu',
        inplace=True,
        common_stride=4,
    ):
        super(FusionBlock, self).__init__()

        assert method in ('add', 'concat')
        self.method = method
        self.from_layers = from_layers

        assert len(in_channels_list) == len(out_channels_list)

        self.blocks = nn.ModuleList()
        for idx in range(len(from_layers)):
            in_channels = in_channels_list[idx]
            out_channels = out_channels_list[idx]
            feat_stride = feat_strides[idx]
            ups_num = int(
                max(1,
                    math.log2(feat_stride) - math.log2(common_stride)))
            head_ops = []
            for idx2 in range(ups_num):
                cur_in_channels = in_channels if idx2 == 0 else out_channels
                conv = ConvModule(
                    cur_in_channels,
                    out_channels,
                    kernel_size=3,
                    padding=1,
                    conv_cfg=conv_cfg,
                    norm_cfg=norm_cfg,
                    activation=activation,
                    inplace=inplace,
                )
                head_ops.append(conv)
                if int(feat_stride) != int(common_stride):
                    head_ops.append(build_module(upsample))
            self.blocks.append(nn.Sequential(*head_ops))
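
Worked through for an FPN-style setup with feat_strides = [4, 8, 16, 32] and common_stride = 4 (illustrative values, not fixed by the snippet): each branch gets ups_num 3x3 convs, each followed by an upsample unless the branch already sits at the common stride:

import math

common_stride = 4
for feat_stride in [4, 8, 16, 32]:
    ups_num = int(max(1, math.log2(feat_stride) - math.log2(common_stride)))
    n_upsample = 0 if feat_stride == common_stride else ups_num
    print(feat_stride, ups_num, n_upsample)
# stride  4 -> 1 conv, 0 upsamples (already at the common stride)
# stride  8 -> 1 conv, 1 upsample
# stride 16 -> 2 convs, 2 upsamples
# stride 32 -> 3 convs, 3 upsamples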
Example #9
    def __init__(self, layers):
        super(GVGG, self).__init__()

        self.layers = nn.ModuleList()
        stage_layers = []
        for layer_name, layer_cfg in layers:
            if layer_name == 'conv':
                layer = build_module(layer_cfg)
            elif layer_name == 'pool':
                layer = build_torch_nn(layer_cfg)
            else:
                raise ValueError('Unknown layer name {}'.format(layer_name))
            stride = layer_cfg.get('stride', 1)
            max_stride = stride if isinstance(stride, int) else max(stride)
            if max_stride > 1:
                self.layers.append(nn.Sequential(*stage_layers))
                stage_layers = []
            stage_layers.append(layer)
        self.layers.append(nn.Sequential(*stage_layers))

        init_weights(self.modules())
Example #10
    def __init__(self, layers):
        super(Feedforward, self).__init__()

        # Build each layer from its config and chain them in order
        self.layers = nn.Sequential(*[build_module(layer) for layer in layers])
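
A hypothetical layers list for this head; only the list-of-configs structure comes from the snippet, and the 'FCModule' type and its parameters are assumptions about what build_module resolves:

# Hypothetical config: two fully connected layers built from the registry.
feedforward_layers = [
    dict(type='FCModule', in_features=512, out_features=256, activation='relu'),
    dict(type='FCModule', in_features=256, out_features=37, activation=None),
]
model = Feedforward(feedforward_layers)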