Example #1
    def make_layer(layer_cfg):
        nonlocal in_channels

        # Possible patterns:
        # ( 256, 3, {}) -> conv
        # ( 256,-2, {}) -> deconv
        # (None,-2, {}) -> bilinear interpolate
        # ('cat',[],{}) -> concat the subnetworks in the list
        #
        # You know it would have probably been simpler just to adopt a 'c' 'd' 'u' naming scheme.
        # Whatever, it's too late now.

        # There are four possible layer patterns, but the codebase is too large by now to give each one its own name..
        # So we inspect the leading values of the cfg to decide which kind of layer this is.

        if isinstance(layer_cfg[0], str):  # check whether the first value is a string
            layer_name = layer_cfg[0]

            if layer_name == 'cat':  # 'cat' is the only string case, so no separate error handling; merge the subnets
                # Build each subnetwork in the list into its own network
                nets = [make_net(in_channels, x) for x in layer_cfg[1]]
                # make_net returns (layers wrapped in a Sequential, output channels),
                # so group the net[0]s into a single Concat layer...
                layer = Concat([net[0] for net in nets], layer_cfg[2])
                # ...and sum the net[1]s to get the concatenated channel count
                num_channels = sum([net[1] for net in nets])
        else:
            num_channels = layer_cfg[0]
            kernel_size = layer_cfg[1]

            if kernel_size > 0:  # conv
                layer = nn.Conv2d(in_channels, num_channels, kernel_size,
                                  **layer_cfg[2])
            else:
                if num_channels is None:  # bilinear interpolate
                    layer = InterpolateModule(scale_factor=-kernel_size,
                                              mode='bilinear',
                                              align_corners=False,
                                              **layer_cfg[2])
                else:  # deconv
                    layer = nn.ConvTranspose2d(in_channels, num_channels,
                                               -kernel_size, **layer_cfg[2])

        in_channels = num_channels if num_channels is not None else in_channels

        # Don't return a ReLU layer if we're doing an upsample. This probably doesn't affect anything
        # output-wise, but there's no need to go through a ReLU here.
        # Commented out for backwards compatibility with previous models
        # if num_channels is None:
        #     return [layer]
        # else:
        # Append a ReLU after the layer for now; it is dropped below when the net is returned
        return [layer, nn.ReLU(inplace=True)]
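The snippet relies on InterpolateModule, which is not a stock PyTorch layer. In YOLACT-style code it is a thin nn.Module wrapper around F.interpolate so that an upsampling step can sit inside an nn.Sequential. A minimal sketch, assuming the wrapper simply stores its arguments and forwards them (the class in the original repo may differ in detail):

    import torch.nn as nn
    import torch.nn.functional as F

    class InterpolateModule(nn.Module):
        """Wraps F.interpolate so it can be placed inside nn.Sequential."""

        def __init__(self, *args, **kwargs):
            super().__init__()
            self.args = args
            self.kwargs = kwargs

        def forward(self, x):
            # Forward the stored arguments (size / scale_factor / mode / align_corners)
            return F.interpolate(x, *self.args, **self.kwargs)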
Example #2
    def make_layer(layer_cfg):
        nonlocal in_channels

        # Possible patterns:
        # ( 256, 3, {}) -> conv
        # ( 256,-2, {}) -> deconv
        # (None,-2, {}) -> bilinear interpolate
        # ('cat',[],{}) -> concat the subnetworks in the list
        #
        # You know it would have probably been simpler just to adopt a 'c' 'd' 'u' naming scheme.
        # Whatever, it's too late now.
        if isinstance(layer_cfg[0], str):
            layer_name = layer_cfg[0]

            if layer_name == 'cat':
                nets = [make_net(in_channels, x) for x in layer_cfg[1]]
                layer = Concat([net[0] for net in nets], layer_cfg[2])
                num_channels = sum([net[1] for net in nets])
        else:
            num_channels = layer_cfg[0]
            kernel_size = layer_cfg[1]

            if kernel_size > 0:
                layer = nn.Conv2d(in_channels, num_channels, kernel_size,
                                  **layer_cfg[2])
            else:
                if num_channels is None:
                    layer = InterpolateModule(scale_factor=-kernel_size,
                                              mode='bilinear',
                                              align_corners=False,
                                              **layer_cfg[2])
                else:
                    layer = nn.ConvTranspose2d(in_channels, num_channels,
                                               -kernel_size, **layer_cfg[2])

        in_channels = num_channels if num_channels is not None else in_channels

        # Don't return a ReLU layer if we're doing an upsample. This probably doesn't affect anything
        # output-wise, but there's no need to go through a ReLU here.
        # Commented out for backwards compatibility with previous models
        if num_channels is None:
            return [layer]
        else:
            # return [layer, nn.BatchNorm2d(in_channels) ,nn.ReLU(inplace=True)]
            return [
                layer,
                nn.GroupNorm(32, in_channels),
                nn.ReLU(inplace=True)
            ]
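Example #2 differs from Example #1 only in its tail: upsampling layers are returned bare, and conv/deconv layers get nn.GroupNorm(32, in_channels) before the ReLU. The Concat helper used in the 'cat' branch is also not shown above; a plausible sketch, assuming it runs every sub-network on the same input and concatenates along the channel dimension, with extra_params standing in for the kwargs passed via layer_cfg[2]:

    import torch
    import torch.nn as nn

    class Concat(nn.Module):
        """Runs several sub-networks on one input and concatenates their outputs."""

        def __init__(self, nets, extra_params):
            super().__init__()
            self.nets = nn.ModuleList(nets)
            self.extra_params = extra_params

        def forward(self, x):
            # dim=1 concatenates along channels, matching the summed num_channels above
            return torch.cat([net(x) for net in self.nets], dim=1, **self.extra_params)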
Example #3
    def forward(self, fpn_outs):
        if self.upsample_layer is None:
            final_size = (fpn_outs[0].size(2), fpn_outs[0].size(3))
            self.upsample_layer = InterpolateModule(size=final_size,
                                                    mode='bilinear',
                                                    align_corners=False)

        fusion_maps = []
        for i in range(len(fpn_outs)):
            out = self.layer_modules[i](fpn_outs[i])
            out = self.upsample_layer(out)

            fusion_maps.append(out)

        fusion_out = sum(fusion_maps)

        return fusion_out
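This forward assumes the enclosing module defines layer_modules (one projection per FPN level) and sets self.upsample_layer = None in __init__, so the interpolation target size can be taken lazily from the first feature map. A minimal sketch of such an __init__; the class name and channel arguments here are hypothetical, not from the original repo:

    import torch.nn as nn

    class FpnFusion(nn.Module):  # hypothetical name for the enclosing class
        def __init__(self, in_channels_list, out_channels=256):
            super().__init__()
            # One 1x1 conv per FPN level, projecting to a common channel count
            self.layer_modules = nn.ModuleList([
                nn.Conv2d(c, out_channels, kernel_size=1)
                for c in in_channels_list
            ])
            # Built lazily in forward() once the target spatial size is known
            self.upsample_layer = None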
Example #4
    def make_layer(layer_cfg):
    # nonlocal marks in_channels as belonging to the enclosing scope, shared with the outer function
        nonlocal in_channels
        
        # Possible patterns:
        # ( 256, 3, {}) -> conv
        # ( 256,-2, {}) -> deconv
        # (None,-2, {}) -> bilinear interpolate
        # ('cat',[],{}) -> concat the subnetworks in the list
        #
        # You know it would have probably been simpler just to adopt a 'c' 'd' 'u' naming scheme.
        # Whatever, it's too late now.
        if isinstance(layer_cfg[0], str):
            layer_name = layer_cfg[0]

            if layer_name == 'cat':
                # This branch appears never to be hit in practice
                print("make_net =", layer_cfg[1], "\n")
                nets = [make_net(in_channels, x) for x in layer_cfg[1]]
                layer = Concat([net[0] for net in nets], layer_cfg[2])
                num_channels = sum([net[1] for net in nets])
        else:
            num_channels = layer_cfg[0]
            kernel_size = layer_cfg[1]

            if kernel_size > 0:
                # Difference between *args and **kwargs, both variadic parameters in Python:
                # *args packs any number of positional arguments into a tuple
                # **kwargs packs keyword arguments into a dict
                layer = nn.Conv2d(in_channels, num_channels, kernel_size, **layer_cfg[2])
            else:
                if num_channels is None:
                    layer = InterpolateModule(scale_factor=-kernel_size, mode='bilinear', align_corners=False, **layer_cfg[2])
                else:
                    layer = nn.ConvTranspose2d(in_channels, num_channels, -kernel_size, **layer_cfg[2])
        
        in_channels = num_channels if num_channels is not None else in_channels

        # Don't return a ReLU layer if we're doing an upsample. This probably doesn't affect anything
        # output-wise, but there's no need to go through a ReLU here.
        # Commented out for backwards compatibility with previous models
        # if num_channels is None:
        #     return [layer]
        # else:
        return [layer, nn.ReLU(inplace=True)]
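To see how these patterns compose, here is a usage sketch: a protonet-style cfg mixing 3x3 convs, one bilinear upsample, and a final 1x1 conv, fed to the enclosing make_net. The (net, channels) return shape follows the comments in Example #1, but the exact cfg values are illustrative, not taken from the original:

    # Each tuple matches the patterns documented in make_layer:
    # (out_channels, kernel_size, kwargs) -> conv
    # (None, -scale, {})                  -> bilinear upsample
    proto_cfg = [
        (256, 3, {'padding': 1}),  # 3x3 conv, spatial size preserved
        (256, 3, {'padding': 1}),
        (None, -2, {}),            # 2x bilinear upsampling
        (256, 3, {'padding': 1}),
        (32, 1, {}),               # 1x1 conv down to 32 channels
    ]

    proto_net, out_channels = make_net(256, proto_cfg)
    # proto_net is an nn.Sequential; out_channels == 32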