Ejemplo n.º 1
0
    def __init__(self, channel_in, height, width, kernel_size=3, dilation=(1, 1, 1), shuffle=True):
        '''
        Strided DICE block: downsamples the input by 2 and doubles the channel
        count by concatenating a strided depth-wise branch (left) with an
        average-pooled DICE_HC branch (right).

        :param channel_in: # of input channels (the block outputs 2*channel_in channels)
        :param height: Height of the input volume
        :param width: Width of the input volume
        :param kernel_size: Kernel size. We use the same kernel size of 3 for each dimension. Larger kernel size would increase the FLOPs and Parameters
        :param dilation: Sequence with 3 elements, each element corresponding to a dilation rate for each dimension.
        :param shuffle: Shuffle the feature maps in the volume-wise separable convolutions
        '''
        super().__init__()
        # Immutable tuple default avoids the shared-mutable-default pitfall;
        # normalize so callers may pass any 3-element sequence (list or tuple).
        dilation = list(dilation)
        assert len(dilation) == 3

        # Left branch: stride-2 depth-wise conv followed by a 1x1 point-wise CBR.
        self.left_layer = nn.Sequential(CBR(channel_in, channel_in, 3, stride=2, groups=channel_in),
                                        CBR(channel_in, channel_in, 1, 1)
                                        )
        # Right branch: stride-2 average pooling, then a DICE_HC unit, then a 1x1 CBR.
        self.right_layer = nn.Sequential(
            nn.AvgPool2d(kernel_size=3, padding=1, stride=2),
            DICE_HC(channel_in, channel_in, height, width, kernel_size=kernel_size, dilation=dilation,
                    shuffle=shuffle),
            CBR(channel_in, channel_in, 1, 1)
        )
        # Channel shuffle across the two concatenated branches.
        self.shuffle = Shuffle(groups=2)

        self.width = width
        self.height = height
        self.channel_in = channel_in
        self.channel_out = 2 * channel_in
        self.ksize = kernel_size
    def __init__(self, inplanes, groups=2):
        '''
        Strided shuffle block: two parallel stride-2 branches whose outputs are
        combined, yielding 2*inplanes output channels.

        :param inplanes: # of input channels
        :param groups: # of groups used for the channel shuffle bookkeeping
        '''
        super(StridedShuffleBlock, self).__init__()

        self.inplanes = inplanes
        self.groups = groups
        self.out_size = 2 * inplanes

        # Left branch: a single stride-2 depth-wise separable convolution.
        self.layer_left = DWSepConv(inplanes, inplanes, stride=2)
        # Right branch: 1x1 CBR followed by the same strided depth-wise separable conv.
        self.layer_right = nn.Sequential(
            CBR(inplanes, inplanes, 1, 1),
            DWSepConv(inplanes, inplanes, stride=2),
        )
        # Shuffle channels across the two concatenated branches.
        self.shuffle = Shuffle(groups=2)
Ejemplo n.º 3
0
    def __init__(self, inplanes, outplanes, height, width, kernel_s, c_tag=0.5, groups=2):
        '''
        Channel-split shuffle block built around a DICE unit.

        :param inplanes: # of input channels
        :param outplanes: # of output channels
        :param height: Height of the input volume
        :param width: Width of the input volume
        :param kernel_s: Kernel size forwarded to the DICE unit
        :param c_tag: Fraction of channels routed through the identity (left) part
        :param groups: # of groups used for the channel shuffle bookkeeping
        '''
        super(ShuffleDICEBlock, self).__init__()
        # Split the input channels: the left part passes through untouched,
        # the right part is projected and transformed by a DICE unit.
        self.left_part = round(c_tag * inplanes)
        self.right_part_in = inplanes - self.left_part
        self.right_part_out = outplanes - self.left_part

        self.layer_right = nn.Sequential(
            CBR(self.right_part_in, self.right_part_out, 1, 1),
            DICE(
                channel_in=self.right_part_out,
                channel_out=self.right_part_out,
                height=height,
                width=width,
                kernel_size=kernel_s,
            ),
        )

        self.shuffle = Shuffle(groups=2)
        self.groups = groups
        self.outplanes = outplanes
        self.inplanes = inplanes
    def __init__(self, inplanes, outplanes, c_tag=0.5, groups=2):
        '''
        Channel-split shuffle block built around a depth-wise separable conv.

        :param inplanes: # of input channels
        :param outplanes: # of output channels
        :param c_tag: Fraction of channels routed through the identity (left) part
        :param groups: # of groups used for the channel shuffle bookkeeping
        '''
        super(ShuffleBlock, self).__init__()
        # Split the input channels: the left part passes through untouched,
        # the right part is projected and transformed.
        self.left_part = round(c_tag * inplanes)
        self.right_part_in = inplanes - self.left_part
        self.right_part_out = outplanes - self.left_part

        self.layer_right = nn.Sequential(
            CBR(self.right_part_in, self.right_part_out, 1, 1),
            DWSepConv(
                channel_in=self.right_part_out,
                channel_out=self.right_part_out,
            ),
        )

        self.shuffle = Shuffle(groups=2)
        self.groups = groups
        self.outplanes = outplanes
        self.inplanes = inplanes
Ejemplo n.º 5
0
    def __init__(self, channel_in, channel_out, height, width, kernel_size=3, dilation=(1, 1, 1), shuffle=True):
        '''
        DICE unit: fuses dimension-wise (channel- and height-wise) depth-wise
        convolutions, then gates and projects to the output channel count.

        :param channel_in: # of input channels
        :param channel_out: # of output channels
        :param height: Height of the input volume
        :param width: Width of the input volume
        :param kernel_size: Kernel size. We use the same kernel size of 3 for each dimension. Larger kernel size would increase the FLOPs and Parameters
        :param dilation: Sequence with 3 elements, each element corresponding to a dilation rate for each dimension.
        :param shuffle: Shuffle the feature maps in the volume-wise separable convolutions
        '''
        super().__init__()
        # Immutable tuple default avoids the shared-mutable-default pitfall
        # (the original list default was also stored on self.dilation, so one
        # instance mutating it would leak into every later instantiation).
        dilation = list(dilation)
        assert len(dilation) == 3
        # "Same" padding for a dilated kernel, per branch.
        padding_1 = int((kernel_size - 1) / 2) * dilation[0]
        padding_3 = int((kernel_size - 1) / 2) * dilation[2]
        # Channel-wise (depth-wise) convolution branch.
        self.conv_channel = nn.Conv2d(channel_in, channel_in, kernel_size=kernel_size, stride=1, groups=channel_in,
                                      padding=padding_1, bias=False, dilation=dilation[0])
        # NOTE(review): the width-wise branch is intentionally disabled in this
        # variant; only the channel- and height-wise branches are fused below.
        # Height-wise branch: treats the height axis as the channel dimension.
        self.conv_height = nn.Conv2d(height, height, kernel_size=kernel_size, stride=1, groups=height,
                                     padding=padding_3, bias=False, dilation=dilation[2])

        # BN + activation over the concatenated branches, then a grouped 1x1
        # conv to fuse 2*channel_in maps back down to channel_in.
        self.br_act = BR(2 * channel_in)
        self.weight_avg_layer = CBR(2 * channel_in, channel_in, kSize=1, stride=1, groups=channel_in)

        # Project from channel_in to channel_out with the largest valid group count.
        groups_proj = math.gcd(channel_in, channel_out)
        self.proj_layer = CBR(channel_in, channel_out, kSize=3, stride=1, groups=groups_proj)
        # Squeeze-and-excitation style gate: global pool -> bottleneck -> sigmoid.
        self.linear_comb_layer = nn.Sequential(
            nn.AdaptiveAvgPool2d(output_size=1),
            nn.Conv2d(channel_in, channel_in // 4, kernel_size=1, bias=False),
            nn.ReLU(inplace=True),
            nn.Conv2d(channel_in // 4, channel_out, kernel_size=1, bias=False),
            nn.Sigmoid()
        )

        self.vol_shuffle = Shuffle(2)

        self.width = width
        self.height = height
        self.channel_in = channel_in
        self.channel_out = channel_out
        self.shuffle = shuffle
        self.ksize = kernel_size
        self.dilation = dilation
Ejemplo n.º 6
0
    def __init__(self,
                 in_planes,
                 proj_planes,
                 out_planes,
                 scales=(2.0, 1.5, 1.0, 0.5, 0.1),
                 last_layer_br=True):
        '''
        Efficient pyramid pooling: project, run one depth-wise conv per scale,
        then merge all scale outputs back to out_planes channels.

        :param in_planes: # of input channels
        :param proj_planes: # of channels after the initial 1x1 projection
        :param out_planes: # of output channels
        :param scales: Pyramid scale factors; stored sorted in descending order
        :param last_layer_br: Apply BN+activation after the final 1x1 conv
        '''
        super(EfficientPyrPool, self).__init__()
        self.stages = nn.ModuleList()
        # Sort a copy: the original `scales.sort(reverse=True)` mutated the
        # shared default list in place (and any list passed by the caller),
        # corrupting later instantiations.
        scales = sorted(scales, reverse=True)

        self.projection_layer = CBR(in_planes, proj_planes, 1, 1)
        # One depth-wise 3x3 conv per pyramid scale.
        for _ in scales:
            self.stages.append(
                nn.Conv2d(proj_planes,
                          proj_planes,
                          kernel_size=3,
                          stride=1,
                          padding=1,
                          bias=False,
                          groups=proj_planes))

        self.merge_layer = nn.Sequential(
            # perform one big batch normalization instead of p small ones
            BR(proj_planes * len(scales)),
            Shuffle(groups=len(scales)),
            CBR(proj_planes * len(scales),
                proj_planes,
                3,
                1,
                groups=proj_planes),
            # Bias is redundant when a BatchNorm (inside BR) follows.
            nn.Conv2d(proj_planes,
                      out_planes,
                      kernel_size=1,
                      stride=1,
                      bias=not last_layer_br),
        )
        if last_layer_br:
            self.br = BR(out_planes)
        self.last_layer_br = last_layer_br
        self.scales = scales