Example #1
    def __init__(self, opt):
        super(NET, self).__init__()

        n_resblocks = opt.n_resblocks
        n_feats = opt.channels
        bias = opt.bias
        norm_type = opt.norm_type
        act_type = opt.act_type
        block_type = opt.block_type

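        # Architecture: a 5x5 conv head on the 4-channel input, a stack of RRDB
        # or Res blocks wrapped in a ShortcutBlock (global residual), and a 3x3
        # conv tail mapping the features to 3 output channels.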
        head = [common.ConvBlock(4, n_feats, 5, act_type=act_type, bias=True)]
        if block_type.lower() == 'rrdb':
            resblock = [common.RRDB(n_feats, n_feats, 3, 1, bias, norm_type,
                                    act_type, 0.2)
                        for _ in range(n_resblocks)]
        elif block_type.lower() == 'res':
            resblock = [common.ResBlock(n_feats, 3, norm_type, act_type,
                                        res_scale=1, bias=bias)
                        for _ in range(n_resblocks)]
        else:
            raise RuntimeError('block_type is not supported')

        resblock += [common.ConvBlock(n_feats, n_feats, 3, bias=True)]
        tail = [common.ConvBlock(n_feats, 3, 3, bias=True)]

        self.model = nn.Sequential(*head, common.ShortcutBlock(nn.Sequential(*resblock)), *tail)

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.xavier_normal_(m.weight)
                m.weight.requires_grad = True
                if m.bias is not None:
                    m.bias.data.zero_()
                    m.bias.requires_grad = True
Example #2
    def __init__(self, opt):
        super(green_res, self).__init__()
        sr_n_resblocks = opt.sr_n_resblocks
        dm_n_resblocks = opt.dm_n_resblocks
        sr_n_feats = opt.channels
        dm_n_feats = opt.channels
        scale = opt.scale

        denoise = opt.denoise
        block_type = opt.block_type
        act_type = opt.act_type
        bias = opt.bias
        norm_type = opt.norm_type
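        # green_res branch: a 5x5 conv head on a 2-channel input, two RRDB
        # blocks, a 3x3 conv, and a 2x upsampling tail producing a single-channel
        # output (presumably the green plane).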
        self.head = common.ConvBlock(2,
                                     dm_n_feats,
                                     5,
                                     act_type=act_type,
                                     bias=True)
        self.r1 = common.RRDB(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type,
                              act_type, 0.2)
        self.r2 = common.RRDB(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type,
                              act_type, 0.2)
        #self.r3 = common.RRDB2(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type, act_type, 0.2)
        #self.r4 = common.RRDB2(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type, act_type, 0.2)
        #self.r5 = common.RRDB2(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type, act_type, 0.2)
        #self.r6 = common.RRDB2(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type, act_type, 0.2)
        self.final = common.ConvBlock(dm_n_feats, dm_n_feats, 3, bias=bias)

        self.up = nn.Sequential(
            common.Upsampler(2, dm_n_feats, norm_type, act_type, bias=bias),
            common.ConvBlock(dm_n_feats, 1, 3, bias=True),
            nn.LeakyReLU(0.2, inplace=True))
Example #3
    def __init__(self, n_resblock=24, n_feats=256, scale=2, bias=True, norm_type=False,
                 act_type='prelu'):
        super(NET, self).__init__()

        self.scale = scale
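        # Single Sequential pipeline: a strided 3x3 conv on the 1-channel input,
        # a PixelShuffle(2) + conv stage, n_resblock ResBlocks, log2(scale)
        # PixelShuffle upsampling stages, and a final 3x3 conv to 3 channels.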
        m = [common.default_conv(1, n_feats, 3, stride=2)]
        m += [nn.PixelShuffle(2),
              common.ConvBlock(n_feats//4, n_feats, bias=True, act_type=act_type)
              ]

        m += [common.ResBlock(n_feats, 3, norm_type, act_type, res_scale=1, bias=bias)
                             for _ in range(n_resblock)]

        for _ in range(int(math.log(scale, 2))):
            m += [nn.PixelShuffle(2),
                  common.ConvBlock(n_feats//4, n_feats, bias=True, act_type=act_type)
                  ]

        m += [common.default_conv(n_feats, 3, 3)]

        self.model = nn.Sequential(*m)
        for m in self.modules():
            # pdb.set_trace()
            if isinstance(m, nn.Conv2d):
                # Xavier
                # nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                nn.init.xavier_normal_(m.weight)
                m.weight.requires_grad = True
                if m.bias is not None:
                    m.bias.data.zero_()
                    m.bias.requires_grad = True
Example #4
    def __init__(self, opt):
        super(NET, self).__init__()
        # parameter
        denoise = opt.denoise
        block_type = opt.block_type
        n_feats = opt.channels
        act_type = opt.act_type
        bias = opt.bias
        norm_type = opt.norm_type
        n_resblocks = opt.n_resblocks

        # architecture
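        # The head takes 5 input channels when denoise is set (one channel beyond
        # the 4-channel mosaic, presumably a noise map), otherwise 4; the body is
        # an RRDB or Res stack inside a ShortcutBlock, followed by 2x upsampling
        # to a 3-channel output.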
        if denoise:
            dm_head = [
                common.ConvBlock(5, n_feats, 5, act_type=act_type, bias=True)
            ]
        else:
            dm_head = [
                common.ConvBlock(4, n_feats, 5, act_type=act_type, bias=True)
            ]
        if block_type.lower() == 'rrdb':
            dm_resblock = [
                common.RRDB(n_feats, n_feats, 3, 1, bias, norm_type, act_type,
                            0.2) for _ in range(n_resblocks)
            ]
        elif block_type.lower() == 'res':
            dm_resblock = [
                common.ResBlock(n_feats,
                                3,
                                norm_type,
                                act_type,
                                res_scale=1,
                                bias=bias) for _ in range(n_resblocks)
            ]
        else:
            raise RuntimeError('block_type is not supported')

        dm_resblock += [common.ConvBlock(n_feats, n_feats, 3, bias=True)]
        m_dm_up = [
            common.Upsampler(2, n_feats, norm_type, act_type, bias=bias),
            common.ConvBlock(n_feats, 3, 3, bias=True)
        ]

        self.model_dm = nn.Sequential(
            *dm_head, common.ShortcutBlock(nn.Sequential(*dm_resblock)),
            *m_dm_up)

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.xavier_normal_(m.weight)
                m.weight.requires_grad = True
                if m.bias is not None:
                    m.bias.data.zero_()
                    m.bias.requires_grad = True
Example #5
    def __init__(self, opt):
        super(m_res, self).__init__()
        sr_n_resblocks = opt.sr_n_resblocks
        dm_n_resblocks = opt.dm_n_resblocks
        sr_n_feats = opt.channels
        dm_n_feats = opt.channels
        scale = opt.scale

        denoise = opt.denoise
        block_type = opt.block_type
        act_type = opt.act_type
        bias = opt.bias
        norm_type = opt.norm_type

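        # Six RRDB2 blocks followed by a 3x3 conv; how they are chained is left
        # to the forward pass (not shown here).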
        self.r1 = common.RRDB2(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type,
                               act_type, 0.2)
        self.r2 = common.RRDB2(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type,
                               act_type, 0.2)
        self.r3 = common.RRDB2(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type,
                               act_type, 0.2)
        self.r4 = common.RRDB2(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type,
                               act_type, 0.2)
        self.r5 = common.RRDB2(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type,
                               act_type, 0.2)
        self.r6 = common.RRDB2(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type,
                               act_type, 0.2)
        self.final = common.ConvBlock(dm_n_feats, dm_n_feats, 3, bias=bias)
Example #6
    def __init__(self, opt):
        super(NET, self).__init__()

        sr_n_resblocks = opt.sr_n_resblocks
        dm_n_resblocks = opt.dm_n_resblocks
        sr_n_feats = opt.channels
        dm_n_feats = opt.channels
        scale = opt.scale

        denoise = opt.denoise
        block_type = opt.block_type
        act_type = opt.act_type
        bias = opt.bias
        norm_type = opt.norm_type

        # define sr module
        if denoise:
            m_sr_head = [
                common.ConvBlock(5,
                                 sr_n_feats,
                                 5,
                                 act_type=act_type,
                                 bias=True)
            ]
        else:
            m_sr_head = [
                common.ConvBlock(4,
                                 sr_n_feats,
                                 5,
                                 act_type=act_type,
                                 bias=True)
            ]
        if block_type.lower() == 'rrdb':
            m_sr_resblock = [
                common.RRDB(sr_n_feats, sr_n_feats, 3, 1, bias, norm_type,
                            act_type, 0.2) for _ in range(sr_n_resblocks)
            ]
        elif block_type.lower() == 'dudb':
            m_sr_resblock = [
                common.DUDB(sr_n_feats, 3, 1, bias, norm_type, act_type, 0.2)
                for _ in range(sr_n_resblocks)
            ]
        elif block_type.lower() == 'res':
            m_sr_resblock = [
                common.ResBlock(sr_n_feats,
                                3,
                                norm_type,
                                act_type,
                                res_scale=1,
                                bias=bias) for _ in range(sr_n_resblocks)
            ]
        else:
            raise RuntimeError('block_type is not supported')

        m_sr_resblock += [
            common.ConvBlock(sr_n_feats, sr_n_feats, 3, bias=bias)
        ]
        m_sr_up = [
            common.Upsampler(scale, sr_n_feats, norm_type, act_type,
                             bias=bias),
            common.ConvBlock(sr_n_feats, 4, 3, bias=True)
        ]

        # branch for sr_raw output
        m_sr_tail = [nn.PixelShuffle(2)]

        # define demosaick module
        m_dm_head = [
            common.ConvBlock(4, dm_n_feats, 5, act_type=act_type, bias=True)
        ]

        if block_type.lower() == 'rrdb':
            m_dm_resblock = [
                common.RRDB(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type,
                            act_type, 0.2) for _ in range(dm_n_resblocks)
            ]
        elif block_type.lower() == 'dudb':
            m_dm_resblock = [
                common.DUDB(dm_n_feats, 3, 1, bias, norm_type, act_type, 0.2)
                for _ in range(dm_n_resblocks)
            ]
        elif block_type.lower() == 'res':
            m_dm_resblock = [
                common.ResBlock(dm_n_feats,
                                3,
                                norm_type,
                                act_type,
                                res_scale=1,
                                bias=bias) for _ in range(dm_n_resblocks)
            ]
        else:
            raise RuntimeError('block_type is not supported')

        m_dm_resblock += [
            common.ConvBlock(dm_n_feats, dm_n_feats, 3, bias=bias)
        ]
        m_dm_up = [
            common.Upsampler(2, dm_n_feats, norm_type, act_type, bias=bias),
            common.ConvBlock(dm_n_feats, 3, 3, bias=True)
        ]

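        # model_sr: SR head + residual stack (ShortcutBlock) + upsampler to a
        # 4-channel raw output; sr_output: a PixelShuffle branch for the sr_raw
        # image; model_dm: demosaick head + residual stack + 2x upsampler to RGB.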
        self.model_sr = nn.Sequential(
            *m_sr_head, common.ShortcutBlock(nn.Sequential(*m_sr_resblock)),
            *m_sr_up)
        self.sr_output = nn.Sequential(*m_sr_tail)
        self.model_dm = nn.Sequential(
            *m_dm_head, common.ShortcutBlock(nn.Sequential(*m_dm_resblock)),
            *m_dm_up)

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.xavier_normal_(m.weight)
                m.weight.requires_grad = True
                if m.bias is not None:
                    m.bias.data.zero_()
                    m.bias.requires_grad = True
Example #7
    def __init__(self, name="CSPDarkNet53", **kwargs):
        super(CSPDarkNet53, self).__init__(name=name, **kwargs)

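        # CSPDarkNet53 backbone layers. ConvBlock shapes appear to be
        # (kernel_h, kernel_w, in_channels, out_channels); each CSP stage pairs
        # two 1x1 split convs with a stack of residual blocks (1, 2, 8, 8 and 4
        # blocks per stage) and a downsampling 3x3 conv between stages.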
        self.conv1_1 = cm.ConvBlock((3, 3, 3, 32), activate_type="mish")
        self.conv1_2 = cm.ConvBlock((3, 3, 32, 64), downsample=True, activate_type="mish")
        
        self.conv2_1 = cm.ConvBlock((1, 1, 64, 64), activate_type="mish")
        self.conv2_2 = cm.ConvBlock((1, 1, 64, 64), activate_type="mish")
        self.res_blocks1 = []
        for _ in range(1):
            self.res_blocks1.append(cm.ResBlock(64, 32, 64, activate_type="mish"))
        self.conv2_3 = cm.ConvBlock((1, 1, 64, 64), activate_type="mish")

        self.conv3_1 = cm.ConvBlock((1, 1, 128, 64), activate_type="mish")
        self.conv3_2 = cm.ConvBlock((3, 3, 64, 128), downsample=True, activate_type="mish")

        self.conv4_1 = cm.ConvBlock((1, 1, 128, 64), activate_type="mish")
        self.conv4_2 = cm.ConvBlock((1, 1, 128, 64), activate_type="mish")
        self.res_blocks2 = []
        for _ in range(2):
            self.res_blocks2.append(cm.ResBlock(64, 64, 64, activate_type="mish"))
        self.conv4_3 = cm.ConvBlock((1, 1, 64, 64), activate_type="mish")
        
        self.conv5_1 = cm.ConvBlock((1, 1, 128, 128), activate_type="mish")
        self.conv5_2 =  cm.ConvBlock((3, 3, 128, 256), downsample=True, activate_type="mish")

        self.conv6_1 = cm.ConvBlock((1, 1, 256, 128), activate_type="mish")
        self.conv6_2 = cm.ConvBlock((1, 1, 256, 128), activate_type="mish")
        self.res_blocks3 = []
        for _ in range(8):
            self.res_blocks3.append(cm.ResBlock(128, 128, 128, activate_type="mish"))
        self.conv6_3 = cm.ConvBlock((1, 1, 128, 128), activate_type="mish")
        
        self.conv7_1 = cm.ConvBlock((1, 1, 256, 256), activate_type="mish")
        self.conv7_2 = cm.ConvBlock((3, 3, 256, 512), downsample=True, activate_type="mish")

        self.conv8_1 = cm.ConvBlock((1, 1, 512, 256), activate_type="mish")
        self.conv8_2 = cm.ConvBlock((1, 1, 512, 256), activate_type="mish")
        self.res_blocks4 = []
        for _ in range(8):
            self.res_blocks4.append(cm.ResBlock(256, 256, 256, activate_type="mish"))
        self.conv8_3 = cm.ConvBlock((1, 1, 256, 256), activate_type="mish")
        
        self.conv9_1 = cm.ConvBlock((1, 1, 512, 512), activate_type="mish")
        self.conv9_2 = cm.ConvBlock((3, 3, 512, 1024), downsample=True, activate_type="mish")

        self.conv10_1 = cm.ConvBlock((1, 1, 1024, 512), activate_type="mish")
        self.conv10_2 = cm.ConvBlock((1, 1, 1024, 512), activate_type="mish")
        self.res_blocks5 = []
        for _ in range(4):
            self.res_blocks5.append(cm.ResBlock(512, 512, 512, activate_type="mish"))
        self.conv10_3 = cm.ConvBlock((1, 1, 512, 512), activate_type="mish")
        
        self.conv_last = cm.ConvBlock((1, 1, 1024, 1024), activate_type="mish")
Example #8
def YOLOv4_head(input_data, NUM_CLASS, bbox_per_head=2):
    shape1, shape2, shape3 = input_data
    route1 = tf.keras.layers.Input(shape=shape1[1:], name="head1")
    route2 = tf.keras.layers.Input(shape=shape2[1:], name="head2")
    route3 = tf.keras.layers.Input(shape=shape3[1:], name="head3")

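    # For each of the three scales the head emits an objectness map (sigmoid),
    # box parameters (partial sigmoid, 4 values per box) and per-box class
    # scores (softmax), concatenated into one output tensor per scale.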
    conv_small_box = cm.ConvBlock((3, 3, 128, 256))(route1)
    confidence = cm.ConvBlock((3, 3, 256, bbox_per_head),
                              activate=True,
                              activate_type="sigmoid",
                              bn=False)(conv_small_box)
    box_params = cm.ConvBlock((3, 3, 256, bbox_per_head * 4),
                              activate=True,
                              activate_type="partial_sigmoid",
                              bn=False)(conv_small_box)
    classes = []
    for i in range(bbox_per_head):
        classes.append(
            cm.ConvBlock((3, 3, 256, NUM_CLASS),
                         activate=True,
                         activate_type="softmax",
                         bn=False)(conv_small_box))
    conv_small_box = tf.keras.layers.concatenate(
        [confidence, box_params,
         tf.concat(classes, axis=-1)],
        axis=-1,
        name="output_1")

    x = cm.ConvBlock((3, 3, 128, 256), downsample=True)(route1)
    x = tf.concat([x, route2], axis=-1)

    x = cm.ConvBlock((1, 1, 512, 256))(x)
    x = cm.ConvBlock((3, 3, 256, 512))(x)
    x = cm.ConvBlock((1, 1, 512, 256))(x)
    x = cm.ConvBlock((3, 3, 256, 512))(x)
    x = cm.ConvBlock((1, 1, 512, 256))(x)

    skip = x

    conv_medium_box = cm.ConvBlock((3, 3, 256, 512))(x)
    confidence = cm.ConvBlock((3, 3, 512, bbox_per_head),
                              activate=True,
                              activate_type="sigmoid",
                              bn=False)(conv_medium_box)
    box_params = cm.ConvBlock((3, 3, 512, bbox_per_head * 4),
                              activate=True,
                              activate_type="partial_sigmoid",
                              bn=False)(conv_medium_box)
    classes = []
    for i in range(bbox_per_head):
        classes.append(
            cm.ConvBlock((3, 3, 512, NUM_CLASS),
                         activate=True,
                         activate_type="softmax",
                         bn=False)(conv_medium_box))
    conv_medium_box = tf.keras.layers.concatenate(
        [confidence, box_params,
         tf.concat(classes, axis=-1)],
        axis=-1,
        name="output_2")

    x = cm.ConvBlock((3, 3, 256, 512), downsample=True)(skip)
    x = tf.concat([x, route3], axis=-1)

    x = cm.ConvBlock((1, 1, 1024, 512))(x)
    x = cm.ConvBlock((3, 3, 512, 1024))(x)
    x = cm.ConvBlock((1, 1, 1024, 512))(x)
    x = cm.ConvBlock((3, 3, 512, 1024))(x)
    x = cm.ConvBlock((1, 1, 1024, 512))(x)

    conv_big_box = cm.ConvBlock((3, 3, 512, 1024))(x)
    confidence = cm.ConvBlock((3, 3, 1024, bbox_per_head),
                              activate=True,
                              activate_type="sigmoid",
                              bn=False)(conv_big_box)
    box_params = cm.ConvBlock((3, 3, 1024, bbox_per_head * 4),
                              activate=True,
                              activate_type="partial_sigmoid",
                              bn=False)(conv_big_box)
    classes = []
    for i in range(bbox_per_head):
        classes.append(
            cm.ConvBlock((3, 3, 1024, NUM_CLASS),
                         activate=True,
                         activate_type="softmax",
                         bn=False)(conv_big_box))
    conv_big_box = tf.keras.layers.concatenate(
        [confidence, box_params,
         tf.concat(classes, axis=-1)],
        axis=-1,
        name="output_3")

    return tf.keras.Model(inputs=[route1, route2, route3],
                          outputs=(conv_small_box, conv_medium_box,
                                   conv_big_box),
                          name="Heads")
Example #9
    def __init__(self,
                 sr_n_resblocks=6,
                 dm_n_resblock=6,
                 sr_n_feats=64,
                 dm_n_feats=64,
                 scale=2,
                 denoise=True,
                 bias=True,
                 norm_type=False,
                 act_type='relu',
                 block_type='rrdb'):
        super(NET, self).__init__()
        # act = nn.LeakyReLU(negative_slope=0.1, inplace=False)
        # act = nn.PReLU(n_feats)

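        # Joint SR + demosaicking network configured through keyword defaults
        # rather than an opt object: an SR branch, a PixelShuffle branch for the
        # raw SR output, and a demosaick branch.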
        # define sr module
        if denoise:
            m_sr_head = [
                common.ConvBlock(5,
                                 sr_n_feats,
                                 5,
                                 act_type=act_type,
                                 bias=True)
            ]
        else:
            m_sr_head = [
                common.ConvBlock(4,
                                 sr_n_feats,
                                 5,
                                 act_type=act_type,
                                 bias=True)
            ]
        if block_type.lower() == 'rrdb':
            m_sr_resblock = [
                common.RRDB(sr_n_feats, sr_n_feats, 3, 1, bias, norm_type,
                            act_type, 0.2) for _ in range(sr_n_resblocks)
            ]
        elif block_type.lower() == 'dudb':
            m_sr_resblock = [
                common.DUDB(sr_n_feats, 3, 1, bias, norm_type, act_type, 0.2)
                for _ in range(sr_n_resblocks)
            ]
        elif block_type.lower() == 'res':
            m_sr_resblock = [
                common.ResBlock(sr_n_feats,
                                3,
                                norm_type,
                                act_type,
                                res_scale=1,
                                bias=bias) for _ in range(sr_n_resblocks)
            ]
        else:
            raise RuntimeError('block_type is not supported')

        m_sr_resblock += [
            common.ConvBlock(sr_n_feats, sr_n_feats, 3, bias=bias)
        ]
        m_sr_up = [
            common.Upsampler(scale, sr_n_feats, norm_type, act_type,
                             bias=bias),
            common.ConvBlock(sr_n_feats, 4, 3, bias=True)
        ]

        # branch for sr_raw output
        m_sr_tail = [nn.PixelShuffle(2)]

        # define demosaick module
        m_dm_head = [
            common.ConvBlock(4, dm_n_feats, 5, act_type=act_type, bias=True)
        ]

        if block_type.lower() == 'rrdb':
            m_dm_resblock = [
                common.RRDB(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type,
                            act_type, 0.2) for _ in range(dm_n_resblock)
            ]
        elif block_type.lower() == 'dudb':
            m_dm_resblock = [
                common.DUDB(dm_n_feats, 3, 1, bias, norm_type, act_type, 0.2)
                for _ in range(dm_n_resblock)
            ]
        elif block_type.lower() == 'res':
            m_dm_resblock = [
                common.ResBlock(dm_n_feats,
                                3,
                                norm_type,
                                act_type,
                                res_scale=1,
                                bias=bias) for _ in range(dm_n_resblock)
            ]
        else:
            raise RuntimeError('block_type is not supported')

        m_dm_resblock += [
            common.ConvBlock(dm_n_feats, dm_n_feats, 3, bias=bias)
        ]
        m_dm_up = [
            common.Upsampler(2, dm_n_feats, norm_type, act_type, bias=bias),
            common.ConvBlock(dm_n_feats, 3, 3, bias=True)
        ]

        self.model_sr = nn.Sequential(
            *m_sr_head, common.ShortcutBlock(nn.Sequential(*m_sr_resblock)),
            *m_sr_up)
        self.sr_output = nn.Sequential(*m_sr_tail)
        self.model_dm = nn.Sequential(
            *m_dm_head, common.ShortcutBlock(nn.Sequential(*m_dm_resblock)),
            *m_dm_up)

        for m in self.modules():
            # pdb.set_trace()
            if isinstance(m, nn.Conv2d):
                # Xavier
                # nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                nn.init.xavier_normal_(m.weight)
                m.weight.requires_grad = True
                if m.bias is not None:
                    m.bias.data.zero_()
                    m.bias.requires_grad = True
Example #10
def YOLOv4_body(input_shape):
    inp = tf.keras.layers.Input(shape=input_shape)
    route1, route2, x = back.CSPDarkNet53()(inp)

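    # Neck: SPP over the deepest CSPDarkNet53 feature map, then two top-down
    # upsample-and-concatenate stages; the three resulting maps (neck1, neck2,
    # neck3) are returned for the detection heads.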
    x = cm.ConvBlock((1, 1, 1024, 512))(x)
    x = cm.ConvBlock((3, 3, 512, 1024))(x)
    x = cm.ConvBlock((1, 1, 1024, 512))(x)

    x = SPP()(x)

    x = cm.ConvBlock((1, 1, 2048, 512))(x)
    x = cm.ConvBlock((3, 3, 512, 1024))(x)
    x = cm.ConvBlock((1, 1, 1024, 512), name="neck3")(x)

    route3 = x

    x = cm.ConvBlock((1, 1, 512, 256))(route3)
    upsampled = L.UpSampling2D()(x)

    x = cm.ConvBlock((1, 1, 512, 256))(route2)
    x = tf.concat([x, upsampled], axis=-1)

    x = cm.ConvBlock((1, 1, 512, 256))(x)
    x = cm.ConvBlock((3, 3, 256, 512))(x)
    x = cm.ConvBlock((1, 1, 512, 256))(x)
    x = cm.ConvBlock((3, 3, 256, 512))(x)
    x = cm.ConvBlock((1, 1, 512, 256), name="neck2")(x)

    route2 = x

    x = cm.ConvBlock((1, 1, 256, 128))(route2)
    upsampled = L.UpSampling2D()(x)

    x = cm.ConvBlock((1, 1, 256, 128))(route1)
    x = tf.concat([x, upsampled], axis=-1)

    x = cm.ConvBlock((1, 1, 256, 128))(x)
    x = cm.ConvBlock((3, 3, 128, 256))(x)
    x = cm.ConvBlock((1, 1, 256, 128))(x)
    x = cm.ConvBlock((3, 3, 128, 256))(x)
    x = cm.ConvBlock((1, 1, 256, 128), name="neck1")(x)

    route1 = x

    return tf.keras.Model(inputs=[inp],
                       outputs=(route1, route2, route3),
                       name="Body")
Example #11
    def __init__(self, opt):
        super(NET, self).__init__()

        sr_n_resblocks = opt.sr_n_resblocks
        dm_n_resblocks = opt.dm_n_resblocks
        sr_n_feats = opt.channels
        dm_n_feats = opt.channels
        scale = opt.scale

        denoise = opt.denoise
        block_type = opt.block_type
        act_type = opt.act_type
        bias = opt.bias
        norm_type = opt.norm_type

        # define sr module
        if denoise:
            m_sr_head = [
                common.ConvBlock(6,
                                 sr_n_feats,
                                 5,
                                 act_type=act_type,
                                 bias=True)
            ]
        else:
            m_sr_head = [
                common.ConvBlock(4,
                                 sr_n_feats,
                                 5,
                                 act_type=act_type,
                                 bias=True)
            ]
        if block_type.lower() == 'rrdb':
            m_sr_resblock = [
                common.RRDB(sr_n_feats, sr_n_feats, 3, 1, bias, norm_type,
                            act_type, 0.2) for _ in range(sr_n_resblocks)
            ]
        elif block_type.lower() == 'dudb':
            m_sr_resblock = [
                common.DUDB(sr_n_feats, 3, 1, bias, norm_type, act_type, 0.2)
                for _ in range(sr_n_resblocks)
            ]
        elif block_type.lower() == 'res':
            m_sr_resblock = [
                common.ResBlock(sr_n_feats,
                                3,
                                norm_type,
                                act_type,
                                res_scale=1,
                                bias=bias) for _ in range(sr_n_resblocks)
            ]
        else:
            raise RuntimeError('block_type is not supported')

        m_sr_resblock += [
            common.ConvBlock(sr_n_feats, sr_n_feats, 3, bias=bias)
        ]
        m_sr_up = [
            common.Upsampler(scale, sr_n_feats, norm_type, act_type,
                             bias=bias),
            common.ConvBlock(sr_n_feats, 4, 3, bias=True)
        ]

        # branch for sr_raw output
        m_sr_tail = [nn.PixelShuffle(2)]

        # define demosaick module
        m_dm_head = [
            common.ConvBlock(4, dm_n_feats, 5, act_type=act_type, bias=True)
        ]

        if block_type.lower() == 'rrdb':
            m_dm_resblock = m_res(opt)
            # m_dm_resblock = [common.RRDB(dm_n_feats, dm_n_feats, 3, 1, bias,
            #                              norm_type, act_type, 0.2)
            #                  for _ in range(dm_n_resblocks)]
        elif block_type.lower() == 'dudb':
            m_dm_resblock = [
                common.DUDB(dm_n_feats, 3, 1, bias, norm_type, act_type, 0.2)
                for _ in range(dm_n_resblocks)
            ]
        elif block_type.lower() == 'res':
            m_dm_resblock = [
                common.ResBlock(dm_n_feats,
                                3,
                                norm_type,
                                act_type,
                                res_scale=1,
                                bias=bias) for _ in range(dm_n_resblocks)
            ]
        else:
            raise RuntimeError('block_type is not supported')

        #m_dm_resblock += [common.ConvBlock(dm_n_feats, dm_n_feats, 3, bias=bias)]
        m_dm_up = [
            common.Upsampler(2, dm_n_feats, norm_type, act_type, bias=bias)
            # common.ConvBlock(dm_n_feats, 3, 3, bias=True)
        ]

        self.model_sr = nn.Sequential(
            *m_sr_head, common.ShortcutBlock(nn.Sequential(*m_sr_resblock)),
            *m_sr_up)
        self.sr_output = nn.Sequential(*m_sr_tail)
        self.model_dm1 = nn.Sequential(*m_dm_head)
        self.model_dm2 = m_dm_resblock
        self.model_dm3 = nn.Sequential(*m_dm_up)

        greenresblock = green_res(opt)
        self.green = greenresblock
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.xavier_normal_(m.weight)
                m.weight.requires_grad = True
                if m.bias is not None:
                    m.bias.data.zero_()
                    m.bias.requires_grad = True
        #self.sft = SFTLayer()
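        # combine fuses the demosaick features with the 1-channel green-branch
        # output (dm_n_feats + 1 inputs, presumably concatenated in forward);
        # greenup expands that map to 8 channels, PacConvTranspose2d performs a
        # pixel-adaptive 2x transposed-conv upsampling, and final maps to RGB.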
        self.combine = nn.Sequential(
            common.ConvBlock(dm_n_feats + 1, dm_n_feats, 1, bias=True),
            nn.LeakyReLU(0.2, inplace=True))
        self.greenup = nn.Sequential(common.ConvBlock(1, 4, 1, bias=True),
                                     nn.LeakyReLU(0.2, inplace=True),
                                     common.ConvBlock(4, 8, 1, bias=True),
                                     nn.LeakyReLU(0.2, inplace=True))

        self.pac = PacConvTranspose2d(64,
                                      64,
                                      kernel_size=5,
                                      stride=2,
                                      padding=2,
                                      output_padding=1)
        self.final = common.ConvBlock(dm_n_feats, 3, 3, bias=True)
        self.norm = nn.InstanceNorm2d(1)
Example #12
    def __init__(self, opt):
        super(NET, self).__init__()
        act_type = opt.act_type
        bias = opt.bias
        norm_type = opt.norm_type

        self.conv1 = nn.Sequential(
            common.ConvBlock(5, 64, 3, act_type=act_type, bias=True),
            *[common.ConvBlock(64, 64, 3, act_type=act_type, bias=True)
              for _ in range(13)],
            common.ConvBlock(64, 12, 3, act_type=act_type, bias=True),
        )

        self.conv2 = nn.Sequential(
            common.ConvBlock(6, 64, 3, act_type=act_type, bias=True),
            common.ConvBlock(64, 3, 3, act_type=act_type, bias=True))

        self.ps = nn.PixelShuffle(2)
        self.size = 64
        s = self.size
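        # Three complementary binary masks over an s x s grid: mask1 marks
        # (odd row, even col) pixels, mask2 marks pixels whose row and column
        # parity match, and mask3 marks (even row, odd col) pixels; these are
        # likely the R/G/B sample positions of a Bayer mosaic.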
        mask1 = torch.zeros(s, s)
        mask2 = torch.zeros(s, s)
        mask3 = torch.zeros(s, s)
        for i in range(s):
            for j in range(s):
                if i % 2 == 1 and j % 2 == 0:
                    mask1[i, j] = 1
                elif i % 2 == j % 2:
                    mask2[i, j] = 1
                else:  # i % 2 == 0 and j % 2 == 1
                    mask3[i, j] = 1
        self.mask1 = mask1.cuda()
        self.mask2 = mask2.cuda()
        self.mask3 = mask3.cuda()
        self.srcnn = srcnn()
        self.carn = carn()
        self.edsr = EDSR()