Example #1
    def __init__(self, in_nc=3, out_nc=3, nf=64, nb=16, upscale=4):
        super(MSRResNet, self).__init__()
        self.upscale = upscale

        self.conv_first = nn.Conv2d(in_nc, nf, 3, 1, 1, bias=True)
        basic_block = functools.partial(mutil.ResidualBlock_noBN, nf=nf)
        self.recon_trunk = mutil.make_layer(basic_block, nb)

        # upsampling
        if self.upscale == 2:
            self.upconv1 = nn.Conv2d(nf, nf * 4, 3, 1, 1, bias=True)
            self.pixel_shuffle = nn.PixelShuffle(2)
        elif self.upscale == 3:
            self.upconv1 = nn.Conv2d(nf, nf * 9, 3, 1, 1, bias=True)
            self.pixel_shuffle = nn.PixelShuffle(3)
        elif self.upscale == 4:
            self.upconv1 = nn.Conv2d(nf, nf * 4, 3, 1, 1, bias=True)
            self.upconv2 = nn.Conv2d(nf, nf * 4, 3, 1, 1, bias=True)
            self.pixel_shuffle = nn.PixelShuffle(2)

        self.HRconv = nn.Conv2d(nf, nf, 3, 1, 1, bias=True)
        self.conv_last = nn.Conv2d(nf, out_nc, 3, 1, 1, bias=True)

        # activation function
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)

        # initialization
        mutil.initialize_weights(
            [self.conv_first, self.upconv1, self.HRconv, self.conv_last], 0.1)
        if self.upscale == 4:
            mutil.initialize_weights(self.upconv2, 0.1)
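
Only the constructor appears above; a forward pass consistent with it would look roughly like the sketch below. This is an assumed reconstruction, not part of the original snippet: the bilinear `base` skip is the usual MSRResNet convention, and `F` stands for `torch.nn.functional`.

    # Assumed forward sketch for the constructor above (F = torch.nn.functional).
    def forward(self, x):
        fea = self.lrelu(self.conv_first(x))
        out = self.recon_trunk(fea)

        if self.upscale == 4:
            out = self.lrelu(self.pixel_shuffle(self.upconv1(out)))
            out = self.lrelu(self.pixel_shuffle(self.upconv2(out)))
        elif self.upscale in (2, 3):
            out = self.lrelu(self.pixel_shuffle(self.upconv1(out)))

        out = self.conv_last(self.lrelu(self.HRconv(out)))
        # Common convention: predict a residual on top of a plain bilinear
        # upscale of the input (an assumption, not shown in the snippet).
        base = F.interpolate(x, scale_factor=self.upscale, mode='bilinear',
                             align_corners=False)
        return out + base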
Example #2
    def __init__(self, nf=64, gc=32, bias=True, use_snorm=False):
        super(ResidualDenseBlock_5C, self).__init__()
        # gc: growth channel, i.e. intermediate channels
        if use_snorm:
            self.conv1 = nn.utils.spectral_norm(
                nn.Conv2d(nf, gc, 3, 1, 1, bias=bias))
            self.conv2 = nn.utils.spectral_norm(
                nn.Conv2d(nf + gc, gc, 3, 1, 1, bias=bias))
            self.conv3 = nn.utils.spectral_norm(
                nn.Conv2d(nf + 2 * gc, gc, 3, 1, 1, bias=bias))
            self.conv4 = nn.utils.spectral_norm(
                nn.Conv2d(nf + 3 * gc, gc, 3, 1, 1, bias=bias))
            self.conv5 = nn.utils.spectral_norm(
                nn.Conv2d(nf + 4 * gc, nf, 3, 1, 1, bias=bias))
        else:
            self.conv1 = nn.Conv2d(nf, gc, 3, 1, 1, bias=bias)
            self.conv2 = nn.Conv2d(nf + gc, gc, 3, 1, 1, bias=bias)
            self.conv3 = nn.Conv2d(nf + 2 * gc, gc, 3, 1, 1, bias=bias)
            self.conv4 = nn.Conv2d(nf + 3 * gc, gc, 3, 1, 1, bias=bias)
            self.conv5 = nn.Conv2d(nf + 4 * gc, nf, 3, 1, 1, bias=bias)
        self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True)

        # initialization
        mutil.initialize_weights(
            [self.conv1, self.conv2, self.conv3, self.conv4, self.conv5], 0.1)
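
The constructor alone does not show the wiring; the channel counts (nf + k * gc inputs) imply the standard ESRGAN-style dense forward below. The concatenation pattern and the 0.2 residual scaling are assumptions based on that convention, not visible in the snippet.

    # Assumed forward: each conv sees the block input concatenated with all
    # previous intermediate outputs; the residual is scaled by 0.2
    # (ESRGAN convention, assumed here).
    def forward(self, x):
        x1 = self.lrelu(self.conv1(x))
        x2 = self.lrelu(self.conv2(torch.cat((x, x1), 1)))
        x3 = self.lrelu(self.conv3(torch.cat((x, x1, x2), 1)))
        x4 = self.lrelu(self.conv4(torch.cat((x, x1, x2, x3), 1)))
        x5 = self.conv5(torch.cat((x, x1, x2, x3, x4), 1))
        return x5 * 0.2 + x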
Example #3
    def __init__(
        self,
        channel_in: int,
        channel_out: int,
        init: str = "xavier",
        gc: int = 32,
        bias: bool = True,
    ):
        # https://pytorch.org/docs/stable/generated/torch.nn.Conv2d.html
        super(DenseBlock, self).__init__()
        self.conv1 = nn.Conv2d(channel_in, gc, 3, 1, 1, bias=bias)
        self.conv2 = nn.Conv2d(channel_in + gc, gc, 3, 1, 1, bias=bias)
        self.conv3 = nn.Conv2d(channel_in + 2 * gc, gc, 3, 1, 1, bias=bias)
        self.conv4 = nn.Conv2d(channel_in + 3 * gc, gc, 3, 1, 1, bias=bias)
        self.conv5 = nn.Conv2d(channel_in + 4 * gc,
                               channel_out,
                               3,
                               1,
                               1,
                               bias=bias)
        self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True)

        if init == "xavier":
            mutil.initialize_weights_xavier(
                [self.conv1, self.conv2, self.conv3, self.conv4], 0.1)
        else:
            mutil.initialize_weights(
                [self.conv1, self.conv2, self.conv3, self.conv4], 0.1)
        mutil.initialize_weights(self.conv5, 0)
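
Note the last layer: `mutil.initialize_weights(self.conv5, 0)` zero-initializes conv5, so the block outputs zeros at the start of training; when the block serves as the subnet of a coupling layer, the whole transform therefore begins near the identity. A forward consistent with the channel bookkeeping would be the dense pattern again, this time without a residual add since channel_out may differ from channel_in (assumed sketch):

    # Assumed forward: dense concatenation as in the previous example, with
    # no residual connection because the output has channel_out channels.
    def forward(self, x):
        x1 = self.lrelu(self.conv1(x))
        x2 = self.lrelu(self.conv2(torch.cat((x, x1), 1)))
        x3 = self.lrelu(self.conv3(torch.cat((x, x1, x2), 1)))
        x4 = self.lrelu(self.conv4(torch.cat((x, x1, x2, x3), 1)))
        return self.conv5(torch.cat((x, x1, x2, x3, x4), 1))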
Example #4
    def __init__(self,
                 channel_in,
                 channel_out,
                 init='xavier',
                 gc=32,
                 bias=True):
        super(DenseBlock, self).__init__()
        self.conv1 = nn.Conv2d(channel_in, gc, 3, 1, 1, bias=bias)
        self.conv2 = nn.Conv2d(channel_in + gc, gc, 3, 1, 1, bias=bias)
        self.conv3 = nn.Conv2d(channel_in + 2 * gc, gc, 3, 1, 1, bias=bias)
        self.conv4 = nn.Conv2d(channel_in + 3 * gc, gc, 3, 1, 1, bias=bias)
        self.conv5 = nn.Conv2d(channel_in + 4 * gc,
                               channel_out,
                               3,
                               1,
                               1,
                               bias=bias)
        self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True)

        if init == 'xavier':
            mutil.initialize_weights_xavier(
                [self.conv1, self.conv2, self.conv3, self.conv4], 0.1)
        else:
            mutil.initialize_weights(
                [self.conv1, self.conv2, self.conv3, self.conv4], 0.1)
        mutil.initialize_weights(self.conv5, 0)
Example #5
    def __init__(self, filters=64, bias=True):
        super(ResidualDenseBlock_2C, self).__init__()
        # two 3x3 convs on the same channel count; no growth channels here
        self.conv1 = nn.Conv2d(filters, filters, 3, 1, 1, bias=bias)
        self.conv2 = nn.Conv2d(filters, filters, 3, 1, 1, bias=bias)
        self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True)

        # initialization
        mutil.initialize_weights([self.conv1, self.conv2], 0.1)
Example #6
    def __init__(self, nf=64, gc=32, bias=True):
        super(ResidualDenseBlock_5C, self).__init__()

        self.conv1 = nn.Conv2d(nf, gc, 3, 1, 1, bias=bias)
        self.conv2 = nn.Conv2d(nf + gc, gc, 3, 1, 1, bias=bias)
        self.conv3 = nn.Conv2d(nf + 2 * gc, gc, 3, 1, 1, bias=bias)
        self.conv4 = nn.Conv2d(nf + 3 * gc, gc, 3, 1, 1, bias=bias)
        self.conv5 = nn.Conv2d(nf + 4 * gc, nf, 3, 1, 1, bias=bias)
        self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True)

        mutil.initialize_weights(
            [self.conv1, self.conv2, self.conv3, self.conv4, self.conv5], 0.1)
Example #7
    def __init__(self, nf=64, gc=32, bias=True):
        super(TimeResidualDenseBlock5C, self).__init__()
        # gc: growth channel, i.e. intermediate channels
        self.conv1 = ConcatConv2d(nf, gc, 3, 1, 1, bias=bias)
        self.conv2 = ConcatConv2d(nf + gc, gc, 3, 1, 1, bias=bias)
        self.conv3 = ConcatConv2d(nf + 2 * gc, gc, 3, 1, 1, bias=bias)
        self.conv4 = ConcatConv2d(nf + 3 * gc, gc, 3, 1, 1, bias=bias)
        self.conv5 = ConcatConv2d(nf + 4 * gc, nf, 3, 1, 1, bias=bias)
        self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True)

        # initialization
        mutil.initialize_weights(
            [self.conv1, self.conv2, self.conv3, self.conv4, self.conv5], 0.1)
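
ConcatConv2d is not defined in the snippet; in Neural-ODE codebases it typically appends the scalar time t as an extra input channel before a regular convolution and is called as conv(t, x). Under that assumption (including the 0.2 residual scaling carried over from the plain dense block), the forward would thread t through every layer:

    # Assumed forward: ConcatConv2d is taken to have signature (t, x),
    # broadcasting t to an extra channel before convolving. Both the
    # signature and the 0.2 scaling are assumptions, not shown above.
    def forward(self, t, x):
        x1 = self.lrelu(self.conv1(t, x))
        x2 = self.lrelu(self.conv2(t, torch.cat((x, x1), 1)))
        x3 = self.lrelu(self.conv3(t, torch.cat((x, x1, x2), 1)))
        x4 = self.lrelu(self.conv4(t, torch.cat((x, x1, x2, x3), 1)))
        x5 = self.conv5(t, torch.cat((x, x1, x2, x3, x4), 1))
        return x5 * 0.2 + x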
Example #8
    def __init__(self, in_nc, out_nc, nf, nb, gc=32, differential=None,
                 time_dependent=False, adjoint=False, sb=5):
        super(RRDBNet, self).__init__()
        self.conv_first = nn.Conv2d(in_nc, nf, 3, 1, 1, bias=True)
        self.trunk_conv = nn.Conv2d(nf, nf, 3, 1, 1, bias=True)
        if differential == "checkpointed":
            self.conv_trunk = SRTrunk(nf, nb, make_odeblock(5, 'RK4'))
            mutil.initialize_weights(self.conv_trunk.odefunc.convs)
        elif differential == "standard":
            self.conv_trunk = ODEBlock(ODEfunc(nf, nb=nb, normalization=False, time_dependent=time_dependent), adjoint=adjoint)
            mutil.initialize_weights(self.conv_trunk.odefunc.convs)
        elif differential == "sequential":
            self.conv_trunk = nn.Sequential(*[ODEBlock(ODEfunc(nf, nb=sb, normalization=False, time_dependent=time_dependent), adjoint=adjoint) for _ in range(nb)])
            for block in self.conv_trunk:
                mutil.initialize_weights(block.odefunc.convs)
        elif differential == "augmented":
            augment_dim = nf//4
            method = 'dopri5'
            if method == 'euler':
                warnings.warn("euler mode")
            self.conv_trunk = AugBlock(AugFunc(nf=nf, nb=nb, augment_dim=augment_dim, time_dependent=time_dependent), adjoint=adjoint, is_conv=True, method=method)
            self.trunk_conv = nn.Conv2d(nf+augment_dim, nf, 3, 1, 1, bias=True)
            mutil.initialize_weights(self.conv_trunk.odefunc.convs)
        elif differential is None or differential == "nodiff":
            RRDB_block_f = functools.partial(RRDB, nf=nf, gc=gc)
            self.conv_trunk = mutil.make_layer(RRDB_block_f, nb)
        else:
            raise NotImplementedError("unrecognized differential system passed")
        #### upsampling
        self.upconv1 = nn.Conv2d(nf, nf, 3, 1, 1, bias=True)
        self.upconv2 = nn.Conv2d(nf, nf, 3, 1, 1, bias=True)
        self.HRconv = nn.Conv2d(nf, nf, 3, 1, 1, bias=True)
        self.conv_last = nn.Conv2d(nf, out_nc, 3, 1, 1, bias=True)

        self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True)
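
Whichever trunk the differential flag selects, the head is the standard RRDBNet 4x layout (two upconv stages). A forward consistent with it, following the usual ESRGAN wiring with nearest-neighbor upsampling (assumed; F = torch.nn.functional):

    # Assumed forward for the fixed 4x head above (ESRGAN convention).
    def forward(self, x):
        fea = self.conv_first(x)
        trunk = self.trunk_conv(self.conv_trunk(fea))
        fea = fea + trunk  # global residual around the trunk

        fea = self.lrelu(self.upconv1(
            F.interpolate(fea, scale_factor=2, mode='nearest')))
        fea = self.lrelu(self.upconv2(
            F.interpolate(fea, scale_factor=2, mode='nearest')))
        return self.conv_last(self.lrelu(self.HRconv(fea)))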
Example #9
    def __init__(self, in_nc=3, out_nc=3, nf=64, nb=16):
        super(MSRResNet, self).__init__()

        self.conv_first = nn.Conv2d(in_nc, nf, 3, 1, 1, bias=True)
        basic_block = functools.partial(mutil.ResidualBlock_noBN, nf=nf)
        self.recon_trunk = mutil.make_layer(basic_block, nb)

        self.HRconv = nn.Conv2d(nf, nf, 3, 1, 1, bias=True)
        self.conv_last = nn.Conv2d(nf, out_nc, 3, 1, 1, bias=True)

        # activation function
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)

        # initialization
        mutil.initialize_weights(
            [self.conv_first, self.HRconv, self.conv_last], 0.1)
Example #10
    def __init__(self, nf=64, gc=32, bias=True, use_snorm=False):
        super(Multi_extfea, self).__init__()
        # gc: growth channel, i.e. intermediate channels
        if use_snorm:
            # the 1x1 conv takes padding 0 so all four branches keep the same
            # spatial size (the original had padding 1 here, which would break
            # the gc * 4 concatenation feeding conv_out)
            self.conv0 = nn.utils.spectral_norm(
                nn.Conv2d(nf, gc, 1, 1, 0, bias=bias))
            self.conv1 = nn.utils.spectral_norm(
                nn.Conv2d(nf, gc, 3, 1, 1, bias=bias))
            self.conv2 = nn.utils.spectral_norm(
                nn.Conv2d(nf, gc, 5, 1, 2, bias=bias))
            self.conv3 = nn.utils.spectral_norm(
                nn.Conv2d(nf, gc, 7, 1, 3, bias=bias))
        else:
            self.conv0 = nn.Conv2d(nf, gc, 1, 1, 0, bias=bias)
            self.conv1 = nn.Conv2d(nf, gc, 3, 1, 1, bias=bias)
            self.conv2 = nn.Conv2d(nf, gc, 5, 1, 2, bias=bias)
            self.conv3 = nn.Conv2d(nf, gc, 7, 1, 3, bias=bias)
        self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True)
        self.conv_out = nn.Conv2d(gc * 4, gc, 3, 1, 1, bias=bias)
        # initialization
        mutil.initialize_weights(
            [self.conv0, self.conv1, self.conv2, self.conv3, self.conv_out],
            0.1)
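
All four branches read the same input and, with the padding fix above, keep its spatial size, while conv_out expects gc * 4 input channels; the forward presumably concatenates the 1/3/5/7 branches before fusing them (a sketch inferred from the channel counts, not shown in the snippet):

    # Assumed forward: parallel multi-kernel branches, concatenated and
    # fused back down to gc channels (inferred from conv_out's gc * 4 input).
    def forward(self, x):
        x0 = self.lrelu(self.conv0(x))
        x1 = self.lrelu(self.conv1(x))
        x2 = self.lrelu(self.conv2(x))
        x3 = self.lrelu(self.conv3(x))
        return self.conv_out(torch.cat((x0, x1, x2, x3), 1))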
Example #11
    def __init__(self,
                 in_nc=3,
                 out_nc=3,
                 nf=64,
                 nb=16,
                 upscale=4,
                 differential=None):  # None (the default) builds the plain residual trunk below
        super(MSRResNet, self).__init__()
        self.upscale = upscale

        self.conv_first = nn.Conv2d(in_nc, nf, 3, 1, 1, bias=True)
        if differential == "checkpointed":
            self.conv_trunk = SRTrunk(nf, nb, make_odeblock(2, 'RK4'))
            mutil.initialize_weights(self.conv_trunk.odefunc.convs)
        elif differential == "standard":
            self.conv_trunk = ODEBlock(ODEfunc(nf, nb=nb, normalization=False))
            mutil.initialize_weights(self.conv_trunk.odefunc.convs)
        elif differential is None:
            basic_block = functools.partial(mutil.ResidualBlock_noBN, nf=nf)
            self.conv_trunk = mutil.make_layer(basic_block, nb)
        else:
            raise NotImplementedError(
                "unrecognized differential system passed")

        # upsampling
        if self.upscale == 2:
            self.upconv1 = nn.Conv2d(nf, nf * 4, 3, 1, 1, bias=True)
            self.pixel_shuffle = nn.PixelShuffle(2)
        elif self.upscale == 3:
            self.upconv1 = nn.Conv2d(nf, nf * 9, 3, 1, 1, bias=True)
            self.pixel_shuffle = nn.PixelShuffle(3)
        elif self.upscale == 4:
            self.upconv1 = nn.Conv2d(nf, nf * 4, 3, 1, 1, bias=True)
            self.upconv2 = nn.Conv2d(nf, nf * 4, 3, 1, 1, bias=True)
            self.pixel_shuffle = nn.PixelShuffle(2)

        self.HRconv = nn.Conv2d(nf, nf, 3, 1, 1, bias=True)
        self.conv_last = nn.Conv2d(nf, out_nc, 3, 1, 1, bias=True)

        # activation function
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=False)

        # initialization
        mutil.initialize_weights(
            [self.conv_first, self.upconv1, self.HRconv, self.conv_last], 0.1)
        if self.upscale == 4:
            mutil.initialize_weights(self.upconv2, 0.1)