Example #1
    def __init__(self):
        super(Net, self).__init__()
        self.downs = nn.ModuleList(
            (Down(3, 32, 32), Down(32, 32, 64), Down(64, 64, 64),
             Down(64, 64, 64), Down(64, 64, 64)))
        branch1 = nn.Sequential(Conv3x3(32, 3), nn.PReLU())
        self.branches = nn.ModuleList(
            (branch1, Branch(64), Branch(64, 64), Branch(64, 64, 32),
             Branch(64, 64, 32, 32)))
        self.ms = tuple(zip(self.downs, self.branches))
Example #2
    def __init__(self, scaleLayers, strides, non_local=True):
        super(Branch, self).__init__()
        self.conv_input = Conv3x3(64, 64)
        self.relu = nn.PReLU()

        # one CARB block per entry in strides
        self.convt_F = nn.ModuleList(CARB(64) for _ in strides)
        # style encode: one Conv3x3 per entry in strides, using that entry as its stride
        self.s_conv = nn.ModuleList(Conv3x3(64, 64, stride=k) for k in strides)

        # optional non-local attention block; falls back to identity when disabled
        self.non_local = Nonlocal_CA(in_feat=64,
                                     inter_feat=64 // 8,
                                     reduction=8,
                                     sub_sample=False,
                                     bn_layer=False) if non_local else identity

        # scaleLayers upsampling stages
        self.u = nn.Sequential(*(upsample_block(64, 256)
                                 for _ in range(scaleLayers)))
        self.convt_shape1 = Conv3x3(64, 3)

        initConvParameters(self)
Example #3
  def __init__(self, in_c=3, out_c=3, n_feat=96, scale_unetfeats=48, scale_orsnetfeats=32, num_cab=8, reduction=4, bias=False):
    super(MPRNet, self).__init__()

    act = nn.PReLU()
    kernel_size = 3
    # one shallow feature extractor (conv + channel-attention block) per stage
    self.shallow_feat = nn.ModuleList(
      nn.Sequential(Conv3x3(in_c, n_feat, bias=bias),
                    CAB(n_feat, kernel_size, reduction, bias=bias, act=act))
      for _ in range(3))

    # Cross Stage Feature Fusion (CSFF)
    self.encoder = nn.ModuleList((
      Encoder(n_feat, kernel_size, reduction, act, bias, scale_unetfeats, csff=False), # stage 1
      Encoder(n_feat, kernel_size, reduction, act, bias, scale_unetfeats, csff=True), # stage 2
      ORSNet(n_feat, scale_orsnetfeats, kernel_size, reduction, act, bias, scale_unetfeats, num_cab) # stage 3
    ))

    self.decoder = nn.ModuleList(Decoder(n_feat, kernel_size, reduction, act, bias, scale_unetfeats) for _ in range(2))

    # supervised attention modules (SAM) between consecutive stages
    self.sam = nn.ModuleList(SAM(n_feat, kernel_size=1, bias=bias) for _ in range(2))

    self.concat = nn.ModuleList(Conv3x3(n_feat * 2, feats, bias=bias) for feats in (n_feat, n_feat + scale_orsnetfeats))
    self.tail = Conv3x3(n_feat + scale_orsnetfeats, out_c, bias=bias)
Example #4
    def __init__(self,
                 deepFs,
                 combine=None,
                 cat=True,
                 u2=identity,
                 in_channels=64):
        super(Branch, self).__init__()
        self.inputF = namedSequential(('conv_input', Conv3x3(in_channels, 64)),
                                      ('relu', nn.PReLU()))
        if cat:
            # prepend a CAT(128) fusion block to the deep path and build a
            # shallow path of five CARB blocks
            deepFs = [CAT(128)] + list(deepFs)
            self.shallowF = nn.Sequential(*(CARB(64) for _ in range(5)))
        else:
            self.shallowF = None
        self.deepF = nn.Sequential(*deepFs)
        self.combineF = namedSequential(*combine) if combine else None
        self.u2 = u2

        initConvParameters(self)
Example #5
    feat_std = feat_var.sqrt().unsqueeze(-1).unsqueeze(-1)
    feat_mean = featHW.mean(dim=2).unsqueeze(-1).unsqueeze(-1)
    return feat_mean, feat_std


def din(content_feat, encode_feat, eps=1e-4):
    # eps is a small value added to the std to avoid dividing by zero.
    size = content_feat.size()
    content_mean, content_std = calc_mean_std(content_feat)
    encode_mean, encode_std = calc_mean_std(encode_feat)
    normalized_feat = (content_feat - content_mean) / (content_std + eps)
    return normalized_feat * encode_std + encode_mean


Down2 = lambda c_in, c_out: namedSequential(
    ('conv_input', Conv3x3(c_in, 32)), ('relu', nn.PReLU()),
    ('down', Conv3x3(32, 32, stride=2)), ('convt_R1', Conv3x3(32, c_out)))

Branch1 = lambda: namedSequential(('conv_input', Conv3x3(3, 3)),
                                  ('relu', nn.PReLU()),
                                  ('conv_input2', Conv3x3(3, 3)))

couplePath = lambda feat, s: (din(feat, s), s)
forwardPath = lambda xs, fs: couplePath(fs[0](xs[0]), fs[1](xs[1]))


class Branch(nn.Module):
    def __init__(self, scaleLayers, strides, non_local=True):
        super(Branch, self).__init__()
        self.conv_input = Conv3x3(64, 64)
        self.relu = nn.PReLU()
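
Note: `din` above is an AdaIN-style re-normalization: the content features are whitened with their own per-channel statistics and re-scaled with those of `encode_feat`. A minimal runnable sketch follows; the `calc_mean_std` here is only a stand-in consistent with the tail shown at the top of this example (only its last lines are visible), and `din` is the function defined above.

import torch

def calc_mean_std(feat, eps=1e-5):
    # Stand-in (assumed): per-channel mean/std over the spatial dimensions
    # of an N x C x H x W tensor, matching the tail shown above.
    n, c = feat.size()[:2]
    featHW = feat.view(n, c, -1)
    feat_var = featHW.var(dim=2) + eps
    feat_std = feat_var.sqrt().unsqueeze(-1).unsqueeze(-1)
    feat_mean = featHW.mean(dim=2).unsqueeze(-1).unsqueeze(-1)
    return feat_mean, feat_std

content = torch.randn(2, 64, 32, 32)
style = torch.randn(2, 64, 32, 32) * 3.0 + 1.0
out = din(content, style)
# The output carries roughly the per-channel mean/std of `style`.
print(out.view(2, 64, -1).mean(dim=2).mean(), out.view(2, 64, -1).std(dim=2).mean())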
Example #6
        self.ms = nn.ModuleList(
            [RK3Trans(n_feats, kernel_size, bias) for _ in range(3)])
        self.scale = nn.ModuleList(
            [ScaleLayer(s) for s in (0.5, 2.0, -1.0, 2 / 3, 1 / 6)])

    def forward(self, x):
        # Three sub-steps combined with the fixed weights in self.scale
        # (0.5, 2.0, -1.0, 2/3, 1/6): a classical third-order Runge-Kutta update.
        k1 = self.ms[0](x)
        yn_1 = self.scale[0](k1) + x                        # x + k1/2
        k2 = self.ms[1](yn_1)
        yn_2 = self.scale[1](k2) + self.scale[2](k1) + x    # x + 2*k2 - k1
        k3 = self.ms[2](yn_2)
        return self.scale[3](k2) + self.scale[4](k3 + k1) + x  # x + (k1 + 4*k2 + k3)/6


Down2 = lambda c_in, c_out: namedSequential(
    ('conv_input', Conv3x3(c_in, 32)), ('relu', nn.PReLU()),
    ('down', Conv3x3(32, 32, stride=2)), ('convt_R1', Conv3x3(32, c_out)),
    ('block', CARB(64)))


class Branch(nn.Module):
    def __init__(self,
                 deepFs,
                 combine=None,
                 cat=True,
                 u2=identity,
                 in_channels=64):
        super(Branch, self).__init__()
        self.inputF = namedSequential(('conv_input', Conv3x3(in_channels, 64)),
                                      ('relu', nn.PReLU()))
        if cat:
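
As noted in the comments on `forward` above, the five scale factors reproduce Kutta's third-order scheme. A quick stand-alone check of the coefficient identity, with a throwaway function in place of the three `RK3Trans` blocks (which are distinct learned modules in the real model):

import torch

f = torch.sin                     # hypothetical stand-in for self.ms[i]
x = torch.randn(4)

k1 = f(x)
k2 = f(0.5 * k1 + x)              # yn_1
k3 = f(2.0 * k2 - k1 + x)         # yn_2

out_module = (2 / 3) * k2 + (1 / 6) * (k3 + k1) + x
out_rk3 = x + (k1 + 4 * k2 + k3) / 6
print(torch.allclose(out_module, out_rk3))   # True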
Example #7
    def __init__(self, cin, cm, cout):
        super(Down, self).__init__()
        stride = 2 if cin == cm else 1
        self.relu = nn.PReLU()
        self.convt_R1 = Conv3x3(cm, cout)
        self.down = Conv3x3(cin, cm, stride)
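
The `stride = 2 if cin == cm else 1` rule means only the `Down` blocks whose input and mid channel counts match reduce resolution. A small sketch to see the resulting shapes; `Conv3x3` is not defined in these examples, so a hypothetical 3x3 conv with padding 1 stands in for it, and the forward matches the one shown in Example #8 below.

import torch
import torch.nn as nn

# Hypothetical stand-in for the Conv3x3 helper used throughout these examples.
def Conv3x3(cin, cout, stride=1):
    return nn.Conv2d(cin, cout, kernel_size=3, stride=stride, padding=1)

class Down(nn.Module):
    def __init__(self, cin, cm, cout):
        super(Down, self).__init__()
        stride = 2 if cin == cm else 1
        self.relu = nn.PReLU()
        self.convt_R1 = Conv3x3(cm, cout)
        self.down = Conv3x3(cin, cm, stride)

    def forward(self, x):
        return self.convt_R1(self.relu(self.down(x)))

x = torch.randn(1, 3, 64, 64)
print(Down(3, 32, 32)(x).shape)                     # stride 1 -> [1, 32, 64, 64]
print(Down(32, 32, 64)(Down(3, 32, 32)(x)).shape)   # stride 2 -> [1, 64, 32, 32]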
Example #8
        self.down = Conv3x3(cin, cm, stride)

    def forward(self, x):
        out = self.relu(self.down(x))
        LR = self.convt_R1(out)
        return LR


# one x2 upsampling step: a stride-2 transposed conv followed by PReLU
convTLayer = lambda cin, cout: [
    nn.ConvTranspose2d(cin, cout, 4, 2, 1),
    nn.PReLU()
]
# chain pairwise channel sizes through a layer factory, e.g.
# chainLayers((64, 64, 32), convTLayer) -> [convT(64, 64), PReLU, convT(64, 32), PReLU]
chainLayers = lambda l, f: sum(
    (f(c1, c2) for c1, c2 in zip(l[:-1], l[1:])), [])
# a branch chains x2 upsampling steps down to 32 channels, then a 3-channel output conv
Branch = lambda *l: nn.Sequential(*chainLayers(
    (*l, 32), convTLayer), Conv3x3(32, 3))
# fold helpers: accumulate each branch's output while passing the downsampled
# feature map on to the next (down, branch) pair
gNet = lambda f, acc, feat: (acc + f(feat), feat)
fNet = lambda acc, cur: gNet(cur[1], acc[0], cur[0](acc[1]))


class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.downs = nn.ModuleList(
            (Down(3, 32, 32), Down(32, 32, 64), Down(64, 64, 64),
             Down(64, 64, 64), Down(64, 64, 64)))
        branch1 = nn.Sequential(Conv3x3(32, 3), nn.PReLU())
        self.branches = nn.ModuleList(
            (branch1, Branch(64), Branch(64, 64), Branch(64, 64, 32),
             Branch(64, 64, 32, 32)))
        self.ms = tuple(zip(self.downs, self.branches))
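
The `fNet`/`gNet` helpers suggest that `Net.forward` folds over `self.ms`, adding up each branch's output while threading the progressively downsampled feature map through the `Down` blocks. A minimal numeric sketch of that fold with stand-in callables (hypothetical; in the real model the pairs are the `Down`/`Branch` modules built in `__init__`):

from functools import reduce

gNet = lambda f, acc, feat: (acc + f(feat), feat)
fNet = lambda acc, cur: gNet(cur[1], acc[0], cur[0](acc[1]))

downs = [lambda v: v * 0.5] * 3        # stand-ins for the Down blocks
branches = [lambda v: v + 1.0] * 3     # stand-ins for the branches
ms = tuple(zip(downs, branches))

out, _ = reduce(fNet, ms, (0.0, 8.0))
# Each step downsamples the running feature and adds the branch output to the sum:
# (0 + 5, 4) -> (8, 2) -> (10, 1)
print(out)   # 10.0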