def __init__(self, args, conv=Feat_Layer):
    """Build the ISR network: mean shift, head, spatial attention, tail.

    Fixes: removed a second, identical definition of `modules_tail` and the
    unused locals `n_Blk`, `n_resgroups`, `reduction`, and `act`.

    Args:
        args: namespace providing n_feats, scale, rgb_range, n_colors.
        conv: convolution factory used for head/tail layers.
    """
    super(ISR, self).__init__()
    n_feats = args.n_feats
    kernel_size = 3
    scale = args.scale[0]

    # RGB mean shift for DIV2K-range inputs.
    self.sub_mean = common.MeanShift(args.rgb_range)

    # Fusion conv: collapses 5 concatenated feature maps back to n_feats.
    # NOTE(review): this nn.Conv2d has no padding, so it shrinks spatial
    # size by kernel_size - 1 — confirm that is intended.
    h = [nn.Conv2d(n_feats * 5, n_feats, kernel_size)]
    self.he = nn.Sequential(*h)

    # Head: colors -> feature space.
    modules_head = [conv(args.n_colors, n_feats, kernel_size)]

    self.add_mean = common.MeanShift(args.rgb_range, sign=1)
    self.head = nn.Sequential(*modules_head)

    # Spatial attention block.
    self.SA0 = Space_attention(n_feats, n_feats, 1, 1, 0, 1)

    # Tail: upsample then project back to image colors.
    modules_tail = [
        common.Upsampler(conv, scale, n_feats, act=False),
        conv(n_feats, args.n_colors, kernel_size)
    ]
    self.tail = nn.Sequential(*modules_tail)

    self.ac = torch.nn.PReLU()

    modules_convo1 = [conv(n_feats, n_feats, kernel_size)]
    self.convo1 = nn.Sequential(*modules_convo1)
def __init__(self, args, conv=common.default_conv):
    """Two-stage EDSR: a 'first' and 'second' head/body/tail pipeline.

    NOTE(review): head_first/head_second and body_first/body_second are
    built from the *same* module lists, so both stages contain the exact
    same layer instances (tied weights). Confirm the sharing is intended.
    """
    super(EDSR, self).__init__()
    self.fullTrain = args.fullTrain
    self.fullInputScale = args.fullInputScale
    self.fullTargetScale = args.fullTargetScale
    # Second-stage scale comes directly from the input-scale spec.
    self.fulltrainSecondScale = int(self.fullInputScale[0])
    # First-stage scale is the remainder up to a total factor of 8.
    self.fulltrainFirstScale = int(8/int(self.fullTargetScale[0]))
    n_resblock = args.n_resblocks
    n_feats = args.n_feats
    kernel_size = 3
    #scale = args.scale[0]
    act = nn.ReLU(True)
    rgb_mean = (0.4488, 0.4371, 0.4040)  # DIV2K RGB mean
    self.sub_mean = common.MeanShift(args.rgb_range, rgb_mean, -1)
    # define head module
    modules_head = [conv(args.n_colors, n_feats, kernel_size)]
    # define body module: residual blocks plus a trailing conv
    modules_body = [
        common.ResBlock(
            conv, n_feats, kernel_size, act=act, res_scale=args.res_scale) \
        for _ in range(n_resblock)]
    modules_body.append(conv(n_feats, n_feats, kernel_size))
    # define tail modules, one upsampler per stage
    modules_tail_first = [
        common.Upsampler(conv, self.fulltrainFirstScale, n_feats, act=False),
        conv(n_feats, args.n_colors, kernel_size)]
    modules_tail_second = [
        common.Upsampler(conv, self.fulltrainSecondScale, n_feats, act=False),
        conv(n_feats, args.n_colors, kernel_size)]
    self.add_mean = common.MeanShift(args.rgb_range, rgb_mean, 1)
    self.head_first = nn.Sequential(*modules_head)
    self.body_first = nn.Sequential(*modules_body)
    self.tail_first = nn.Sequential(*modules_tail_first)
    #self.head_2 = nn.Sequential(*modules_head)
    #self.body_2 = nn.Sequential(*modules_body)
    #self.tail_2 = nn.Sequential(*modules_tail)
    self.head_second = nn.Sequential(*modules_head)
    self.body_second = nn.Sequential(*modules_body)
    self.tail_second = nn.Sequential(*modules_tail_second)
def __init__(self, args, conv=common.default_conv):
    """EDSR variant whose residual blocks use a Sigmoid activation."""
    super(EDSR, self).__init__()
    num_blocks = args.n_resblocks
    width = args.n_feats
    ksize = 3
    up_factor = args.scale[0]
    activation = nn.Sigmoid()  # sigmoid instead of the usual ReLU

    self.sub_mean = common.MeanShift(args.rgb_range)
    self.add_mean = common.MeanShift(args.rgb_range, sign=1)

    # Shallow feature extraction.
    head_layers = [conv(args.n_colors, width, ksize)]

    # Deep feature extraction: residual blocks plus one trailing conv.
    body_layers = [
        common.ResBlock(conv, width, ksize, act=activation,
                        res_scale=args.res_scale)
        for _ in range(num_blocks)
    ]
    body_layers.append(conv(width, width, ksize))

    # Upsampling and reconstruction.
    tail_layers = [
        common.Upsampler(conv, up_factor, width, act=False),
        conv(width, args.n_colors, ksize),
    ]

    self.head = nn.Sequential(*head_layers)
    self.body = nn.Sequential(*body_layers)
    self.tail = nn.Sequential(*tail_layers)
def __init__(self, args, conv=common.default_conv, n_feats=64):
    """LFF branch: conv head, five plain convs, upsampling tail.

    Fixes: removed the unused local `act = nn.ReLU(True)` (no layer
    consumed it) and corrected the `n_layes` typo.

    Args:
        args: namespace providing scale and n_colors.
        conv: convolution factory for head/body layers.
        n_feats: feature-map width (default 64).
    """
    super(LFF, self).__init__()
    kernel_size = 3
    n_layers = 5  # fixed body depth
    scale = args.scale[0]

    m_head = [conv(3, n_feats, kernel_size)]

    # Body: plain convolutions, no residual connections.
    m_body = [
        conv(n_feats, n_feats, kernel_size)
        for _ in range(n_layers)
    ]

    # Tail: upsample, then project to the output colors.
    m_tail = [
        common.Upsampler(conv, scale, n_feats, act=False),
        nn.Conv2d(n_feats, args.n_colors, kernel_size,
                  padding=(kernel_size // 2)),
    ]

    self.LLF_head = nn.Sequential(*m_head)
    self.LLF_body = nn.Sequential(*m_body)
    self.LLF_tail = nn.Sequential(*m_tail)
def __init__(self, args, conv=common.default_conv):
    """SPSR: shared residual body with per-kind pre-processing branches."""
    super(SPSR, self).__init__()
    depth = args.n_resblocks
    width = args.n_feats
    print(depth, width)
    ksize = 3
    relu = nn.ReLU(True)
    up_factor = args.scale[0]
    self.scale_idx = 0

    self.sub_mean = common.MeanShift(args.rgb_range)
    self.add_mean = common.MeanShift(args.rgb_range, sign=1)

    # Shallow feature extractor.
    head_layers = [conv(args.n_colors, width, ksize)]

    # One 5x5 residual pre-processing branch per input kind.
    self.pre_process = nn.ModuleList([
        nn.Sequential(common.ResBlock(conv, width, 5, act=relu), )
        for _ in range(args.kinds)
    ])

    # Shared deep body plus trailing conv.
    body_layers = [
        common.ResBlock(conv, width, ksize, act=relu)
        for _ in range(depth)
    ]
    body_layers.append(conv(width, width, ksize))

    # Single shared upsampler (the per-kind variant was dropped upstream).
    self.upsample = common.Upsampler(conv, up_factor, width, act=False)

    tail_layers = [conv(width, args.n_colors, ksize)]

    self.head = nn.Sequential(*head_layers)
    self.body = nn.Sequential(*body_layers)
    self.tail = nn.Sequential(*tail_layers)
def __init__(self, args, conv=common.default_conv):
    """MAMNet: MAMB blocks between a mean-shift head and upsampling tail."""
    super(MAMNet, self).__init__()
    depth = args.n_resblocks
    width = args.n_feats
    ksize = 3
    up_factor = args.scale[0]
    relu = nn.ReLU(True)

    self.sub_mean = common.MeanShift(args.rgb_range)
    self.add_mean = common.MeanShift(args.rgb_range, sign=1)

    # Shallow feature extraction.
    head_layers = [conv(args.n_colors, width, ksize)]

    # MAMB blocks plus a trailing conv.
    body_layers = [
        common.MAMB(conv, width, ksize, act=relu, res_scale=args.res_scale)
        for _ in range(depth)
    ]
    body_layers.append(conv(width, width, ksize))

    # Upsample and reconstruct.
    tail_layers = [
        common.Upsampler(conv, up_factor, width, act=False),
        conv(width, args.n_colors, ksize),
    ]

    self.head = nn.Sequential(*head_layers)
    self.body = nn.Sequential(*body_layers)
    self.tail = nn.Sequential(*tail_layers)

    # Scaled initialization of the body weights.
    common.initialize_weights(self.body, 0.1)
def __init__(self, args, conv=common.default_conv):
    """SRResNet with factorized residual blocks (SIC layers).

    NOTE(review): `act` is a single nn.PReLU() instance reused in the head,
    every residual block, and the upsampler tail. PReLU carries a learnable
    parameter, so all of these share (and jointly train) one slope —
    confirm the sharing is intentional.
    """
    super(SRResNet_Factor, self).__init__()
    n_resblock = args.n_resblocks
    n_feats = args.n_feats
    scale = args.scale[0]
    kernel_size = 3
    act = nn.PReLU()
    # 9x9 head conv followed by the shared PReLU.
    head = [conv(args.n_colors, n_feats, kernel_size=9), act]
    # Factorized residual blocks with batch norm.
    body = [
        common.ResBlock_Factor(common.conv_factor, n_feats, kernel_size,
                               bn=True, act=act, sic_layer=args.sic_layer)
        for _ in range(n_resblock)
    ]
    body.extend(
        [conv(n_feats, n_feats, kernel_size), nn.BatchNorm2d(n_feats)])
    # Upsampling tail, then projection back to image colors.
    tail = [
        common.Upsampler(conv, scale, n_feats, act=act),
        conv(n_feats, args.n_colors, kernel_size)
    ]
    self.head = nn.Sequential(*head)
    self.body = nn.Sequential(*body)
    self.tail = nn.Sequential(*tail)
def __init__(self, args, conv=common.default_conv):
    """Fine-tuning wrapper around a pretrained EDSR.

    Builds an inner EDSR with (n_resblocks - n_resblocks_ft) body blocks,
    reuses its head/body/mean-shift modules, and adds fresh fine-tune tails.
    """
    super(NHR_Res32, self).__init__()
    n_resblocks = args.n_resblocks
    # Temporarily shrink args.n_resblocks so the inner EDSR is built with
    # fewer blocks; the original value is restored at the end.
    args.n_resblocks = args.n_resblocks - args.n_resblocks_ft
    n_feats = args.n_feats
    kernel_size = 3
    scale = args.scale[0]
    act = nn.ReLU(True)
    # Fine-tune tail 2: residual blocks over n_feats + 4 channels (the
    # extra 4 channels are presumably concatenated side information —
    # confirm in forward()), then projection to the output colors.
    tail_ft2 = [
        common.ResBlock(conv, n_feats + 4, kernel_size, act=act,
                        res_scale=args.res_scale)
        for _ in range(args.n_resblocks_ft)
    ]
    tail_ft2.append(conv(n_feats + 4, args.n_colors, kernel_size))
    # Fine-tune tail 1: upsampling only.
    tail_ft1 = [
        common.Upsampler(conv, scale, n_feats, act=False),
    ]
    premodel = EDSR(args)
    self.sub_mean = premodel.sub_mean
    self.head = premodel.head
    body = premodel.body
    body_child = list(body.children())
    # Split the last layer off the pretrained body so it can be tuned
    # separately from the rest.
    body_ft = [body_child.pop()]
    self.body = nn.Sequential(*body_child)
    self.body_ft = nn.Sequential(*body_ft)
    self.tail_ft1 = nn.Sequential(*tail_ft1)
    self.tail_ft2 = nn.Sequential(*tail_ft2)
    self.add_mean = premodel.add_mean
    # Restore the caller's args.
    args.n_resblocks = n_resblocks
def __init__(self, args, conv=common.default_conv):
    """Standard EDSR: mean shift, conv head, residual body, upsampling tail."""
    super(EDSR, self).__init__()
    n_resblocks = args.n_resblocks  # 16
    n_feats = args.n_feats  # 64
    kernel_size = 3
    scale = args.scale[0]  # 4
    act = nn.ReLU(True)
    self.url = url['r{}f{}x{}'.format(n_resblocks, n_feats, scale)]
    self.sub_mean = common.MeanShift(args.rgb_range)  # normalize the input
    self.add_mean = common.MeanShift(args.rgb_range, sign=1)  # de-normalize the output
    # define head module
    m_head = [conv(args.n_colors, n_feats, kernel_size)]  # channels: 3 -> 64
    # define body module
    m_body = [  # 16 residual blocks
        common.ResBlock(  # args: 64, 3, relu, 1
            conv, n_feats, kernel_size, act=act, res_scale=args.res_scale)
        for _ in range(n_resblocks)
    ]
    m_body.append(conv(n_feats, n_feats, kernel_size))  # channels: 64 -> 64
    # define tail module
    m_tail = [
        common.Upsampler(conv, scale, n_feats, act=False),  # all upsampling happens here
        conv(n_feats, args.n_colors, kernel_size)
    ]
    self.head = nn.Sequential(*m_head)
    self.body = nn.Sequential(*m_body)
    self.tail = nn.Sequential(*m_tail)
def __init__(self, scale_list, model_path = 'weight/EDSR_weight.pt' ): super(EDSR, self).__init__() # args scale = scale_list[0] input_channel = 5 output_channel = 3 num_block = 16 inp = 64 rgb_range = 255 res_scale = 0.1 act = nn.ReLU(True) #act = nn.LeakyReLU(negative_slope=0.05, inplace=True) # head self.head = nn.Sequential( common.conv(3, inp, input_channel) ) # body self.body = nn.Sequential( *[ common.ResBlock(inp, bias = True, act = act, res_scale = res_scale) for _ in range( num_block) ] ) self.body.add_module( str(num_block), common.conv(inp, inp, 3) ) # tail if scale > 1: self.tail = nn.Sequential( *[ common.Upsampler(scale, inp, act = False, choice = 0), common.conv(inp, 3, output_channel) ] ) else: self.tail = nn.Sequential( *[ common.conv(inp, 3, output_channel) ] ) self.sub_mean = common.MeanShift(rgb_range, sign = -1) self.add_mean = common.MeanShift(rgb_range, sign = 1) self.model_path = model_path self.load()
def __init__(self, args, conv=common.default_conv):
    """VDSR-style network: conv+ReLU BasicBlocks with an upsampling tail.

    Fixes: the tail previously hard-coded 3 output channels while the head
    used args.n_colors; it now uses args.n_colors consistently (identical
    behavior for RGB input). The body construction is also simplified from
    append-then-loop to a single comprehension (same n_resblocks - 1
    blocks as before).
    """
    super(VDSR, self).__init__()
    n_resblocks = args.n_resblocks
    n_feats = args.n_feats
    kernel_size = 3
    scale = args.scale[0]

    self.sub_mean = common.MeanShift(args.rgb_range)
    self.add_mean = common.MeanShift(args.rgb_range, sign=1)

    m_head = [conv(args.n_colors, n_feats, kernel_size)]

    def basic_block(in_channels, out_channels, act):
        # conv + (optional) activation, no batch norm.
        return common.BasicBlock(
            conv, in_channels, out_channels, kernel_size,
            bias=True, bn=False, act=act
        )

    # Body: n_resblocks - 1 conv+ReLU blocks (the final projection block
    # was removed upstream; the tail conv plays that role now).
    m_body = [basic_block(n_feats, n_feats, nn.ReLU(False))
              for _ in range(n_resblocks - 1)]

    m_tail = [
        common.Upsampler(conv, scale, n_feats, act=False),
        conv(n_feats, args.n_colors, kernel_size)
    ]

    self.head = nn.Sequential(*m_head)
    self.tail = nn.Sequential(*m_tail)
    self.body = nn.Sequential(*m_body)
def __init__(self, opt):
    """Green-channel residual branch: conv head, two RRDB blocks, a final
    conv, and a x2 upsampling stage producing one output channel.

    Fixes: removed unused locals (sr_n_resblocks, dm_n_resblocks,
    sr_n_feats, scale, denoise, block_type) that were read from opt but
    never used.
    """
    super(green_res, self).__init__()
    dm_n_feats = opt.channels
    act_type = opt.act_type
    bias = opt.bias
    norm_type = opt.norm_type

    # Head: 2 input channels -> dm_n_feats (presumably the two green
    # samples of a Bayer pattern — confirm against the caller).
    self.head = common.ConvBlock(2, dm_n_feats, 5,
                                 act_type=act_type, bias=True)

    # Two residual-in-residual dense blocks with residual scaling 0.2.
    self.r1 = common.RRDB(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type,
                          act_type, 0.2)
    self.r2 = common.RRDB(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type,
                          act_type, 0.2)
    #self.r3 = common.RRDB2(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type, act_type, 0.2)
    #self.r4 = common.RRDB2(dm_n_feats, dm_n_feats, 3, 1, bias, norm_type, act_type, 0.2)

    self.final = common.ConvBlock(dm_n_feats, dm_n_feats, 3, bias=bias)

    # x2 upsample, project to a single channel, LeakyReLU output.
    self.up = nn.Sequential(
        common.Upsampler(2, dm_n_feats, norm_type, act_type, bias=bias),
        common.ConvBlock(dm_n_feats, 1, 3, bias=True),
        nn.LeakyReLU(0.2, inplace=True))
def __init__(self, args, conv=common.default_conv):
    """DNLN video-SR model: feature extraction, deformable alignment,
    non-local attention, and RRDB-based reconstruction."""
    super(DNLN, self).__init__()
    width = args.n_feats
    ksize = 3
    up_factor = args.scale[0]
    n_frames = args.n_frames
    self.n_deform_conv = args.n_deform_conv
    self.act = nn.LeakyReLU(negative_slope=0.1, inplace=True)

    # --- Feature extraction ---
    self.feat0 = conv(args.n_colors, width, ksize)
    extraction = [
        ResBlock(conv, width, ksize, bias=True, bn=False, act=self.act,
                 res_scale=1)
        for _ in range(5)
    ]
    extraction.append(conv(width, width, ksize))
    self.res_feat1 = nn.Sequential(*extraction)

    # --- Alignment: one fuse / HFFB / deformable-conv trio per stage ---
    self.fuse_layers = nn.ModuleList()
    self.HFFBs = nn.ModuleList()
    self.dconvs = nn.ModuleList()
    for _ in range(self.n_deform_conv):
        self.fuse_layers.append(
            nn.Conv2d(width * 2, width, kernel_size=3, padding=1,
                      bias=True))
        self.HFFBs.append(HFFB(width))
        self.dconvs.append(
            DeformConv(width, width, kernel_size=ksize, stride=1,
                       padding=1, dilation=1, groups=1,
                       deformable_groups=8, im2col_step=1))

    # --- Non-local attention ---
    self.non_local = NonLocalBlock(width, width // 2)

    # --- Reconstruction ---
    self.res_feat2 = RRDBNet(width * n_frames, width, nb=23, gc=32)
    self.tail = nn.Sequential(
        common.Upsampler(conv, up_factor, width, act=False),
        conv(width, args.n_colors, ksize),
    )
def __init__(self, conv=common.default_conv):
    """EDSR with fixed hyperparameters (32 blocks, 64 features, x2)."""
    super(EDSR, self).__init__()
    depth = 32
    width = 64
    ksize = 3
    up_factor = 2
    activation = 'relu'

    # Shallow feature extraction.
    head_layers = [conv(3, width, ksize)]

    # Residual body plus a trailing conv.
    body_layers = [
        common.ResBlock(width, ksize, act_type=activation, bias=True,
                        res_scale=1)
        for _ in range(depth)
    ]
    body_layers.append(conv(width, width, ksize))

    # Upsampling tail.
    tail_layers = [
        common.Upsampler(up_factor, width, norm_type=False, act_type=False),
        conv(width, 3, ksize),
    ]

    # DIV2K RGB mean normalization.
    self.sub_mean = ops.MeanShift((0.4488, 0.4371, 0.4040), sub=True)
    self.add_mean = ops.MeanShift((0.4488, 0.4371, 0.4040), sub=False)

    self.head = nn.Sequential(*head_layers)
    self.body = nn.Sequential(*body_layers)
    self.tail = nn.Sequential(*tail_layers)
def __init__(self, args, conv=common.default_conv):
    """SRResNet: 9x9 head + PReLU, BN residual body, upsampling tail.

    NOTE(review): the tail passes the *class* nn.PReLU (not an instance,
    and not the 'prelu' string the body blocks use) as Upsampler's act —
    depending on common.Upsampler's act handling this may silently add no
    activation. Confirm whether act='prelu' was intended.
    """
    super(SRResNet, self).__init__()
    n_resblock = args.n_resblocks
    n_feats = args.n_feats
    scale = args.scale[0]
    act_res_flag = args.act_res
    kernel_size = 3
    # Residual-block activation: disabled when args.act_res == 'No'.
    if act_res_flag == 'No':
        act_res = None
    else:
        act_res = 'prelu'
    head = [conv(args.n_colors, n_feats, kernel_size=9), nn.PReLU()]
    body = [
        common.ResBlock(conv, n_feats, kernel_size, bias=True, bn=True,
                        act=act_res)
        for _ in range(n_resblock)
    ]
    body.extend(
        [conv(n_feats, n_feats, kernel_size), nn.BatchNorm2d(n_feats)])
    tail = [
        common.Upsampler(conv, scale, n_feats, act=nn.PReLU),
        conv(n_feats, args.n_colors, kernel_size)
    ]
    self.head = nn.Sequential(*head)
    self.body = nn.Sequential(*body)
    self.tail = nn.Sequential(*tail)
def __init__(self, args, conv=common.default_conv):
    """RCAN: residual-in-residual groups with channel attention."""
    super(RCAN, self).__init__()
    groups = args.n_resgroups
    blocks_per_group = args.n_resblocks
    width = args.n_feats
    ksize = 3
    squeeze = args.reduction
    up_factor = args.scale[0]
    relu = nn.ReLU(True)

    # DIV2K RGB mean normalization.
    self.sub_mean = common.MeanShift(args.rgb_range)
    self.add_mean = common.MeanShift(args.rgb_range, sign=1)

    # Head: colors -> features.
    head_layers = [conv(args.n_colors, width, ksize)]

    # Body: residual groups plus a trailing conv.
    body_layers = [
        ResidualGroup(conv, width, ksize, squeeze, act=relu,
                      res_scale=args.res_scale,
                      n_resblocks=blocks_per_group)
        for _ in range(groups)
    ]
    body_layers.append(conv(width, width, ksize))

    # Tail: upsample, then features -> colors.
    tail_layers = [
        common.Upsampler(conv, up_factor, width, act=False),
        conv(width, args.n_colors, ksize),
    ]

    self.head = nn.Sequential(*head_layers)
    self.body = nn.Sequential(*body_layers)
    self.tail = nn.Sequential(*tail_layers)
def __init__(self, args, conv=common.default_conv):
    """RCAN variant built from MulConvGroup stages with a CRB tail.

    Fix: an unrecognized args.data_train previously left rgb_mean unbound
    and crashed later with a confusing NameError; it now fails fast with a
    clear ValueError.
    """
    super(RCAN, self).__init__()
    n_resgroups = args.n_resgroups - 1  # one group is built separately as prebody
    n_resblocks = args.n_resblocks
    n_feats = args.n_feats
    kernel_size = 3
    reduction = args.reduction
    scale = args.scale[0]
    act = nn.ReLU(True)

    # Dataset-dependent RGB mean for input normalization.
    if args.data_train == 'DIV2K':
        print('Use DIV2K mean (0.4488, 0.4371, 0.4040)')
        rgb_mean = (0.4488, 0.4371, 0.4040)
    elif args.data_train == 'DIVFlickr2K':
        print('Use DIVFlickr2K mean (0.4690, 0.4490, 0.4036)')
        rgb_mean = (0.4690, 0.4490, 0.4036)
    else:
        raise ValueError(
            'Unsupported data_train for RCAN mean shift: {}'.format(
                args.data_train))
    rgb_std = (1.0, 1.0, 1.0)
    self.sub_mean = common.MeanShift(args.rgb_range, rgb_mean, rgb_std)

    # define head module
    modules_head = [conv(args.n_colors, n_feats, kernel_size)]

    # Pre-body: a single MulConvGroup applied before the main body.
    modules_prebody = [
        MulConvGroup(conv, n_feats, kernel_size, reduction, act=act,
                     res_scale=args.res_scale, n_resblocks=n_resblocks)
    ]

    # Main body: remaining groups plus a trailing conv.
    modules_body = [
        MulConvGroup(conv, n_feats, kernel_size, reduction, act=act,
                     res_scale=args.res_scale, n_resblocks=n_resblocks)
        for _ in range(n_resgroups)
    ]
    modules_body.append(conv(n_feats, n_feats, kernel_size))

    # Upsampling, refinement (CRB), and final reconstruction stages.
    modules_up = [common.Upsampler(conv, scale, n_feats, act=False)]
    modules_tail = [CRB(conv, n_feats, kernel_size, reduction)]
    modules_re = [conv(n_feats, args.n_colors, kernel_size)]

    self.add_mean = common.MeanShift(args.rgb_range, rgb_mean, rgb_std, 1)

    self.head = nn.Sequential(*modules_head)
    self.prebody = nn.Sequential(*modules_prebody)
    self.body = nn.Sequential(*modules_body)
    self.up = nn.Sequential(*modules_up)
    self.tail = nn.Sequential(*modules_tail)
    self.re = nn.Sequential(*modules_re)
def __init__(self, conv=common.default_conv):
    """EDCNN: encoder/decoder-style stack of BaseBranch blocks at scale 1."""
    super(EDCNN, self).__init__()
    width = 64
    ksize = 3
    up_factor = 1  # no spatial upsampling
    rgb_range = 255

    self.sub_mean = common.MeanShift(rgb_range)
    self.add_mean = common.MeanShift(rgb_range, sign=1)

    head_layers = [conv(3, width, ksize)]

    # Channel widths expand 64 -> 512 then contract back to 64, with a
    # BaseBranch block at every stage.
    body_layers = [
        common.BaseBranch(width, ksize),
        conv(width, 128, ksize),
        common.BaseBranch(128, ksize),
        conv(128, 256, ksize),
        common.BaseBranch(256, ksize),
        conv(256, 512, ksize),
        common.BaseBranch(512, ksize),
        conv(512, 256, ksize),
        common.BaseBranch(256, ksize),
        conv(256, 128, ksize),
        common.BaseBranch(128, ksize),
        conv(128, 64, ksize),
        common.BaseBranch(64, ksize),
        conv(width, width, ksize),
    ]

    tail_layers = [
        common.Upsampler(conv, up_factor, width, act=False),
        conv(width, 3, ksize),
    ]

    self.head = nn.Sequential(*head_layers)
    self.body = nn.Sequential(*body_layers)
    self.tail = nn.Sequential(*tail_layers)
def __init__(self, scale=4, resgroups=10, resblocks=20, feats=64,
             conv=common.default_conv):
    """Single-channel RCAN (e.g. for grayscale / Y-channel SR).

    Fix: removed the no-op self-assignment `scale = scale`.

    Args:
        scale: upsampling factor.
        resgroups: number of residual groups.
        resblocks: residual blocks per group.
        feats: feature-map width.
        conv: convolution factory.
    """
    super(RCAN, self).__init__()
    n_resgroups = resgroups
    n_resblocks = resblocks
    n_feats = feats
    kernel_size = 3
    reduction = 16  # channel-attention squeeze ratio
    act = nn.ReLU(True)

    # Head: 1 input channel -> features.
    modules_head = [conv(1, n_feats, kernel_size)]

    # Body: residual groups plus a trailing conv.
    modules_body = [
        ResidualGroup(conv, n_feats, kernel_size, reduction, act=act,
                      res_scale=1.0, n_resblocks=n_resblocks)
        for _ in range(n_resgroups)
    ]
    modules_body.append(conv(n_feats, n_feats, kernel_size))

    # Tail: upsample, then back to 1 channel.
    modules_tail = [
        common.Upsampler(conv, scale, n_feats, act=False),
        conv(n_feats, 1, kernel_size)
    ]

    self.head = nn.Sequential(*modules_head)
    self.body = nn.Sequential(*modules_body)
    self.tail = nn.Sequential(*modules_tail)
def __init__(self, args, conv=common.default_conv):
    """EDSR_ATT: EDSR backbone whose body uses attention residual groups."""
    super(EDSR_ATT, self).__init__()
    groups = args.n_resgroups
    blocks_per_group = args.n_resblocks
    squeeze = args.reduction
    width = args.n_feats
    ksize = 3
    up_factor = args.scale[0]
    relu = nn.ReLU(True)
    self.url = ""  # no pretrained URL registered for this variant

    self.sub_mean = common.MeanShift(args.rgb_range)
    self.add_mean = common.MeanShift(args.rgb_range, sign=1)

    # Head: colors -> features.
    head_layers = [conv(args.n_colors, width, ksize)]

    # Body: attention residual groups plus a trailing conv.
    body_layers = [
        common.ResGroupAtt(conv, width, ksize, squeeze, act=relu,
                           res_scale=args.res_scale,
                           n_resblocks=blocks_per_group)
        for _ in range(groups)
    ]
    body_layers.append(conv(width, width, ksize))

    # Tail: upsample and reconstruct.
    tail_layers = [
        common.Upsampler(conv, up_factor, width, act=False),
        conv(width, args.n_colors, ksize),
    ]

    self.head = nn.Sequential(*head_layers)
    self.body = nn.Sequential(*body_layers)
    self.tail = nn.Sequential(*tail_layers)
def __init__(self, args, conv=common.default_conv, use_skip=False):
    """CARN-M: cascading blocks with 1x1 fusion convs and an upsampling tail."""
    super(CARN_M, self).__init__()
    factor = args.scale[0]
    in_ch = args.n_colors
    out_ch = args.n_colors
    width = 64

    self.use_skip = use_skip
    self.upscale_factor = factor
    self.sub_mean = common.MeanShift(args.rgb_range)
    self.add_mean = common.MeanShift(args.rgb_range, sign=1)

    # Shallow feature extraction.
    self.fea_in = nn.Conv2d(in_ch, width, 3, 1, 1)

    # Cascading body: each 1x1 conv fuses one more concatenated map.
    self.b1 = Block(width)
    self.c1 = nn.Sequential(nn.Conv2d(width * 2, width, 1, 1, 0),
                            nn.ReLU(True))
    self.b2 = Block(width)
    self.c2 = nn.Sequential(nn.Conv2d(width * 3, width, 1, 1, 0),
                            nn.ReLU(True))
    self.b3 = Block(width)
    self.c3 = nn.Sequential(nn.Conv2d(width * 4, width, 1, 1, 0),
                            nn.ReLU(True))

    # Reconstruction.
    self.upsampler = common.Upsampler(conv, factor, width, act=False)
    self.last_conv = nn.Conv2d(width, out_ch, 3, 1, 1)
def __init__(self, args, conv=common.default_conv):
    """EASR: RIR groups with per-stage global feature fusion (GFF).

    NOTE(review): `scale = args.scale` takes the whole scale container,
    whereas every sibling model here uses args.scale[0]; if args.scale is
    a list, common.Upsampler receives a list — confirm this is intended.
    """
    super(EASR, self).__init__()
    self.n_resgroups = args.n_resgroups  # 8
    self.n_resblocks = args.n_resblocks  # 16
    n_feats = args.n_feats
    kernel_size = 3
    scale = args.scale
    self.sub_mean = common.MeanShift(args.rgb_range)
    # define head module: initial feature extraction plus one refining conv
    modules_IFE =[conv(args.n_colors, n_feats, kernel_size)]
    modules_head = [ conv(n_feats, n_feats, kernel_size)]
    # define body module: one RIR group + one fusion unit per stage
    modules_body = nn.ModuleList()
    modules_GFF = nn.ModuleList()
    for i in range(self.n_resgroups):
        modules_body.append(RIRGroup(conv, n_feats, kernel_size, n_resblocks=self.n_resblocks))
        # Stage i fuses i + 2 feature maps (head output plus all
        # previous stages, presumably — confirm in forward()).
        modules_GFF.append(GFF_unit(n_feats, feat_in=(i+2)))
    modules_conv = [conv(n_feats, n_feats, kernel_size)]
    # define tail module
    modules_tail = [
        common.Upsampler(conv, scale, n_feats, act=False),
        conv(n_feats, args.n_colors, kernel_size)]
    self.add_mean = common.MeanShift(args.rgb_range, sign=1)
    self.IFE = nn.Sequential(*modules_IFE)
    self.head = nn.Sequential(*modules_head)
    self.body = modules_body
    self.GFF = modules_GFF
    self.conv = nn.Sequential(*modules_conv)
    self.tail = nn.Sequential(*modules_tail)
def __init__(self, args, conv=common.default_conv):
    """MDSR: one shared body with per-scale pre-processing and upsamplers."""
    super(MDSR, self).__init__()
    depth = args.n_resblocks
    width = args.n_feats
    ksize = 3
    relu = nn.ReLU(True)
    self.scale_idx = 0  # selects the active scale branch at runtime
    self.url = url['r{}f{}'.format(depth, width)]

    self.sub_mean = common.MeanShift(args.rgb_range)
    self.add_mean = common.MeanShift(args.rgb_range, sign=1)

    head_layers = [conv(args.n_colors, width, ksize)]

    # Two 5x5 residual blocks per supported scale.
    self.pre_process = nn.ModuleList([
        nn.Sequential(common.ResBlock(conv, width, 5, act=relu),
                      common.ResBlock(conv, width, 5, act=relu))
        for _ in args.scale
    ])

    # Shared deep body plus a trailing conv.
    body_layers = [
        common.ResBlock(conv, width, ksize, act=relu)
        for _ in range(depth)
    ]
    body_layers.append(conv(width, width, ksize))

    # One upsampler per supported scale.
    self.upsample = nn.ModuleList([
        common.Upsampler(conv, s, width, act=False)
        for s in args.scale
    ])

    tail_layers = [conv(width, args.n_colors, ksize)]

    self.head = nn.Sequential(*head_layers)
    self.body = nn.Sequential(*body_layers)
    self.tail = nn.Sequential(*tail_layers)
def __init__(self, args, conv=common.default_conv):
    """RFSN: RFSG groups, layer attention, refinement blocks, EDSR-style tail."""
    super(RFSN, self).__init__()
    groups = args.n_resgroups
    blocks_per_group = args.n_resblocks
    extra_blocks = 4
    width = args.n_feats
    ksize = 3
    up_factor = args.scale[0]

    # DIV2K statistics for input normalization.
    rgb_mean = (0.4488, 0.4371, 0.4040)
    rgb_std = (1.0, 1.0, 1.0)
    self.sub_mean = common.MeanShift(args.rgb_range, rgb_mean, rgb_std)
    self.add_mean = common.MeanShift(args.rgb_range, rgb_mean, rgb_std, 1)

    head_layers = [conv(args.n_colors, width, ksize)]

    group_layers = [
        RFSG(n_feat=width, n_resblocks=blocks_per_group)
        for _ in range(groups)
    ]

    refine_blocks = [ResBlock(n_feats=width) for _ in range(extra_blocks)]

    tail_layers = [
        common.Upsampler(conv, up_factor, width, act=False),
        conv(width, args.n_colors, ksize),
    ]

    self.head = nn.Sequential(*head_layers)
    self.body = nn.Sequential(*group_layers)
    self.LA = LA(channels=width, n_grou=groups)
    self.con = conv(width, width, ksize)
    self.rs = nn.Sequential(*refine_blocks)
    self.tail = nn.Sequential(*tail_layers)
def __init__(self, args, conv=common.default_conv):
    """EDSR variant with the mean-shift layers removed (see commented code)."""
    super(EDSR, self).__init__()
    depth = args.n_resblocks
    width = args.n_feats
    ksize = 3  # a 5x5 kernel was tried here historically
    up_factor = args.scale[0]
    relu = nn.ReLU(True)

    # Head: colors -> features.
    head_layers = [conv(args.n_colors, width, ksize)]

    # Body: residual blocks plus a trailing conv.
    body_layers = [
        common.ResBlock(conv, width, ksize, act=relu,
                        res_scale=args.res_scale)
        for _ in range(depth)
    ]
    body_layers.append(conv(width, width, ksize))

    # Tail: upsample and reconstruct.
    tail_layers = [
        common.Upsampler(conv, up_factor, width, act=False),
        conv(width, args.n_colors, ksize),
    ]

    self.head = nn.Sequential(*head_layers)
    self.body = nn.Sequential(*body_layers)
    self.tail = nn.Sequential(*tail_layers)
def __init__(self, args, conv=common.default_conv):
    """CARN with a small conv+BN denoising stack before upsampling."""
    super(CARN, self).__init__()
    up_factor = args.scale[0]

    # DIV2K RGB mean normalization.
    self.sub_mean = ops.MeanShift((0.4488, 0.4371, 0.4040), sub=True)
    self.add_mean = ops.MeanShift((0.4488, 0.4371, 0.4040), sub=False)

    # Shallow feature extraction.
    self.entry = nn.Conv2d(3, 64, 3, 1, 1)

    # Cascading blocks; each 1x1 BasicBlock fuses one more concat input.
    self.b1 = Block(64, 64)
    self.b2 = Block(64, 64)
    self.b3 = Block(64, 64)
    self.c1 = ops.BasicBlock(64 * 2, 64, 1, 1, 0)
    self.c2 = ops.BasicBlock(64 * 3, 64, 1, 1, 0)
    self.c3 = ops.BasicBlock(64 * 4, 64, 1, 1, 0)

    # Five ConvBN layers used as a denoiser.
    self.denoiser = nn.Sequential(*[ConvBN(64) for _ in range(5)])

    self.upsample = common.Upsampler(conv, up_factor, 64, act=False)
    self.exit = nn.Conv2d(64, 3, 3, 1, 1)
def __init__(self, args, conv=common.default_conv):
    """FADN: dynamic residual blocks (DyResBlock) in an EDSR-style frame.

    Fix: removed the unused local `act = nn.ReLU(True)` (no layer
    consumed it).
    """
    super(FADN, self).__init__()
    self.n_resblocks = args.n_resblocks
    n_feats = args.n_feats
    kernel_size = 3
    scale = args.scale[0]

    self.sub_mean = common.MeanShift(args.rgb_range)
    self.add_mean = common.MeanShift(args.rgb_range, sign=1)

    # define head module
    m_head = [conv(args.n_colors, n_feats, kernel_size)]

    # define body module: dynamic residual blocks plus a trailing conv
    m_body = [DyResBlock(kernel_size, n_feats, n_feats)
              for _ in range(self.n_resblocks)]
    m_body.append(conv(n_feats, n_feats, kernel_size))

    # define tail module
    m_tail = [
        common.Upsampler(conv, scale, n_feats, act=False),
        conv(n_feats, args.n_colors, kernel_size)
    ]

    self.head = nn.Sequential(*m_head)
    self.body = nn.Sequential(*m_body)
    self.tail = nn.Sequential(*m_tail)
def __init__(self, args, conv=common.default_conv):
    """WRANSR: residual attention groups of a configurable block type.

    Fix: removed the unused local `act = nn.ReLU(True)` (no layer
    consumed it).
    """
    super(WRANSR, self).__init__()
    n_resgroups = args.n_resgroups
    n_resblocks = args.n_resblocks
    n_feats = args.n_feats
    kernel_size = 3
    scale = args.scale[0]
    rabtype = args.rabtype

    # DIV2K statistics for input normalization.
    rgb_mean = (0.4488, 0.4371, 0.4040)
    rgb_std = (1.0, 1.0, 1.0)
    self.sub_mean = common.MeanShift(args.rgb_range, rgb_mean, rgb_std)
    self.add_mean = common.MeanShift(args.rgb_range, rgb_mean, rgb_std, 1)

    # head
    m_head = [conv(args.n_colors, n_feats, kernel_size)]

    # body: attention groups (note: no trailing conv here)
    m_body = [
        ResidualAttentionGroup(rabtype, conv, n_feats, kernel_size,
                               n_resblocks)
        for _ in range(n_resgroups)
    ]

    # tail
    m_tail = [
        common.Upsampler(conv, scale, n_feats, act=False),
        conv(n_feats, args.n_colors, kernel_size)
    ]

    self.head = nn.Sequential(*m_head)
    self.body = nn.Sequential(*m_body)
    self.tail = nn.Sequential(*m_tail)
def __init__(self, args, conv=common.default_conv, n_feats=64):
    """Edge_Net: MSRB stack whose concatenated outputs are fused, upsampled,
    and projected to the output colors.

    Fix: removed the unused local `act = nn.ReLU(True)` (no layer
    consumed it).
    """
    super(Edge_Net, self).__init__()
    kernel_size = 3
    scale = args.scale[0]
    n_blocks = 5
    self.n_blocks = n_blocks

    modules_head = [conv(3, n_feats, kernel_size)]

    # Multi-scale residual blocks; the tail's fusion conv expects
    # n_blocks + 1 concatenated maps (head output plus every block output,
    # presumably — confirm in forward()).
    modules_body = nn.ModuleList()
    for _ in range(n_blocks):
        modules_body.append(MSRB())

    # 1x1 fusion of the concatenated maps, refinement, upsample, project.
    modules_tail = [
        nn.Conv2d(n_feats * (self.n_blocks + 1), n_feats, 1,
                  padding=0, stride=1),
        conv(n_feats, n_feats, kernel_size),
        common.Upsampler(conv, scale, n_feats, act=False),
        conv(n_feats, args.n_colors, kernel_size)]

    self.Edge_Net_head = nn.Sequential(*modules_head)
    self.Edge_Net_body = nn.Sequential(*modules_body)
    self.Edge_Net_tail = nn.Sequential(*modules_tail)
def __init__(self, args, conv3x3=common.default_conv,
             conv1x1=common.default_conv):
    """SRResNet variant for clustered convolutions; loads pretrained weights
    when built with the default (non-clustered) conv."""
    super(SRRESNET_CLUSTER, self).__init__()
    depth = args.n_resblocks
    width = args.n_feats
    up_factor = args.scale[0]
    ksize = 3
    # Residual-block activation: disabled when args.act_res == 'No'.
    act_res = None if args.act_res == 'No' else 'prelu'

    head_layers = [
        common.default_conv(args.n_colors, width, kernel_size=9),
        nn.PReLU(),
    ]

    body_layers = [
        common.ResBlock(conv3x3, width, ksize, bn=True, act=act_res)
        for _ in range(depth)
    ]
    body_layers.extend([conv3x3(width, width, ksize),
                        nn.BatchNorm2d(width)])

    tail_layers = [
        common.Upsampler(conv3x3, up_factor, width, act=act_res),
        conv3x3(width, args.n_colors, ksize),
    ]

    self.head = nn.Sequential(*head_layers)
    self.body = nn.Sequential(*body_layers)
    self.tail = nn.Sequential(*tail_layers)

    # Only the plain-conv configuration matches the checkpoint layout.
    if conv3x3 == common.default_conv:
        self.load_state_dict(torch.load(args.pretrain_cluster))