def __init__(self, stem_channels=64, mid_channels=192, growth_rate=48, num_units=(6, 8, 8, 8)):
    """Build the dense backbone: stem block, dense stages, and a trailing BN + activation.

    Args:
        stem_channels: channel width of the stem; the stem doubles it on output.
        mid_channels: bottleneck width inside each DenseUnit.
        growth_rate: channels added by every DenseUnit.
        num_units: DenseUnits per stage, one entry per stage.
    """
    super().__init__()
    self.features = nn.Sequential()
    self.features.add_module("init_block", StemBlock(stem_channels))
    in_channels = stem_channels * 2
    last_stage = len(num_units) - 1
    for stage_idx, unit_count in enumerate(num_units):
        stage = nn.Sequential()
        # All stages except the final one open with a (non-widening) transition.
        if stage_idx < last_stage:
            stage.add_module("trans", Transition(in_channels, in_channels))
        for unit_idx in range(1, unit_count + 1):
            unit = DenseUnit(in_channels, mid_channels, growth_rate)
            stage.add_module("unit%d" % unit_idx, unit)
            in_channels += growth_rate
        self.features.add_module("stage%d" % (stage_idx + 1), stage)
    # Final normalization + activation over the accumulated channels.
    post_activ = seq(
        ("bn", Norm("default", in_channels)),
        ("relu", Act("default")),
    )
    self.features.add_module("post_activ", post_activ)
def __init__(self, in_channels, channels=64, out_channels=3, use_sn=True, non_local=True):
    """Generator: project a latent vector to a 6x6 map, then upsample to an image via ResBlocks.

    Args:
        in_channels: dimensionality of the input latent vector.
        channels: base channel width; internal widths are multiples of it.
        out_channels: image channels produced by the final conv (3 = RGB).
        use_sn: apply spectral normalization to the dense layer, the ResBlocks,
            and the final conv.
        non_local: insert a self-attention block between block2 and block3.
    """
    super().__init__()
    self.in_channels = in_channels
    # Latent -> flattened 6x6 feature map with channels*8 planes.
    self.dense = nn.Linear(in_channels, 6 * 6 * channels * 8)
    self.conv = seq(
        ("block1", ResBlock(
            channels * 8, channels * 4, 'up', use_sn=use_sn)),
        ("block2", ResBlock(
            channels * 4, channels * 2, 'up', use_sn=use_sn)),
        # Optional attention; presumably `seq` skips None entries — TODO confirm.
        ("attn", SelfAttention2(channels * 2) if non_local else None),
        ("block3", ResBlock(
            channels * 2, channels * 1, 'up', use_sn=use_sn)),
        ("bn", nn.BatchNorm2d(channels * 1)),
        ("relu", nn.ReLU(True)),
        ("conv", nn.Conv2d(channels * 1, out_channels, kernel_size=3, padding=1)),
        # Output squashed to [-1, 1].
        ("tanh", nn.Tanh()),
    )
    if use_sn:
        spectral_norm(self.dense)
        # NOTE(review): no module named "stem" is registered above (the final
        # Conv2d is named "conv"). Unless `seq` itself defines `.stem`, this
        # attribute access fails — verify; `self.conv.conv` may be intended.
        spectral_norm(self.conv.stem)
def __init__(self, stem_channels=64, mid_channels=192, growth_rate=48, num_units=(6, 8, 8, 8)):
    """Dense backbone for detection: stem, pooled dense stages, then extra downsampling blocks.

    Fix: the original hard-coded ``del self.stage4.pool``, which raises
    AttributeError for any ``num_units`` whose length is not 4; the pool of
    the *last* stage is now removed generically. Behavior at the default
    configuration is unchanged.

    Args:
        stem_channels: stem width; the stem doubles it on output.
        mid_channels: bottleneck width inside each DenseUnit.
        growth_rate: channels added per DenseUnit.
        num_units: DenseUnits per stage, one entry per stage
            (assumed len >= 2; ``out_channels[-3]`` below also needs
            at least two stages — TODO confirm caller contract).
    """
    super().__init__()
    self.init_block = StemBlock(stem_channels)
    in_channels = stem_channels * 2
    out_channels = [in_channels]
    for i, n in enumerate(num_units):
        stage = nn.Sequential()
        # Each stage downsamples first; ceil_mode keeps odd sizes covered.
        stage.add_module(
            "pool", nn.MaxPool2d(kernel_size=2, stride=2, ceil_mode=True))
        for j in range(n):
            stage.add_module(
                "unit%d" % (j + 1),
                DenseUnit(in_channels, mid_channels, growth_rate))
            in_channels += growth_rate
        # All but the last stage close with a (non-widening) transition.
        if i != len(num_units) - 1:
            stage.add_module("trans", Transition(in_channels, in_channels))
        out_channels.append(in_channels)
        self.add_module("stage%d" % (i + 1), stage)
    self.post_activ = seq(
        ("bn", Norm("default", in_channels)),
        ("relu", Act("default")),
    )
    # The last stage keeps its resolution: drop its leading pool.
    # (Was hard-coded as `del self.stage4.pool`.)
    del getattr(self, "stage%d" % len(num_units)).pool
    self.trans = Transition(out_channels[-1], out_channels[-1])
    self.proj = Transition(out_channels[-1], 512)
    # Extra blocks feeding additional detection scales.
    self.extra1 = BasicBlock(512, 512)
    self.extra2 = BasicBlock(512, 256)
    self.extra3 = BasicBlock(256, 256)
    self.extra4 = BasicBlock(256, 256)
    # Channel widths of the feature maps exposed to the detection head.
    self.out_channels = [out_channels[-3], 512, 512, 256, 256, 256]
def __init__(self, stem_channels=64, mid_channels=192, growth_rate=48, num_units=(6, 8, 8, 8)):
    """DSOD-style backbone: stem, pooled dense stages, then dense-supervision heads.

    Fixes: removed a leftover debug ``print(out_channels)``, and replaced the
    hard-coded ``del self.stage4.pool`` (which raises AttributeError for any
    ``num_units`` whose length is not 4) with a generic deletion on the last
    stage. Behavior at the default configuration is unchanged.

    Args:
        stem_channels: stem width; the stem doubles it on output.
        mid_channels: bottleneck width inside each DenseUnit.
        growth_rate: channels added per DenseUnit.
        num_units: DenseUnits per stage, one entry per stage
            (assumed len >= 2; ``out_channels[-3]`` below also needs
            at least two stages — TODO confirm caller contract).
    """
    super().__init__()
    self.init_block = StemBlock(stem_channels)
    in_channels = stem_channels * 2
    out_channels = [in_channels]
    for i, n in enumerate(num_units):
        stage = nn.Sequential()
        # Each stage downsamples first; ceil_mode keeps odd sizes covered.
        stage.add_module(
            "pool", nn.MaxPool2d(kernel_size=2, stride=2, ceil_mode=True))
        for j in range(n):
            stage.add_module(
                "unit%d" % (j + 1),
                DenseUnit(in_channels, mid_channels, growth_rate))
            in_channels += growth_rate
        # All but the last stage close with a (non-widening) transition.
        if i != len(num_units) - 1:
            stage.add_module(
                "trans", Transition(in_channels, in_channels))
        out_channels.append(in_channels)
        self.add_module("stage%d" % (i + 1), stage)
    self.post_activ = seq(
        ("bn", get_norm_layer("default", in_channels)),
        ("relu", get_activation("default")),
    )
    # The last stage keeps its resolution: drop its leading pool.
    # (Was hard-coded as `del self.stage4.pool`.)
    del getattr(self, "stage%d" % len(num_units)).pool
    self.trans = Transition(out_channels[-1], 256)
    # Dense-supervision heads producing the multi-scale detection features.
    self.ds1 = DenseSupervision1(out_channels[-3], 256)
    self.ds2 = DenseSupervision(512, 256)
    self.ds3 = DenseSupervision(512, 128)
    self.ds4 = DenseSupervision(256, 128)
    self.ds5 = DenseSupervision(256, 128)
    # Channel widths of the feature maps exposed to the detection head.
    self.out_channels = [out_channels[-3], 512, 512, 256, 256, 256]