def __init__(self, in_ch, out_ch, ring=True):
    # Pad circularly along the horizontal axis when the input wraps around
    # (ring=True), otherwise reflect; a 4x4 stride-2 conv then halves both
    # spatial dimensions before the fused leaky-ReLU activation.
    horizontal = "circular" if ring else "reflect"
    super().__init__(
        ops.Pad(padding=1, horizontal=horizontal, vertical="reflect"),
        ops.EqualLR(nn.Conv2d(in_ch, out_ch, 4, 2, 0, bias=False)),
        ops.FusedLeakyReLU(out_ch),
    )
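# Shape sketch for the Down block above, with plain F.pad / nn.Conv2d
# standing in for the project-local ops.* wrappers (ops.Pad is assumed to
# add one pixel per side): after padding, the 4x4 stride-2 conv halves
# both spatial dimensions.
import torch
import torch.nn as nn
import torch.nn.functional as F

x = torch.randn(1, 64, 32, 128)
x = F.pad(x, (1, 1, 1, 1), mode="reflect")        # (1, 64, 34, 130)
y = nn.Conv2d(64, 128, 4, 2, 0, bias=False)(x)    # halved spatial dims
assert y.shape == (1, 128, 16, 64)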
def __init__(self, in_ch, out_ch={"rgb": 3}, ring=True):
    # One upsampling head per named output (e.g. "rgb" -> 3 channels).
    super().__init__()
    assert isinstance(out_ch, dict)
    self.in_ch = in_ch
    self.heads = nn.ModuleDict()
    horizontal = "circular" if ring else "reflect"
    for name, ch in out_ch.items():
        # Pad by 1, then a 4x4 stride-2 transposed conv with padding 1 + 2
        # (the extra 2 crops the output rows/columns produced by the
        # explicit padding) doubles the spatial resolution.
        self.heads[name] = nn.Sequential(
            ops.Pad(padding=1, horizontal=horizontal, vertical="reflect"),
            ops.EqualLR(
                nn.ConvTranspose2d(in_ch, ch, 4, 2, 1 + 2, bias=True)),
        )
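# Shape sketch for one output head above (F.pad stands in for ops.Pad; the
# real op is assumed to pad circularly along the horizontal axis when
# ring=True): pad by 1, then ConvTranspose2d(kernel=4, stride=2, padding=3)
# doubles the resolution, since (H + 2 - 1) * 2 - 2 * 3 + 4 = 2 * H.
import torch
import torch.nn as nn
import torch.nn.functional as F

x = torch.randn(1, 512, 4, 16)
x = F.pad(x, (1, 1, 1, 1), mode="reflect")             # (1, 512, 6, 18)
y = nn.ConvTranspose2d(512, 3, 4, 2, 3, bias=True)(x)
assert y.shape == (1, 3, 8, 32)                        # doubled: (2*4, 2*16)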
def __init__(self, in_ch, ch_base=64, ch_max=512, shape=(64, 256), ring=True):
    # Four stride-2 Down blocks divide each spatial dimension by 16.
    shape_out = (shape[0] >> 4, shape[1] >> 4)
    # Channel schedule: double per level, capped at ch_max.
    ch = lambda i: min(ch_base << i, ch_max)
    super().__init__(
        ops.BlurVH(ring),
        Down(in_ch * 2, ch(0), ring),
        Down(ch(0), ch(1), ring),
        Down(ch(1), ch(2), ring),
        Down(ch(2), ch(3), ring),
        # The final conv collapses the remaining shape_out map to one logit.
        ops.EqualLR(nn.Conv2d(ch(3), 1, shape_out, 1, 0)),
    )
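# Sanity-check sketch for the constructor above: the channel schedule
# doubles per level up to ch_max, and four stride-2 Down blocks shrink
# shape=(64, 256) by 16x, which is exactly the kernel of the final
# 1-logit Conv2d. The values below mirror the constructor defaults.
ch_base, ch_max, shape = 64, 512, (64, 256)
ch = lambda i: min(ch_base << i, ch_max)
assert [ch(i) for i in range(4)] == [64, 128, 256, 512]
assert (shape[0] >> 4, shape[1] >> 4) == (4, 16)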
def __init__(self, in_ch, out_ch, kernel=(4, 16)):
    # A kernel-sized transposed conv (stride 1, no padding) grows the input
    # by kernel-1 in each dimension; from a 1x1 latent this yields the
    # initial (4, 16) feature map, followed by a fused leaky-ReLU.
    super().__init__(
        ops.EqualLR(
            nn.ConvTranspose2d(in_ch, out_ch, kernel, 1, 0, bias=False)),
        ops.FusedLeakyReLU(out_ch),
    )
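# Minimal sketch of the block above: a kernel-sized transposed conv with
# stride 1 and no padding grows a 1x1 latent into the initial (4, 16)
# feature map (plain nn.ConvTranspose2d stands in for the ops.EqualLR /
# ops.FusedLeakyReLU wrappers; in_ch=512, out_ch=64 are assumed values).
import torch
import torch.nn as nn

z = torch.randn(2, 512, 1, 1)                             # (N, in_ch, 1, 1)
y = nn.ConvTranspose2d(512, 64, (4, 16), 1, 0, bias=False)(z)
assert y.shape == (2, 64, 4, 16)                          # (N, out_ch, 4, 16)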