import math
from functools import reduce
from operator import mul

import torch.nn as nn

import utils  # project-local module providing utils.Resize


class Critic(nn.Module):
    def __init__(self, ascii, shapes):
        super(Critic, self).__init__()
        filters = 64
        #self.init_shape = (filters, shapes[0][0], shapes[0][1])
        #self.preprocess = nn.Sequential(
        #    nn.Linear(z_size, reduce(mul, self.init_shape, 1)),
        #    nn.ReLU(True))
        # One resize/conv/norm block per intermediate shape, doubling the
        # channel count at each step.
        self.blocks = nn.ModuleList()
        in_ch = len(ascii)
        out_ch = filters
        for s in shapes[1:-1]:
            block = nn.Sequential(
                utils.Resize(s),
                nn.Conv2d(in_ch, out_ch, 3, padding=1),
                nn.BatchNorm2d(out_ch),
                # nn.LeakyReLU(True) would set negative_slope=1 (an identity);
                # inplace=True is the intended keyword here.
                nn.LeakyReLU(inplace=True))
            in_ch = out_ch
            out_ch = in_ch * 2
            self.blocks.append(block)
        # Collapse the final feature map to one value per channel.
        self.postprocess = nn.Sequential(
            utils.Resize(shapes[-1]),
            nn.AdaptiveAvgPool2d(1))
        # Three sigmoid heads on the pooled features: winner, steps, compiles.
        self.winner = nn.Sequential(nn.Linear(in_ch, 1), nn.Sigmoid())
        self.steps = nn.Sequential(nn.Linear(in_ch, 1), nn.Sigmoid())
        self.compiles = nn.Sequential(nn.Linear(in_ch, 1), nn.Sigmoid())
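    # A minimal forward-pass sketch (assumed, not from the original source):
    # it takes a one-hot character grid of shape (batch, len(ascii), H, W),
    # runs it through the blocks, pools, and evaluates the three heads.
    def forward(self, x):
        for block in self.blocks:
            x = block(x)
        x = self.postprocess(x)        # (batch, channels, 1, 1)
        x = x.view(x.size(0), -1)      # flatten for the linear heads
        return self.winner(x), self.steps(x), self.compiles(x)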
class Generator(nn.Module):
    def __init__(self, mapping, shapes, z_shape, dropout):
        # Note: dropout is accepted but not used anywhere in this constructor.
        super(Generator, self).__init__()
        self.z_size = z_shape[0]
        filters = 512
        self.init_shape = (filters, *shapes[0])
        # Project the latent vector to a flat tensor that can be reshaped to
        # init_shape before the convolutional blocks.
        self.preprocess = nn.Sequential(
            nn.Linear(self.z_size, reduce(mul, self.init_shape), bias=False),
            # As in the Critic, inplace=True is the intended keyword;
            # nn.LeakyReLU(True) would set negative_slope=1.
            nn.LeakyReLU(inplace=True))
        # Each block resizes to the next shape and halves the channel count
        # with two 3x3 convolutions.
        self.blocks = nn.ModuleList()
        in_ch = filters
        for s in shapes[1:-1]:
            out_ch = in_ch // 2
            block = nn.Sequential(
                utils.Resize(s),
                nn.Conv2d(in_ch, out_ch, 3, padding=1, bias=False),
                nn.LeakyReLU(inplace=True),
                nn.Conv2d(out_ch, out_ch, 3, padding=1, bias=False),
                nn.BatchNorm2d(out_ch),
                nn.LeakyReLU(inplace=True),
            )
            in_ch = out_ch
            self.blocks.append(block)
        # Final resize plus a per-cell distribution over the character mapping.
        out_ch = len(mapping)
        self.output = nn.Sequential(
            utils.Resize(shapes[-1]),
            nn.Conv2d(in_ch, out_ch, 3, padding=1, bias=True),
            nn.Softmax2d())
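    # A minimal forward-pass sketch (assumed, not from the original source):
    # project the latent vector z, reshape to the initial feature map,
    # upsample through the blocks, and emit per-cell character probabilities.
    def forward(self, z):
        x = self.preprocess(z)                   # (batch, prod(init_shape))
        x = x.view(x.size(0), *self.init_shape)  # (batch, 512, h0, w0)
        for block in self.blocks:
            x = block(x)
        return self.output(x)                    # (batch, len(mapping), H, W)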
class Decoder(nn.Module):
    def __init__(self, num_outputs, hidden_size=512, shapes=()):
        super(Decoder, self).__init__()
        in_ch = 32
        self.initial_shape = (in_ch, *shapes[0])
        # Expand the hidden vector to a flat tensor matching initial_shape.
        self.features = nn.Linear(hidden_size, in_ch * reduce(mul, shapes[0]))
        #[(3, 4),(6, 8),(12, 16)]
        # Channel schedule: double up to the halfway point, then halve back
        # down, and map to num_outputs channels in the last block.
        self.blocks = nn.ModuleList()
        halfway = math.ceil(len(shapes) / 2)
        for i, s in enumerate(shapes):
            if i == len(shapes) - 1:
                out_ch = num_outputs
            elif i + 1 == halfway and len(shapes) % 2 == 0:
                out_ch = in_ch
            elif i + 1 < halfway:
                out_ch = in_ch * 2
            else:
                out_ch = in_ch // 2
            block = nn.Sequential(
                utils.Resize(s),
                nn.Conv2d(in_ch, out_ch, 3, padding=1),
                nn.ReLU())
            in_ch = out_ch
            self.blocks.append(block)
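    # A minimal forward-pass sketch (assumed, not from the original source):
    # expand the hidden vector into the initial feature map, then run it
    # through the resize/conv blocks.
    def forward(self, h):
        x = self.features(h)                        # (batch, 32 * h0 * w0)
        x = x.view(x.size(0), *self.initial_shape)  # (batch, 32, h0, w0)
        for block in self.blocks:
            x = block(x)
        return x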