def forward(self, x):
    x = self.layer00(x)
    x = x[:, 64:70]  # Extract at hardcoded z_gaze indices
    x = F.selu_(x)
    x = self.layer01(x)
    x = F.selu_(x)
    x = self.layer02(x)
    x = F.normalize(x, dim=-1)  # Normalize to unit length
    return x
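# Because the output above is L2-normalized, dot products between two outputs
# are cosine similarities. A usage sketch; `head` is a hypothetical instance
# of the module above, and the shapes assume layer00 emits at least 70
# features so the hardcoded 64:70 gaze slice is valid.
import torch

def gaze_similarity(head, x_a, x_b):
    g_a = head(x_a)                 # unit-norm gaze embedding
    g_b = head(x_b)
    return (g_a * g_b).sum(dim=-1)  # cosine similarity in [-1, 1]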
def forward(self, x):
    x = F.selu_(self.bn1(self.Conv_1(x)))
    x = F.selu_(self.bn2(self.Conv_2(x)))
    x = F.selu_(self.bn3(self.Conv_3(x)))
    x = x.view((x.shape[0], -1))  # Flatten feature maps
    x = F.selu_(self.bn4(self.l4(x)))
    x = F.selu_(self.bn5(self.l5(x)))
    x = self.l6(x)
    if not self.wass:
        # Vanilla GAN: squash to a probability; a Wasserstein critic
        # returns the raw score instead.
        x = torch.sigmoid(x)
    return x
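# A minimal sketch of the two loss regimes implied by the `wass` flag; `disc`,
# `x_real`, and `x_fake` are hypothetical stand-ins, not names from this code.
import torch
import torch.nn as nn

def discriminator_loss(disc, x_real, x_fake):
    if not disc.wass:
        # Sigmoid output: binary cross-entropy against real/fake labels.
        bce = nn.BCELoss()
        ones = torch.ones(x_real.shape[0], 1)
        zeros = torch.zeros(x_fake.shape[0], 1)
        return bce(disc(x_real), ones) + bce(disc(x_fake), zeros)
    # Raw critic score: Wasserstein objective (minimized by the critic).
    return disc(x_fake).mean() - disc(x_real).mean()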
def forward(self, x):
    x = F.selu_(self.bn1(self.Conv_1(x)))
    x = F.selu_(self.bn2(self.Conv_2(x)))
    x = F.selu_(self.bn3(self.Conv_3(x)))
    x = x.view((x.shape[0], -1))  # Flatten feature maps
    x = F.selu_(self.bn4(self.l4(x)))
    z_img = self.l5(x)
    z = z_img.view(z_img.shape[0], -1)
    # Split the latent code into a continuous part zn and a categorical part zc
    zn = z[:, 0:self.latent_dim]
    zc_logits = z[:, self.latent_dim:]
    zc = F.softmax(zc_logits, dim=1)
    return zn, zc, zc_logits
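# A sketch of how the split latent might be consumed, in the ClusterGAN style
# of regressing generated images back to their latent codes; `enc`, `zn_true`,
# and `zc_idx` are assumptions, not names from this code. zc_logits is returned
# alongside zc so cross-entropy can act on the logits directly, which is more
# numerically stable than taking the log of the softmax output.
import torch.nn.functional as F

def latent_recovery_losses(enc, x_gen, zn_true, zc_idx):
    zn_rec, zc_rec, zc_logits = enc(x_gen)
    zn_loss = F.mse_loss(zn_rec, zn_true)         # continuous code recovery
    zc_loss = F.cross_entropy(zc_logits, zc_idx)  # categorical code recovery
    return zn_loss, zc_loss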
def forward(self, x):
    # Apply residual blocks rnb_1 .. rnb_18 in order
    for i in range(1, 19):
        x = getattr(self, 'rnb_%d' % i)(x)
    x = x.view((x.shape[0], -1))  # Flatten feature maps
    x = F.selu_(self.bn_19(self.linear_19(x)))
    z_img = self.linear_20(x)
    z = z_img.view(z_img.shape[0], -1)
    # Split the latent code into a continuous part zn and a categorical part zc
    zn = z[:, 0:self.latent_dim]
    zc_logits = z[:, self.latent_dim:]
    zc = F.softmax(zc_logits, dim=1)
    return zn, zc, zc_logits
def forward(self, zn, zc):
    z = torch.cat((zn, zc), 1)  # Concatenate continuous and categorical codes
    x = F.selu_(self.bn1(self.l1(z)))
    x = F.selu_(self.bn2(self.l2(x)))
    x = x.view([x.shape[0]] + self.ishape)  # Reshape to initial feature map
    x = self.rnb_1(x)
    x = self.rnb_2(x)
    x = self.rnb_3(x)                                     # 64x4x4
    x = F.interpolate(x, scale_factor=2, mode='bicubic')  # 64x8x8
    x = self.rnb_4(x)
    x = self.rnb_5(x)
    x = self.rnb_6(x)                                     # 32x8x8
    x = F.interpolate(x, scale_factor=2, mode='bicubic')  # 32x16x16
    x = self.rnb_7(x)
    x = self.rnb_8(x)
    x = self.rnb_9(x)                                     # 16x16x16
    x = F.interpolate(x, scale_factor=2, mode='bicubic')  # 16x32x32
    x = self.rnb_10(x)
    x = self.rnb_11(x)
    x = self.rnb_12(x)                                    # 8x32x32
    x = F.interpolate(x, scale_factor=2, mode='bicubic')  # 8x64x64
    x = self.rnb_13(x)
    x = self.rnb_14(x)
    x = self.rnb_15(x)                                    # 4x64x64
    x = F.interpolate(x, scale_factor=2, mode='bicubic')  # 4x128x128
    x = self.rnb_16(x)
    x = self.rnb_17(x)
    x = self.rnb_18(x)                                    # 2x128x128
    x = F.interpolate(x, scale_factor=2, mode='bicubic')  # 2x256x256
    x = self.rnb_19(x)
    x = self.rnb_20(x)
    x = self.rnb_21(x)                                    # 2x256x256
    x_gen = torch.sigmoid(self.Conv_gen_final(x))         # 1x256x256
    return x_gen
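# A minimal sketch of sampling the generator's mixed latent input; the
# Gaussian scale and one-hot categorical sampling follow the ClusterGAN
# convention and are assumptions here, as are `latent_dim` and `n_c`.
import torch

def sample_z(batch, latent_dim, n_c, sigma=0.1):
    zn = sigma * torch.randn(batch, latent_dim)  # continuous code
    idx = torch.randint(n_c, (batch,))
    zc = torch.zeros(batch, n_c).scatter_(1, idx.unsqueeze(1), 1.0)  # one-hot
    return zn, zc, idx

# x_gen = generator(zn, zc) then lies in [0, 1] thanks to the final sigmoid.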
def forward(self, x):
    # Only a subset of the residual-block indices is applied here
    for i in (1, 3, 4, 6, 7, 9, 10, 12, 13, 15, 16, 18):
        x = getattr(self, 'rnb_%d' % i)(x)
    x = x.view((x.shape[0], -1))  # Flatten feature maps
    x = F.selu_(self.bn_19(self.linear_19(x)))
    x = F.selu_(self.bn_20(self.linear_20(x)))
    x = self.linear_21(x)
    if not self.wass:
        # Probability for vanilla GAN; raw critic score otherwise
        x = torch.sigmoid(x)
    return x
def forward(self, zn, zc):
    z = torch.cat((zn, zc), 1)  # Concatenate continuous and categorical codes
    x = F.selu_(self.bn1(self.l1(z)))
    x = F.selu_(self.bn2(self.l2(x)))
    x = x.view([x.shape[0]] + self.ishape)  # Reshape to initial feature map
    x = F.selu_(self.bn3(self.Conv_1(x)))
    x = F.interpolate(x, scale_factor=2, mode='bicubic')
    x = F.selu_(self.bn4(self.Conv_2(x)))
    x = F.interpolate(x, scale_factor=2, mode='bicubic')
    x = F.selu_(self.bn5(self.Conv_3(x)))
    x = F.interpolate(x, scale_factor=2, mode='bicubic')
    x = F.selu_(self.bn6(self.Conv_4(x)))
    x = F.interpolate(x, size=64, mode='bicubic')  # Force exact 64x64 output resolution
    x = F.selu_(self.bn7(self.Conv_5(x)))
    x_gen = torch.sigmoid(self.Conv_6(x))
    return x_gen
def forward(self, x):
    for name, layer in self.layers[:-1]:
        x = layer(x)
        if self.activation_type == 'relu':
            x = F.relu_(x)
        elif self.activation_type == 'leaky_relu':
            x = F.leaky_relu_(x)
        elif self.activation_type == 'elu':
            x = F.elu_(x)
        elif self.activation_type == 'selu':
            x = F.selu_(x)
        elif self.activation_type == 'tanh':
            x = torch.tanh_(x)
        elif self.activation_type == 'sigmoid':
            x = torch.sigmoid_(x)
        elif self.activation_type == 'none':
            pass
        else:
            raise ValueError('Unknown activation function "%s"' % self.activation_type)
    x = self.layers[-1][1](x)  # No activation on output of last layer
    x = F.normalize(x, dim=-1)  # Normalize to unit length
    return x
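# A minimal sketch of a constructor compatible with the forward pass above;
# the real class may differ. self.layers holds (name, module) pairs, so each
# module is also registered via add_module to make its parameters trainable.
import torch.nn as nn

class MLPSketch(nn.Module):
    def __init__(self, sizes, activation_type='relu'):
        super().__init__()
        self.activation_type = activation_type
        self.layers = []
        for i, (n_in, n_out) in enumerate(zip(sizes[:-1], sizes[1:])):
            name = 'linear_%d' % i
            layer = nn.Linear(n_in, n_out)
            self.add_module(name, layer)  # register parameters with the module
            self.layers.append((name, layer))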