# Decoder.__init__ — 4-layer MLP decoder with Tanh activations (BatchNorm layers left disabled).
def __init__(self, args):
    super(Decoder, self).__init__()
    self.dim_h = args.dim_h
    self.n_z = args.n_z
    self.output = args.n_input

    self.dec1 = MLPLayer(self.n_z, self.dim_h, args.sigma_prior)
    # self.bn1 = nn.BatchNorm1d(self.dim_h)
    self.dec1_act = nn.Tanh()
    self.dec2 = MLPLayer(self.dim_h, self.dim_h, args.sigma_prior)
    # self.bn2 = nn.BatchNorm1d(self.dim_h)
    self.dec2_act = nn.Tanh()
    self.dec3 = MLPLayer(self.dim_h, self.dim_h, args.sigma_prior)
    # self.bn3 = nn.BatchNorm1d(self.dim_h)
    self.dec3_act = nn.Tanh()
    self.dec4 = MLPLayer(self.dim_h, self.output, args.sigma_prior)
# Encoder.__init__ — 5-layer MLP encoder with ReLU activations, mapping n_input -> n_z.
def __init__(self, args):
    super(Encoder, self).__init__()
    self.dim_h = args.dim_h
    self.n_z = args.n_z
    self.input = args.n_input

    self.l1 = MLPLayer(self.input, self.dim_h, args.sigma_prior)
    self.l1_act = nn.ReLU()
    self.l2 = MLPLayer(self.dim_h, self.dim_h, args.sigma_prior)
    self.l2_act = nn.ReLU()
    self.l3 = MLPLayer(self.dim_h, self.dim_h, args.sigma_prior)
    self.l3_act = nn.ReLU()
    self.l4 = MLPLayer(self.dim_h, self.dim_h, args.sigma_prior)
    self.l4_act = nn.ReLU()
    self.l5 = MLPLayer(self.dim_h, self.n_z, args.sigma_prior)
# Encoder.__init__ — pyramid MLP encoder (n_input -> 4*dim_h -> 2*dim_h -> dim_h) with BatchNorm
# and ReLU, ending in two parallel output heads (enc4, enc5) of size n_z.
def __init__(self, args):
    super(Encoder, self).__init__()
    self.dim_h = args.dim_h
    self.n_z = args.n_z
    self.input = args.n_input

    self.enc1 = MLPLayer(self.input, self.dim_h * 4, args.sigma_prior)
    self.bn1 = nn.BatchNorm1d(self.dim_h * 4)
    self.enc1_act = nn.ReLU()
    self.enc2 = MLPLayer(self.dim_h * 4, self.dim_h * 2, args.sigma_prior)
    self.bn2 = nn.BatchNorm1d(self.dim_h * 2)
    self.enc2_act = nn.ReLU()
    self.enc3 = MLPLayer(self.dim_h * 2, self.dim_h, args.sigma_prior)
    self.bn3 = nn.BatchNorm1d(self.dim_h)
    self.enc3_act = nn.ReLU()
    self.enc4 = MLPLayer(self.dim_h, self.n_z, args.sigma_prior)
    self.enc5 = MLPLayer(self.dim_h, self.n_z, args.sigma_prior)
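# The forward pass for this encoder is not shown in this section. The sketch below is a minimal
# assumption of how the two heads could be used: enc4 as the posterior mean and enc5 as the
# log-variance, followed by the standard VAE reparameterization. It assumes `import torch` and
# that MLPLayer is callable like an nn.Module; the method and head semantics are assumptions,
# not taken from the source.
def forward(self, x):
    h = self.enc1_act(self.bn1(self.enc1(x)))
    h = self.enc2_act(self.bn2(self.enc2(h)))
    h = self.enc3_act(self.bn3(self.enc3(h)))
    mu = self.enc4(h)                         # assumed: posterior mean
    logvar = self.enc5(h)                     # assumed: posterior log-variance
    std = torch.exp(0.5 * logvar)
    z = mu + std * torch.randn_like(std)      # reparameterization trick
    return z, mu, logvar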
# Decoder.__init__ — label-conditioned MLP decoder with BatchNorm and ReLU hidden layers and a
# Tanh output layer. Note: y_dim (the number of label classes) is not defined in this scope and
# is not read from args; it is assumed to be available here, e.g. as a module-level constant.
def __init__(self, args):
    super(Decoder, self).__init__()
    self.dim_h = args.dim_h
    self.n_z = args.n_z
    self.output = args.n_input
    self.label_emb = nn.Embedding(y_dim, args.n_z)

    self.dec1 = MLPLayer(self.n_z, self.dim_h * 2, args.sigma_prior)
    self.bn1 = nn.BatchNorm1d(self.dim_h * 2)
    self.dec1_act = nn.ReLU()
    self.dec2 = MLPLayer(self.dim_h * 2, self.dim_h * 2, args.sigma_prior)
    self.bn2 = nn.BatchNorm1d(self.dim_h * 2)
    self.dec2_act = nn.ReLU()
    self.dec3 = MLPLayer(self.dim_h * 2, self.dim_h * 2, args.sigma_prior)
    self.bn3 = nn.BatchNorm1d(self.dim_h * 2)
    self.dec3_act = nn.ReLU()
    self.dec3_1 = MLPLayer(self.dim_h * 2, self.dim_h * 2, args.sigma_prior)
    self.bn3_1 = nn.BatchNorm1d(self.dim_h * 2)
    self.dec3_1_act = nn.ReLU()
    self.dec4 = MLPLayer(self.dim_h * 2, self.output, args.sigma_prior)
    # self.bn4 = nn.BatchNorm1d(self.output)
    self.dec4_act = nn.Tanh()
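# Hypothetical instantiation sketch (all values are placeholders, not taken from the source):
# the constructors above only require args to expose dim_h, n_z, n_input and sigma_prior, plus a
# label count y_dim in scope for nn.Embedding in the conditional decoder. Assumes
# `import argparse` and that Encoder, Decoder and MLPLayer are defined as above.
args = argparse.Namespace(dim_h=128, n_z=20, n_input=784, sigma_prior=0.1)
y_dim = 10                      # placeholder label count for the conditional decoder
encoder = Encoder(args)
decoder = Decoder(args)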
# Decoder.__init__ — 6-layer MLP decoder with Tanh activations, mapping n_z -> n_input.
def __init__(self, args):
    super(Decoder, self).__init__()
    self.output = args.n_input
    self.dim_h = args.dim_h
    self.n_z = args.n_z

    self.l1 = MLPLayer(self.n_z, self.dim_h, args.sigma_prior)
    self.l1_act = nn.Tanh()
    self.l2 = MLPLayer(self.dim_h, self.dim_h, args.sigma_prior)
    self.l2_act = nn.Tanh()
    self.l3 = MLPLayer(self.dim_h, self.dim_h, args.sigma_prior)
    self.l3_act = nn.Tanh()
    self.l4 = MLPLayer(self.dim_h, self.dim_h, args.sigma_prior)
    self.l4_act = nn.Tanh()
    self.l5 = MLPLayer(self.dim_h, self.dim_h, args.sigma_prior)
    self.l5_act = nn.Tanh()
    self.l6 = MLPLayer(self.dim_h, self.output, args.sigma_prior)
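# MLPLayer is used throughout this section but not defined in it. Its signature
# MLPLayer(n_in, n_out, sigma_prior) suggests a fully connected layer that carries a Gaussian
# prior of standard deviation sigma_prior over its weights. The sketch below is one plausible
# minimal stand-in (a plain affine layer that records the prior scale); the real layer in the
# repository may sample weights variationally and accumulate KL terms, which is not shown here.
import torch
import torch.nn as nn

class MLPLayer(nn.Module):
    def __init__(self, n_input, n_output, sigma_prior):
        super(MLPLayer, self).__init__()
        self.sigma_prior = sigma_prior                              # assumed: prior std over weights
        self.W = nn.Parameter(torch.randn(n_input, n_output) * sigma_prior)
        self.b = nn.Parameter(torch.zeros(n_output))

    def forward(self, x):
        return x @ self.W + self.b                                  # affine map, like nn.Linear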