# Assumed imports: this fragment uses a SymJAX-style API (T = tensor module,
# layers = layer builders); adjust to the actual project layout if it differs.
import numpy as np
import symjax.tensor as T
from symjax import layers


def generator(Z, out_dim):
    # small MLP mapping a latent sample Z to a point in data space
    layer = [layers.Dense(Z, 16)]
    layer.append(layers.Activation(layer[-1], T.leaky_relu))
    layer.append(layers.Dense(layer[-1], 16))
    layer.append(layers.Activation(layer[-1], T.leaky_relu))
    layer.append(layers.Dense(layer[-1], out_dim))
    return layer
def encoder(X, latent_dim):
    # MLP encoder; the last Dense outputs latent_dim * 2 units
    # (e.g. mean and log-variance of the latent code)
    layer = [layers.Dense(X, 32)]
    layer.append(layers.Activation(layer[-1], T.leaky_relu))
    layer.append(layers.Dense(layer[-1], 32))
    layer.append(layers.Activation(layer[-1], T.leaky_relu))
    layer.append(layers.Dense(layer[-1], latent_dim * 2))
    return layer
def discriminator(X):
    layer = [layers.Dense(X, 32)]
    layer.append(layers.Activation(layer[-1], T.leaky_relu))
    layer.append(layers.Dense(layer[-1], 32))
    layer.append(layers.Activation(layer[-1], T.leaky_relu))
    layer.append(layers.Dense(layer[-1], 2))
    return layer
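# Hypothetical wiring of the three builders above; the names, shapes and dtypes
# here are illustrative only and are not part of the original script.
def _example_models(batch_size=16, data_dim=2, latent_dim=2):
    Z = T.Placeholder((batch_size, latent_dim), 'float32')
    X = T.Placeholder((batch_size, data_dim), 'float32')
    G = generator(Z, out_dim=data_dim)  # G[-1] is the generated sample
    E = encoder(X, latent_dim)          # E[-1] holds the latent_dim * 2 latent statistics
    D = discriminator(X)                # D[-1] holds the 2-way real/fake logits
    return G, E, D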
label = T.Placeholder((BS,), 'int32')
deterministic = T.Placeholder((1,), 'bool')

# first layer: either a learnable Gaussian 2-D filter (parameterized by `cov`)
# convolved with the input `tf`, or a plain log compression of `tf`.
# `BS`, `L` and `tf` are defined earlier in the script; `tf` is the input
# representation, not the TensorFlow module.
NN = 32
if L > 0:
    x, y = T.meshgrid(T.linspace(-5, 5, NN), T.linspace(-5, 5, NN))
    grid = T.stack([x.flatten(), y.flatten()], 1)
    cov = T.Variable(np.eye(2), name='cov')
    gaussian = T.exp(-(grid.dot(cov.T().dot(cov)) * grid).sum(1)).reshape(
        (1, 1, NN, NN))
    layer = [
        layers.Conv2D(tf, 1, (NN, NN), strides=(6, 6), W=gaussian, b=None)
    ]
    layer[-1].add_variable(cov)
    layer.append(
        layers.Activation(layer[-1], lambda x: T.log(T.abs(x) + 0.01)))
else:
    layer = [layers.Activation(tf + 0.01, T.log)]

# convolutional stack: Conv -> BatchNorm -> leaky ReLU -> Pool (twice),
# followed by a final Conv + BatchNorm
layer.append(layers.Conv2D(layer[-1], 16, (3, 3)))
layer.append(layers.BatchNormalization(layer[-1], [0, 2, 3], deterministic))
layer.append(layers.Activation(layer[-1], T.leaky_relu))
layer.append(layers.Pool2D(layer[-1], (3, 3)))
layer.append(layers.Conv2D(layer[-1], 16, (3, 3)))
layer.append(layers.BatchNormalization(layer[-1], [0, 2, 3], deterministic))
layer.append(layers.Activation(layer[-1], T.leaky_relu))
layer.append(layers.Pool2D(layer[-1], (3, 3)))
layer.append(layers.Conv2D(layer[-1], 16, (3, 3)))
layer.append(layers.BatchNormalization(layer[-1], [0, 2, 3], deterministic))
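# Numerical sanity check of the Gaussian window built above (assumption: plain
# NumPy helper, not part of the original model). It reproduces
# exp(-g^T (cov^T cov) g) over the NN x NN grid, i.e. an unnormalized 2-D Gaussian
# whose precision matrix cov^T cov is learned through the `cov` variable attached
# to the first Conv2D layer.
def _reference_gaussian_window(NN=32, cov=None):
    cov = np.eye(2) if cov is None else cov
    x, y = np.meshgrid(np.linspace(-5, 5, NN), np.linspace(-5, 5, NN))
    grid = np.stack([x.flatten(), y.flatten()], 1)    # (NN * NN, 2) grid points
    quad = ((grid @ (cov.T @ cov)) * grid).sum(1)     # g^T (cov^T cov) g per point
    return np.exp(-quad).reshape(1, 1, NN, NN)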