def discriminator_cate(self, zs, reuse=False, name='discriminator_cate'):
    """Score latent codes `zs` as real/fake categorical samples.

    Two 256-unit relu-activated fully-connected blocks followed by a
    single sigmoid output unit.

    Args:
        zs: latent-code tensor to discriminate.
        reuse: whether to reuse the variables of an earlier call.
        name: variable-scope name.

    Returns:
        Sigmoid probability tensor (one unit per sample).
    """
    with tf.variable_scope(name, reuse=reuse):
        stack = Stacker(zs)
        for _ in range(2):
            stack.linear_block(256, relu)
        stack.linear(1)
        stack.sigmoid()
        return stack.last_layer
def discriminator(self, X, reuse=False):
    """Convolutional real/fake discriminator over image batch `X`.

    Two strided 5x5 conv blocks (128 then 256 channels, lrelu), flatten,
    then a single sigmoid output unit.

    Args:
        X: input image batch.
        reuse: whether to reuse the variables of an earlier call.

    Returns:
        Sigmoid probability tensor (one unit per sample).
    """
    with tf.variable_scope('discriminator', reuse=reuse):
        net = Stacker(X)
        # 128 -> 256 channel conv pyramid, leaky-relu activations
        for channels in (128, 256):
            net.conv_block(channels, CONV_FILTER_5522, lrelu)
        net.reshape([self.batch_size, -1])
        net.linear(1)
        net.sigmoid()
        return net.last_layer
def discriminator(self, X, net_shapes, reuse=False, name='discriminator'):
    """MLP discriminator with configurable hidden-layer widths.

    Flattens `X`, applies one linear layer per entry of `net_shapes`,
    then a single sigmoid output unit.

    NOTE(review): the hidden layers use `linear` with no activation, so
    the stack is mathematically a single affine map — confirm whether
    `linear_block(shape, relu)` was intended (cf. discriminator_gauss).

    Args:
        X: input tensor (flattened internally).
        net_shapes: iterable of hidden-layer widths.
        reuse: whether to reuse the variables of an earlier call.
        name: variable-scope name.

    Returns:
        Sigmoid probability tensor (one unit per sample).
    """
    with tf.variable_scope(name, reuse=reuse):
        net = Stacker(flatten(X))
        for width in net_shapes:
            net.linear(width)
        net.linear(1)
        net.sigmoid()
        return net.last_layer
def classifier(self, Xs, net_shapes, name='classifier'):
    """MLP classifier with configurable hidden-layer widths.

    Flattens `Xs`, applies one relu-activated linear block per entry of
    `net_shapes`, then a `self.Y_size`-wide output layer.

    Args:
        Xs: input tensor (flattened internally).
        net_shapes: iterable of hidden-layer widths.
        name: variable-scope name.

    Returns:
        Tuple of (logit, h) where `h = softmax(logit)`.
    """
    with tf.variable_scope(name):
        net = Stacker(flatten(Xs))
        for width in net_shapes:
            net.linear_block(width, relu)
        net.linear(self.Y_size)
        logit = net.last_layer
        probs = softmax(logit)
        return logit, probs
def generator(self, z, net_shapes, reuse=False, name='generator'):
    """MLP generator mapping noise `z` to images of shape `self.Xs_shape`.

    One linear layer per entry of `net_shapes`, then a sigmoid-squashed
    output of `self.X_flatten_size` units reshaped to `self.Xs_shape`.

    NOTE(review): the hidden layers use `linear` with no activation, so
    the stack collapses to one affine map — confirm whether an activated
    `linear_block` was intended.

    Args:
        z: latent noise tensor.
        net_shapes: iterable of hidden-layer widths.
        reuse: whether to reuse the variables of an earlier call.
        name: variable-scope name.

    Returns:
        Generated tensor of shape `self.Xs_shape`, values in (0, 1).
    """
    with tf.variable_scope(name, reuse=reuse):
        net = Stacker(z)
        for width in net_shapes:
            net.linear(width)
        net.linear(self.X_flatten_size)
        net.sigmoid()
        net.reshape(self.Xs_shape)
        return net.last_layer
def discriminator(self, X, Y, reuse=False):
    """Conditional convolutional discriminator.

    Projects label `Y` to an `input_h x input_w` map, concatenates it to
    `X` as an extra channel, then applies two strided 5x5 conv blocks
    (128, 256 channels, lrelu), flatten, and a single sigmoid unit.

    Args:
        X: input image batch.
        Y: conditioning tensor (e.g. labels), linearly projected.
        reuse: whether to reuse the variables of an earlier call.

    Returns:
        Sigmoid probability tensor (one unit per sample).
    """
    with tf.variable_scope('discriminator', reuse=reuse):
        # Broadcast the condition into a single image-sized channel.
        y_map = linear(Y, self.input_h * self.input_w)
        y_map = reshape(y_map, [self.batch_size, self.input_h, self.input_w, 1])

        net = Stacker(tf.concat((X, y_map), axis=3))
        for channels in (128, 256):
            net.conv_block(channels, CONV_FILTER_5522, lrelu)
        net.reshape([self.batch_size, -1])
        net.linear(1)
        net.sigmoid()
        return net.last_layer
def discriminator_gauss(self, zs, net_shapes, reuse=False, name='discriminator_gauss'):
    """Score latent codes `zs` against the Gaussian prior.

    One relu-activated linear block per entry of `net_shapes`, then a
    single sigmoid output unit.

    Args:
        zs: latent-code tensor to discriminate.
        net_shapes: iterable of hidden-layer widths.
        reuse: whether to reuse the variables of an earlier call.
        name: variable-scope name.

    Returns:
        Sigmoid probability tensor (one unit per sample).
    """
    with tf.variable_scope(name, reuse=reuse):
        net = Stacker(zs)
        for width in net_shapes:
            net.linear_block(width, relu)
        net.linear(1)
        net.sigmoid()
        return net.last_layer
def Q_function(self, X_gen, reuse=False):
    """Auxiliary Q network recovering latent codes from generated samples.

    Two 128-unit relu blocks, then a 12-unit head — presumably 10
    categorical + 2 continuous codes (InfoGAN-style); TODO confirm.

    NOTE(review): softmax is applied across all 12 units, including the
    2 presumably-continuous ones — verify this is intended.

    Args:
        X_gen: generated-sample tensor (or features thereof).
        reuse: whether to reuse the variables of an earlier call.

    Returns:
        Tuple of (code, code_logit): softmaxed code and raw logits.
    """
    with tf.variable_scope('Q_function', reuse=reuse):
        net = Stacker(X_gen)
        for _ in range(2):
            net.linear_block(128, relu)
        code_logit = net.linear(10 + 2)  # 10 categorical + 2 continuous?
        code = net.softmax()
        return code, code_logit
def classifier(self, x, dropout_rate):
    """Two-hidden-layer MLP binary classifier with dropout.

    128-unit lrelu block, dropout, 128-unit relu block, dropout, then a
    2-unit output layer.

    NOTE(review): the first hidden block uses lrelu but the second uses
    relu — looks unintentional; confirm whether both should match.

    Args:
        x: input feature tensor.
        dropout_rate: dropout rate applied after each hidden block.

    Returns:
        Tuple of (logit, h) where `h = softmax(logit)` over 2 classes.
    """
    with tf.variable_scope('classifier'):
        layer = Stacker(x)
        layer.linear_block(128, lrelu)
        layer.dropout(dropout_rate)
        layer.linear_block(128, relu)
        layer.dropout(dropout_rate)
        # Removed commented-out experimental 32/16-unit blocks (dead code).
        layer.linear(2)
        logit = layer.last_layer
        h = softmax(logit)
        return logit, h