Example #1
    def discriminator_cate(self, zs, reuse=False, name='discriminator_cate'):
        with tf.variable_scope(name, reuse=reuse):
            # 256-256 MLP discriminator; sigmoid output in (0, 1)
            layer = Stacker(zs)
            layer.linear_block(256, relu)
            layer.linear_block(256, relu)
            layer.linear(1)
            layer.sigmoid()

        return layer.last_layer
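For readers unfamiliar with the Stacker helper, here is a minimal plain-TF1 sketch of the same network, assuming linear_block(n, act) is a dense layer followed by the activation and linear(n) is a dense layer with no activation (this reading of Stacker is an assumption, not confirmed by the source):

    import tensorflow as tf

    def discriminator_cate_plain(zs, reuse=False, name='discriminator_cate'):
        # Hypothetical equivalent of the Stacker chain above.
        with tf.variable_scope(name, reuse=reuse):
            h = tf.layers.dense(zs, 256, activation=tf.nn.relu)
            h = tf.layers.dense(h, 256, activation=tf.nn.relu)
            logit = tf.layers.dense(h, 1)  # raw logit
            return tf.nn.sigmoid(logit)    # probability in (0, 1)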
Example #2
    def Q_function(self, X_gen, reuse=False):
        with tf.variable_scope('Q_function', reuse=reuse):
            layer = Stacker(X_gen)
            layer.linear_block(128, relu)
            layer.linear_block(128, relu)
            code_logit = layer.linear(10 + 2)  # unnormalized latent-code logits
            code = layer.softmax()             # normalized code distribution

        return code, code_logit
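The 10 + 2 output width is not explained in the snippet; a common reading (InfoGAN on MNIST: 10 categorical code logits plus 2 continuous codes) would split the result as below. The slice boundaries are an assumption, not part of the source:

    code, code_logit = self.Q_function(X_gen)
    cat_logit = code_logit[:, :10]   # assumed: 10 categorical code logits
    cont_code = code_logit[:, 10:]   # assumed: 2 continuous codes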
Example #3
    def decoder(self, zs, net_shapes, reuse=False, name='decoder'):
        with tf.variable_scope(name, reuse=reuse):
            stack = Stacker(zs)
            for shape in net_shapes:
                stack.linear_block(shape, relu)

            stack.linear_block(self.X_flatten_size, sigmoid)
            stack.reshape(self.Xs_shape)

        return stack.last_layer
Example #4
    def encoder(self, Xs, net_shapes, reuse=False, name='encoder'):
        with tf.variable_scope(name, reuse=reuse):
            stack = Stacker(Xs)
            stack.flatten()
            for shape in net_shapes:
                stack.linear_block(shape, relu)

            stack.linear_block(self.latent_code_size, relu)

        return stack.last_layer
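Examples #3 and #4 pair naturally into an autoencoder. A hedged wiring sketch, assuming the decoder mirrors the encoder's net_shapes (the reversal is a guess about the surrounding model code):

    code = self.encoder(Xs, net_shapes)
    Xs_recon = self.decoder(code, net_shapes[::-1])
    recon_loss = tf.reduce_mean(tf.square(Xs - Xs_recon))  # MSE reconstruction loss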
Example #5
    def classifier(self, Xs, net_shapes, name='classifier'):
        with tf.variable_scope(name):
            layer = Stacker(flatten(Xs))

            for net_shape in net_shapes:
                layer.linear_block(net_shape, relu)

            layer.linear(self.Y_size)
            logit = layer.last_layer
            h = softmax(logit)
        return logit, h
Example #6
    def encoder(self, Xs, net_shapes, reuse=False, name='encoder'):
        with tf.variable_scope(name, reuse=reuse):
            stack = Stacker(Xs)
            stack.flatten()

            for shape in net_shapes:
                stack.linear_block(shape, relu)

            stack.linear_block(self.z_size + self.Y_size, relu)
            zs = stack.last_layer[:, :self.z_size]      # latent (style) part
            Ys_gen = stack.last_layer[:, self.z_size:]  # label-logit part

            hs = softmax(Ys_gen)                        # predicted label distribution
        return zs, Ys_gen, hs
Example #7
    def discriminator_gauss(self,
                            zs,
                            net_shapes,
                            reuse=False,
                            name='discriminator_gauss'):
        with tf.variable_scope(name, reuse=reuse):
            layer = Stacker(zs)
            for shape in net_shapes:
                layer.linear_block(shape, relu)

            layer.linear(1)
            layer.sigmoid()

        return layer.last_layer
Example #8
    def classifier(self, x, dropout_rate):
        with tf.variable_scope('classifier'):
            layer = Stacker(x)

            layer.linear_block(128, lrelu)
            layer.dropout(dropout_rate)

            layer.linear_block(128, relu)
            layer.dropout(dropout_rate)

            # layer.linear_block(32, lrelu)
            # layer.dropout(dropout_rate)
            #
            # layer.linear_block(16, lrelu)
            # layer.dropout(dropout_rate)

            layer.linear(2)
            logit = layer.last_layer

            h = softmax(logit)
        return logit, h
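Returning the raw logit alongside h follows the usual TF1 pattern of computing the loss from logits for numerical stability; a hypothetical training hook, where Ys is an assumed one-hot label placeholder not shown in the source:

    logit, h = self.classifier(x, dropout_rate)
    loss = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits_v2(labels=Ys, logits=logit))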
Example #9
    def encoder(self, Xs, name='encoder'):
        with tf.variable_scope(name):
            stack = Stacker(Xs)
            stack.flatten()
            stack.linear_block(512, relu)
            stack.linear_block(256, relu)
            stack.linear_block(128, relu)
            stack.linear_block(self.code_size, relu)

        return stack.last_layer
Example #10
    def decoder(self, zs, Ys, reuse=False, name='decoder'):
        with tf.variable_scope(name, reuse=reuse):
            stack = Stacker(concat((zs, Ys), axis=1))
            stack.linear_block(128, relu)
            stack.linear_block(256, relu)
            stack.linear_block(512, relu)
            stack.linear_block(self.X_flatten_size, sigmoid)
            stack.reshape(self.Xs_shape)

        return stack.last_layer
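A hypothetical use of this conditional decoder at generation time, assuming a standard-normal prior over zs (batch_size and the one-hot Ys are placeholders not shown in the source):

    zs = tf.random_normal([batch_size, self.z_size])  # assumed N(0, I) prior
    Xs_gen = self.decoder(zs, Ys, reuse=True)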
Example #11
    def encoder(self, Xs, Ys, name='encoder'):
        with tf.variable_scope(name):
            stack = Stacker(concat((flatten(Xs), Ys), axis=1))
            stack.linear_block(512, relu)
            stack.linear_block(256, relu)
            stack.linear_block(128, relu)
            stack.linear_block(self.z_size * 2, relu)

            h = stack.last_layer
            mean = h[:, :self.z_size]                 # Gaussian mean
            std = tf.nn.softplus(h[:, self.z_size:])  # softplus keeps std positive

        return mean, std
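The (mean, std) pair is the standard input to the reparameterization trick; a minimal sketch of the sampling step that typically follows (not part of the source):

    mean, std = self.encoder(Xs, Ys)
    eps = tf.random_normal(tf.shape(mean))  # eps ~ N(0, I)
    zs = mean + std * eps                   # differentiable latent sample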