Example #1
        def apply_constraints():
            logger.debug("Using constraints: {}".format(
                str(experiment.constraints)))
            with tf.variable_scope("constrained_out"):
                d_constraints_kernel = tf.get_variable(
                    "d_constraints_kernel",
                    shape=[constraints_features.shape[1], 1],
                    initializer=xavier_init())
                logger.debug(msg.format(
                    "d_constraints_kernel", d_constraints_kernel.shape, reuse))
                input_concat = tf.concat(
                    [d_hidden4, constraints_features],
                    axis=1, name="input_concat_{}".format(reuse))
                logger.debug(msg.format(
                    "input_concat", input_concat.shape, reuse))
                weight_concat = tf.concat(
                    [d_out_kernel, d_constraints_kernel],
                    axis=0, name="weight_concat_{}".format(reuse))
                logger.debug(msg.format(
                    "weight_concat", weight_concat.shape, reuse))
                d_constrained_out = tf.add(
                    tf.matmul(input_concat, weight_concat), d_out_bias,
                    name="d_constrained_out_{}".format(reuse))
                logger.debug(msg.format(
                    "constrained_out", d_constrained_out.shape, reuse))

                # define summaries on the last layer
                d_summaries = tf.summary.merge(
                    [stats_summaries(d_constrained_out),
                     d_summaries_dh4])
            return d_constrained_out, d_summaries
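
This closure is an excerpt: `d_hidden4`, `constraints_features`, `d_out_kernel`, `d_out_bias`, `d_summaries_dh4`, `msg`, `logger`, `experiment`, and `reuse` are all bound in the enclosing discriminator (see Example #5). The concat-then-matmul construction works because concatenating the inputs along axis 1 and the kernels along axis 0 is the same as summing two separate matmuls, so the constrained head can reuse `d_out_kernel` and `d_out_bias` unchanged. A minimal NumPy sketch of that identity, with made-up shapes:

import numpy as np

rng = np.random.default_rng(0)
x = rng.normal(size=(4, 32))    # stand-in for d_hidden4
c = rng.normal(size=(4, 5))     # stand-in for constraints_features
w = rng.normal(size=(32, 1))    # stand-in for d_out_kernel
w_c = rng.normal(size=(5, 1))   # stand-in for d_constraints_kernel

# [x, c] @ [[w], [w_c]]  ==  x @ w  +  c @ w_c
lhs = np.concatenate([x, c], axis=1) @ np.concatenate([w, w_c], axis=0)
rhs = x @ w + c @ w_c
assert np.allclose(lhs, rhs)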
Example #2
        def apply_constraints():
            logger.debug("Using constraints: {}".format(
                str(experiment.constraints)))
            with tf.variable_scope("constrained_ll_out"):
                constraints_out = dense(
                    constraints_features, 1, activation=lrelu,
                    kernel_initializer=xavier_init())

                d_out_kernel = tf.get_variable(
                    "d_out_kernel", shape=[2, 1], initializer=xavier_init())
                logger.debug(
                    msg.format("d_out_kernel", d_out_kernel.shape, reuse))
                d_out_bias = tf.get_variable(
                    "d_out_bias", shape=[1, 1], initializer=xavier_init())
                logger.debug(msg.format("d_out_bias", d_out_bias.shape, reuse))

                input_concat = tf.concat(
                    [d_out, constraints_out],
                    axis=1, name="input_concat_{}".format(reuse))
                logger.debug(msg.format(
                    "input_concat", input_concat.shape, reuse))
                d_constrained_out = tf.add(
                    tf.matmul(input_concat, d_out_kernel), d_out_bias,
                    name="d_constrained_out_{}".format(reuse))
                logger.debug(msg.format(
                    "constrained_out", d_constrained_out.shape, reuse))

                # define summaries on the last layer
                d_summaries = tf.summary.merge(
                    [stats_summaries(d_constrained_out), d_out_summaries])
            return d_constrained_out, d_summaries
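
As in Example #1, this closure leans on names bound in an enclosing discriminator (`d_out`, `constraints_features`, `d_out_summaries`, `msg`, `logger`, `experiment`, `reuse`). The difference is that the constraint features are first squeezed to a single logit by a `dense` layer, and the 2x1 `d_out_kernel` then learns an affine mix of the two logits. A small NumPy sketch of the resulting computation (the shapes are assumptions):

import numpy as np

rng = np.random.default_rng(1)
d_out = rng.normal(size=(4, 1))            # base discriminator logit
constraints_out = rng.normal(size=(4, 1))  # projected constraint logit
w = rng.normal(size=(2, 1))                # stand-in for d_out_kernel
b = rng.normal(size=(1, 1))                # stand-in for d_out_bias

# the concat head is just a weighted sum of the two logits plus a bias
lhs = np.concatenate([d_out, constraints_out], axis=1) @ w + b
rhs = w[0, 0] * d_out + w[1, 0] * constraints_out + b
assert np.allclose(lhs, rhs)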
Example #3
def _gan28_discr(experiment=None, X=None, reuse=False, **kwargs):
    lrelu = partial(leaky_relu, leakiness=experiment.leak)
    h_dim = experiment.h_dim
    logger = experiment.logger

    msg = "D_SHAPE {} {} [reuse={}]"
    logger.debug(msg.format("in", X.shape, reuse))
    with tf.variable_scope("discriminator", reuse=reuse):
        with tf.variable_scope("hidden1"):
            d_hidden1 = conv2d(
                X, filters=h_dim, kernel_size=5, strides=2, padding="same",
                activation=lrelu, kernel_initializer=xavier_init())
            logger.debug(msg.format("dh1", d_hidden1.shape, reuse))
        with tf.variable_scope("hidden2"):
            d_hidden2 = conv2d(
                d_hidden1, filters=h_dim * 2, kernel_size=5, strides=2,
                padding="same", activation=lrelu,
                kernel_initializer=xavier_init())
            logger.debug(msg.format("dh2", d_hidden2.shape, reuse))
        with tf.variable_scope("hidden3"):
            d_hidden2 = tf.reshape(
                d_hidden2, [-1, np.prod(d_hidden2.shape[1:], dtype=int)])
            d_hidden3 = dense(d_hidden2, 1024, activation=lrelu,
                              kernel_initializer=xavier_init())
            logger.debug(msg.format("dh3", d_hidden3.shape, reuse))
        with tf.variable_scope("output"):
            d_out = dense(d_hidden3, 1, kernel_initializer=xavier_init())
            logger.debug(msg.format("out", d_out.shape, reuse))

            # define summaries on the last layer
            d_summaries = stats_summaries(d_out)

    return d_out, d_summaries
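
These snippets assume several helpers they never define: `conv2d`, `dense`, `leaky_relu`, `xavier_init`, and `stats_summaries`. The following TF1-style definitions are a plausible reconstruction, offered only so the examples can run; the real project may differ:

from functools import partial

import numpy as np
import tensorflow as tf

conv2d = tf.layers.conv2d
dense = tf.layers.dense

def leaky_relu(x, leakiness=0.2):
    # leaky ReLU with a configurable negative-slope coefficient
    return tf.maximum(leakiness * x, x)

def xavier_init():
    # Glorot/Xavier uniform initializer
    return tf.glorot_uniform_initializer()

def stats_summaries(tensor, name="out"):
    # scalar mean/stddev/min/max plus a histogram, merged into one summary
    mean = tf.reduce_mean(tensor)
    stddev = tf.sqrt(tf.reduce_mean(tf.square(tensor - mean)))
    return tf.summary.merge([
        tf.summary.scalar(name + "_mean", mean),
        tf.summary.scalar(name + "_stddev", stddev),
        tf.summary.scalar(name + "_max", tf.reduce_max(tensor)),
        tf.summary.scalar(name + "_min", tf.reduce_min(tensor)),
        tf.summary.histogram(name, tensor),
    ])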
Example #4
        def skip_constraints():
            with tf.variable_scope("output"):
                d_out = tf.add(tf.matmul(d_hidden4, d_out_kernel),
                               d_out_bias, name="d_out_{}".format(reuse))
                logger.debug(msg.format("out", d_out.shape, reuse))

                # define summaries on the last layer
                d_summaries = tf.summary.merge(
                    [stats_summaries(d_out), d_summaries_dh4])
            return d_out, d_summaries
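
This is the unconstrained branch of the `tf.cond` in Example #5: it reuses the `d_out_kernel` and `d_out_bias` created under the "shared_weights" scope, so whichever branch the condition selects, the final layer is parameterized by the same variables.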
Example #5
def _can60_discr_32layer_auto(
        experiment=None, X=None, reuse=False, use_constraints=None,
        constraints_features=None, **kwargs):
    lrelu = partial(leaky_relu, leakiness=experiment.leak)
    h_dim = experiment.h_dim
    logger = experiment.logger

    msg = "D_SHAPE {} {} [reuse={}]"
    logger.debug(msg.format("in", X.shape, reuse))
    with tf.variable_scope("discriminator", reuse=reuse):
        with tf.variable_scope("hidden1"):
            d_hidden1 = conv2d(
                X, filters=h_dim, kernel_size=5, strides=2, padding="same",
                activation=lrelu, kernel_initializer=xavier_init())
            logger.debug(msg.format("dh1", d_hidden1.shape, reuse))
        with tf.variable_scope("hidden15"):
            d_hidden15 = conv2d(
                d_hidden1, filters=h_dim, kernel_size=5, strides=2,
                padding="same", activation=lrelu,
                kernel_initializer=xavier_init())
            logger.debug(msg.format("dh15", d_hidden15.shape, reuse))
        with tf.variable_scope("hidden2"):
            d_hidden2 = conv2d(
                d_hidden15, filters=h_dim * 2, kernel_size=5, strides=2,
                padding="same", activation=lrelu,
                kernel_initializer=xavier_init())
            logger.debug(msg.format("dh2", d_hidden2.shape, reuse))
            d_hidden2 = tf.reshape(
                d_hidden2, [-1, np.prod(d_hidden2.shape[1:], dtype=int)])
            logger.debug(msg.format("dh2", d_hidden2.shape, reuse))
        with tf.variable_scope("hidden3"):
            d_hidden3 = dense(d_hidden2, 1024, activation=lrelu,
                              kernel_initializer=xavier_init())
            logger.debug(msg.format("dh3", d_hidden3.shape, reuse))
        with tf.variable_scope("hidden4"):
            d_hidden4 = dense(d_hidden3, 32, activation=lrelu,
                              kernel_initializer=xavier_init())
            logger.debug(msg.format("dh4", d_hidden4.shape, reuse))
            d_summaries_dh4 = stats_summaries(d_hidden4, "dh4_pre_cond")
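        # created outside the tf.cond branches below so that the plain and
        # the constrained output heads share the same final-layer parameters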
        with tf.variable_scope("shared_weights"):
            d_out_kernel = tf.get_variable(
                "d_out_kernel", shape=[32, 1], initializer=xavier_init())
            logger.debug(msg.format("d_out_kernel", d_out_kernel.shape, reuse))
            d_out_bias = tf.get_variable(
                "d_out_bias", shape=[1, 1], initializer=xavier_init())
            logger.debug(msg.format("d_out_bias", d_out_bias.shape, reuse))

        def skip_constraints():
            with tf.variable_scope("output"):
                d_out = tf.add(tf.matmul(d_hidden4, d_out_kernel),
                               d_out_bias, name="d_out_{}".format(reuse))
                logger.debug(msg.format("out", d_out.shape, reuse))

                # define summaries on the last layer
                d_summaries = tf.summary.merge(
                    [stats_summaries(d_out), d_summaries_dh4])
            return d_out, d_summaries

        def apply_constraints():
            logger.debug("Using constraints: {}".format(
                str(experiment.constraints)))
            with tf.variable_scope("constrained_out"):
                d_constraints_kernel = tf.get_variable(
                    "d_constraints_kernel",
                    shape=[constraints_features.shape[1], 1],
                    initializer=xavier_init())
                logger.debug(msg.format(
                    "d_constraints_kernel", d_constraints_kernel.shape, reuse))
                input_concat = tf.concat(
                    [d_hidden4, constraints_features],
                    axis=1, name="input_concat_{}".format(reuse))
                logger.debug(msg.format(
                    "input_concat", input_concat.shape, reuse))
                weight_concat = tf.concat(
                    [d_out_kernel, d_constraints_kernel],
                    axis=0, name="weight_concat_{}".format(reuse))
                logger.debug(msg.format(
                    "weight_concat", weight_concat.shape, reuse))
                d_constrained_out = tf.add(
                    tf.matmul(input_concat, weight_concat), d_out_bias,
                    name="d_constrained_out_{}".format(reuse))
                logger.debug(msg.format(
                    "constrained_out", d_constrained_out.shape, reuse))

                # define summaries on the last layer
                d_summaries = tf.summary.merge(
                    [stats_summaries(d_constrained_out),
                     d_summaries_dh4])
            return d_constrained_out, d_summaries

        # tf.cond(pred, true_fn, false_fn): apply the constrained head when
        # use_constraints is set, otherwise fall back to the plain output
        return tf.cond(tf.cast(use_constraints, tf.bool),
                       apply_constraints, skip_constraints)
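
How this discriminator is instantiated is not shown here; the sketch below wires it into a TF1 graph under stated assumptions (the `SimpleNamespace` experiment object, the 60x60 grayscale input, and the 10 constraint features are all invented for illustration):

import logging
from types import SimpleNamespace

import tensorflow as tf

logging.basicConfig(level=logging.DEBUG)
experiment = SimpleNamespace(
    leak=0.2, h_dim=64, constraints=["example_constraint"],
    logger=logging.getLogger("can60"))

X = tf.placeholder(tf.float32, [None, 60, 60, 1], name="X")
constraints_features = tf.placeholder(tf.float32, [None, 10])
use_constraints = tf.placeholder_with_default(True, [])

d_out, d_summaries = _can60_discr_32layer_auto(
    experiment=experiment, X=X, reuse=False,
    use_constraints=use_constraints,
    constraints_features=constraints_features)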