Exemplo n.º 1
0
    def network(context: modular.ModularContext):
        """Build a 4-layer modular CNN classifier over `inputs`.

        Each layer selects among 5 convolutional modules (3x3, 8 output
        channels), max-pools 2x2, then applies ReLU.  A dense layer maps
        the flattened features to 10 class logits.

        Args:
            context: the ModularContext driving module selection.

        Returns:
            Tuple of (loglikelihood, logits, accuracy, selection_entropy,
            batch_selection_entropy).
        """
        layer_input = inputs
        num_layers, modules_per_layer, out_channels = 4, 5, 8
        for _ in range(num_layers):
            in_channels = layer_input.shape[-1]
            conv_modules = modular.create_conv_modules(
                [3, 3, in_channels, out_channels],
                module_count=modules_per_layer,
                strides=[1, 1, 1, 1])
            conv_out = modular.modular_layer(layer_input,
                                             conv_modules,
                                             parallel_count=1,
                                             context=context)
            downsampled = tf.nn.max_pool(conv_out,
                                         ksize=[1, 2, 2, 1],
                                         strides=[1, 2, 2, 1],
                                         padding='SAME')
            layer_input = tf.nn.relu(downsampled)

        logits = tf.layers.dense(tf.layers.flatten(layer_input), units=10)

        target = modular.modularize_target(labels, context)
        loglikelihood = tf.distributions.Categorical(logits).log_prob(target)

        predicted = tf.argmax(logits, axis=-1, output_type=tf.int32)
        correct = tf.cast(tf.equal(predicted, target), tf.float32)
        accuracy = tf.reduce_mean(correct)

        return (loglikelihood, logits, accuracy,
                context.selection_entropy(), context.batch_selection_entropy())
Exemplo n.º 2
0
    def network(context: modular.ModularContext):
        """Two-layer modular dense classifier.

        Layer 1 selects among 10 dense modules of 32 units (ReLU); layer 2
        selects among 8 dense modules of 10 units producing class logits.

        Args:
            context: the ModularContext driving module selection.

        Returns:
            Tuple of (loglikelihood, logits, accuracy, selection_entropy,
            batch_selection_entropy).
        """
        first_modules = modular.create_dense_modules(inputs,
                                                     module_count=10,
                                                     units=32)
        hidden = tf.nn.relu(
            modular.modular_layer(inputs, first_modules,
                                  parallel_count=1, context=context))

        second_modules = modular.create_dense_modules(hidden,
                                                      module_count=8,
                                                      units=10)
        logits = modular.modular_layer(hidden, second_modules,
                                       parallel_count=1, context=context)

        target = modular.modularize_target(labels, context)
        loglikelihood = tf.distributions.Categorical(logits).log_prob(target)

        predicted = tf.argmax(logits, axis=-1, output_type=tf.int32)
        accuracy = tf.reduce_mean(
            tf.cast(tf.equal(predicted, target), tf.float32))

        return (loglikelihood, logits, accuracy,
                context.selection_entropy(), context.batch_selection_entropy())
Exemplo n.º 3
0
def network(context: modular.ModularContext):
    """Two modular dense layers gated by dependent variational masks.

    Args:
        context: instantiation of the ModularContext class.

    Returns:
        Tuple of (loglikelihood, ctrl_logits, accuracy, bs_perst_log,
        s_log, pi_log, context).  `loglikelihood` is the negated MSE
        against the modularized target; `accuracy` is a constant
        placeholder (the classification accuracy is not meaningful for
        this regression-style objective).
    """
    module_count = 10
    layer_units = [1, 100]

    hidden = inputs
    s_log, ctrl_logits, pi_log, bs_perst_log = [], [], [], []

    def _as_image(t):
        # Reshape a per-module tensor into the image-summary layout.
        return tf.cast(tf.reshape(t, [1, -1, module_count, 1]), tf.float32)

    for units in layer_units:
        modules = modular.create_dense_modules(hidden, module_count,
                                               units=units)
        hidden, l, s, pi, bs = modular.dep_variational_mask(hidden,
                                                            modules,
                                                            context,
                                                            tf.shape(inputs)[0],
                                                            iterate,
                                                            [3.5, 3.5],
                                                            [0.3, 0.3],
                                                            output_add=True,
                                                            cnn_ctrl=False)
        hidden = modular.batch_norm(hidden)

        pi_log.append(pi)
        s_log.append(_as_image(s))
        ctrl_logits.append(_as_image(l))
        bs_perst_log.append(_as_image(bs))

    logits = tf.layers.dense(hidden, 2)

    target = modular.modularize_target(labels, context)
    # Negated MSE stands in for a log-likelihood here.
    loglikelihood = sum_and_mean_il(
        -tf.losses.mean_squared_error(target, logits), context.sample_size)

    # predicted = tf.argmax(logits, axis=-1, output_type=tf.int32)
    # accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, target), tf.float32))
    accuracy = tf.constant(1)

    return (loglikelihood, ctrl_logits, accuracy,
            bs_perst_log, s_log, pi_log, context)
Exemplo n.º 4
0
def network(context: modular.ModularContext):
    """Modular dense classifier gated by a variational mask.

    Args:
        context: instantiation of the ModularContext class.

    Returns:
        Tuple of (loglikelihood, ctrl_logits, accuracy, bs_perst_log,
        s_log, pi_log, context), where the *_log entries hold one
        image-summary-shaped tensor per layer.
    """
    hidden = inputs
    units = [2]
    layers = len(units)
    s_log = []
    ctrl_logits = []
    pi_log = []
    bs_perst_log = []
    module_count = 10

    for i in range(layers):

        modules = modular.create_dense_modules(hidden,
                                               module_count,
                                               units=units[i],
                                               activation=tf.nn.relu)
        hidden, l, s, pi, bs = modular.variational_mask(
            hidden, modules, context, 0.001,
            tf.shape(inputs)[0])
        pi_log.append(pi)
        s_log.append(
            tf.cast(tf.reshape(s, [1, -1, module_count, 1]), tf.float32))

        # BUG FIX: these two appends previously sat outside the loop, so
        # with more than one layer only the LAST layer's controller
        # logits and persistent best-selection would be logged (the
        # sibling dep_variational_mask network logs them per layer).
        # Behavior is unchanged for the current single-layer units=[2].
        ctrl_logits.append(
            tf.cast(tf.reshape(l, [1, -1, module_count, 1]), tf.float32))
        bs_perst_log.append(
            tf.cast(tf.reshape(bs, [1, -1, module_count, 1]), tf.float32))

    logits = tf.layers.dense(hidden, 2)
    # logits = hidden

    target = modular.modularize_target(labels, context)
    loglikelihood = tf.distributions.Categorical(logits).log_prob(target)

    loglikelihood = sum_and_mean_il(loglikelihood, context.sample_size)

    predicted = tf.argmax(logits, axis=-1, output_type=tf.int32)
    accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, target), tf.float32))

    return (loglikelihood, ctrl_logits, accuracy, bs_perst_log, s_log, pi_log,
            context)
Exemplo n.º 5
0
    def network(context: modular.ModularContext):
        """Modular MLP: 10 dense modules (32 units) -> ReLU -> 8 dense
        modules (10 units) used directly as class logits.

        Args:
            context: the ModularContext driving module selection.

        Returns:
            Tuple of (loglikelihood, logits, accuracy, selection_entropy,
            batch_selection_entropy).
        """
        hidden_modules = modular.create_dense_modules(
            inputs, module_count=10, units=32)
        pre_act = modular.modular_layer(
            inputs, hidden_modules, parallel_count=1, context=context)
        hidden = tf.nn.relu(pre_act)

        output_modules = modular.create_dense_modules(
            hidden, module_count=8, units=10)
        logits = modular.modular_layer(
            hidden, output_modules, parallel_count=1, context=context)

        target = modular.modularize_target(labels, context)
        loglikelihood = tf.distributions.Categorical(logits).log_prob(target)

        is_correct = tf.equal(
            tf.argmax(logits, axis=-1, output_type=tf.int32), target)
        accuracy = tf.reduce_mean(tf.cast(is_correct, tf.float32))

        selection_entropy = context.selection_entropy()
        batch_selection_entropy = context.batch_selection_entropy()

        return (loglikelihood, logits, accuracy,
                selection_entropy, batch_selection_entropy)
Exemplo n.º 6
0
    def network(context: modular.ModularContext):
        """Modular CNN + linear stack gated by dependent variational masks.

        Layer counts and widths come from `arguments`; controller and
        output options come from FLAGS.  Per-layer mask tensors are
        accumulated as image summaries via fix_image_summary.

        Args:
            context: the ModularContext driving module selection.

        Returns:
            Tuple of (loglikelihood, logits, accuracy, ctrl_logits,
            s_log, context, pi_log, bs_perst_log).
        """
        activation = inputs_tr
        s_log, ctrl_logits, pi_log, bs_perst_log = [], [], [], []
        l_out_log = []  # kept for parity with the original (unused)

        def _masked_layer(x, modules, cnn_ctrl):
            # Apply dep_variational_mask with the options shared by every layer.
            return modular.dep_variational_mask(
                inputs=x,
                modules=modules,
                context=context,
                tile_shape=tf.shape(inputs_tr)[0],
                iteration=iterate,
                a_init=arguments['a_init_range'],
                b_init=arguments['b_init_range'],
                output_add=FLAGS.output_add,
                cnn_ctrl=cnn_ctrl)

        def _record(l, s, bs, pi, module_count):
            # Accumulate per-layer logging tensors.
            fix_image_summary(ctrl_logits, l, module_count)
            fix_image_summary(s_log, s, module_count)
            fix_image_summary(bs_perst_log, bs, module_count)
            pi_log.append(pi)

        # Convolutional stack (stride-2 convs do the downsampling).
        for idx, module_count in enumerate(arguments['cnn_module_list']):
            filter_shape = [3, 3, activation.shape[-1],
                            arguments['cnn_filter_size'][idx]]
            conv_modules = modular.create_conv_modules(
                filter_shape, module_count, strides=[1, 2, 2, 1])

            hidden, l, s, pi, bs = _masked_layer(
                activation, conv_modules, FLAGS.cnn_ctrl)

            _record(l, s, bs, pi, module_count)
            activation = modular.batch_norm(hidden)

        flattened = tf.layers.flatten(activation)

        # Fully-connected stack.
        for module_count in arguments['linear_module_list']:
            print('Linear')
            dense_modules = modular.create_dense_modules(
                flattened, module_count, units=arguments['linear_units'])
            flattened, l, s, pi, bs = _masked_layer(
                flattened, dense_modules, False)

            flattened = modular.batch_norm(flattened)
            _record(l, s, bs, pi, module_count)

        logits = tf.layers.dense(flattened, units=10)

        target = modular.modularize_target(labels_cast, context)
        loglikelihood = tf.distributions.Categorical(logits).log_prob(target)
        loglikelihood = sum_and_mean_il(loglikelihood, context.sample_size,
                                        tf.shape(inputs_tr)[0])

        predicted = tf.argmax(logits, axis=-1, output_type=tf.int32)
        accuracy = tf.reduce_mean(
            tf.cast(tf.equal(predicted, target), tf.float32))

        return (loglikelihood, logits, accuracy, ctrl_logits, s_log, context,
                pi_log, bs_perst_log)