Example 1
def discriminator_forward(config,
                          labels,
                          incoming,
                          scope="discriminator",
                          name=None,
                          reuse=False):
    with tf.variable_scope(scope, name, reuse=reuse):
        output = leaky_relu(
            batch_normalization(
                conv_2d(incoming, config.dim, 5, 2, name="conv1"), 0.2))
        output = leaky_relu(
            batch_normalization(
                conv_2d(output, 2 * config.dim, 5, 2, name="conv2"), 0.2))
        output = leaky_relu(
            batch_normalization(
                conv_2d(output, 4 * config.dim, 5, 2, name="conv3"), 0.2))
        output = tf.reshape(output, [-1, 4 * 4 * 4 * config.dim])

        # Embed the labels and concatenate them with the convolutional
        # features (label-conditional discriminator).
        output = fully_connected(output, 56 * config.dim, name="fc1_1")
        embed = fully_connected(labels, 8 * config.dim, name="fc1_2")

        output = leaky_relu(
            batch_normalization(tf.concat([output, embed], axis=-1)), 0.2)
        output = fully_connected(output, 8 * config.dim, name="fc2")
        output = batch_normalization(output)
        output = leaky_relu(output, 0.2)
        output = tf.reshape(fully_connected(output, 1, bias=False, name="fc3"),
                            [-1])

    return output
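
The snippet above assumes a leaky_relu(x, alpha) helper is in scope. A minimal sketch of such a helper, assuming it is not imported from tflearn.activations:

import tensorflow as tf

def leaky_relu(x, alpha=0.2, name="leaky_relu"):
    # max(x, alpha * x) is the standard leaky ReLU formulation
    return tf.maximum(x, alpha * x, name=name)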
Example 2
def classifier_forward(config,
                       incoming,
                       name=None,
                       reuse=False,
                       scope="classifier"):
    with tf.variable_scope(scope, name, reuse=reuse):
        network = incoming
        # Note: conv_2d already applies ReLU internally (activation='relu'),
        # so the outer relu(batch_normalization(...)) applies it a second time.
        network = relu(
            batch_normalization(
                conv_2d(network,
                        32,
                        5,
                        activation='relu',
                        regularizer="L2",
                        strides=2)))
        network = relu(
            batch_normalization(
                conv_2d(network,
                        64,
                        5,
                        activation='relu',
                        regularizer="L2",
                        strides=2)))
        network = flatten(network)

        network = relu(batch_normalization(fully_connected(network, 1024)))
        network = dropout(network, 0.5)

        network = fully_connected(network, 10)

    return network
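
A hypothetical training setup for classifier_forward above, sketched with TFLearn's regression/DNN wrappers; the 28x28x1 input shape and the dataset names X, Y are assumptions, not from the original source (the config argument is unused by this particular snippet, so None is passed):

import tensorflow as tf
import tflearn
from tflearn.layers.core import input_data
from tflearn.layers.estimator import regression

images = input_data(shape=[None, 28, 28, 1])
logits = classifier_forward(None, images)      # config is not used by the snippet above
net = regression(tf.nn.softmax(logits),
                 optimizer='adam',
                 learning_rate=0.001,
                 loss='categorical_crossentropy')
model = tflearn.DNN(net)
# model.fit(X, Y, n_epoch=10, batch_size=128, validation_set=0.1)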
Example 3
def conv_block(t, filters):
    t = layers.conv_1d(t, filters, 3, activation='linear', bias=False)
    t = layers.batch_normalization(t)
    t = layers.activation(t, 'relu')
    t = layers.conv_1d(t, filters, 3, activation='linear', bias=False)
    t = layers.batch_normalization(t)
    t = layers.activation(t, 'relu')
    return t
Example 4
def gen_func(args):
    """Generator function"""
    with tf.variable_scope("generator", reuse=False):
        inp = tf.concat(args, axis=1)
        inp = fully_connected(inp, 1, activation='elu')
        inp = batch_normalization(inp)
        inp = fully_connected(inp, 1, activation='elu')
        inp = batch_normalization(inp)
        return [inp]
Example 5
def generator_forward(config,
                      noise=None,
                      scope="generator",
                      name=None,
                      reuse=False,
                      num_samples=-1):
    with tf.variable_scope(scope, name, reuse=reuse):
        if noise is None:
            noise = tf.random_normal(
                [config.batch_size if num_samples == -1 else num_samples, 128],
                name="noise")

        output = fully_connected(noise,
                                 4 * 4 * 8 * config.gen_dim,
                                 name="input")
        output = tf.reshape(output, [-1, 4, 4, 8 * config.gen_dim])
        output = batch_normalization(output)
        output = relu(output)

        output = conv_2d_transpose(output,
                                   4 * config.gen_dim,
                                   5, [8, 8],
                                   name="conv1",
                                   strides=2)
        output = batch_normalization(output)
        output = relu(output)

        output = conv_2d_transpose(output,
                                   2 * config.gen_dim,
                                   5, [16, 16],
                                   name="conv2",
                                   strides=2)
        output = batch_normalization(output)
        output = relu(output)

        output = conv_2d_transpose(output,
                                   config.gen_dim,
                                   5, [32, 32],
                                   name="conv3",
                                   strides=2)
        output = batch_normalization(output)
        output = relu(output)

        output = conv_2d_transpose(output,
                                   3,
                                   5, [64, 64],
                                   name="conv4",
                                   strides=2)
        output = tf.tanh(output)

    return output
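
A minimal sampling sketch for generator_forward above; the config object with batch_size and gen_dim fields is an assumption made for illustration:

import collections
import tensorflow as tf

Config = collections.namedtuple("Config", ["batch_size", "gen_dim"])
config = Config(batch_size=16, gen_dim=64)

fake_batch = generator_forward(config)                    # [16, 64, 64, 3], values in [-1, 1]
samples = generator_forward(config, num_samples=4, reuse=True)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    imgs = sess.run(samples)                              # numpy array, shape (4, 64, 64, 3)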
Example 6
def __init__(self, sequence_length, num_classes, embeddings, num_filters, l2_reg_lambda=0.0, dropout=None, bn=False):
    self.input_text = layers.input_data((None, sequence_length), dtype=tf.int32)

    with tf.variable_scope('Embedding'):
        embeddings_var = tf.Variable(embeddings, name='W', dtype=tf.float32)
        # Keep index 0 as an all-zero (padding) embedding.
        embeddings_var = tf.concat([np.zeros((1, embeddings.shape[1])), embeddings_var[1:]], axis=0)
        self.embeded_text = tf.gather(embeddings_var, self.input_text)

    net = self.embeded_text
    for num_filter in num_filters:
        if bn:
            # , weights_init=tflearn.initializations.uniform(minval=-0.001, maxval=0.001)
            net = layers.conv_1d(net, num_filter, 3, padding='valid', activation='linear', bias=False)
            net = layers.batch_normalization(net)
            net = layers.activation(net, 'relu')
        else:
            net = layers.conv_1d(net, num_filter, 3, padding='valid', activation='relu', bias=True, regularizer='L2', weight_decay=l2_reg_lambda)

    if dropout is not None:
        net = layers.dropout(net, float(dropout))

    features = layers.flatten(layers.max_pool_1d(net, net.shape.as_list()[1], padding='valid'))
    self.probas = layers.fully_connected(features, num_classes, activation='softmax', regularizer='L2', weight_decay=l2_reg_lambda)
    # optimizer = tflearn.optimizers.Momentum(learning_rate=0.1, momentum=0.9, lr_decay=0.2, decay_step=1000, staircase=True)
    optimizer = tflearn.optimizers.Adam(learning_rate=0.001)
    self.train_op = layers.regression(
        self.probas,
        optimizer=optimizer,
        batch_size=128)
Example 7
def generator_forward(config,
                      noise=None,
                      scope="generator",
                      name=None,
                      num_samples=-1,
                      reuse=False):
    with tf.variable_scope(scope, name, reuse=reuse):
        if noise is None:
            noise = tf.random_normal(
                [config.batch_size if num_samples == -1 else num_samples, 128],
                name="noise")

        output = fully_connected(noise, 4 * 4 * config.gen_dim, name="input")
        output = tf.reshape(output, [-1, 4, 4, config.gen_dim])

        output = residual_block_upsample(output, config.gen_dim, 3, name="rb1")
        output = residual_block_upsample(output, config.gen_dim, 3, name="rb2")
        output = residual_block_upsample(output, config.gen_dim, 3, name="rb3")

        output = batch_normalization(output)
        output = tf.nn.relu(output)
        output = conv_2d(output, 3, 3, name="output")
        output = tf.tanh(output)

    return output
Example 8
def generator_forward(config,
                      noise=None,
                      scope="generator",
                      name=None,
                      reuse=False,
                      num_samples=-1):
    with tf.variable_scope(scope, name, reuse=reuse):
        if noise is None:
            noise = tf.random_normal(
                [config.batch_size if num_samples == -1 else num_samples, 128],
                name="noise")

        output = fully_connected(noise, 4 * 4 * 4 * config.dim)
        output = batch_normalization(output)
        output = tf.nn.relu(output)
        output = tf.reshape(output, [-1, 4, 4, 4 * config.dim])

        output = conv_2d_transpose(output,
                                   2 * config.dim,
                                   5, [8, 8],
                                   strides=2)
        # Crop the 8x8 feature map to 7x7 so the remaining two stride-2
        # transposed convolutions produce 14x14 and then 28x28 outputs.
        output = output[:, :7, :7, :]

        output = conv_2d_transpose(output, config.dim, 5, [14, 14], strides=2)
        output = tf.nn.relu(output)

        output = conv_2d_transpose(output, 1, 5, [28, 28], strides=2)

        output = tf.tanh(output)

    return output
Example 9
def conv_block(t, filters):
    t = layers.conv_1d(
        t,
        filters,
        3,
        weights_init=tflearn.initializations.variance_scaling(),
        activation='linear',
        bias=False)
    t = layers.batch_normalization(t)
    t = layers.activation(t, 'relu')
    t = layers.conv_1d(
        t,
        filters,
        3,
        weights_init=tflearn.initializations.variance_scaling(),
        activation='linear',
        bias=False)
    t = layers.batch_normalization(t)
    t = layers.activation(t, 'relu')
    return t
Example 10
def res18_forward(incoming, scope=None, name="resnet_18", reuse=False):
    with tf.variable_scope(scope, default_name=name, reuse=reuse):
        network = conv_2d(incoming, 32, 5, 2, name="conv1")
        network = residual_block(network, 2, 32, downsample=True, batch_norm=True, name="rb1")
        network = residual_block(network, 2, 64, downsample=True, batch_norm=True, name="rb2")
        network = residual_block(network, 2, 128, downsample=True, batch_norm=True, name="rb3")
        network = residual_block(network, 2, 256, downsample=True, batch_norm=True, name="rb4")
        network = relu(batch_normalization(fully_connected(network, 256, name="fc1")))
        network = fully_connected(network, 5, name="fc2")

    return network
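
A hypothetical way to call res18_forward above; the 64x64x3 input placeholder is an assumption for illustration (the final layer is a 5-way classifier head):

import tensorflow as tf

images = tf.placeholder(tf.float32, [None, 64, 64, 3])
logits = res18_forward(images)            # shape [None, 5]
probs = tf.nn.softmax(logits)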