Example #1
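All of these snippets target the TensorFlow 1.x API. Below is a minimal sketch of the imports they assume; `conv`, `hgru`, `normalization`, `pooling`, and `tf_fun` are project-local modules whose real import paths are not shown in this listing, so the paths here are placeholders.

import os
import numpy as np
import tensorflow as tf  # TF 1.x: tf.variable_scope, tf.layers, tf.placeholder

# Hypothetical paths: these are project-local modules, not public packages.
from layers import conv, pooling, normalization
from layers import hgru
from utils import tf_fun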
def build_model(data_tensor, reuse, training):
    """Create the hgru from Learning long-range..."""
    with tf.variable_scope('cnn', reuse=reuse):
        with tf.variable_scope('input', reuse=reuse):
            conv_aux = {
                'pretrained': os.path.join('weights',
                                           'gabors_for_contours_7.npy'),
                'pretrained_key': 's1',
                'nonlinearity': 'square'
            }
            x = conv.conv_layer(bottom=data_tensor,
                                name='gabor_input',
                                stride=[1, 1, 1, 1],
                                padding='SAME',
                                trainable=training,
                                use_bias=True,
                                aux=conv_aux)
            layer_hgru = hgru.hGRU('hgru_1',
                                   x_shape=x.get_shape().as_list(),
                                   timesteps=8,
                                   h_ext=15,
                                   strides=[1, 1, 1, 1],
                                   padding='SAME',
                                   aux={
                                       'reuse': False,
                                       'constrain': False
                                   },
                                   train=training)
            h2 = layer_hgru.build(x)
            h2 = normalization.batch(
                bottom=h2,
                reuse=reuse,
                # renorm=True,
                name='hgru_bn',
                training=training)

        with tf.variable_scope('readout_1', reuse=reuse):
            activity = conv.conv_layer(bottom=h2,
                                       name='pre_readout_conv',
                                       num_filters=2,
                                       kernel_size=1,
                                       trainable=training,
                                       use_bias=False)
            pool_aux = {'pool_type': 'max'}
            activity = pooling.global_pool(bottom=activity,
                                           name='pre_readout_pool',
                                           aux=pool_aux)
            activity = normalization.batch(
                bottom=activity,
                reuse=reuse,
                # renorm=True,
                name='readout_1_bn',
                training=training)

        with tf.variable_scope('readout_2', reuse=reuse):
            activity = tf.layers.flatten(activity, name='flat_readout')
            activity = tf.layers.dense(inputs=activity, units=2)
    return activity, h2
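A hedged usage sketch for Example #1; the placeholder shape is an assumption. The builder returns the two-unit readout and the batch-normalized hGRU state.

images = tf.placeholder(tf.float32, [None, 150, 150, 1], name='images')
logits, h2 = build_model(images, reuse=False, training=True)
# logits: [batch, 2]; h2: hGRU activity, useful for probes or auxiliary losses.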
Example #2
def build_model(data_tensor,
                reuse,
                training,
                output_shape,
                data_format='NHWC'):
    """Create the hgru from Learning long-range..."""
    if isinstance(output_shape, list):
        output_shape = output_shape[-1]
    elif isinstance(output_shape, dict):
        output_shape = output_shape['output']
    output_normalization_type = 'batch_norm_original'
    ff_kernel_size = (5, 5)
    ff_nl = tf.nn.elu
    data_tensor, long_data_format = tf_fun.interpret_data_format(
        data_tensor=data_tensor, data_format=data_format)

    # Build model
    with tf.variable_scope('gammanet', reuse=reuse):
        conv_aux = {
            'pretrained': os.path.join('weights',
                                       'gabors_for_contours_11.npy'),
            'pretrained_key': 's1',
            'nonlinearity': 'square'
        }
        activity = conv.conv_layer(bottom=data_tensor,
                                   name='gabor_input',
                                   stride=[1, 1, 1, 1],
                                   padding='SAME',
                                   trainable=training,
                                   use_bias=True,
                                   aux=conv_aux)
        layer_hgru = hgru.hGRU('hgru_1',
                               x_shape=activity.get_shape().as_list(),
                               timesteps=8,
                               h_ext=15,
                               strides=[1, 1, 1, 1],
                               padding='SAME',
                               aux={
                                   'reuse': False,
                                   'constrain': False
                               },
                               train=training)
        h2 = layer_hgru.build(activity)
        h2 = normalization.batch_contrib(bottom=h2,
                                         name='hgru_bn',
                                         training=training)
        mask = np.load('weights/cardena_mask.npy')[None, :, :, None]
        activity = h2 * mask
    with tf.variable_scope('cv_readout', reuse=reuse):
        activity = tf.reduce_mean(activity, axis=[1, 2])  # global average pool
        activity = tf.layers.dense(activity, output_shape)
    if long_data_format == 'channels_first':
        activity = tf.transpose(activity, (0, 2, 3, 1))
    extra_activities = {}
    if activity.dtype != tf.float32:
        activity = tf.cast(activity, tf.float32)
    # return [activity, h_deep], extra_activities
    return activity, extra_activities
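A hedged usage sketch for Example #2; the placeholder shape is an assumption, and the dict-valued output_shape exercises the `output_shape['output']` branch above.

images = tf.placeholder(tf.float32, [None, 140, 140, 1])
preds, extras = build_model(images, reuse=False, training=True,
                            output_shape={'output': 1}, data_format='NHWC')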
Example #3
def build_model(data_tensor, reuse, training, output_shape):
    """Create the hgru from Learning long-range..."""
    if isinstance(output_shape, list):
        output_shape = output_shape[0]
    with tf.variable_scope('cnn', reuse=reuse):
        # Add input
        so_filters = np.load(
            os.path.join('weights', 'so_filters.npy'))
        # Reshape to [h, w, in, out] and keep every 4th of the 32 filters.
        so_filters = so_filters.squeeze().reshape(11, 11, 3, 8 * 4)[..., 0:32:4]
        so_filter_tensor = tf.get_variable(name='so_filters',
                                           initializer=so_filters,
                                           trainable=training)
        so_bias = tf.get_variable(name='so_bias',
                                  initializer=tf.zeros(so_filters.shape[-1]),
                                  trainable=training)
        in_emb = tf.nn.conv2d(input=data_tensor,
                              filter=so_filter_tensor,
                              strides=[1, 1, 1, 1],
                              padding='SAME',
                              name='so')
        in_emb = tf.nn.bias_add(in_emb, so_bias)
        # in_emb = in_emb ** 2
        layer_hgru = hgru.hGRU('hgru_1',
                               x_shape=in_emb.get_shape().as_list(),
                               timesteps=8,
                               h_ext=21,
                               strides=[1, 1, 1, 1],
                               padding='SAME',
                               aux={
                                   'reuse': False,
                                   'constrain': False,
                                   'nonnegative': True,
                                   'while_loop': False,
                                   'mirror_horizontal': True,
                                   'horizontal_dilations': [1, 2, 2, 1]
                               },
                               train=training)
        h2 = layer_hgru.build(in_emb)
        h2 = normalization.batch(bottom=h2,
                                 renorm=False,
                                 name='hgru_bn',
                                 training=training)
        activity = conv.readout_layer(
            activity=h2,
            reuse=reuse,
            training=training,
            pool_type='max',  # 'select',
            output_shape=output_shape,
            features=so_filters.shape[-1])
    extra_activities = {'activity': h2}
    return activity, extra_activities
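A minimal NumPy check of the filter slicing in Example #3: `[..., 0:32:4]` steps through the 32 output channels in strides of 4, keeping 8 filters.

demo = np.zeros((11, 11, 3, 32))
print(demo[..., 0:32:4].shape)  # -> (11, 11, 3, 8)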
Example #4
def build_model(data_tensor, reuse, training, output_shape):
    """Create the hgru from Learning long-range..."""
    if isinstance(output_shape, list):
        output_shape = output_shape[0]
    with tf.variable_scope('cnn', reuse=reuse):
        # Add input
        # The .npy stores a pickled dict of AlexNet weights; NumPy >= 1.16.3
        # requires allow_pickle=True to load it.
        so_filters = np.load(
            '/media/data_cifs/clicktionary/pretrained_weights/alexnet.npy',
            allow_pickle=True).item()
        so_filter_tensor = tf.get_variable(name='so_filters',
                                           initializer=so_filters['conv1'][0],
                                           trainable=training)
        so_bias = tf.get_variable(name='so_bias',
                                  initializer=so_filters['conv1'][1],
                                  trainable=training)
        in_emb = tf.nn.conv2d(input=data_tensor,
                              filter=so_filter_tensor,
                              strides=[1, 1, 1, 1],
                              padding='SAME',
                              name='so')
        in_emb = tf.nn.bias_add(in_emb, so_bias)
        # in_emb = in_emb ** 2
        layer_hgru = hgru.hGRU('hgru_1',
                               x_shape=in_emb.get_shape().as_list(),
                               timesteps=8,
                               h_ext=11,
                               strides=[1, 1, 1, 1],
                               padding='SAME',
                               aux={
                                   'reuse': False,
                                   'constrain': False,
                                   'nonnegative': True,
                                   'while_loop': False,
                                   'horizontal_dilations': [1, 2, 2, 1]
                               },
                               train=training)
        h2 = layer_hgru.build(in_emb)
        h2 = normalization.batch(bottom=h2,
                                 renorm=False,
                                 name='hgru_bn',
                                 training=training)
        activity = conv.readout_layer(
            activity=h2,
            reuse=reuse,
            training=training,
            pool_type='max',  # 'select',
            output_shape=output_shape,
            features=output_shape)
    extra_activities = {'activity': h2}
    return activity, extra_activities
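A hedged usage sketch for Example #4: AlexNet's conv1 kernel has three input channels, so the placeholder assumes RGB input (the spatial size is also an assumption).

images = tf.placeholder(tf.float32, [None, 227, 227, 3])
logits, extras = build_model(images, reuse=False, training=True, output_shape=2)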
Example #5
def build_model(data_tensor, reuse, training, output_shape):
    """Create the hgru from Learning long-range..."""
    if isinstance(output_shape, list):
        output_shape = output_shape[0]
    reduction_filters = 16
    with tf.variable_scope('cnn', reuse=reuse):
        # Add input
        so_filters = np.load(
            os.path.join('weights', 'so_filters.npy'))
        so_filters = so_filters.squeeze().reshape(11, 11, 3, 8 * 4)
        in_emb = tf.nn.conv2d(input=data_tensor,
                              filter=so_filters,
                              strides=[1, 1, 1, 1],
                              padding='SAME',
                              name='so')
        in_emb = tf.layers.conv2d(inputs=in_emb,
                                  filters=reduction_filters,
                                  kernel_size=(1, 1))
        in_emb = in_emb ** 2  # square nonlinearity on the reduced embedding
        layer_hgru = hgru.hGRU('hgru_1',
                               x_shape=in_emb.get_shape().as_list(),
                               timesteps=8,
                               h_ext=11,
                               strides=[1, 1, 1, 1],
                               padding='SAME',
                               aux={
                                   'reuse': False,
                                   'constrain': False,
                                   'nonnegative': True,
                                   'while_loop': False
                               },
                               train=training)
        h2 = layer_hgru.build(in_emb)
        h2 = normalization.batch(bottom=h2,
                                 renorm=False,
                                 name='hgru_bn',
                                 training=training)
        activity = conv.readout_layer(
            activity=h2,
            reuse=reuse,
            training=training,
            pool_type='max',  # 'select',
            output_shape=output_shape,
            features=reduction_filters)
    extra_activities = {'activity': h2}
    return activity, extra_activities
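A hedged usage sketch for Example #5; like Example #4, the S1 filters here have three input channels, so the sketch assumes RGB input.

images = tf.placeholder(tf.float32, [None, 150, 150, 3])
logits, extras = build_model(images, reuse=False, training=True, output_shape=2)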
Example #6
def build_model(data_tensor, reuse, training, output_shape):
    """Create the hgru from Learning long-range..."""
    if isinstance(output_shape, list):
        output_shape = output_shape[0]
    with tf.variable_scope('cnn', reuse=reuse):
        # Add input
        conv_aux = {
            'pretrained': os.path.join('weights',
                                       'gabors_for_contours_11.npy'),
            'pretrained_key': 's1',
            'nonlinearity': 'square'
        }
        activity = conv.conv_layer(bottom=data_tensor,
                                   name='gabor_input',
                                   stride=[1, 1, 1, 1],
                                   padding='SAME',
                                   trainable=training,
                                   use_bias=True,
                                   aux=conv_aux)
        layer_hgru = hgru.hGRU('hgru_1',
                               x_shape=activity.get_shape().as_list(),
                               timesteps=8,
                               h_ext=15,
                               strides=[1, 1, 1, 1],
                               padding='SAME',
                               aux={
                                   'reuse': False,
                                   'constrain': False
                               },
                               train=training)
        h2 = layer_hgru.build(activity)
        h2 = normalization.batch(bottom=h2,
                                 renorm=False,
                                 name='hgru_bn',
                                 training=training)
        activity = conv.readout_layer(activity=h2,
                                      reuse=reuse,
                                      training=training,
                                      output_shape=output_shape)
    extra_activities = {'activity': h2}
    return activity, extra_activities
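Example #6 pairs the Gabor front end of Example #1 with conv.readout_layer. A hedged sketch of the `reuse` pattern all of these builders support: a second call with reuse=True shares variables for an evaluation graph (shapes are assumptions).

train_x = tf.placeholder(tf.float32, [None, 150, 150, 1])
val_x = tf.placeholder(tf.float32, [None, 150, 150, 1])
train_logits, _ = build_model(train_x, reuse=False, training=True, output_shape=2)
val_logits, _ = build_model(val_x, reuse=True, training=False, output_shape=2)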
Example #7
def build_model(data_tensor, reuse, training, output_shape):
    """Create the hgru from Learning long-range..."""
    if isinstance(output_shape, list):
        output_shape = output_shape[0]
    with tf.variable_scope('cnn', reuse=reuse):
        # Add input
        in_emb = conv.skinny_input_layer(X=data_tensor,
                                         reuse=reuse,
                                         training=training,
                                         features=24,
                                         conv_activation=tf.nn.elu,
                                         conv_kernel_size=7,
                                         pool=False,
                                         name='l0')
        layer_hgru = hgru.hGRU('hgru_1',
                               x_shape=in_emb.get_shape().as_list(),
                               timesteps=8,
                               h_ext=9,
                               strides=[1, 1, 1, 1],
                               padding='SAME',
                               aux={
                                   'reuse': False,
                                   'constrain': False,
                                   'nonnegative': True
                               },
                               train=training)
        h2 = layer_hgru.build(in_emb)
        h2 = normalization.batch(bottom=h2,
                                 renorm=True,
                                 name='hgru_bn',
                                 training=training)
        # NB: the readout indexes output_shape['output'], so this builder
        # expects a dict-valued output_shape.
        activity = conv.conv_layer(bottom=h2,
                                   name='pre_readout_conv',
                                   num_filters=output_shape['output'],
                                   kernel_size=1,
                                   trainable=training,
                                   use_bias=True)
    extra_activities = {}
    return activity, extra_activities
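A hedged usage sketch for Example #7, passing the dict-valued output_shape its readout expects; unlike the pooled readouts above, it returns a per-location logit map.

images = tf.placeholder(tf.float32, [None, 150, 150, 1])
logit_map, _ = build_model(images, reuse=False, training=True,
                           output_shape={'output': 2})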
Example #8
def build_model(data_tensor, reuse, training, output_shape):
    """Create the hgru from Learning long-range..."""
    if isinstance(output_shape, list):
        output_shape = output_shape[0]
    with tf.variable_scope('cnn', reuse=reuse):
        with tf.variable_scope('input', reuse=reuse):
            x = tf.layers.conv2d(
                inputs=data_tensor,
                filters=24,
                kernel_size=11,
                name='l0',
                strides=(1, 1),
                padding='same',
                activation=tf.nn.relu,
                trainable=training,
                use_bias=True)
            gauss_mask = generate_gaussian_masks(x, [20, 40], num_centers_list=[30, 15])
            o_x = masked_substitution_noise(x, gauss_mask)
            layer_hgru = hgru.hGRU(
                'hgru_1',
                x_shape=o_x.get_shape().as_list(),
                timesteps=8,
                h_ext=15,
                strides=[1, 1, 1, 1],
                padding='SAME',
                aux={'reuse': False, 'constrain': False},
                train=training)
            h2 = layer_hgru.build(o_x)

        # h2 = normalization.batch(
        #     bottom=h2,
        #     renorm=False,
        #     name='hgru_bn',
        #     training=training)

    # Return the hGRU's change to the noise-substituted input (no extras).
    return h2 - o_x, {}
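Example #8 calls generate_gaussian_masks and masked_substitution_noise, neither of which is defined in this listing. The sketch below is purely hypothetical, inferred from the call sites: a soft spatial mask built from randomly placed Gaussian bumps (one batch of centers per sigma), and a substitution step that swaps masked activations for Gaussian noise.

def generate_gaussian_masks(x, sigmas, num_centers_list):
    """Hypothetical: soft [1, H, W, 1] mask from random Gaussian bumps."""
    _, h, w, _ = x.get_shape().as_list()
    yy, xx = np.meshgrid(np.arange(h), np.arange(w), indexing='ij')
    mask = np.zeros((h, w), dtype=np.float32)
    for sigma, n in zip(sigmas, num_centers_list):
        cys = np.random.randint(0, h, n)
        cxs = np.random.randint(0, w, n)
        for y0, x0 in zip(cys, cxs):
            mask += np.exp(
                -((yy - y0) ** 2 + (xx - x0) ** 2) / (2. * sigma ** 2))
    mask = np.minimum(mask, 1.)[None, :, :, None].astype(np.float32)
    return tf.constant(mask)


def masked_substitution_noise(x, mask):
    """Hypothetical: replace masked activations with unit Gaussian noise."""
    noise = tf.random_normal(tf.shape(x), dtype=x.dtype)
    return x * (1. - mask) + noise * mask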