def capsules_init(inputs, shape, strides, padding, pose_shape, add_bias,
                  name):
    """Build the primary capsule layer: conv -> reshape -> squash.

    Args:
        inputs: rank-4 conv feature map.
        shape: kernel spec [kh, kw, in_ch, num_capsules].
        strides, padding, add_bias: forwarded to _conv2d_wrapper.
        pose_shape: dimensionality of each capsule pose vector.
        name: variable scope for this layer.

    Returns:
        (poses, activations) — poses of shape
        [batch, h, w, num_capsules, pose_shape]; activations drop the
        last axis.
    """
    with tf.variable_scope(name):
        # Emit all pose components stacked into the conv output channels.
        conv_shape = shape[0:-1] + [shape[-1] * pose_shape]
        pose_stack = _conv2d_wrapper(inputs,
                                     shape=conv_shape,
                                     strides=strides,
                                     padding=padding,
                                     add_bias=add_bias,
                                     activation_fn=None,
                                     name='pose_stacked')
        static_shape = pose_stack.get_shape().as_list()
        # Split the channel axis into (num_capsules, pose_dim).
        poses = tf.reshape(
            pose_stack,
            [-1, static_shape[1], static_shape[2], shape[-1], pose_shape])
        beta_a = _get_weights_wrapper(name='beta_a', shape=[1, shape[-1]])
        poses = squash_v1(poses, axis=-1)
        # Activation = capsule vector length plus a learned bias.
        activations = K.sqrt(K.sum(K.square(poses), axis=-1)) + beta_a
        tf.logging.info("prim poses dimension:{}".format(poses.get_shape()))

    return poses, activations
# Example #2 (scrape separator; score: 0)
def capsule_model_B(X, num_classes):
    """Capsule-B: three parallel capsule towers (n-grams 3/4/5), averaged.

    Each tower runs conv -> primary capsules -> conv capsules -> flatten ->
    class capsules; the per-tower class poses are averaged and the final
    activations are the pose vector lengths.
    """
    poses_list = []
    for ngram in (3, 4, 5):
        with tf.variable_scope('capsule_' + str(ngram)):
            conv = _conv2d_wrapper(X,
                                   shape=[ngram, 300, 1, 32],
                                   strides=[1, 2, 1, 1],
                                   padding='VALID',
                                   add_bias=True,
                                   activation_fn=tf.nn.relu,
                                   name='conv1')
            tf.logging.info('output shape: {}'.format(conv.get_shape()))
            caps = capsules_init(conv, shape=[1, 1, 32, 16],
                                 strides=[1, 1, 1, 1], padding='VALID',
                                 pose_shape=16, add_bias=True,
                                 name='primary')
            caps = capsule_conv_layer(caps, shape=[3, 1, 16, 16],
                                      strides=[1, 1, 1, 1], iterations=3,
                                      name='conv2')
            caps = capsule_flatten(caps)
            poses, activations = capsule_fc_layer(caps, num_classes, 3, 'fc2')
            poses_list.append(poses)

    # Average the per-ngram poses, then recompute activations as lengths.
    poses = tf.reduce_mean(tf.convert_to_tensor(poses_list), axis=0)
    activations = K.sqrt(K.sum(K.square(poses), 2))
    return poses, activations
# Example #3 (scrape separator; score: 0)
def capsule_model_A(X, num_classes):
    """Capsule-A: a single capsule tower built on a trigram convolution.

    X is a rank-4 tensor (per the original note, e.g. (25, 200, 300, 1) —
    assumption, confirm against the caller). Returns the class-capsule
    poses and their activations.
    """
    with tf.variable_scope('capsule_' + str(3)):
        conv = _conv2d_wrapper(X,
                               shape=[3, 300, 1, 32],
                               strides=[1, 2, 1, 1],
                               padding='VALID',
                               add_bias=True,
                               activation_fn=tf.nn.relu,
                               name='conv1')
        tf.logging.info('output shape: {}'.format(conv.get_shape()))
        caps = capsules_init(conv, shape=[1, 1, 32, 16],
                             strides=[1, 1, 1, 1], padding='VALID',
                             pose_shape=16, add_bias=True, name='primary')
        caps = capsule_conv_layer(caps, shape=[3, 1, 16, 16],
                                  strides=[1, 1, 1, 1], iterations=3,
                                  name='conv2')
        caps = capsule_flatten(caps)
        poses, activations = capsule_fc_layer(caps, num_classes, 3, 'fc2')
    return poses, activations
def capsules_init(inputs, shape, strides, padding, pose_shape, add_bias, name):
    """Build the primary capsule layer: conv -> reshape -> squash.

    Args:
        inputs: rank-4 conv feature map.
        shape: kernel spec [kh, kw, in_ch, num_capsules].
        strides, padding, add_bias: forwarded to _conv2d_wrapper.
        pose_shape: dimensionality of each capsule pose vector.
        name: variable scope for this layer.

    Returns:
        (poses, activations) — poses of shape
        [batch, h, w, num_capsules, pose_shape]; activations drop the
        last axis.
    """
    with tf.variable_scope(name):
        # All pose components stacked into the conv output channels.
        poses = _conv2d_wrapper(inputs,
                                shape=shape[0:-1] + [shape[-1] * pose_shape],
                                strides=strides,
                                padding=padding,
                                add_bias=add_bias,
                                activation_fn=None,
                                name='pose_stacked')
        poses_shape = poses.get_shape().as_list()
        # Split the channel axis into (num_capsules, pose_dim).
        poses = tf.reshape(
            poses, [-1, poses_shape[1], poses_shape[2], shape[-1], pose_shape])
        beta_a = _get_weights_wrapper(name='beta_a', shape=[1, shape[-1]])
        # Fix: removed the dead `if False:` routing branch — only this
        # squash path ever executed, so behavior is unchanged.
        poses = squash_v1(poses, axis=-1)
        # Activation = capsule vector length plus a learned bias.
        activations = K.sqrt(K.sum(K.square(poses), axis=-1)) + beta_a

        tf.summary.histogram('activations', activations)
        tf.logging.info("prim poses dimension:{}".format(poses.get_shape()))
        tf.logging.info("prim activations dimension:{}".format(
            activations.get_shape()))

    return poses, activations
# Example #5 (scrape separator; score: 0)
def capsule_model_A(X, num_classes):
    """Capsule-A (verbose variant): single trigram tower with shape prints.

    Uses the (poses, activations) tuple API of capsule_conv_layer /
    capsule_flatten / capsule_fc_layer. Returns class poses and activations.
    """
    with tf.variable_scope('capsule_' + str(3)):
        print('capsule_' + str(3))
        print('X ', X)
        conv_out = _conv2d_wrapper(X, shape=[3, 300, 1, 32],
                                   strides=[1, 2, 1, 1], padding='VALID',
                                   add_bias=True, activation_fn=tf.nn.relu,
                                   name='conv1')
        print('cnnout', conv_out.shape)
        tf.logging.info('output shape: {}'.format(conv_out.get_shape()))
        p_init, a_init = capsules_init(conv_out, shape=[1, 1, 32, 16],
                                       strides=[1, 1, 1, 1], padding='VALID',
                                       pose_shape=16, add_bias=True,
                                       name='primary')
        print('poses_init', p_init.shape)
        print('activations_init', a_init.get_shape())
        p_conv, a_conv = capsule_conv_layer(p_init, a_init,
                                            shape=[3, 1, 16, 16],
                                            strides=[1, 1, 1, 1],
                                            iterations=3, name='conv2')
        print('poses_conv', p_conv.shape)
        print('activations_conv', a_conv.shape)
        p_flat, a_flat = capsule_flatten(p_conv, a_conv)
        print('capsule_flatten', p_flat.shape)
        print('activations_flat', a_flat.shape)

        poses, activations = capsule_fc_layer(p_flat, a_flat, num_classes, 3,
                                              'fc2')
        print('poses ', poses.shape)
        print('activations ', activations.shape)

    return poses, activations
# Example #6 (scrape separator; score: 0)
def baseline_model_cnn(X, num_classes):
    """CNN baseline: conv -> flatten -> FC(128, relu) -> FC(num_classes).

    Returns an empty poses tensor (no capsules in this baseline) and
    per-class sigmoid activations.
    """
    conv = _conv2d_wrapper(X, shape=[3, 300, 1, 32], strides=[1, 1, 1, 1],
                           padding='VALID', add_bias=False,
                           activation_fn=tf.nn.relu, name='conv1')
    flat = slim.flatten(conv)
    tf.logging.info('flatten shape: {}'.format(flat.get_shape()))
    hidden = slim.fully_connected(flat, 128, scope='relu_fc3',
                                  activation_fn=tf.nn.relu)
    tf.logging.info('fc shape: {}'.format(hidden.get_shape()))

    logits = slim.fully_connected(hidden, num_classes, scope='final_layer',
                                  activation_fn=None)
    activations = tf.sigmoid(logits)
    tf.logging.info('fc shape: {}'.format(activations.get_shape()))
    return tf.zeros([0]), activations
# Example #7 (scrape separator; score: 0)
def capsules_init(inputs, shape, strides, padding, pose_shape, add_bias, name):
    """Primary capsules: conv to stacked poses, reshape, squash, length+bias."""
    with tf.variable_scope(name):
        stacked = _conv2d_wrapper(inputs,
                                  shape=shape[:-1] + [shape[-1] * pose_shape],
                                  strides=strides,
                                  padding=padding,
                                  add_bias=add_bias,
                                  activation_fn=None,
                                  name='pose_stacked')
        dims = stacked.get_shape().as_list()
        # Reshape channels into per-capsule pose vectors.
        poses = tf.reshape(stacked,
                           [-1, dims[1], dims[2], shape[-1], pose_shape])
        beta_a = _get_weights_wrapper(name='beta_a', shape=[1, shape[-1]])
        poses = squash_v1(poses, axis=-1)
        # Capsule activation = vector length shifted by a learned bias.
        activations = K.sqrt(K.sum(K.square(poses), axis=-1)) + beta_a
        tf.logging.info("prim poses dimension:{}".format(poses.get_shape()))

    return poses, activations
# Example #8 (scrape separator; score: 0)
def capsule_model_B(X, num_classes):
    """Capsule-B (verbose variant): three n-gram towers, averaged poses.

    Uses the (poses, activations) tuple API of the capsule layers and
    prints intermediate shapes for debugging.
    """
    print('X.shape', X.shape)
    print('num_classes: ', num_classes.shape)
    poses_list = []
    for ngram in (3, 4, 5):
        with tf.variable_scope('capsule_' + str(ngram)):
            print('capsule_' + str(ngram))
            conv_out = _conv2d_wrapper(X, shape=[ngram, 300, 1, 32],
                                       strides=[1, 2, 1, 1], padding='VALID',
                                       add_bias=True,
                                       activation_fn=tf.nn.relu, name='conv1')
            print('cnnout', conv_out.shape)

            tf.logging.info('output shape: {}'.format(conv_out.get_shape()))
            p_init, a_init = capsules_init(conv_out, shape=[1, 1, 32, 16],
                                           strides=[1, 1, 1, 1],
                                           padding='VALID', pose_shape=16,
                                           add_bias=True, name='primary')
            print('poses_init', p_init.shape)
            print('activations_init', a_init.get_shape())
            p_conv, a_conv = capsule_conv_layer(p_init, a_init,
                                                shape=[3, 1, 16, 16],
                                                strides=[1, 1, 1, 1],
                                                iterations=3, name='conv2')
            print('poses_conv', p_conv.shape)
            print('activations_conv', a_conv.shape)
            p_flat, a_flat = capsule_flatten(p_conv, a_conv)
            print('capsule_flatten', p_flat.shape)
            print('activations_flat', a_flat.shape)

            poses, activations = capsule_fc_layer(p_flat, a_flat, num_classes,
                                                  3, 'fc2')
            print('poses ', poses.shape)
            print('activations ', activations.shape)
            poses_list.append(poses)
    print('-------------------------------')
    # Average the per-ngram class poses; activations are the pose lengths.
    poses = tf.reduce_mean(tf.convert_to_tensor(poses_list), axis=0)
    print('poses ', poses.shape)
    activations = K.sqrt(K.sum(K.square(poses), 2))
    print('activations ', activations.shape)
    return poses, activations
# Example #9 (scrape separator; score: 0)
def capsules_init(inputs, shape, strides, padding, pose_shape, add_bias, name):
    """Primary capsule layer: conv, split channels into capsules, squash."""
    with tf.variable_scope(name):
        n_caps = shape[-1]
        # One conv whose output channels carry every capsule's pose vector.
        pose_conv = _conv2d_wrapper(inputs,
                                    shape=shape[0:-1] + [n_caps * pose_shape],
                                    strides=strides,
                                    padding=padding,
                                    add_bias=add_bias,
                                    activation_fn=None,
                                    name='pose_stacked')
        spatial = pose_conv.get_shape().as_list()
        poses = tf.reshape(pose_conv,
                           [-1, spatial[1], spatial[2], n_caps, pose_shape])
        beta_a = _get_weights_wrapper(name='beta_a', shape=[1, n_caps])
        poses = squash_v1(poses, axis=-1)
        # Activation = squashed vector length plus a learned bias.
        activations = K.sqrt(K.sum(K.square(poses), axis=-1)) + beta_a
        tf.logging.info("prim poses dimension:{}".format(poses.get_shape()))

    return poses, activations