Exemplo n.º 1
0
def conv_pool_bn(pre_layer,
                 kernel_num,
                 kernel_size,
                 padding,
                 pool_size,
                 activation,
                 training,
                 epsilon=1e-4,
                 alpha=.1,
                 binary=True,
                 stochastic=False,
                 H=1.,
                 W_LR_scale="Glorot"):
    """Binarized conv -> max-pool -> batch-norm -> activation block.

    Args:
        pre_layer: input tensor fed to the binary convolution.
        kernel_num: number of convolution filters.
        kernel_size: spatial size of each filter.
        padding: convolution padding mode, forwarded to conv2d_binary.
        pool_size: pooling window size; also used as the stride, so the
            pooling windows do not overlap.
        activation: callable applied to the batch-norm output, or None to
            return the batch-norm output directly (matches the None
            handling of fully_connect_bn / fully_connect_latent).
        training: flag/placeholder selecting batch-norm train vs. inference.
        epsilon: batch-norm numerical-stability constant.
        alpha: batch-norm decay; the momentum passed on is 1 - alpha.
        binary: whether weights are binarized (forwarded to conv2d_binary).
        stochastic: stochastic vs. deterministic binarization (forwarded).
        H: binarization scale hyper-parameter (forwarded).
        W_LR_scale: learning-rate scaling scheme for binary weights
            (forwarded).

    Returns:
        The (optionally activated) batch-normalized pooled tensor.
    """
    conv = binary_layer.conv2d_binary(pre_layer,
                                      kernel_num,
                                      kernel_size,
                                      padding=padding,
                                      binary=binary,
                                      stochastic=stochastic,
                                      H=H,
                                      W_LR_scale=W_LR_scale)
    # Non-overlapping pooling: stride equals the window size.
    pool = tf.layers.max_pooling2d(conv,
                                   pool_size=pool_size,
                                   strides=pool_size)
    bn = binary_layer.batch_normalization(pool,
                                          epsilon=epsilon,
                                          momentum=1 - alpha,
                                          training=training)
    # Allow activation=None for consistency with the fully_connect helpers;
    # previously this line crashed when None was passed.
    return bn if activation is None else activation(bn)
Exemplo n.º 2
0
def conv_latent(pre_layer,
                kernel_num,
                kernel_size,
                padding,
                activation,
                training,
                epsilon=1e-4,
                alpha=.1,
                binary=True,
                stochastic=False,
                H=1.,
                W_LR_scale="Glorot"):
    """Latent-weight conv -> batch-norm -> activation block (no pooling).

    Args:
        pre_layer: input tensor fed to the latent binary convolution.
        kernel_num: number of convolution filters.
        kernel_size: spatial size of each filter.
        padding: convolution padding mode, forwarded to conv2d_latent.
        activation: callable applied to the batch-norm output, or None to
            return the batch-norm output directly (matches the None
            handling of fully_connect_bn / fully_connect_latent).
        training: flag/placeholder selecting batch-norm train vs. inference.
        epsilon: batch-norm numerical-stability constant.
        alpha: batch-norm decay; the momentum passed on is 1 - alpha.
        binary: whether weights are binarized (forwarded to conv2d_latent).
        stochastic: stochastic vs. deterministic binarization (forwarded).
        H: binarization scale hyper-parameter (forwarded).
        W_LR_scale: learning-rate scaling scheme for binary weights
            (forwarded).

    Returns:
        The (optionally activated) batch-normalized convolution output.
    """
    conv = binary_layer.conv2d_latent(pre_layer,
                                      kernel_num,
                                      kernel_size,
                                      padding=padding,
                                      binary=binary,
                                      stochastic=stochastic,
                                      H=H,
                                      W_LR_scale=W_LR_scale)
    bn = binary_layer.batch_normalization(conv,
                                          epsilon=epsilon,
                                          momentum=1 - alpha,
                                          training=training)
    # Allow activation=None for consistency with the fully_connect helpers;
    # previously this line crashed when None was passed.
    return bn if activation is None else activation(bn)
Exemplo n.º 3
0
def fully_connect_latent(pre_layer,
                         output_dim,
                         act,
                         use_bias,
                         training,
                         epsilon=1e-4,
                         alpha=.1,
                         binary=True,
                         stochastic=False,
                         H=1.,
                         W_LR_scale="Glorot"):
    """Latent-weight dense layer -> batch-norm -> optional activation.

    Args:
        pre_layer: input tensor fed to the latent dense layer.
        output_dim: number of output units.
        act: callable applied to the batch-norm output, or None to return
            the batch-norm output directly.
        use_bias: whether the dense layer adds a bias term.
        training: flag/placeholder selecting batch-norm train vs. inference.
        epsilon: batch-norm numerical-stability constant.
        alpha: batch-norm decay; the momentum passed on is 1 - alpha.
        binary, stochastic, H, W_LR_scale: accepted for signature parity
            with the conv helpers but NOT forwarded to dense_latent here.
            NOTE(review): possibly an oversight — confirm whether
            dense_latent should receive them, as conv2d_latent does.

    Returns:
        The (optionally activated) batch-normalized dense output.
    """
    pre_act = binary_layer.dense_latent(
        pre_layer,
        output_dim,
        use_bias=use_bias,
        # Keep latent weights inside [-1, 1], the standard BinaryNet clip.
        kernel_constraint=lambda w: tf.clip_by_value(w, -1.0, 1.0))
    bn = binary_layer.batch_normalization(pre_act,
                                          momentum=1 - alpha,
                                          epsilon=epsilon,
                                          training=training)
    # Identity check (is None) instead of equality, per PEP 8.
    if act is None:
        output = bn
    else:
        output = act(bn)
    return output
Exemplo n.º 4
0
def fully_connect_bn(pre_layer, output_dim, act, use_bias, training):
    """Binary dense layer -> batch-norm -> optional activation.

    Args:
        pre_layer: input tensor fed to the binary dense layer.
        output_dim: number of output units.
        act: callable applied to the batch-norm output, or None to return
            the batch-norm output directly.
        use_bias: whether the dense layer adds a bias term.
        training: flag/placeholder selecting batch-norm train vs. inference.

    Returns:
        The (optionally activated) batch-normalized dense output.

    Note:
        Batch-norm hyper-parameters are fixed here (momentum=0.9,
        epsilon=1e-4), unlike the *_latent helpers which expose them.
    """
    pre_act = binary_layer.dense_binary(
        pre_layer,
        output_dim,
        use_bias=use_bias,
        activation=None,
        # Keep latent weights inside [-1, 1], the standard BinaryNet clip.
        kernel_constraint=lambda w: tf.clip_by_value(w, -1.0, 1.0))
    bn = binary_layer.batch_normalization(pre_act,
                                          momentum=0.9,
                                          epsilon=1e-4,
                                          training=training)
    # Identity check (is None) instead of equality, per PEP 8.
    if act is None:
        output = bn
    else:
        output = act(bn)
    return output