# Example 1
def fully_connected(features, mode):
    """Apply an optional fully connected layer followed by optional dropout.

    Args:
        features: Input tensor of shape [batch, input_dim].
        mode: tf.contrib.learn.ModeKeys value; TRAIN enables dropout.

    Returns:
        Tensor of shape [batch, fw] when hyperparameter 'fw' > 0,
        otherwise `features` passed through unchanged (modulo dropout).
    """
    keep_prob = hypers.get_param('fd')
    training = (mode == tf.contrib.learn.ModeKeys.TRAIN)
    fc_act_func = activationFunc(hypers.get_param('fa'))
    width = hypers.get_param('fw')
    bias = hypers.get_param('fb')
    scale = hypers.get_param('ff')
    # Renamed from 'mode' so it no longer shadows this function's
    # `mode` argument (initializer mode, e.g. FAN_IN/FAN_OUT/FAN_AVG).
    init_mode = hypers.get_param('fm')
    uniform = hypers.get_param('fu')

    if width > 0:
        winitializer = ctblayers.variance_scaling_initializer(factor=scale,
                                                              mode=init_mode,
                                                              uniform=uniform)
        binitializer = tf.constant_initializer(bias)
        output = ctblayers.fully_connected(features,
                                           width,
                                           weights_initializer=winitializer,
                                           biases_initializer=binitializer,
                                           activation_fn=fc_act_func)
    else:
        output = features

    if 0.0 < keep_prob < 1.0:
        # BUG FIX: tf.layers.dropout's second argument is the drop *rate*,
        # not the keep probability (unlike the older tf.nn.dropout).
        # Convert so hyperparameter 'fd' keeps its keep-probability meaning.
        output = tf.layers.dropout(output, rate=1.0 - keep_prob,
                                   training=training)

    # [dim] output fc fw>0 [bc, fw]
    # [dim] output fc fw=0 [bc, input]
    return output
# Example 2
def layer_normalize(features, mode):
    """Layer-normalize `features` and apply the configured activation.

    `mode` is accepted for signature parity with the other normalizers
    but is not used here — layer norm behaves identically in train/eval.
    Centering, scaling and activation come from the 'w*' hyperparameters.
    """
    activation = activationFunc(hypers.get_param('wa'))
    use_center = hypers.get_param('wc')
    use_scale = hypers.get_param('w1')

    normalized = ctblayers.layer_norm(
        features,
        center=use_center,
        scale=use_scale,
        activation_fn=activation)

    # dim output normalize ['tb', 'dx', 'ws']
    return normalized
# Example 3
def output(features):
    """Project `features` through the final fully connected output layer.

    Width, activation, bias value, weight initialization and L1/L2
    regularization are all read from the 'o*' hyperparameters.
    Returns a tensor of shape [batch, on].
    """
    width = hypers.get_param('on')
    act_func = activationFunc(hypers.get_param('oa'))
    bias = hypers.get_param('ob')
    scale = hypers.get_param('of')
    mode = hypers.get_param('om')
    uniform = hypers.get_param('ou')

    weight_init = ctblayers.variance_scaling_initializer(
        factor=scale, mode=mode, uniform=uniform)
    bias_init = tf.constant_initializer(bias)
    # The same regularizer object is applied to both weights and biases.
    regularizer = ctblayers.l1_l2_regularizer(
        scale_l1=hypers.get_param('o1'),
        scale_l2=hypers.get_param('o2'))

    # [dim] output ot fw>0 [bc, fw]
    return ctblayers.fully_connected(
        features,
        width,
        weights_initializer=weight_init,
        weights_regularizer=regularizer,
        biases_initializer=bias_init,
        biases_regularizer=regularizer,
        activation_fn=act_func)
# Example 4
def batch_normalize(features, mode):
    """Batch-normalize `features` and apply the configured activation.

    Uses batch statistics while training (mode == TRAIN) and the moving
    averages otherwise. All knobs come from the 'w*' hyperparameters.
    """
    activation = activationFunc(hypers.get_param('wa'))
    is_train = (mode == tf.contrib.learn.ModeKeys.TRAIN)

    normalized = ctblayers.batch_norm(
        features,
        decay=hypers.get_param('wy'),
        center=hypers.get_param('wc'),
        scale=hypers.get_param('w1'),
        epsilon=hypers.get_param('we'),
        activation_fn=activation,
        is_training=is_train)

    # dim output normalize ['tb', 'dx', 'ws']
    return normalized
# Example 5
def rnn_cells():
    """Build a matched forward/backward pair of RNN cells.

    Cell type, width, activation, forget bias and peephole usage come
    from the 'r*' hyperparameters.

    Returns:
        (fw_cell, bw_cell): two independently constructed cells with the
        same configuration (separate objects so they don't share state).

    Raises:
        ValueError: if hyperparameter 'rc' names an unknown cell type.
    """
    cell_type = hypers.get_param('rc')
    act_func = activationFunc(hypers.get_param('ra'))
    forget_bias = hypers.get_param('rb')
    rnn_width = hypers.get_param('rw')
    peepholes = hypers.get_param('rp')

    def _make_cell():
        # One fresh cell per call so forward and backward directions do
        # not share the same cell object.
        if cell_type == "lstm":
            return lstm_cell(rnn_width,
                             use_peepholes=peepholes,
                             forget_bias=forget_bias,
                             activation=act_func)
        if cell_type == "clstm":
            return clstm_cell(rnn_width,
                              use_peepholes=peepholes,
                              forget_bias=forget_bias,
                              activation=act_func)
        if cell_type == "gru":
            return gru_cell(rnn_width, activation=act_func)
        if cell_type == "ugrnn":
            return ugrnn_cell(rnn_width,
                              forget_bias=forget_bias,
                              activation=act_func)
        # BUG FIX: an unrecognized 'rc' previously fell through all
        # branches and crashed later with an opaque UnboundLocalError;
        # fail fast with a clear message instead.
        raise ValueError("Unknown RNN cell type: %r" % (cell_type,))

    return _make_cell(), _make_cell()