Exemplo n.º 1
0
    # MLP tower: stacked dense -> batch-norm -> dropout blocks.
    with tf.variable_scope('Dense'):
        for i, unit in enumerate(params['hidden_units']):
            # NOTE(review): tf.layers.dense documents `activation` as a
            # callable; confirm the string 'relu' is accepted by this TF build.
            dense = tf.layers.dense(dense,
                                    units=unit,
                                    activation='relu',
                                    name='dense{}'.format(i))
            # Batch norm updates its moving statistics only in TRAIN mode.
            dense = tf.layers.batch_normalization(
                dense,
                center=True,
                scale=True,
                trainable=True,
                training=(mode == tf.estimator.ModeKeys.TRAIN))
            # Dropout is active in TRAIN mode only; identity at eval/predict.
            dense = tf.layers.dropout(
                dense,
                rate=params['dropout_rate'],
                training=(mode == tf.estimator.ModeKeys.TRAIN))
            add_layer_summary(dense.name, dense)

    # Single-unit linear head producing the raw logit.
    with tf.variable_scope('output'):
        y = tf.layers.dense(dense, units=1, name='output')
        add_layer_summary('output', y)

    return y


# Expose the estimator factory for the 'dense' model with its default
# hyper-parameters.
build_estimator = build_estimator_helper(
    {'dense': model_fn},
    params={
        'dropout_rate': 0.2,
        'learning_rate': 0.002,
        'hidden_units': [5, 5],
    },
)
Exemplo n.º 2
0
Arquivo: FM.py Projeto: mindis/CTR-2
        add_layer_summary(linear_term.name, linear_term)

    # Second-order FM term computed with the 0.5 * ((xv)^2 - x^2 v^2)
    # identity instead of an explicit pairwise loop.
    with tf.variable_scope('fm_interaction'):
        init = tf.truncated_normal(shape=(input_dim, params['factor_dim']))
        # Latent factor matrix v: (input_dim, factor_dim).
        v = tf.get_variable('v',
                            dtype=tf.float32,
                            initializer=init,
                            validate_shape=False)

        sum_square = tf.pow(tf.matmul(input, v), 2)
        square_sum = tf.matmul(tf.pow(input, 2), tf.pow(v, 2))

        # NOTE(review): keep_dims is the deprecated TF1 spelling of keepdims.
        # NOTE(review): canonical FM uses reduce_sum over the factor axis;
        # reduce_mean rescales the term by 1/factor_dim — confirm intended.
        interaction_term = 0.5 * tf.reduce_mean(
            sum_square - square_sum, axis=1, keep_dims=True)

        add_layer_summary(interaction_term.name, interaction_term)

    # Prediction = first-order linear term + second-order interaction term.
    with tf.variable_scope('output'):
        y = tf.math.add(interaction_term, linear_term)
        add_layer_summary(y.name, y)

    return y


# FM estimator factory: a single 'dense' variant and its hyper-parameters.
build_estimator = build_estimator_helper(
    model_fn={'dense': model_fn},
    params={
        'dense': {'learning_rate': 0.01, 'factor_dim': 20},
    },
)
Exemplo n.º 3
0
def _varlen_params(data_name, item_count, cate_count):
    """Build the hyper-parameter dict for one var-len sequence dataset.

    The three datasets (amazon / heybox / movielens) previously carried three
    copy-pasted ~30-line dicts that differed only in the dataset name and the
    two vocabulary sizes; this helper removes that duplication.

    Args:
        data_name: dataset identifier ('amazon' / 'heybox' / 'movielens').
        item_count: item vocabulary size for the dataset.
        cate_count: category vocabulary size for the dataset.

    Returns:
        dict of hyper-parameters in the layout model_fn_varlen expects.
    """
    return {
        'dropout_rate': 0.2,
        'batch_norm': True,
        'learning_rate': 0.01,
        'hidden_units': [80, 40],
        'attention_hidden_unit': 80,
        'atten_mode': 'ln',
        'num_heads': 1,
        'item_count': item_count,
        'cate_count': cate_count,
        'seq_names': ['item', 'cate'],
        'num_of_expert': 50,
        'sparse_emb_dim': 128,
        'emb_dim': 128,
        'model_name': 'userperexpert',
        'data_name': data_name,
        # each element is one field; all fields share the same embedding size
        'input_features': [
            'dense_emb', 'item_emb', 'cate_emb', 'item_att_emb', 'cate_att_emb'
        ],
    }


# All three datasets share the same model function; only the per-dataset
# vocabulary sizes (and the dataset name) vary.
build_estimator = build_estimator_helper(
    model_fn={
        'amazon': model_fn_varlen,
        'movielens': model_fn_varlen,
        'heybox': model_fn_varlen
    },
    params={
        'amazon': _varlen_params('amazon', AMAZON_ITEM_COUNT,
                                 AMAZON_CATE_COUNT),
        'heybox': _varlen_params('heybox', HEYBOX_ITEM_COUNT,
                                 HEYBOX_CATE_COUNT),
        'movielens': _varlen_params('movielens', ML_ITEM_COUNT,
                                    ML_CATE_COUNT),
    })
Exemplo n.º 4
0
        # Attention-weighted pooling over the pairwise interaction matrix.
        interaction_output = tf.reduce_sum(tf.multiply(elementwise_matrix,
                                                       attention_weight),
                                           axis=1)  # batch * k
        # Project the pooled k-dim vector down to a single logit.
        interaction_output = tf.layers.dense(interaction_output,
                                             units=1)  # batch * 1

    # Prediction = pooled interaction term + linear term.
    with tf.variable_scope('output'):
        y = interaction_output + linear_output
        add_layer_summary('output', y)

    return y


# Estimator factory: dense features for census, sparse features for frappe.
build_estimator = build_estimator_helper(
    model_fn={'census': model_fn_dense, 'frappe': model_fn_sparse},
    params={
        'census': {
            'attention_factor': 3,
            'dropout_rate': 0.2,
            'learning_rate': 0.01,
        },
        'frappe': {
            'attention_factor': 16,
            'dropout_rate': 0.2,
            'learning_rate': 0.01,
            'hidden_units': [128, 64, 1],
            'data_params': FRAPPE_PARAMS,
        },
    },
)
Exemplo n.º 5
0
            # Normalize raw numeric features; statistics update only in TRAIN.
            numeric_input = tf.layers.batch_normalization(numeric_input, center = True, scale = True, trainable =True,
                                                          training = (mode == tf.estimator.ModeKeys.TRAIN))
            add_layer_summary( numeric_input.name, numeric_input )
            # Append the normalized numeric columns to the embedding features.
            dense = tf.concat([dense, numeric_input], axis = 1, name ='numeric_concat')
            add_layer_summary(dense.name, dense)

    # Feed-forward tower over the concatenated feature vector.
    with tf.variable_scope('MLP'):
        for i, unit in enumerate(params['hidden_units']):
            # NOTE(review): tf.layers.dense documents `activation` as a
            # callable; confirm the string 'relu' is accepted by this TF build.
            dense = tf.layers.dense(dense, units = unit, activation = 'relu', name = 'Dense_{}'.format(i))
            if mode == tf.estimator.ModeKeys.TRAIN:
                add_layer_summary(dense.name, dense)
                # Dropout is built on the TRAIN graph only; eval/predict use
                # the dense output directly, which is equivalent.
                dense = tf.layers.dropout(dense, rate = params['dropout_rate'], training = (mode==tf.estimator.ModeKeys.TRAIN))

    # Single-unit linear head producing the raw logit.
    with tf.variable_scope('output'):
        y = tf.layers.dense(dense, units=1, name = 'output')

    return y


# Estimator factory; numeric_handle selects how raw numeric columns are fed in.
build_estimator = build_estimator_helper(
    {'dense': model_fn},
    params={
        'learning_rate': 0.002,
        'numeric_handle': 'dense',  # dense or bucketize are supported
        'hidden_units': [20, 10],
        'embedding_dim': 4,
        'dropout_rate': 0.1,
    },
)

Exemplo n.º 6
0
                                    name='dense{}'.format(i))
            # Batch norm and dropout are both gated on TRAIN mode; they are
            # inert on the eval/predict graphs.
            dense = tf.layers.batch_normalization(
                dense,
                center=True,
                scale=True,
                trainable=True,
                training=(mode == tf.estimator.ModeKeys.TRAIN))
            dense = tf.layers.dropout(
                dense,
                rate=params['dropout_rate'],
                training=(mode == tf.estimator.ModeKeys.TRAIN))
            add_layer_summary(dense.name, dense)

    # Single-unit linear head producing the raw logit.
    with tf.variable_scope('output'):
        y = tf.layers.dense(dense, units=1, name='output')
        add_layer_summary('output', y)

    return y


# PNN estimator factory; model_type picks the product-layer variant.
build_estimator = build_estimator_helper(
    model_fn={'census': model_fn},
    params={
        'census': {
            'model_type': 'IPNN',  # support IPNN/OPNN/PNN
            'dropout_rate': 0.2,
            'learning_rate': 0.01,
            'hidden_units': [24, 12, 1],
        },
    },
)
Exemplo n.º 7
0
Arquivo: DCN.py Projeto: yulinhu/CTR
        # Concatenate the deep-tower output with the cross-network output.
        x_stack = tf.concat([dense, xl], axis=1)

    # Linear head over the stacked representation.
    with tf.variable_scope('output'):
        y = tf.layers.dense(x_stack, units=1)
        add_layer_summary('output', y)

    return y


# DCN estimator factory: dense vs. sparse feature pipelines.
build_estimator = build_estimator_helper(
    model_fn={'dense': model_fn_dense, 'sparse': model_fn_sparse},
    params={
        'dense': {
            'dropout_rate': 0.2,
            'batch_norm': True,
            'learning_rate': 0.002,
            'hidden_units': [10, 5],
            'cross_layers': 3,
            'cross_op': 'raw',
        },
        'sparse': {
            'dropout_rate': 0.2,
            'batch_norm': True,
            'learning_rate': 0.01,
            'hidden_units': [128, 64],
            'cross_layers': 3,
            'cross_op': 'better',
        },
    },
)
Exemplo n.º 8
0
@tf_estimator_model
def model_fn(features, labels, mode, params):
    """Deep residual model: feature columns -> residual blocks -> logit.

    Args:
        features: feature dict produced by the estimator input_fn.
        labels: label tensor (consumed by the tf_estimator_model decorator).
        mode: a tf.estimator.ModeKeys value.
        params: hyper-parameters; uses hidden_units, dropout_rate, batch_norm.

    Returns:
        Single-unit output tensor (one raw logit per example).
    """
    columns = build_features()
    net = tf.feature_column.input_layer(features, columns)

    # One residual block per entry in hidden_units.
    with tf.variable_scope('Residual_layers'):
        for layer_id, layer_units in enumerate(params['hidden_units']):
            net = residual_layer(net,
                                 layer_units,
                                 dropout_rate=params['dropout_rate'],
                                 batch_norm=params['batch_norm'],
                                 mode=mode)
            add_layer_summary('residual_layer{}'.format(layer_id), net)

    # Linear head producing the raw logit.
    with tf.variable_scope('output'):
        y = tf.layers.dense(net, units=1)
        add_layer_summary('output', y)

    return y


# Residual-network estimator factory for the census dataset.
build_estimator = build_estimator_helper(
    model_fn={'census': model_fn},
    params={
        'census': {
            'dropout_rate': 0.2,
            'batch_norm': True,
            'learning_rate': 0.01,
            'hidden_units': [10, 5],
        },
    },
)
Exemplo n.º 9
0
                training=(mode == tf.estimator.ModeKeys.TRAIN))
            # Dropout is active only on the TRAIN graph.
            dense = tf.layers.dropout(
                dense,
                rate=params['dropout_rate'],
                training=(mode == tf.estimator.ModeKeys.TRAIN))
            add_layer_summary(dense.name, dense)

    # Prediction = deep-tower output + FM term + linear term.
    with tf.variable_scope('output'):
        y = dense + fm_output + linear_output  # batch * 1
        add_layer_summary('output', y)

    return y


# Estimator factory: dense vs. sparse feature pipelines.
build_estimator = build_estimator_helper(
    model_fn={'dense': model_fn_dense, 'sparse': model_fn_sparse},
    params={
        'dense': {
            'dropout_rate': 0.2,
            'learning_rate': 0.001,
            'hidden_units': [20, 10, 1],
        },
        'sparse': {
            'dropout_rate': 0.2,
            'learning_rate': 0.002,
            'hidden_units': [128, 64, 1],
        },
    },
)
Exemplo n.º 10
0
            add_layer_summary(dense.name, dense)

    # Feed-forward tower over the assembled feature vector.
    with tf.variable_scope('MLP'):
        for i, unit in enumerate(params['hidden_units']):
            # NOTE(review): tf.layers.dense documents `activation` as a
            # callable; confirm the string 'relu' is accepted by this TF build.
            dense = tf.layers.dense(dense, units = unit, activation = 'relu', name = 'Dense_{}'.format(i))
            if mode == tf.estimator.ModeKeys.TRAIN:
                add_layer_summary(dense.name, dense)
                # Dropout layer only exists on the TRAIN graph; eval/predict
                # use the dense output directly, which is equivalent.
                dense = tf.layers.dropout(dense, rate = params['dropout_rate'], training = (mode==tf.estimator.ModeKeys.TRAIN))

    # Single-unit linear head producing the raw logit.
    with tf.variable_scope('output'):
        y = tf.layers.dense(dense, units=1, name = 'output')

    return y


# Estimator factory; model_type selects 'dense' or 'bucketize' numeric
# handling.
build_estimator = build_estimator_helper(
    model_fn={'census': model_fn},
    params={
        'census': {
            'learning_rate': 0.01,
            'model_type': 'dense',  # dense or bucketize are supported
            'hidden_units': [20, 10],
            'embedding_dim': 4,
            'dropout_rate': 0.1,
        },
    },
)

Exemplo n.º 11
0
    # Head: concatenate deep, CIN, and linear outputs, then project the
    # stacked vector to a single logit.
    with tf.variable_scope('output'):
        y = tf.concat([dense_output, cin_output, linear_output], axis=1)
        y = tf.layers.dense(y, units=1)
        add_layer_summary('output', y)

    return y


# Estimator factory: dense vs. sparse feature pipelines with CIN settings.
build_estimator = build_estimator_helper(
    model_fn={'dense': model_fn_dense, 'sparse': model_fn_sparse},
    params={
        'dense': {
            'dropout_rate': 0.2,
            'learning_rate': 0.001,
            'hidden_units': [20, 10],
            'batch_norm': True,
            'cin_layer_size': [8, 4, 4],
        },
        'sparse': {
            'dropout_rate': 0.2,
            'learning_rate': 0.002,
            'hidden_units': [128, 64, 32],
            'batch_norm': True,
            'cin_layer_size': [32, 16, 8],
        },
    },
)
Exemplo n.º 12
0
                              dropout_rate=params['dropout_rate'],
                              batch_norm=params['batch_norm'],
                              mode=mode,
                              add_summary=True)

    # Prediction = linear part + deep-tower output.
    with tf.variable_scope('output'):
        y = linear_output + dense
        add_layer_summary('output', y)

    return y


# Estimator factory: dense features for census, sparse features for frappe.
build_estimator = build_estimator_helper(
    model_fn={'census': model_fn_dense, 'frappe': model_fn_sparse},
    params={
        'census': {
            'dropout_rate': 0.2,
            'batch_norm': True,
            'learning_rate': 0.01,
            'hidden_units': [20, 10, 1],
        },
        'frappe': {
            'dropout_rate': 0.2,
            'batch_norm': True,
            'learning_rate': 0.01,
            'hidden_units': [128, 64, 1],
            'data_params': FRAPPE_PARAMS,
        },
    },
)
Exemplo n.º 13
0
                dense,
                rate=params['dropout_rate'],
                training=(mode == tf.estimator.ModeKeys.TRAIN))
            add_layer_summary(dense.name, dense)

    # Single-unit linear head; log a histogram summary of the raw logit.
    with tf.variable_scope('output'):
        y = tf.layers.dense(dense, units=1, name='output')
        tf.summary.histogram(y.name, y)

    return y


# FNN estimator factory for the census dataset.
build_estimator = build_estimator_helper(
    model_fn={'census': model_fn},
    params={
        'census': {
            'dropout_rate': 0.2,
            'learning_rate': 0.01,
            'hidden_units': [24, 12, 1],
        },
    },
)

# Debug utility: dump tensor names from the FM pretrain checkpoint so the
# FNN model's variables can be mapped onto it.

if __name__ == '__main__':
    print(
        'checking name of all the tensor in the FNN pretrain census_checkpoint'
    )
    # Local import: inspect_checkpoint is only needed for this debug path.
    from tensorflow.python.tools.inspect_checkpoint import print_tensors_in_checkpoint_file
    latest_ckp = tf.train.latest_checkpoint('./census_checkpoint/FM')
    print_tensors_in_checkpoint_file(latest_ckp, all_tensors=True)
    print_tensors_in_checkpoint_file(latest_ckp,
                                     all_tensors=False,
Exemplo n.º 14
0
    # Attention pooling: weight each pairwise interaction, sum over pairs,
    # then project the pooled k-dim vector to one logit.
    with tf.variable_scope('Attention_pooling'):
        interaction_output = tf.reduce_sum(tf.multiply(elementwise_matrix,
                                                       attention_weight),
                                           axis=1)  # batch * k
        interaction_output = tf.layers.dense(interaction_output,
                                             units=1)  # batch * 1

    # Prediction = pooled interaction term + linear term.
    with tf.variable_scope('output'):
        y = interaction_output + linear_output
        add_layer_summary('output', y)

    return y


# Estimator factory: dense vs. sparse feature pipelines.
build_estimator = build_estimator_helper(
    model_fn={'dense': model_fn_dense, 'sparse': model_fn_sparse},
    params={
        'dense': {
            'attention_factor': 3,
            'dropout_rate': 0.2,
            'learning_rate': 0.002,
        },
        'sparse': {
            'attention_factor': 16,
            'dropout_rate': 0.2,
            'learning_rate': 0.002,
            'hidden_units': [128, 64, 1],
        },
    },
)
Exemplo n.º 15
0
    # Linear head over the stacked cross/deep representation.
    with tf.variable_scope('output'):
        y = tf.layers.dense(x_stack, units=1)
        add_layer_summary('output', y)

    return y


# DCN estimator factory: dense features for census, sparse for frappe.
build_estimator = build_estimator_helper(
    model_fn={'census': model_fn_dense, 'frappe': model_fn_sparse},
    params={
        'census': {
            'dropout_rate': 0.2,
            'batch_norm': True,
            'learning_rate': 0.01,
            'hidden_units': [10, 5],
            'cross_layers': 3,
            'cross_op': 'raw',
        },
        'frappe': {
            'dropout_rate': 0.2,
            'batch_norm': True,
            'learning_rate': 0.01,
            'hidden_units': [128, 64],
            'cross_layers': 3,
            'cross_op': 'better',
            'data_params': FRAPPE_PARAMS,
        },
    },
)
Exemplo n.º 16
0
# Estimator factory; model_type selects the SENet variant:
# field_all / field_each / field_interaction.
build_estimator = build_estimator_helper(
    model_fn={'census': model_fn_dense, 'frappe': model_fn_sparse},
    params={
        'census': {
            'dropout_rate': 0.2,
            'learning_rate': 0.01,
            'hidden_units': [20, 10, 1],
            'batch_norm': True,
            'cin_layer_size': [8, 4, 4],
            'pool_op': 'avg',
            'senet_ratio': 2,
            # support field_all / field_each / field_interaction
            'model_type': 'field_all',
        },
        'frappe': {
            'dropout_rate': 0.2,
            'learning_rate': 0.01,
            'hidden_units': [128, 64, 32, 1],
            'batch_norm': True,
            'cin_layer_size': [32, 16, 8],
            'pool_op': 'avg',
            'senet_ratio': 2,
            # support field_all / field_each / field_interaction
            'model_type': 'field_all',
            'data_params': FRAPPE_PARAMS,
        },
    },
)