def model_fn():
    """Build and compile the Keras FM model.

    Creates one Keras ``Input`` per non-target feature, feeds them through a
    ``DenseFeatures`` layer built from the project's feature columns, applies
    the FM interaction layer, and finishes with a sigmoid output.

    Returns:
        A compiled ``tf.keras.Model`` (Adam optimizer, binary cross-entropy
        loss, binary-accuracy and AUC metrics).
    """
    # Keyed by feature name so DenseFeatures can match columns to tensors.
    # NOTE(review): FEATURE_NAME / TARGET / DTYPE are module-level globals
    # defined elsewhere in this file.
    inputs = {}
    for name in FEATURE_NAME:
        if name != TARGET:
            inputs[name] = Input(shape=(1,), name=name, dtype=DTYPE[name])

    feature_columns = build_features()
    feature_layer = tf.keras.layers.DenseFeatures(feature_columns)
    dense_feature = feature_layer(inputs)

    fm = FM_Layer(name='fm_layer', factor_dim=8)(dense_feature)
    tf.summary.histogram('fm_output', fm)

    output = Dense(1, activation='sigmoid', name='output')(fm)

    model = Model(inputs=list(inputs.values()), outputs=output)

    optimizer = tf.keras.optimizers.Adam(learning_rate=0.01, beta_1=0.9,
                                         beta_2=0.999, amsgrad=False)
    model.compile(optimizer=optimizer,
                  loss='binary_crossentropy',
                  metrics=['binary_accuracy', 'AUC'])
    # Model.summary() prints the table itself and returns None; the original
    # print(model.summary()) therefore emitted a stray "None" line.
    model.summary()
    return model
def model_fn(features, labels, mode, params):
    """Factorization Machine forward pass (TF1 estimator style).

    Computes  y = x.w + b + 0.5 * sum_f [ (x.v_f)^2 - (x^2).(v_f^2) ]
    using the O(k*n) reformulation of the pairwise interaction term from
    Rendle's FM paper.

    Args:
        features: dict of raw input tensors, consumed via feature columns.
        labels: unused here; loss is expected to be built by the caller.
        mode: unused here; kept for the estimator model_fn signature.
        params: dict; ``params['factor_dim']`` is the latent factor size k.

    Returns:
        A (batch, 1) tensor with the raw (pre-activation) FM score.
    """
    feature_columns = build_features()
    dense_input = tf.feature_column.input_layer(features, feature_columns)
    input_dim = dense_input.get_shape().as_list()[-1]

    with tf.variable_scope('linear'):
        init = tf.random_normal(shape=(input_dim, 1))
        w = tf.get_variable('w', dtype=tf.float32, initializer=init,
                            validate_shape=False)
        b = tf.get_variable('b', shape=[1], dtype=tf.float32)
        linear_term = tf.add(tf.matmul(dense_input, w), b)
        add_layer_summary(linear_term.name, linear_term)

    with tf.variable_scope('fm_interaction'):
        init = tf.truncated_normal(shape=(input_dim, params['factor_dim']))
        v = tf.get_variable('v', dtype=tf.float32, initializer=init,
                            validate_shape=False)
        # Per factor f: (sum_i v_if x_i)^2  and  sum_i v_if^2 x_i^2.
        sum_square = tf.pow(tf.matmul(dense_input, v), 2)
        square_sum = tf.matmul(tf.pow(dense_input, 2), tf.pow(v, 2))
        # The FM formulation SUMS over the factor dimension; the original's
        # reduce_mean silently scaled the interaction term by 1/factor_dim.
        # keepdims=True keeps shape (batch, 1) to match linear_term.
        # (keepdims replaces the deprecated keep_dims keyword.)
        interaction_term = 0.5 * tf.reduce_sum(sum_square - square_sum,
                                               axis=1, keepdims=True)
        add_layer_summary(interaction_term.name, interaction_term)

    with tf.variable_scope('output'):
        y = tf.math.add(interaction_term, linear_term)
        add_layer_summary(y.name, y)

    return y