def main(_):
    """Train and evaluate the FM model for 15 epochs with a 1/t-decayed SGD rate.

    A fresh Estimator is constructed every epoch so that ``learning_rate`` can
    be lowered to ``0.001 / (epoch + 1)``; all instances share ``model_dir``,
    so each iteration resumes from the latest checkpoint on disk.
    """
    model_dir = os.path.join(os.path.dirname(__file__), 'logdata')
    print(model_dir)

    # Warm-start the input-layer (embedding) variables from a previous v3 run.
    # The original guarded this with a constant `if True`, so the else branch
    # (ws = None) was dead code.
    ws = tf.estimator.WarmStartSettings(
        ckpt_to_initialize_from="/home/lucius/Projects/notebook/homework/ctr_predict/models/v3/logdata_95",
        vars_to_warm_start=".*input_layer.*")

    for epoch in range(15):
        classifier = tf.estimator.Estimator(
            model_fn=my_model,
            params={
                'feature_columns': build_model_columns_fm(),
                'optimizer': 'sgd',
                # Harmonic decay: 0.001, 0.0005, 0.000333, ...
                'learning_rate': 0.001 / (epoch + 1),
            },
            model_dir=model_dir,
            warm_start_from=ws)
        print('perform {}s epoch...'.format(epoch))

        # One pass over the training TFRecords.
        classifier.train(
            input_fn=get_input_fn(
                os.path.join(os.path.dirname(__file__),
                             '../../FE/FE1/train_split.tfrecord'),
                256, 1, 5000, use_tfrecord=True))

        # Evaluate on the held-out validation split.
        eval_result = classifier.evaluate(
            input_fn=get_input_fn(
                os.path.join(os.path.dirname(__file__),
                             '../../FE/FE1/valid_split.tfrecord'),
                256, 1, 5000, use_tfrecord=True))
        print('\nTest set accuracy: {accuracy:0.3f}\n'.format(**eval_result))
def main(_):
    """Train and evaluate the FM model (no regularization) for 15 epochs.

    A single Estimator is built once, warm-started from a v2 checkpoint, and
    then trained/evaluated repeatedly; checkpoints accumulate in ``model_dir``
    so each epoch resumes from the previous one.
    """
    model_dir = os.path.join(os.path.dirname(__file__), 'logdata_fm_noreg')

    # NOTE(review): the original built a `restore_map` dict here
    # ({'input_layer/<name>_embed_lr': '<name>_embed_lr'} merged with
    # {'input_layer/<name>_embed_fm': '<name>_embed_lr'} over COLUMN_NAMES[2:])
    # but never used it anywhere — removed as dead code. If a checkpoint
    # variable-rename map is ever needed, beware that the second comprehension
    # mapped `_embed_fm` keys onto `_embed_lr` values, which looks like a
    # copy-paste bug to fix before reuse.

    # Warm-start all input-layer (embedding) variables from a previous run.
    ws = tf.estimator.WarmStartSettings(
        ckpt_to_initialize_from=
        "/home/lucius/Projects/notebook/homework/ctr_predict/models/v2/logdata_3_4",
        vars_to_warm_start=".*input_layer.*")

    classifier = tf.estimator.Estimator(
        model_fn=my_model,
        params={
            'feature_columns': build_model_columns_fm(),
            'optimizer': 'sgd'
        },
        model_dir=model_dir,
        warm_start_from=ws)

    for i in range(15):
        print('perform {}s epoch...'.format(i))

        # One pass over the training TFRecords.
        classifier.train(
            input_fn=get_input_fn(
                os.path.join(os.path.dirname(__file__),
                             '../../FE/FE1/train_split.tfrecord'),
                256, 1, 5000, use_tfrecord=True))

        # Evaluate on the held-out validation split.
        eval_result = classifier.evaluate(
            input_fn=get_input_fn(
                os.path.join(os.path.dirname(__file__),
                             '../../FE/FE1/valid_split.tfrecord'),
                256, 1, 5000, use_tfrecord=True))
        print('\nTest set accuracy: {accuracy:0.3f}\n'.format(**eval_result))
def main(_):
    """Grid-sweep evaluation over FM embedding dimension and regularization.

    Sweeps dim in {3, 8, 13, 18, 23} and regularizer in {1.0, 0.1, ..., 1e-4},
    evaluating the checkpoint stored under ``logdata_<i>_<j>`` for each cell.
    Training is disabled in this sweep — only evaluation runs.
    """
    # Warm start is disabled here (the original guarded the WarmStartSettings
    # construction with a constant `if False`, so only ws = None was reachable).
    ws = None

    for dim_idx in range(5):
        fm_dim = dim_idx * 5 + 3  # 3, 8, 13, 18, 23
        for reg_idx in range(5):
            model_reg = (10 ** (-reg_idx)) * 1.0  # 1.0, 0.1, ..., 1e-4
            model_dir = os.path.join(
                os.path.dirname(__file__),
                'logdata_' + str(dim_idx) + '_' + str(reg_idx))
            # Degenerate single-pass inner loop kept from the original (run == 0
            # always).
            for run in range(1):
                classifier = tf.estimator.Estimator(
                    model_fn=my_model,
                    params={
                        'feature_columns': build_model_columns_fm(),
                        # NOTE(review): run is always 0, so 'optimizer' is
                        # always None — presumably my_model falls back to a
                        # default optimizer in that case; confirm.
                        'optimizer': None if run == 0 else 'sgd',
                        'learning_rate': 0.0001,
                        'dim': fm_dim,
                        'regularizer': model_reg
                    },
                    model_dir=model_dir,
                    warm_start_from=ws)
                print('perform {}s epoch...'.format(dim_idx))

                # Training is intentionally skipped (the train() call was
                # commented out in the original); evaluate existing checkpoints
                # on the held-out validation split.
                eval_result = classifier.evaluate(
                    input_fn=get_input_fn(
                        os.path.join(os.path.dirname(__file__),
                                     '../../FE/FE1/valid_split.tfrecord'),
                        256, 1, 5000, use_tfrecord=True))
                print('\nTest set accuracy: {accuracy:0.3f}\n'.format(
                    **eval_result))