# Variant 1: reads TFRecord files from data_dir via model.get_input_fn
# and sets explicit train/eval step counts.
def _experiment_fn(output_dir):
    return Experiment(
        model.build_estimator(output_dir),
        train_input_fn=model.get_input_fn(
            filename=os.path.join(data_dir, 'train.tfrecords'),
            batch_size=train_batch_size),
        eval_input_fn=model.get_input_fn(
            filename=os.path.join(data_dir, 'test.tfrecords'),
            batch_size=eval_batch_size),
        export_strategies=[
            saved_model_export_utils.make_export_strategy(
                model.serving_input_fn,
                default_output_alternative_key=None,
                exports_to_keep=1)
        ],
        train_steps=train_steps,
        eval_metrics=model.get_eval_metrics(),
        eval_steps=eval_steps,
        **experiment_args)
# Variant 2: reads CSV files via model.generate_csv_input_fn and passes
# hidden_units through to the estimator.
def _experiment_fn(output_dir):
    input_fn = model.generate_csv_input_fn
    train_input = input_fn(
        train_data_paths, num_epochs=num_epochs, batch_size=train_batch_size)
    eval_input = input_fn(
        eval_data_paths, batch_size=eval_batch_size,
        mode=tf.contrib.learn.ModeKeys.EVAL)
    return Experiment(
        model.build_estimator(output_dir, hidden_units=hidden_units),
        train_input_fn=train_input,
        eval_input_fn=eval_input,
        export_strategies=[
            saved_model_export_utils.make_export_strategy(
                model.serving_input_fn,
                default_output_alternative_key=None,
                exports_to_keep=1)
        ],
        eval_metrics=model.get_eval_metrics(),
        **experiment_args)
# Variant 3: same CSV pipeline as variant 2, with min_eval_frequency left in
# as an optional knob for large datasets.
def _experiment_fn(output_dir):
    input_fn = model.generate_csv_input_fn
    train_input = input_fn(
        train_data_paths, num_epochs=num_epochs, batch_size=train_batch_size)
    eval_input = input_fn(
        eval_data_paths, batch_size=eval_batch_size,
        mode=tf.contrib.learn.ModeKeys.EVAL)
    return Experiment(
        model.build_estimator(
            output_dir,
            hidden_units=hidden_units),
        train_input_fn=train_input,
        eval_input_fn=eval_input,
        export_strategies=[
            saved_model_export_utils.make_export_strategy(
                model.serving_input_fn,
                default_output_alternative_key=None,
                exports_to_keep=1)
        ],
        eval_metrics=model.get_eval_metrics(),
        # min_eval_frequency=1000,  # change this to speed up training on large datasets
        **experiment_args)
# Variant 4: chooses the CSV or TFRecord input function based on the format
# flag, and configures nbuckets and hidden_units on the estimator.
def _experiment_fn(output_dir):
    input_fn = (model.generate_csv_input_fn
                if format == 'csv'
                else model.generate_tfrecord_input_fn)
    train_input = input_fn(
        train_data_paths, num_epochs=num_epochs, batch_size=train_batch_size)
    eval_input = input_fn(
        eval_data_paths, batch_size=eval_batch_size,
        mode=tf.contrib.learn.ModeKeys.EVAL)
    return Experiment(
        model.build_estimator(
            output_dir,
            nbuckets=nbuckets,
            hidden_units=parse_to_int(hidden_units)),
        train_input_fn=train_input,
        eval_input_fn=eval_input,
        export_strategies=[
            saved_model_export_utils.make_export_strategy(
                model.serving_input_fn,
                default_output_alternative_key=None,
                exports_to_keep=1)
        ],
        eval_metrics=model.get_eval_metrics(),
        # min_eval_frequency=1000,  # change this to speed up training on large datasets
        **experiment_args)
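# For context: each _experiment_fn above is meant to be handed to learn_runner,
# which calls it with output_dir and then drives the returned Experiment
# (train, evaluate, export). The sketch below shows one common way to wire this
# up, assuming TF 1.x with tf.contrib.learn available; the import paths, the
# output_dir value, and the assumption that values such as train_batch_size and
# train_data_paths come from command-line arguments are illustrative, not taken
# from this section.
import os

import tensorflow as tf
from tensorflow.contrib.learn import Experiment
from tensorflow.contrib.learn.python.learn import learn_runner
from tensorflow.contrib.learn.python.learn.utils import saved_model_export_utils

import model  # module assumed to provide build_estimator, the input fns, and serving_input_fn

output_dir = 'gs://my-bucket/model_output'  # hypothetical output location

# learn_runner invokes _experiment_fn(output_dir) and runs train/eval to completion.
learn_runner.run(_experiment_fn, output_dir)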