import tensorflow as tf
from tensorflow.estimator import RunConfig

# The remaining helpers (DatasetFactory, nets_factory, get_model_function,
# run_training, run_evaluation, run_evaluation_conserving_best,
# run_prediction_and_evaluation, get_evaluation_summary_writer) come from the
# project's own modules; their import paths are not shown in this excerpt.


def start_prediction(output_directory, data_directory, dataset_name, model_dir,
                     network_name, batch_size, batch_threads, num_classes=None):
    dataset_factory = DatasetFactory(dataset_name=dataset_name,
                                     data_directory=data_directory,
                                     augment=False)

    # Fall back to the number of classes found in the training split.
    if num_classes is None:
        num_classes = dataset_factory.get_dataset('train').num_classes()

    run_config = RunConfig(keep_checkpoint_max=10, save_checkpoints_steps=None)

    # Instantiate the Estimator from the checkpoint in model_dir.
    estimator = tf.estimator.Estimator(
        model_fn=get_model_function(model_dir, network_name, num_classes),
        model_dir=model_dir,
        config=run_config,
        params={})

    image_size = nets_factory.get_input_size(network_name)

    run_prediction_and_evaluation(output_directory, batch_size, batch_threads,
                                  dataset_factory, estimator, image_size)
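
# A minimal usage sketch, not taken from the original source: every argument
# value below is a hypothetical placeholder, and 'resnet_v2_50' merely assumes
# a slim-style name that nets_factory would recognize.
def _example_predict():
    start_prediction(output_directory='/tmp/predictions',
                     data_directory='/data/my_dataset',
                     dataset_name='my_dataset',
                     model_dir='/tmp/model',
                     network_name='resnet_v2_50',
                     batch_size=32,
                     batch_threads=4)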

def start_training(data_directory, dataset_name, output_directory, network_name,
                   batch_size, learning_rate, batch_threads, num_epochs,
                   initial_checkpoint, checkpoint_exclude_scopes,
                   ignore_missing_variables, trainable_scopes,
                   not_trainable_scopes, fixed_learning_rate,
                   learning_rate_decay_rate, do_evaluation,
                   learning_rate_decay_steps):
    dataset_factory = DatasetFactory(dataset_name=dataset_name,
                                     data_directory=data_directory)

    # By default, decay once per epoch: one epoch is samples // batch_size steps.
    model_params = {
        'learning_rate': learning_rate,
        'fixed_learning_rate': fixed_learning_rate,
        'learning_rate_decay_rate': learning_rate_decay_rate,
        'learning_rate_decay_steps':
            (dataset_factory.get_dataset('train').get_number_of_samples()
             if learning_rate_decay_steps is None
             else learning_rate_decay_steps) // batch_size
    }

    run_config = RunConfig(keep_checkpoint_max=10, save_checkpoints_steps=None)

    # Instantiate the Estimator, optionally warm-starting from initial_checkpoint
    # with the given scope filters.
    estimator = tf.estimator.Estimator(
        model_fn=get_model_function(
            output_directory, network_name,
            dataset_factory.get_dataset('train').num_classes(),
            initial_checkpoint, checkpoint_exclude_scopes,
            ignore_missing_variables, trainable_scopes, not_trainable_scopes),
        params=model_params,
        model_dir=output_directory,
        config=run_config)

    image_size = nets_factory.get_input_size(network_name)
    dataset = dataset_factory.get_dataset('train')

    evaluation_summary_writer = get_evaluation_summary_writer(do_evaluation,
                                                              output_directory)

    # Alternate one epoch of training with an evaluation pass that keeps
    # only the best checkpoint.
    for epoch in range(num_epochs):
        run_training(dataset=dataset, batch_size=batch_size,
                     batch_threads=batch_threads, epoch=epoch,
                     estimator=estimator, num_epochs=num_epochs,
                     image_size=image_size)

        if do_evaluation:
            run_evaluation_conserving_best(
                estimator=estimator, batch_size=2 * batch_size,
                batch_threads=batch_threads, dataset_factory=dataset_factory,
                image_size=image_size,
                evaluation_summary_writer=evaluation_summary_writer)

    print('Finished training')
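
# A worked sketch of the decay-step arithmetic used in model_params above,
# with illustrative numbers only. Note that an explicitly passed
# learning_rate_decay_steps is also divided by batch_size, so it is
# effectively interpreted in samples rather than steps.
def _example_decay_steps(num_samples=50000, batch_size=64,
                         learning_rate_decay_steps=None):
    # Mirrors the expression in model_params above.
    return (num_samples if learning_rate_decay_steps is None
            else learning_rate_decay_steps) // batch_size

# _example_decay_steps() == 781: at batch_size=64 the rate decays once
# per epoch (50000 // 64 = 781 steps).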

def start_training(data_directory, dataset_name, mean, output_directory,
                   network_name, batch_size, learning_rate, learning_rate_gen,
                   beta1_gen, separable_conv, batch_threads, num_epochs,
                   initial_checkpoint, checkpoint_exclude_scopes,
                   ignore_missing_variables, trainable_scopes,
                   not_trainable_scopes, fixed_learning_rate,
                   learning_rate_decay_rate, do_evaluation,
                   learning_rate_decay_steps, img_size):
    dataset_factory = DatasetFactory(dataset_name=dataset_name,
                                     data_directory=data_directory, mean=mean)

    model_params = {
        'learning_rate': learning_rate,
        'learning_rate_gen': learning_rate_gen,
        'beta1_gen': beta1_gen,
        'fixed_learning_rate': fixed_learning_rate,
        'learning_rate_decay_rate': learning_rate_decay_rate,
        'separable_conv': separable_conv,
        'learning_rate_decay_steps':
            (dataset_factory.get_dataset('train').get_number_of_samples()
             if learning_rate_decay_steps is None
             else learning_rate_decay_steps) // batch_size
    }

    run_config = RunConfig(keep_checkpoint_max=10, save_checkpoints_steps=None)

    # Instantiate Estimator
    estimator = tf.estimator.Estimator(
        model_fn=get_model_function(
            output_directory, network_name,
            dataset_factory.get_dataset('train').num_classes(),
            initial_checkpoint, checkpoint_exclude_scopes,
            ignore_missing_variables, trainable_scopes, not_trainable_scopes),
        params=model_params,
        model_dir=output_directory,
        config=run_config)

    image_size = img_size

    for epoch in range(num_epochs):
        run_training(dataset_factory, batch_size=batch_size,
                     batch_threads=batch_threads, epoch=epoch,
                     estimator=estimator, num_epochs=num_epochs,
                     image_size=image_size)

    print('Finished training')
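
# A hypothetical invocation of this variant; all values are placeholders.
# The learning_rate_gen/beta1_gen/separable_conv parameters and the explicit
# img_size suggest a pix2pix-style image-to-image setup, but that is an
# inference from the parameter names, not something the source confirms.
def _example_train_translation():
    start_training(data_directory='/data/pairs', dataset_name='my_pairs',
                   mean=None, output_directory='/tmp/run',
                   network_name='pix2pix', batch_size=1, learning_rate=0.0002,
                   learning_rate_gen=0.0002, beta1_gen=0.5,
                   separable_conv=False, batch_threads=4, num_epochs=200,
                   initial_checkpoint=None, checkpoint_exclude_scopes=None,
                   ignore_missing_variables=False, trainable_scopes=None,
                   not_trainable_scopes=None, fixed_learning_rate=True,
                   learning_rate_decay_rate=0.9, learning_rate_decay_steps=None,
                   do_evaluation=False, img_size=256)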

def start_training(data_directory, dataset_name, output_directory, network_name,
                   batch_size, learning_rate, batch_threads, num_epochs,
                   initial_checkpoint, checkpoint_exclude_scopes,
                   ignore_missing_variables, trainable_scopes,
                   fixed_learning_rate, learning_rate_decay_rate, num_classes):
    dataset_factory = DatasetFactory(dataset_name=dataset_name,
                                     data_directory=data_directory)

    model_params = {
        'learning_rate': learning_rate,
        'fixed_learning_rate': fixed_learning_rate,
        'learning_rate_decay_rate': learning_rate_decay_rate,
        'learning_rate_decay_steps':
            dataset_factory.get_dataset('train').get_number_of_samples() // batch_size
    }

    run_config = RunConfig(keep_checkpoint_max=10, save_checkpoints_steps=None)

    # Instantiate Estimator
    estimator = tf.estimator.Estimator(
        model_fn=get_model_function(
            output_directory, network_name,
            (dataset_factory.get_dataset('train').num_classes()
             if num_classes is None else num_classes),
            initial_checkpoint, checkpoint_exclude_scopes,
            ignore_missing_variables, trainable_scopes),
        params=model_params,
        model_dir=output_directory,
        config=run_config)

    image_size = nets_factory.get_input_size(network_name)

    for epoch in range(num_epochs):
        run_training(dataset_factory, batch_size, batch_threads, epoch,
                     estimator, num_epochs, image_size)
        # run_validation(dataset_factory, batch_size, batch_threads, estimator, image_size)
        run_evaluation(batch_size, batch_threads, dataset_factory, estimator,
                       image_size)
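
# For orientation, a minimal sketch of the kind of model_fn closure that
# get_model_function presumably builds. This is an assumption: the real
# implementation also handles warm-starting from initial_checkpoint, scope
# exclusion, and the trainable-scope filters, and it builds the network named
# by network_name rather than the trivial dense layer used here.
def _example_model_function(num_classes):
    def model_fn(features, labels, mode, params):
        # A single dense layer stands in for the real network.
        logits = tf.layers.dense(tf.layers.flatten(features), num_classes)
        if mode == tf.estimator.ModeKeys.PREDICT:
            return tf.estimator.EstimatorSpec(
                mode, predictions={'classes': tf.argmax(logits, axis=1)})
        loss = tf.losses.sparse_softmax_cross_entropy(labels=labels,
                                                      logits=logits)
        if mode == tf.estimator.ModeKeys.EVAL:
            return tf.estimator.EstimatorSpec(mode, loss=loss)
        train_op = tf.train.GradientDescentOptimizer(
            params['learning_rate']).minimize(
                loss, global_step=tf.train.get_global_step())
        return tf.estimator.EstimatorSpec(mode, loss=loss, train_op=train_op)
    return model_fn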

def start_prediction(data_directory, dataset_name, mean, model_dir,
                     network_name, batch_size, batch_threads, num_classes,
                     result_dir, img_size, model, mode):
    # Note: the 'model' argument is accepted but not used in this function.
    dataset_factory = DatasetFactory(dataset_name=dataset_name,
                                     data_directory=data_directory, mean=mean,
                                     augment=False, num_classes=num_classes)

    run_config = RunConfig(keep_checkpoint_max=10, save_checkpoints_steps=None)

    # Instantiate the Estimator from the checkpoint in model_dir.
    estimator = tf.estimator.Estimator(
        model_fn=get_model_function(
            model_dir, network_name,
            dataset_factory.get_dataset('train').num_classes()),
        model_dir=model_dir,
        config=run_config,
        params={})

    image_size = img_size

    run_prediction_and_evaluation(batch_size, batch_threads, dataset_factory,
                                  estimator, image_size, result_dir, mode)
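
# A hypothetical invocation of this variant; all values are placeholders.
# The mean triple below is the common ImageNet per-channel mean, used here
# purely as an illustration; 'mode' is forwarded unchanged to the project's
# run_prediction_and_evaluation helper.
def _example_predict_with_mean():
    start_prediction(data_directory='/data/my_dataset',
                     dataset_name='my_dataset',
                     mean=[123.68, 116.78, 103.94], model_dir='/tmp/model',
                     network_name='pix2pix', batch_size=16, batch_threads=4,
                     num_classes=10, result_dir='/tmp/results', img_size=256,
                     model=None, mode='test')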