# ask user to keep the run should_keep_run = query_yes_no("Should the run '{}' be kept?".format( os.path.basename(run.base_path)), default="yes") if not should_keep_run: shutil.rmtree(run.base_path) if __name__ == "__main__": # get arguments from console arguments = parse_arguments() # initialize new run run = Run(run_id=None) run.open() run.set_config_value(arguments.input_style_file, "files", "input_style") run.set_config_value(arguments.input_map_file, "files", "input_map") run.set_config_value(arguments.output_map_file, "files", "output_map") run.set_config_value(arguments.output_content_file, "files", "output_content") run.set_config_value(arguments.content_weight, "training", "content_weight") run.set_config_value(arguments.content_layers, "training", "content_layers") run.set_config_value(arguments.style_layers, "training", "style_layers") run.set_config_value(arguments.style_weight, "training", "style_weight") run.set_config_value(arguments.map_channel_weight, "training", "map_channel_weight") run.set_config_value(arguments.num_phases, "training", "num_phases")
# NOTE(review): this chunk arrived whitespace-mangled and begins AND ends
# mid-statement — the opening `default=32)` fragment closes an
# argument-registration call whose start is outside this view, and the final
# logging_info call is truncated. Reformatted for readability only; no tokens
# were changed. Verify both fragment boundaries against the original file.
    default=32)

# register device / verbosity options on the argument list; devices use
# TensorFlow device strings ("/cpu:0", "/gpu:0")
argument_list.add_input_device_argument("Device for processing inputs.",
                                        default="/cpu:0")
argument_list.add_inference_device_argument("Device for inference.",
                                            default="/gpu:0")
argument_list.add_optimization_device_argument("Device for optimization.",
                                               default="/cpu:0")
argument_list.add_tf_verbosity_argument("Tensorflow verbosity.",
                                        default="info")
argument_list.add_tf_min_log_level_argument(
    "Tensorflow minimum log level.", default=3)
arguments = argument_list.parse()

# load run
run = Run(run_id=arguments.run)
if not run.open():
    # NOTE(review): logging_error presumably aborts the script — otherwise
    # execution would continue with an unopened run; confirm its semantics
    logging_error("There is no run '{}'.".format(arguments.run))

# print some information
logging_info("Load run '{}'.".format(arguments.run))
logging_info("Model: {}".format(
    run.get_config_value("model", "name")))
logging_info("Dataset: {} {}".format(
    arguments.dataset, arguments.dataset_split))
logging_info("Preprocessing parallel calls: {}".format(
    arguments.num_parallel_calls))
logging_info("Prefetch buffer size: {}".format(
    arguments.prefetch_buffer_size))
logging_info("Batch size: {}".format(
    arguments.batch_size))
logging_info("Input device: {}".format(