def execute_validation(gpus, exp_folder, exp_alias, validation_datasets, erase_bad_validations, restart_validations, suppress_output=True):
    """Launch a validation run for an experiment in a background process.

    Args:
        gpus: The gpu being used for this execution.
        exp_folder: The folder this driving experiment is being executed
        exp_alias: The experiment alias, file name, to be executed.
        validation_datasets: Comma-separated validation dataset names to be
            deleted/restarted.
        erase_bad_validations: If truthy, erase wrong plotting summaries first.
        restart_validations: If truthy, erase previous validations first.
        suppress_output: Whether the spawned process suppresses its output.

    Returns:
        None
    """
    # Turn the comma-separated string into a list of dataset names.
    dataset_names = validation_datasets.split(',')

    create_log_folder(exp_folder)
    create_exp_path(exp_folder, exp_alias)

    if erase_bad_validations:
        erase_wrong_plotting_summaries(exp_folder, dataset_names)
    if restart_validations:
        erase_validations(exp_folder, dataset_names)

    # Unlike training, validation is launched only on the first dataset here.
    worker = multiprocessing.Process(
        target=validate.execute,
        args=(gpus, exp_folder, exp_alias, dataset_names[0], suppress_output))
    worker.start()
def execute_train_encoder(gpu, exp_batch, exp_alias, suppress_output=True, number_of_workers=12):
    """Spawn a background process that trains the encoder for an experiment.

    Args:
        gpu: The gpu being used for this execution.
        exp_batch: The experiment batch (folder under configs) being executed.
        exp_alias: The experiment alias, file name, to be executed.
        suppress_output: Whether the spawned process suppresses its output.
        number_of_workers: Number of dataloader workers used by training.

    Returns:
        None
    """
    # NOTE(review): the previous docstring documented `module_name`/`path`,
    # which are not parameters of this function — fixed to match the signature.
    create_exp_path(exp_batch, exp_alias)
    p = multiprocessing.Process(target=train_encoder.execute,
                                args=(gpu, exp_batch, exp_alias,
                                      suppress_output, number_of_workers))
    p.start()
def test_basic_data(self):
    """Smoke-test that the town2-town01 (CoILTrain) dataset can be loaded."""
    # Allow mutating the global config for this test run.
    g_conf.immutable(False)
    g_conf.EXPERIMENT_NAME = 'coil_icra'

    create_log_folder('sample')
    create_exp_path('sample', 'coil_icra')
    merge_with_yaml('configs/sample/coil_icra.yaml')
    set_type_of_process('train')

    dataset_root = os.path.join(os.environ["COIL_DATASET_PATH"], 'CoILTrain')
    preload = str(g_conf.NUMBER_OF_HOURS) + 'hours_' + g_conf.TRAIN_DATASET_NAME
    dataset = CoILDataset(dataset_root, transform=None, preload_name=preload)
def test_town3_data(self):
    """Smoke-test loading the town3 dataset.

    The town3 data has different names and lacks pedestrian/vehicle stop
    indications.
    """
    # Allow mutating the global config for this test run.
    g_conf.immutable(False)
    g_conf.EXPERIMENT_NAME = 'resnet34imnet'

    create_log_folder('town03')
    create_exp_path('town03', 'resnet34imnet')
    merge_with_yaml('configs/town03/resnet34imnet.yaml')
    set_type_of_process('train')

    dataset_root = os.path.join(os.environ["COIL_DATASET_PATH"], 'CoILTrainTown03')
    preload = str(g_conf.NUMBER_OF_HOURS) + 'hours_' + g_conf.TRAIN_DATASET_NAME
    dataset = CoILDataset(dataset_root, transform=None, preload_name=preload)
def execute_drive(gpu, exp_batch, exp_alias, exp_set_name, params):
    """Start a driving evaluation for an experiment in a background process.

    Args:
        gpu: gpu being used for this execution.
        exp_batch: folder this driving experiment is being executed
        exp_alias: experiment alias, file name, to be executed.
        exp_set_name: driving environment
        params: all the rest of parameter, if there is recording and etc.

    Returns:
        None
    """
    # Driving always targets the local simulator host.
    params.update({'host': "127.0.0.1"})
    create_exp_path(exp_batch, exp_alias)

    driver = multiprocessing.Process(
        target=run_drive.execute,
        args=(gpu, exp_batch, exp_alias, exp_set_name, params))
    driver.start()
def execute_train(gpu, exp_batch, exp_alias, suppress_output=True, number_of_workers=12):
    """Launch training for an experiment in a background process.

    Args:
        gpu: gpu being used for this execution.
        exp_batch: folder this driving experiment is being executed
        exp_alias: experiment alias, file name, to be executed.
        suppress_output: Whether the spawned process suppresses its output.
        number_of_workers: Number of dataloader workers used by training.

    Returns:
        None
    """
    create_exp_path(exp_batch, exp_alias)
    trainer = multiprocessing.Process(
        target=train.execute,
        args=(gpu, exp_batch, exp_alias, suppress_output, number_of_workers))
    trainer.start()
def execute_validation(gpu, exp_batch, exp_alias, dataset, suppress_output=True):
    """Launch validation on a single dataset in a background process.

    Args:
        gpu: gpu being used for this execution.
        exp_batch: folder this driving experiment is being executed
        exp_alias: experiment alias, file name, to be executed.
        dataset: validation dataset
        suppress_output: Whether the spawned process suppresses its output.

    Returns:
        None
    """
    create_exp_path(exp_batch, exp_alias)
    validator = multiprocessing.Process(
        target=validate.execute,
        args=(gpu, exp_batch, exp_alias, dataset, suppress_output))
    validator.start()
def execute_train(gpus, exp_folder, exp_alias, suppress_output=True, number_of_workers=12):
    """Launch training for an experiment in a background process.

    Args:
        gpus: The gpu being used for this execution.
        exp_folder: Folder name in configs
        exp_alias: The experiment alias (yaml file)
        suppress_output: Whether the spawned process suppresses its output.
        number_of_workers: Number of dataloader workers used by training.

    Returns:
        None
    """
    create_log_folder(exp_folder)
    create_exp_path(exp_folder, exp_alias)

    trainer = multiprocessing.Process(
        target=train.execute,
        args=(gpus, exp_folder, exp_alias, suppress_output, number_of_workers))
    trainer.start()
def execute_drive(gpus, exp_folder, exp_alias, exp_set_name, suppress_output, docker, record_collisions, no_screen):
    """Start a driving evaluation for an experiment in a background process.

    Args:
        gpus: The gpu being used for this execution.
        exp_folder: The folder this driving experiment is being executed
        exp_alias: The experiment alias, file name, to be executed.
        exp_set_name: Name of the driving-environment experiment set.
        suppress_output: Whether the spawned process suppresses its output.
        docker: Docker image/flag forwarded to the driving process.
        record_collisions: Whether collisions are recorded.
        no_screen: Whether to run without a screen.

    Returns:
        None
    """
    create_log_folder(exp_folder)
    create_exp_path(exp_folder, exp_alias)

    driver = multiprocessing.Process(
        target=run_drive.execute,
        args=(gpus, exp_folder, exp_alias, exp_set_name, suppress_output,
              docker, record_collisions, no_screen))
    driver.start()
def execute_train(gpu, exp_batch, exp_alias, suppress_output=True, number_of_workers=12, encoder_params=None):
    """Launch training for an experiment in a background process.

    Args:
        gpu: The gpu being used for this execution.
        exp_batch: The experiment batch (folder under configs) being executed.
        exp_alias: The experiment alias, file name, to be executed.
        suppress_output: Whether the spawned process suppresses its output.
        number_of_workers: Number of dataloader workers used by training.
        encoder_params: Optional dict; when provided, its 'encoder_checkpoint'
            value is appended to the experiment path name.

    Returns:
        None
    """
    # NOTE(review): the previous docstring documented `module_name`/`path`,
    # which are not parameters of this function — fixed to match the signature.
    if encoder_params:
        create_exp_path(exp_batch,
                        exp_alias + '_' + str(encoder_params['encoder_checkpoint']))
    else:
        create_exp_path(exp_batch, exp_alias)

    p = multiprocessing.Process(target=train.execute,
                                args=(gpu, exp_batch, exp_alias, suppress_output,
                                      number_of_workers, encoder_params))
    p.start()
def execute_validation(gpu, exp_batch, exp_alias, dataset, suppress_output=True):
    """Launch validation on a single dataset in a background process.

    Args:
        gpu: The gpu being used for this execution.
        exp_batch: The experiment batch (folder under configs) being executed.
        exp_alias: The experiment alias, file name, to be executed.
        dataset: The validation dataset to run on.
        suppress_output: Whether the spawned process suppresses its output.

    Returns:
        None
    """
    # NOTE(review): the previous docstring documented `module_name`/`path`,
    # which are not parameters of this function — fixed to match the signature.
    create_exp_path(exp_batch, exp_alias)
    # The difference between train and validation is the
    p = multiprocessing.Process(target=validate.execute,
                                args=(gpu, exp_batch, exp_alias, dataset,
                                      suppress_output))
    p.start()
def execute_validation(gpu, exp_batch, exp_alias, json_file_path, suppress_output=True, encoder_params=None):
    """Launch validation for an experiment in a background process.

    Args:
        gpu: The gpu being used for this execution.
        exp_batch: The experiment batch (folder under configs) being executed.
        exp_alias: The experiment alias, file name, to be executed.
        json_file_path: Path to the json file describing the validation data.
        suppress_output: Whether the spawned process suppresses its output.
        encoder_params: Optional dict; when provided, its 'encoder_checkpoint'
            value is appended to the experiment path name.

    Returns:
        None
    """
    # NOTE(review): the previous docstring documented `module_name`/`path`,
    # which are not parameters of this function — fixed to match the signature.
    if encoder_params:
        create_exp_path(exp_batch,
                        exp_alias + '_' + str(encoder_params['encoder_checkpoint']))
    else:
        create_exp_path(exp_batch, exp_alias)

    # The difference between train and validation is the
    p = multiprocessing.Process(target=validate.execute,
                                args=(gpu, exp_batch, exp_alias, json_file_path,
                                      suppress_output, encoder_params))
    p.start()
"suppress_output": True, "no_screen": args.no_screen, "docker": args.docker, "record_collisions": args.record_collisions } # There are two modes of execution if args.single_process is not None: #### # MODE 1: Single Process. Just execute a single experiment alias. #### if args.exp is None: raise ValueError( " You should set the exp alias when using single process") create_exp_path(args.folder, args.exp) if args.single_process == 'train': execute_train(gpu="0", exp_batch=args.folder, exp_alias=args.exp, suppress_output=False, number_of_workers=args.number_of_workers) elif args.single_process == 'validation': execute_validation(gpu="0", exp_batch=args.folder, exp_alias=args.exp, dataset=args.validation_datasets[0], suppress_output=False)
from coil_core import execute_train
from coilutils.general import create_log_folder, create_exp_path, erase_logs

if __name__ == '__main__':
    # Train a single, hard-coded experiment end to end on GPU "0".
    exp_folder, exp_name = 'cvpr', 'img_gtseg_camv_control'

    create_log_folder(exp_folder)
    erase_logs(exp_folder)
    create_exp_path(exp_folder, exp_name)

    execute_train('0', exp_folder, exp_name)
    print("SUCCESSFULLY RAN TRAINING")