def backup_settings(args):
    """Back up the current settings into ``setting_folder_path/<task_name>_backup``.

    Copies ``cur_data_setting.json`` (if present) and ``cur_task_setting.json``
    from ``args.setting_folder_path`` into a backup folder named after
    ``args.task_name_record`` with a ``_backup`` suffix.  For mermaid-based
    registration networks, the referenced mermaid settings file is also backed
    up as ``mermaid_nonp_settings.json``.

    :param args: parsed arguments providing ``setting_folder_path`` and
        ``task_name_record``
    :return: None
    """
    setting_folder_path = args.setting_folder_path
    dm_json_path = os.path.join(setting_folder_path, 'cur_data_setting.json')
    tsm_json_path = os.path.join(setting_folder_path, 'cur_task_setting.json')
    # data setting is optional; task setting is required
    assert os.path.isfile(tsm_json_path), "task setting not exists"
    dm = DataTask('task_reg', dm_json_path) if os.path.isfile(dm_json_path) else None
    tsm = ModelTask('task_reg', tsm_json_path)
    task_name = args.task_name_record
    setting_backup = os.path.join(setting_folder_path, task_name + '_backup')
    os.makedirs(setting_backup, exist_ok=True)
    dm_backup_json_path = os.path.join(setting_backup, 'cur_data_setting.json')
    tsm_backup_json_path = os.path.join(setting_backup, 'cur_task_setting.json')
    # mermaid-based reg nets carry an extra settings file that must be backed up too
    if tsm.task_par['tsk_set']['model'] == 'reg_net' \
            and 'mermaid' in tsm.task_par['tsk_set']['method_name']:
        mermaid_backup_json_path = os.path.join(setting_backup,
                                                'mermaid_nonp_settings.json')
        mermaid_setting_json = tsm.task_par['tsk_set']['reg']['mermaid_net'][
            'mermaid_net_json_pth']
        # empty path means "use the default file in the setting folder"
        if len(mermaid_setting_json) == 0:
            mermaid_setting_json = os.path.join(setting_folder_path,
                                                'mermaid_nonp_settings.json')
        mermaid_setting = pars.ParameterDict()
        mermaid_setting.load_JSON(mermaid_setting_json)
        mermaid_setting.write_ext_JSON(mermaid_backup_json_path)
    tsm.save(tsm_backup_json_path)
    if dm is not None:
        dm.save(dm_backup_json_path)
def init_train_env(setting_path, output_root_path, task_name, data_task_name=None):
    """Create the training environment (segmentation flavor).

    Loads ``cur_task_setting.json`` (required) and ``cur_data_setting.json``
    (optional) from *setting_path* and points their output settings at
    ``output_root_path/data_task_name``.

    :param setting_path: the path to load 'cur_task_setting.json' and
        'cur_data_setting.json' (optional if the related settings are in
        cur_task_setting)
    :param output_root_path: the output path
    :param task_name: task name i.e. run_unet, run_with_ncc_loss
    :param data_task_name: data task name i.e. lung_seg_task, oai_seg_task;
        falls back to 'custom' when None or empty
    :return: tuple (dm, tsm); dm is None when no data setting file exists
    """
    dm_json_path = os.path.join(setting_path, 'cur_data_setting.json')
    tsm_json_path = os.path.join(setting_path, 'cur_task_setting.json')
    assert os.path.isfile(tsm_json_path), "task setting not exists"
    dm = DataTask('task_reg', dm_json_path) if os.path.isfile(dm_json_path) else None
    tsm = ModelTask('task_reg', tsm_json_path)
    # bug fix: len(None) raised TypeError when the default data_task_name=None
    # was used; truthiness covers both None and '' and preserves old behavior
    data_task_name = data_task_name if data_task_name else 'custom'
    data_task_path = os.path.join(output_root_path, data_task_name)
    if dm is not None:
        dm.data_par['datapro']['dataset']['output_path'] = output_root_path
        dm.data_par['datapro']['dataset']['task_name'] = data_task_name
    tsm.task_par['tsk_set']['task_name'] = task_name
    tsm.task_par['tsk_set']['output_root_path'] = data_task_path
    return dm, tsm
def init_train_env(self):
    """Create the training environment from paths stored on the instance.

    Reads ``self.tsm_json_path`` (required) and ``self.dm_json_path``
    (optional), then redirects their output settings to
    ``self.output_root_path`` / ``self.data_task_path``.

    :return: tuple (dm, tsm); dm is None when no data setting file exists
    """
    assert os.path.isfile(self.tsm_json_path), "task setting not exists"
    dm = DataTask('task_reg', self.dm_json_path) \
        if os.path.isfile(self.dm_json_path) else None
    # bug fix: the original passed the bare name `tsm_json_path`, which is
    # undefined in method scope (NameError); the instance attribute is meant
    tsm = ModelTask('task_reg', self.tsm_json_path)
    self.data_task_name = self.data_task_name if len(self.data_task_name) > 0 else 'custom'
    if dm is not None:
        dm.data_par['datapro']['dataset']['output_path'] = self.output_root_path
        dm.data_par['datapro']['dataset']['task_name'] = self.data_task_name
    tsm.task_par['tsk_set']['task_name'] = self.task_name
    tsm.task_par['tsk_set']['output_root_path'] = self.data_task_path
    return dm, tsm
def save_settings(self):
    """Back up the current data/task settings into a ``<task_name>_backup`` folder.

    Saves ``cur_task_setting.json`` (and ``cur_data_setting.json`` when it
    exists) from the setting folder into
    ``setting_folder_path/<task_name_record>_backup``.

    :return: None
    """
    # NOTE(review): `args` is not defined in this method's scope and is not a
    # parameter — presumably a module-level namespace or it should be
    # `self.args`; confirm against the enclosing class/module.
    self.setting_folder_path = args.setting_folder_path
    # bug fix: the original read the bare names `setting_folder_path` and
    # `tsm_json_path` below, which are undefined locals (NameError); only the
    # `self.`-qualified attributes are assigned here.
    self.dm_json_path = os.path.join(self.setting_folder_path, 'cur_data_setting.json')
    self.tsm_json_path = os.path.join(self.setting_folder_path, 'cur_task_setting.json')
    dm = DataTask('task_reg', self.dm_json_path) \
        if os.path.isfile(self.dm_json_path) else None
    tsm = ModelTask('task_reg', self.tsm_json_path)
    task_name = args.task_name_record
    setting_backup = os.path.join(self.setting_folder_path, task_name + '_backup')
    os.makedirs(setting_backup, exist_ok=True)
    dm_backup_json_path = os.path.join(setting_backup, 'cur_data_setting.json')
    tsm_backup_json_path = os.path.join(setting_backup, 'cur_task_setting.json')
    tsm.save(tsm_backup_json_path)
    if dm is not None:
        dm.save(dm_backup_json_path)
def init_test_env(setting_path, output_path, file_list, fname_list=None):
    """Create the test environment (segmentation flavor).

    The file list is saved into ``output_path/seg/test/file_path_list.txt``;
    a corresponding filename list (auto-parsed when *fname_list* is None) is
    saved into ``output_path/seg/test/file_name_list.txt``.

    :param setting_path: the path to load 'cur_task_setting.json' and
        'cur_data_setting.json' (optional if the related settings are in
        cur_task_setting)
    :param output_path: the output path of the task
    :param file_list: the image list; each item is either an image path or an
        [image_path, label_path] pair
    :param fname_list: optional, the file names; auto-derived from the image
        paths when None (bug fix: the docstring called this optional but the
        parameter had no default — None is now the default)
    :return: tuple of ParameterDict, datapro (optional) and tsk_set
    """
    dm_json_path = os.path.join(setting_path, 'cur_data_setting.json')
    tsm_json_path = os.path.join(setting_path, 'cur_task_setting.json')
    assert os.path.isfile(tsm_json_path), "task setting not exists"
    dm = DataTask('task_reg', dm_json_path) if os.path.isfile(dm_json_path) else None
    tsm = ModelTask('task_reg', tsm_json_path)
    file_num = len(file_list)
    os.makedirs(os.path.join(output_path, 'seg/test'), exist_ok=True)
    os.makedirs(os.path.join(output_path, 'seg/res'), exist_ok=True)
    file_txt_path = os.path.join(output_path, 'seg/test/file_path_list.txt')
    fn_txt_path = os.path.join(output_path, 'seg/test/file_name_list.txt')
    # NOTE(review): assumes a labeled item is a 2-element [image, label] pair;
    # a bare 2-character path string would be misclassified — confirm callers
    has_label = len(file_list[0]) == 2
    if fname_list is None:
        if has_label:
            fname_list = [get_file_name(file_list[i][0]) for i in range(file_num)]
        else:
            fname_list = [get_file_name(file_list[i]) for i in range(file_num)]
    write_list_into_txt(file_txt_path, file_list)
    write_list_into_txt(fn_txt_path, fname_list)
    data_task_name = 'seg'
    cur_task_name = 'res'
    if dm is not None:
        dm.data_par['datapro']['dataset']['output_path'] = output_path
        dm.data_par['datapro']['dataset']['task_name'] = data_task_name
    tsm.task_par['tsk_set']['task_name'] = cur_task_name
    tsm.task_par['tsk_set']['output_root_path'] = os.path.join(output_path,
                                                               data_task_name)
    return dm, tsm
def backup_settings(args):
    """Copy the current setting files into a per-task backup folder.

    Writes ``cur_task_setting.json`` (always) and ``cur_data_setting.json``
    (only when present) from ``args.setting_folder_path`` into
    ``setting_folder_path/<task_name_record>_backup``.

    :param args: parsed arguments providing ``setting_folder_path`` and
        ``task_name_record``
    :return: None
    """
    src_dir = args.setting_folder_path
    data_json = os.path.join(src_dir, 'cur_data_setting.json')
    task_json = os.path.join(src_dir, 'cur_task_setting.json')
    # the data setting is optional; skip it when the file is absent
    dm = None
    if os.path.isfile(data_json):
        dm = DataTask('task_reg', data_json)
    tsm = ModelTask('task_reg', task_json)
    backup_dir = os.path.join(src_dir, args.task_name_record + '_backup')
    os.makedirs(backup_dir, exist_ok=True)
    data_backup = os.path.join(backup_dir, 'cur_data_setting.json')
    task_backup = os.path.join(backup_dir, 'cur_task_setting.json')
    tsm.save(task_backup)
    if dm is not None:
        dm.save(data_backup)
def init_train_env(setting_path, output_root_path, task_name, data_task_name=None):
    """Create the training environment (registration flavor).

    Loads ``cur_task_setting.json`` (required) and ``cur_data_setting.json``
    (optional) from *setting_path*, points their output settings at
    ``output_root_path/data_task_name``, and fills in the default mermaid
    settings path for mermaid-based registration networks.

    :param setting_path: the path to load 'cur_task_setting.json' and
        'cur_data_setting.json' (optional if the related settings are in
        cur_task_setting)
    :param output_root_path: the output path
    :param task_name: task name i.e. run_training_vsvf_task, run_training_rdmm_task
    :param data_task_name: data task name i.e. lung_reg_task, oai_reg_task;
        falls back to 'custom' when None or empty
    :return: tuple (dm, tsm); dm is None when no data setting file exists
    """
    dm_json_path = os.path.join(setting_path, 'cur_data_setting.json')
    tsm_json_path = os.path.join(setting_path, 'cur_task_setting.json')
    assert os.path.isfile(tsm_json_path), "task setting not exists"
    dm = DataTask('task_reg', dm_json_path) if os.path.isfile(dm_json_path) else None
    tsm = ModelTask('task_reg', tsm_json_path)
    # bug fix: len(None) raised TypeError when the default data_task_name=None
    # was used; truthiness covers both None and '' and preserves old behavior
    data_task_name = data_task_name if data_task_name else 'custom'
    data_task_path = os.path.join(output_root_path, data_task_name)
    if dm is not None:
        dm.data_par['datapro']['dataset']['output_path'] = output_root_path
        dm.data_par['datapro']['dataset']['task_name'] = data_task_name
    tsm.task_par['tsk_set']['task_name'] = task_name
    tsm.task_par['tsk_set']['output_root_path'] = data_task_path
    # mermaid-based reg nets default their settings path into the setting folder
    if tsm.task_par['tsk_set']['model'] == 'reg_net' \
            and 'mermaid' in tsm.task_par['tsk_set']['method_name']:
        mermaid_setting_json = tsm.task_par['tsk_set']['reg']['mermaid_net'][
            'mermaid_net_json_pth']
        if len(mermaid_setting_json) == 0:
            tsm.task_par['tsk_set']['reg']['mermaid_net'][
                'mermaid_net_json_pth'] = os.path.join(
                    setting_path, 'mermaid_nonp_settings.json')
    return dm, tsm
def init_test_env(setting_path, output_path, registration_pair_list, pair_name_list=None):
    """Create the test environment (registration flavor).

    The pair list is saved into ``output_path/reg/test/pair_path_list.txt``;
    a corresponding pair-name list (auto-parsed when *pair_name_list* is None)
    is saved into ``output_path/reg/test/pair_name_list.txt``.

    :param setting_path: the path to load 'cur_task_setting.json' and
        'cur_data_setting.json' (optional if the related settings are in
        cur_task_setting)
    :param output_path: the output path of the task
    :param registration_pair_list: tuple of (source_path_list,
        target_path_list, l_source_path_list, l_target_path_list); the label
        lists may be None
    :param pair_name_list: optional, names for each pair; auto-generated from
        the source/target paths when None
    :return: tuple of ParameterDict, datapro (optional) and tsk_set
    """
    source_path_list, target_path_list, l_source_path_list, l_target_path_list = \
        registration_pair_list
    dm_json_path = os.path.join(setting_path, 'cur_data_setting.json')
    tsm_json_path = os.path.join(setting_path, 'cur_task_setting.json')
    assert os.path.isfile(tsm_json_path), "task setting {} not exists".format(
        tsm_json_path)
    dm = DataTask('task_reg', dm_json_path) if os.path.isfile(dm_json_path) else None
    tsm = ModelTask('task_reg', tsm_json_path)
    file_num = len(source_path_list)
    # each record is [src, tgt] or [src, tgt, l_src, l_tgt] when labels exist
    if l_source_path_list is not None and l_target_path_list is not None:
        file_list = [[source_path_list[i], target_path_list[i],
                      l_source_path_list[i], l_target_path_list[i]]
                     for i in range(file_num)]
    else:
        file_list = [[source_path_list[i], target_path_list[i]]
                     for i in range(file_num)]
    os.makedirs(os.path.join(output_path, 'reg/test'), exist_ok=True)
    os.makedirs(os.path.join(output_path, 'reg/res'), exist_ok=True)
    pair_txt_path = os.path.join(output_path, 'reg/test/pair_path_list.txt')
    fn_txt_path = os.path.join(output_path, 'reg/test/pair_name_list.txt')
    if pair_name_list is None:
        pair_name_list = [
            generate_pair_name([file_list[i][0], file_list[i][1]], detail=True)
            for i in range(file_num)
        ]
    write_list_into_txt(pair_txt_path, file_list)
    write_list_into_txt(fn_txt_path, pair_name_list)
    data_task_name = 'reg'
    cur_task_name = 'res'
    if dm is not None:
        dm.data_par['datapro']['dataset']['output_path'] = output_path
        dm.data_par['datapro']['dataset']['task_name'] = data_task_name
    tsm.task_par['tsk_set']['task_name'] = cur_task_name
    tsm.task_par['tsk_set']['output_root_path'] = os.path.join(output_path,
                                                               data_task_name)
    # test-time settings are always read from the setting folder
    if tsm.task_par['tsk_set']['model'] == 'reg_net':
        tsm.task_par['tsk_set']['reg']['mermaid_net'][
            'mermaid_net_json_pth'] = os.path.join(setting_path,
                                                   'mermaid_nonp_settings.json')
    if tsm.task_par['tsk_set']['model'] == 'mermaid_iter':
        tsm.task_par['tsk_set']['reg']['mermaid_iter'][
            'mermaid_affine_json'] = os.path.join(setting_path,
                                                  'mermaid_affine_settings.json')
        tsm.task_par['tsk_set']['reg']['mermaid_iter'][
            'mermaid_nonp_json'] = os.path.join(setting_path,
                                                'mermaid_nonp_settings.json')
    return dm, tsm