def are_arguments_valid(args):
  # Check for data_tiers
  if not argparse_helper.is_valid(args, 'data_tiers', ['nanoaod', 'miniaod']):
    return False, 'data_tier: ' + str(args['data_tiers']) + ' is not valid.'
  # Check for mc_data
  if not argparse_helper.is_valid(args, 'mc_data', ['mc', 'data']):
    return False, 'mc_data: ' + str(args['mc_data']) + ' is not valid.'
  # Check if input files exist with in_json_prefix
  if 'mc' in args['mc_data']:
    t_path = os.path.join(args['in_json_folder'],
                          args['in_json_prefix'] + 'mc_dataset_files_info.json')
    if not os.path.isfile(t_path):
      return False, t_path + ' does not exist.'
  if 'data' in args['mc_data']:
    t_path = os.path.join(args['in_json_folder'],
                          args['in_json_prefix'] + 'data_dataset_files_info.json')
    if not os.path.isfile(t_path):
      return False, t_path + ' does not exist.'
  # Check if output folder exists
  if not os.path.isdir(args['out_results_folder']):
    return False, 'out_results_folder: ' + args['out_results_folder'] + " doesn't exist."
  # Check if output files exist with out_results_prefix
  if 'mc' in args['mc_data']:
    t_path = os.path.join(args['out_results_folder'],
                          args['out_results_prefix'] + 'mc_dataset_files')
    if os.path.isfile(t_path):
      overwrite = ask.ask_key(
          t_path + ' already exists. Do you want to overwrite? (y/n) Default is n. ',
          ['y', 'n'], 'n')
      if overwrite == 'n':
        return False, t_path + ' already exists.'
  if 'data' in args['mc_data']:
    t_path = os.path.join(args['out_results_folder'],
                          args['out_results_prefix'] + 'data_dataset_files')
    if os.path.isfile(t_path):
      overwrite = ask.ask_key(
          t_path + ' already exists. Do you want to overwrite? (y/n) Default is n. ',
          ['y', 'n'], 'n')
      if overwrite == 'n':
        return False, t_path + ' already exists.'
  return True, ''
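# NOTE: Both validators in this file lean on two project-local helpers whose
# definitions are not shown in this excerpt: argparse_helper.is_valid and
# ask.ask_key. The sketch below is an assumption about their behavior, inferred
# only from how they are called above; it is not the project's actual code.
def _sketch_is_valid(args, key, allowed_values):
  """Assumed contract: every entry of args[key] must be one of allowed_values."""
  return all(value in allowed_values for value in args[key])

def _sketch_ask_key(prompt, allowed_keys, default_key):
  """Assumed contract: prompt until one of allowed_keys is typed; empty input returns default_key."""
  while True:
    answer = input(prompt).strip()
    if answer == '':
      return default_key
    if answer in allowed_keys:
      return answer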
def are_arguments_valid(args):
  # Check for data_tiers
  if not argparse_helper.is_valid(args, 'data_tiers', ['nanoaod', 'miniaod']):
    return False, 'data_tier: ' + str(args['data_tiers']) + ' is not valid.'
  # Check for mc_data
  if not argparse_helper.is_valid(args, 'mc_data', ['mc', 'data']):
    return False, 'mc_data: ' + str(args['mc_data']) + ' is not valid.'
  # Check for meta files
  if not os.path.isdir(args['meta_folder']):
    return False, 'meta_folder: ' + args['meta_folder'] + " doesn't exist."
  t_path = os.path.join(args['meta_folder'], 'mc_dataset_common_names')
  if not os.path.isfile(t_path):
    return False, 'meta_mc_dataset_common: ' + t_path + " doesn't exist."
  t_path = os.path.join(args['meta_folder'], 'mc_dataset_2016_names')
  if not os.path.isfile(t_path):
    return False, 'meta_mc_dataset_2016_names: ' + t_path + " doesn't exist."
  t_path = os.path.join(args['meta_folder'], 'mc_dataset_2017_names')
  if not os.path.isfile(t_path):
    return False, 'meta_mc_dataset_2017_names: ' + t_path + " doesn't exist."
  t_path = os.path.join(args['meta_folder'], 'mc_dataset_2018_names')
  if not os.path.isfile(t_path):
    return False, 'meta_mc_dataset_2018_names: ' + t_path + " doesn't exist."
  if 'mc' in args['mc_data']:
    t_path = os.path.join(args['meta_folder'], 'mc_tag_meta')
    if not os.path.isfile(t_path):
      return False, 'meta_mc_tag_meta: ' + t_path + " doesn't exist."
  if 'data' in args['mc_data']:
    t_path = os.path.join(args['meta_folder'], 'data_tag_meta')
    if not os.path.isfile(t_path):
      return False, 'meta_data_tag_meta: ' + t_path + " doesn't exist."
  # Check if output folder exists
  if not os.path.isdir(args['out_json_folder']):
    return False, 'out_json_folder: ' + args['out_json_folder'] + " doesn't exist."
  # Check if input files exist with in_json_prefix
  if 'mc' in args['mc_data']:
    t_path = os.path.join(args['in_json_folder'],
                          args['in_json_prefix'] + 'mc_datasets.json')
    if not os.path.isfile(t_path):
      return False, t_path + ' does not exist.'
  if 'data' in args['mc_data']:
    t_path = os.path.join(args['in_json_folder'],
                          args['in_json_prefix'] + 'data_datasets.json')
    if not os.path.isfile(t_path):
      return False, t_path + ' does not exist.'
  # Check if output files exist with out_json_prefix
  if 'mc' in args['mc_data']:
    t_path = os.path.join(args['out_json_folder'],
                          args['out_json_prefix'] + 'mc_multiple_selection.json')
    if os.path.isfile(t_path):
      overwrite = ask.ask_key(
          t_path + ' already exists. Do you want to overwrite? (y/n) Default is n. ',
          ['y', 'n'], 'n')
      if overwrite == 'n':
        return False, t_path + ' already exists.'
    t_path = os.path.join(args['out_json_folder'],
                          args['out_json_prefix'] + 'mc_datasets.json')
    if os.path.isfile(t_path):
      overwrite = ask.ask_key(
          t_path + ' already exists. Do you want to overwrite? (y/n) Default is n. ',
          ['y', 'n'], 'n')
      if overwrite == 'n':
        return False, t_path + ' already exists.'
    t_path = os.path.join(args['out_json_folder'],
                          args['out_json_prefix'] + 'data_datasets.json')
    if os.path.isfile(t_path):
      overwrite = ask.ask_key(
          t_path + ' already exists. Do you want to overwrite? (y/n) Default is n. ',
          ['y', 'n'], 'n')
      if overwrite == 'n':
        return False, t_path + ' already exists.'
  return True, ''
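# Both validators return an (ok, message) tuple instead of raising, so the calling
# script decides how to fail. The function below is an illustrative sketch of that
# call pattern, not part of the original scripts: the dictionary keys mirror what
# the validator above reads, but the values are placeholders, not real paths.
def _example_validation_call():
  example_args = {
      'data_tiers': ['nanoaod'],
      'mc_data': ['mc', 'data'],
      'meta_folder': 'meta',
      'in_json_folder': 'jsons',
      'in_json_prefix': '',
      'out_json_folder': 'jsons',
      'out_json_prefix': '',
  }
  valid, message = are_arguments_valid(example_args)
  if not valid:
    # Abort with the validator's message, mirroring the check-then-exit pattern.
    raise SystemExit('[Error] ' + message)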