# --- tail of unpack_unicode(input): the `def` header is above this chunk. ---
# Recursively converts a json.load() result: dict keys/values and list
# elements are converted in place-shape, and `unicode` strings are encoded
# to utf-8 byte strings. NOTE(review): `input.iteritems()` and `unicode`
# exist only in Python 2 — this branch would raise under Python 3, where
# json.load already returns `str`; confirm which interpreter runs this file.
    if isinstance(input, dict):
        # Convert both keys and values recursively.
        return { unpack_unicode(key): unpack_unicode(value) for key, value in input.iteritems() }
    elif isinstance(input, list):
        return [unpack_unicode(element) for element in input]
    elif isinstance(input, unicode):
        # Encode text to utf-8 bytes (Python 2 idiom).
        return input.encode('utf-8')
    else:
        # Numbers, bools, None, already-byte strings: pass through unchanged.
        return input

# Cache the (expensive, presumably I/O-heavy) getModelDict() result on disk so
# reruns can skip it. NOTE(review): the cache file has a .py extension but its
# content is JSON — consider .json to avoid confusion.
model_cache_file = 'cache_model.py'
if not os.path.exists(model_cache_file):
    # Cache miss: build the dict for the darkHiggs signal and write it out.
    modelDict = getModelDict(mcDirectory, mcProduction, 'darkHiggs_ToWWToLNujj', models)
    model_cache_file_o = open(model_cache_file, 'w')
    model_cache_file_o.write(json.dumps(modelDict, indent=2))
    model_cache_file_o.close()
    # NOTE(review): baseW_cache is initialized only on this (cache-miss)
    # branch; if the cache-hit path below also needs it, that is a latent
    # NameError — confirm against the code that consumes baseW_cache.
    baseW_cache = {}
else:
    # Cache hit: load the JSON back and strip the unicode wrappers so the
    # dict matches what getModelDict() would have returned (Python 2).
    model_cache_file_o = open(model_cache_file, 'r')
    modelDict_u = json.load(model_cache_file_o)
    model_cache_file_o.close()
    modelDict = unpack_unicode(modelDict_u)

# Accumulators for the samples.py signal definitions.
signal = {}     # per-model-point sample configuration, filled in the loop below
col_idx = 0     # running index, presumably used to pick plot colors — TODO confirm
nMP = 0         # running count of model points — TODO confirm
#print('darkHiggs signal samples:')
# Loop body continues beyond this chunk.
for model in models:
# Remaining 2HDMa parameter points: fixed sin(theta_p)=0.35, tan(beta)=1.0,
# m_chi=10 GeV, with varying (MH3, MH4) mass pairs.
param_points.append('sinp_0p35_tanb_1p0_mXd_10_MH3_700_MH4_250')
param_points.append('sinp_0p35_tanb_1p0_mXd_10_MH3_700_MH4_350')
param_points.append('sinp_0p35_tanb_1p0_mXd_10_MH3_800_MH4_250')
param_points.append('sinp_0p35_tanb_1p0_mXd_10_MH3_800_MH4_350')
param_points.append('sinp_0p35_tanb_1p0_mXd_10_MH3_900_MH4_350')

# Split models, collect baseW's
# model example name: GenModel_monoH_2HDMaTochichihWWTojjlnu_bb_sinp_0p35_tanb_8p0_mXd_10_MH3_300_MH4_150_MH2_300_MHC_300
# Build the full GenModel name for each parameter point, once per production
# mode (bb-associated and gluon-gluon fusion). MH2 and MHC are tied to MH3.
models = []
for pp in param_points:
    # Extract the MH3 value from the point name, e.g. '..._MH3_700_MH4_250' -> '700'.
    mh3 = pp.split('_MH3_')[-1].split('_')[0]
    for prod in ['bb', 'gg']:
        model = 'monoH_2HDMaTochichihWWTojjlnu_'+prod+'_'+pp+'_MH2_'+mh3+'_MHC_'+mh3
        models.append(model)
print(models)

modelDict = getModelDict(mcDirectory, mcProduction, 'pseudoscalar2HDMa_ToWWToLNujj', models)

# Collect all pieces in signal dict for samples.py
signal = {}
col_idx = 0  # running index, presumably used to pick plot colors — TODO confirm
# Loop body continues beyond this chunk.
for pp in param_points:
    # Collect both bb and gg prod from models
    # Substring match: each point name appears in exactly its two model names.
    eff_mods = []
    for model in models:
        if pp in model:
            eff_mods.append(model)
    print(pp, eff_mods)
    # Sanity check
    # Exactly one bb and one gg model are expected per parameter point;
    # anything else means the name matching above went wrong — abort.
    if not len(eff_mods) == 2:
        print('Unexpected amount of models for "'+pp+'": '+str(eff_mods))
        exit()