def GetSystsList(mc16a, mc16c, r20):
    # Load the scale-factor JSONs for the two MC16 campaigns and the release-20 reference
    dt_a = toolkit.json_load(mc16a)
    dt_c = toolkit.json_load(mc16c)
    dt_r20 = toolkit.json_load(r20)
    new_dt_a = deepcopy(dt_a)
    new_dt_c = deepcopy(dt_c)
    # Compare the systematic keys available in each input
    keys_a = set(dt_a['sf'].keys())
    keys_c = set(dt_c['sf'].keys())
    keys_r20 = set(dt_r20['sf'].keys())
    c_a = keys_a - keys_c        # systematics present in mc16a but missing in mc16c
    a_r20 = keys_r20 - keys_a    # systematics present in release 20 but missing in mc16a
    a_r20 = a_r20 - set(['SysJvtEfficiency'])
    print a_r20
    print c_a
    # Input (release 20) and output (MC16) pT binning used for the extrapolation
    xbinsIn = [20, 30, 60, 90, 140, 200, 300]
    xbinsOut = [20, 30, 40, 60, 85, 110, 140, 175, 250, 600]
    central = dt_r20['sf']['nominal']
    error = dt_r20['sf']['mc stats']
    extroplator = ExtroplateErrors(xbinsIn, xbinsOut)
    # Fit and extrapolate the systematics missing in mc16a
    systs = {}
    for entry in a_r20:
        systs[entry] = dt_r20['sf'][entry][1:]
    central_new_a_r20, systs_new_a_r20 = extroplator.PerformFit(
        'test1', central[1:], error[1:], systs, True)
    # Fit and extrapolate the systematics missing in mc16c
    systs = {}
    for entry in c_a:
        systs[entry] = dt_r20['sf'][entry][1:]
    central_new_c_a, systs_new_c_a = extroplator.PerformFit(
        'test2', central[1:], error[1:], systs, True)
    # Insert the extrapolated systematics into the updated dictionaries
    for entry in a_r20:
        new_dt_a['sf'][entry] = systs_new_a_r20[entry]
        new_dt_c['sf'][entry] = systs_new_a_r20[entry]
    for entry in c_a:
        new_dt_c['sf'][entry] = systs_new_c_a[entry]
    with open('mc16a_extraplote.json', 'w') as fa, \
         open('mc16c_extraplote.json', 'w') as fc:
        toolkit.DumpToJson(new_dt_a, fa)
        toolkit.DumpToJson(new_dt_c, fc)

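# Hedged usage sketch: the three paths below are placeholders, not files shipped with
# this code. GetSystsList writes 'mc16a_extraplote.json' and 'mc16c_extraplote.json'
# to the working directory as a side effect and returns nothing.
if __name__ == '__main__':
    GetSystsList('sf_mc16a.json', 'sf_mc16c.json', 'sf_r20.json')
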
def init_varList(self):
    if not self._onlyNominal:
        vars_json = '%s/variations.json' % (self.outputDir)
        # Build the variation list once and cache it; reuse the cached JSON afterwards
        if not os.path.isfile(vars_json):
            data = root_toolkit.GetVarsList(
                self.file,
                self._format['nominal']['var'],
                self._format['variation']['var'])
            with open(vars_json, 'w') as f:
                toolkit.DumpToJson(data, f)
        with open(vars_json, 'r') as f:
            self.varsList = toolkit.json_loads(f.read())

def newFun(*args, **kw):
    json_name = '%s/%s_%s.json' % (self.outputDir, name, self.CurrentItem_Str)
    # Recompute only if caching is disabled or the cached JSON is missing
    if not self.isLoadRawFromCache or not os.path.isfile(json_name):
        self.Stdout('Producing json file :', '\t%s' % (json_name))
        res_h = fun(*args, **kw)
        # Convert the histograms to a JSON-serialisable structure and cache them
        res_j = self.JsonToHist(res_h, Hist, inverse=True)
        with open(json_name, 'w') as f:
            toolkit.DumpToJson(res_j, f)
    # Reload from the cache and convert back to histograms
    with open(json_name, 'r') as f:
        res_j = toolkit.json_loads(f.read())
    res_h = self.JsonToHist(res_j, Hist)
    return res_h

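# Hedged sketch of the enclosing structure assumed above: newFun reads 'fun', 'name',
# 'self' and 'Hist' from its closure, which suggests it is created inside an instance
# method acting as a JSON-cache wrapper factory, roughly as below. The method name
# 'WrapWithJsonCache' is an illustrative assumption, not the repository's actual API.
def WrapWithJsonCache(self, fun, name):
    def newFun(*args, **kw):
        # ... body exactly as the newFun above: recompute and cache, or reload
        # the cached JSON and rebuild the histograms ...
        return fun(*args, **kw)
    return newFun
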
def DumpDishes(self, nominal, scales, variations, modellings):
    def dump_dish_err(data, dish, key):
        # Each "dish" entry holds a (down, up) pair of variation histograms
        for name in dish.keys():
            data[key][name + '__down'] = dish[name][0][key].vals
            data[key][name + '__up'] = dish[name][1][key].vals

    data = {}
    for key in nominal.keys():
        data[key] = {}
        data[key]['nominal'] = nominal[key].vals
        dump_dish_err(data, scales, key)
        dump_dish_err(data, variations, key)
        dump_dish_err(data, modellings, key)
    self.Stdout('going to dump dishes')
    with open('%s/variations_%s.json' % (self.outputDir, self.CurrentItem_Str), 'w') as f:
        toolkit.DumpToJson(data, f)

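# Hedged illustration of the JSON layout DumpDishes writes, inferred from the code
# above: per histogram key, a nominal array plus paired down/up arrays for every named
# scale, variation and modelling uncertainty. All names and values below are fake
# placeholders for illustration only.
example_dishes_layout = {
    'some_key': {
        'nominal': [1.00, 0.98],
        'SomeScale__down': [0.97, 0.95],
        'SomeScale__up': [1.03, 1.01],
    },
}
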
def DumpResults(self, results, err_stat, err_mod, err_scal, err_var):
    def dump_err(data, errs, key):
        for name in errs.keys():
            data[key][name] = errs[name][key].vals

    data = {}
    for key in results.keys():
        data[key] = {}
        data[key]['nominal'] = results[key].vals
        # Attach the statistical, modelling, scale and variation uncertainties
        dump_err(data, err_stat, key)
        dump_err(data, err_mod, key)
        dump_err(data, err_scal, key)
        dump_err(data, err_var, key)
    self.Stdout('going to dump results')
    with open('%s/output_%s.json' % (self.outputDir, self.CurrentItem_Str), 'w') as f:
        toolkit.DumpToJson(data, f)

def UpdateConfig(self, fname):
    config = self.GetConfig_default()
    with open(fname, 'w') as f:
        toolkit.DumpToJson(config, f)
    print fname, 'updated.'

def WorkParallel(self):
    output_path = self.output_path
    output_path, project_name = os.path.split(output_path)
    if len(project_name) == 0:
        # Handle a trailing slash in the output path
        output_path, project_name = os.path.split(output_path)
    toolkit.mkdir('./{0:}/submit'.format(self.output_path))
    config = copy.deepcopy(self.calibration_config)
    cats = config['cats']
    keys = cats.keys()
    # One batch job per combination of calibration categories
    values = itertools.product(*[cats[key] for key in keys])
    jobids = []
    for value in values:
        name = '_'.join(
            [str(k[0]) + '_' + str(k[1]) for k in zip(keys, value)])
        # Restrict the job's config to this single category combination
        for n, key in enumerate(keys):
            cats[key] = [value[n]]
        config_name = './{0:}/submit/Config_{1:}.json'.format(
            self.output_path, name)
        script_name = './{0:}/submit/Submit_{1:}.sh'.format(
            self.output_path, name)
        log_name = './{0:}/submit/Log_{1:}'.format(self.output_path, name)
        with open(config_name, 'w') as f:
            toolkit.DumpToJson(config, f)
        pwd = os.environ['PWD']
        isLoadRaw = '--load_raw' if self.isLoadRawFromCache else ''
        # Write the submission script for this job
        with open(script_name, 'w') as f:
            print >> f, 'INPUT="{0:}"'.format(self.input_file)
            print >> f, 'OUTPUT="{0:}"'.format(self.output_path)
            print >> f, 'LOADRAW="{0:}"'.format(int(self.isLoadRawFromCache))
            print >> f, 'CONFIG="{0:}"'.format(config_name)
            print >> f, 'cd {0:}'.format(pwd)
            print >> f, 'source ./setup.sh'
            print >> f, 'python ./scripts/submit_hook.py --input_file ${INPUT} --output_path ${OUTPUT} --config_file ${CONFIG} ' + isLoadRaw
        # Submit to the batch system and remember the job name
        cmd = 'qsub -cwd -P P_atlas -l sps=1 -l h_vmem=8g -q long -N {0:} -o {1:}.log -e {1:}.err {2:}'.format(
            name, log_name, script_name)
        jobids.append(name)
        res = commands.getstatusoutput(cmd)
        print res[1]
    # Submit a trivial "join" job held on all the jobs above, so this call blocks
    # (-sync y) until every job has finished
    join_jobs = './submit/{0:}/Join'.format(project_name)
    with open(join_jobs + '.sh', 'w') as f:
        print >> f, '#!/bin/bash'
        print >> f, 'cd {0:}'.format(pwd)
        print >> f, 'echo "Finished!"'
    jids = ','.join(jobids)
    cmd_join = 'qsub -cwd -o {0:}.log -e {0:}.err -hold_jid {1:} -sync y {0:}.sh'.format(
        join_jobs, jids)
    print '\nWaiting for all jobs to be finished'
    res = commands.getstatusoutput(cmd_join)
    print res
    raise RuntimeError('debug')

def LoadPickleDumpJson(pickle_file_path):
    # Convert a pickled result into the JSON format used elsewhere in the framework;
    # the output path is hard-coded
    with open(pickle_file_path, 'rb') as f:
        data = pickle.load(f)
    with open('./test/data/Nov.24.Fixed.json', 'w') as f:
        toolkit.DumpToJson(data, f)

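# Hedged usage sketch: the pickle path below is a placeholder; LoadPickleDumpJson
# always writes to the hard-coded './test/data/Nov.24.Fixed.json' path above.
if __name__ == '__main__':
    LoadPickleDumpJson('./test/data/results.pickle')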