def read_scales():
    """Read cached background scale factors from SCALES_FILE.

    Returns the unpickled scales dict, or an empty dict when the cache
    file does not exist yet.
    """
    scales = {}
    if os.path.isfile(SCALES_FILE):
        log.info("reading background scale factors from %s" % SCALES_FILE)
        # hold the inter-process lock so a concurrent write_scales()
        # cannot truncate the file while it is being read
        with lock(SCALES_FILE):
            # pickle data must be read in binary mode: required on
            # Python 3, and also correct on Python 2 / Windows
            with open(SCALES_FILE, 'rb') as cache:
                scales = pickle.load(cache)
    return scales
def read_scales():
    """Read cached background scale factors from SCALES_FILE.

    Returns an empty dict when no cache file exists yet.
    """
    scales = {}
    if not os.path.isfile(SCALES_FILE):
        return scales
    log.info("reading background scale factors from %s" % SCALES_FILE)
    # take the inter-process lock so a concurrent writer cannot
    # truncate the cache while it is being read
    with lock(SCALES_FILE):
        with open(SCALES_FILE) as cache:
            # NOTE(review): yaml.load without an explicit Loader can
            # construct arbitrary Python objects; acceptable for this
            # trusted local cache, but consider yaml.safe_load
            scales = yaml.load(cache)
    return scales
def run(self):
    """Write the fitted model frame and component histograms to the
    shared ROOT file, then record each component's yield (integral and
    error) in the shared yields pickle, keyed by category name.
    """
    model = FitModel(self.ws, self.cat)
    process_fitmodel(model, self.fit_res)
    # redundant [c for c in ...] copy replaced by list()
    components = list(model.components) + [model.signal, model.background]
    # serialize access to the output ROOT file across worker processes
    with lock(self.root_name):
        with root_open(self.root_name, 'update') as fout:
            log.info('{0}'.format(model.frame))
            model.frame.Write()
            for comp in components:
                log.info('{0}: {1}'.format(comp.hist, comp.hist.Integral()))
                comp.hist.Write()
            model.data_hist.Write()
    # read-modify-write the shared yields pickle under the same lock
    with lock(self.pickle_name):
        # binary mode is required for pickle on Python 3
        with open(self.pickle_name, 'rb') as pickle_file:
            yields = pickle.load(pickle_file)
        yields_cat = {}
        for comp in components:
            yields_cat[comp.name] = (comp.integral, comp.integral_err)
        # data has no fit error; stored as a 1-tuple
        yields_cat['Data'] = (model.data_hist.Integral(),)
        yields[model.cat.name] = yields_cat
        with open(self.pickle_name, 'wb') as pickle_file:
            pickle.dump(yields, pickle_file)
def run(self):
    """Write the fitted model frame and component histograms to the
    shared ROOT file, then record each component's yield (integral and
    error) in the shared yields pickle, keyed by category name.
    """
    model = FitModel(self.ws, self.cat)
    process_fitmodel(model, self.fit_res)
    # redundant [c for c in ...] copy replaced by list()
    components = list(model.components) + [model.signal, model.background]
    # serialize access to the output ROOT file across worker processes
    with lock(self.root_name):
        with root_open(self.root_name, 'update') as fout:
            log.info('{0}'.format(model.frame))
            model.frame.Write()
            for comp in components:
                log.info('{0}: {1}'.format(comp.hist, comp.hist.Integral()))
                comp.hist.Write()
            model.data_hist.Write()
    # read-modify-write the shared yields pickle under the same lock
    with lock(self.pickle_name):
        # binary mode is required for pickle on Python 3
        with open(self.pickle_name, 'rb') as pickle_file:
            yields = pickle.load(pickle_file)
        yields_cat = {}
        for comp in components:
            yields_cat[comp.name] = (comp.integral, comp.integral_err)
        # data has no fit error; stored as a 1-tuple
        yields_cat['Data'] = (model.data_hist.Integral(),)
        yields[model.cat.name] = yields_cat
        with open(self.pickle_name, 'wb') as pickle_file:
            pickle.dump(yields, pickle_file)
def run(self):
    """Single scan point: evaluate the NLL with one nuisance parameter
    fixed at the requested value and append (value, nll) to the shared
    scans pickle.
    """
    # get the nll value for the given nuisance parameter fixed at the given val
    nll = get_nuis_nll(
        self.ws, self.mc, self.nuispar_name, self.nuispar_val,
        self.ws_snapshot)
    # read-modify-write the shared result list under a file lock so
    # parallel scan points do not clobber each other
    with lock(self.pickle_name):
        # binary mode is required for pickle on Python 3
        with open(self.pickle_name, 'rb') as pickle_file:
            scans = pickle.load(pickle_file)
        scans.append((self.nuispar_val, nll))
        with open(self.pickle_name, 'wb') as pickle_file:
            pickle.dump(scans, pickle_file)
def run(self):
    """Compute the pre-fit / post-fit POI pulls and the NP pull for one
    nuisance parameter and store them in the shared pulls pickle,
    keyed by the NP name.
    """
    # get the pulls
    poi_prefit_pull, poi_postfit_pull, np_pull = get_pull(
        self.ws, self.mc, self.poi_name, self.np_name, self.ws_snapshot)
    # read-modify-write the shared result dict under a file lock so
    # parallel jobs do not clobber each other
    with lock(self.pickle_name):
        # binary mode is required for pickle on Python 3
        with open(self.pickle_name, "rb") as pickle_file:
            pulls = pickle.load(pickle_file)
        # tolerate a freshly-initialized (non-dict) pickle payload
        if not isinstance(pulls, dict):
            pulls = {}
        pulls[self.np_name] = {
            "poi_prefit": poi_prefit_pull,
            "poi_postfit": poi_postfit_pull,
            "np": np_pull,
        }
        with open(self.pickle_name, "wb") as pickle_file:
            pickle.dump(pulls, pickle_file)
def run(self):
    """Compute the pre-fit / post-fit POI pulls and the NP pull for one
    nuisance parameter and store them in the shared pulls pickle,
    keyed by the NP name.
    """
    # get the pulls
    poi_prefit_pull, poi_postfit_pull, np_pull = get_pull(
        self.ws, self.mc, self.poi_name, self.np_name, self.ws_snapshot)
    # read-modify-write the shared result dict under a file lock so
    # parallel jobs do not clobber each other
    with lock(self.pickle_name):
        # binary mode is required for pickle on Python 3
        with open(self.pickle_name, 'rb') as pickle_file:
            pulls = pickle.load(pickle_file)
        # tolerate a freshly-initialized (non-dict) pickle payload
        if not isinstance(pulls, dict):
            pulls = {}
        pulls[self.np_name] = {
            'poi_prefit': poi_prefit_pull,
            'poi_postfit': poi_postfit_pull,
            'np': np_pull,
        }
        with open(self.pickle_name, 'wb') as pickle_file:
            pickle.dump(pulls, pickle_file)
def write_scales():
    """Merge locally-updated scale factors into SCALES_FILE.

    No-op unless the in-memory scales were modified. The read-merge-
    write cycle runs under a file lock so concurrent processes do not
    clobber each other's updates; only the keys listed in UPDATED are
    copied from the in-memory SCALES into the cache.
    """
    if not MODIFIED:
        return
    with lock(SCALES_FILE):
        # merge with possible changes made by another process
        scales = {}
        if os.path.isfile(SCALES_FILE):
            # binary mode is required for pickle on Python 3
            with open(SCALES_FILE, 'rb') as cache:
                scales = pickle.load(cache)
        for year, category, embedded, param, shape_region in UPDATED:
            # setdefault chain replaces the repeated "if key not in"
            # ladder; behavior is identical
            node = scales.setdefault(year, {}) \
                         .setdefault(category, {}) \
                         .setdefault(embedded, {}) \
                         .setdefault(param, {})
            node[shape_region] = \
                SCALES[year][category][embedded][param][shape_region]
        with open(SCALES_FILE, 'wb') as cache:
            pickle.dump(scales, cache)
def write_scales():
    """Merge locally-updated scale factors into the YAML cache file.

    Does nothing unless the in-memory scales were modified. The merge
    runs under a file lock so concurrent processes do not clobber each
    other's updates; only the keys listed in UPDATED are copied from
    the in-memory SCALES into the cache.
    """
    if not MODIFIED:
        return
    with lock(SCALES_FILE):
        # start from whatever another process may already have written
        scales = {}
        if os.path.isfile(SCALES_FILE):
            with open(SCALES_FILE) as cache:
                # NOTE(review): yaml.load without an explicit Loader can
                # construct arbitrary Python objects; acceptable for
                # this trusted local cache, but consider yaml.safe_load
                scales = yaml.load(cache)
        for entry in UPDATED:
            year, category, embedded, param, shape_region, target_region = entry
            # walk/create the nested dict levels down to the leaf
            node = scales.setdefault(year, {})
            node = node.setdefault(category, {})
            node = node.setdefault(embedded, {})
            node = node.setdefault(param, {})
            node = node.setdefault(shape_region, {})
            node[target_region] = \
                SCALES[year][category][embedded][param][shape_region][target_region]
        with open(SCALES_FILE, 'w') as cache:
            yaml.dump(scales, cache, default_flow_style=False)
from . import log; log = log[__name__]
from rootpy import asrootpy
from rootpy.utils.lock import lock
import os
import ROOT

HERE = os.path.dirname(os.path.abspath(__file__))

# compile the smoothing macro once, under a lock so parallel processes
# do not race on the shared build artifact
with lock(HERE):
    ROOT.gSystem.CompileMacro(
        os.path.join(HERE, 'src', 'smooth.C'), 'k', 'smooth', '/tmp')
from ROOT import Smooth

__all__ = [
    'smooth',
    'smooth_alt',
]


def smooth(nom, sys, frac=0.5, **kwargs):
    """Return the rootpy-wrapped result of Smooth.EqualArea applied to
    the nominal and systematic histograms; kwargs pass to asrootpy."""
    log.info('smoothing {0}'.format(sys.name))
    smoothed = Smooth.EqualArea(nom, sys, frac)
    return asrootpy(smoothed, **kwargs)


def smooth_alt(nom, sys, **kwargs):
    """Return the rootpy-wrapped result of Smooth.EqualAreaGabriel
    applied to the nominal and systematic histograms."""
    log.info('smoothing {0}'.format(sys.name))
    smoothed = Smooth.EqualAreaGabriel(nom, sys)
    return asrootpy(smoothed, **kwargs)