def cachedTemplate(self, selection, weight='(1)', save=True, overwrite=False):

    key = {"selection": selection, "weight": weight, "source": self.source_sample.name}

    if (self.cache and self.cache.contains(key)) and not overwrite:
        result = self.cache.get(key)
        logger.info("Loaded MC PU profile from %s" % (self.cache.database_file))
        logger.debug("Key used: %s result: %r" % (key, result))
    elif self.cache:
        logger.info("Obtain PU profile for %s" % (key,))
        result = self.makeTemplate(selection=selection, weight=weight)
        if result:
            result = self.cache.addData(key, result, overwrite=save)
            logger.info("Adding PU profile to cache for %s : %r" % (key, result))
        else:
            logger.warning("Couldn't create PU profile to cache for %s : %r" % (key, result))
    else:
        result = self.makeTemplate(selection=selection, weight=weight)

    return result
def cachedTemplate(self, selection, weight='weight', save=True, overwrite=False):

    key = {"selection": selection, "weight": weight,
           "source": self.source_sample.name, "target": self.target_sample.name}
    #key = self.uniqueKey( selection, weight, self.source_sample.name, self.target_sample.name)

    if (self.cache and self.cache.contains(key)) and not overwrite:
        result = self.cache.get(key)
        logger.info("Loaded reweighting template from %s for %s : %r" % (self.cache.database_file, key, result))
        logger.debug("With properties %s : %s" % (key, result))
    elif self.cache:
        logger.info("Obtain template for %s" % (key,))
        result = self.makeTemplate(selection=selection, weight=weight)
        if result:
            result = self.cache.addData(key, result, overwrite=save)
            logger.info("Adding template to cache for %s : %r" % (key, result))
        else:
            logger.warning("Couldn't create template to cache for %s : %r" % (key, result))
    else:
        result = self.makeTemplate(selection=selection, weight=weight)

    return result
def cachedTemplate(self, selection, weight='weight', save=True, overwrite=False):

    key = self.uniqueKey(selection, weight, self.sample.name)

    if (self.cache and self.cache.contains(key)) and not overwrite:
        result = self.cache.get(key)
        logger.debug("Loading cached template for %s : %s" % (key, result))
    elif self.cache:
        logger.info("Obtain template for %s" % (key,))
        result = self.makeTemplate(selection=selection, weight=weight)
        result = self.cache.add(key, result, save=save)
        logger.debug("Adding template to cache for %s : %r" % (key, result))
    else:
        result = self.makeTemplate(selection=selection, weight=weight)

    return result
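# A minimal, self-contained sketch of the cache-or-compute pattern the three
# cachedTemplate variants above share. Illustration only (a plain dict stands in
# for the repository's Cache class, whose contains/get/add/addData API differs):
def cached_call(cache, key, compute, overwrite=False):
    # Return the cached result for 'key' if present; otherwise compute it,
    # store it (if a cache is available and the result is non-trivial), return it.
    if cache is not None and key in cache and not overwrite:
        return cache[key]
    result = compute()
    if cache is not None and result is not None:
        cache[key] = result
    return result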
def filler(event):

    event.run, event.lumi, event.evt = reader.evt

    if reader.position % 100 == 0:
        logger.info("At event %i/%i", reader.position, reader.nEvents)

    if args.addReweights:
        event.nrw = weightInfo.nid
        lhe_weights = reader.products['lhe'].weights()
        weights = []
        param_points = []
        for weight in lhe_weights:
            # Store nominal weight (First position!)
            if weight.id == 'rwgt_1':
                event.rw_nominal = weight.wgt
            if not weight.id in weightInfo.id:
                continue
            pos = weightInfo.data[weight.id]
            event.rw_w[pos] = weight.wgt
            weights.append(weight.wgt)
            interpreted_weight = interpret_weight(weight.id)
            for var in weightInfo.variables:
                getattr(event, "rw_" + var)[pos] = interpreted_weight[var]
            # weight data for interpolation
            if not hyperPoly.initialized:
                param_points.append(tuple(interpreted_weight[var] for var in weightInfo.variables))

        # Initialize
        if not hyperPoly.initialized:
            hyperPoly.initialize(param_points)
        coeff = hyperPoly.get_parametrization(weights)  # = HyperPoly(weight_data, args.interpolationOrder)
        event.np = hyperPoly.ndof
        event.chi2_ndof = hyperPoly.chi2_ndof(coeff, weights)
        #logger.debug( "chi2_ndof %f coeff %r", event.chi2_ndof, coeff )
        logger.debug("chi2_ndof %f", event.chi2_ndof)
        for n in xrange(hyperPoly.ndof):
            event.p_C[n] = coeff[n]

    # All gen particles
    gp = reader.products['gp']

    # for searching
    search = GenSearch(gp)

    # find heavy objects before they decay
    tops = map(lambda t: {var: getattr(t, var)() for var in top_varnames},
               filter(lambda p: abs(p.pdgId()) == 6 and search.isLast(p), gp))
    tops.sort(key=lambda p: -p['pt'])
    fill_vector(event, "top", top_varnames, tops)

    gen_Zs = filter(lambda p: abs(p.pdgId()) == 23 and search.isLast(p), gp)
    gen_Zs.sort(key=lambda p: -p.pt())
    if len(gen_Zs) > 0:
        gen_Z = gen_Zs[0]
        for var in Z_read_varnames:
            setattr(event, "Z_" + var, getattr(gen_Z, var)())
    else:
        gen_Z = None

    if gen_Z is not None:
        d1, d2 = gen_Z.daughter(0), gen_Z.daughter(1)
        if d1.pdgId() > 0:
            lm, lp = d1, d2
        else:
            lm, lp = d2, d1
        event.Z_daughterPdg = lm.pdgId()
        event.Z_cosThetaStar = cosThetaStar(gen_Z.mass(), gen_Z.pt(), gen_Z.eta(), gen_Z.phi(),
                                            lm.pt(), lm.eta(), lm.phi())

    gen_Gammas = filter(lambda p: abs(p.pdgId()) == 22 and search.isLast(p), gp)
    gen_Gammas.sort(key=lambda p: -p.pt())
    if len(gen_Gammas) > 0:
        gen_Gamma = gen_Gammas[0]
        for var in gamma_read_varnames:
            setattr(event, "gamma_" + var, getattr(gen_Gamma, var)())
    else:
        gen_Gamma = None

    # find all leptons
    leptons = [(search.ascend(l), l) for l in filter(
        lambda p: abs(p.pdgId()) in [11, 13] and search.isLast(p) and p.pt() >= 0, gp)]
    leps = []
    for first, last in leptons:
        mother_pdgId = first.mother(0).pdgId() if first.numberOfMothers() > 0 else -1
        leps.append({var: getattr(last, var)() for var in lep_varnames})
        leps[-1]['motherPdgId'] = mother_pdgId
    leps.sort(key=lambda p: -p['pt'])
    fill_vector(event, "GenLep", lep_all_varnames, leps)

    # MET
    event.GenMet_pt = reader.products['genMET'][0].pt()
    event.GenMet_phi = reader.products['genMET'][0].phi()

    # jets
    jets = map(lambda t: {var: getattr(t, var)() for var in jet_read_varnames},
               filter(lambda j: j.pt() > 30, reader.products['genJets']))

    # jet/lepton disambiguation
    jets = filter(lambda j: (min([999] + [deltaR2(j, l) for l in leps if l['pt'] > 10]) > 0.3**2), jets)

    # find b's from tops:
    b_partons = [b for b in filter(
        lambda p: abs(p.pdgId()) == 5 and p.numberOfMothers() == 1 and abs(p.mother(0).pdgId()) == 6, gp)]

    for jet in jets:
        jet['matchBParton'] = (min([999] + [deltaR2(jet, {'eta': b.eta(), 'phi': b.phi()})
                                            for b in b_partons]) < 0.2**2)

    jets.sort(key=lambda p: -p['pt'])
    fill_vector(event, "GenJet", jet_write_varnames, jets)
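# Hedged sketch of what a fill_vector helper like the one used in filler() above
# might do: flatten a pt-sorted list of per-object dicts into flat per-event
# branches (event.<collection>_<var>[i]) plus a counter. The real helper is
# imported elsewhere in the repository; this is an assumption for illustration.
def fill_vector_sketch(event, collection_name, varnames, objs):
    setattr(event, "n" + collection_name, len(objs))
    for i, obj in enumerate(objs):
        for var in varnames:
            getattr(event, collection_name + "_" + var)[i] = obj[var]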
    except:
        raise ValueError("Could not load sample '%s' from %s " % (options.sample, module_))

sample = lepton_heppy_mapper.from_heppy_samplename(heppy_sample.name, maxN=maxN)

if sample is None or len(sample.files) == 0:
    logger.info("Sample %r is empty. Exiting" % sample)
    sys.exit(-1)
else:
    logger.info("Sample %s has %i files", sample.name, len(sample.files))

len_orig = len(sample.files)
sample = sample.split(n=options.nJobs, nSub=options.job)
logger.info("  Run over %i/%i files for job %i/%i." % (len(sample.files), len_orig, options.job, options.nJobs))
logger.debug("Files to be run over:\n%s", "\n".join(sample.files))

# output directory
output_directory = os.path.join(skim_output_directory, options.version + ('_small' if options.small else ''), str(options.year))

leptonClasses = [{'Name': 'Prompt',    'Var': 'lep_isPromptId'},
                 {'Name': 'NonPrompt', 'Var': 'lep_isNonPromptId'},
                 {'Name': 'Fake',      'Var': 'lep_isFakeId'}]
leptonFlavours = [{'Name': 'muo', 'pdgId': 13},
                  {'Name': 'ele', 'pdgId': 11}]

# pt cuts
ptCuts = [[10, float("inf")]]

# make FileList
def _estimate(self, region, channel, setup):
    ''' Concrete implementation of abstract method 'estimate' as defined in Systematic
    '''
    logger.debug("Obtain polarisation Estimate for channel %s region %s", channel, region)

    # Obtain fit template from an unbiased Z sample. FIXME: Should be eta and pt reweighted
    #def_setup = setup.defaultClone()  # Don't use systematic variations for templates

    sub_templates = []
    # Here, I assume the sample(!) is the same for all flavors
    template_maker = PolarisationTemplateMaker(setup.samples['TTZ']['3mu'],
                                               cacheDir=os.path.join(results_directory, 'PolarisationTemplateCache'))
    # Make the template cut on genZ. Approximation. Don't use syst. variations.
    region_cut = region.cutString().replace('Z_pt', 'genZ_pt')
    cuts = [region_cut]
    # If we know which Z flavor, then require it for the template
    if channel in ['3e', '2e1mu']:
        cuts.append("genZ_daughter_flavor==11")
    elif channel in ['3mu', '2mu1e']:
        cuts.append("genZ_daughter_flavor==13")
    cut = "&&".join(cuts)

    logger.debug("Making sub_template '%s' for polarisation fit using selection '%s' and weight '%s'", channel, cut, 'weight')
    templates = template_maker.cachedTemplate(cut, 'weight')

    # Obtain selection strings & weight from setup
    background_mc = {}
    background_mc_keys = []
    ttz_mc = []
    data = []
    for ch in ([channel] if channel != 'all' else channels):
        # Background MC
        for sample_name, sample in setup.samples.iteritems():
            if sample_name == 'Data':
                pre_selection = setup.preselection('Data', channel=ch)
                cut = "&&".join([region.cutString(), pre_selection['cut']])
                weight = pre_selection['weightStr']
            else:
                pre_selection = setup.preselection('MC', channel=ch)
                cut = "&&".join([region.cutString(setup.sys['selectionModifier']), pre_selection['cut']])
                weight = pre_selection['weightStr']
                if sample_name not in background_mc_keys:
                    background_mc_keys.append(sample_name)

            logger.info("Get cosThetaStar histogram for sample %s channel %s cut %s weight %s" % (sample[ch].name, ch, cut, weight))
            h = sample[ch].get1DHistoFromDraw('cosThetaStar', [20, -1, 1], selectionString=cut, weightString=weight)
            # Append & Scale
            if sample_name == 'Data':
                data.append(h)
            elif sample_name == 'TTZ':
                h.Scale(setup.lumi[ch] / 1000.)
                ttz_mc.append(h)
            else:
                h.Scale(setup.lumi[ch] / 1000.)
                if background_mc.has_key(sample_name):
                    background_mc[sample_name].append(h)
                else:
                    background_mc[sample_name] = [h]

    h_background_mc = []
    for sample_name in background_mc_keys:
        if sample_name == 'TTZ':
            continue
        h_background_mc.append(sum_histos(background_mc[sample_name]))
        h_background_mc[-1].style = styles.fillStyle(getattr(color, sample_name))
        h_background_mc[-1].legendText = sample_name

    h_ttz_mc = sum_histos(ttz_mc)
    h_ttz_mc.style = styles.fillStyle(color.TTZtoLLNuNu)
    h_ttz_mc.legendText = 'TTZ'

    h_data = sum_histos(data)
    h_data.style = styles.errorStyle(ROOT.kBlack)
    h_data.legendText = 'Data (%s)' % channel

    # Subtract MC from Data
    if self.usePseudoData:
        h_data_subtracted = h_ttz_mc.Clone()
        h_data_subtracted.Sumw2(0)
    else:
        scale = h_data.Integral() / (sum([h.Integral() for h in h_background_mc]) + h_ttz_mc.Integral())
        for h in h_background_mc:
            h.Scale(1. / scale)
        h_ttz_mc.Scale(1. / scale)
        h_data_subtracted = sum_histos(h_background_mc)
        h_data_subtracted.Scale(-1)
        h_data_subtracted.Add(h_data)

    h_data.style = styles.errorStyle(ROOT.kBlack)
    h_data.legendText = 'Data (%s)' % channel
    h_data_subtracted.style = styles.errorStyle(ROOT.kBlack)
    h_data_subtracted.legendText = 'Data (%s) subtr %3.1f' % (channel, h_data_subtracted.Integral())

    # Perform Fit
    y_p, y_m, y_L = map(u_float, ZPolarisationFit(
        h_data_subtracted, [templates[p] for p in ['p', 'm', 'L']],
        fit_plot_directory=os.path.join(plot_directory, 'polFits'),
        fit_filename="fit_pseudoData_%s_%s_%s" % (self.usePseudoData, channel, region),
        sumW2Error=False  # predict stat error
    ))

    templates['p'].Scale(y_p.val)
    templates['m'].Scale(y_m.val)
    templates['L'].Scale(y_L.val)
    templates['p'].style = styles.lineStyle(ROOT.kRed, width=2)
    templates['m'].style = styles.lineStyle(ROOT.kGreen, width=2)
    templates['L'].style = styles.lineStyle(ROOT.kMagenta, width=2)

    h_fitresults = sum_histos(templates.values())
    h_fitresults.style = styles.lineStyle(ROOT.kBlue, width=2)
    h_fitresults.legendText = "TTZ fit (sum)"

    histos = [h_background_mc + [h_ttz_mc], [templates['p']], [templates['m']], [templates['L']], [h_fitresults], [h_data]]
    plot = Plot.fromHisto(name="fit_plot_pseudoData_%s_%s_%s" % (self.usePseudoData, channel, region),
                          histos=histos, texX="cos#theta^{*}", texY="Events")
    plotting.draw(plot,
                  plot_directory=os.path.join(plot_directory, 'polFits'),
                  logX=False, logY=False, sorting=True,
                  legend=([0.15, 0.7, 0.90, 0.90], 2))

    templates['p'].legendText = 'pol(+) %3.1f #pm %3.1f' % (y_p.val, y_p.sigma)
    templates['m'].legendText = 'pol(-) %3.1f #pm %3.1f' % (y_m.val, y_m.sigma)
    templates['L'].legendText = 'pol(L) %3.1f #pm %3.1f' % (y_L.val, y_L.sigma)

    histos = [[h_ttz_mc], [templates['p']], [templates['m']], [templates['L']], [h_fitresults], [h_data_subtracted]]
    plot = Plot.fromHisto(name="fit_plot_subtracted_pseudoData_%s_%s_%s" % (self.usePseudoData, channel, region),
                          histos=histos, texX="cos#theta^{*}", texY="Events")
    plotting.draw(plot,
                  plot_directory=os.path.join(plot_directory, 'polFits'),
                  logX=False, logY=False, sorting=False,
                  legend=([0.15, 0.7, 0.90, 0.90], 2),
                  yRange=(0, 30),
                  )
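# Hedged sketch of a sum_histos helper consistent with its use in _estimate above
# (the real implementation is imported from the repository's tools): clone the
# first ROOT histogram so the inputs stay untouched, then add the rest in place.
def sum_histos_sketch(histos):
    h = histos[0].Clone()
    for h2 in histos[1:]:
        h.Add(h2)
    return h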
maxN = 2 if options.small else None

from TopEFT.samples.helpers import fromHeppySample

# select MC generation. For 2016, always use Summer16
MCgeneration = options.MCgeneration
if options.year == 2016:
    MCgeneration = "Summer16"

samples = [fromHeppySample(s, data_path=options.dataDir, maxN=maxN, MCgeneration=MCgeneration)
           for s in options.samples]
logger.debug("Reading from CMG tuples: %s", ",".join(",".join(s.files) for s in samples))

if len(samples) == 0:
    logger.info("No samples found. Was looking for %s. Exiting" % options.samples)
    sys.exit(-1)

isData = False not in [s.isData for s in samples]
isMC = True not in [s.isData for s in samples]

# Check that all samples which are concatenated have the same x-section.
assert isData or len(set([s.heppy.xSection for s in samples])) == 1, \
    "Not all samples have the same xSection: %s !" % (",".join([s.name for s in samples]))
assert isMC or len(samples) == 1, "Don't concatenate data samples"
                           'TRACE', 'NOTSET'],
                       default='INFO',
                       help="Log level for logging")
argParser.add_argument('--makeGridpack', action='store_true', help="make gridPack?")
argParser.add_argument('--calcXSec', action='store_true', help="calculate x-sec?")

args = argParser.parse_args()

logger = logger.get_logger(args.logLevel, logFile=None)

logger.debug("Coupling arguments: %r", args.couplings)

# Single argument -> interpret as file
if len(args.couplings) == 1 and os.path.isfile(args.couplings[0]):
    with open(args.couplings[0], 'r') as f:
        param_points = [line.rstrip().split() for line in f.readlines()]
# Interpret couplings
#elif len(args.couplings)>=2:
elif len(args.couplings) == 0 or len(args.couplings) >= 2:
    # make a list of the form [ ['c1', v1, v2, ...], ['c2', ...] ] so we can recurse in the couplings c1, c2, ...
    coupling_list = []
    for a in args.couplings:
        try:
            val = float(a)
        except ValueError:
            pYield += res
            pError += pYield.sigma**2

        #if not p == "signal":
        #    backgroundYield += pYield
        #totalYield += pYield

        if not options.postFit:
            postfix = "_%s" % year
            uncertainties = ['PU' + postfix, 'JEC' + postfix, 'btag_heavy' + postfix, 'btag_light' + postfix,
                             'trigger' + postfix, 'leptonSFSyst', 'leptonTracking', 'eleSFStat' + postfix,
                             'muSFStat' + postfix, 'scale', 'scale_sig', 'PDF', 'nonprompt', 'WZ_xsec',
                             'WZ_bb', 'WZ_powheg', 'WZ_njet', 'XG_xsec', 'ZZ_xsec', 'rare', 'ttX', 'Lumi' + postfix]
            for u in uncertainties:
                unc = 0.  # default, so a failed lookup below can't leave unc undefined
                try:
                    unc = getPreFitUncFromCard(cardFile, proc, u, binName)
                except:
                    logger.debug("No uncertainty %s for process %s" % (u, p))
                if unc > 0:
                    pError += (unc * pYield.val)**2

        totalUncertainty += pError
        if not p == "signal":
            backgroundUncertainty += pError

        if not p == "signal":
            backgroundYield += pYield
        totalYield += pYield

        if pYield.val > 0:
        os.remove(gen_file)
        logger.info("Deleted, because I overwrite.")

    # Produce the GEN file if needed
    if not os.path.exists(gen_file):
        logger.info("Making gen file")
        gen_dir = os.path.join(args.genSampleDir, gp)
        if not os.path.exists(gen_dir):
            os.makedirs(gen_dir)
        cfg_file = os.path.expandvars(args.cfg)
        if not os.path.exists(cfg_file):
            logger.error("cmsrun cfg %s not found. Exit.", cfg_file)
            sys.exit(1)
        command = "cd {gen_dir}; cmsRun {cfg_file} gridpack={gridpack} maxEvents={maxEvents} nJetMax={nJetMax} outputDir={gen_dir}".format(
            gen_dir=gen_dir, cfg_file=cfg_file, gridpack=args.gridpack,
            maxEvents=args.maxEvents, nJetMax=args.nJetMax)
        logger.debug("Executing %s", command)
        subprocess.call(command, shell=True)

    # Now it should be here either way
    if not os.path.exists(gen_file):
        logger.error("Edm gen file %s not found. Exit.", gen_file)
        sys.exit(1)

    # Run genpostprocessing
    command = "python {genPostprocessingScript} --targetDir={outDir} --inputFiles={gen_file} --targetSampleName={gp} --logLevel={logLevel}".format(
        genPostprocessingScript=os.path.expandvars("$CMSSW_BASE/src/TopEFT/postprocessing/genPostProcessing.py"),
        outDir=args.outDir,
        gen_file=gen_file,
        gp=gp,
        logLevel=args.logLevel
postProcessing_directory = "TopEFT_PP_2016_mva_v21/trilep/"
data_directory = "/afs/hephy.at/data/dspitzbart02/cmgTuples/"
from TopEFT.samples.cmgTuples_ttZ0j_Summer16_mAODv2_postProcessed import *

source_reco = ttZ0j_ll
#target_reco = ttZ0j_ll_DC1A_0p600000_DC1V_m0p240000_DC2A_0p176700_DC2V_0p176700
#target_reco = ttZ0j_ll_DC2A_0p200000_DC2V_0p200000
target_reco = ttZ0j_ll_DC1A_0p500000_DC1V_0p500000
#target_reco = ttZ0j_ll_DC1A_1p000000
#target_reco = ttZ0j_ll_DC1A_0p600000_DC1V_m0p240000_DC2V_m0p250000

# cutInterpreter & selectionString
from TopEFT.Tools.cutInterpreter import cutInterpreter
#reco_selection = cutInterpreter.cutString( 'trilep-lepSelTTZ-njet3p-btag1p-onZ')
reco_selection = 'nJetSelected>=3&&nBTag>=1&&min_dl_mass>=12&&abs(Z_mass - 91.1876)<=10&&Z_fromTight>0&&nLeptons_tight_3l==3&&nLeptons_tight_3l==3&&Sum$((lep_tight_3l*(lep_pt - lep_ptCorr) + lep_ptCorr)>40&&lep_tight_3l>0)>0&&Sum$((lep_tight_3l*(lep_pt - lep_ptCorr) + lep_ptCorr)>20&&lep_tight_3l>0)>1&&Sum$((lep_tight_3l*(lep_pt - lep_ptCorr) + lep_ptCorr)>10&&lep_tight_3l>0)>2&&!(nLeptons_tight_4l>=4)&&Flag_goodVertices&&Flag_HBHENoiseIsoFilter&&Flag_HBHENoiseFilter&&Flag_globalTightHalo2016Filter&&Flag_EcalDeadCellTriggerPrimitiveFilter&&Flag_badChargedHadronSummer2016&&Flag_badMuonSummer2016&&((HLT_TripleMu_12_10_5||HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ||HLT_Mu17_TrkIsoVVL_TkMu8_TrkIsoVVL_DZ)||(HLT_Ele16_Ele12_Ele8_CaloIdL_TrackIdL||HLT_Ele23_Ele12_CaloIdL_TrackIdL_IsoVL_DZ)||(HLT_Mu23_TrkIsoVVL_Ele8_CaloIdL_TrackIdL_IsoVL||HLT_Mu23_TrkIsoVVL_Ele8_CaloIdL_TrackIdL_IsoVL_DZ||HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL||HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ||HLT_Mu8_DiEle12_CaloIdL_TrackIdL||HLT_DiMu9_Ele9_CaloIdL_TrackIdL)||(HLT_SingleMuTTZ)||(HLT_SingleEleTTZ))'

logger.debug("Setting selectionString %s for reco samples", reco_selection)
source_reco.setSelectionString(reco_selection)
target_reco.setSelectionString(reco_selection)

# Histos
pt_bins = [(0, 100), (100, 200), (200, 400), (400, -1), (-1, -1)]
h_pt = {s: ROOT.TH1F('h_pt', 'h_pt', 10, 0, 500) for s in ['source', 'target', 'source_reweighted']}
h_pt['source'].legendText = "SM (reco)"
h_pt['source'].style = styles.lineStyle(ROOT.kBlue, errors=True, width=2)
h_pt['target'].legendText = "BSM (reco)"
h_pt['target'].style = styles.lineStyle(ROOT.kGreen + 1, errors=True, width=2)
h_pt['source_reweighted'].legendText = "SM (reco, reweighted)"
h_pt['source_reweighted'].style = styles.lineStyle(ROOT.kRed + 1, errors=True, width=2)

h_g_pt = {s: ROOT.TH1F('h_g_pt', 'h_g_pt', 10, 0, 500) for s in ['source', 'target', 'source_reweighted']}
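# Hedged sketch of the bin-wise reweighting this source/target comparison is set
# up for: the weight in each pt bin is target/source. Illustration only; the
# script's actual per-event reweighting comes from the template machinery above.
def reweight_histo_sketch(h_source, h_target):
    h_weight = h_target.Clone(h_target.GetName() + "_weight")
    h_weight.Divide(h_source)  # bin-wise target/source ratio
    return h_weight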
def make_batch_job(batch_job_file, batch_job_title, batch_output_dir, command):
    # If X509_USER_PROXY is set, use existing proxy.
    if options.dpm:
        if host == 'lxplus':
            from StopsDilepton.Tools.user import cern_proxy_certificate
            proxy_location = cern_proxy_certificate
        else:
            proxy_location = None

        from RootTools.core.helpers import renew_proxy
        proxy = renew_proxy(proxy_location)

        logger.info("Using proxy certificate %s", proxy)
        proxy_cmd = "export X509_USER_PROXY=%s" % proxy
    else:
        proxy_cmd = ""

    import subprocess

    if host == 'heplx':
        template = \
"""\
#!/bin/sh -x
#SBATCH -J {batch_job_title}
#SBATCH -D {pwd}
#SBATCH -o {batch_output_dir}batch-test.%j.out

{proxy_cmd}
voms-proxy-info -all
eval \`"scram runtime -sh"\`
echo CMSSW_BASE: {cmssw_base}
echo Executing user command
echo "{command}"
{command}

voms-proxy-info -all

""".format(
            command=command,
            cmssw_base=os.getenv("CMSSW_BASE"),
            batch_output_dir=batch_output_dir,
            batch_job_title=batch_job_title,
            pwd=os.getenv("PWD"),
            proxy_cmd=proxy_cmd
        )
    elif host == 'lxplus':
        template = \
"""\
#!/bin/bash
export CMSSW_PROJECT_SRC={cmssw_base}/src

cd $CMSSW_PROJECT_SRC
eval `scramv1 ru -sh`

alias python={python_release}
which python
python --version

{proxy_cmd}
voms-proxy-info -all

echo CMSSW_BASE: $CMSSW_BASE
cd {pwd}
echo Executing user command while in $PWD
echo "{command}"
{command}

voms-proxy-info -all

""".format(
            command=command,
            cmssw_base=os.getenv("CMSSW_BASE"),
            #batch_output_dir = batch_output_dir,
            #batch_job_title = batch_job_title,
            pwd=os.getenv("PWD"),
            proxy_cmd=proxy_cmd,
            python_release=subprocess.check_output(['which', 'python']).rstrip(),
        )

    batch_job = file(batch_job_file, "w")
    batch_job.write(template)
    batch_job.close()

    logger.debug("Local batch job file: %s", batch_job_file)
    logger.debug("Batch job:\n%s", template)

    return
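# Usage sketch (hedged): write a batch script for one command. The file name,
# title, output directory and submission call below are placeholders; actual
# submission (sbatch on heplx, bsub on lxplus) is handled by the caller.
#   make_batch_job("batch_job_0.sh", "job_0", "/tmp/batch_out/",
#                  "python genPostProcessing.py --logLevel INFO")
#   subprocess.call(["sbatch", "batch_job_0.sh"])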
                modified_couplings = {
                    fixedPoints[0]: v,
                    fixedPoints[1]: w,
                    comb[0]: x,
                    comb[1]: y,
                }
                #logger.info("Couplings: %s", ", ".join( [ "%s=%5.4f" % c for c in modified_couplings.items()] ))

                # Create configuration class
                config = Configuration(model_name=model_name)
                p = Process(process=proc, nEvents=50000, config=config)
                if p.hasXSec(modified_couplings=modified_couplings):
                    logger.debug("Couplings: %s",
                                 ", ".join(["%s=%5.4f" % c for c in modified_couplings.items()]))
                    xsec_val = p.xsec(modified_couplings=modified_couplings)
                    ratio = xsec_val / SM_xsec[proc]
                    x_list.append(x)
                    y_list.append(y)
                    z_list.append(ratio.val)
                else:
                    ratio = u_float(-1.)
                config.cleanup()

if interpolate:
    a = toGraph2D(nameStr, nameStr, len(x_list), x_list, y_list, z_list)