def makePlotOp(OP=(), cutTree=None, cut=None, label=""):
    """Build plotting operations and attach them below *cut* in *cutTree*.

    OP is a 2-tuple: (operation class name as a string, optional parameter
    PSet or None).  Returns the list of created operations so the caller
    can keep them alive (the framework does not own them).
    """
    out = []
    # FIX: compare against None with "is not", not "!=".
    if OP[1] is not None:
        plotpset = deepcopy(OP[1])
        plotpset.DirName = label
        # Parenthesized print works under both Python 2 and 3.
        print(plotpset.DirName)
        # NOTE: eval on a caller-supplied class name; OP comes from this
        # package's own configuration, not untrusted input.
        op = eval(OP[0] + "(plotpset.ps())")
    else:
        op = eval(OP[0])
    out.append(op)
    cutTree.TAttach(cut, op)
    # Standard alphaT cut plus an event dump below it.
    alpha = OP_CommonAlphaTCut(0.55)
    dump = EventDump()
    # Skim operation is constructed but currently left unattached.
    skim_ps = PSet(SkimName="myskim", DropBranches=False, Branches=[" keep * "])
    skim = SkimOp(skim_ps.ps())
    cutTree.TAttach(cut, alpha)
    cutTree.TAttach(alpha, dump)
    out.append(alpha)
    out.append(dump)
    return out
def createRatioPlots(ratio, truth=False, pt_default=50., ht_default=350., pt1_default=125., pt2_default=100.):
    """Append RobPlottingOps ratio histograms to the list *ratio*.

    Scans alphaT (0.500-0.550 in steps of 0.005), jet pT (20-50 GeV in
    steps of 10) and a +/-20% scale factor on the leading-jet thresholds.
    """
    # Define pT bins to be used
    pt_min = 20
    pt_max = 50
    pt_step = 10
    # Define x3 from pT and HT defaults for analysis
    x3_default = (2. * pt_default) / (ht_default + pt_default)
    factor = (2. - x3_default) / x3_default
    # Determine maximum values for HT and Meff (from max pT value)
    ht_max = pt_max * 1. * factor
    meff_max = pt_max * 1. + ht_max
    # Iterate through AlphaT values
    for ii in range(0, 11):
        alphat = 500 + ii * 5
        # Iterate through pT values
        for pt in range(pt_min, pt_max + pt_step, pt_step):
            # Define HT and Meff from pT value
            ht = pt * 1. * factor
            meff = pt * 1. + ht
            # Define scaled pT values appropriate for given Meff
            pt1 = pt1_default * meff / meff_max
            pt2 = pt2_default * meff / meff_max
            # FIX: the original used range() with float arguments, which
            # raises TypeError, and compounded pt1/pt2 with "*=" across
            # iterations.  Iterate integer tenths and scale per iteration.
            for scale_idx in range(8, 12):
                jj = scale_idx / 10.
                pt1_scaled = pt1 * jj
                pt2_scaled = pt2 * jj
                # Define histograms (dir_name: avoid shadowing builtin dir)
                dir_name = "Ratio" + str(alphat) + "Pt" + str(pt) + "Scale" + str(jj)
                ratio.append(
                    RobPlottingOps(
                        PSet(
                            DirName=dir_name,
                            MinObjects=2,
                            MaxObjects=8,
                            Ratio=True,
                            MinJetPt=pt * 1.,
                            MinJetPt1=pt1_scaled,
                            MinJetPt2=pt2_scaled,
                            UseGen=truth,
                            MinGenPt=pt_min * 1.,
                            MaxGenMET=10.,
                            AlphaTcut=(alphat / 1000.),
                        ).ps()))
def AddBinedHist(cutTree=None, OP=(), cut=None, htBins=[], TriggerDict=None, lab=""):
    """Attach per-HT-bin plot operations (and optional triggers) below *cut*.

    For each (lower, upper) HT bin an HT cut chain is attached to
    *cutTree*; when *TriggerDict* is given, an OP_MultiTrigger keyed by
    "<lower>" or "<lower>_<upper>" is inserted before the HT cuts.
    Returns the list of created operations.

    NOTE: the two original copy-pasted branches (with/without triggers)
    are merged into one loop; behavior and attachment order are unchanged.
    """
    out = []
    # Pair each lower edge with the next edge; the last bin is open-ended.
    for lower, upper in zip(htBins, htBins[1:] + [None]):
        # Skip the open-ended variants of the 325 and 375 bins.
        if int(lower) == 325 and upper is None:
            continue
        if int(lower) == 375 and upper is None:
            continue
        # Direct calls replace eval("RECO_CommonHTCut(%d)" % lower);
        # int() preserves the %d truncation of the original.
        lowerCut = RECO_CommonHTCut(int(lower))
        parent = cut
        if TriggerDict is not None:
            triggerps = PSet(Verbose=False,
                             UsePreScaledTriggers=False,
                             Triggers=[])
            triggerps.Triggers = TriggerDict["%d%s" %
                                             (lower,
                                              "_%d" % upper if upper else "")]
            Trigger = OP_MultiTrigger(triggerps.ps())
            out.append(triggerps)
            out.append(Trigger)
            cutTree.TAttach(parent, Trigger)
            parent = Trigger
        out.append(lowerCut)
        cutTree.TAttach(parent, lowerCut)
        if upper:
            upperCut = RECO_CommonHTLessThanCut(int(upper))
            out.append(upperCut)
            cutTree.TAttach(lowerCut, upperCut)
        pOps = makePlotOp(cutTree=cutTree,
                          OP=OP,
                          cut=upperCut if upper else lowerCut,
                          label="%s%d%s" %
                          (lab, lower, "_%d" % upper if upper else ""))
        out.append(pOps)
    return out
def CorrectionPset(susydir, corrfile):
    """Read a residual jet-energy-correction table into a PSet.

    Each non-empty row of susydir+corrfile is whitespace-separated; the
    eta-low (col 0), eta-high (col 1) and correction (col 5) values are
    collected into parallel lists.

    FIX: the original looped over the columns of each row and appended
    the same three values once per column, duplicating every entry
    len(row) times.  Each row is now read exactly once, and the file is
    closed via a with-statement.
    """
    Eta1 = []
    Eta2 = []
    Corr = []
    with open(susydir + corrfile, "r") as CorFile:
        for aRow in CorFile:
            rows = aRow.split()
            if not rows:
                # Empty rows contributed nothing in the original either.
                continue
            Eta1.append(float(rows[0]))
            Eta2.append(float(rows[1]))
            Corr.append(float(rows[5]))
    ps = PSet(EtaLow=Eta1, EtaHi=Eta2, Correction=Corr)
    return ps
# Tail of a CorrectionPset-style helper: the enclosing "def" line is above
# this chunk, so only its final statements are visible here.
    ps = PSet(EtaLow=Eta1, EtaHi=Eta2, Correction=Corr)
    return ps


# Locate the checked-out SUSY working area from the environment.
YourSusyDir = commands.getoutput('echo $SUSY_WORKING_SW_DIR')

# Residual jet-energy-correction tables for calo and AK5 PF jets.
caloResCorrections = CorrectionPset(
    YourSusyDir, "/framework/python/icf/ResidualJetEnergyCorrections.txt")
ak5pfResCorrections = CorrectionPset(
    YourSusyDir, "/framework/python/icf/Spring10DataV2_L2L3Residual_AK5PF.txt")

# Configuration for the event-diagnostic slide maker (LaTeX output).
evDiagPs = PSet(
    LatexFilename="diagSlides.tex",
    Triggers=[
        "HLT_HT100U",
        "HLT_HT120U",
        "HLT_HT140U",
        "HLT_HT150U_v3",
        "HLT_HT160U_v1",
        "HLT_HT160U_v3",
    ],
    # CaloJetCorrections = caloResCorrections.ps(),
)
evDiag = OP_PFDiagnosticSlideMaker(evDiagPs.ps(), caloResCorrections.ps(),
                                   ak5pfResCorrections.ps())

# Attach event id and diagnostics below the MHT/MET cut of the data tree.
#cutTreeData.TAttach(MHToverMET2om_350plus,evDisplay)
cutTreeData.TAttach(MHToverMET2om_350plus, evId)
cutTreeData.TAttach(MHToverMET2om_350plus, evDiag)

# # # number of common jets = 2
cutTreeData.TAttach(count_total, numComJets2Please)
cutTreeData.TAttach(numComJets2Please, count_numComJets2eq)
from icf.core import PSet, Analysis
from icf.config import defaultConfig
from copy import deepcopy
from icf.JetCorrections import *

# -----------------------------------------------------------------------------
# Samples
#import yours in your running script

# -----------------------------------------------------------------------------
# Reading the collections from the ntuple
default_ntuple = deepcopy(defaultConfig.Ntuple)

# Branch prefixes/suffixes and lepton ID working points for each collection.
default_ntuple.Electrons = PSet(
    Prefix="electron",
    Suffix="Pat",
    LooseID="EIDLoose",
    TightID="EIDTight",
)
default_ntuple.Muons = PSet(
    Prefix="muon",
    Suffix="Pat",
    LooseID="IsGlobalMuon",
    TightID="IDGlobalMuonPromptTight",
)
default_ntuple.SecMuons = PSet(Prefix="muon", Suffix="PF")
default_ntuple.Taus = PSet(Prefix="tau",
                           Suffix="Pat",
                           LooseID="TauIdbyTaNCfrOnePercent",
                           TightID="TauIdbyTaNCfrTenthPercent")
# NOTE(review): this statement continues past the end of this chunk.
default_ntuple.Jets = PSet(
    Prefix="ic5Jet",
#======================== #Avaiable criteria for MC and for Data are at current slightly different Hence the making of two trees # #jet multiplicity numComJetsGeq2 = OP_NumComJets(">=", 2) numComJetsEq2 = OP_NumComJets("==", 2) numComJetsGeq = OP_NumComJets(">=", 3) #JetTrigger = OP_TriggerCut("HLT_HT100U") #new trigger operations (TW) #This will be changing in the new run to the HLT_HTXXX_AlphaT0pYY triggers datatriggerps = PSet(Verbose=False, Triggers=[ "HLT_HT100U", "HLT_HT120U", "HLT_HT140U", "HLT_HT150U_v3", "HLT_HT160U_v1", "HLT_HT160U_v3", ]) DataTrigger = OP_HadronicDataTrigger(datatriggerps.ps()) #MC trigger - uses second jet collection (IC5, uncorrected) to act as trigger emulator. mctriggerps = PSet( Verbose=False, JetPtCut=20., HTCut=140., ) mcTrigger = OP_HadronicMCTrigger(mctriggerps.ps()) #NoiseFilt = OP_HadronicHBHEnoiseFilter()
# conf_ak5_calo.Common.print_out() anal_ak5_caloData = Analysis("AK5Calo") addCutFlowData(anal_ak5_caloData) # AK5 PF conf_ak5_pfData = deepcopy(defaultConfig) conf_ak5_pfData.Ntuple = deepcopy(ak5_pf) conf_ak5_pfData.XCleaning = deepcopy(default_cc) conf_ak5_pfData.Common = deepcopy(default_common) anal_ak5_pfData = Analysis("AK5PF") addCutFlowData(anal_ak5_pfData) tedSkim = PSet(Name="TedSkim", Format=("ICF", 3), Weight=1.0, File="../results_" + strftime("%d_%b_%H") + "/WithSingleTop//Data/AK5Calo_Jets.root") #"~elaird1/public_html/73_candidates/v3/350_bin/calo.root") from data.Run2011.HT_Run2011_promptReco_DCS import * from data.Run2011.HT42_incomplete import * from data.Run2011.RA1ToBurn import * from data.Run2011.HTRun2011AB import * outDir = "../results_" + strftime("%d_%b_%H") + "//Data37" ensure_dir(outDir) # HT42_incomplete.LastEntry = 100 # HT42_incomplete.File = HT42_incomplete.File[0:1] anal_ak5_caloData.Run(outDir, conf_ak5_caloData, [HTRun2011AB])
conf_ak5_caloData.Common = deepcopy(default_common) # conf_ak5_calo.Common.print_out() anal_ak5_caloData = Analysis("AK5Calo") addCutFlowData(anal_ak5_caloData) # AK5 PF conf_ak5_pfData = deepcopy(defaultConfig) conf_ak5_pfData.Ntuple = deepcopy(ak5_pf) conf_ak5_pfData.XCleaning = deepcopy(default_cc) conf_ak5_pfData.Common = deepcopy(default_common) anal_ak5_pfData = Analysis("AK5PF") addCutFlowData(anal_ak5_pfData) tedSkim = PSet(Name="TedSkim", Format=("ICF", 3), Weight=1.0, File="../resultsWithSingleTop//Data/AK5Calo_Jets.root") #"~elaird1/public_html/73_candidates/v3/350_bin/calo.root") from data.Run2011.HT_Run2011_promptReco_DCS import * from data.Run2011.HT42_incomplete import * from data.Run2011.RA1ToBurn import * outDir = "../results/Data43" ensure_dir(outDir) anal_ak5_caloData.Run(outDir, conf_ak5_caloData, [HT42_incomplete]) # from data.MultiJet_Run2010B_Nov4ReReco_v1 import * # # anal_ak5_pfData.Run("../results/Data",conf_ak5_pfData,[HT_Run2011_promptReco_DCS]) # anal_ak5_jptData.Run("../results/",conf_ak5_jptData,data)
# NOTE(review): the opening "[" of this sample list is above this chunk.
    QCD_Pt_1400to1800_7TeV_pythia8_Summer10_START36_V10_S09_v2,
    QCD_Pt_1800to2200_7TeV_pythia8_Summer10_START36_V10_S09_v2,
    QCD_Pt_2200to2600_7TeV_pythia8_Summer10_START36_V10_S09_v2,
    QCD_Pt_2600to3000_7TeV_pythia8_Summer10_START36_V10_S09_v2,
    QCD_Pt_3000to3500_7TeV_pythia8_Summer10_START36_V10_S09_v2
]

QCD_PYTHIA6 = [
    QCD_Pt80_7TeV_Pythia, QCD_Pt170_7TeV_Pythia, QCD_Pt300_7TeV_Pythia,
    QCD_Pt470_7TeV_Pythia, QCD_Pt800_7TeV_Pythia, QCD_Pt1400_7TeV_Pythia
]

# Baseline hadronic plotting configuration; the variants below share it
# via deepcopy and differ only in DirName.
pset_standard_hadronic_smallPFMET_noLowerLimit = PSet(
    DirName="HadronicCommon_smallPFMET_noLowerLimit",
    MinObjects=1,
    MaxObjects=10,
    StandardPlots=True,
    CountPlots=False,
    DeadECALPlots=True,
    BabyPlots=True)

pset_standard_hadronic_beforeAlphaT = deepcopy(
    pset_standard_hadronic_smallPFMET_noLowerLimit)
pset_standard_hadronic_beforeAlphaT.DirName = "HadronicCommon_beforeAlphaT"

pset_standard_hadronic_PFMET = deepcopy(
    pset_standard_hadronic_smallPFMET_noLowerLimit)
pset_standard_hadronic_PFMET.DirName = "HadronicCommon_PFMET"

pset_standard_hadronic_PFMET_babyMHT = deepcopy(
    pset_standard_hadronic_smallPFMET_noLowerLimit)
pset_standard_hadronic_PFMET_babyMHT.DirName = "HadronicCommon_PFMET_babyMHT"
# Data analysis configurations for AK5 calo and AK5 PF jet collections.
conf_ak5_caloData = deepcopy(defaultConfig)
conf_ak5_caloData.Ntuple = deepcopy(ak5_calo)
conf_ak5_caloData.XCleaning = deepcopy(default_cc)
conf_ak5_caloData.Common = deepcopy(default_common)
# conf_ak5_calo.Common.print_out()
anal_ak5_caloData = Analysis("AK5Calo")
addCutFlowData(anal_ak5_caloData)

# AK5 PF
conf_ak5_pfData = deepcopy(defaultConfig)
conf_ak5_pfData.Ntuple = deepcopy(ak5_pf)
conf_ak5_pfData.XCleaning = deepcopy(default_cc)
conf_ak5_pfData.Common = deepcopy(default_common)
anal_ak5_pfData = Analysis("AK5PF")
addCutFlowData(anal_ak5_pfData)

from data.Run2011.HT_Run2011_promptReco_DCS import *
from data.Run2011.RA1ToBurn import *
from data.Run2011.HT42_incomplete import *
from data.Run2011.HT_Run2011A import *
from data.Run2011.HTRun2011AB import *

# Single local test ntuple (unused by the Run call below).
test = PSet(Name="test",
            Format=("ICF", 3),
            Weight=1.0,
            File="../../Ntuples/375_newFormat.root")

outdir = "../results_" + strftime("%d_%b_%H") + "/Data/"
ensure_dir(outdir)
anal_ak5_caloData.Run(outdir, conf_ak5_caloData, [HTRun2011AB])
#HT_Run2011A])
# Common object selections for the kinematic-correction analysis.
conf.Common.Jets.EtCut = 20.0
conf.Common.Jets.EtaCut = 5.0
conf.Common.Muons.EtaCut = 2.4
conf.Common.Muons.PtCut = 15.0
conf.Common.Muons.TrkIsoCut = -1.
conf.Common.Muons.CombIsoCut = 0.1
conf.Common.Electrons.PtCut = 15.0
conf.Common.Electrons.EtaCut = 2.4
conf.Common.Photons.EtCut = 30.
conf.Common.Electrons.ApplyID = False

# Create the analysis
a = Analysis("KinCorrection")
tree = Tree("Main")

Trigger = PSet(McAnal=False, MSugraScan=0., TriggerBits=PSet(bit1=""))
ssdlTrigg = SSDLTrigger("Trigger", Trigger.ps())
ZeroMuons = OP_NumComMuons("==", 0)

# W-hypothesis kinematic-correction parameters (W+ via wID=+24).
# NOTE(review): mZ=90.188 looks like a typo for the PDG Z mass 91.1876 GeV
# — confirm against the consuming C++ operation before relying on it.
# NOTE(review): this PSet continues past the end of this chunk.
KinCorrParPlus = PSet(
    zMassMin=71.,
    zMassMax=111.,
    BarrelEtaMax=1.4442,
    EndCapEtaMin=1.56,
    EndCapEtaMax=2.5,
    MinElecRescaledPt=0.,
    c_ErNu_pt=0.,
    wID=+24.,  ##24 W+, -24 W-
    mW=80.398,
    mZ=90.188,
# NOTE(review): the opening "[" of this sample list is above this chunk.
    QCD_Pt_300to380_7TeV_pythia8_Summer10_START36_V10_S09_v1,
    QCD_Pt_30to50_7TeV_pythia8_Summer10_START36_V10_S09_v2,
    QCD_Pt_380to470_7TeV_pythia8_Summer10_START36_V10_S09_v1,
    QCD_Pt_470to600_7TeV_pythia8_Summer10_START36_V10_S09_v1,
    QCD_Pt_50to80_7TeV_pythia8_Summer10_START36_V10_S09_v1,
    QCD_Pt_600to800_7TeV_pythia8_Summer10_START36_V10_S09_v1,
    QCD_Pt_800to1000_7TeV_pythia8_Summer10_START36_V10_S09_v2,
    QCD_Pt_80to120_7TeV_pythia8_Summer10_START36_V10_S09_v1]

# -----------------------------------------------------------------------------
# Reading the collections from the ntuple
default_ntuple = deepcopy(defaultConfig.Ntuple)

default_ntuple.Electrons=PSet(
    Prefix="electron",
    Suffix="Pat",
    LooseID="EIDLoose",
    TightID="EIDTight",
)
default_ntuple.Muons=PSet(
    Prefix="muon",
    Suffix="Pat",
    LooseID="IsGlobalMuon",
    TightID="IDGlobalMuonPromptTight",
)
default_ntuple.Taus=PSet(
    Prefix="tau",
    Suffix="Pat",
    LooseID="TauIdbyTaNCfrOnePercent",
    TightID="TauIdbyTaNCfrTenthPercent"
)
# NOTE(review): this statement continues past the end of this chunk.
default_ntuple.Jets=PSet(
# Tail of a makedirs helper (ensure_dir-style): the enclosing "def" and the
# "path" binding are above this chunk.
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        if exc.errno == errno.EEXIST:
            # Directory already exists: not an error here.
            pass
        else:
            raise

# -----------------------------------------------------------------------------
# Reading the collections from the ntuple
default_ntuple = deepcopy(defaultConfig.Ntuple)

default_ntuple.Electrons = PSet(
    Prefix="electron",
    Suffix="Pat",
    LooseID="EIDLoose",
    TightID="EIDTight",
)
default_ntuple.Muons = PSet(
    Prefix="muon",
    Suffix="Pat",
    LooseID="IsGlobalMuon",
    TightID="IDGlobalMuonPromptTight",
)
default_ntuple.SecMuons = PSet(Prefix="muon", Suffix="PF")
default_ntuple.Taus = PSet(Prefix="tau",
                           Suffix="Pat",
                           LooseID="TauIdbyTaNCfrOnePercent",
                           TightID="TauIdbyTaNCfrTenthPercent")
# NOTE(review): this statement continues past the end of this chunk.
default_ntuple.Jets = PSet(
    Prefix="ic5Jet",
from libHadronic import *
from icf.core import PSet, Analysis
from icf.config import defaultConfig
from copy import deepcopy
# from montecarlo.LMx import *
# from bryn_samples import *
# from montecarlo.WJets_Madgraph import *

# -----------------------------------------------------------------------------
# Samples
#from local_samples import *
# from met_samples import *
# from JetMetTau139375_Uncleaned import *

# Single local test ntuple.
f = PSet(Name="Testing",
         File=["../SusyCAF_Tree_9_1.root"],
         Format=("ICF", 2),
         Weight=1.0)

# -----------------------------------------------------------------------------
# Reading the collections from the ntuple
default_ntuple = deepcopy(defaultConfig.Ntuple)

default_ntuple.Electrons = PSet(
    Prefix="electron",
    Suffix="Pat",
    LooseID="EIDLoose",
    TightID="EIDTight",
)
# NOTE(review): this statement continues past the end of this chunk.
default_ntuple.Muons = PSet(
    Prefix="muon",
    Suffix="Pat",
from icf.core import PSet, Analysis
from icf.config import defaultConfig
from icf.utils import json_to_pset
from allhadronic.golden_cff import *
from copy import deepcopy

# Output directory under the checked-out SUSY working area.
pwd = commands.getoutput('echo $SUSY_WORKING_SW_DIR')
output_dir = pwd + "/results/v60"

# -----------------------------------------------------------------------------
# Reading the collections from the ntuple
default_ntuple = deepcopy(defaultConfig.Ntuple)

default_ntuple.Electrons = PSet(
    Prefix="electron",
    Suffix="Pat",
    LooseID="EIDLoose",
    TightID="EIDTight",
)
default_ntuple.Muons = PSet(
    Prefix="muon",
    Suffix="Pat",
    LooseID="IsGlobalMuon",
    TightID="IDGlobalMuonPromptTight",
)
default_ntuple.Taus = PSet(Prefix="tau",
                           Suffix="Pat",
                           LooseID="TauIdbyTaNCfrOnePercent",
                           TightID="TauIdbyTaNCfrTenthPercent")
import setupSUSY
from libFrameworkSUSY import *
from libHadronic import *
from liballhadronic import *
from icf.core import PSet, Analysis
from icf.config import defaultConfig
from copy import deepcopy

# -----------------------------------------------------------------------------
# Reading the collections from the ntuple
default_ntuple = deepcopy(defaultConfig.Ntuple)

# Branch prefixes/suffixes and lepton ID working points.
default_ntuple.Electrons = PSet(
    Prefix="electron",
    Suffix="Pat",
    LooseID="EIDLoose",
    TightID="EIDTight",
)
default_ntuple.Muons = PSet(
    Prefix="muon",
    Suffix="Pat",
    LooseID="IsGlobalMuon",
    TightID="IDGlobalMuonPromptTight",
)
default_ntuple.Taus = PSet(Prefix="tau",
                           Suffix="Pat",
                           LooseID="TauIdbyTaNCfrOnePercent",
                           TightID="TauIdbyTaNCfrTenthPercent")
from libHadronic import *
from icf.core import PSet,Analysis
from icf.config import defaultConfig
from copy import deepcopy

# -----------------------------------------------------------------------------
# Samples
# put your samples in here or import a file containing your data samples
from met_samples import *

# -----------------------------------------------------------------------------
# Reading the collections from the ntuple
default_ntuple = deepcopy(defaultConfig.Ntuple)

default_ntuple.Electrons=PSet(
    Prefix="electron",
    Suffix="Pat",
    LooseID="EIDLoose",
    TightID="EIDTight",
)
default_ntuple.Muons=PSet(
    Prefix="muon",
    Suffix="Pat",
    LooseID="IsGlobalMuon",
    TightID="IDGlobalMuonPromptTight",
)
# No tau ID working points configured here (unlike sibling configs).
default_ntuple.Taus=PSet(
    Prefix="tau",
    Suffix="Pat",
)
# NOTE(review): this statement continues past the end of this chunk.
default_ntuple.Jets=PSet(
    Prefix="ic5Jet",
    Suffix="Pat",
from icf.core import PSet, Analysis
from icf.config import defaultConfig
from copy import deepcopy
from icf.JetCorrections import *

# -----------------------------------------------------------------------------
# Samples
#import yours in your running script

# -----------------------------------------------------------------------------
# Reading the collections from the ntuple
default_ntuple = deepcopy(defaultConfig.Ntuple)

default_ntuple.Electrons = PSet(
    Prefix="electron",
    Suffix="Pat",
    LooseID="EIDLoose",
    TightID="EIDTight",
)
default_ntuple.Muons = PSet(
    Prefix="muon",
    Suffix="Pat",
    LooseID="IsGlobalMuon",
    TightID="IDGlobalMuonPromptTight",
)
default_ntuple.Taus = PSet(Prefix="tau",
                           Suffix="Pat",
                           LooseID="TauIdbyTaNCfrOnePercent",
                           TightID="TauIdbyTaNCfrTenthPercent")
# NOTE(review): this statement continues past the end of this chunk.
default_ntuple.Jets = PSet(
    Prefix="ic5Jet",
    Suffix="Pat",
# NOTE(review): the opening of this trigger list is above this chunk.
    "HLT_HT550_v6",
    "HLT_HT550_v7",
    "",
]

# Disable QCD samples for datasets 2 and 3.
NoQcdOption = False
if ( dataset == 2 or dataset == 3 ) :
    NoQcdOption = True

# -----------------------------------------------------------------------------
# Reading the collections from the ntuple
default_ntuple = deepcopy(defaultConfig.Ntuple)

default_ntuple.Electrons=PSet(
    Prefix="electron",
    Suffix="Pat",
    LooseID="EIDLoose",
    TightID="EIDTight",
)
default_ntuple.Muons=PSet(
    Prefix="muon",
    Suffix="Pat",
    LooseID="IsGlobalMuon",
    TightID="IDGlobalMuonPromptTight",
)
# NOTE(review): this statement continues past the end of this chunk.
default_ntuple.Taus=PSet(
    Prefix="tau",
    Suffix="Pat",
    LooseID="TauIdbyTaNCfrOnePercent",
    TightID="TauIdbyTaNCfrTenthPercent"
# Notes: # * A few variables need the 38 ntuples (pixel hits, number of chambers). from icf.core import PSet vbtfmuonidps = PSet( Verbose = False, LooseCuts = PSet( #ID variables RequireTrackerMuon = False, RequireGlobalMuon = True, ChiSquaredCut = 999999., NumChambersCut = 0, NumGlobalHits = 0, NumTrackerHits = 0, NumPixelHits = 0, DxyCut = 9999999., ),#end of LooseCuts TightCuts = PSet( #ID variables RequireTrackerMuon = True, RequireGlobalMuon = True, ChiSquaredCut = 10.0, NumChambersCut = 2, NumGlobalHits = 1, NumTrackerHits = 10, NumPixelHits = 1, DxyCut = 0.2, #it's in cm, right? This should be 2mm. ),#end of TightCuts )#end of VBTF electron ID PSet
# conf_ak5_jMCpt.Common.print_out() anal_ak5_jptMC = Analysis("AK5JPT") addCutFlowMC(anal_ak5_jptMC) # AK7 Calo conf_ak7_caloMC = deepcopy(defaultConfig) conf_ak7_caloMC.Ntuple = deepcopy(ak7_calo) conf_ak7_caloMC.XCleaning = deepcopy(default_cc) conf_ak7_caloMC.Common = deepcopy(default_common) # conf_ak5_calo.Common.print_out() anal_ak7_caloMC = Analysis("AK7Calo") addCutFlowMC(anal_ak7_caloMC) outDir = "../results_" + strftime("%d_%b_%H") + "//NoSmear37/" ensure_dir(outDir) testFile = PSet( Name="SingleEvent", Format=("ICF", 3), File="/home/hep/elaird1/84_darrens_event/event.root", Weight=1.0, ) anal_ak5_caloMC.Run(outDir, conf_ak5_caloMC, MC) # anal_ak5_pfMC.Run(outDir,conf_ak5_pfMC,MC) # anal_ak5_pfMC.Run("../results_"+strftime("%d_%b_%H")+"//NoSmear",conf_ak5_pfMC,[QCD_AllPtBins_7TeV_Pythia]) # anal_ak5_jptMC.Run("../results_"+strftime("%d_%b_%H")+"//NoSmear",conf_ak5_jptMC,MC) # anal_ak5_jptMC.Run("../results_"+strftime("%d_%b_%H")+"//NoSmear",conf_ak5_jptMC,[QCD_AllPtBins_7TeV_Pythia]) # anal_ak7_caloMC.Run("../results_"+strftime("%d_%b_%H")+"//NoSmear",conf_ak7_caloMC,MC) # anal_ak7_caloMC.Run("../results_"+strftime("%d_%b_%H")+"//NoSmear",conf_ak7_caloMC,[QCD_AllPtBins_7TeV_Pythia])
#!/usr/bin/env python """ Plot settings for Bryn's plotting operations. """ from libbryn import * from icf.core import PSet from copy import deepcopy #plot configuration pset_2om = PSet( DirName="250_infGev_2om", MinObjects=2, MaxObjects=15, StandardPlots=True, ) pset_2eq = PSet( DirName="250_infGev_2eq", MinObjects=2, MaxObjects=2, StandardPlots=True, ) pset_3om = PSet( DirName="250_infGev_3om", MinObjects=3, MaxObjects=15, StandardPlots=True,
# NOTE(review): the opening "[" of this sample list is above this chunk.
    QCD_Pt_2200to2600_7TeV_pythia8_Summer10_START36_V10_S09_v2,
    QCD_Pt_3000to3500_7TeV_pythia8_Summer10_START36_V10_S09_v2
]
MCS = [QCD_Pt_230to300_7TeV_pythia8_Summer10_START36_V10_S09_v2]

# ttW plotting at successive stages of the cut flow.
ttWPlottingOpsMHT120 = RECO_ttWPlotting("ttW_AfterMHT120")
ttWPlottingOps100 = RECO_ttWPlotting("ttW_After250")
ttWPlottingOps350 = RECO_ttWPlotting("ttW_After350")
ttWPlottingOpsAT = RECO_ttWPlotting("ttW_AfterAT")

psetS = PSet(DirName="After_350Gev",
             MinObjects=2,
             MaxObjects=9,
             StandardPlots=True,
             DeadECALPlots=False,
             CleaningControlPlots=False,
             MECPlots=False,
             DeadECALFile="./deadRegionList_START36_V9.txt",
             DeadECAL_MinJetPtCut=10.,
             DeadECAL_MinBiasCut=0.5)
HadStandard350 = HadronicCommonPlots(psetS.ps())

# NOTE(review): this statement continues past the end of this chunk.
psetMHT = PSet(
    # DirName = "HadronicCommon_1",
    # MinObjects = 2,
    # MaxObjects = 6,
    # StandardPlots = False,
    # DeadECALPlots = True,
    # CleaningControlPlots = True,
    # MECPlots = True,
    # DeadECALFile = "./deadRegionList_START36_V9.txt",
#anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,[LM4]) #anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,[LM5]) #QCD #anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,[QCD_AllPtBins_7TeV_Pythia]) #anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,QCD_Pythia8_ALL) #anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,QCD_MadGraph_ALL) # #anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,QCD_Pythia6_384patch3_V14_00_02_ALL) #anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,QCD_Pythia8_384patch3_V14_00_02_ALL) #anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,[PhotonJet_AllPtBins_7TeV_Pythia]) #anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,PhotonJet_MadGraph_ALL) #anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,[ttbarTauola]) #wjets_madgraph_vols.FirstEntry = 0 #wjets_madgraph_vols.LastEntry = 199999 #anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,[wjets_madgraph]) #anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,[wjets_madgraph_vols]) #anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,[zjets_madgraph]) #anal_ak5_caloMC.Run("../results_100_100_50_eff",conf_ak5_calo,[Zinvisible_jets]) myMagical14events = PSet( #from 15pb-1 Name="14magicalEvents", Weight=1., Format=("ICF", 2), File=[ "/vols/cms02/whyntie/public/ntuple_14eventsPassingAlphaT_15invpb.root", ]) #anal_ak5_caloData.Run("../results_100_100_50_eff",conf_ak5_calo,[myMagical14events]) #15.0865/pb