#--------------------------------------------------------------------------------
#
# Add histograms, numbers in FilterStatisticsTables and run + event numbers
# stored as DQM MonitorElements in different ROOT files
#
# NOTE: The jobs get submitted to the '1nh' queue,
#       which allows for an execution time of the cmsRun jobs of up to 1 hour
#       (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit);
#        see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system)
#
#--------------------------------------------------------------------------------

# Harvest Z --> tau tau, Z --> mu mu and InclusivePPmuX in turn;
# every submission is identical apart from the sample name.
for harvestSample in [ "Ztautau", "Zmumu", "InclusivePPmuX" ]:
    submitToBatch(configFile = "harvestZtoElecMuPlots_cfg.py", channel = "ZtoElecMu",
                  sample = harvestSample,
                  replFunction = makeReplacementsHarvesting,
                  replacements = "inputFilePath = " + inputFilePath
                    + "; recoSampleDefinitionsFile = TauAnalysis.Configuration.plotZtoElecMu_processes_7TeV_cfi",
                  job = "harvesting", queue = "1nh", outputFilePath = outputFilePath)

# harvest PPmuXptGt20
#-------------------------------------------------------------------------------- # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) #-------------------------------------------------------------------------------- # W --> tau nu jobs for i in range(4): #4 submitToBatch( configFile="runWtoTauNu_cfg.py", channel="WtoTauNu", sample="Wtaunu_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsAnalysis, replacements="maxEvents = -1; inputFileType = " + inputFileType + ";inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false", job="analysis", queue="1nd", outputDirectory=outputDirectory) #QCD jobs for i in range(33): #33 submitToBatch( configFile="runWtoTauNu_cfg.py", channel="WtoTauNu", sample="qcd_W_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsAnalysis, replacements="maxEvents = -1; inputFileType = " + inputFileType + ";inputFilePath = " + inputFilePath +
# reprocessed with CMSSW_3_1_2, skimmed by Letizia and Manuel # # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # # -------------------------------------------------------------------------------- # Z --> tau+ tau- jobs for i in range(16): submitToBatch( configFile="prodNtupleZtoMuTau_cfg.py", channel="ZtoMuTau", sample="Ztautau_7TeV_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsBgEstNtupleProd, replacements="maxEvents = -1; skipEvents = 0; intLumiData = intLumiZtoMuTau_Data_7TeV; globalTag = MC_36Y_V7A::All", job="ntupleProd", queue="1nd", outputFilePath=outputFilePath, ) # Z --> mu+ mu- jobs for i in range(15): submitToBatch( configFile="prodNtupleZtoMuTau_cfg.py", channel="ZtoMuTau", sample="Zmumu_7TeV_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsBgEstNtupleProd, replacements="maxEvents = -1; skipEvents = 0; intLumiData = intLumiZtoMuTau_Data_7TeV; globalTag = MC_36Y_V7A::All", job="ntupleProd", queue="1nd",
# reprocessed with CMSSW_2_2_3, skimmed by Letizia and Monica # # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # # -------------------------------------------------------------------------------- # Z --> tau tau jobs for i in range(3): submitToBatch( configFile="skimZtoElecMu_cfg.py", channel="ZtoElecMu", sample="Ztautau_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsBgEstSkim, replacements="maxEvents = -1", job="bgEstSkim", queue="1nd", outputDirectory=outputDirectory, ) # Z --> e e jobs submitToBatch( configFile="skimZtoElecMu_cfg.py", channel="ZtoElecMu", sample="Zee", replFunction=makeReplacementsBgEstSkim, replacements="maxEvents = -1", job="bgEstSkim", queue="1nd", outputDirectory=outputDirectory,
# Add histograms, numbers in FilterStatisticsTables and run + event numbers # stored as DQM MonitorElements in different ROOT files # # NOTE: The jobs get submitted to the '1nh' queue, # which allows for an execution time of the cmsRun jobs of up to 1 hour # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # 7TeV samples # harvest 2010A data for i in range(9): submitToBatch(configFile = "harvestZtoElecTauPlots_cfg.py", channel = "ZtoElecTau", sample = "Data_7TeV" + "_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsHarvesting, replacements = "inputFilePath = " + inputFilePath, job = "harvesting", queue = "1nd", outputFilePath = outputFilePath, type = "data") # harvest 2010B data for i in range(14): submitToBatch(configFile = "harvestZtoElecTauPlots_cfg.py", channel = "ZtoElecTau", sample = "Data2010B" + "_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsHarvesting, replacements = "inputFilePath = " + inputFilePath, job = "harvesting", queue = "1nd", outputFilePath = outputFilePath, type = "data") # harvest Z --> tau tau submitToBatch(configFile = "harvestZtoElecTauPlots_cfg.py", channel = "ZtoElecTau", sample = "Ztautau", replFunction = makeReplacementsHarvesting, replacements = "inputFilePath = " + inputFilePath, job = "harvesting", queue = "1nd", outputFilePath = outputFilePath, type = "mc") # harvest Z --> e e
('Ztautau',5), ('Zee',28), ('QCD_EMenriched_Pt20to30',16), ('QCD_EMenriched_Pt30to80',83), ('QCD_EMenriched_Pt80to170',30), ('QCD_BCtoE_Pt20to30',24), ('QCD_BCtoE_Pt30to80',27), ('QCD_BCtoE_Pt80to170',15), ('WplusJets',18), ('ZplusJets',13), ('TTplusJets',18), # ('PhotonJets_Pt15to20',-1), # ('PhotonJets_Pt20to25',-1), # ('PhotonJets_Pt25to30',-1), # ('PhotonJets_Pt30to25',-1), # ('PhotonJets_PtGt35',-1) ('ZeePlusJets',13), ('ZtautauPlusJets',13), ] #samples = [ # ('AH115bb_tautau',1) #] for s in samples: for p in range(1,s[1]+1): submitToBatch(configFile = "prodNtuplebbAHtoElecTau_cfg.py", channel = "bbAHtoElecTau", sample = "%s_part%02d"%(s[0],p), replFunction = makeReplacementsBgEstNtupleProd, replacements = "maxEvents = -1; skipEvents = 0", job = "bgEstNtupleProd", queue = "1nd", outputDirectory = outputDirectory)
#!/usr/bin/env python from TauAnalysis.Configuration.submitToBatch import submitToBatch from TauAnalysis.Skimming.tools.makeReplacementsSkimByRunEventNumbers import makeReplacementsSkimByRunEventNumbers # name of the directory (either on afs area or castor) # to which all .root files produced by the cmsRun job will be copied outputFilePath = "/castor/cern.ch/user/v/veelken/CMSSW_3_1_2/" #-------------------------------------------------------------------------------- # # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # pp --> mu X QCD jobs for i in range(21): submitToBatch(configFile = "skimByRunEventNumbers_cfg.py", channel = "ZtoMuTau", sample = "InclusivePPmuX_10TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsSkimByRunEventNumbers, replacements = "maxEvents = -1; recoSampleDefinitionsFileName = recoSampleDefinitionsZtoMuTau_10TeV_cfi; runEventNumberFileName = /afs/cern.ch/user/v/veelken/scratch0/CMSSW_3_1_4/src/TauAnalysis/DQMTools/test/selEvents_ZtoTauNu_InclusivePPmuX_passed.txt", job = "skimByRunEventNumbers", queue = "1nd", outputFilePath = outputFilePath)
# # Monte Carlo samples from Summer'09 production # reprocessed with CMSSW_3_1_2, skimmed by Sunil and Artur # # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # Z --> tau tau jobs for i in range(6): submitToBatch(configFile = "runZtoElecMu_cfg.py", channel = "ZtoElecMu", sample = "Ztautau_7TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; globalTag = START3X_V27::All; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false", job = "analysis", queue = "1nd", outputFilePath = outputFilePath) # Z --> mu mu + jets jobs for i in range(2): submitToBatch(configFile = "runZtoElecMu_cfg.py", channel = "ZtoElecMu", sample = "ZmumuPlusJets_7TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; globalTag = START3X_V27::All; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false", job = "analysis", queue = "1nd", outputFilePath = outputFilePath) # Z --> e e + jets jobs for i in range(2): submitToBatch(configFile = "runZtoElecMu_cfg.py", channel = "ZtoElecMu", sample = "ZeePlusJets_7TeV_part%(i)02d" % {"i" : (i + 1)},
('QCD_EMenriched_Pt80to170', 30), ('QCD_BCtoE_Pt20to30', 24), ('QCD_BCtoE_Pt30to80', 27), ('QCD_BCtoE_Pt80to170', 15), ('WplusJets', 18), ('ZplusJets', 13), ('TTplusJets', 18), # ('PhotonJets_Pt15to20',-1), # ('PhotonJets_Pt20to25',-1), # ('PhotonJets_Pt25to30',-1), # ('PhotonJets_Pt30to25',-1), # ('PhotonJets_PtGt35',-1) ('ZeePlusJets', 13), ('ZtautauPlusJets', 13), ] #samples = [ # ('AH115bb_tautau',1) #] for s in samples: for p in range(1, s[1] + 1): submitToBatch(configFile="prodNtuplebbAHtoElecTau_cfg.py", channel="bbAHtoElecTau", sample="%s_part%02d" % (s[0], p), replFunction=makeReplacementsBgEstNtupleProd, replacements="maxEvents = -1; skipEvents = 0", job="bgEstNtupleProd", queue="1nd", outputDirectory=outputDirectory)
# processed with CMSSW_38_x, skimmed by Jeff Kolb # # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # # 7TeV data samples # for i in range(402): submitToBatch(configFile = "producePatTuple_cfg.py", channel = "ZtoElecTau", sample = "Data2010A_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsPatProduction, replacements = "maxEvents = -1; globalTag = GR_R_38X_V14::All", job = "PatProduction", queue = "1nd", outputFilePath = outputFilePath, type = "data") for i in range(417,1097): submitToBatch(configFile = "producePatTuple_cfg.py", channel = "ZtoElecTau", sample = "Data2010B_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsPatProduction, replacements = "maxEvents = -1; globalTag = GR10_P_V10::All", job = "PatProduction", queue = "1nd", outputFilePath = outputFilePath, type = "data") # # 7TeV MC samples # # Z --> tau tau jobs for i in range(21): submitToBatch(configFile = "producePatTuple_cfg.py", channel = "ZtoElecTau",
# ('QCD_EMenriched_Pt30to80',83), # ('QCD_EMenriched_Pt80to170',30), # ('QCD_BCtoE_Pt20to30',24), # ('QCD_BCtoE_Pt30to80',27), # ('QCD_BCtoE_Pt80to170',15), ('WplusJets',18), ('ZplusJets',13), ('TTplusJets',18), # ('PhotonJets_Pt15to20',-1), # ('PhotonJets_Pt20to25',-1), # ('PhotonJets_Pt25to30',-1), # ('PhotonJets_Pt30to25',-1), # ('PhotonJets_PtGt35',-1) ('ZeePlusJets',13), ('ZtautauPlusJets',13), ] for s in samples: if len(s)==2: s = (s[0],s[1],-1) if s[1]==-1: submitToBatch(configFile = "runbbAHtoElecTau_cfg.py", channel = "bbAHtoElecTau", sample = s[0], replFunction = makeReplacementsAnalysis, replacements = "maxEvents = %d; applyFactorization = false; estimateSysUncertainties = false"%s[2], job = "analysis", queue = "8nh", outputDirectory = outputDirectory) else: for p in range(1,s[1]+1): submitToBatch(configFile = "runbbAHtoElecTau_cfg.py", channel = "bbAHtoElecTau", sample = "%s_part%02d"%(s[0],p), replFunction = makeReplacementsAnalysis, replacements = "maxEvents = %d; applyFactorization = false; estimateSysUncertainties = false"%s[2], job = "analysis", queue = "8nh", outputDirectory = outputDirectory)
# # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # Z --> tau tau jobs for i in range(15): submitToBatch( configFile="runFakeRateAnalysisZtoMuTau_cfg.py", channel="ZtoMuTau", sample="Ztautau_10TeV_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsAnalysis, replacements="maxEvents = -1; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false", job="frAnalysis", queue="1nd", outputFilePath=outputFilePath) # Z --> mu mu jobs for i in range(17): submitToBatch( configFile="runFakeRateAnalysisZtoMuTau_cfg.py", channel="ZtoMuTau", sample="Zmumu_10TeV_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsAnalysis, replacements="maxEvents = -1; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath +
# # Monte Carlo samples from Summer'09 production # reprocessed with CMSSW_3_1_2, skimmed by Letizia and Manuel # # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # Z --> tau+ tau- jobs for i in range(16): submitToBatch(configFile = "runBgEstTemplateProductionZtoMuTau_cfg.py", channel = "ZtoMuTau", sample = "Ztautau_7TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = true; disableEventDump = true", job = "bgEstTemplateProduction", queue = "1nw", outputFilePath = outputFilePath) # Z --> mu+ mu- jobs for i in range(15): submitToBatch(configFile = "runBgEstTemplateProductionZtoMuTau_cfg.py", channel = "ZtoMuTau", sample = "Zmumu_7TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false; disableEventDump = true", job = "bgEstTemplateProduction", queue = "1nd", outputFilePath = outputFilePath) # pp --> mu X QCD jobs for i in range(28): submitToBatch(configFile = "runBgEstTemplateProductionZtoMuTau_cfg.py", channel = "ZtoMuTau", sample = "InclusivePPmuX_7TeV_part%(i)02d" % {"i" : (i + 1)},
# # Monte Carlo samples from Summer'09 production # reprocessed with CMSSW_3_1_2, skimmed by Letizia and Manuel # # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # Z --> tau tau jobs for i in range(3): submitToBatch(configFile = "prodNtupleZtoMuTau_cfg.py", channel = "ZtoMuTau", sample = "Ztautau_10TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsBgEstNtupleProd, replacements = "maxEvents = -1; skipEvents = 0; intLumiData = intLumiZtoMuTau_Data_10TeV", job = "bgEstNtupleProd", queue = "1nd", outputFilePath = outputFilePath) # Z --> mu mu jobs for i in range(9): submitToBatch(configFile = "prodNtupleZtoMuTau_cfg.py", channel = "ZtoMuTau", sample = "Zmumu_10TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsBgEstNtupleProd, replacements = "maxEvents = -1; skipEvents = 0; intLumiData = intLumiZtoMuTau_Data_10TeV", job = "bgEstNtupleProd", queue = "1nd", outputFilePath = outputFilePath) # pp --> mu X QCD jobs for i in range(2): submitToBatch(configFile = "prodNtupleZtoMuTau_cfg.py", channel = "ZtoMuTau", sample = "InclusivePPmuX_10TeV_part%(i)02d" % {"i" : (i + 1)},
# # NOTE: The jobs get submitted to the '8nh' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '8nh' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # # 7 TeV data samples # for i in range(417): submitToBatch(configFile = "runZtoElecTau_cfg.py", channel = "ZtoElecTau", sample = "Data_7TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; globalTag = GR_R_36X_V12B::All; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false", job = "analysis", queue = "8nh", outputFilePath = outputFilePath, type = "data", resourceRequest = None, submit = "yes") for i in range(417,1097): submitToBatch(configFile = "runZtoElecTau_cfg.py", channel = "ZtoElecTau", sample = "Data2010B_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; globalTag = GR10_P_V10::All; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false", job = "analysis", queue = "8nh", outputFilePath = outputFilePath, type = "data", resourceRequest = None, submit = "yes") # # 7 TeV MC samples #
#-------------------------------------------------------------------------------- # # Monte Carlo samples from Summer'08 production # reprocessed with CMSSW_2_2_3, skimmed by Jeff Kolb # # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # Z --> tau tau jobs for i in range(10): submitToBatch(configFile = "skimZtoElecTau_cfg.py", channel = "ZtoElecTau", sample = "Ztautau_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsBgEstSkim, replacements = "maxEvents = -1", job = "bgEstSkim", queue = "1nd", outputDirectory = outputDirectory) # Z --> e e jobs for i in range(24): submitToBatch(configFile = "skimZtoElecTau_cfg.py", channel = "ZtoElecTau", sample = "Zee_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsBgEstSkim, replacements = "maxEvents = -1", job = "bgEstSkim", queue = "1nd", outputDirectory = outputDirectory) # Photon + jets jobs submitToBatch(configFile = "skimZtoElecTau_cfg.py", channel = "ZtoElecTau", sample = "PhotonJets_Pt15to20", replFunction = makeReplacementsBgEstSkim, replacements = "maxEvents = -1", job = "bgEstSkim", queue = "1nd", outputDirectory = outputDirectory) submitToBatch(configFile = "skimZtoElecTau_cfg.py", channel = "ZtoElecTau", sample = "PhotonJets_Pt20to25", replFunction = makeReplacementsBgEstSkim, replacements = "maxEvents = -1", job = "bgEstSkim", queue = "1nd", outputDirectory = outputDirectory)
# NOTE: The jobs get submitted to the '1nh' queue, # which allows for an execution time of the cmsRun jobs of up to 1 hour # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # 7TeV samples # harvest 2010A data for i in range(9): submitToBatch(configFile="harvestZtoElecTauPlots_cfg.py", channel="ZtoElecTau", sample="Data_7TeV" + "_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsHarvesting, replacements="inputFilePath = " + inputFilePath, job="harvesting", queue="1nd", outputFilePath=outputFilePath, type="data") # harvest 2010B data for i in range(14): submitToBatch(configFile="harvestZtoElecTauPlots_cfg.py", channel="ZtoElecTau", sample="Data2010B" + "_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsHarvesting, replacements="inputFilePath = " + inputFilePath, job="harvesting", queue="1nd", outputFilePath=outputFilePath,
# #-------------------------------------------------------------------------------- # # 7 TeV data samples # for i in range(417): submitToBatch( configFile="runZtoElecTau_cfg.py", channel="ZtoElecTau", sample="Data_7TeV_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsAnalysis, replacements= "maxEvents = -1; globalTag = GR_R_36X_V12B::All; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false", job="analysis", queue="8nh", outputFilePath=outputFilePath, type="data", resourceRequest=None, submit="yes") for i in range(417, 1097): submitToBatch( configFile="runZtoElecTau_cfg.py", channel="ZtoElecTau", sample="Data2010B_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsAnalysis, replacements=
# calculate file ranges numOutputFiles = numInputFiles/numInputFilesPerOutputFile rem = numInputFiles%numInputFilesPerOutputFile if rem != 0: numOutputFiles += 1 # name of the directory (either on afs area or castor) # to which all .root files produced by the cmsRun job will be copied castorDirectory = "/castor/cern.ch/user/j/jkolb/eTauSkims/Summer09_CMSSW_3_1_4/" outputDirectory = castorDirectory + sampleName # set input file path and base filename inputFileNameBase = castorDirectory + sampleName + "/skimElecTau_" # loop over output files, submitting one job each for i in range(0, numOutputFiles): # calculate range of input files for this job min = (i * numInputFilesPerOutputFile ) + 1 max = (i + 1) * numInputFilesPerOutputFile if i + 1 == numOutputFiles: if rem != 0: max = (i * numInputFilesPerOutputFile) + rem print "Doing output file",i+1,":",min,"-",max submitToBatch(configFile = "mergeSkimOutput_cfg.py", channel = "ZtoElecTau", sample = "%(sName)s_%(i)02d" % {"sName" : sampleName, "i" : (i + 1)}, replFunction = makeReplacementsMerge, replacements = "part = %(i)02d; minFileNum = %(min)d; maxFileNum = %(max)d; maxEvents = -1; inputFileNameBase = %(inDir)s" % {"i" : (i + 1), "min" : min, "max" : max, "inDir" : inputFileNameBase }, job = "merge", queue = "8nh", outputFilePath = outputDirectory, resourceRequest = "", submit = sub)
for i in range(0, numOutputFiles): # calculate range of input files for this job min = (i * numInputFilesPerOutputFile) + 1 max = (i + 1) * numInputFilesPerOutputFile if i + 1 == numOutputFiles: if rem != 0: max = (i * numInputFilesPerOutputFile) + rem print "Doing output file", i + 1, ":", min, "-", max submitToBatch( configFile="mergeSkimOutput_cfg.py", channel="ZtoElecTau", sample="%(sName)s_%(i)02d" % { "sName": sampleName, "i": (i + 1) }, replFunction=makeReplacementsMerge, replacements= "part = %(i)02d; minFileNum = %(min)d; maxFileNum = %(max)d; maxEvents = -1; inputFileNameBase = %(inDir)s" % { "i": (i + 1), "min": min, "max": max, "inDir": inputFileNameBase }, job="merge", queue="8nh", outputFilePath=outputDirectory, resourceRequest="", submit=sub)
#-------------------------------------------------------------------------------- # # Monte Carlo samples from Summer'09 production # reprocessed with CMSSW_3_1_2, skimmed by Letizia and Manuel # # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # Z --> tau tau jobs for i in range(15): submitToBatch(configFile = "skimZtoMuTau_cfg.py", channel = "ZtoMuTau", sample = "Ztautau_10TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsBgEstSkim, replacements = "maxEvents = -1", job = "bgEstSkim", queue = "1nd", outputFilePath = outputFilePath) # Z --> mu mu jobs for i in range(17): submitToBatch(configFile = "skimZtoMuTau_cfg.py", channel = "ZtoMuTau", sample = "Zmumu_10TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsBgEstSkim, replacements = "maxEvents = -1", job = "bgEstSkim", queue = "1nd", outputFilePath = outputFilePath) # pp --> mu X QCD jobs for i in range(21): submitToBatch(configFile = "skimZtoMuTau_cfg.py", channel = "ZtoMuTau", sample = "InclusivePPmuX_10TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsBgEstSkim, replacements = "maxEvents = -1", job = "bgEstSkim", queue = "1nd", outputFilePath = outputFilePath)
# which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # # 7TeV data samples # for i in range(402): submitToBatch(configFile="producePatTuple_cfg.py", channel="ZtoElecTau", sample="Data2010A_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsPatProduction, replacements="maxEvents = -1; globalTag = GR_R_38X_V14::All", job="PatProduction", queue="1nd", outputFilePath=outputFilePath, type="data") for i in range(417, 1097): submitToBatch(configFile="producePatTuple_cfg.py", channel="ZtoElecTau", sample="Data2010B_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsPatProduction, replacements="maxEvents = -1; globalTag = GR10_P_V10::All", job="PatProduction", queue="1nd", outputFilePath=outputFilePath, type="data")
# Monte Carlo samples from Summer'09 production # reprocessed with CMSSW_3_1_2, skimmed by Letizia and Manuel # # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # Z --> tau tau jobs for i in range(15): submitToBatch(configFile="skimZtoMuTau_cfg.py", channel="ZtoMuTau", sample="Ztautau_10TeV_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsBgEstSkim, replacements="maxEvents = -1", job="bgEstSkim", queue="1nd", outputFilePath=outputFilePath) # Z --> mu mu jobs for i in range(17): submitToBatch(configFile="skimZtoMuTau_cfg.py", channel="ZtoMuTau", sample="Zmumu_10TeV_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsBgEstSkim, replacements="maxEvents = -1", job="bgEstSkim", queue="1nd", outputFilePath=outputFilePath)
#--------------------------------------------------------------------------------
# Add histograms, numbers in FilterStatisticsTables and run + event numbers
# stored as DQM MonitorElements in different ROOT files
#
# NOTE: The jobs get submitted to the '1nh' queue,
#       which allows for an execution time of the cmsRun jobs of up to 1 hour
#       (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit);
#        see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system)
#--------------------------------------------------------------------------------

# Harvest the W --> tau nu signal sample, then the QCD background sample;
# both submissions are identical apart from the sample name.
for harvestSample in ["Wtaunu", "qcd_W"]:
    submitToBatch(configFile="harvestWtoTauNuPlots_cfg.py",
                  channel="WtoTauNu",
                  sample=harvestSample,
                  replFunction=makeReplacementsHarvesting,
                  replacements="",
                  job="harvesting",
                  queue="1nh",
                  outputDirectory=outputDirectory)

# harvest W --> mu nu
# # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # MSSM Higgs A/H --> tau+ tau- jobs for i in range(8): submitToBatch( configFile="runAHtoMuTau_cfg.py", channel="AHtoMuTau", sample="AH120_tautau_7TeV_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsAnalysis, replacements="maxEvents = -1; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false; disableEventDump = true; globalTag = MC_36Y_V7A::All", job="analysis", queue="1nd", outputFilePath=outputFilePath) for i in range(4): submitToBatch( configFile="runAHtoMuTau_cfg.py", channel="AHtoMuTau", sample="AHbb120_tautau_7TeV_part%(i)02d" % {"i": (i + 1)}, replFunction=makeReplacementsAnalysis, replacements="maxEvents = -1; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false; disableEventDump = true; globalTag = MC_36Y_V7A::All", job="analysis",
# Add histograms, numbers in FilterStatisticsTables and run + event numbers # stored as DQM MonitorElements in different ROOT files # # NOTE: The jobs get submitted to the '1nh' queue, # which allows for an execution time of the cmsRun jobs of up to 1 hour # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # harvest Z --> tau tau submitToBatch( configFile="harvestZtoElecMuPlots_cfg.py", channel="ZtoElecMu", sample="Ztautau", replFunction=makeReplacementsHarvesting, replacements="inputFilePath = " + inputFilePath + "; recoSampleDefinitionsFile = TauAnalysis.Configuration.plotZtoElecMu_processes_10TeV_cfi", job="harvesting", queue="1nh", outputFilePath=outputFilePath) # harvest Z --> mu mu submitToBatch( configFile="harvestZtoElecMuPlots_cfg.py", channel="ZtoElecMu", sample="Zmumu", replFunction=makeReplacementsHarvesting, replacements="inputFilePath = " + inputFilePath + "; recoSampleDefinitionsFile = TauAnalysis.Configuration.plotZtoElecMu_processes_10TeV_cfi", job="harvesting", queue="1nh",
# NOTE(review): whitespace-collapsed fragment of a ZtoMuTau fake-rate analysis
# submission script (Summer'08 / CMSSW_3_1_2 samples). Everything sits behind the
# leading '#' (dead text) and the fragment is truncated mid-call on the
# InclusivePPmuX job. Recover the formatted original from version control.
# # Monte Carlo samples from Summer'08 production # reprocessed with CMSSW_3_1_2, skimmed by Letizia and Monica # # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # Z --> tau tau jobs for i in range(15): submitToBatch(configFile = "runFakeRateAnalysisZtoMuTau_cfg.py", channel = "ZtoMuTau", sample = "Ztautau_10TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false", job = "frAnalysis", queue = "1nd", outputFilePath = outputFilePath) # Z --> mu mu jobs for i in range(17): submitToBatch(configFile = "runFakeRateAnalysisZtoMuTau_cfg.py", channel = "ZtoMuTau", sample = "Zmumu_10TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false", job = "frAnalysis", queue = "1nd", outputFilePath = outputFilePath) # pp --> mu X QCD jobs for i in range(21): submitToBatch(configFile = "runFakeRateAnalysisZtoMuTau_cfg.py", channel = "ZtoMuTau", sample = "InclusivePPmuX_10TeV_part%(i)02d" % {"i" : (i + 1)},
# NOTE(review): whitespace-collapsed fragment of an AHtoElecMu analysis submission
# script. The first line is entirely behind a leading '#' (dead text) and ends with
# a dangling 'sample = '; the second line is its orphaned continuation — bare code
# that is a syntax error on its own, and itself truncated mid-call on the
# AH160tautau job. Recover the formatted original from version control.
# Monte Carlo samples from Summer'08 production # reprocessed with CMSSW_2_2_3, skimmed by Sunil and Giuseppe # # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- numevts = "maxEvents = -1" myQueue = "2nd" # Higgs jobs (mass = 115 GeV) submitToBatch(configFile = "runAHtoElecMu_cfg.py", channel = "AHtoElecMu", sample = "AH115tautau", replFunction = makeReplacementsAnalysis, replacements = numevts+"; applyFactorization = false; estimateSysUncertainties = false", job = "analysis", queue = myQueue, outputDirectory = outputDirectory) submitToBatch(configFile = "runAHtoElecMu_cfg.py", channel = "AHtoElecMu", sample = "AH115bbtautau", replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; applyFactorization = false; estimateSysUncertainties = false", job = "analysis", queue = myQueue, outputDirectory = outputDirectory) ## submitToBatch(configFile = "runAHtoElecMu_cfg.py", channel = "AHtoElecMu", sample = "AH115tautau2l", ## replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; applyFactorization = false; estimateSysUncertainties = false", ## job = "analysis", queue = myQueue, outputDirectory = outputDirectory) ## for i in range(22): ## submitToBatch(configFile = "runAHtoElecMu_cfg.py", channel = "AHtoElecMu", sample = "AH115bbtautau2l_part%(i)02d" % {"i" : (i + 1)}, ## replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; applyFactorization = false; estimateSysUncertainties = false", ## job = "analysis", queue = myQueue, outputDirectory = outputDirectory) # Higgs jobs (mass = 160 GeV) submitToBatch(configFile = "runAHtoElecMu_cfg.py", channel = "AHtoElecMu", sample = 
"AH160tautau", replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; applyFactorization = false; estimateSysUncertainties = false",
#
#--------------------------------------------------------------------------------
# Submit one harvesting job per Monte Carlo sample for the bbAHtoElecTau channel.
#
# DEFECT FIXED: this script had been collapsed onto a single line beginning with
# '#', which turned the entire block into one dead comment (nothing executed).
# Restored to properly formatted, executable Python; code content is unchanged.
#--------------------------------------------------------------------------------

# Samples to harvest; commented-out entries are currently disabled.
samples = [
    #'AH115_tautau',
    #'AH160_tautau',
    'AH115bb_tautau',
    'AH160bb_tautau',
    'Ztautau',
    'Zee',
    #'QCD_EMenriched_Pt20to30',
    #'QCD_EMenriched_Pt30to80',
    #'QCD_EMenriched_Pt80to170',
    #'QCD_BCtoE_Pt20to30',
    #'QCD_BCtoE_Pt30to80',
    #'QCD_BCtoE_Pt80to170',
    'WplusJets',
    'ZplusJets',
    'TTplusJets',
    #'PhotonJets_Pt15to20',
    #'PhotonJets_Pt20to25',
    #'PhotonJets_Pt25to30',
    #'PhotonJets_Pt30to25',
    #'PhotonJets_PtGt35',
    'ZeePlusJets',
    'ZtautauPlusJets'
]

# Harvesting is cheap, so the short '1nh' (1 hour) queue is used for every sample.
for s in samples:
    submitToBatch(configFile = "harvestbbAHtoElecTauPlots_cfg.py", channel = "bbAHtoElecTau", sample = s,
                  replFunction = makeReplacementsHarvesting, replacements = "",
                  job = "harvesting", queue = "1nh", outputDirectory = outputDirectory)
# NOTE(review): whitespace-collapsed fragment of an AHtoMuTau analysis submission
# script (Summer'09 / CMSSW_3_1_2 samples) — a near-duplicate of the fragment a few
# lines above. The leading '#' makes all embedded code dead text, and the fragment
# is truncated mid-call on the Ztautau_7TeV jobs. Recover the formatted original
# from version control before use.
# # Monte Carlo samples from Summer'09 production # reprocessed with CMSSW_3_1_2, skimmed by Letizia and Manuel # # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) # #-------------------------------------------------------------------------------- # MSSM Higgs A/H --> tau+ tau- jobs for i in range(8): submitToBatch(configFile = "runAHtoMuTau_cfg.py", channel = "AHtoMuTau", sample = "AH120_tautau_7TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false; disableEventDump = true; globalTag = MC_36Y_V7A::All", job = "analysis", queue = "1nd", outputFilePath = outputFilePath) for i in range(4): submitToBatch(configFile = "runAHtoMuTau_cfg.py", channel = "AHtoMuTau", sample = "AHbb120_tautau_7TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false; disableEventDump = true; globalTag = MC_36Y_V7A::All", job = "analysis", queue = "1nd", outputFilePath = outputFilePath) # Z --> tau+ tau- jobs for i in range(16): submitToBatch(configFile = "runAHtoMuTau_cfg.py", channel = "AHtoMuTau", sample = "Ztautau_7TeV_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false; disableEventDump = true; globalTag = MC_36Y_V7A::All",
# NOTE(review): orphaned fragment of the bbAHtoElecTau harvesting script (a
# duplicate of the complete version two lines above). It begins in the middle of
# the 'samples' list — the 'samples = [' opener was lost — so this line is a
# syntax error on its own. Prefer the complete copy above; this residue should be
# removed once the file is restored from version control.
'AH160bb_tautau', 'Ztautau', 'Zee', #'QCD_EMenriched_Pt20to30', #'QCD_EMenriched_Pt30to80', #'QCD_EMenriched_Pt80to170', #'QCD_BCtoE_Pt20to30', #'QCD_BCtoE_Pt30to80', #'QCD_BCtoE_Pt80to170', 'WplusJets', 'ZplusJets', 'TTplusJets', #'PhotonJets_Pt15to20', #'PhotonJets_Pt20to25', #'PhotonJets_Pt25to30', #'PhotonJets_Pt30to25', #'PhotonJets_PtGt35', 'ZeePlusJets', 'ZtautauPlusJets' ] for s in samples: submitToBatch(configFile="harvestbbAHtoElecTauPlots_cfg.py", channel="bbAHtoElecTau", sample=s, replFunction=makeReplacementsHarvesting, replacements="", job="harvesting", queue="1nh", outputDirectory=outputDirectory)
# name of the directory (either on afs area or castor) to which all .root files
# produced by the Harvesting job will be copied
outputFilePath = "/castor/cern.ch/user/l/liis/CMSSW_38X/Histograms/"

inputFilePath = "rfio:" + outputFilePath

#--------------------------------------------------------------------------------
# Add histograms, numbers in FilterStatisticsTables and run + event numbers
# stored as DQM MonitorElements in different ROOT files
#
# NOTE: The jobs get submitted to the '1nh' queue,
#       which allows for an execution time of the cmsRun jobs of up to 1 hour
#       (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit);
#       see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system)
#
# DEFECT FIXED: this script had been collapsed onto two run-on lines (the first a
# dead comment, the second a syntax error starting with 'nu submitToBatch').
# Restored to properly formatted, executable Python; code content is unchanged.
#--------------------------------------------------------------------------------

# harvest W --> tau nu
submitToBatch(configFile = "harvestWtoTauNuPlots_cfg.py", channel = "WtoTauNu", sample = "Wtaunu",
              replFunction = makeReplacementsHarvesting,
              replacements = "inputFilePath = " + inputFilePath + "; recoSampleDefinitionsFile = TauAnalysis.Configuration.plotWtoTauNu_processes_7TeV_cfi",
              job = "harvesting", queue = "1nh", outputFilePath = outputFilePath )

# harvest Z+jets
submitToBatch(configFile = "harvestWtoTauNuPlots_cfg.py", channel = "WtoTauNu", sample = "ZplusJets",
              replFunction = makeReplacementsHarvesting,
              replacements = "inputFilePath = " + inputFilePath + "; recoSampleDefinitionsFile = TauAnalysis.Configuration.plotWtoTauNu_processes_7TeV_cfi",
              job = "harvesting", queue = "1nh", outputFilePath = outputFilePath )

# harvest W --> mu nu
submitToBatch(configFile = "harvestWtoTauNuPlots_cfg.py", channel = "WtoTauNu", sample = "Wmunu",
              replFunction = makeReplacementsHarvesting,
              replacements = "inputFilePath = " + inputFilePath + "; recoSampleDefinitionsFile = TauAnalysis.Configuration.plotWtoTauNu_processes_7TeV_cfi",
              job = "harvesting", queue = "1nh", outputFilePath = outputFilePath )

# harvest W --> e nu
# NOTE(review): this job uses the '1nd' (24 h) queue while its siblings use '1nh',
# contradicting the header comment — confirm this is intentional (kept as-is).
submitToBatch(configFile = "harvestWtoTauNuPlots_cfg.py", channel = "WtoTauNu", sample = "Wenu",
              replFunction = makeReplacementsHarvesting,
              replacements = "inputFilePath = " + inputFilePath + "; recoSampleDefinitionsFile = TauAnalysis.Configuration.plotWtoTauNu_processes_7TeV_cfi",
              job = "harvesting", queue = "1nd", outputFilePath = outputFilePath )

# harvest qcd
# Monte Carlo samples from Summer'08 production
# reprocessed with CMSSW_2_2_3, skimmed by Letizia and Monica
#
# NOTE: The jobs get submitted to the '1nd' queue,
#       which allows for an execution time of the cmsRun jobs of up to 24 hours
#       (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit);
#       see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system)
#
# DEFECT FIXED: this script had been collapsed onto a single line beginning with
# '#', which made the whole block a dead comment (no jobs would ever be
# submitted). Restored to formatted, executable Python; code content unchanged.
#--------------------------------------------------------------------------------

# Z --> tau tau jobs
for i in range(3):
    submitToBatch(configFile="skimZtoElecMu_cfg.py", channel="ZtoElecMu", sample="Ztautau_part%(i)02d" % {"i": (i + 1)},
                  replFunction=makeReplacementsBgEstSkim, replacements="maxEvents = -1",
                  job="bgEstSkim", queue="1nd", outputDirectory=outputDirectory)

# Z --> e e jobs
submitToBatch(configFile="skimZtoElecMu_cfg.py", channel="ZtoElecMu", sample="Zee",
              replFunction=makeReplacementsBgEstSkim, replacements="maxEvents = -1",
              job="bgEstSkim", queue="1nd", outputDirectory=outputDirectory)

# Z --> mu mu jobs
#--------------------------------------------------------------------------------
#
# Add histograms, numbers in FilterStatisticsTables and run + event numbers
# stored as DQM MonitorElements in different ROOT files
#
# NOTE: The jobs get submitted to the '1nh' queue,
#       which allows for an execution time of the cmsRun jobs of up to 1 hour
#       (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit);
#       see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system)
#
# DEFECT FIXED: this script had been collapsed onto a single line beginning with
# '#', which made the whole block a dead comment (no jobs would ever be
# submitted). Restored to formatted, executable Python; code content unchanged.
#--------------------------------------------------------------------------------

# harvest Z --> tau+ tau-
submitToBatch(configFile = "../../Configuration/test/harvestZtoMuTauPlots_cfg.py", channel = "ZtoMuTau", sample = "Ztautau",
              replFunction = makeReplacementsHarvesting,
              replacements = "inputFilePath = " + inputFilePath + "; recoSampleDefinitionsFile = TauAnalysis.Configuration.plotZtoMuTau_processes_7TeV_cfi",
              job = "bgEstTemplateHarvesting", queue = "1nh", outputFilePath = outputFilePath)

# harvest Z --> mu+ mu-
submitToBatch(configFile = "../../Configuration/test/harvestZtoMuTauPlots_cfg.py", channel = "ZtoMuTau", sample = "Zmumu",
              replFunction = makeReplacementsHarvesting,
              replacements = "inputFilePath = " + inputFilePath + "; recoSampleDefinitionsFile = TauAnalysis.Configuration.plotZtoMuTau_processes_7TeV_cfi",
              job = "bgEstTemplateHarvesting", queue = "1nh", outputFilePath = outputFilePath)

# harvest InclusivePPmuX
submitToBatch(configFile = "../../Configuration/test/harvestZtoMuTauPlots_cfg.py", channel = "ZtoMuTau", sample = "InclusivePPmuX",
              replFunction = makeReplacementsHarvesting,
              replacements = "inputFilePath = " + inputFilePath + "; recoSampleDefinitionsFile = TauAnalysis.Configuration.plotZtoMuTau_processes_7TeV_cfi",
              job = "bgEstTemplateHarvesting", queue = "1nh", outputFilePath = outputFilePath)

# harvest PPmuXptGt20
# NOTE(review): whitespace-collapsed fragment of a WtoTauNu analysis submission
# script (38X, globalTag START38_V12). The leading '#' makes all embedded code dead
# text, and the fragment is truncated mid-call on the 'W --> mu nu' jobs. Recover
# the formatted original from version control before use.
#inputFileType = "PATTuple" #-------------------------------------------------------------------------------- # NOTE: The jobs get submitted to the '1nd' queue, # which allows for an execution time of the cmsRun jobs of up to 24 hours # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) #-------------------------------------------------------------------------------- # W --> tau nu jobs for i in range(10): submitToBatch(configFile = "runWtoTauNu_cfg.py", channel = "WtoTauNu", sample = "Wtaunu_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; globalTag = START38_V12::All; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false", job = "analysis", queue = "1nd", outputFilePath = outputFilePath, type = "mc") # W --> e nu jobs for i in range(33): submitToBatch(configFile = "runWtoTauNu_cfg.py", channel = "WtoTauNu", sample = "Wenu_part%(i)02d" % {"i" : (i + 1)}, replFunction = makeReplacementsAnalysis, replacements = "maxEvents = -1; globalTag = START38_V12::All; inputFileType = " + inputFileType + "; inputFilePath = " + inputFilePath + "; applyFactorization = false; estimateSysUncertainties = false", job = "analysis", queue = "1nd", outputFilePath = outputFilePath, type = "mc") # W --> mu nu jobs for i in range(30): submitToBatch(configFile = "runWtoTauNu_cfg.py", channel = "WtoTauNu",
# NOTE(review): whitespace-collapsed fragment of a WtoTauNu harvesting script.
# Only the first statement (the 'outputDirectory' assignment) is live Python —
# everything after the '#---' divider on the same line is dead comment text, and
# the fragment is truncated mid-call on the 'W --> e nu' harvest job. Recover the
# formatted original from version control before use.
outputDirectory = "/castor/cern.ch/user/l/liis/WTauNuPlots/" #-------------------------------------------------------------------------------- # Add histograms, numbers in FilterStatisticsTables and run + event numbers # stored as DQM MonitorElements in different ROOT files # # NOTE: The jobs get submitted to the '1nh' queue, # which allows for an execution time of the cmsRun jobs of up to 1 hour # (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit); # see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system) #-------------------------------------------------------------------------------- # harvest W --> tau nu submitToBatch(configFile = "harvestWtoTauNuPlots_cfg.py", channel = "WtoTauNu", sample = "Wtaunu", replFunction = makeReplacementsHarvesting, replacements = "", job = "harvesting", queue = "1nh", outputDirectory = outputDirectory) # harvest qcd submitToBatch(configFile = "harvestWtoTauNuPlots_cfg.py", channel = "WtoTauNu", sample = "qcd_W", replFunction = makeReplacementsHarvesting, replacements = "", job = "harvesting", queue = "1nh", outputDirectory = outputDirectory) # harvest W --> mu nu submitToBatch(configFile = "harvestWtoTauNuPlots_cfg.py", channel = "WtoTauNu", sample = "Wmunu", replFunction = makeReplacementsHarvesting, replacements = "", job = "harvesting", queue = "1nh", outputDirectory = outputDirectory) # harvest W --> e nu submitToBatch(configFile = "harvestWtoTauNuPlots_cfg.py", channel = "WtoTauNu", sample = "Wenu", replFunction = makeReplacementsHarvesting, replacements = "",
# NOTE: The jobs get submitted to the '1nd' queue,
#       which allows for an execution time of the cmsRun jobs of up to 24 hours
#       (the queues are {'1nh' (1 hour), '1nd' (24 hours) and '1nw' (1 week execution time limit);
#       see https://twiki.cern.ch/twiki/bin/view/CMS/CMSUKCMSSWBatch for details about the CERN batch system)
#
# DEFECT FIXED: this script had been collapsed onto a single line beginning with
# '#', which made the whole block a dead comment (no jobs would ever be
# submitted). Restored to formatted, executable Python. Also made the second
# submission use the shared 'numevts' constant instead of repeating the literal
# "maxEvents = -1" — the resulting replacements string is byte-identical.
#--------------------------------------------------------------------------------

# cmsRun replacement snippets shared by all submissions below.
numevts = "maxEvents = -1"
# NOTE(review): '2nd' contradicts the header's claim that jobs go to '1nd' —
# confirm which queue is intended (kept as-is).
myQueue = "2nd"

# Higgs jobs (mass = 115 GeV)
submitToBatch(configFile="runAHtoElecMu_cfg.py", channel="AHtoElecMu", sample="AH115tautau",
              replFunction=makeReplacementsAnalysis,
              replacements=numevts + "; applyFactorization = false; estimateSysUncertainties = false",
              job="analysis", queue=myQueue, outputDirectory=outputDirectory)
submitToBatch(configFile="runAHtoElecMu_cfg.py", channel="AHtoElecMu", sample="AH115bbtautau",
              replFunction=makeReplacementsAnalysis,
              replacements=numevts + "; applyFactorization = false; estimateSysUncertainties = false",
              job="analysis", queue=myQueue, outputDirectory=outputDirectory)
# NOTE(review): the commented-out submission below was truncated in this copy of
# the file; its continuation lines are missing.
## submitToBatch(configFile = "runAHtoElecMu_cfg.py", channel = "AHtoElecMu", sample = "AH115tautau2l",