Example #1
def lumiList(json):
    import FWCore.PythonUtilities.LumiList as LumiList
    myLumis = LumiList.LumiList(filename=json).getCMSSWString().split(',')
    return myLumis
        # 'file:/afs/cern.ch/user/y/yoshin/eos/cms/store/group/phys_exotica/EmergingJets/wjetskim-v0/SingleMuonD-PRv3/SingleMuon/WJetSkim/151028_030342/0000/output_181.root',
        # 'file:/afs/cern.ch/user/y/yoshin/eos/cms/store/group/phys_exotica/EmergingJets/wjetskim-v0/SingleMuonD-PRv3/SingleMuon/WJetSkim/151028_030342/0000/output_182.root',
        # 'file:/afs/cern.ch/user/y/yoshin/eos/cms/store/group/phys_exotica/EmergingJets/wjetskim-v0/SingleMuonD-PRv3/SingleMuon/WJetSkim/151028_030342/0000/output_183.root',
        # 'file:/afs/cern.ch/user/y/yoshin/eos/cms/store/group/phys_exotica/EmergingJets/wjetskim-v0/SingleMuonD-PRv3/SingleMuon/WJetSkim/151028_030342/0000/output_184.root',
        # 'file:/afs/cern.ch/user/y/yoshin/eos/cms/store/group/phys_exotica/EmergingJets/wjetskim-v0/SingleMuonD-PRv3/SingleMuon/WJetSkim/151028_030342/0000/output_185.root',
        # 'file:/afs/cern.ch/user/y/yoshin/eos/cms/store/group/phys_exotica/EmergingJets/wjetskim-v0/SingleMuonD-PRv3/SingleMuon/WJetSkim/151028_030342/0000/output_186.root',
        # 'file:/afs/cern.ch/user/y/yoshin/eos/cms/store/group/phys_exotica/EmergingJets/wjetskim-v0/SingleMuonD-PRv3/SingleMuon/WJetSkim/151028_030342/0000/output_187.root',
        # 'file:/afs/cern.ch/user/y/yoshin/eos/cms/store/group/phys_exotica/EmergingJets/wjetskim-v0/SingleMuonD-PRv3/SingleMuon/WJetSkim/151028_030342/0000/output_188.root',
        # 'file:/afs/cern.ch/user/y/yoshin/eos/cms/store/group/phys_exotica/EmergingJets/wjetskim-v0/SingleMuonD-PRv3/SingleMuon/WJetSkim/151028_030342/0000/output_189.root',
        # 'file:/afs/cern.ch/user/y/yoshin/eos/cms/store/group/phys_exotica/EmergingJets/wjetskim-v0/SingleMuonD-PRv3/SingleMuon/WJetSkim/151028_030342/0000/output_19.root',
    ),
)

import FWCore.PythonUtilities.LumiList as LumiList
process.source.lumisToProcess = LumiList.LumiList(
    filename='Cert_246908-258750_13TeV_PromptReco_Collisions15_25ns_JSON.txt'
).getVLuminosityBlockRange()
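# A minimal alternative sketch, assuming one prefers the lumiList() helper above;
# 'my_golden.json' is a placeholder file name (same pattern as Example #5 below):
# import FWCore.ParameterSet.Types as CfgTypes
# process.source.lumisToProcess = CfgTypes.untracked(CfgTypes.VLuminosityBlockRange())
# process.source.lumisToProcess.extend(lumiList('my_golden.json'))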

# process.jetFilter = cms.EDFilter("JetFilter",
#     srcJets = cms.InputTag("ak4PFJetsCHS"),
#     # srcJets = cms.InputTag("patJets"),
#     # additionalCut = cms.string(""),
#     additionalCut = cms.string("abs(eta) < 2.5 && pt > 50.0"),
#     jetCuts = cms.VPSet(
#         cms.PSet(
#             minPt = cms.double(400.0),
#             maxEta = cms.double(2.5),
#             stringCut = cms.string(""),
#             ),
#         cms.PSet(
#             minPt = cms.double(200.0),
Example #3
        #  '/store/hidata/HIRun2013A/PAHighPt/RECO/PromptReco-v1/000/210/634/FA4E6B7E-7366-E211-8DD0-0019B9F581C9.root'
        #  'file:/cms/store/hidata/HIRun2013/PAHighPt/RECO/PromptReco-v1/000/210/498/00000/1E825832-FA64-E211-8F9C-003048CF9B28.root'
    ),
)
filename = "filelist" + sys.argv[3] + ".dat"
mylist = FileUtils.loadListFromFile(filename)
for fname in mylist[int(sys.argv[4]):int(sys.argv[5])]:
    process.source.fileNames.append('file:%s' % (fname))
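# Presumed invocation convention (an assumption, not stated in the snippet):
# sys.argv[3] is the era key ("pPbReReco", "pPb" or "Pbp"), naming both the file
# list "filelist<era>.dat" and the certification JSON chosen below, while
# sys.argv[4]:sys.argv[5] slice the file list so several jobs can split it.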

# =============== Other Statements =====================
process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(-1))
process.options = cms.untracked.PSet(wantSummary=cms.untracked.bool(True))
process.GlobalTag.globaltag = 'GR_P_V43F::All'
if sys.argv[3] == "pPbReReco":
    process.source.lumisToProcess = LumiList.LumiList(
        filename='Cert_210498-210658_HI_PromptReco_Collisions13_JSON_v2.txt'
    ).getVLuminosityBlockRange()
elif sys.argv[3] == "pPb":
    process.source.lumisToProcess = LumiList.LumiList(
        filename='Cert_210676-211256_HI_PromptReco_Collisions13_JSON_v2.txt'
    ).getVLuminosityBlockRange()
elif sys.argv[3] == "Pbp":
    process.source.lumisToProcess = LumiList.LumiList(
        filename='Cert_211313-211631_HI_PromptReco_Collisions13_JSON_v2.txt'
    ).getVLuminosityBlockRange()

# =============== Import Sequences =====================
process.load('Appeltel.RpPbAnalysis.PAPileUpVertexFilter_cff')

#Trigger Selection
### Commented out for the time being, assuming we run on a secondary dataset with the trigger bit already selected
Example #4
from os import path as path
import FWCore.ParameterSet.Config as cms

process = cms.Process("AnalysisProc")
process.load("FWCore.MessageService.MessageLogger_cfi")
process.MessageLogger.cerr.FwkReport.reportEvery = 10000

#import PhysicsTools.PythonAnalysis.LumiList as LumiList

#LumiList.LumiList().getVLuminosityBlockRange()
import FWCore.PythonUtilities.LumiList as LumiList
LumiList.LumiList().getVLuminosityBlockRange()
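# Note: the call above is given no filename and its return value is not kept, so
# it appears to be a leftover placeholder rather than an actual lumi selection.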

#from Configuration.AlCa.GlobalTag import GlobalTag
process.load(
    'Configuration.StandardSequences.FrontierConditions_GlobalTag_condDBv2_cff'
)
from Configuration.AlCa.GlobalTag_condDBv2 import GlobalTag
process.GlobalTag.globaltag = ''

process.source = cms.Source(
    "PoolSource",
    fileNames=cms.untracked.vstring(
        'root://cms-xrd-global.cern.ch//store/user/tomc/heavyNeutrinoMiniAOD/Fall17/displaced/HeavyNeutrino_lljj_M-5_V-0.00836660026534_mu_massiveAndCKM_LO/heavyNeutrino_1.root'
        #'root://cms-xrd-global.cern.ch///store/mc/RunIISummer16MiniAODv2/DYJetsToLL_Zpt-200toInf_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/100000/04B4B947-A1E4-E611-BF2C-ECF4BBE16468.root'
        #'file:/afs/cern.ch/work/h/hrejebsf/public/HLTReco/CMSSW_8_0_21/src/miniAOD.root'
        #'root://cms-xrd-global.cern.ch//store/user/tomc/heavyNeutrinoMiniAOD/Fall17/displaced/HeavyNeutrino_lljj_M-5_V-0.00836660026534_mu_massiveAndCKM_LO/heavyNeutrino_1.root',
        #'root://cms-xrd-global.cern.ch//store/user/tomc/heavyNeutrinoMiniAOD/Fall17/displaced/HeavyNeutrino_lljj_M-5_V-0.00836660026534_mu_massiveAndCKM_LO/heavyNeutrino_1.root',
        #'root://cms-xrd-global.cern.ch//store/user/tomc/heavyNeutrinoMiniAOD/Fall17/displaced/HeavyNeutrino_lljj_M-5_V-0.00836660026534_mu_massiveAndCKM_LO/heavyNeutrino_1.root',
        #'root://cms-xrd-global.cern.ch//store/user/tomc/heavyNeutrinoMiniAOD/Fall17/displaced/HeavyNeutrino_lljj_M-5_V-0.00836660026534_mu_massiveAndCKM_LO/heavyNeutrino_1.root',
        #'root://cms-xrd-global.cern.ch//store/user/tomc/heavyNeutrinoMiniAOD/Fall17/displaced/HeavyNeutrino_lljj_M-5_V-0.00836660026534_mu_massiveAndCKM_LO/heavyNeutrino_1.root',
Example #5
if inputScript != '':
    process.load(inputScript)
else:
    print 'need an input script'
    exit(8889)

process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(maxEvents))

if skipEvents > 0:
    process.source.skipEvents = cms.untracked.uint32(skipEvents)

if not runOnMC and not (json == "nojson"):
    import FWCore.PythonUtilities.LumiList as LumiList
    import FWCore.ParameterSet.Types as CfgTypes
    myLumis = LumiList.LumiList(filename=json).getCMSSWString().split(',')
    process.source.lumisToProcess = CfgTypes.untracked(
        CfgTypes.VLuminosityBlockRange())
    process.source.lumisToProcess.extend(myLumis)

####################################################################
## Configure message logger

process.load("FWCore.MessageService.MessageLogger_cfi")
process.MessageLogger.destinations = ['cout', 'cerr']
process.MessageLogger.cerr.threshold = 'INFO'
#process.MessageLogger.suppressWarning=['particleFlowDisplacedVertexCandidate','The interpolated laser correction is <= zero! (0). Using 1. as correction factor.']
process.MessageLogger.cerr.FwkReport.reportEvery = reportEvery

####################################################################
### Geometry and Detector Conditions
Example #6
    ),
)

process.options = cms.untracked.PSet()

# Production Info
process.configurationMetadata = cms.untracked.PSet(
    version=cms.untracked.string('$Revision: 1.20 $'),
    annotation=cms.untracked.string('RelVal nevts:100'),
    name=cms.untracked.string('Applications'))

# JSON

import FWCore.PythonUtilities.LumiList as LumiList
process.source.lumisToProcess = LumiList.LumiList(
    filename='Cert_271036-284044_13TeV_PromptReco_Collisions16_JSON.txt'
).getVLuminosityBlockRange()

# skim definitions

process.highpfmet = cms.EDFilter("PtMinCandViewSelector",
                                 src=cms.InputTag("slimmedMETs"),
                                 ptMin=cms.double(100))

process.metFilter = cms.EDFilter(
    "CandViewCountFilter",
    src=cms.InputTag("highpfmet"),
    minNumber=cms.uint32(1),
)

process.pfmet_filter_step = cms.Path(process.highpfmet + process.metFilter)
Example #7
# process.load("RecoTracker.IterativeTracking.MuonSeededStep_cff")
# process.load("RecoTracker.CkfPattern.GroupedCkfTrajectoryBuilder_cfi")
# import RecoTracker.CkfPattern.GroupedCkfTrajectoryBuilder_cfi
process.load("RecoTracker.CkfPattern.GroupedCkfTrajectoryBuilder_cff")

process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(-1))
process.source = cms.Source(
    "PoolSource",
    fileNames=cms.untracked.vstring(
        '/store/data/Run2016E/RPCMonitor/RAW/v2/000/277/420/00000/D4696168-B352-E611-A385-02163E0135EB.root'
    ))

import FWCore.PythonUtilities.LumiList as LumiList

process.source.lumisToProcess = LumiList.LumiList(
    filename=
    '/afs/cern.ch/user/c/carrillo/efficiency/CMSSW_8_0_1/src/DQM/RPCMonitorModule/test/parallel/Cert_271036-284044_13TeV_PromptReco_Collisions16_JSON_NoL1T_MuonPhys.json'
).getVLuminosityBlockRange()

process.dTandCSCSegmentsinTracks = cms.EDProducer(
    "DTandCSCSegmentsinTracks",
    cscSegments=cms.untracked.InputTag("hltCscSegments"),
    dt4DSegments=cms.untracked.InputTag("hltDt4DSegments"),
    tracks=cms.untracked.InputTag("standAloneMuons", ""))

process.rpcPointProducer = cms.EDProducer(
    'RPCPointProducer',
    incldt=cms.untracked.bool(True),
    inclcsc=cms.untracked.bool(True),
    incltrack=cms.untracked.bool(False),
    debug=cms.untracked.bool(False),
    rangestrips=cms.untracked.double(4.),
def unpackAndMergeFiles(datasetpath, prefix, castor):
    ###
    pwd = os.getenv("PWD")
    minRun = 0
    maxRun = 0
    sampleName = "None"
    ## Verify that CRAB directory exists
    pathCrabFull = ""
    pathCrabShort = ""
    for fname in os.listdir(prefix + datasetpath):
        if re.search("crab_0_", fname) != None:
            print fname
            if pathCrabFull == "":
                pathCrabFull = prefix + datasetpath + "/" + fname
                pathCrabShort = fname
            else:
                print "ERROR: More than one CRAB directory exists in", prefix + datasetpath
                print "       Only one is allowed!"
                os.abort()
    ## Unpack and merge the results
    if pathCrabFull != "":
        #Get min and max number of analysed runs
        jsonFile = prefix + datasetpath + "/goodRuns.json"
        jsonList = LumiList(filename=jsonFile)
        runList = jsonList.getRuns()
        minRun = int(runList[0])
        maxRun = int(runList[len(runList) - 1])
        ## Create TEMP dir
        pathTMP = "/tmp/" + os.getenv("USER") + "/_HADD_TMP_/"
        os.system("rm -rf /tmp/${USER}/_HADD_TMP_")
        os.system("mkdir -p " + pathTMP + prefix + datasetpath)
        pathCrabResults = pathCrabFull + "/res/"
        pathCrabResultsTMP = pathTMP + prefix + datasetpath + "/" + pathCrabShort + "/res/"
        print "CRAB results from:", pathCrabResults, " will be unpacked here:", pathCrabResultsTMP
        #Copy CRAB directory to TEMP dir
        os.system("cp -r " + prefix + "/. " + pathTMP + prefix + "/.")
        ## Copy all but .tgz files (if any)
        ##os.system("cd "+pathCrabResults+";for i in efficiencyTree_*.root rawMonitor_*.root synchroAnalysis_*.root crab_*.xml CMSSW_*.stdout CMSSW_*.stderr ; do cp $i /tmp/${USER}/_HADD_TMP_; done; ls -lart ; cd -")
        ## Extract .tgz files (if any)
        ##os.system("cd "+pathCrabResults+";for i in out_files_*.tgz; do tar -xzvf $i -C /tmp/${USER}/_HADD_TMP_; done; ls -lart ; cd -")
        os.system("cd " + pathCrabResultsTMP +
                  ";for i in out_files_*.tgz; do tar -xzvf $i -C " +
                  pathCrabResultsTMP + "; done; ls -lart ; cd -")
        ## Get missing CRAB job outputs (if any)
        ## After this step all out_files_*tgz will disappear from TMP dir
        os.system("cd " + pathTMP + prefix + datasetpath + "; crab -status")
        os.system("cd " + pathTMP + prefix + datasetpath + "; crab -get all")
        ## Get lumi summary report
        ## NOTE: the 'crab -status' and 'crab -get all' steps are essential
        ##       The lumiSummary.json is created in the original submit dir...
        os.system("cd " + pathTMP + prefix + datasetpath + "; crab -report")
        ##Files to merge
        jsonFileName = "GoodRuns_" + str(minRun) + "-" + str(maxRun) + ".json"
        jsonFileNameLumiSummary = "lumiSummary-" + str(minRun) + "-" + str(
            maxRun) + ".json"
        sampleName = "efficiencyTree-" + str(minRun) + "-" + str(
            maxRun) + ".root"
        sampleName2 = "efficiencyHelper-" + str(minRun) + "-" + str(
            maxRun) + ".root"
        sampleName3 = "synchroAnalysis-" + str(minRun) + "-" + str(
            maxRun) + ".root"
        sampleName4 = "rawMonitor-" + str(minRun) + "-" + str(maxRun) + ".root"
        sampleName5 = "l1demon-" + str(minRun) + "-" + str(maxRun) + ".root"
        sampleName6 = "out-" + str(minRun) + "-" + str(maxRun) + ".txt"
        print "Doing hadd for ", sampleName
        os.system("cd " + pathCrabResultsTMP +
                  "; hadd /tmp/${USER}/_HADD_TMP_/" + sampleName +
                  " efficiencyTree_*.root")
        print "Doing hadd for ", sampleName2
        os.system("cd " + pathCrabResultsTMP +
                  "; hadd /tmp/${USER}/_HADD_TMP_/" + sampleName2 +
                  " efficiencyHelper_*.root")
        print "Doing hadd for ", sampleName3
        os.system("cd " + pathCrabResultsTMP +
                  "; hadd /tmp/${USER}/_HADD_TMP_/" + sampleName3 +
                  " synchroAnalysis_*.root")
        print "Doing hadd for ", sampleName4
        os.system("cd " + pathCrabResultsTMP +
                  "; hadd /tmp/${USER}/_HADD_TMP_/" + sampleName4 +
                  " rawMonitor_*.root ; ls -lart")
        print "Doing hadd for ", sampleName5
        os.system("cd " + pathCrabResultsTMP +
                  "; hadd /tmp/${USER}/_HADD_TMP_/" + sampleName5 +
                  " l1demon_*.root ; ls -lart")
        print "Preparing out.txt"
        os.system(
            "cd " + pathCrabResultsTMP + "; rm -f /tmp/${USER}/_HADD_TMP_/" +
            sampleName6 +
            "; cat C*.stdout|grep -i lb|grep -i mean >> /tmp/${USER}/_HADD_TMP_/"
            + sampleName6)
        ##Copy results to TMP destination
        print "Copying final results to ", pathTMP + prefix, "/ROOT"
        os.system("mkdir -p " + pathTMP + prefix + "/ROOT")
        os.system("cp " + prefix + datasetpath + "/goodRuns.json" + " " +
                  pathTMP + prefix + "/ROOT/" + jsonFileName)
        os.system("cp " + pathCrabResults + "/lumiSummary.json" + " " +
                  pathTMP + prefix + "/ROOT/" + jsonFileNameLumiSummary)
        os.system("cp " + pathTMP + sampleName + " " + pathTMP + prefix +
                  "/ROOT/")
        os.system("cp " + pathTMP + sampleName2 + " " + pathTMP + prefix +
                  "/ROOT/")
        os.system("cp " + pathTMP + sampleName3 + " " + pathTMP + prefix +
                  "/ROOT/")
        os.system("cp " + pathTMP + sampleName4 + " " + pathTMP + prefix +
                  "/ROOT/")
        os.system("cp " + pathTMP + sampleName5 + " " + pathTMP + prefix +
                  "/ROOT/")
        os.system("gzip " + pathTMP + sampleName6 + "; cp " + pathTMP +
                  sampleName6 + ".gz " + pathTMP + prefix + "/ROOT/")
        ##Copy results to final destination
        print "Copying final results to ", prefix, "/ROOT"
        os.system("mkdir -p " + prefix + "/ROOT")
        os.system("cp " + prefix + datasetpath + "/goodRuns.json" + " " +
                  prefix + "/ROOT/" + jsonFileName)
        os.system("cp " + pathCrabResults + "/lumiSummary.json" + " " +
                  prefix + "/ROOT/" + jsonFileNameLumiSummary)
        os.system("cp /tmp/${USER}/_HADD_TMP_/" + sampleName + " " + prefix +
                  "/ROOT/")
        os.system("cp /tmp/${USER}/_HADD_TMP_/" + sampleName2 + " " + prefix +
                  "/ROOT/")
        os.system("cp /tmp/${USER}/_HADD_TMP_/" + sampleName3 + " " + prefix +
                  "/ROOT/")
        os.system("cp /tmp/${USER}/_HADD_TMP_/" + sampleName4 + " " + prefix +
                  "/ROOT/")
        os.system("cp /tmp/${USER}/_HADD_TMP_/" + sampleName5 + " " + prefix +
                  "/ROOT/")
        os.system("gzip /tmp/${USER}/_HADD_TMP_/" + sampleName6 +
                  "; cp /tmp/${USER}/_HADD_TMP_/" + sampleName6 + ".gz " +
                  prefix + "/ROOT/")
        ##Remove trash from tmp
        os.system("rm -rf /tmp/${USER}/_HADD_TMP_/*.root")
        os.system("rm -rf /tmp/${USER}/_HADD_TMP_/out*.txt.gz")
        ##Copy crab files to CASTOR
        archiveName = (prefix + datasetpath)[2:]
        archiveName = archiveName.replace("/", "_")
        archiveDir = archiveName[:-1] + "-" + str(minRun) + "-" + str(maxRun)
        archiveName = archiveDir + ".tar.gz"
        os.system("cd " + pathTMP + "; tar -cvzf " + pathTMP + archiveName +
                  " " + pathTMP + prefix + datasetpath + "/*")
        castor2 = castor + "/" + archiveDir + "/"
        print "Copying archive", archiveName, "to CASTOR path: ", castor2
        os.system("rfmkdir " + castor2)
        command = "cd " + pathTMP + "; rfcp " + archiveName + " " + castor2 + "; cd -"
        os.system(command)
        print "Copying merged data", sampleName, "to CASTOR path: ", castor2
        command = "cd " + pathTMP + prefix + "/ROOT/; rfcp " + sampleName + " " + castor2 + "; cd -"
        os.system(command)
        print "Copying merged data", sampleName2, "to CASTOR path: ", castor2
        command = "cd " + pathTMP + prefix + "/ROOT/; rfcp " + sampleName2 + " " + castor2 + "; cd -"
        os.system(command)
        print "Copying merged data", sampleName3, "to CASTOR path: ", castor2
        command = "cd " + pathTMP + prefix + "/ROOT/; rfcp " + sampleName3 + " " + castor2 + "; cd -"
        os.system(command)
        print "Copying merged data", sampleName4, "to CASTOR path: ", castor2
        command = "cd " + pathTMP + prefix + "/ROOT/; rfcp " + sampleName4 + " " + castor2 + "; cd -"
        os.system(command)
        print "Copying merged data", sampleName5, "to CASTOR path: ", castor2
        command = "cd " + pathTMP + prefix + "/ROOT/; rfcp " + sampleName5 + " " + castor2 + "; cd -"
        os.system(command)
        print "Copying merged data", sampleName6 + ".gz", "to CASTOR path: ", castor2
        command = "cd " + pathTMP + prefix + "/ROOT/; rfcp " + sampleName6 + ".gz " + castor2 + "; cd -"
        os.system(command)
        print "Copying JSON files:", jsonFileName, ",", jsonFileNameLumiSummary, "to CASTOR path: ", castor2
        command = "cd " + pathTMP + prefix + "/ROOT/; rfcp " + jsonFileName + " " + jsonFileNameLumiSummary + " " + castor2 + "; cd -"
        os.system(command)
        # Remove TEMP DIR
        os.system("rm -rf /tmp/${USER}/_HADD_TMP_")
        print "Done. " + '\033[93m' + "Please remove " + prefix + datasetpath + " directory manually!" + '\033[0m'
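# A minimal usage sketch, assuming placeholder arguments: the function expects a
# dataset subdirectory under `prefix` holding exactly one crab_0_* directory and
# a goodRuns.json, plus a CASTOR destination path, e.g.
#   unpackAndMergeFiles("SingleMu_Run2012A/", "./", "/castor/cern.ch/user/x/someuser/merged")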
Example #9
if config["RUNONMC"]: GT = '80X_mcRun2_asymptotic_2016_miniAODv2'
elif not (config["RUNONMC"]): GT = '80X_dataRun2_Prompt_ICHEP16JEC_v0'

print "*************************************** GLOBAL TAG *************************************************"
print GT
print "****************************************************************************************************"
process.GlobalTag = GlobalTag(process.GlobalTag, GT)

######### read JSON file for data ##########
if not (config["RUNONMC"]) and config["USEJSON"]:

    import FWCore.PythonUtilities.LumiList as LumiList
    import FWCore.ParameterSet.Types as CfgTypes
    process.source.lumisToProcess = CfgTypes.untracked(
        CfgTypes.VLuminosityBlockRange())
    myLumis = LumiList.LumiList(
        filename=config["JSONFILE"]).getCMSSWString().split(',')
    process.source.lumisToProcess.extend(myLumis)

    if config["FILTEREVENTS"]:

        fname = ""
        if (options.inputFiles)[0].find("SingleMuon") != -1:
            fname = "RunLumiEventLists/SingleMuon_csc2015_Nov14.txt"
        elif (options.inputFiles)[0].find("SingleElectron") != -1:
            fname = "RunLumiEventLists/SingleElectron_csc2015_Nov14.txt"
        elif (options.inputFiles)[0].find("JetHT") != -1:
            fname = "RunLumiEventLists/JetHT_csc2015_Nov27.txt"
        else:
            print "** WARNING: EVENT LIST NOT FOUND! exiting... "
            sys.exit()
Example #10
else:

    from PhysicsTools.NanoAODTools.postprocessing.framework.crabhelper import inputFiles, runsAndLumis
    infilelist = inputFiles()
    jsoninput = runsAndLumis()
    fwkjobreport = True

if args.isdata and args.year == '2018' and args.period == 'D' and (
        'MuonEG' in infilelist):
    print 'special treatment for MuonEG_Run2018D'
    import FWCore.PythonUtilities.LumiList as LumiList
    import FWCore.ParameterSet.Config as cms

    lumisToProcess = cms.untracked.VLuminosityBlockRange(
        LumiList.LumiList(
            filename=
            "./Cert_314472-325175_13TeV_Legacy2018_Collisions18_JSON.txt").
        getCMSSWString().split(','))
    # print lumisToProcess

    runsAndLumis_special = {}
    for l in lumisToProcess:
        if "-" in l:
            start, stop = l.split("-")
            rstart, lstart = start.split(":")
            rstop, lstop = stop.split(":")
        else:
            rstart, lstart = l.split(":")
            rstop, lstop = l.split(":")
        if rstart != rstop:
            raise Exception(
                "Cannot convert '%s' to runs and lumis json format" % l)
Example #11
elif isData and isPromptReco:
    JECstring = "Spring16_25nsV6_DATA"
elif not isData:
    JECstring = "Summer16_23Sep2016V3_MC"

print "JEC ->",JECstring

#-----------------------#
#        FILTERS        #
#-----------------------#

# JSON filter
if isData:
    import FWCore.PythonUtilities.LumiList as LumiList
    jsonName = "Cert_294927-305364_13TeV_PromptReco_Collisions17_JSON"#"Cert_294927-301567_13TeV_PromptReco_Collisions17_JSON" #golden json
    process.source.lumisToProcess = LumiList.LumiList(filename = 'data/JSON/'+jsonName+'.txt').getVLuminosityBlockRange()
    print "JSON file loaded: ", jsonName

# MET filters
process.load('RecoMET.METFilters.BadPFMuonFilter_cfi')
process.BadPFMuonFilter.muons = cms.InputTag('slimmedMuons')# if not isAOD else 'muons')
process.BadPFMuonFilter.PFCandidates = cms.InputTag('packedPFCandidates')# if not isAOD else 'PFCandidates')

process.load('RecoMET.METFilters.BadChargedCandidateFilter_cfi')
process.BadChargedCandidateFilter.muons = cms.InputTag('slimmedMuons')# if not isAOD else 'muons')
process.BadChargedCandidateFilter.PFCandidates = cms.InputTag('packedPFCandidates')# if not isAOD else 'PFCandidates')


#-----------------------#
#       ANALYZER        #
#-----------------------#
Example #12
# ParameterSets for use in bin/<script>.cc
#
process = cms.PSet()

#Setup framework lite input file object
process.fwliteInput = cms.PSet(
    fileNames=cms.vstring(fileNames),
    maxEvents=cms.int32(int(o.nevents)),
)

# LumiMask
process.inputs = cms.PSet(
    lumisToProcess=CfgTypes.untracked(CfgTypes.VLuminosityBlockRange()))
if isData:
    # get the JSON file correctly parsed
    myList = LumiList.LumiList(
        filename=JSONfiles[o.year]).getCMSSWString().split(',')
    process.inputs.lumisToProcess.extend(myList)

# Setup picoAOD
process.picoAOD = cms.PSet(
    fileName=cms.string(picoAOD),
    create=cms.bool(create),
    fastSkim=cms.bool(o.fastSkim),
)

inputHFiles_3Tag = []
inputHFiles_4Tag = []
if o.loadHemisphereLibrary:

    fileList_3Tag = os.popen("ls " + o.inputHLib3Tag).readlines()
    for i in fileList_3Tag:
Example #13
if (isData):
    # sept reprocessing
    process.GlobalTag.globaltag = '80X_dataRun2_2016SeptRepro_v3'
else:
    process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_TrancheIV_v6'

### LOAD DATABASE
from CondCore.DBCommon.CondDBSetup_cfi import *
#from CondCore.CondDB.CondDB_cfi import *

######## LUMI MASK
#if isData and not options.isGrid and False: ## don't load the lumiMask, it will be applied by CRAB
if isData:
    import FWCore.PythonUtilities.LumiList as LumiList
    process.source.lumisToProcess = LumiList.LumiList(
        filename=
        'goodlumis/Cert_271036-284044_13TeV_23Sep2016ReReco_Collisions16_JSON.txt'
    ).getVLuminosityBlockRange()
    print "Using local JSON"

### LOAD CONFIGURATION
process.load('PandaProd.Filter.infoProducerSequence_cff')
process.load('PandaProd.Filter.MonoXFilterSequence_cff')
process.load('PandaProd.Ntupler.PandaProd_cfi')
#process.load('PandaProd.Ntupler.VBF_cfi')

### ##ISO
process.load("RecoEgamma/PhotonIdentification/PhotonIDValueMapProducer_cfi")
process.load(
    "RecoEgamma/ElectronIdentification/ElectronIDValueMapProducer_cfi")

process.PandaNtupler.isData = isData
Example #14
process.source = cms.Source("PoolSource",
                            fileNames=cms.untracked.vstring(ivars.inputFiles))

# nope, doing lumis another way
#theLumiMask = path.expandvars("") # for MC it defaults to "", and the lumi checker works fine with that
#process.ntupler.lumisToProcess = LumiList.LumiList(filename = theLumiMask).getVLuminosityBlockRange()

# new lumi processing from
# https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuidePythonTips#Use_a_JSON_file_of_good_lumi_sec
# not sure how it works and how it counts lumisections processed
if not isMC:
    process.source.lumisToProcess = CfgTypes.untracked(
        CfgTypes.VLuminosityBlockRange())
    JSONfile = ''
    myLumis = LumiList.LumiList(filename=JSONfile).getCMSSWString().split(',')
    process.source.lumisToProcess.extend(myLumis)
# it's not clear how to get the output from this: which LumiSecs have actually been processed (and which were not, e.g. due to job crashes)
# maybe we should use the old utility from llvv_fwk
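# A minimal sketch, assuming a processed-lumi JSON is available under a
# placeholder name: LumiList supports set-like arithmetic, so the certified
# lumis that a job did NOT cover could be obtained by subtraction:
#   requested = LumiList.LumiList(filename=JSONfile)
#   processed = LumiList.LumiList(filename='processedLumis.json')
#   print requested - processed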

# NTUPLER
process.load("UserCode.NtuplerAnalyzer.CfiFile_cfi")
process.ntupler.isMC = cms.bool(isMC)
#process.ntupler.dtag = cms.string('MC2016_TT_powheg')
process.ntupler.dtag = cms.string(dtag)

process.ntupler.isLocal = cms.bool(False)  # LSF submission is local
# for LumiDump (to be scraped):
process.ntupler.input = cms.untracked.vstring(
    'file:165F54A0-A3BE-E611-B3F7-0025905A606A.root',
    #'root://cms-xrd-global.cern.ch///store/mc/RunIISummer16MiniAODv2/TT_TuneCUETP8M2T4_13TeV-powheg-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/0693E0E7-97BE-E611-B32F-0CC47A78A3D8.root',
Example #15
    def customize(self,process):
        self.parse()

        isFwlite = False
        hasOutput = False
        hasTFile = False
        sp_unused = ""
        if hasattr(process,"fwliteInput"):
            isFwlite = True
        if not isFwlite:
            hasOutput = hasattr(process,"out")            
            hasTFile = hasattr(process,"TFileService")
        
        if hasOutput and hasTFile:
            tfile = self.outputFile.replace(".root","_histos.root")
        else:
            tfile = self.outputFile
            
        if self.dryRun:
            import sys
            if self.dataset and self.dataset != "":
                name,xsec,totEvents,files,maxEvents,sp_unused = self.dataset
                if self.getMaxJobs:
                    print "maxJobs:%d" % ( min(len(files),self.nJobs) )                    
                if len(files) != 0:
                    if isFwlite:
                        print "hadd:%s" % self.outputFile
                    else:
                        if hasOutput:
                            print "edm:%s" % self.outputFile
                        if hasTFile or self.tfileOut:
                            print "hadd:%s" % tfile
                    ## sys.exit(0)
            else:
                sys.exit(1)
            
        files = self.inputFiles
        if self.dataset and self.dataset != "":
            dsetname,xsec,totEvents,files,maxEvents,sp_unused = self.dataset
            if type(xsec) == float or xsec == None:
                print 
                print "Error: cross section not found for dataset %s" % dsetname
                print
                
            self.maxEvents = int(maxEvents)
            
            putarget = None
            samplepu = None
            if self.puTarget != "":
                putarget = map(float, self.puTarget.split(","))
                
            processId = self.getProcessId(dsetname)
            self.processId = processId

            #----------

            if self.options.processIndex != None:
                self.processIndex = self.options.processIndex
            else:
                # not specified on the command line, try to take it 
                # from the cross section file, otherwise use smallest int32 as default value
                # in order not to confuse it with data (index 0)

                if isinstance(xsec, dict):
                    self.processIndex = xsec.get('itype', -0x7FFFFFFF)
                else:
                    # note that in some cases (process not defined in cross_sections.json ?)
                    # this can still be a float
                    self.processIndex = -0x7FFFFFFF

            #----------

            if isinstance(xsec, dict) and "itype" in xsec:
                for name,obj in process.__dict__.iteritems():
                    if hasattr(obj, "sampleIndex"):
                        obj.sampleIndex = xsec["itype"]

            
            isdata = self.processType == "data"
            if isdata or self.targetLumi > 0. or putarget:
                ## look for analyzers which have lumiWeight as attribute
                for name,obj in process.__dict__.iteritems():
                    
                    if hasattr(obj,"lumiWeight"):
                        if  isdata:
                            obj.lumiWeight = 1.
                        else:
                            wei = xsec["xs"]/float(totEvents)*self.targetLumi
                            wei *= xsec.get("br",1.)
                            wei *= xsec.get("kf",1.)
                            obj.lumiWeight = wei

                    if hasattr(obj,"intLumi"):
                        if isdata:
                            obj.intLumi = 0  # should not be used in final fits;
                            # setting it to 0 will cause an error if someone
                            # tries to use it for normalization downstream
                        else:
                            obj.intLumi=self.targetLumi

                    if putarget and not isdata:
                        puObj = None
                        if hasattr(obj,"puReWeight"):
                            puObj = obj
                        elif hasattr(obj,"globalVariables") and hasattr(obj.globalVariables,"puReWeight"):
                            puObj = obj.globalVariables
                        if puObj:
                            if not samplepu:
                                matches = filter(lambda x: x in dsetname, self.pu_distribs.keys() )
                                print matches
                                if len(matches) > 1:
                                    print "Multiple matches, check if they're all the same"
                                    allsame = True
                                    for i in range(1,len(matches)):
                                        if self.pu_distribs[matches[0]] != self.pu_distribs[matches[i]]:
                                            allsame = False
                                    if allsame:
                                        print "They're all the same so we just take the 0th one:",matches[0]
                                        matches = [matches[0]]
                                    else:
                                        print "Not all the same... so we return to the old behavior and take an exact match, otherwise leave empty..."
                                        matches = filter(lambda x: x == dsetname, matches)
                                if len(matches) != 1:
                                    raise Exception("Could not determine sample pu distribution for reweighting. Possible matches are [%s]. Selected [%s]\n dataset: %s" % 
                                                ( ",".join(self.pu_distribs.keys()), ",".join(matches), dsetname ) )
                                samplepu = self.pu_distribs[matches[0]]
                            puObj.puReWeight = True
                            puObj.puBins = cms.vdouble( map(float, samplepu.probFunctionVariable) )
                            puObj.mcPu   = samplepu.probValue
                            puObj.dataPu = cms.vdouble(putarget)
                            puObj.useTruePu = cms.bool(True)
                        
                    
            for name,obj in process.__dict__.iteritems():
                if hasattr(obj,"processId"):
                    obj.processId = str(processId)

            for name,obj in process.__dict__.iteritems():
                if hasattr(obj,"processIndex"):
                    obj.processIndex = int(self.processIndex)
                    
            lumisToSkip = None
            if isdata:
                lumisToSkip = self.samplesMan.getLumisToSkip(dsetname)
                process.source.lumisToSkip = lumisToSkip.getVLuminosityBlockRange()
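                # lumisToSkip presumably lists known-bad lumi sections; they are
                # excluded at the source here and also subtracted from the
                # certification mask below, so the two selections stay consistent.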

            if isdata and self.lumiMask != "":
                if isFwlite:
                    sys.exit("Lumi mask not supported in FWlite")

                import FWCore.PythonUtilities.LumiList as LumiList
                target = LumiList.LumiList(filename = self.lumiMask)
                if lumisToSkip: 
                    target = target - lumisToSkip
                process.source.lumisToProcess = target.getVLuminosityBlockRange()

            if isdata:    
                print process.source.lumisToProcess
            
        flist = []
        for f in files:
            if len(f.split(":",1))>1:
                flist.append(str(f))
            else:
                flist.append(str("%s%s" % (self.filePrepend,f)))
        if len(flist) > 0:
            ## fwlite
            if isFwlite:
                ## process.fwliteInput.fileNames.extend([ str("%s%s" % (self.filePrepend,f)) for f in  files])
                process.fwliteInput.fileNames = flist
            ## full framework
            else:
                ## process.source.fileNames.extend([ str("%s%s" % (self.filePrepend,f)) for f in  files])
                process.source.fileNames = flist
 
        ## fwlite
        if isFwlite:
            process.fwliteInput.maxEvents = self.maxEvents
            process.fwliteOutput.fileName = self.outputFile
        ## full framework
        else:
            process.maxEvents.input = self.maxEvents
            
            if hasOutput:
                process.out.fileName = self.outputFile

            if hasTFile:
                process.TFileService.fileName = tfile
    
        if self.tfileOut:
            if hasTFile:
                print "Could not run with both TFileService and custom tfileOut"
                sys.exit(-1)
            name,attr = self.tfileOut
            setattr( getattr( process, name ), attr, tfile )
            

        if self.dumpPython != "":
            from gzip import open
            pyout = open("%s.gz" % self.dumpPython,"w+")
            pyout.write( process.dumpPython() )
            pyout.close()
process.pfPileUp.checkClosestZVertex = cms.bool(False)
process.pfNoElectron.enable = cms.bool(False)
process.pfNoMuon.enable = cms.bool(False)
process.pfJets.srcPVs = cms.InputTag("goodPV")
process.pfNoTau.enable = cms.bool(False)

process.setName_("llbbX")
process.options.wantSummary = False
process.MessageLogger.cerr.FwkReport.reportEvery = 1000
if nevents > 0: process.MessageLogger.cerr.FwkReport.reportEvery = nevents / 10

if runOnCondor and not runOnMC:
    import FWCore.PythonUtilities.LumiList as LumiList
    import FWCore.ParameterSet.Types as CfgTypes
    myLumis = LumiList.LumiList(
        filename=
        '/nfs/user/llbb/JSON/Cert_190456-208686_8TeV_22Jan2013ReReco_Collisions12_JSON.txt'
    ).getCMSSWString().split(',')
    process.source.lumisToProcess = CfgTypes.untracked(
        CfgTypes.VLuminosityBlockRange())
    process.source.lumisToProcess.extend(myLumis)

#GT
if runOnMC: process.GlobalTag.globaltag = 'START53_V27::All'
else: process.GlobalTag.globaltag = 'FT_53_V21_AN6::All'

## Source
process.source = cms.Source("PoolSource", fileNames=files)

## Maximal Number of Events
process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(nevents))
    process.source.fileNames = [
        '/store/data/Run2016G/Charmonium/AOD/23Sep2016-v1/100000/0006BA63-7097-E611-BBE8-001E67E71412.root',
        '/store/data/Run2016G/Charmonium/AOD/23Sep2016-v1/100000/0018FFBA-5B94-E611-AD99-008CFAFBF52E.root',
        '/store/data/Run2016G/Charmonium/AOD/23Sep2016-v1/100000/003343EB-7496-E611-B5EC-848F69FD2997.root',
        '/store/data/Run2016G/Charmonium/AOD/23Sep2016-v1/100000/0060EAA8-9197-E611-9D75-001E67E59BE3.root',
        '/store/data/Run2016G/Charmonium/AOD/23Sep2016-v1/100000/00E2CD87-9798-E611-8CA4-848F69FD4598.root',
        '/store/data/Run2016G/Charmonium/AOD/23Sep2016-v1/100000/020BB297-5B97-E611-82B5-848F69FD4541.root',
        '/store/data/Run2016G/Charmonium/AOD/23Sep2016-v1/100000/04CA2B1A-0897-E611-A716-0025907FD242.root',
        '/store/data/Run2016G/Charmonium/AOD/23Sep2016-v1/100000/06069075-3C97-E611-92D8-008CFA00018C.root',
        '/store/data/Run2016G/Charmonium/AOD/23Sep2016-v1/100000/061698EB-E096-E611-840C-848F69FD3EC9.root',
        '/store/data/Run2016G/Charmonium/AOD/23Sep2016-v1/100000/062B282E-8097-E611-9710-001E67E6F86E.root',
    ]
    #process.source.fileNames = [ 'file:/tmp/gpetrucc/0006BA63-7097-E611-BBE8-001E67E71412.root' ]
    import FWCore.PythonUtilities.LumiList as LumiList
    json = '/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions16/13TeV/ReReco/Final/Cert_271036-284044_13TeV_23Sep2016ReReco_Collisions16_JSON.txt'
    process.source.lumisToProcess = LumiList.LumiList(filename = json).getVLuminosityBlockRange()

#elif "CMSSW_9_" in os.environ['CMSSW_VERSION']:
#    process.GlobalTag.globaltag = cms.string('92X_dataRun2_Prompt_v4')
#    # process.source.fileNames = ['root://cms-xrd-global.cern.ch//store/data/Run2017C/Charmonium/AOD/PromptReco-v1/000/299/368/00000/1C041F03-806D-E711-B796-02163E0136CC.root']
#    #process.source.fileNames = ['file:/u/user/kplee/scratch/ROOTFiles_Test/92X/AOD_Charmonium_Run2017Bv1_Run297050.root']
elif "CMSSW_9_4_" in os.environ['CMSSW_VERSION']:
    process.GlobalTag.globaltag = cms.string('91X_mcRun2_asymptotic_v3')
    process.source.fileNames = [
        '/store/data/Run2017C/Charmonium/AOD/17Nov2017-v1/00000/1A315D13-8CF1-E711-8706-1866DA890700.root'
    ] 

else: raise RuntimeError, "Unknown CMSSW version %s" % os.environ['CMSSW_VERSION']


Example #18
                            )

runboundary = RUNBOUNDARYTEMPLATE
process.source.firstRun = cms.untracked.uint32(int(runboundary))
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(MAXEVENTSTEMPLATE) )

###################################################################
# JSON Filtering
###################################################################
if isMC:
     print(">>>>>>>>>> testPVValidation_cfg.py: msg%-i: This is Simulation!")
     runboundary = 1
else:
     print(">>>>>>>>>> testPVValidation_cfg.py: msg%-i: This is DATA!")
     import FWCore.PythonUtilities.LumiList as LumiList
     process.source.lumisToProcess = LumiList.LumiList(filename ='LUMILISTTEMPLATE').getVLuminosityBlockRange()

###################################################################
# Messages
###################################################################
process.load("FWCore.MessageService.MessageLogger_cfi")
process.MessageLogger.destinations = ['cout', 'cerr']
process.MessageLogger.cerr.FwkReport.reportEvery = 1000

####################################################################
# Produce the Transient Track Record in the event
####################################################################
process.load("TrackingTools.TransientTrack.TransientTrackBuilder_cfi")

####################################################################
# Get the Magnetic Field
process.load("RecoHI.HiCentralityAlgos.CentralityBin_cfi")
process.load('GeneratorInterface.HiGenCommon.HeavyIon_cff')
process.load("HeavyIonsAnalysis.Configuration.hfCoincFilter_cff")
process.load("HeavyIonsAnalysis.Configuration.analysisFilters_cff")
process.load("HeavyIonsAnalysis.Configuration.collisionEventSelection_cff")
process.load("QWAna.QWNtrkOfflineProducer.QWNoff_cfi")
process.load("HeavyIonsAnalysis.VNAnalysis/vnanalyzer_cfi")

from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, '80X_dataRun2_Prompt_v16', '')

process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(-1))
process.MessageLogger.cerr.FwkReport.reportEvery = 500

import FWCore.PythonUtilities.LumiList as LumiList
goodLumiSecs = LumiList.LumiList(
    filename=ivars.lumifile).getCMSSWString().split(',')
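# Presumably (not shown in this truncated snippet) goodLumiSecs is applied to
# the source further down, e.g.
#   process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange(goodLumiSecs)
# as the other examples on this page do.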

readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()

#process.source = cms.Source ("PoolSource",fileNames = cms.untracked.vstring(),
#                             inputCommands=cms.untracked.vstring(
#        'keep *',
#        'drop *_hiEvtPlane_*_*'
#)
# )

process.source = cms.Source(
    "PoolSource",
    fileNames=cms.untracked.vstring(
        "root://cmsxrootd.fnal.gov//store/user/davidlw/PAHighMultiplicity0/RecoSkim2016_pPb_V0Cascade_v1/170301_201930/0000/pPb_HM_28.root"
Example #20
print 'Running on', ('data' if isData else 'MC'), ', sample is', sample
if isReHLT: print '-> re-HLT sample'
if isDibosonInclusive: print '-> Pythia LO sample'
#isData = False

#-----------------------#
#        FILTERS        #
#-----------------------#

# JSON filter
import FWCore.PythonUtilities.LumiList as LumiList
if isData:
    #process.source.lumisToProcess = LumiList.LumiList(filename = '%s/src/Analysis/ALPHA/data/JSON/Cert_271036-275125_13TeV_PromptReco_Collisions16_JSON.txt' % os.environ['CMSSW_BASE']).getVLuminosityBlockRange() #4.34
    #process.source.lumisToProcess = LumiList.LumiList(filename = '%s/src/Analysis/ALPHA/data/JSON/Cert_271036-275783_13TeV_PromptReco_Collisions16_JSON.txt' % os.environ['CMSSW_BASE']).getVLuminosityBlockRange() #6.26
    process.source.lumisToProcess = LumiList.LumiList(
        filename=
        '%s/src/Analysis/ALPHA/data/JSON/Cert_271036-276811_13TeV_PromptReco_Collisions16_JSON_NoL1T.txt'
        % os.environ['CMSSW_BASE']).getVLuminosityBlockRange()  #12.9

process.counter = cms.EDAnalyzer(
    'CounterAnalyzer',
    lheProduct=cms.InputTag(
        'externalLHEProducer' if not isCustom else 'source'),
    pythiaLOSample=cms.bool(True if isDibosonInclusive else False),
)

# Trigger filter
import HLTrigger.HLTfilters.hltHighLevel_cfi
triggerTag = 'HLT2' if isReHLT else 'HLT'
process.HLTFilter = cms.EDFilter(
    'HLTHighLevel',
    TriggerResultsTag=cms.InputTag('TriggerResults', '', triggerTag),
        'L1_HTT400er',
        'L1_HTT450er',
        'L1_HTT500er',
    ),
)

process.p = cms.Path(process.MssmHbb)

readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
process.source = cms.Source('PoolSource',
                            fileNames=readFiles,
                            secondaryFileNames=secFiles)
readFiles.extend([
    #   'root://cms-xrd-global.cern.ch//store/data/Run2017F/BTagCSV/MINIAOD/PromptReco-v1/000/305/112/00000/02368BFC-A6B4-E711-AC9A-02163E01A4CB.root',  # LS = [70,90]
    'root://cms-xrd-global.cern.ch//store/data/Run2017F/BTagCSV/MINIAOD/PromptReco-v1/000/305/112/00000/F4D1DB92-50B4-E711-A8D2-02163E012205.root',  # LS = [250,269]
])

secFiles.extend([])

## ============ JSON Certified data ===============   BE CAREFUL!!!
## Don't use with CRAB!!!
import FWCore.PythonUtilities.LumiList as LumiList
import FWCore.ParameterSet.Types as CfgTypes
process.source.lumisToProcess = CfgTypes.untracked(
    CfgTypes.VLuminosityBlockRange())
#JSONfile = 'json_305112_70to90.txt'
JSONfile = 'json_305112_250to269.txt'
myLumis = LumiList.LumiList(filename=JSONfile).getCMSSWString().split(',')
process.source.lumisToProcess.extend(myLumis)
Example #22
readFiles = cms.untracked.vstring()
readFiles.extend(files_to_process)

process = cms.Process("MITCMSOpenData")

process.load(
    'Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.GlobalTag.globaltag = 'GR_R_42_V25::All'

process.source = cms.Source("PoolSource", fileNames=readFiles)

process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(-1))

goodJSON = "file_paths/Cert_136033-149442_7TeV_Apr21ReReco_Collisions10_JSON_v2.txt"
myLumis = LumiList.LumiList(filename=goodJSON).getCMSSWString().split(',')
process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange()
process.source.lumisToProcess.extend(myLumis)

process.ak5PFJets = ak5PFJets.clone(doAreaFastjet=cms.bool(True))

process.kt6PFJetsForIsolation = kt4PFJets.clone(rParam=0.6, doRhoFastjet=True)

process.PFCandidateProducer = cms.EDProducer(
    "PFCandidateProducer",
    rho=cms.InputTag("kt6PFJets", "rho"),
    PFCandidateInputTag=cms.InputTag("particleFlow"),
    AK5PFInputTag=cms.InputTag("ak5PFJets"),
    mapFilename=cms.string(map_file_path),
    outputDir=cms.string(output_dir),
    primaryVertices=cms.InputTag("offlinePrimaryVertices"),
Example #23
process.TFileService = cms.Service('TFileService',
                                   fileName=cms.string(options.tupleOutput))

from AnalysisTools.Run.readFileList import *
if len(options.fileList) > 0:
    readFileList(process.source.fileNames, options.fileList,
                 options.fileNamePrefix)
elif len(options.inputFiles) > 0:
    addFilesToList(process.source.fileNames, options.inputFiles,
                   options.fileNamePrefix)
if options.maxEvents > 0:
    process.maxEvents.input = options.maxEvents

if len(options.lumiFile) > 0:
    import FWCore.PythonUtilities.LumiList as LumiList
    process.source.lumisToProcess = LumiList.LumiList(
        filename=options.lumiFile).getVLuminosityBlockRange()

if options.eventList != '':
    process.source.eventsToProcess = cms.untracked.VEventRange(
        re.split(',', options.eventList))

import RecoTauTag.RecoTau.tools.runTauIdMVA as tauIdConfig
updatedTauName = "slimmedTausNewID"
tauIdEmbedder = tauIdConfig.TauIDEmbedder(process,
                                          cms,
                                          updatedTauName=updatedTauName,
                                          toKeep=["deepTau2017v2"])
tauIdEmbedder.runTauID()

process.tauTupleProducer = cms.EDAnalyzer(
    'DeepTauTest',
Example #24
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.schedule = cms.Schedule()

process.MessageLogger.cerr.FwkReport.reportEvery = 100

process.source = cms.Source(
    "PoolSource",
    fileNames=cms.untracked.vstring(options.inputFiles),
    skipEvents=cms.untracked.uint32(options.skipEvents),
)

if options.lumiMask:
    lumiJSON = options.lumiMask
    if not os.path.exists(lumiJSON):
        raise IOError("Lumi mask file {} not found.".format(lumiJSON))
    lumiList = LumiList.LumiList(filename=lumiJSON)
    runs = lumiList.getRuns()
    lumisToProcess = CfgTypes.untracked(CfgTypes.VLuminosityBlockRange())
    lumisToProcess.extend(lumiList.getCMSSWString().split(','))
    process.source.lumisToProcess = lumisToProcess

process.maxEvents = cms.untracked.PSet(
    input=cms.untracked.int32(options.maxEvents))

#############################################################################
#    Make the analysis flow. It is assembled from a list of classes, each   #
#    of which adds related steps to the sequence.                           #
#############################################################################
FlowSteps = []

# everybody needs vertex cleaning
Example #25
if (isData):
    process.GlobalTag.globaltag = '80X_dataRun2_Prompt_v8'
else:
    process.GlobalTag.globaltag = '80X_mcRun2_asymptotic_2016_miniAODv2'

### LOAD DATABASE
from CondCore.DBCommon.CondDBSetup_cfi import *
#from CondCore.CondDB.CondDB_cfi import *

######## LUMI MASK
if isData and not options.isGrid and False:  ## don't load the lumiMask, it will be applied by CRAB
    #pass
    import FWCore.PythonUtilities.LumiList as LumiList
    ## SILVER
    process.source.lumisToProcess = LumiList.LumiList(
        filename=
        '/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions15/13TeV/Cert_246908-260627_13TeV_PromptReco_Collisions15_25ns_JSON_Silver_v2.txt'
    ).getVLuminosityBlockRange()
    print "FIX JSON"

### LOAD CONFIGURATION
process.load('PandaProd.Filter.infoProducerSequence_cff')
process.load('PandaProd.Filter.MonoXFilterSequence_cff')
process.load('PandaProd.Ntupler.PandaProd_cfi')

#-----------------------ELECTRON ID-------------------------------
from PandaProd.Ntupler.egammavid_cfi import *

initEGammaVID(process, options)

### ##ISO
process.load("RecoEgamma/PhotonIdentification/PhotonIDValueMapProducer_cfi")
Example #26
    def customize(self,process):
        self.parse()

        isFwlite = False
        hasOutput = False
        hasTFile = False
        if hasattr(process,"fwliteInput"):
            isFwlite = True
        if not isFwlite:
            hasOutput = hasattr(process,"out")            
            hasTFile = hasattr(process,"TFileService")
        
        if hasOutput and hasTFile:
            tfile = self.outputFile.replace(".root","_histos.root")
        else:
            tfile = self.outputFile
            
        if self.dryRun:
            import sys
            if self.dataset and self.dataset != "":
                name,xsec,totEvents,files,maxEvents = self.dataset
                if self.getMaxJobs:
                    print "maxJobs:%d" % ( min(len(files),self.nJobs) )                    
                if len(files) != 0:
                    if isFwlite:
                        print "hadd:%s" % self.outputFile
                    else:
                        if hasOutput:
                            print "edm:%s" % self.outputFile
                        if hasTFile or self.tfileOut:
                            print "hadd:%s" % tfile
                    ## sys.exit(0)
            else:
                sys.exit(1)
            

        files = self.inputFiles
        if self.dataset and self.dataset != "":
            dsetname,xsec,totEvents,files,maxEvents = self.dataset
            self.maxEvents = int(maxEvents)
            
            putarget = None
            samplepu = None
            if self.puTarget != "":
                putarget = map(float, self.puTarget.split(","))
                
            processId = self.getProcessId(dsetname)

            self.processIndex = self.options.processIndex
            self.processId = processId

            if ("itype" in xsec):
                for name,obj in process.__dict__.iteritems():
                    if hasattr(obj, "sampleIndex"):
                        obj.sampleIndex = xsec["itype"]

            
            isdata = self.processType == "data"
            if isdata or self.targetLumi > 0. or putarget:
                ## look for analyzers which have lumiWeight as attribute
                for name,obj in process.__dict__.iteritems():
                    
                    if hasattr(obj,"lumiWeight"):
                        if  isdata:
                            obj.lumiWeight = 1.
                        else:
                            wei = xsec["xs"]/float(totEvents)*self.targetLumi
                            wei *= xsec.get("br",1.)
                            wei *= xsec.get("kf",1.)
                            obj.lumiWeight = wei

                    if hasattr(obj,"intLumi"):
                        if isdata:
                            obj.intLumi = 0  # should not be used in final fits;
                            # setting it to 0 will cause an error if someone
                            # tries to use it for normalization downstream
                        else:
                            obj.intLumi=self.targetLumi

                    if putarget and not isdata:
                        puObj = None
                        if hasattr(obj,"puReWeight"):
                            puObj = obj
                        elif hasattr(obj,"globalVariables") and hasattr(obj.globalVariables,"puReWeight"):
                            puObj = obj.globalVariables
                        if puObj:
                            if not samplepu:
                                matches = filter(lambda x: x in dsetname, self.pu_distribs.keys() )
                                print matches
                                if len(matches) > 1:
                                    matches = filter(lambda x: x == dsetname, matches)
                                if len(matches) != 1:
                                    raise Exception("Could not determine sample pu distribution for reweighting. Possible matches are [%s]. Selected [%s]\n dataset: %s" % 
                                                ( ",".join(self.pu_distribs.keys()), ",".join(matches), dsetname ) )
                                samplepu = self.pu_distribs[matches[0]]
                            puObj.puReWeight = True
                            puObj.puBins = cms.vdouble( map(float, samplepu.probFunctionVariable) )
                            puObj.mcPu   = samplepu.probValue
                            ## puObj.mcPu   = [0.00023481895458601418, 0.0007044568637580425, 0.0020664068003569246, 0.005353872164561123, 0.01009721504719861, 0.016155544075517777, 0.02385760578593904, 0.03289813553750059, 0.04172732822993472, 0.0496876907904006, 0.05544075517775795, 0.06079462734231907, 0.06027802564222984, 0.06499788662940872, 0.06401164702014747, 0.06443432113840229, 0.06196872211524914, 0.05692011459164984, 0.0529986380500634, 0.044991311698680314, 0.04283097731648899, 0.03613863711078758, 0.03116047527356408, 0.024327243695111068, 0.02115718780819988, 0.017940168130371484, 0.01425351054337106, 0.011153900342835674, 0.008641337528765322, 0.005870473864650355, 0.004696379091720284, 0.0033344291551214013, 0.0027238998731977646, 0.0021133705912741276, 0.0010332034001784623, 0.0011036490865542667, 0.0003757103273376227, 0.00035222843187902126, 0.00023481895458601418, 0.0003991922227962241, 0.00016437326821020992, 0.0001408913727516085, 4.6963790917202836e-05, 7.044568637580425e-05, 4.6963790917202836e-05, 2.3481895458601418e-05, 4.6963790917202836e-05,4.6963790917202836e-05,4.6963790917202836e-05,4.6963790917202836e-05,4.6963790917202836e-05,4.6963790917202836e-05]
                            ## puObj.mcPu   = [2.0983716635890548e-05, 0.0001259022998153433, 0.0009022998153432936, 0.002245257680040289, 0.00574953835823401, 0.009904314252140339, 0.016703038442168878, 0.02400537183145879, 0.031979184153097195, 0.04114906832298137, 0.047737955346651, 0.056068490851099544, 0.05936293436293436, 0.06339180795702534, 0.06712690951821386, 0.06563706563706563, 0.06263639415813328, 0.06123048514352862, 0.05585865368474064, 0.052522242739634045, 0.04746516703038442, 0.04247104247104247, 0.035462481114655026, 0.03017458452241061, 0.025474231995971125, 0.021277488668793018, 0.018108947456773543, 0.012821050864529126, 0.011226288400201444, 0.008519388954171562, 0.00612724525768004, 0.004238710760449891, 0.004196743327178109, 0.002371159979855632, 0.0017206647641430251, 0.0012170555648816517, 0.0006714789323484975, 0.0006085277824408259, 0.00039869061608192044, 0.00031475574953835825, 0.0002518045996306866, 6.295114990767164e-05, 0.00014688601645123384, 6.295114990767164e-05, 6.295114990767164e-05, 4.1967433271781096e-05, 2.0983716635890548e-05, 4.1967433271781096e-05, 4.1967433271781096e-05, 2.0983716635890548e-05, 2.0983716635890548e-05, 2.0983716635890548e-05]
                            puObj.dataPu = cms.vdouble(putarget)
                            puObj.useTruePu = cms.bool(True)
                        
                    
            for name,obj in process.__dict__.iteritems():
                if hasattr(obj,"processId"):
                    obj.processId = str(processId)

            for name,obj in process.__dict__.iteritems():
                if hasattr(obj,"processIndex"):
                    obj.processIndex = int(self.processIndex)
            
            if isdata and self.lumiMask != "":
                if isFwlite:
                    sys.exit("Lumi mask not supported in FWlite")

                import FWCore.PythonUtilities.LumiList as LumiList
                process.source.lumisToProcess = LumiList.LumiList(filename = self.lumiMask).getVLuminosityBlockRange()
                
            
        flist = []
        for f in files:
            if len(f.split(":",1))>1:
                flist.append(str(f))
            else:
                flist.append(str("%s%s" % (self.filePrepend,f)))
        if len(flist) > 0:
            ## fwlite
            if isFwlite:
                ## process.fwliteInput.fileNames.extend([ str("%s%s" % (self.filePrepend,f)) for f in  files])
                process.fwliteInput.fileNames = flist
            ## full framework
            else:
                ## process.source.fileNames.extend([ str("%s%s" % (self.filePrepend,f)) for f in  files])
                process.source.fileNames = flist
 
        ## fwlite
        if isFwlite:
            process.fwliteInput.maxEvents = self.maxEvents
            process.fwliteOutput.fileName = self.outputFile
        ## full framework
        else:
            process.maxEvents.input = self.maxEvents
            
            if hasOutput:
                process.out.fileName = self.outputFile

            if hasTFile:
                process.TFileService.fileName = tfile
    
        if self.tfileOut:
            if hasTFile:
                print "Could not run with both TFileService and custom tfileOut"
                sys.exit(-1)
            name,attr = self.tfileOut
            setattr( getattr( process, name ), attr, tfile )
            

        if self.dumpPython != "":
            from gzip import open
            pyout = open("%s.gz" % self.dumpPython,"w+")
            pyout.write( process.dumpPython() )
            pyout.close()
from Configuration.AlCa.GlobalTag import GlobalTag
process.load(
    "Configuration.StandardSequences.FrontierConditions_GlobalTag_condDBv2_cff"
)
process.GlobalTag = GlobalTag(
    process.GlobalTag, '92X_upgrade2017_realistic_Candidate_forECALStudies',
    '')

# input
process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(100))
outputFile = "electron_ntuple_2017F_MINIAOD.root"
process.source = cms.Source(
    "PoolSource", fileNames=cms.untracked.vstring('file:step3_2017F.root'))
import FWCore.PythonUtilities.LumiList as LumiList
process.source.lumisToProcess = LumiList.LumiList(
    filename='goodList2017F.json').getVLuminosityBlockRange()

process.ntupler = cms.EDAnalyzer(
    'ElectronPlots',
    beamSpot=cms.InputTag('offlineBeamSpot'),
    genEventInfoProduct=cms.InputTag('generator'),
    electrons=cms.InputTag("gedGsfElectrons"),
    #    electrons    = cms.InputTag("slimmedElectrons"),
    genParticles=cms.InputTag("genParticles"),
    vertices=cms.InputTag("offlinePrimaryVertices"),
    #    vertices     = cms.InputTag("offlineSlimmedPrimaryVertices"),
    conversions=cms.InputTag('allConversions'),
    isMC=cms.bool(True))

process.TFileService = cms.Service("TFileService",
                                   fileName=cms.string(outputFile))
        "keep *_dedxHitInfo*_*_*",
        "keep triggerTriggerEvent_hltTriggerSummaryAOD_*_*",
        "keep *_offlineBeamSpot_*_*",
        "keep *_MuonSegmentProducer_*_*",
        "keep *_g4SimHits_StoppedParticles*_*",
        "keep PileupSummaryInfos_addPileupInfo_*_*",
        "keep *_dt4DSegments__*",
        "keep *_cscSegments__*",
    ),
    fileName=cms.untracked.string(OUTPUTFILE),
    SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring('*')),
)

if (isData and len(LUMITOPROCESS) > 0):
    import FWCore.PythonUtilities.LumiList as LumiList
    process.source.lumisToProcess = LumiList.LumiList(
        filename=LUMITOPROCESS).getVLuminosityBlockRange()

if (isBckg or isData):
    process.Out.SelectEvents.SelectEvents = cms.vstring(
        'HSCPTuplePath')  #take just the skimmed ones
    process.Out.outputCommands.extend(
        ["drop triggerTriggerEvent_hltTriggerSummaryAOD_*_*"])
else:
    process.Out.SelectEvents = cms.untracked.PSet()

########################################################################

#schedule the sequence
process.endPath1 = cms.EndPath(process.Out)
process.schedule = cms.Schedule(process.HSCPTuplePath, process.endPath1)
Example #29
    JERdown=cms.bool(False))

#######################################################
#
# Input files
#
process.inputs = cms.PSet(
    nEvents=cms.int32(-1),
    skipEvents=cms.int32(0),
    lumisToProcess=CfgTypes.untracked(CfgTypes.VLuminosityBlockRange()),
    fileNames=cms.vstring(
        'file:/data1/avetisya/temp/T53T53_750_Summer12_8TeV_PAT_1.root'))

# JSON
JsonFile = '/data1/speer/tblsm/cmssw/CMSSW_5_3_3/src/LJMet/Com/data/json/Cert_190456-202016_8TeV_PromptReco_Collisions12_JSON_MuonPhys.txt'
myList = LumiList.LumiList(filename=JsonFile).getCMSSWString().split(',')
if not process.ljmet.isMc:
    process.inputs.lumisToProcess.extend(myList)

#######################################################
#
# Output
#
process.outputs = cms.PSet(
    outputName=cms.string('ljmet_tree'),
    treeName=cms.string('ljmet'),
)

#######################################################
#
# Object selector options
Example #30
    headerPart1=cms.FileInPath(
        "heavyNeutrino/multilep/data/header/soviet.txt"),
    headerPart2=cms.FileInPath("heavyNeutrino/multilep/data/header/text.txt"),
)


def getJSON(is2017, is2018):
    if is2018:
        return "Cert_314472-325175_13TeV_17SeptEarlyReReco2018ABC_PromptEraD_Collisions18_JSON.txt"
    elif is2017:
        return "Cert_294927-306462_13TeV_EOY2017ReReco_Collisions17_JSON_v1.txt"
    else:
        return "Cert_271036-284044_13TeV_23Sep2016ReReco_Collisions16_JSON.txt"


if isData:
    print('Sample is found to be 20%s data, will process using %s' %
          (yy, getJSON(is2017, is2018)))
    import FWCore.PythonUtilities.LumiList as LumiList
    jsonDir = os.path.expandvars(
        '$CMSSW_BASE/src/heavyNeutrino/multilep/data/JSON')
    process.source.lumisToProcess = LumiList.LumiList(filename=os.path.join(
        jsonDir, getJSON(is2017, is2018))).getVLuminosityBlockRange()

process.p = cms.Path(
    process.goodOfflinePrimaryVertices * process.egammaPostRecoSeq *
    process.pileupJetIdUpdated * process.jetSequence *
    process.fullPatMetSequence * process.prefiringweight *
    process.particleLevelSequence * process.rerunMvaIsolationSequence *
    getattr(process, updatedTauName) * process.blackJackAndHookers)