def dqmIOSource(args):
    """Build the input source module for a DQM harvesting job.

    Returns a DQMRootSource when the ``newDQMIO`` flag is set in *args*,
    otherwise a classic PoolSource; in both cases the fileNames list
    starts out empty (untracked).
    """
    import FWCore.ParameterSet.Config as cms
    source_type = "DQMRootSource" if args.get("newDQMIO", False) else "PoolSource"
    return cms.Source(source_type, fileNames=cms.untracked(cms.vstring()))
def dqmIOSource(args):
    """Return the cms.Source used to read DQM harvesting input.

    The ``newDQMIO`` entry of *args* selects the dedicated DQMRootSource
    reader; the default is PoolSource. The file list is created empty.
    """
    import FWCore.ParameterSet.Config as cms
    if args.get('newDQMIO', False):
        reader = "DQMRootSource"
    else:
        reader = "PoolSource"
    return cms.Source(reader, fileNames=cms.untracked(cms.vstring()))
def alcaHarvesting(self, globalTag, datasetName, **args):
    """
    _alcaHarvesting_

    Proton collisions data taking AlCa Harvesting

    Builds and returns an ALCAHARVEST cms.Process for the skims listed
    in ``args['skims']``; returns None when no skims were requested.
    """
    # Idiomatic membership test ('x not in d' instead of 'not x in d').
    if 'skims' not in args:
        return None

    options = defaultOptions
    # Subclasses may pin the ConfigBuilder scenario via a 'cbSc' attribute.
    options.scenario = self.cbSc if hasattr(self, 'cbSc') else self.__class__.__name__
    options.step = "ALCAHARVEST:" + ('+'.join(args['skims']))
    options.name = "ALCAHARVEST"
    options.conditions = globalTag

    process = cms.Process("ALCAHARVEST")
    process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName

    return process
def alcaHarvesting(self, globalTag, datasetName, **args):
    """
    _alcaHarvesting_

    Proton collisions data taking AlCa Harvesting

    Returns an ALCAHARVEST cms.Process configured for the requested
    skims, or None if no skims were passed in **args.
    """
    # Use the idiomatic 'not in' membership test.
    if 'skims' not in args:
        return None

    options = defaultOptions
    # Scenario comes from the subclass 'cbSc' attribute when present.
    options.scenario = self.cbSc if hasattr(
        self, 'cbSc') else self.__class__.__name__
    options.step = "ALCAHARVEST:" + ('+'.join(args['skims']))
    options.name = "ALCAHARVEST"
    options.conditions = globalTag

    process = cms.Process("ALCAHARVEST")
    process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName

    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    DQM Harvesting for RelVal MC production
    """
    options = defaultOptions
    options.scenario = "pp"
    options.step = "HARVESTING:validationHarvestingFS"
    options.isMC = True
    options.isData = False
    options.beamspot = None
    options.name = "EDMtoMEConvert"
    options.conditions = globalTag

    process = cms.Process("HARVESTING")
    process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName

    # dict.has_key() is Python-2-only; the 'in' operator works everywhere.
    if 'referenceFile' in args and args.get('referenceFile', ''):
        process.DQMStore.referenceFileName = \
            cms.untracked.string(args['referenceFile'])

    return process
def alcaHarvesting(self, globalTag, datasetName, **args):
    """
    _alcaHarvesting_

    Proton collisions data taking AlCa Harvesting
    """
    # Requested skims; an 'alcapromptdataset' contributes an '@<name>' skim.
    skims = args.get('skims', [])
    if 'alcapromptdataset' in args:
        skims.append('@' + args['alcapromptdataset'])
    # Nothing to harvest without at least one skim.
    if not skims:
        return None

    options = defaultOptions
    options.scenario = getattr(self, 'cbSc', self.__class__.__name__)
    options.step = "ALCAHARVEST:" + '+'.join(skims)
    options.name = "ALCAHARVEST"
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process("ALCAHARVEST", self.eras)
    process.source = cms.Source("PoolSource")

    if 'customs' in args:
        options.customisation_file = args['customs']

    cfg_builder = ConfigBuilder(options, process=process)
    cfg_builder.prepare()

    # Harvest by run/lumi from an initially empty file list, all events.
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName

    return process
def alcaHarvesting(self, globalTag, datasetName, **args):
    """
    _alcaHarvesting_

    Heavy-ion collisions data taking AlCa Harvesting
    """
    options = defaultOptions
    options.scenario = "HeavyIons"
    # Harvest the two beam-spot AlCa producers: by run and by lumi.
    options.step = "ALCAHARVEST:BeamSpotByRun+BeamSpotByLumi"
    options.isMC = False
    options.isData = True
    options.beamspot = None
    options.eventcontent = None
    options.name = "ALCAHARVEST"
    options.conditions = globalTag
    options.arguments = ""
    options.evt_type = ""
    options.filein = []

    process = cms.Process("ALCAHARVEST")
    process.source = cms.Source("PoolSource")
    builder = ConfigBuilder(options, process=process)
    builder.prepare()

    # Customise the prepared process for this particular job: run/lumi
    # processing mode, an initially empty file list, and no event limit.
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName

    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    DQM Harvesting for RelVal GEN production
    """
    options = defaultOptions
    options.scenario = "pp"
    options.step = "HARVESTING:genHarvesting"
    options.isMC = True
    options.isData = False
    options.beamspot = None
    options.eventcontent = None
    options.name = "EDMtoMEConvert"
    options.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    options.arguments = ""
    options.evt_type = ""
    options.filein = []
    options.harvesting = "AtJobEnd"

    process = cms.Process("HARVESTING", self.eras)
    process.source = cms.Source("PoolSource")
    builder = ConfigBuilder(options, process=process)
    builder.prepare()

    # Job-specific customisation: run/lumi mode, empty input list, all events.
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName

    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    DQM Harvesting for RelVal MC production
    """
    options = defaultOptions
    options.scenario = "pp"
    options.step = "HARVESTING:validationHarvestingFS"
    options.isMC = True
    options.isData = False
    options.beamspot = None
    options.name = "EDMtoMEConvert"
    options.conditions = globalTag

    process = cms.Process("HARVESTING")
    process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName

    # dict.has_key() was removed in Python 3; use the 'in' operator.
    if 'referenceFile' in args and args.get('referenceFile', ''):
        process.DQMStore.referenceFileName = \
            cms.untracked.string(args['referenceFile'])

    return process
def apply(json, source):
    '''
    Takes a pool source module configuration and a filename of a JSON
    file and extends the configuration such that the pool source
    filters on the JSON file.
    '''
    # NOTE(review): the parameter name 'json' shadows the stdlib module.
    lumis = LumiList.LumiList(filename=json).getCMSSWString().split(',')
    # Create the (untracked) lumi-mask parameter on first use, then
    # append the ranges parsed from the JSON file.
    if not hasattr(source, 'lumisToProcess'):
        source.lumisToProcess = cms.untracked(cms.VLuminosityBlockRange())
    source.lumisToProcess.extend(lumis)
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    build a DQM Harvesting configuration

    this method can be used to test an extra scenario, all the
    ConfigBuilder options can be overwritten by using **args. This will
    be useful for testing with real jobs.

    Arguments:

    datasetName - aka workflow name for DQMServer, this is the name of
                  the dataset containing the harvested run
    runNumber   - The run being harvested
    globalTag   - The global tag being used
    inputFiles  - The list of LFNs being harvested
    """
    options = defaultOptions
    options.scenario = "cosmics"
    options.step = "HARVESTING:dqmHarvesting"
    options.isMC = False
    options.isData = True
    options.beamspot = None
    options.eventcontent = None
    options.name = "EDMtoMEConvert"
    options.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    options.arguments = ""
    options.evt_type = ""
    options.filein = []
    # Allow callers to override any ConfigBuilder option via **args.
    options.__dict__.update(args)

    process = cms.Process("HARVESTING")
    process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName

    # dict.has_key() is Python-2-only; use the 'in' operator instead.
    if 'saveByLumiSection' in args and args.get('saveByLumiSection', ''):
        process.dqmSaver.saveByLumiSection = int(args['saveByLumiSection'])
    if 'referenceFile' in args and args.get('referenceFile', ''):
        process.DQMStore.referenceFileName = \
            cms.untracked.string(args['referenceFile'])

    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    build a DQM Harvesting configuration

    this method can be used to test an extra scenario, all the
    ConfigBuilder options can be overwritten by using **args. This will
    be useful for testing with real jobs.

    Arguments:

    datasetName - aka workflow name for DQMServer, this is the name of
                  the dataset containing the harvested run
    runNumber   - The run being harvested
    globalTag   - The global tag being used
    inputFiles  - The list of LFNs being harvested
    """
    options = defaultOptions
    options.scenario = "cosmics"
    options.step = "HARVESTING:dqmHarvesting"
    options.isMC = False
    options.isData = True
    options.beamspot = None
    options.eventcontent = None
    options.name = "EDMtoMEConvert"
    options.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    options.arguments = ""
    options.evt_type = ""
    options.filein = []
    # Any ConfigBuilder option may be overridden through **args.
    options.__dict__.update(args)

    process = cms.Process("HARVESTING")
    process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName

    # dict.has_key() was removed in Python 3; use the 'in' operator.
    if 'saveByLumiSection' in args and args.get('saveByLumiSection', ''):
        process.dqmSaver.saveByLumiSection = int(args['saveByLumiSection'])
    if 'referenceFile' in args and args.get('referenceFile', ''):
        process.DQMStore.referenceFileName = \
            cms.untracked.string(args['referenceFile'])

    return process
def handlePerformanceSettings(self):
    """
    _handlePerformanceSettings_

    Install the standard performance report services
    """
    import FWCore.ParameterSet.Config as PSetConfig

    # Default memory and timing report services for every job.
    for service_name in ("SimpleMemoryCheck", "Timing"):
        self.process.add_(PSetConfig.Service(service_name))
    # Keep the timing report compact (summary only).
    self.process.Timing.summaryOnly = PSetConfig.untracked(PSetConfig.bool(True))
def vinputtagize(l, untracked_tags=False, untracked_vector=True):
    """Convert *l* (a tag spec or a list of tag specs) to a cms.VInputTag.

    Each entry may be a tuple of InputTag constructor arguments or a
    single value; ``untracked_tags`` selects untracked element tags and
    ``untracked_vector`` wraps the resulting vector as untracked.
    """
    # isinstance() is the idiomatic type check (also accepts subclasses).
    if not isinstance(l, list):
        l = [l]
    it = cms.untracked.InputTag if untracked_tags else cms.InputTag
    tags = []
    for t in l:
        # Wrap bare values so they unpack as a single constructor argument.
        if not isinstance(t, tuple):
            t = (t,)
        tags.append(it(*t))
    tags = cms.VInputTag(*tags)
    if untracked_vector:
        tags = cms.untracked(tags)
    return tags
def vinputtagize(l, untracked_tags=False, untracked_vector=True):
    """Build a cms.VInputTag from a tag specification or list of them.

    Entries may be tuples of InputTag constructor arguments or single
    values. ``untracked_tags`` makes each element untracked;
    ``untracked_vector`` makes the vector itself untracked.
    """
    # Prefer isinstance() over type comparison (handles subclasses).
    if not isinstance(l, list):
        l = [l]
    it = cms.untracked.InputTag if untracked_tags else cms.InputTag
    tags = []
    for t in l:
        if not isinstance(t, tuple):
            t = (t, )
        tags.append(it(*t))
    tags = cms.VInputTag(*tags)
    if untracked_vector:
        tags = cms.untracked(tags)
    return tags
def handlePerformanceSettings(self):
    """
    _handlePerformanceSettings_

    Install the standard performance report services
    """
    import FWCore.ParameterSet.Config as PSetConfig

    # The memory report can be redirected to the job report only, when
    # the step's command requests a silent memory check.
    silent = getattr(self.step.data.application.command,
                     'silentMemoryCheck', False)
    if silent:
        mem_check = PSetConfig.Service(
            "SimpleMemoryCheck",
            jobReportOutputOnly=PSetConfig.untracked.bool(True))
    else:
        mem_check = PSetConfig.Service("SimpleMemoryCheck")
    self.process.add_(mem_check)

    # CPU and timing reports are always installed.
    self.process.add_(PSetConfig.Service("CPU"))
    self.process.add_(PSetConfig.Service("Timing"))
    self.process.Timing.summaryOnly = PSetConfig.untracked(PSetConfig.bool(True))
def handlePerformanceSettings(self):
    """
    _handlePerformanceSettings_

    Install the standard performance report services
    """
    # Silent mode sends the memory report to the job report only.
    silent = getattr(self.step.data.application.command,
                     'silentMemoryCheck', False)
    memory_check = (cms.Service("SimpleMemoryCheck",
                                jobReportOutputOnly=cms.untracked.bool(True))
                    if silent else cms.Service("SimpleMemoryCheck"))
    self.process.add_(memory_check)

    # Always-on CPU and timing reports.
    for svc in ("CPU", "Timing"):
        self.process.add_(cms.Service(svc))
    self.process.Timing.summaryOnly = cms.untracked(cms.bool(True))
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Heavy-ion collisions data taking DQM Harvesting
    """
    options = defaultOptions
    options.scenario = "HeavyIons"
    options.step = "HARVESTING:dqmHarvesting"
    options.isMC = False
    options.isData = True
    options.beamspot = None
    options.eventcontent = None
    options.name = "EDMtoMEConvert"
    options.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    options.arguments = ""
    options.evt_type = ""
    options.filein = []

    process = cms.Process("HARVESTING")
    # New-style DQMIO input requires the dedicated DQMRootSource reader.
    if args.get('newDQMIO', False):
        process.source = cms.Source("DQMRootSource")
    else:
        process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName
    process.dqmSaver.saveByLumiSection = 1

    # dict.has_key() is Python-2-only; the 'in' operator works everywhere.
    if 'referenceFile' in args and args.get('referenceFile', ''):
        process.DQMStore.referenceFileName = \
            cms.untracked.string(args['referenceFile'])

    return process
def alcaHarvesting(self, globalTag, datasetName, **args):
    """
    _alcaHarvesting_

    Proton collisions data taking AlCa Harvesting
    """
    # Gather requested skims; a prompt dataset adds an '@<name>' entry.
    skims = args.get('skims', [])
    if 'alcapromptdataset' in args:
        skims.append('@' + args['alcapromptdataset'])
    if not skims:
        return None

    options = defaultOptions
    options.scenario = getattr(self, 'cbSc', self.__class__.__name__)
    options.step = "ALCAHARVEST:" + '+'.join(skims)
    options.name = "ALCAHARVEST"
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process("ALCAHARVEST", self.eras)
    process.source = cms.Source("PoolSource")

    if 'customs' in args:
        options.customisation_file = args['customs']

    builder = ConfigBuilder(options, process=process)
    builder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName

    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    DQM Harvesting for RelVal GEN production
    """
    options = defaultOptions
    options.scenario = "pp"
    options.step = "HARVESTING:genHarvesting"
    options.isMC = True
    options.isData = False
    options.beamspot = None
    options.eventcontent = None
    options.name = "EDMtoMEConvert"
    options.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    options.arguments = ""
    options.evt_type = ""
    options.filein = []
    options.harvesting = "AtJobEnd"

    process = cms.Process("HARVESTING", self.eras)
    process.source = cms.Source("PoolSource")
    builder = ConfigBuilder(options, process=process)
    builder.prepare()

    # Customise the prepared process: harvest runs/lumis from an
    # initially empty file list with no event limit.
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName

    # Optional DQM reference histogram file (empty/missing means none).
    if args.get('referenceFile', ''):
        process.DQMStore.referenceFileName = \
            cms.untracked.string(args['referenceFile'])

    return process
# Standalone cmsRun configuration exercising the testEcalClusterTools
# analyzer on a 2018 EGamma AOD file (10 events).
import FWCore.ParameterSet.Config as cms
from Configuration.AlCa.GlobalTag import GlobalTag

process = cms.Process("test")

# Standard geometry, message logging and conditions setup.
process.load("Configuration.StandardSequences.GeometryDB_cff")
process.load("FWCore.MessageService.MessageLogger_cfi")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")

input_files = cms.vstring("/store/data/Run2018A/EGamma/AOD/17Sep2018-v2/100000/01EB9686-9A6F-BF48-903A-02F7D9AEB9B9.root")

# Auto-resolved Run-2 data global tag.
process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:run2_data', '')

process.source = cms.Source("PoolSource", fileNames = cms.untracked( input_files ) )
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32( 10 ) )

# Analyzer under test: reads the reduced rec-hit and basic-cluster collections.
process.testEcalClusterTools = cms.EDAnalyzer("testEcalClusterTools",
    barrelRecHitCollection = cms.InputTag("reducedEcalRecHitsEB"),
    endcapRecHitCollection = cms.InputTag("reducedEcalRecHitsEE"),
    barrelClusterCollection = cms.InputTag("hybridSuperClusters:hybridBarrelBasicClusters"),
    endcapClusterCollection = cms.InputTag("multi5x5SuperClusters:multi5x5EndcapBasicClusters")
)

process.p1 = cms.Path( process.testEcalClusterTools )
#bring in the cms configuration classes import FWCore.ParameterSet.Config as cms process = cms.Process('hlt_optimizer') process.analysis = cms.EDAnalyzer('hlt_optimizer') process.source = cms.Source("PoolSource", fileNames = cms.untracked(cms.vstring('file:/afs/cern.ch/user/d/deguio/scratch0/CMSSW_1_3_1/src/HLTrigger/xchannel/test/EMuon-output.root')), maxEvents = cms.untracked(cms.int32(10)) ) process.p = cms.Path( process.analysis ) process.add_(cms.Service("MessageLogger")) process.dumpConfig()
# NOTE(review): this chunk continues a statement that begins out of view —
# the lone ')' below closes it.
)

# Limit forking: at most 3 child processes, 2 sequential events per child.
process.options = cms.untracked.PSet(multiProcesses=cms.untracked.PSet(
    maxChildProcesses=cms.untracked.int32(3),
    maxSequentialEventsPerChild=cms.untracked.uint32(2)))
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring("file:multiprocess_oneRun_test.root"),
    skipEvents = cms.untracked.uint32(6))

# Build the expected EventID sequence: runs of 100 events, starting from
# event 7 of run 100 because the first 6 events are skipped.
ids = cms.VEventID()
numberOfEventsInRun = 0
numberOfEventsPerRun = 100
run = 100
event=6
# NOTE(review): xrange is Python-2-only.
for i in xrange(20):
    numberOfEventsInRun +=1
    event += 1
    if numberOfEventsInRun > numberOfEventsPerRun:
        numberOfEventsInRun=1
        run += 1
        event = 1
    ids.append(cms.EventID(run,event))

# Checker compares the actual event sequence with the precomputed one.
process.check = cms.EDAnalyzer("EventIDChecker",
    eventSequence = cms.untracked(ids),
    multiProcessSequentialEvents = process.options.multiProcesses.maxSequentialEventsPerChild)
process.print1 = cms.OutputModule("AsciiOutputModule")
process.p = cms.EndPath(process.check+process.print1)
# Framework test chunk: EmptySource generates 20 events in runs of 5,
# and EventIDChecker verifies the run/event sequence.
process.load("FWCore.Framework.test.cmsExceptionsFatal_cff")

process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(20))

process.source = cms.Source("EmptySource",
                            firstRun=cms.untracked.uint32(100),
                            numberEventsInRun=cms.untracked.uint32(5),
                            firstTime=cms.untracked.uint32(1000),
                            timeBetweenEvents=cms.untracked.uint32(10))

# Precompute the EventID sequence the source is expected to produce.
ids = cms.VEventID()
numberOfEventsInRun = 0
numberOfEventsPerRun = process.source.numberEventsInRun.value()
run = process.source.firstRun.value()
event = 0
# xrange() is Python-2-only; range() behaves identically here.
for i in range(process.maxEvents.input.value()):
    numberOfEventsInRun += 1
    event += 1
    if numberOfEventsInRun > numberOfEventsPerRun:
        numberOfEventsInRun = 1
        run += 1
        event = 1
    ids.append(cms.EventID(run, event))

process.check = cms.EDAnalyzer("EventIDChecker",
                               eventSequence=cms.untracked(ids))
process.print1 = cms.OutputModule("AsciiOutputModule")
process.p = cms.EndPath(process.check + process.print1)
# NOTE(review): fragment of a larger method — 'self' and 'process' are
# defined out of view.
if self.useLazyDownload == True:
    logging.debug("Lazy downloads ENABLED.")
    import FWCore.ParameterSet.Config as cms
    # lazy-download caches remote reads; readHint auto-detects access pattern
    process.AdaptorConfig = cms.Service("AdaptorConfig",
                                        cacheHint = cms.untracked.string("lazy-download"),
                                        readHint = cms.untracked.string("auto-detect"))
else:
    logging.debug("Lazy downloads DISABLED.")
    import FWCore.ParameterSet.Config as cms
    process.AdaptorConfig = cms.Service("AdaptorConfig",
                                        cacheHint = cms.untracked.string("application-only"),
                                        readHint = cms.untracked.string("direct-unbuffered"))

# RECO output module; "NOTSET" file names are presumably replaced later —
# TODO confirm against the caller.
process.RECO = cms.OutputModule("PoolOutputModule")
process.RECO.dataset = cms.untracked(cms.PSet())
process.RECO.dataset.dataTier = cms.untracked(cms.string("RECO"))
process.RECO.fileName = cms.untracked.string("NOTSET")
process.RECO.logicalFileName = cms.untracked.string("NOTSET")
process.RECO.fastCloning = cms.untracked.bool(False)
process.outpath = cms.EndPath(process.RECO)

# Bookkeeping metadata identifying this auto-generated prompt-reco config.
configName = "prompt-reco-config"
configVersion = "%s-%s-%s" % (self.cmssw["CMSSWVersion"], self.run, self.primaryDataset)
configAnnot = "auto generated prompt reco config"
process.configurationMetadata = CmsTypes.untracked(CmsTypes.PSet())
process.configurationMetadata.name = CmsTypes.untracked(CmsTypes.string(configName))
process.configurationMetadata.version = CmsTypes.untracked(CmsTypes.string(configVersion))
process.configurationMetadata.annotation = CmsTypes.untracked(CmsTypes.string(configAnnot))
# Python-2 print statement: show the configured event selection.
print process.source.eventsToProcess

#-------------------------------------------------------------------------------
# LumiMask
#
# Optionally restrict processing to the luminosity sections listed in the
# JSON file given via options.useLumi.
if options.useLumi:
    lumis = LumiList.LumiList(
        filename=options.useLumi).getCMSSWString().split(',')
    # print lumis
    process.source.lumisToProcess = cms.untracked(cms.VLuminosityBlockRange())
    # print process.source.lumisToProcess
    process.source.lumisToProcess.extend(lumis)
    print process.source.lumisToProcess
#-------------------------------------------------------------------------------

process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(
    options.maxEvents), )

# Empty path created here; NOTE(review): presumably populated further down
# the script (out of view) — confirm.
process.pLep = cms.Path()

#--------------------------------------------------------------------
# (ascii section banner in the original; the section continues out of view)
# bring in the cms configuration classes import FWCore.ParameterSet.Config as cms process = cms.Process("Dump") process.extend(cms.include("FWCore/MessageLogger/data/MessageLogger.cfi")) process.analysis = cms.EDAnalyzer("ditausAnalysis") # process.analysis.srcLabel = cms.string('source') # evenctual output file, now hard-coded # process.analysis.rootfile = cms.untracked(cms.string('miniTree.root')) # eventual bad runs: uncomment and fill as you want # bad = [ 1, 2] bad = [] input_files = cms.vstring() # loop from i = 1 to i < 100 for i in range(1, 100): if i not in bad: input_files.append("rfio:/castor/cern.ch/user/d/deguio/HLT/Z_DiElectron_HLT/Z_DiElectron_HLT_%d.root" % i) process.source = cms.Source("PoolSource", fileNames=cms.untracked(input_files), maxEvents=cms.untracked(cms.int32(-1))) process.p = cms.Path(process.analysis) ofile = open("last_config_dump.log", "w") ofile.write(process.dumpConfig()) ofile.close()
# Restrict to the explicit event ranges from the command-line options.
process.source.eventsToProcess = cms.untracked.VEventRange (options.eventsToProcess)
# Python-2 print statement (this script predates Python 3).
print process.source.eventsToProcess

#-------------------------------------------------------------------------------
# LumiMask
#
# Optionally restrict processing to the luminosity sections listed in the
# JSON file given via options.useLumi.
if options.useLumi:
    lumis = LumiList.LumiList(filename = options.useLumi ).getCMSSWString().split(',')
    # print lumis
    process.source.lumisToProcess = cms.untracked(cms.VLuminosityBlockRange())
    # print process.source.lumisToProcess
    process.source.lumisToProcess.extend(lumis)
    print process.source.lumisToProcess
#-------------------------------------------------------------------------------

process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32 (options.maxEvents),
)

# Empty path; NOTE(review): presumably filled further down the script
# (out of view) — confirm.
process.pLep = cms.Path()

#---------------------------------------------------------
# (ascii section banner in the original; the section continues out of view)
# NOTE(review): fragment — the first two statements below are the tail of a
# path-search helper whose 'def' is out of view; the rest is script code.
if os.path.exists(t): return t

# Resolve the golden JSON path by searching upward from the given name.
if opts.goldenjson and find_up(opts.goldenjson):
    goldenjson = find_up(opts.goldenjson)

# if we filter in the process.source, then the events are just skipped
# so we use a custom lumiFilter to skip *after* the EventMaker to keep
# total event counts in agreement with DBS, but also have evt_event,run,lumiBlock
# for babymakers to filter
skip_event = False

import FWCore.PythonUtilities.LumiList as LumiList
# JSONfile = "Cert_314472-325175_13TeV_PromptReco_Collisions18_JSON.txt"
lumilist = LumiList.LumiList(filename=goldenjson).getCMSSWString().split(',')
print("Found json list of lumis to process with {} lumi sections from {}".format(len(lumilist),goldenjson))
print("Skipping {} if they're not in the lumi list".format("events entirely" if skip_event else "anything after eventMaker"))
# Apply the mask either at the source (skips events outright) or on the
# dedicated lumiFilter module (skips after the EventMaker, see note above).
if skip_event:
    process.source.lumisToProcess = cms.untracked(cms.VLuminosityBlockRange()+lumilist)
else:
    process.lumiFilter.lumisToProcess = cms.untracked(cms.VLuminosityBlockRange()+lumilist)

#Max Events
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(opts.nevents) )

process.outpath = cms.EndPath(process.out)
process.out.outputCommands = cms.untracked.vstring( 'drop *' )

extra = {}
if opts.metrecipe:
    # Re-run MET with the modified recipe; the old maker keeps its output
    # under the 'evt_old' alias prefix.
    process.pfmetMakerModifiedMET = process.pfmetMaker.clone()
    process.pfmetMakerModifiedMET.pfMetInputTag_ = cms.InputTag("slimmedMETsModifiedMET","","CMS3")
    process.pfmetMaker.aliasPrefix = cms.untracked.string("evt_old")
    # NOTE(review): the dict literal below is cut off — it continues out of view.
    extra = dict(
# NOTE(review): fragment — 'cmsPath)' below closes a call that begins out of
# view, and the trailing 'return' belongs to an enclosing function.
cmsPath)
try:
    loader.load()
# Python-2 'except E, ex' / 'raise E, msg' syntax (predates Python 3).
except Exception, ex:
    msg = "Couldn't load CMSSW libraries: %s" % ex
    logging.error(msg)
    raise RuntimeError, msg

import FWCore.ParameterSet.Config as cms

# Placeholder EDMtoMEConvert process; the metadata marks it as a temporary
# stand-in config ("TEMP_CONFIG_USED").
process = cms.Process("EDMtoMEConvert")
process.source = cms.Source("PoolSource",
                            fileNames = cms.untracked.vstring()
                            )
process.configurationMetadata = cms.untracked(cms.PSet())
process.configurationMetadata.name = cms.untracked(
    cms.string("TEMP_CONFIG_USED"))
process.configurationMetadata.version = cms.untracked(
    cms.string(cmsswVersion))
process.configurationMetadata.annotation = cms.untracked(
    cms.string("DQM Harvesting Configuration Placeholder"))

# Wrap and validate the configuration, then release the loaded libraries.
cfgWrapper = CMSSWConfig()
cfgInt = cfgWrapper.loadConfiguration(process)
cfgInt.validateForProduction()

loader.unload()

return cfgWrapper