def promptReco(self, globalTag, **args):
    """
    _promptReco_

    Collision data, data scouting (dst stream): build the skeleton
    process that runs the DataScouting DQM sequence.

    dpiparo 17-7-2012
    I follow the structure of the package.
    """
    options = Options()
    options.scenario = 'pp'
    # NOTE(review): this update overwrites 'scenario' with the value from
    # defaultOptions; presumably both are 'pp' -- confirm.
    options.__dict__.update(defaultOptions.__dict__)
    options.step = 'DQM:DQM/DataScouting/dataScouting_cff.dataScoutingDQMSequence,ENDJOB'
    dictIO(options, args)
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process('DataScouting', self.eras)
    configBuilder = ConfigBuilder(options, process=process, with_output=True)

    # Input source: the actual file list is injected later by the caller.
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    configBuilder.prepare()
    return process
def expressProcessing(self, globalTag, **args):
    """
    _expressProcessing_

    Proton collision data taking express processing.

    Guarded access to 'skims' (previously an unconditional args['skims'],
    which raised KeyError when no skims were passed, unlike the other
    express-processing variants in this file).
    """
    skims = args.get('skims', [])
    # The AlCaReco skims for PCL should only run during the AlCaSkimming
    # step, which uses the same configuration on the Tier0 side; for this
    # reason we drop them here.
    pclWkflws = [x for x in skims if "PromptCalibProd" in x]
    for wfl in pclWkflws:
        skims.remove(wfl)

    step = stepALCAPRODUCER(skims)
    dqmStep = dqmSeq(args, '')

    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = self.cbSc
    options.step = 'RAW2DIGI,L1Reco,RECO' + step + ',DQM' + dqmStep + ',ENDJOB'
    dictIO(options, args)
    options.conditions = gtNameAndConnect(globalTag, args)
    # Placeholder; the real input is configured by the caller.
    options.filein = 'tobeoverwritten.xyz'
    if 'inputSource' in args:
        options.filetype = args['inputSource']

    process = cms.Process('RECO')
    cb = ConfigBuilder(options, process=process, with_output=True, with_input=True)
    cb.prepare()
    addMonitoring(process)
    return process
def alcaHarvesting(self, globalTag, datasetName, **args):
    """
    _alcaHarvesting_

    Proton collisions data taking AlCa Harvesting
    """
    skims = []
    if 'skims' in args:
        skims = args['skims']
    if 'alcapromptdataset' in args:
        skims.append('@' + args['alcapromptdataset'])
    # Nothing to harvest -> no process is built.
    if not skims:
        return None

    options = defaultOptions
    options.scenario = self.cbSc if hasattr(self, 'cbSc') else self.__class__.__name__
    options.step = "ALCAHARVEST:" + '+'.join(skims)
    options.name = "ALCAHARVEST"
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process("ALCAHARVEST", self.eras)
    process.source = cms.Source("PoolSource")
    if 'customs' in args:
        options.customisation_file = args['customs']

    builder = ConfigBuilder(options, process=process)
    builder.prepare()

    # Customise the process for this particular job.
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName
    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Proton collisions data taking DQM Harvesting
    """
    options = defaultOptions
    options.scenario = "pp"
    options.step = "HARVESTING:alcaHarvesting"
    options.name = "EDMtoMEConvert"
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process("HARVESTING", self.eras)
    process.source = dqmIOSource(args)

    builder = ConfigBuilder(options, process=process)
    builder.prepare()

    # Per-job customisation (source mode, file list, dqmSaver workflow)
    # is applied by the shared helper.
    harvestingMode(process, datasetName, args)
    return process
def promptReco(self, globalTag, **args):
    """
    _promptReco_

    Proton collision data taking prompt reco
    """
    alcaStep = stepALCAPRODUCER(args['skims'])
    dqmStep = dqmSeq(args, '')

    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = self.cbSc
    options.step = ('RAW2DIGI,L1Reco,RECO' + self.recoSeq + alcaStep +
                    ',DQM' + dqmStep + ',ENDJOB')
    dictIO(options, args)
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process('RECO')
    builder = ConfigBuilder(options, process=process, with_output=True)

    # Input source; the file list is filled in later by the caller.
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()
    addMonitoring(process)
    return process
def promptReco(self, globalTag, **args):
    """
    _promptReco_

    Collision data, data scouting (dst stream): skeleton process for the
    DataScouting DQM sequence (no era modifiers applied here).

    dpiparo 17-7-2012
    I follow the structure of the package.
    """
    options = Options()
    options.scenario = 'pp'
    # NOTE(review): the update below overwrites 'scenario' with the
    # defaultOptions value -- presumably also 'pp'; confirm.
    options.__dict__.update(defaultOptions.__dict__)
    options.step = 'DQM:DQM/DataScouting/dataScouting_cff.dataScoutingDQMSequence,ENDJOB'
    dictIO(options, args)
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process('DataScouting')
    builder = ConfigBuilder(options, process=process, with_output=True)

    # Input source; file names are injected later.
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()
    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Proton collisions data taking DQM Harvesting
    """
    options = defaultOptions
    options.scenario = "pp"
    options.step = "HARVESTING:alcaHarvesting"
    options.name = "EDMtoMEConvert"
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process("HARVESTING", self.eras)
    process.source = dqmIOSource(args)

    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    # All job-specific customisation (processing mode, input files,
    # dqmSaver workflow) is delegated to the shared helper.
    harvestingMode(process, datasetName, args)
    return process
def promptReco(self, globalTag, **args):
    """
    _promptReco_

    Prompt reco running only the ALCAPRODUCER step for the configured skims.
    Defaults 'skims' and 'customs' from the scenario when not supplied.
    """
    # Fall back to the scenario defaults when the caller supplies nothing.
    if 'skims' not in args:
        args['skims'] = self.skims
    if 'customs' not in args:
        args['customs'] = []

    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = "pp"
    dictIO(options, args)
    options.conditions = gtNameAndConnect(globalTag, args)
    # 'customs' is guaranteed above, so the previous existence check was
    # always true; the stray debug print of the list was removed.
    options.customisation_file = args['customs']
    options.step += stepALCAPRODUCER(args['skims'])

    process = cms.Process('RECO', cms.ModifierChain(self.eras))
    cb = ConfigBuilder(options, process=process, with_output=True)

    # Input source; file names are filled in by the caller.
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    cb.prepare()
    return process
def visualizationProcessing(self, globalTag, **args):
    """
    _visualizationProcessing_
    """
    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = self.cbSc

    # FIXME: do we need L1Reco here?
    options.step = ''
    if 'preFilter' in args:
        options.step += 'FILTER:' + args['preFilter'] + ','
    eiStep = ''
    if 'beamSplashRun' in args:
        options.step += 'RAW2DIGI,L1Reco,RECO' + args['beamSplashRun'] + ',ENDJOB'
        print("Using RECO%s step in visualizationProcessing" % args['beamSplashRun'])
    else:
        options.step += 'RAW2DIGI,L1Reco,RECO' + eiStep + ',ENDJOB'

    dictIO(options, args)
    options.conditions = gtNameAndConnect(globalTag, args)
    options.timeoutOutput = True
    # FIXME: maybe can go...maybe not
    options.filein = 'tobeoverwritten.xyz'
    if 'inputSource' in args:
        options.filetype = args['inputSource']
    else:
        # Default: this is what is needed on the OnlineCluster.
        options.filetype = 'DQMDAQ'
    print("Using %s source" % options.filetype)

    process = cms.Process('RECO', cms.ModifierChain(self.eras, self.visModifiers))
    if 'customs' in args:
        options.customisation_file = args['customs']
    self._checkRepackedFlag(options, **args)

    builder = ConfigBuilder(options, process=process, with_output=True, with_input=True)
    builder.prepare()
    # FIXME: not sure about this one...drop for the moment
    # addMonitoring(process)
    return process
def promptReco(self, globalTag, **args):
    """
    _promptReco_

    Proton collision data taking prompt reco
    """
    alcaStep = stepALCAPRODUCER(args['skims'])
    physicsSkimStep = ''
    if "PhysicsSkims" in args:
        physicsSkimStep = stepSKIMPRODUCER(args['PhysicsSkims'])
    dqmStep = dqmSeq(args, '')

    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = self.cbSc
    if 'nThreads' in args:
        options.nThreads = args['nThreads']

    # If miniAOD is asked for, append the PAT step.
    miniAODStep = ''
    if 'outputs' in args:
        for out in args['outputs']:
            if out['dataTier'] == 'MINIAOD':
                miniAODStep = ',PAT'

    # Unscheduled for all.
    options.runUnscheduled = True
    self._checkRepackedFlag(options, **args)

    if 'customs' in args:
        options.customisation_file = args['customs']
    eiStep = ''
    if self.addEI:
        eiStep = ',EI'
    options.step = ('RAW2DIGI,L1Reco,RECO' + self.recoSeq + eiStep + alcaStep +
                    physicsSkimStep + miniAODStep + ',DQM' + dqmStep + ',ENDJOB')
    dictIO(options, args)
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process('RECO', cms.ModifierChain(self.eras, self.promptModifiers))
    builder = ConfigBuilder(options, process=process, with_output=True)

    # Input source; file names are filled in by the caller.
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()
    addMonitoring(process)
    return process
def visualizationProcessing(self, globalTag, **args):
    """
    _visualizationProcessing_

    Fixed: Python-2 'print' statement replaced with the print() call form
    already used by the other methods in this file.
    """
    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = self.cbSc

    # FIXME: do we need L1Reco here?
    options.step = ''
    if 'preFilter' in args:
        options.step += 'FILTER:' + args['preFilter'] + ','
    eiStep = ''
    if self.cbSc == 'pp':
        eiStep = ',EI'
    options.step += 'RAW2DIGI,L1Reco,RECO' + eiStep + ',ENDJOB'

    dictIO(options, args)
    options.conditions = gtNameAndConnect(globalTag, args)
    options.timeoutOutput = True
    # FIXME: maybe can go...maybe not
    options.filein = 'tobeoverwritten.xyz'
    if 'inputSource' in args:
        options.filetype = args['inputSource']
    else:
        # This is the default as this is what is needed on the OnlineCluster.
        options.filetype = 'DQMDAQ'
    print("Using %s source" % options.filetype)

    process = cms.Process('RECO', cms.ModifierChain(self.eras, self.visModifiers))
    if 'customs' in args:
        options.customisation_file = args['customs']
    self._checkRepackedFlag(options, **args)

    cb = ConfigBuilder(options, process=process, with_output=True, with_input=True)
    cb.prepare()
    # FIXME: not sure about this one...drop for the moment
    # addMonitoring(process)
    return process
def skimming(self, skims, globalTag, **args):
    """
    _skimming_

    skimming method overload for the prompt skiming

    Fixed: the kwargs parameter was named **options and then immediately
    shadowed by 'options = defaultOptions', so the later reference to
    'args' raised NameError. The kwargs container is renamed to **args
    (invisible to callers, who pass keywords by name).
    """
    options = defaultOptions
    options.scenario = self.cbSc if hasattr(self, 'cbSc') else self.__class__.__name__
    options.step = "SKIM:" + ('+'.join(skims))
    options.name = "SKIM"
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process("SKIM")
    process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()
    return process
def promptReco(self, globalTag, **args):
    """
    _promptReco_

    Proton collision data taking prompt reco
    """
    alcaStep = stepALCAPRODUCER(args['skims'])
    dqmStep = dqmSeq(args, '')

    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = self.cbSc

    # When a MINIAOD output is requested, append the PAT step.
    miniAODStep = ''
    for out in args.get('outputs', []):
        if out['dataTier'] == 'MINIAOD':
            miniAODStep = ',PAT'

    options.runUnscheduled = True
    if 'customs' in args:
        options.customisation_file = args['customs']
    options.step = ('RAW2DIGI,L1Reco,RECO' + self.recoSeq + alcaStep +
                    miniAODStep + ',DQM' + dqmStep + ',ENDJOB')
    dictIO(options, args)
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process('RECO')
    builder = ConfigBuilder(options, process=process, with_output=True)

    # Input source; file names are filled in by the caller.
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()
    addMonitoring(process)
    return process
def visualizationProcessing(self, globalTag, **args):
    """
    _visualizationProcessing_

    Fixed: Python-2 'print' statement replaced with the print() call form
    already used by the other methods in this file.
    """
    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = self.cbSc

    # FIXME: do we need L1Reco here?
    options.step = ''
    if 'preFilter' in args:
        options.step += 'FILTER:' + args['preFilter'] + ','
    options.step += 'RAW2DIGI,L1Reco,RECO,ENDJOB'

    dictIO(options, args)
    options.conditions = gtNameAndConnect(globalTag, args)
    options.timeoutOutput = True
    # FIXME: maybe can go...maybe not
    options.filein = 'tobeoverwritten.xyz'
    if 'inputSource' in args:
        options.filetype = args['inputSource']
    else:
        # This is the default as this is what is needed on the OnlineCluster.
        options.filetype = 'DQMDAQ'
    print("Using %s source" % options.filetype)

    process = cms.Process('RECO')
    cb = ConfigBuilder(options, process=process, with_output=True, with_input=True)
    cb.prepare()
    # FIXME: not sure about this one...drop for the moment
    # addMonitoring(process)
    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Proton collisions data taking DQM Harvesting
    """
    options = defaultOptions
    options.scenario = "pp"
    options.step = "HARVESTING:alcaHarvesting"
    options.name = "EDMtoMEConvert"
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process("HARVESTING", self.eras)
    process.source = dqmIOSource(args)

    builder = ConfigBuilder(options, process=process)
    builder.prepare()
    harvestingMode(process, datasetName, args)
    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Proton collisions data taking DQM Harvesting
    """
    options = defaultOptions
    options.scenario = 'pp'
    options.step = "HARVESTING" + dqmSeq(args, ':DQMOffline')
    options.name = "EDMtoMEConvert"
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process("HARVESTING", self.eras)
    process.source = dqmIOSource(args)

    builder = ConfigBuilder(options, process=process)
    builder.prepare()
    harvestingMode(process, datasetName, args, rANDl=False)
    return process
def promptReco(self, globalTag, **args):
    """Prompt reco running only the ALCAPRODUCER step for the configured skims."""
    if 'skims' not in args:
        args['skims'] = self.skims
    alcaStep = stepALCAPRODUCER(args['skims'])

    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = "pp"
    options.step = alcaStep
    dictIO(options, args)
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process('RECO', self.eras)
    builder = ConfigBuilder(options, process=process, with_output=True)

    # Input source; file names are filled in by the caller.
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()
    return process
def promptReco(self, globalTag, **args):
    """Prompt reco that runs only the ALCAPRODUCER step (no era modifiers)."""
    if 'skims' not in args:
        args['skims'] = self.skims
    producerStep = stepALCAPRODUCER(args['skims'])

    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = "pp"
    options.step = producerStep
    dictIO(options, args)
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process('RECO')
    configBuilder = ConfigBuilder(options, process=process, with_output=True)

    # Input source; file names are filled in by the caller.
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    configBuilder.prepare()
    return process
def alcaHarvesting(self, globalTag, datasetName, **args):
    """
    _alcaHarvesting_

    Proton collisions data taking AlCa Harvesting
    """
    skims = []
    if 'skims' in args:
        skims = args['skims']
    if 'alcapromptdataset' in args:
        skims.append('@' + args['alcapromptdataset'])
    # No skims requested -> nothing to build.
    if not skims:
        return None

    options = defaultOptions
    options.scenario = self.cbSc if hasattr(self, 'cbSc') else self.__class__.__name__
    options.step = "ALCAHARVEST:" + '+'.join(skims)
    options.name = "ALCAHARVEST"
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process("ALCAHARVEST", self.eras)
    process.source = cms.Source("PoolSource")
    if 'customs' in args:
        options.customisation_file = args['customs']

    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    # Customise the process for this particular job.
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName
    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Proton collisions data taking DQM Harvesting
    """
    options = defaultOptions
    options.scenario = self.cbSc
    options.step = "HARVESTING" + dqmSeq(args, ':dqmHarvesting')
    options.name = "EDMtoMEConvert"
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process("HARVESTING")
    process.source = dqmIOSource(args)
    if 'customs' in args:
        options.customisation_file = args['customs']

    builder = ConfigBuilder(options, process=process)
    builder.prepare()
    harvestingMode(process, datasetName, args, rANDl=False)
    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Proton collisions data taking DQM Harvesting
    """
    options = defaultOptions
    options.scenario = self.cbSc
    options.step = "HARVESTING" + dqmSeq(args, ':dqmHarvesting')
    options.name = "EDMtoMEConvert"
    options.conditions = gtNameAndConnect(globalTag, args)

    process = cms.Process("HARVESTING", self.eras)
    process.source = dqmIOSource(args)
    if 'customs' in args:
        options.customisation_file = args['customs']

    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()
    harvestingMode(process, datasetName, args, rANDl=False)
    return process
def expressProcessing(self, globalTag, **args):
    """
    _expressProcessing_

    Proton collision data taking express processing.

    Fixed: outputs_Raw was only bound inside 'if outputs in args', so the
    later loop raised NameError when no outputs were passed; identity
    comparisons now use 'is not None'.
    """
    skims = []
    if 'skims' in args:
        skims = args['skims']
    # PCL AlCaReco skims run in the AlCaSkimming step instead; drop them.
    pclWkflws = [x for x in skims if "PromptCalibProd" in x]
    for wfl in pclWkflws:
        skims.remove(wfl)

    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = "pp"
    options.step = stepALCAPRODUCER(skims)

    # The RAW data-tier needs a special treatment since the event-content
    # as defined in release is not good enough; those outputs get
    # hand-crafted output modules below.
    outputs_Raw = []
    if 'outputs' in args:
        outputs_Raw = [x for x in args['outputs'] if x['dataTier'] == 'RAW']
        outputs_noRaw = [x for x in args['outputs'] if x['dataTier'] != 'RAW']
        if len(outputs_Raw) == 1:
            print('RAW data-tier requested')
        options.outputDefinition = outputs_noRaw.__str__()

    options.conditions = gtNameAndConnect(globalTag, args)
    options.filein = 'tobeoverwritten.xyz'
    if 'inputSource' in args:
        options.filetype = args['inputSource']

    process = cms.Process('RECO', self.eras)
    if 'customs' in args:
        options.customisation_file = args['customs']

    cb = ConfigBuilder(options, process=process, with_output=True, with_input=True)
    cb.prepare()
    addMonitoring(process)

    for output in outputs_Raw:
        print(output)
        moduleLabel = output['moduleLabel']
        selectEvents = output.get('selectEvents', None)
        maxSize = output.get('maxSize', None)

        outputModule = cms.OutputModule(
            "PoolOutputModule",
            fileName=cms.untracked.string("%s.root" % moduleLabel)
        )
        outputModule.dataset = cms.untracked.PSet(dataTier=cms.untracked.string("RAW"))
        if maxSize is not None:
            outputModule.maxSize = cms.untracked.int32(maxSize)
        if selectEvents is not None:
            outputModule.SelectEvents = cms.untracked.PSet(
                SelectEvents=cms.vstring(selectEvents)
            )
        outputModule.outputCommands = cms.untracked.vstring('drop *',
                                                            'keep *_*_*_HLT')
        setattr(process, moduleLabel, outputModule)
        setattr(process, moduleLabel + '_step', cms.EndPath(outputModule))
        path = getattr(process, moduleLabel + '_step')
        process.schedule.append(path)

    return process
def expressProcessing(self, globalTag, **args):
    """
    _expressProcessing_

    Proton collision data taking express processing.

    Fixed: the 'path = getattr(...)' assignment was split mid-statement;
    outputs_Raw was unbound when 'outputs' was not passed (NameError in
    the loop below); identity comparisons now use 'is not None'; dead
    commented-out code removed.
    """
    skims = []
    if 'skims' in args:
        skims = args['skims']
    # PCL AlCaReco skims run in the AlCaSkimming step instead; drop them.
    pclWkflws = [x for x in skims if "PromptCalibProd" in x]
    for wfl in pclWkflws:
        skims.remove(wfl)

    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = "pp"
    options.step = stepALCAPRODUCER(skims)

    # The RAW data-tier needs a special treatment since the event-content
    # as defined in release is not good enough; those outputs get
    # hand-crafted output modules below.
    outputs_Raw = []
    if 'outputs' in args:
        outputs_Raw = [x for x in args['outputs'] if x['dataTier'] == 'RAW']
        outputs_noRaw = [x for x in args['outputs'] if x['dataTier'] != 'RAW']
        if len(outputs_Raw) == 1:
            print('RAW data-tier requested')
        options.outputDefinition = outputs_noRaw.__str__()

    options.conditions = gtNameAndConnect(globalTag, args)
    options.filein = 'tobeoverwritten.xyz'
    if 'inputSource' in args:
        options.filetype = args['inputSource']

    process = cms.Process('RECO', self.eras)
    if 'customs' in args:
        options.customisation_file = args['customs']

    cb = ConfigBuilder(options, process=process, with_output=True, with_input=True)
    cb.prepare()
    addMonitoring(process)

    for output in outputs_Raw:
        print(output)
        moduleLabel = output['moduleLabel']
        selectEvents = output.get('selectEvents', None)
        maxSize = output.get('maxSize', None)

        outputModule = cms.OutputModule(
            "PoolOutputModule",
            fileName=cms.untracked.string("%s.root" % moduleLabel)
        )
        outputModule.dataset = cms.untracked.PSet(dataTier=cms.untracked.string("RAW"))
        if maxSize is not None:
            outputModule.maxSize = cms.untracked.int32(maxSize)
        if selectEvents is not None:
            outputModule.SelectEvents = cms.untracked.PSet(
                SelectEvents=cms.vstring(selectEvents)
            )
        outputModule.outputCommands = cms.untracked.vstring('drop *',
                                                            'keep *_*_*_HLT')
        setattr(process, moduleLabel, outputModule)
        setattr(process, moduleLabel + '_step', cms.EndPath(outputModule))
        path = getattr(process, moduleLabel + '_step')
        process.schedule.append(path)

    return process