def expressProcessing(self, globalTag, writeTiers=[], **args):
    """
    _expressProcessing_

    Proton collision data taking express processing

    """
    alcaSkims = ['EcalCalPi0Calib', 'EcalCalEtaCalib']
    alcaStep = stepALCAPRODUCER(alcaSkims)

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "pp"
    opts.step = alcaStep
    opts.isMC = False
    opts.isData = True
    opts.beamspot = None
    tiers = ','.join(writeTiers)
    opts.eventcontent = tiers
    opts.datatier = tiers
    opts.magField = 'AutoFromDBCurrent'
    opts.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    opts.relval = False

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    # add the former top level patches here
    customisePrompt(process)
    return process
def alcaHarvesting(self, globalTag, datasetName, **args):
    """
    _alcaHarvesting_

    Proton collisions data taking AlCa Harvesting

    Returns None when no 'skims' list is supplied in args.
    """
    # nothing to harvest without a skim list
    if 'skims' not in args:
        return None
    options = defaultOptions
    # fall back to the class name when the scenario shortcut is absent
    options.scenario = self.cbSc if hasattr(self, 'cbSc') else self.__class__.__name__
    options.step = "ALCAHARVEST:" + ('+'.join(args['skims']))
    options.name = "ALCAHARVEST"
    options.conditions = globalTag

    process = cms.Process("ALCAHARVEST")
    process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName

    return process
def expressProcessing(self, globalTag, writeTiers=[], **args):
    """
    _expressProcessing_

    Cosmic data taking express processing

    """
    alcaStep = stepALCAPRODUCER(['SiStripCalZeroBias', 'MuAlCalIsolatedMu'])

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "cosmics"
    opts.step = 'RAW2DIGI,L1Reco,RECO%s,L1HwVal,DQM,ENDJOB' % alcaStep
    opts.isMC = False
    opts.isData = True
    opts.beamspot = None
    tiers = ','.join(writeTiers)
    opts.eventcontent = tiers
    opts.datatier = tiers
    opts.magField = 'AutoFromDBCurrent'
    opts.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    opts.relval = False

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # express jobs read the streamer files directly
    process.source = cms.Source("NewEventStreamFileReader",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    customiseCosmicData(process)
    return process
def expressProcessing(self, globalTag, **args):
    """
    _expressProcessing_

    Proton collision data taking express processing

    """
    alcaStep = stepALCAPRODUCER(args['skims'])
    dqmStep = dqmSeq(args, '')

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = self.cbSc
    opts.step = 'RAW2DIGI,L1Reco,RECO%s,DQM%s,ENDJOB' % (alcaStep, dqmStep)
    dictIO(opts, args)
    opts.conditions = globalTag

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # express jobs read the streamer files directly
    process.source = cms.Source("NewEventStreamFileReader",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    addMonitoring(process)
    return process
def expressProcessing(self, globalTag, **args):
    """
    _expressProcessing_

    Heavy-ion collision data taking express processing

    """
    alcaStep = stepALCAPRODUCER(['SiStripCalZeroBias', 'TkAlMinBiasHI'])

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "HeavyIons"
    opts.step = 'RAW2DIGI,L1Reco,RECO%s,DQM,ENDJOB' % alcaStep
    opts.isRepacked = True
    dictIO(opts, args)
    opts.conditions = globalTag

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # express jobs read the streamer files directly
    process.source = cms.Source("NewEventStreamFileReader",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    customiseExpressHI(process)
    addMonitoring(process)
    return process
def expressProcessing(self, globalTag, **args):
    """
    _expressProcessing_

    Heavy-ion collision data taking express processing

    """
    alcaStep = stepALCAPRODUCER(['SiStripCalZeroBias', 'TkAlMinBiasHI'])

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "HeavyIons"
    opts.step = 'RAW2DIGI,L1Reco,RECO%s,DQM,ENDJOB' % alcaStep
    opts.isRepacked = True
    dictIO(opts, args)
    opts.conditions = globalTag

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # express jobs read the streamer files directly
    process.source = cms.Source("NewEventStreamFileReader",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    customiseExpressHI(process)
    addMonitoring(process)
    return process
def promptReco(self, globalTag, **args):
    """
    _promptReco_

    Collision data, data scouting (dst stream).
    This method provides the skeleton process for the dataScouting.
    dpiparo 17-7-2012
    I follow the structure of the package.
    """
    options = Options()
    # BUG FIX: the defaults must be copied in BEFORE setting scenario;
    # the original assigned options.scenario first and the subsequent
    # defaultOptions.__dict__ update overwrote it.
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = self.cbSc
    options.step = 'DQM:DQM/DataScouting/dataScouting_cff.dataScoutingDQMSequence,ENDJOB'
    dictIO(options, args)
    options.conditions = globalTag

    process = cms.Process('DataScouting')
    cb = ConfigBuilder(options, process=process, with_output=True)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    cb.prepare()

    return process
def alcaHarvesting(self, globalTag, **args):
    """
    _alcaHarvesting_

    Proton collisions data taking AlCa Harvesting

    """
    opts = defaultOptions
    opts.scenario = "pp"
    opts.step = "ALCAHARVEST:BeamSpotByRun+BeamSpotByLumi"
    opts.isMC = False
    opts.isData = True
    opts.beamspot = None
    opts.eventcontent = None
    opts.name = "ALCAHARVEST"
    opts.conditions = globalTag
    opts.arguments = ""
    opts.evt_type = ""
    opts.filein = []

    process = cms.Process("ALCAHARVEST")
    process.source = cms.Source("PoolSource")
    builder = ConfigBuilder(opts, process=process)
    builder.prepare()

    # customise the process for this particular job:
    # harvesting iterates over runs and lumis, not single events
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1

    return process
def expressProcessing(self, globalTag, **args):
    """
    _expressProcessing_

    Proton collision data taking express processing

    """
    alcaStep = stepALCAPRODUCER(args['skims'])
    dqmStep = dqmSeq(args, '')

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = self.cbSc
    opts.step = 'RAW2DIGI,L1Reco,RECO%s,DQM%s,ENDJOB' % (alcaStep, dqmStep)
    dictIO(opts, args)
    opts.conditions = globalTag
    # placeholder file name; the real input is injected by the caller
    opts.filein = 'tobeoverwritten.xyz'
    if 'inputSource' in args:
        opts.filetype = args['inputSource']

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process,
                            with_output=True, with_input=True)
    builder.prepare()

    addMonitoring(process)
    return process
def promptReco(self, globalTag, **args):
    """
    _promptReco_

    Proton collision data taking prompt reco

    """
    alcaStep = stepALCAPRODUCER(args['skims'])
    dqmStep = dqmSeq(args, '')

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = self.cbSc
    opts.step = 'RAW2DIGI,L1Reco,RECO%s%s,DQM%s,ENDJOB' % (self.recoSeq, alcaStep, dqmStep)
    dictIO(opts, args)
    opts.conditions = globalTag

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    addMonitoring(process)
    return process
def alcaSkim(self, skims, **args):
    """
    _alcaSkim_

    AlcaReco processing & skims for proton collisions

    """
    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "pp"
    opts.step = "ALCAOUTPUT:" + '+'.join(skims)
    if 'globaltag' in args:
        opts.conditions = args['globaltag']
    else:
        opts.conditions = 'None'
    opts.triggerResultsProcess = 'RECO'

    process = cms.Process('ALCA')
    builder = ConfigBuilder(opts, process=process)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()
    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Proton collisions data taking DQM Harvesting

    """
    opts = defaultOptions
    opts.scenario = "pp"
    opts.step = "HARVESTING:alcaHarvesting"
    opts.name = "EDMtoMEConvert"
    opts.conditions = globalTag

    process = cms.Process("HARVESTING")
    process.source = dqmIOSource(args)
    builder = ConfigBuilder(opts, process=process)
    builder.prepare()

    # job-specific customisation (source mode, dataset name, saver
    # settings) is delegated to harvestingMode
    harvestingMode(process, datasetName, args)
    return process
def promptReco(self, globalTag, writeTiers=['RECO'], **args):
    """
    _promptReco_

    Prompt reco for RelVal MC FastSim production

    """
    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "pp"
    opts.step = 'RAW2DIGI,L1Reco,RECO,VALIDATION,ENDJOB'
    opts.isMC = True
    opts.isData = False
    opts.beamspot = None
    tiers = ','.join(writeTiers)
    opts.eventcontent = tiers
    opts.datatier = tiers
    opts.magField = 'AutoFromDBCurrent'
    opts.conditions = "FrontierConditions_GlobalTag,%s" % globalTag

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()
    return process
def expressProcessing(self, globalTag, writeTiers=[], **args):
    """
    _expressProcessing_

    Cosmic data taking express processing

    """
    alcaStep = stepALCAPRODUCER(['SiStripCalZeroBias', 'MuAlCalIsolatedMu'])

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "cosmics"
    opts.step = 'RAW2DIGI,L1Reco,RECO%s,L1HwVal,DQM,ENDJOB' % alcaStep
    opts.isMC = False
    opts.isData = True
    opts.beamspot = None
    tiers = ','.join(writeTiers)
    opts.eventcontent = tiers
    opts.datatier = tiers
    opts.magField = 'AutoFromDBCurrent'
    opts.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    opts.relval = False

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # express jobs read the streamer files directly
    process.source = cms.Source("NewEventStreamFileReader",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    customiseCosmicData(process)
    return process
def load(self, wfNumber, step):
    """Build (or fetch from cache) the ConfigBuilder for one workflow step.

    wfNumber -- numeric id of the relval workflow (matched against wf.numId)
    step     -- step index; the workflow must carry a non-empty
                cmdStep<step> cmsDriver command for it

    Returns True when a builder is available in self.configBuilders,
    False when the request cannot be satisfied.
    """
    from Configuration.PyReleaseValidation.ConfigBuilder import ConfigBuilder
    from Configuration.PyReleaseValidation.cmsDriverOptions import OptionsFromCommand
    import copy
    # in strict mode only a single process may ever be loaded, because
    # python config loading can alter module-level state
    if len(self.configBuilders) != 0 and self.strict:
        raise Exception('one should never be loading more than one process at a time due to python loading/altering feature')
    key = self.getKey(wfNumber, step)
    # already cached: nothing to do
    if key in self.configBuilders:
        return True
    for wf in self.mrd.workFlows:
        if float(wf.numId) != wfNumber:
            continue
        # skip workflows that carry no command for this step
        if not hasattr(wf, 'cmdStep%d' % (step)):
            continue
        if not getattr(wf, 'cmdStep%d' % (step)):
            continue
        command = getattr(wf, 'cmdStep%d' % (step))
        opt = OptionsFromCommand(command)
        if opt:
            cb = ConfigBuilder(opt, with_input=True, with_output=True)
            cb.prepare()
            # cache a (shallow) copy of the prepared builder
            self.configBuilders[key] = copy.copy(cb)
            return True
    print "could not satisfy the request for step", step, "of workflow", wfNumber
    return False
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    DQM Harvesting for RelVal MC production

    """
    options = defaultOptions
    options.scenario = "pp"
    options.step = "HARVESTING:validationHarvestingFS"
    options.isMC = True
    options.isData = False
    options.beamspot = None
    options.name = "EDMtoMEConvert"
    options.conditions = globalTag

    process = cms.Process("HARVESTING")
    process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName
    # dict.has_key() is deprecated (removed in Python 3); .get() with a
    # falsy default covers both the missing-key and empty-value cases
    if args.get('referenceFile', ''):
        process.DQMStore.referenceFileName = \
            cms.untracked.string(args['referenceFile'])

    return process
def alcaHarvesting(self, globalTag, **args):
    """
    _alcaHarvesting_

    Heavy-ion collisions data taking AlCa Harvesting

    """
    opts = defaultOptions
    opts.scenario = "HeavyIons"
    opts.step = "ALCAHARVEST:BeamSpotByRun+BeamSpotByLumi"
    opts.isMC = False
    opts.isData = True
    opts.beamspot = None
    opts.eventcontent = None
    opts.name = "ALCAHARVEST"
    opts.conditions = globalTag
    opts.arguments = ""
    opts.evt_type = ""
    opts.filein = []

    process = cms.Process("ALCAHARVEST")
    process.source = cms.Source("PoolSource")
    builder = ConfigBuilder(opts, process=process)
    builder.prepare()

    # customise the process for this particular job:
    # harvesting iterates over runs and lumis, not single events
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1

    return process
def alcaHarvesting(self, globalTag, datasetName, **args):
    """
    _alcaHarvesting_

    Proton collisions data taking AlCa Harvesting

    Returns None when no 'skims' list is supplied in args.
    """
    # nothing to harvest without a skim list
    if "skims" not in args:
        return None
    options = defaultOptions
    # fall back to the class name when the scenario shortcut is absent
    options.scenario = self.cbSc if hasattr(self, "cbSc") else self.__class__.__name__
    options.step = "ALCAHARVEST:" + ("+".join(args["skims"]))
    options.name = "ALCAHARVEST"
    options.conditions = globalTag

    process = cms.Process("ALCAHARVEST")
    process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string("RunsAndLumis")
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName

    return process
def expressProcessing(self, globalTag, **args):
    """
    _expressProcessing_

    Proton collision data taking express processing

    """
    alcaStep = stepALCAPRODUCER(args["skims"])
    dqmStep = dqmSeq(args, "")

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = self.cbSc
    opts.step = "RAW2DIGI,L1Reco,RECO%s,DQM%s,ENDJOB" % (alcaStep, dqmStep)
    dictIO(opts, args)
    opts.conditions = globalTag

    process = cms.Process("RECO")
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # express jobs read the streamer files directly
    process.source = cms.Source("NewEventStreamFileReader",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    addMonitoring(process)
    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Proton collisions data taking DQM Harvesting

    """
    opts = defaultOptions
    opts.scenario = "pp"
    opts.step = "HARVESTING:alcaHarvesting"
    opts.name = "EDMtoMEConvert"
    opts.conditions = globalTag

    process = cms.Process("HARVESTING")
    process.source = dqmIOSource(args)
    builder = ConfigBuilder(opts, process=process)
    builder.prepare()

    # job-specific customisation (source mode, dataset name, saver
    # settings) is delegated to harvestingMode
    harvestingMode(process, datasetName, args)
    return process
def alcaSkim(self, skims, **args):
    """
    _alcaSkim_

    AlcaReco processing & skims for proton collisions

    """
    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "pp"
    opts.step = "ALCAOUTPUT:" + '+'.join(skims)
    if 'globaltag' in args:
        opts.conditions = args['globaltag']
    else:
        opts.conditions = 'None'
    opts.triggerResultsProcess = 'RECO'

    process = cms.Process('ALCA')
    builder = ConfigBuilder(opts, process=process)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()
    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    DQM Harvesting for RelVal MC production

    """
    options = defaultOptions
    options.scenario = "pp"
    options.step = "HARVESTING:validationHarvestingFS"
    options.isMC = True
    options.isData = False
    options.beamspot = None
    options.name = "EDMtoMEConvert"
    options.conditions = globalTag

    process = cms.Process("HARVESTING")
    process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName
    # dict.has_key() is deprecated (removed in Python 3); .get() with a
    # falsy default covers both the missing-key and empty-value cases
    if args.get('referenceFile', ''):
        process.DQMStore.referenceFileName = \
            cms.untracked.string(args['referenceFile'])

    return process
def promptReco(self, globalTag, writeTiers=['RECO'], **args):
    """
    _promptReco_

    Prompt reco for RelVal MC production

    """
    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "pp"
    opts.step = 'RAW2DIGI,L1Reco,RECO,VALIDATION,DQM,ENDJOB'
    opts.isMC = True
    opts.isData = False
    opts.beamspot = None
    tiers = ','.join(writeTiers)
    opts.eventcontent = tiers
    opts.datatier = tiers
    opts.magField = 'AutoFromDBCurrent'
    opts.conditions = "FrontierConditions_GlobalTag,%s" % globalTag

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()
    return process
def promptReco(self, globalTag, writeTiers=['RECO'], **args):
    """
    _promptReco_

    Heavy-ion collision data taking prompt reco

    """
    alcaStep = stepALCAPRODUCER(['SiStripCalZeroBias', 'SiStripCalMinBias',
                                 'TkAlMinBiasHI'])

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "HeavyIons"
    opts.step = 'RAW2DIGI,L1Reco,RECO%s,L1HwVal,DQM,ENDJOB' % alcaStep
    opts.isMC = False
    opts.isData = True
    opts.beamspot = None
    tiers = ','.join(writeTiers)
    opts.eventcontent = tiers
    opts.datatier = tiers
    opts.magField = 'AutoFromDBCurrent'
    opts.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    opts.relval = False

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    # add the former top level patches here
    customisePromptHI(process)
    return process
def load(self, wfNumber, step):
    """Build (or fetch from cache) the ConfigBuilder for one workflow step.

    wfNumber -- numeric id of the relval workflow (matched against wf.numId)
    step     -- step index; the workflow must carry a non-empty
                cmdStep<step> cmsDriver command for it

    Returns True when a builder is available in self.configBuilders,
    False when the request cannot be satisfied.
    """
    from Configuration.PyReleaseValidation.ConfigBuilder import ConfigBuilder
    from Configuration.PyReleaseValidation.cmsDriverOptions import OptionsFromCommand
    import copy
    # in strict mode only a single process may ever be loaded, because
    # python config loading can alter module-level state
    if len(self.configBuilders) != 0 and self.strict:
        raise Exception(
            'one should never be loading more than one process at a time due to python loading/altering feature'
        )
    key = self.getKey(wfNumber, step)
    # already cached: nothing to do
    if key in self.configBuilders:
        return True
    for wf in self.mrd.workFlows:
        if float(wf.numId) != wfNumber:
            continue
        # skip workflows that carry no command for this step
        if not hasattr(wf, 'cmdStep%d' % (step)):
            continue
        if not getattr(wf, 'cmdStep%d' % (step)):
            continue
        command = getattr(wf, 'cmdStep%d' % (step))
        opt = OptionsFromCommand(command)
        if opt:
            cb = ConfigBuilder(opt, with_input=True, with_output=True)
            cb.prepare()
            # cache a (shallow) copy of the prepared builder
            self.configBuilders[key] = copy.copy(cb)
            return True
    print "could not satisfy the request for step", step, "of workflow", wfNumber
    return False
def expressProcessing(self, globalTag, **args):
    """
    _expressProcessing_

    Proton collision data taking express processing

    """
    alcaStep = stepALCAPRODUCER(args['skims'])
    dqmStep = dqmSeq(args, '')

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = self.cbSc
    opts.step = 'RAW2DIGI,L1Reco,RECO%s,DQM%s,ENDJOB' % (alcaStep, dqmStep)
    dictIO(opts, args)
    opts.conditions = globalTag
    # placeholder file name; the real input is injected by the caller
    opts.filein = 'tobeoverwritten.xyz'
    if 'inputSource' in args:
        opts.filetype = args['inputSource']

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process,
                            with_output=True, with_input=True)
    builder.prepare()

    addMonitoring(process)
    return process
def alcaSkim(self, skims, **args):
    """
    _alcaSkim_

    AlcaReco processing & skims for heavy-ion collisions

    """
    globalTag = None
    if 'globaltag' in args:
        globalTag = args['globaltag']

    # PromptCalibProd runs as an ALCA step of its own; everything else
    # goes through ALCAOUTPUT.  NOTE: as in the original, skims is
    # mutated in place (PromptCalibProd is removed).
    step = ""
    if 'PromptCalibProd' in skims:
        step = "ALCA:PromptCalibProd"
        skims.remove('PromptCalibProd')
    if len(skims) > 0:
        if step != "":
            step += ","
        # '+'.join builds "A+B+C" directly, without the trailing '+'
        # the old manual loop had to rstrip afterwards
        step += "ALCAOUTPUT:" + '+'.join(skims)

    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = "HeavyIons"
    options.step = step
    options.isMC = False
    options.isData = True
    options.beamspot = None
    options.eventcontent = None
    options.relval = None
    # identity comparison with None is the idiomatic form
    if globalTag is not None:
        options.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    options.triggerResultsProcess = 'RECO'

    process = cms.Process('ALCA')
    cb = ConfigBuilder(options, process=process)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    cb.prepare()

    # FIXME: dirty hack..any way around this?
    # Tier0 needs the dataset used for ALCAHARVEST step to be a different data-tier
    if 'PromptCalibProd' in step:
        process.ALCARECOStreamPromptCalibProd.dataset.dataTier = \
            cms.untracked.string('ALCAPROMPT')

    return process
def alcaReco(self, skims, **args):
    """
    _alcaReco_

    AlcaReco processing & skims for RelVal MC production

    An optional 'globaltag' may be supplied via args.
    """
    # BUG FIX: the original referenced an undefined name 'globalTag'
    # (guaranteed NameError at runtime); take it from args the way the
    # alcaSkim implementations do, and set conditions only when given.
    globalTag = args.get('globaltag', None)

    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = "pp"
    options.step = 'ALCA:MuAlStandAloneCosmics+DQM,ENDJOB'
    options.isMC = True
    options.isData = False
    if globalTag is not None:
        options.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    options.beamspot = None
    options.eventcontent = None
    options.relval = None

    process = cms.Process('ALCA')
    cb = ConfigBuilder(options, process=process)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    cb.prepare()

    # verify that every requested skim exists as an output module
    availableStreams = process.outputModules_().keys()
    for skim in skims:
        if skim not in availableStreams:
            msg = "Skim named: %s not available " % skim
            msg += "in Alca Reco Config:\n"
            msg += "Known Skims: %s\n" % availableStreams
            raise RuntimeError(msg)

    # prune output modules that were not requested
    for availSkim in availableStreams:
        if availSkim not in skims:
            self.dropOutputModule(process, availSkim)

    return process
def alcaReco(self, skims, **args):
    """
    _alcaReco_

    AlcaReco processing & skims for RelVal MC production

    An optional 'globaltag' may be supplied via args.
    """
    # BUG FIX: the original referenced an undefined name 'globalTag'
    # (guaranteed NameError at runtime); take it from args the way the
    # alcaSkim implementations do, and set conditions only when given.
    globalTag = args.get('globaltag', None)

    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = "pp"
    options.step = 'ALCA:MuAlStandAloneCosmics,ENDJOB'
    options.isMC = True
    options.isData = False
    if globalTag is not None:
        options.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    options.beamspot = None
    options.eventcontent = None
    options.relval = None

    process = cms.Process('ALCA')
    cb = ConfigBuilder(options, process=process)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    cb.prepare()

    # verify that every requested skim exists as an output module
    availableStreams = process.outputModules_().keys()
    for skim in skims:
        if skim not in availableStreams:
            msg = "Skim named: %s not available " % skim
            msg += "in Alca Reco Config:\n"
            msg += "Known Skims: %s\n" % availableStreams
            raise RuntimeError(msg)

    # prune output modules that were not requested
    for availSkim in availableStreams:
        if availSkim not in skims:
            self.dropOutputModule(process, availSkim)

    return process
def promptReco(self, globalTag, writeTiers=['RECO'], **args):
    """
    _promptReco_

    Proton collision data taking prompt reco

    """
    alcaSkims = ['SiStripCalZeroBias', 'TkAlMinBias', 'TkAlMuonIsolated',
                 'MuAlCalIsolatedMu', 'MuAlOverlaps', 'HcalCalIsoTrk',
                 'HcalCalDijets', 'SiStripCalMinBias', 'EcalCalElectron',
                 'DtCalib', 'TkAlJpsiMuMu', 'TkAlUpsilonMuMu', 'TkAlZMuMu']
    alcaStep = stepALCAPRODUCER(alcaSkims)

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "pp"
    opts.step = 'RAW2DIGI,L1Reco,RECO%s,L1HwVal,DQM,ENDJOB' % alcaStep
    opts.isMC = False
    opts.isData = True
    opts.beamspot = None
    tiers = ','.join(writeTiers)
    opts.eventcontent = tiers
    opts.datatier = tiers
    opts.magField = 'AutoFromDBCurrent'
    opts.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    opts.relval = False

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    # add the former top level patches here
    customisePrompt(process)
    return process
def alcaSkim(self, skims, **args):
    """
    _alcaSkim_

    AlcaReco processing & skims for proton collisions

    """
    # PromptCalibProd runs as an ALCA step of its own; everything else
    # goes through ALCAOUTPUT (skims is mutated in place, as before)
    step = ""
    if 'PromptCalibProd' in skims:
        step = "ALCA:PromptCalibProd"
        skims.remove('PromptCalibProd')
    if len(skims) > 0:
        if step != "":
            step += ","
        step += "ALCAOUTPUT:" + ('+'.join(skims))

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = self.cbSc
    opts.step = step
    if 'globaltag' in args:
        opts.conditions = args['globaltag']
    else:
        opts.conditions = 'None'
    opts.triggerResultsProcess = 'RECO'

    process = cms.Process('ALCA')
    builder = ConfigBuilder(opts, process=process)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    # FIXME: dirty hack..any way around this?
    # Tier0 needs the dataset used for ALCAHARVEST step to be a different data-tier
    if 'PromptCalibProd' in step:
        process.ALCARECOStreamPromptCalibProd.dataset.dataTier = \
            cms.untracked.string('ALCAPROMPT')

    return process
def alcaSkim(self, skims, **args):
    """
    _alcaSkim_

    AlcaReco processing & skims for proton collisions

    """
    # PromptCalibProd runs as an ALCA step of its own; everything else
    # goes through ALCAOUTPUT (skims is mutated in place, as before)
    step = ""
    if 'PromptCalibProd' in skims:
        step = "ALCA:PromptCalibProd"
        skims.remove('PromptCalibProd')
    if len(skims) > 0:
        if step != "":
            step += ","
        step += "ALCAOUTPUT:" + ('+'.join(skims))

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = self.cbSc
    opts.step = step
    if 'globaltag' in args:
        opts.conditions = args['globaltag']
    else:
        opts.conditions = 'None'
    opts.triggerResultsProcess = 'RECO'

    process = cms.Process('ALCA')
    builder = ConfigBuilder(opts, process=process)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    # FIXME: dirty hack..any way around this?
    # Tier0 needs the dataset used for ALCAHARVEST step to be a different data-tier
    if 'PromptCalibProd' in step:
        process.ALCARECOStreamPromptCalibProd.dataset.dataTier = \
            cms.untracked.string('ALCAPROMPT')

    return process
def promptReco(self, globalTag, **args):
    """
    _promptReco_

    ALCARECO-only prompt reco: run just the ALCA producers for the
    requested (or scenario-default) skims.
    """
    # default to the scenario's own skim list when none is requested
    if 'skims' not in args:
        args['skims'] = self.skims
    step = stepALCAPRODUCER(args['skims'])
    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = "pp"
    options.step = step
    dictIO(options, args)
    options.conditions = globalTag

    process = cms.Process('RECO')
    cb = ConfigBuilder(options, process=process, with_output=True)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    cb.prepare()
    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Proton collisions data taking DQM Harvesting

    """
    opts = defaultOptions
    opts.scenario = self.cbSc
    opts.step = "HARVESTING" + dqmSeq(args, ':dqmHarvesting')
    opts.name = "EDMtoMEConvert"
    opts.conditions = globalTag

    process = cms.Process("HARVESTING")
    process.source = dqmIOSource(args)
    builder = ConfigBuilder(opts, process=process)
    builder.prepare()

    # job-specific customisation is delegated to harvestingMode
    harvestingMode(process, datasetName, args, rANDl=False)
    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Proton collisions data taking DQM Harvesting

    """
    opts = defaultOptions
    opts.scenario = self.cbSc
    opts.step = "HARVESTING" + dqmSeq(args, ":DQMOffline")
    opts.name = "EDMtoMEConvert"
    opts.conditions = globalTag

    process = cms.Process("HARVESTING")
    process.source = dqmIOSource(args)
    builder = ConfigBuilder(opts, process=process)
    builder.prepare()

    # job-specific customisation is delegated to harvestingMode
    harvestingMode(process, datasetName, args, rANDl=False)
    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Heavy-ion collisions data taking DQM Harvesting

    """
    options = defaultOptions
    options.scenario = "HeavyIons"
    options.step = "HARVESTING:dqmHarvesting"
    options.isMC = False
    options.isData = True
    options.beamspot = None
    options.eventcontent = None
    options.name = "EDMtoMEConvert"
    options.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    options.arguments = ""
    options.evt_type = ""
    options.filein = []

    process = cms.Process("HARVESTING")
    # choose the source matching the DQM file format of the input
    if args.get('newDQMIO', False):
        process.source = cms.Source("DQMRootSource")
    else:
        process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName
    process.dqmSaver.saveByLumiSection = 1
    # dict.has_key() is deprecated (removed in Python 3); .get() with a
    # falsy default covers both the missing-key and empty-value cases
    if args.get('referenceFile', ''):
        process.DQMStore.referenceFileName = \
            cms.untracked.string(args['referenceFile'])

    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Heavy-ion collisions data taking DQM Harvesting

    """
    options = defaultOptions
    options.scenario = "HeavyIons"
    options.step = "HARVESTING:dqmHarvesting"
    options.isMC = False
    options.isData = True
    options.beamspot = None
    options.eventcontent = None
    options.name = "EDMtoMEConvert"
    options.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    options.arguments = ""
    options.evt_type = ""
    options.filein = []

    process = cms.Process("HARVESTING")
    # choose the source matching the DQM file format of the input
    if args.get('newDQMIO', False):
        process.source = cms.Source("DQMRootSource")
    else:
        process.source = cms.Source("PoolSource")
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    #
    # customise process for particular job
    #
    process.source.processingMode = cms.untracked.string('RunsAndLumis')
    process.source.fileNames = cms.untracked(cms.vstring())
    process.maxEvents.input = -1
    process.dqmSaver.workflow = datasetName
    process.dqmSaver.saveByLumiSection = 1
    # dict.has_key() is deprecated (removed in Python 3); .get() with a
    # falsy default covers both the missing-key and empty-value cases
    if args.get('referenceFile', ''):
        process.DQMStore.referenceFileName = \
            cms.untracked.string(args['referenceFile'])

    return process
def promptReco(self, globalTag, writeTiers=['RECO'], **args):
    """
    _promptReco_

    Cosmic data taking prompt reco

    """
    alcaSkims = ['TkAlBeamHalo', 'MuAlBeamHaloOverlaps', 'MuAlBeamHalo',
                 'TkAlCosmics0T', 'MuAlGlobalCosmics', 'MuAlCalIsolatedMu',
                 'HcalCalHOCosmics', 'DtCalib']
    alcaStep = stepALCAPRODUCER(alcaSkims)

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "cosmics"
    opts.step = 'RAW2DIGI,L1Reco,RECO%s,L1HwVal,DQM,ENDJOB' % alcaStep
    opts.isMC = False
    opts.isData = True
    opts.beamspot = None
    tiers = ','.join(writeTiers)
    opts.eventcontent = tiers
    opts.datatier = tiers
    opts.magField = 'AutoFromDBCurrent'
    opts.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    opts.relval = False

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    customiseCosmicData(process)
    return process
def promptReco(self, globalTag, **args):
    """
    _promptReco_

    Prompt reco: run only the ALCAPRODUCER step for the configured skims.

    globalTag -- conditions global tag
    args may carry 'skims' (list of AlCa producer names) plus the I/O
    settings consumed by dictIO; when 'skims' is absent the instance
    default self.skims is used.
    """
    # fall back to the scenario's default skim list
    if 'skims' not in args:
        args['skims'] = self.skims
    step = stepALCAPRODUCER(args['skims'])
    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = "pp"
    options.step = step
    dictIO(options, args)
    options.conditions = globalTag

    process = cms.Process('RECO')
    cb = ConfigBuilder(options, process=process, with_output=True)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    cb.prepare()
    return process
def alcaSkim(self, skims, **args):
    """
    _alcaSkim_

    AlcaReco processing & skims for proton collisions

    skims -- list of ALCAOUTPUT skim names
    Optional keyword arg 'globaltag' selects the conditions global tag;
    when absent, options.conditions is left at its default.
    """
    globalTag = None
    if 'globaltag' in args:
        globalTag = args['globaltag']

    # build "ALCAOUTPUT:skimA+skimB+..." — join avoids the manual
    # concatenation loop plus trailing-'+' rstrip of the original
    step = "ALCAOUTPUT:" + '+'.join(skims)

    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    options.scenario = "pp"
    options.step = step
    options.isMC = False
    options.isData = True
    options.beamspot = None
    options.eventcontent = None
    options.relval = None
    # identity comparison with None is the idiomatic (PEP 8) form
    if globalTag is not None:
        options.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    options.triggerResultsProcess = 'RECO'

    process = cms.Process('ALCA')
    cb = ConfigBuilder(options, process=process)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    cb.prepare()
    return process
def promptReco(self, globalTag, writeTiers=['RECO'], **args):
    """
    _promptReco_

    Heavy-ion collision data taking prompt reco
    """
    producers = ['SiStripCalZeroBias',
                 'SiStripCalMinBias',
                 'TkAlMinBiasHI']
    alcaStep = stepALCAPRODUCER(producers)

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "HeavyIons"
    opts.step = 'RAW2DIGI,L1Reco,RECO' + alcaStep + ',L1HwVal,DQM,ENDJOB'
    opts.isMC = False
    opts.isData = True
    opts.beamspot = None
    opts.eventcontent = None
    opts.magField = 'AutoFromDBCurrent'
    opts.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    opts.relval = False

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process)

    # read EDM input files
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    # one output module per requested data tier
    for dataTier in writeTiers:
        addOutputModule(process, dataTier, dataTier)

    # add the former top level patches here
    customiseAlcaOnlyPromptHI(process)
    return process
def expressProcessing(self, globalTag, writeTiers=[], **args):
    """
    _expressProcessing_

    Heavy-ion collision data taking express processing
    """
    producers = ['SiStripCalZeroBias',
                 'TkAlMinBiasHI']
    alcaStep = stepALCAPRODUCER(producers)

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "HeavyIons"
    opts.step = 'RAW2DIGI,L1Reco,RECO' + alcaStep + ',L1HwVal,DQM,ENDJOB'
    opts.isMC = False
    opts.isData = True
    opts.beamspot = None
    opts.eventcontent = None
    opts.magField = 'AutoFromDBCurrent'
    opts.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    opts.relval = False

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process)

    # express jobs read directly from the streamer files
    process.source = cms.Source("NewEventStreamFileReader",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    # one output module per requested data tier
    for dataTier in writeTiers:
        addOutputModule(process, dataTier, dataTier)

    # add the former top level patches here
    customiseExpressHI(process)
    return process
def run(): import sys import os import Configuration.PyReleaseValidation from Configuration.PyReleaseValidation.ConfigBuilder import ConfigBuilder from Configuration.PyReleaseValidation.cmsDriverOptions import OptionsFromCommandLine options = OptionsFromCommandLine() # after cleanup of all config parameters pass it to the ConfigBuilder configBuilder = ConfigBuilder(options, with_output=True, with_input=True) configBuilder.prepare() # fetch the results and write it to file config = file(options.python_filename, "w") config.write(configBuilder.pythonCfgCode) config.close() # handle different dump options if options.dump_python: result = {} execfile(options.python_filename, result) process = result["process"] expanded = process.dumpPython() expandedFile = file(options.python_filename, "w") expandedFile.write(expanded) expandedFile.close() print "Expanded config file", options.python_filename, "created" sys.exit(0) if options.no_exec_flag: print "Config file " + options.python_filename + " created" sys.exit(0) else: commandString = options.prefix + " cmsRun " + options.suffix print "Starting " + commandString + ' ' + options.python_filename commands = commandString.lstrip().split() os.execvpe(commands[0], commands + [options.python_filename], os.environ) sys.exit()
def expressProcessing(self, globalTag, writeTiers=[], **args):
    """
    _expressProcessing_

    Heavy-ion collision data taking express processing
    """
    # no AlCa producers in this scenario; step expands to the bare chain
    alcaStep = stepALCAPRODUCER([])

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "HeavyIons"
    opts.step = 'RAW2DIGI,L1Reco,RECO' + alcaStep + ',L1HwVal,DQM,ENDJOB'
    opts.isMC = False
    opts.isData = True
    opts.beamspot = None
    opts.eventcontent = None
    opts.magField = 'AutoFromDBCurrent'
    opts.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    opts.relval = False

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process)

    # express jobs read directly from the streamer files
    process.source = cms.Source("NewEventStreamFileReader",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    # one output module per requested data tier
    for dataTier in writeTiers:
        addOutputModule(process, dataTier, dataTier)

    # add the former top level patches here
    customiseExpress(process)
    return process
def run():
    # Command-line wrapper around ConfigBuilder: writes the generated
    # configuration to options.python_filename and then either dumps the
    # expanded config, stops after writing, or execs cmsRun on it.
    # (Python 2 code: print statement, file(), execfile.)
    import sys
    import os
    import Configuration.PyReleaseValidation
    from Configuration.PyReleaseValidation.ConfigBuilder import ConfigBuilder
    from Configuration.PyReleaseValidation.cmsDriverOptions import OptionsFromCommandLine
    options = OptionsFromCommandLine()
    # after cleanup of all config parameters pass it to the ConfigBuilder
    configBuilder = ConfigBuilder(options, with_output = True, with_input = True)
    configBuilder.prepare()
    # fetch the results and write it to file
    config = file(options.python_filename,"w")
    config.write(configBuilder.pythonCfgCode)
    config.close()
    # handle different dump options
    if options.dump_python:
        # re-execute the freshly written file and write the fully
        # expanded process back into the same file
        result = {}
        execfile(options.python_filename, result)
        process = result["process"]
        expanded = process.dumpPython()
        expandedFile = file(options.python_filename,"w")
        expandedFile.write(expanded)
        expandedFile.close()
        print "Expanded config file", options.python_filename, "created"
        sys.exit(0)
    if options.no_exec_flag:
        # only write the config file, do not run it
        print "Config file "+options.python_filename+ " created"
        sys.exit(0)
    else:
        # replace this process with cmsRun on the generated config
        commandString = options.prefix+" cmsRun "+options.suffix
        print "Starting "+commandString+' '+options.python_filename
        commands = commandString.lstrip().split()
        os.execvpe(commands[0],commands+[options.python_filename],os.environ)
        sys.exit()
def promptReco(self, globalTag, writeTiers=['RECO'], **args):
    """
    _promptReco_

    Cosmic data taking prompt reco
    """
    cosmicSkims = ['TkAlBeamHalo', 'MuAlBeamHaloOverlaps', 'MuAlBeamHalo',
                   'TkAlCosmics0T', 'MuAlGlobalCosmics', 'MuAlCalIsolatedMu',
                   'HcalCalHOCosmics', 'DtCalib']
    alcaStep = stepALCAPRODUCER(cosmicSkims)

    cfgOptions = Options()
    cfgOptions.__dict__.update(defaultOptions.__dict__)
    cfgOptions.scenario = "cosmics"
    cfgOptions.step = 'RAW2DIGI,L1Reco,RECO' + alcaStep + ',L1HwVal,DQM,ENDJOB'
    cfgOptions.isMC = False
    cfgOptions.isData = True
    cfgOptions.beamspot = None
    tiers = ','.join(writeTiers)
    cfgOptions.eventcontent = tiers
    cfgOptions.datatier = tiers
    cfgOptions.magField = 'AutoFromDBCurrent'
    cfgOptions.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    cfgOptions.relval = False

    process = cms.Process('RECO')
    builder = ConfigBuilder(cfgOptions, process=process, with_output=True)

    # read EDM input files
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()
    customiseCosmicData(process)
    return process
def expressProcessing(self, globalTag, writeTiers=[], **args):
    """
    _expressProcessing_

    Proton collision data taking express processing
    """
    calibSkims = ['EcalCalPi0Calib',
                  'EcalCalEtaCalib']
    # express here runs only the AlCa producer step
    alcaStep = stepALCAPRODUCER(calibSkims)

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "pp"
    opts.step = alcaStep
    opts.isMC = False
    opts.isData = True
    opts.beamspot = None
    tierSpec = ','.join(writeTiers)
    opts.eventcontent = tierSpec
    opts.datatier = tierSpec
    opts.magField = 'AutoFromDBCurrent'
    opts.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    opts.relval = False

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # read EDM input files
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    # add the former top level patches here
    customisePrompt(process)
    return process
def promptReco(self, globalTag, writeTiers=['RECO'], **args):
    """
    _promptReco_

    Proton collision data taking prompt reco
    """
    alcaStep = stepALCAPRODUCER(['HcalCalMinBias'])

    opts = Options()
    opts.__dict__.update(defaultOptions.__dict__)
    opts.scenario = "pp"
    # HCAL non-zero-suppressed reconstruction variant
    opts.step = 'RAW2DIGI,L1Reco,RECO:reconstruction_HcalNZS' + alcaStep + ',DQM,ENDJOB'
    opts.isMC = False
    opts.isData = True
    opts.beamspot = None
    tierSpec = ','.join(writeTiers)
    opts.eventcontent = tierSpec
    opts.datatier = tierSpec
    opts.magField = 'AutoFromDBCurrent'
    opts.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
    opts.relval = False

    process = cms.Process('RECO')
    builder = ConfigBuilder(opts, process=process, with_output=True)

    # read EDM input files
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    builder.prepare()

    # add the former top level patches here
    customisePrompt(process)
    return process
def promptReco(self, globalTag, **args):
    """
    _promptReco_

    Collision data, data scouting (dst stream).
    This method provides the scheleton process for the dataScouting.
    dpiparo 17-7-2012
    I follow the structure of the package.

    globalTag -- conditions global tag
    args      -- I/O settings consumed by dictIO
    """
    options = Options()
    options.__dict__.update(defaultOptions.__dict__)
    # BUG FIX: scenario must be assigned AFTER copying defaultOptions —
    # the original set it first, so the __dict__.update clobbered it
    # with the default value (every sibling method sets it afterwards).
    options.scenario = self.cbSc
    options.step = "DQM:DQM/DataScouting/dataScouting_cff.dataScoutingDQMSequence,ENDJOB"
    dictIO(options, args)
    options.conditions = globalTag

    process = cms.Process("DataScouting")
    cb = ConfigBuilder(options, process=process, with_output=True)

    # Input source
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())
    cb.prepare()
    return process
#! /usr/bin/env python # A Pyrelval Wrapper import sys import os import Configuration.PyReleaseValidation from Configuration.PyReleaseValidation.ConfigBuilder import ConfigBuilder, defaultOptions from Configuration.PyReleaseValidation.cmsDriverOptions import options, python_config_filename # after cleanup of all config parameters pass it to the ConfigBuilder configBuilder = ConfigBuilder(options, with_output = True, with_input = True) configBuilder.prepare() # fetch the results and write it to file if options.python_filename: python_config_filename = options.python_filename config = file(python_config_filename,"w") config.write(configBuilder.pythonCfgCode) config.close() # handle different dump options if options.dump_python: result = {} execfile(python_config_filename, result) process = result["process"] expanded = process.dumpPython() expandedFile = file(python_config_filename,"w") expandedFile.write(expanded) expandedFile.close() print "Expanded config file", python_config_filename, "created" sys.exit(0)