    # Check if chained processing is enabled
    # If not - apply the per job tweaks
    # If so - create an override TFC (like done in PA) and then modify the PSet accordingly
    if hasattr(self.step.data.input, "chainedProcessing") and self.step.data.input.chainedProcessing:
        self._handleChainedProcessing()
    else:
        # Apply per job PSet Tweaks
        jobTweak = makeJobTweak(self.job)
        applyTweak(self.process, jobTweak, self.fixupDict)

    # check for pileup settings presence, pileup support implementation
    # and if enabled, process pileup configuration / settings
    if hasattr(self.step.data, "pileup"):
        self._handlePileup()

    # Apply per output module PSet Tweaks
    cmsswStep = self.step.getTypeHelper()
    for om in cmsswStep.listOutputModules():
        mod = cmsswStep.getOutputModule(om)
        outTweak = makeOutputTweak(mod, self.job)
        applyTweak(self.process, outTweak, self.fixupDict)

    # revlimiter for testing
    if getattr(self.step.data.application.command, "oneEventMode", False):
        self.process.maxEvents.input = 1

    # check for random seeds and the method of seeding which is in the job baggage
    self.handleSeeding()

    # make sure default parameter sets for perf reports are installed
    self.handlePerformanceSettings()

    # check for event numbers in the producers
    self.handleProducersNumberOfEvents()
    # Check if chained processing is enabled
    # If not - apply the per job tweaks
    # If so - create an override TFC (like done in PA) and then modify the PSet accordingly
    if hasattr(self.step.data.input, "chainedProcessing") and self.step.data.input.chainedProcessing:
        self._handleChainedProcessing()
    else:
        # Apply per job PSet Tweaks
        jobTweak = makeJobTweak(self.job)
        applyTweak(self.process, jobTweak, self.fixupDict)

    # check for pileup settings presence, pileup support implementation
    # and if enabled, process pileup configuration / settings
    if hasattr(self.step.data, "pileup"):
        self._handlePileup()

    # Apply per output module PSet Tweaks
    cmsswStep = self.step.getTypeHelper()
    for om in cmsswStep.listOutputModules():
        mod = cmsswStep.getOutputModule(om)
        outTweak = makeOutputTweak(mod, self.job)
        applyTweak(self.process, outTweak, self.fixupDict)

    # revlimiter for testing
    # self.process.maxEvents.input = 2

    # check for random seeds and the method of seeding which is in the job baggage
    self.handleSeeding()

    # make sure default parameter sets for perf reports are installed
    self.handlePerformanceSettings()

    # check for event numbers in the producers
    self.handleProducersNumberOfEvents()

    # fixup the dqmFileSaver
    self.handleDQMFileSaver()
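# applyTweak() and the makeJobTweak()/makeOutputTweak() helpers above come from
# WMCore's PSetTweaks package. As a rough sketch of the pattern only -- not the
# actual WMCore implementation, and ignoring the fixupDict hooks -- a tweak maps
# dotted "process.X.Y" paths to values, applied by walking the attribute chain:
def apply_tweak_sketch(process, tweak_params):
    """Set each dotted-path parameter on the given process object."""
    for path, value in tweak_params.items():
        parts = path.split('.')[1:]  # drop the leading "process" token
        target = process
        for attr in parts[:-1]:
            target = getattr(target, attr)
        setattr(target, parts[-1], value)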
def __call__(self):
    """
    _call_

    Examine the step configuration and construct a PSet from that.

    """
    self.logger.info("Executing SetupCMSSWPSet...")
    self.jobBag = self.job.getBaggage()

    scenario = getattr(self.step.data.application.configuration, "scenario", None)
    if scenario is not None and scenario != "":
        self.logger.info("Setting up job scenario/process")
        funcName = getattr(self.step.data.application.configuration, "function", None)
        if getattr(self.step.data.application.configuration, "pickledarguments", None) is not None:
            funcArgs = pickle.loads(self.step.data.application.configuration.pickledarguments)
        else:
            funcArgs = {}
        try:
            self.createProcess(scenario, funcName, funcArgs)
        except Exception as ex:
            self.logger.exception("Error creating process for Config/DataProcessing:")
            raise ex

        if funcName == "repack":
            self.handleRepackSettings()

        if funcName in ["merge", "alcaHarvesting"]:
            self.handleSingleCoreOverride()

        if socket.getfqdn().endswith("cern.ch"):
            self.handleSpecialCERNMergeSettings(funcName)
    else:
        try:
            self.loadPSet()
        except Exception as ex:
            self.logger.exception("Error loading PSet:")
            raise ex

    # Check process.source exists
    if getattr(self.process, "source", None) is None:
        msg = "Error in CMSSW PSet: process is missing attribute 'source'"
        msg += " or process.source is defined with None value."
        self.logger.error(msg)
        raise RuntimeError(msg)

    self.handleCondorStatusService()

    self.fixupProcess()

    # In case of CRAB3, the number of threads in the PSet should not be overridden
    if not self.crabPSet:
        try:
            origCores = int(getattr(self.step.data.application.multicore, 'numberOfCores', 1))
            eventStreams = int(getattr(self.step.data.application.multicore, 'eventStreams', 0))
            resources = {'cores': origCores}
            resizeResources(resources)
            numCores = resources['cores']
            if numCores != origCores:
                self.logger.info("Resizing a job with nStreams != nCores. Setting nStreams = nCores. This may end badly.")
                eventStreams = 0
            options = getattr(self.process, "options", None)
            if options is None:
                self.process.options = cms.untracked.PSet()
                options = getattr(self.process, "options")
            options.numberOfThreads = cms.untracked.uint32(numCores)
            options.numberOfStreams = cms.untracked.uint32(eventStreams)
        except AttributeError as ex:
            self.logger.error("Failed to override numberOfThreads: %s", str(ex))

    psetTweak = getattr(self.step.data.application.command, "psetTweak", None)
    if psetTweak is not None:
        self.applyPSetTweak(psetTweak, self.fixupDict)

    # Apply task level tweaks
    taskTweak = makeTaskTweak(self.step.data)
    applyTweak(self.process, taskTweak, self.fixupDict)

    # Check if chained processing is enabled
    # If not - apply the per job tweaks
    # If so - create an override TFC (like done in PA) and then modify the PSet accordingly
    if hasattr(self.step.data.input, "chainedProcessing") and self.step.data.input.chainedProcessing:
        self.handleChainedProcessing()
    else:
        # Apply per job PSet Tweaks
        jobTweak = makeJobTweak(self.job)
        applyTweak(self.process, jobTweak, self.fixupDict)

    # check for pileup settings presence, pileup support implementation
    # and if enabled, process pileup configuration / settings
    if hasattr(self.step.data, "pileup"):
        self.handlePileup()

    # Apply per output module PSet Tweaks
    cmsswStep = self.step.getTypeHelper()
    for om in cmsswStep.listOutputModules():
        mod = cmsswStep.getOutputModule(om)
        outTweak = makeOutputTweak(mod, self.job)
        applyTweak(self.process, outTweak, self.fixupDict)

    # revlimiter for testing
    if getattr(self.step.data.application.command, "oneEventMode", False):
        self.process.maxEvents.input = 1

    # check for random seeds and the method of seeding which is in the job baggage
    self.handleSeeding()

    # make sure default parameter sets for perf reports are installed
    self.handlePerformanceSettings()

    # check for event numbers in the producers
    self.handleProducersNumberOfEvents()

    # fixup the dqmFileSaver
    self.handleDQMFileSaver()

    # tweak for jobs reading LHE articles from CERN
    self.handleLHEInput()

    # tweak jobs for enforceGUIDInFileName
    self.handleEnforceGUIDInFileName()

    # Check if we accept skipping bad files
    if hasattr(self.step.data.application.configuration, "skipBadFiles"):
        self.process.source.skipBadFiles = \
            cms.untracked.bool(self.step.data.application.configuration.skipBadFiles)

    # Apply events per lumi section if available
    if hasattr(self.step.data.application.configuration, "eventsPerLumi"):
        self.process.source.numberEventsInLuminosityBlock = \
            cms.untracked.uint32(self.step.data.application.configuration.eventsPerLumi)

    # limit run time if desired
    if hasattr(self.step.data.application.configuration, "maxSecondsUntilRampdown"):
        self.process.maxSecondsUntilRampdown = cms.untracked.PSet(
            input=cms.untracked.int32(self.step.data.application.configuration.maxSecondsUntilRampdown))

    # accept an overridden TFC from the step
    if hasattr(self.step.data.application, 'overrideCatalog'):
        self.logger.info("Found a TFC override: %s", self.step.data.application.overrideCatalog)
        self.process.source.overrideCatalog = \
            cms.untracked.string(self.step.data.application.overrideCatalog)

    configFile = self.step.data.application.command.configuration
    configPickle = getattr(self.step.data.application.command, "configurationPickle", "PSet.pkl")
    workingDir = self.stepSpace.location
    try:
        with open("%s/%s" % (workingDir, configPickle), 'wb') as pHandle:
            pickle.dump(self.process, pHandle)
        with open("%s/%s" % (workingDir, configFile), 'w') as handle:
            handle.write("import FWCore.ParameterSet.Config as cms\n")
            handle.write("import pickle\n")
            handle.write("with open('%s', 'rb') as handle:\n" % configPickle)
            handle.write("    process = pickle.load(handle)\n")
    except Exception as ex:
        self.logger.exception("Error writing out PSet:")
        raise ex
    self.logger.info("CMSSW PSet setup completed!")

    return 0
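# For reference, the four handle.write() calls above emit a loader stub of
# exactly this shape (shown with the default configPickle name "PSet.pkl"), so
# the CMSSW job unpickles the fully tweaked process instead of rebuilding it:
#
#     import FWCore.ParameterSet.Config as cms
#     import pickle
#     with open('PSet.pkl', 'rb') as handle:
#         process = pickle.load(handle)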
def __call__(self):
    """
    _call_

    Examine the step configuration and construct a PSet from that.

    """
    self.process = None

    funcName = None
    scenario = getattr(self.step.data.application.configuration, "scenario", None)
    if scenario is not None and scenario != "":
        funcName = getattr(self.step.data.application.configuration, "function", None)
        if getattr(self.step.data.application.configuration, "pickledarguments", None) is not None:
            funcArgs = pickle.loads(self.step.data.application.configuration.pickledarguments)
        else:
            funcArgs = {}
        try:
            self.createProcess(scenario, funcName, funcArgs)
        except Exception as ex:
            print("Error creating process for Config/DataProcessing:")
            print(traceback.format_exc())
            raise ex

        if funcName == "repack":
            self.handleRepackSettings()
        elif funcName == "merge":
            self.handleMergeSettings()

        if socket.getfqdn().endswith("cern.ch"):
            self.handleSpecialCERNMergeSettings(funcName)
    else:
        try:
            self.loadPSet()
        except Exception as ex:
            print("Error loading PSet:")
            print(traceback.format_exc())
            raise ex

    # Check process.source exists
    if getattr(self.process, "source", None) is None:
        msg = "Error in CMSSW PSet: process is missing attribute 'source' or process.source is defined with None value."
        raise RuntimeError(msg)

    self.handleCondorStatusService()

    self.fixupProcess()

    try:
        if int(self.step.data.application.multicore.numberOfCores) > 1:
            numCores = int(self.step.data.application.multicore.numberOfCores)
            options = getattr(self.process, "options", None)
            if options is None:
                self.process.options = cms.untracked.PSet()
                options = getattr(self.process, "options")
            options.numberOfThreads = cms.untracked.uint32(numCores)
            options.numberOfStreams = cms.untracked.uint32(0)  # For now, same as numCores
    except AttributeError:
        print("No value for numberOfCores. Not setting")

    psetTweak = getattr(self.step.data.application.command, "psetTweak", None)
    if psetTweak is not None:
        self.applyPSetTweak(psetTweak, self.fixupDict)

    # Apply task level tweaks
    taskTweak = makeTaskTweak(self.step.data)
    applyTweak(self.process, taskTweak, self.fixupDict)

    # Check if chained processing is enabled
    # If not - apply the per job tweaks
    # If so - create an override TFC (like done in PA) and then modify the PSet accordingly
    if (hasattr(self.step.data.input, "chainedProcessing") and
            self.step.data.input.chainedProcessing):
        self.handleChainedProcessing()
    else:
        # Apply per job PSet Tweaks
        jobTweak = makeJobTweak(self.job)
        applyTweak(self.process, jobTweak, self.fixupDict)

    # check for pileup settings presence, pileup support implementation
    # and if enabled, process pileup configuration / settings
    if hasattr(self.step.data, "pileup"):
        self.handlePileup()

    # Apply per output module PSet Tweaks
    cmsswStep = self.step.getTypeHelper()
    for om in cmsswStep.listOutputModules():
        mod = cmsswStep.getOutputModule(om)
        outTweak = makeOutputTweak(mod, self.job)
        applyTweak(self.process, outTweak, self.fixupDict)

    # revlimiter for testing
    if getattr(self.step.data.application.command, "oneEventMode", False):
        self.process.maxEvents.input = 1

    # check for random seeds and the method of seeding which is in the job baggage
    self.handleSeeding()

    # make sure default parameter sets for perf reports are installed
    self.handlePerformanceSettings()

    # check for event numbers in the producers
    self.handleProducersNumberOfEvents()

    # fixup the dqmFileSaver
    self.handleDQMFileSaver()

    # Check if we accept skipping bad files
    if hasattr(self.step.data.application.configuration, "skipBadFiles"):
        self.process.source.skipBadFiles = \
            cms.untracked.bool(self.step.data.application.configuration.skipBadFiles)

    # Apply events per lumi section if available
    if hasattr(self.step.data.application.configuration, "eventsPerLumi"):
        self.process.source.numberEventsInLuminosityBlock = \
            cms.untracked.uint32(self.step.data.application.configuration.eventsPerLumi)

    # accept an overridden TFC from the step
    if hasattr(self.step.data.application, 'overrideCatalog'):
        print("Found a TFC override: %s" % self.step.data.application.overrideCatalog)
        self.process.source.overrideCatalog = \
            cms.untracked.string(self.step.data.application.overrideCatalog)

    configFile = self.step.data.application.command.configuration
    configPickle = getattr(self.step.data.application.command, "configurationPickle", "PSet.pkl")
    workingDir = self.stepSpace.location
    handle = open("%s/%s" % (workingDir, configFile), 'w')
    pHandle = open("%s/%s" % (workingDir, configPickle), 'wb')
    try:
        pickle.dump(self.process, pHandle)
        handle.write("import FWCore.ParameterSet.Config as cms\n")
        handle.write("import pickle\n")
        handle.write("handle = open('%s', 'rb')\n" % configPickle)
        handle.write("process = pickle.load(handle)\n")
        handle.write("handle.close()\n")
    except Exception as ex:
        print("Error writing out PSet:")
        print(traceback.format_exc())
        raise ex
    finally:
        handle.close()
        pHandle.close()

    return 0
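# The numberOfCores branch above boils down to the following stand-alone CMSSW
# configuration fragment (a sketch with an example core count; in CMSSW,
# numberOfStreams == 0 tells the framework to run one stream per thread):
import FWCore.ParameterSet.Config as cms

process = cms.Process("EXAMPLE")
process.options = cms.untracked.PSet(
    numberOfThreads=cms.untracked.uint32(4),  # example value; the code above reads it from step data
    numberOfStreams=cms.untracked.uint32(0),  # 0 => defer to numberOfThreads
)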
def __call__(self):
    """
    _call_

    Examine the step configuration and construct a PSet from that.

    """
    self.process = None

    scenario = getattr(self.step.data.application.configuration, "scenario", None)
    if scenario is not None and scenario != "":
        funcName = getattr(self.step.data.application.configuration, "function", None)
        if getattr(self.step.data.application.configuration, "pickledarguments", None) is not None:
            funcArgs = pickle.loads(self.step.data.application.configuration.pickledarguments)
        else:
            funcArgs = {}
        try:
            self.createProcess(scenario, funcName, funcArgs)
        except Exception as ex:
            logging.exception("Error creating process for Config/DataProcessing:")
            raise ex

        if funcName == "repack":
            self.handleRepackSettings()

        if funcName in ["merge", "alcaHarvesting"]:
            self.handleSingleCoreOverride()

        if socket.getfqdn().endswith("cern.ch"):
            self.handleSpecialCERNMergeSettings(funcName)
    else:
        try:
            self.loadPSet()
        except Exception as ex:
            logging.exception("Error loading PSet:")
            raise ex

    # Check process.source exists
    if getattr(self.process, "source", None) is None:
        msg = "Error in CMSSW PSet: process is missing attribute 'source'"
        msg += " or process.source is defined with None value."
        logging.error(msg)
        raise RuntimeError(msg)

    self.handleCondorStatusService()

    self.fixupProcess()

    # In case of CRAB3, the number of threads in the PSet should not be overridden
    if not self.crabPSet:
        try:
            origCores = int(getattr(self.step.data.application.multicore, 'numberOfCores', 1))
            eventStreams = int(getattr(self.step.data.application.multicore, 'eventStreams', 0))
            resources = {'cores': origCores}
            resizeResources(resources)
            numCores = resources['cores']
            if numCores != origCores:
                logging.info("Resizing a job with nStreams != nCores. Setting nStreams = nCores. This may end badly.")
                eventStreams = 0
            options = getattr(self.process, "options", None)
            if options is None:
                self.process.options = cms.untracked.PSet()
                options = getattr(self.process, "options")
            options.numberOfThreads = cms.untracked.uint32(numCores)
            options.numberOfStreams = cms.untracked.uint32(eventStreams)
        except AttributeError as ex:
            logging.error("Failed to override numberOfThreads: %s", str(ex))

    psetTweak = getattr(self.step.data.application.command, "psetTweak", None)
    if psetTweak is not None:
        self.applyPSetTweak(psetTweak, self.fixupDict)

    # Apply task level tweaks
    taskTweak = makeTaskTweak(self.step.data)
    applyTweak(self.process, taskTweak, self.fixupDict)

    # Check if chained processing is enabled
    # If not - apply the per job tweaks
    # If so - create an override TFC (like done in PA) and then modify the PSet accordingly
    if hasattr(self.step.data.input, "chainedProcessing") and self.step.data.input.chainedProcessing:
        self.handleChainedProcessing()
    else:
        # Apply per job PSet Tweaks
        jobTweak = makeJobTweak(self.job)
        applyTweak(self.process, jobTweak, self.fixupDict)

    # check for pileup settings presence, pileup support implementation
    # and if enabled, process pileup configuration / settings
    if hasattr(self.step.data, "pileup"):
        self.handlePileup()

    # Apply per output module PSet Tweaks
    cmsswStep = self.step.getTypeHelper()
    for om in cmsswStep.listOutputModules():
        mod = cmsswStep.getOutputModule(om)
        outTweak = makeOutputTweak(mod, self.job)
        applyTweak(self.process, outTweak, self.fixupDict)

    # revlimiter for testing
    if getattr(self.step.data.application.command, "oneEventMode", False):
        self.process.maxEvents.input = 1

    # check for random seeds and the method of seeding which is in the job baggage
    self.handleSeeding()

    # make sure default parameter sets for perf reports are installed
    self.handlePerformanceSettings()

    # check for event numbers in the producers
    self.handleProducersNumberOfEvents()

    # fixup the dqmFileSaver
    self.handleDQMFileSaver()

    # Check if we accept skipping bad files
    if hasattr(self.step.data.application.configuration, "skipBadFiles"):
        self.process.source.skipBadFiles = \
            cms.untracked.bool(self.step.data.application.configuration.skipBadFiles)

    # Apply events per lumi section if available
    if hasattr(self.step.data.application.configuration, "eventsPerLumi"):
        self.process.source.numberEventsInLuminosityBlock = \
            cms.untracked.uint32(self.step.data.application.configuration.eventsPerLumi)

    # limit run time if desired
    if hasattr(self.step.data.application.configuration, "maxSecondsUntilRampdown"):
        self.process.maxSecondsUntilRampdown = cms.untracked.PSet(
            input=cms.untracked.int32(self.step.data.application.configuration.maxSecondsUntilRampdown))

    # accept an overridden TFC from the step
    if hasattr(self.step.data.application, 'overrideCatalog'):
        logging.info("Found a TFC override: %s", self.step.data.application.overrideCatalog)
        self.process.source.overrideCatalog = \
            cms.untracked.string(self.step.data.application.overrideCatalog)

    configFile = self.step.data.application.command.configuration
    configPickle = getattr(self.step.data.application.command, "configurationPickle", "PSet.pkl")
    workingDir = self.stepSpace.location
    try:
        with open("%s/%s" % (workingDir, configPickle), 'wb') as pHandle:
            pickle.dump(self.process, pHandle)
        with open("%s/%s" % (workingDir, configFile), 'w') as handle:
            handle.write("import FWCore.ParameterSet.Config as cms\n")
            handle.write("import pickle\n")
            handle.write("with open('%s', 'rb') as handle:\n" % configPickle)
            handle.write("    process = pickle.load(handle)\n")
    except Exception as ex:
        logging.exception("Error writing out PSet:")
        raise ex

    return 0
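# resizeResources() above may shrink the core count granted by the batch slot.
# The guard that follows it encodes one simple rule: if the core count changed,
# the originally requested stream count can no longer be trusted, so fall back
# to streams == threads. A minimal sketch of that decision (hypothetical helper,
# not the WMCore function itself):
def pick_event_streams(orig_cores, resized_cores, requested_streams):
    """Return the stream count to use after a possible core resize."""
    if resized_cores != orig_cores:
        return 0  # 0 makes CMSSW default to one stream per thread
    return requested_streams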
def __call__(self):
    """
    _call_

    Examine the step configuration and construct a PSet from that.

    """
    self.logger.info("Executing SetupCMSSWPSet...")
    self.jobBag = self.job.getBaggage()
    self.configPickle = getattr(self.step.data.application.command, "configurationPickle", "PSet.pkl")
    self.psetFile = getattr(self.step.data.application.command, "configuration", "PSet.py")
    self.scram = self.createScramEnv()

    scenario = getattr(self.step.data.application.configuration, "scenario", None)
    if scenario is not None and scenario != "":
        self.logger.info("DEBUG: I'm in scenario")
        self.logger.info("Setting up job scenario/process")
        funcName = getattr(self.step.data.application.configuration, "function", None)
        if getattr(self.step.data.application.configuration, "pickledarguments", None) is not None:
            funcArgs = pickle.loads(self.step.data.application.configuration.pickledarguments)
        else:
            funcArgs = {}

        # Create process
        try:
            self.createProcess(scenario, funcName, funcArgs)
        except Exception as ex:
            self.logger.exception("Error creating process for Config/DataProcessing:")
            raise ex

        # Now, load the newly pickled process
        try:
            with open(os.path.join(self.stepSpace.location, self.configPickle), 'rb') as f:
                self.process = Unpickler(f).load()
        except ImportError as ex:
            msg = "Unable to import pset from %s:\n" % self.psetFile
            msg += str(ex)
            self.logger.error(msg)
            raise ex

        if funcName == "repack":
            self.handleRepackSettings()

        if funcName in ["merge", "alcaHarvesting"]:
            self.handleSingleCoreOverride()

        if socket.getfqdn().endswith("cern.ch"):
            self.handleSpecialCERNMergeSettings(funcName)
    else:
        self.logger.info("DEBUG: Now in the none scenario to load PSET")
        try:
            self.loadPSet()
        except Exception as ex:
            self.logger.exception("Error loading PSet:")
            raise ex

    # Check process.source exists
    self.logger.info("Debug: Self.process")
    self.logger.info(dir(self.process))
    if getattr(self.process, "source", None) is None and getattr(self.process, "_Process__source", None) is None:
        msg = "Error in CMSSW PSet: process is missing attribute 'source'"
        msg += " or process.source is defined with None value."
        self.logger.error(msg)
        raise RuntimeError(msg)

    self.handleCondorStatusService()

    self.fixupProcess()

    # In case of CRAB3, the number of threads in the PSet should not be overridden
    if not self.crabPSet:
        try:
            self.makeThreadsStreamsTweak()
        except AttributeError as ex:
            self.logger.error("Failed to override numberOfThreads: %s", str(ex))

    # Apply task level tweaks
    makeTaskTweak(self.step.data, self.tweak)
    self.applyPsetTweak(self.tweak, cleanupTweak=True)

    # Check if chained processing is enabled
    # If not - apply the per job tweaks
    # If so - create an override TFC (like done in PA) and then modify the PSet accordingly
    if hasattr(self.step.data.input, "chainedProcessing") and self.step.data.input.chainedProcessing:
        self.logger.info("Handling Chain processing tweaks")
        self.handleChainedProcessingTweak()
    else:
        self.logger.info("Creating job level tweaks")
        makeJobTweak(self.job, self.tweak)
    self.applyPsetTweak(self.tweak, cleanupTweak=True)

    # check for pileup settings presence, pileup support implementation
    # and if enabled, process pileup configuration / settings
    if hasattr(self.step.data, "pileup"):
        self.handlePileup()

    # Apply per output module PSet Tweaks
    self.logger.info("Output module section")
    cmsswStep = self.step.getTypeHelper()
    for om in cmsswStep.listOutputModules():
        mod = cmsswStep.getOutputModule(om)
        makeOutputTweak(mod, self.job, self.tweak)
    self.applyPsetTweak(self.tweak, cleanupTweak=True)

    # revlimiter for testing
    if getattr(self.step.data.application.command, "oneEventMode", False):
        self.tweak.addParameter('process.maxEvents.input', "customTypeCms.untracked.int32(1)")

    # check for random seeds and the method of seeding which is in the job baggage
    self.handleSeeding()

    # make sure default parameter sets for perf reports are installed
    self.handlePerformanceSettings()

    # fixup the dqmFileSaver
    self.handleDQMFileSaver()

    # tweak for jobs reading LHE articles from CERN
    self.handleLHEInput()

    # tweak jobs for enforceGUIDInFileName
    self.handleEnforceGUIDInFileName()

    # Check if we accept skipping bad files
    if hasattr(self.step.data.application.configuration, "skipBadFiles"):
        self.tweak.addParameter("process.source.skipBadFiles",
                                "customTypeCms.untracked.bool(%s)" % self.step.data.application.configuration.skipBadFiles)

    # Apply events per lumi section if available
    if hasattr(self.step.data.application.configuration, "eventsPerLumi"):
        self.tweak.addParameter("process.source.numberEventsInLuminosityBlock",
                                "customTypeCms.untracked.uint32(%s)" % self.step.data.application.configuration.eventsPerLumi)

    # limit run time if desired
    if hasattr(self.step.data.application.configuration, "maxSecondsUntilRampdown"):
        self.tweak.addParameter("process.maxSecondsUntilRampdown.input",
                                "customTypeCms.untracked.int32(%s)" % self.step.data.application.configuration.maxSecondsUntilRampdown)

    # accept an overridden TFC from the step
    if hasattr(self.step.data.application, 'overrideCatalog'):
        self.logger.info("Found a TFC override: %s", self.step.data.application.overrideCatalog)
        self.tweak.addParameter("process.source.overrideCatalog",
                                "customTypeCms.untracked.string('%s')" % self.step.data.application.overrideCatalog)

    configFile = self.step.data.application.command.configuration
    workingDir = self.stepSpace.location
    try:
        self.applyPsetTweak(self.tweak)
        with open("%s/%s" % (workingDir, configFile), 'w') as handle:
            handle.write("import FWCore.ParameterSet.Config as cms\n")
            handle.write("import pickle\n")
            handle.write("with open('%s', 'rb') as handle:\n" % self.configPickle)
            handle.write("    process = pickle.load(handle)\n")
    except Exception as ex:
        self.logger.exception("Error writing out PSet:")
        raise ex

    # check for event numbers in the producers
    self.handleProducersNumberOfEvents()

    self.logger.info("CMSSW PSet setup completed!")

    return 0
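# In this latest revision the process is no longer mutated attribute-by-attribute;
# parameters accumulate in self.tweak as dotted paths whose string values carry a
# "customTypeCms...." prefix, and applyPsetTweak() materialises them inside the
# CMSSW environment. A usage sketch of that accumulation pattern, reusing only
# calls that appear above (the import path and bare constructor are assumptions
# about WMCore's PSetTweak class):
from PSetTweaks.PSetTweak import PSetTweak

tweak = PSetTweak()
tweak.addParameter("process.maxEvents.input", "customTypeCms.untracked.int32(1)")
tweak.addParameter("process.source.skipBadFiles", "customTypeCms.untracked.bool(True)")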