Example #1
0
 def handleSeeding(self):
     """
     _handleSeeding_

     Handle Random Seed settings for the job.

     Dispatches on the ``seeding`` attribute of the job baggage:
     ``None`` (nothing to do), ``"AutomaticSeeding"`` (populate seeds via
     the CMSSW random service helper), or ``"ReproducibleSeeding"``
     (copy each service's ``initialSeed`` into a PSet tweak and apply it).

     :raises RuntimeError: if the seeding algorithm name is unrecognized.
     """
     baggage = self.job.getBaggage()
     seeding = getattr(baggage, "seeding", None)
     # No seeding configured for this job: nothing to do.
     if seeding is None:
         return
     if seeding == "AutomaticSeeding":
         # Deferred import: IOMC is only importable inside a CMSSW runtime.
         from IOMC.RandomEngine.RandomServiceHelper import RandomNumberServiceHelper
         helper = RandomNumberServiceHelper(self.process.RandomNumberGeneratorService)
         helper.populate()
         return
     if seeding == "ReproducibleSeeding":
         randService = self.process.RandomNumberGeneratorService
         tweak = PSetTweak()
         for x in randService:
             parameter = "process.RandomNumberGeneratorService.%s.initialSeed" % x._internal_name
             tweak.addParameter(parameter, x.initialSeed)
         applyTweak(self.process, tweak, self.fixupDict)
         return
     # Still here means a bad seeding algorithm name.
     # NOTE: use the Python 3 compatible call form; the old
     # `raise RuntimeError, "msg"` statement is a SyntaxError on Python 3.
     raise RuntimeError("Bad Seeding Algorithm: %s" % seeding)
    def applyTweak(self, psetTweak):
        """
        _applyTweak_

        Deserialize a persisted tweak and apply it to the process.
        """
        # Rehydrate the persisted tweak, then run it through the shared
        # module-level applyTweak() with this object's fixup dictionary.
        tweakObj = PSetTweak()
        tweakObj.unpersist(psetTweak)
        applyTweak(self.process, tweakObj, self.fixupDict)
        return
Example #3
0
    def applyTweak(self, psetTweak):
        """
        _applyTweak_

        Load a persisted PSet tweak and apply it to ``self.process``.
        """
        # Unpack the serialized tweak into a fresh PSetTweak instance
        # before handing it to the module-level applyTweak() helper.
        loaded = PSetTweak()
        loaded.unpersist(psetTweak)
        applyTweak(self.process, loaded, self.fixupDict)
        return
Example #4
0
    def handleSeeding(self):
        """
        _handleSeeding_

        Handle Random Seed settings for the job
        """
        seeding = getattr(self.job.getBaggage(), "seeding", None)
        if seeding != "ReproducibleSeeding":
            # Any other seeding mode: auto-populate seeds, but only when the
            # process actually carries a RandomNumberGeneratorService.
            if hasattr(self.process, "RandomNumberGeneratorService"):
                # Deferred import: IOMC is only importable inside CMSSW.
                from IOMC.RandomEngine.RandomServiceHelper import RandomNumberServiceHelper
                RandomNumberServiceHelper(self.process.RandomNumberGeneratorService).populate()
            return
        # Reproducible seeding: copy each service's initialSeed into a tweak
        # and apply it to the process.
        randService = self.process.RandomNumberGeneratorService
        seedTweak = PSetTweak()
        for svc in randService:
            seedTweak.addParameter(
                "process.RandomNumberGeneratorService.%s.initialSeed" % svc._internal_name,
                svc.initialSeed)
        applyTweak(self.process, seedTweak, self.fixupDict)
        return
Example #5
0
            if options == None:
                self.process.options = cms.untracked.PSet()
                options = getattr(self.process, "options")

            options.numberOfThreads = cms.untracked.uint32(numCores)
            options.numberOfStreams = cms.untracked.uint32(
                0)  # For now, same as numCores

        psetTweak = getattr(self.step.data.application.command, "psetTweak",
                            None)
        if psetTweak != None:
            self.applyPSetTweak(psetTweak, self.fixupDict)

        # Apply task level tweaks
        taskTweak = makeTaskTweak(self.step.data)
        applyTweak(self.process, taskTweak, self.fixupDict)

        # Check if chained processing is enabled
        # If not - apply the per job tweaks
        # If so - create an override TFC (like done in PA) and then modify thePSet accordingly
        if (hasattr(self.step.data.input, "chainedProcessing")
                and self.step.data.input.chainedProcessing):
            self._handleChainedProcessing()
        else:
            # Apply per job PSet Tweaks
            jobTweak = makeJobTweak(self.job)
            applyTweak(self.process, jobTweak, self.fixupDict)

        # check for pileup settings presence, pileup support implementation
        # and if enabled, process pileup configuration / settings
        if hasattr(self.step.data, "pileup"):
Example #6
0
                print "Error loading PSet:"
                print traceback.format_exc()
                raise ex

        self.fixupProcess()
        self.fixupLazyDownload()

        psetTweak = getattr(self.step.data.application.command, "psetTweak", None)
        if psetTweak != None:
            self.applyPSetTweak(psetTweak, self.fixupDict)



        # Apply task level tweaks
        taskTweak = makeTaskTweak(self.step.data)
        applyTweak(self.process, taskTweak, self.fixupDict)

        # Check if chained processing is enabled
        # If not - apply the per job tweaks
        # If so - create an override TFC (like done in PA) and then modify thePSet accordingly
        if (hasattr(self.step.data.input, "chainedProcessing") and
            self.step.data.input.chainedProcessing):
            self._handleChainedProcessing()
        else:
            # Apply per job PSet Tweaks
            jobTweak = makeJobTweak(self.job)
            applyTweak(self.process, jobTweak, self.fixupDict)

        # check for pileup settings presence, pileup support implementation
        # and if enabled, process pileup configuration / settings
        if hasattr(self.step.data, "pileup"):
    def __call__(self):
        """
        _call_

        Examine the step configuration and construct a PSet from that.

        Builds (or loads) the CMSSW process, applies task/job/output-module
        tweaks in order, then pickles the finished process to the step's
        working directory together with a small loader config.
        Returns 0 on success; raises RuntimeError if process.source is
        missing after setup.
        """
        self.logger.info("Executing SetupCMSSWPSet...")
        self.jobBag = self.job.getBaggage()

        # A non-empty scenario builds the process via createProcess()
        # (Config/DataProcessing); otherwise loadPSet() loads an existing
        # configuration.
        scenario = getattr(self.step.data.application.configuration,
                           "scenario", None)
        if scenario is not None and scenario != "":
            self.logger.info("Setting up job scenario/process")
            funcName = getattr(self.step.data.application.configuration,
                               "function", None)
            if getattr(self.step.data.application.configuration,
                       "pickledarguments", None) is not None:
                funcArgs = pickle.loads(
                    self.step.data.application.configuration.pickledarguments)
            else:
                funcArgs = {}
            try:
                self.createProcess(scenario, funcName, funcArgs)
            except Exception as ex:
                self.logger.exception(
                    "Error creating process for Config/DataProcessing:")
                raise ex

            if funcName == "repack":
                self.handleRepackSettings()

            if funcName in ["merge", "alcaHarvesting"]:
                self.handleSingleCoreOverride()

            # CERN-specific merge tweaks only apply when running on a
            # cern.ch host.
            if socket.getfqdn().endswith("cern.ch"):
                self.handleSpecialCERNMergeSettings(funcName)

        else:
            try:
                self.loadPSet()
            except Exception as ex:
                self.logger.exception("Error loading PSet:")
                raise ex

        # Check process.source exists
        if getattr(self.process, "source", None) is None:
            msg = "Error in CMSSW PSet: process is missing attribute 'source'"
            msg += " or process.source is defined with None value."
            self.logger.error(msg)
            raise RuntimeError(msg)

        self.handleCondorStatusService()

        self.fixupProcess()

        # In case of CRAB3, the number of threads in the PSet should not be overridden
        if not self.crabPSet:
            try:
                origCores = int(
                    getattr(self.step.data.application.multicore,
                            'numberOfCores', 1))
                eventStreams = int(
                    getattr(self.step.data.application.multicore,
                            'eventStreams', 0))
                resources = {'cores': origCores}
                resizeResources(resources)
                numCores = resources['cores']
                if numCores != origCores:
                    self.logger.info(
                        "Resizing a job with nStreams != nCores. Setting nStreams = nCores. This may end badly."
                    )
                    eventStreams = 0
                options = getattr(self.process, "options", None)
                if options is None:
                    self.process.options = cms.untracked.PSet()
                    options = getattr(self.process, "options")
                options.numberOfThreads = cms.untracked.uint32(numCores)
                options.numberOfStreams = cms.untracked.uint32(eventStreams)
            except AttributeError as ex:
                # Best-effort: missing multicore settings are logged, not fatal.
                self.logger.error("Failed to override numberOfThreads: %s",
                                  str(ex))

        psetTweak = getattr(self.step.data.application.command, "psetTweak",
                            None)
        if psetTweak is not None:
            self.applyPSetTweak(psetTweak, self.fixupDict)

        # Apply task level tweaks
        taskTweak = makeTaskTweak(self.step.data)
        applyTweak(self.process, taskTweak, self.fixupDict)

        # Check if chained processing is enabled
        # If not - apply the per job tweaks
        # If so - create an override TFC (like done in PA) and then modify thePSet accordingly
        if hasattr(self.step.data.input, "chainedProcessing"
                   ) and self.step.data.input.chainedProcessing:
            self.handleChainedProcessing()
        else:
            # Apply per job PSet Tweaks
            jobTweak = makeJobTweak(self.job)
            applyTweak(self.process, jobTweak, self.fixupDict)

        # check for pileup settings presence, pileup support implementation
        # and if enabled, process pileup configuration / settings
        if hasattr(self.step.data, "pileup"):
            self.handlePileup()

        # Apply per output module PSet Tweaks
        cmsswStep = self.step.getTypeHelper()
        for om in cmsswStep.listOutputModules():
            mod = cmsswStep.getOutputModule(om)
            outTweak = makeOutputTweak(mod, self.job)
            applyTweak(self.process, outTweak, self.fixupDict)

        # revlimiter for testing
        if getattr(self.step.data.application.command, "oneEventMode", False):
            self.process.maxEvents.input = 1

        # check for random seeds and the method of seeding which is in the job baggage
        self.handleSeeding()

        # make sure default parametersets for perf reports are installed
        self.handlePerformanceSettings()

        # check for event numbers in the producers
        self.handleProducersNumberOfEvents()

        # fixup the dqmFileSaver
        self.handleDQMFileSaver()

        # tweak for jobs reading LHE articles from CERN
        self.handleLHEInput()

        # tweak jobs for enforceGUIDInFileName
        self.handleEnforceGUIDInFileName()

        # Check if we accept skipping bad files
        if hasattr(self.step.data.application.configuration, "skipBadFiles"):
            self.process.source.skipBadFiles = \
                cms.untracked.bool(self.step.data.application.configuration.skipBadFiles)

        # Apply events per lumi section if available
        if hasattr(self.step.data.application.configuration, "eventsPerLumi"):
            self.process.source.numberEventsInLuminosityBlock = \
                cms.untracked.uint32(self.step.data.application.configuration.eventsPerLumi)

        # limit run time if desired
        if hasattr(self.step.data.application.configuration,
                   "maxSecondsUntilRampdown"):
            self.process.maxSecondsUntilRampdown = cms.untracked.PSet(
                input=cms.untracked.int32(
                    self.step.data.application.configuration.
                    maxSecondsUntilRampdown))

        # accept an overridden TFC from the step
        if hasattr(self.step.data.application, 'overrideCatalog'):
            self.logger.info("Found a TFC override: %s",
                             self.step.data.application.overrideCatalog)
            self.process.source.overrideCatalog = \
                cms.untracked.string(self.step.data.application.overrideCatalog)

        # Persist the final process: a pickle of the process object plus a
        # small Python config that unpickles it at runtime.
        configFile = self.step.data.application.command.configuration
        configPickle = getattr(self.step.data.application.command,
                               "configurationPickle", "PSet.pkl")
        workingDir = self.stepSpace.location
        try:
            with open("%s/%s" % (workingDir, configPickle), 'wb') as pHandle:
                pickle.dump(self.process, pHandle)

            with open("%s/%s" % (workingDir, configFile), 'w') as handle:
                handle.write("import FWCore.ParameterSet.Config as cms\n")
                handle.write("import pickle\n")
                handle.write("with open('%s', 'rb') as handle:\n" %
                             configPickle)
                handle.write("    process = pickle.load(handle)\n")
        except Exception as ex:
            self.logger.exception("Error writing out PSet:")
            raise ex
        self.logger.info("CMSSW PSet setup completed!")

        return 0
Example #8
0
    def __call__(self):
        """
        _call_

        Examine the step configuration and construct a PSet from that.

        Builds (or loads) the CMSSW process, applies task/job/output-module
        tweaks in order, then pickles the finished process to the step's
        working directory together with a small loader config.
        Returns 0 on success; raises RuntimeError if process.source is
        missing after setup.
        """
        self.process = None
        funcName = None

        # A non-empty scenario builds the process via createProcess()
        # (Config/DataProcessing); otherwise loadPSet() loads an existing
        # configuration.
        scenario = getattr(self.step.data.application.configuration, "scenario", None)
        if scenario is not None and scenario != "":
            funcName = getattr(self.step.data.application.configuration, "function", None)
            if getattr(self.step.data.application.configuration, "pickledarguments", None) is not None:
                funcArgs = pickle.loads(self.step.data.application.configuration.pickledarguments)
            else:
                funcArgs = {}
            try:
                self.createProcess(scenario, funcName, funcArgs)
            except Exception as ex:
                print("Error creating process for Config/DataProcessing:")
                print(traceback.format_exc())
                raise ex

            if funcName == "repack":
                self.handleRepackSettings()
            elif funcName == "merge":
                self.handleMergeSettings()

            # CERN-specific merge tweaks only apply when running on a
            # cern.ch host.
            if socket.getfqdn().endswith("cern.ch"):
                self.handleSpecialCERNMergeSettings(funcName)

        else:
            try:
                self.loadPSet()
            except Exception as ex:
                print("Error loading PSet:")
                print(traceback.format_exc())
                raise ex

        # Check process.source exists
        if getattr(self.process, "source", None) is None:
            msg = "Error in CMSSW PSet: process is missing attribute 'source' or process.source is defined with None value."
            raise RuntimeError(msg)

        self.handleCondorStatusService()

        self.fixupProcess()

        try:
            if int(self.step.data.application.multicore.numberOfCores) > 1:
                numCores = int(self.step.data.application.multicore.numberOfCores)
                options = getattr(self.process, "options", None)
                if options is None:
                    self.process.options = cms.untracked.PSet()
                    options = getattr(self.process, "options")
                options.numberOfThreads = cms.untracked.uint32(numCores)
                options.numberOfStreams = cms.untracked.uint32(0)        # For now, same as numCores
        except AttributeError:
            # Best-effort: missing multicore settings are not fatal.
            print("No value for numberOfCores. Not setting")

        psetTweak = getattr(self.step.data.application.command, "psetTweak", None)
        if psetTweak is not None:
            self.applyPSetTweak(psetTweak, self.fixupDict)

        # Apply task level tweaks
        taskTweak = makeTaskTweak(self.step.data)
        applyTweak(self.process, taskTweak, self.fixupDict)

        # Check if chained processing is enabled
        # If not - apply the per job tweaks
        # If so - create an override TFC (like done in PA) and then modify thePSet accordingly
        if (hasattr(self.step.data.input, "chainedProcessing") and
            self.step.data.input.chainedProcessing):
            self.handleChainedProcessing()
        else:
            # Apply per job PSet Tweaks
            jobTweak = makeJobTweak(self.job)
            applyTweak(self.process, jobTweak, self.fixupDict)

        # check for pileup settings presence, pileup support implementation
        # and if enabled, process pileup configuration / settings
        if hasattr(self.step.data, "pileup"):
            self.handlePileup()

        # Apply per output module PSet Tweaks
        cmsswStep = self.step.getTypeHelper()
        for om in cmsswStep.listOutputModules():
            mod = cmsswStep.getOutputModule(om)
            outTweak = makeOutputTweak(mod, self.job)
            applyTweak(self.process, outTweak, self.fixupDict)

        # revlimiter for testing
        if getattr(self.step.data.application.command, "oneEventMode", False):
            self.process.maxEvents.input = 1

        # check for random seeds and the method of seeding which is in the job baggage
        self.handleSeeding()

        # make sure default parametersets for perf reports are installed
        self.handlePerformanceSettings()

        # check for event numbers in the producers
        self.handleProducersNumberOfEvents()

        # fixup the dqmFileSaver
        self.handleDQMFileSaver()

        # Check if we accept skipping bad files
        if hasattr(self.step.data.application.configuration, "skipBadFiles"):
            self.process.source.skipBadFiles = \
                cms.untracked.bool(self.step.data.application.configuration.skipBadFiles)

        # Apply events per lumi section if available
        if hasattr(self.step.data.application.configuration, "eventsPerLumi"):
            self.process.source.numberEventsInLuminosityBlock = \
                cms.untracked.uint32(self.step.data.application.configuration.eventsPerLumi)

        # accept an overridden TFC from the step
        if hasattr(self.step.data.application, 'overrideCatalog'):
            print("Found a TFC override: %s" % self.step.data.application.overrideCatalog)
            self.process.source.overrideCatalog = \
                cms.untracked.string(self.step.data.application.overrideCatalog)

        # Persist the final process: a pickle of the process object plus a
        # small Python config that unpickles it at runtime. Context managers
        # guarantee both files are closed; the previous open/open/try-finally
        # form leaked the first handle if the second open() raised.
        configFile = self.step.data.application.command.configuration
        configPickle = getattr(self.step.data.application.command, "configurationPickle", "PSet.pkl")
        workingDir = self.stepSpace.location
        try:
            with open("%s/%s" % (workingDir, configPickle), 'wb') as pHandle:
                pickle.dump(self.process, pHandle)
            with open("%s/%s" % (workingDir, configFile), 'w') as handle:
                handle.write("import FWCore.ParameterSet.Config as cms\n")
                handle.write("import pickle\n")
                handle.write("handle = open('%s', 'rb')\n" % configPickle)
                handle.write("process = pickle.load(handle)\n")
                handle.write("handle.close()\n")
        except Exception as ex:
            print("Error writing out PSet:")
            print(traceback.format_exc())
            raise ex

        return 0
Example #9
0
    def __call__(self):
        """
        _call_

        Examine the step configuration and construct a PSet from that.

        Builds (or loads) the CMSSW process, applies task/job/output-module
        tweaks in order, then pickles the finished process to the step's
        working directory together with a small loader config.
        Returns 0 on success; raises RuntimeError if process.source is
        missing after setup.
        """
        self.process = None

        # A non-empty scenario builds the process via createProcess()
        # (Config/DataProcessing); otherwise loadPSet() loads an existing
        # configuration.
        scenario = getattr(self.step.data.application.configuration, "scenario", None)
        if scenario is not None and scenario != "":
            funcName = getattr(self.step.data.application.configuration, "function", None)
            if getattr(self.step.data.application.configuration, "pickledarguments", None) is not None:
                funcArgs = pickle.loads(self.step.data.application.configuration.pickledarguments)
            else:
                funcArgs = {}
            try:
                self.createProcess(scenario, funcName, funcArgs)
            except Exception as ex:
                logging.exception("Error creating process for Config/DataProcessing:")
                raise ex

            if funcName == "repack":
                self.handleRepackSettings()

            if funcName in ["merge", "alcaHarvesting"]:
                self.handleSingleCoreOverride()

            # CERN-specific merge tweaks only apply when running on a
            # cern.ch host.
            if socket.getfqdn().endswith("cern.ch"):
                self.handleSpecialCERNMergeSettings(funcName)

        else:
            try:
                self.loadPSet()
            except Exception as ex:
                logging.exception("Error loading PSet:")
                raise ex

        # Check process.source exists
        if getattr(self.process, "source", None) is None:
            msg = "Error in CMSSW PSet: process is missing attribute 'source'"
            msg += " or process.source is defined with None value."
            logging.error(msg)
            raise RuntimeError(msg)

        self.handleCondorStatusService()

        self.fixupProcess()

        # In case of CRAB3, the number of threads in the PSet should not be overridden
        if not self.crabPSet:
            try:
                origCores = int(getattr(self.step.data.application.multicore, 'numberOfCores', 1))
                eventStreams = int(getattr(self.step.data.application.multicore, 'eventStreams', 0))
                resources = {'cores': origCores}
                resizeResources(resources)
                numCores = resources['cores']
                if numCores != origCores:
                    logging.info(
                        "Resizing a job with nStreams != nCores. Setting nStreams = nCores. This may end badly.")
                    eventStreams = 0
                options = getattr(self.process, "options", None)
                if options is None:
                    self.process.options = cms.untracked.PSet()
                    options = getattr(self.process, "options")
                options.numberOfThreads = cms.untracked.uint32(numCores)
                options.numberOfStreams = cms.untracked.uint32(eventStreams)
            except AttributeError as ex:
                # Best-effort: missing multicore settings are logged, not fatal.
                logging.error("Failed to override numberOfThreads: %s", str(ex))

        psetTweak = getattr(self.step.data.application.command, "psetTweak", None)
        if psetTweak is not None:
            self.applyPSetTweak(psetTweak, self.fixupDict)

        # Apply task level tweaks
        taskTweak = makeTaskTweak(self.step.data)
        applyTweak(self.process, taskTweak, self.fixupDict)

        # Check if chained processing is enabled
        # If not - apply the per job tweaks
        # If so - create an override TFC (like done in PA) and then modify thePSet accordingly
        if hasattr(self.step.data.input, "chainedProcessing") and self.step.data.input.chainedProcessing:
            self.handleChainedProcessing()
        else:
            # Apply per job PSet Tweaks
            jobTweak = makeJobTweak(self.job)
            applyTweak(self.process, jobTweak, self.fixupDict)

        # check for pileup settings presence, pileup support implementation
        # and if enabled, process pileup configuration / settings
        if hasattr(self.step.data, "pileup"):
            self.handlePileup()

        # Apply per output module PSet Tweaks
        cmsswStep = self.step.getTypeHelper()
        for om in cmsswStep.listOutputModules():
            mod = cmsswStep.getOutputModule(om)
            outTweak = makeOutputTweak(mod, self.job)
            applyTweak(self.process, outTweak, self.fixupDict)

        # revlimiter for testing
        if getattr(self.step.data.application.command, "oneEventMode", False):
            self.process.maxEvents.input = 1

        # check for random seeds and the method of seeding which is in the job baggage
        self.handleSeeding()

        # make sure default parametersets for perf reports are installed
        self.handlePerformanceSettings()

        # check for event numbers in the producers
        self.handleProducersNumberOfEvents()

        # fixup the dqmFileSaver
        self.handleDQMFileSaver()

        # Check if we accept skipping bad files
        if hasattr(self.step.data.application.configuration, "skipBadFiles"):
            self.process.source.skipBadFiles = \
                cms.untracked.bool(self.step.data.application.configuration.skipBadFiles)

        # Apply events per lumi section if available
        if hasattr(self.step.data.application.configuration, "eventsPerLumi"):
            self.process.source.numberEventsInLuminosityBlock = \
                cms.untracked.uint32(self.step.data.application.configuration.eventsPerLumi)

        # limit run time if desired
        if hasattr(self.step.data.application.configuration, "maxSecondsUntilRampdown"):
            self.process.maxSecondsUntilRampdown = cms.untracked.PSet(
                input=cms.untracked.int32(self.step.data.application.configuration.maxSecondsUntilRampdown))

        # accept an overridden TFC from the step
        if hasattr(self.step.data.application, 'overrideCatalog'):
            logging.info("Found a TFC override: %s", self.step.data.application.overrideCatalog)
            self.process.source.overrideCatalog = \
                cms.untracked.string(self.step.data.application.overrideCatalog)

        # Persist the final process: a pickle of the process object plus a
        # small Python config that unpickles it at runtime.
        configFile = self.step.data.application.command.configuration
        configPickle = getattr(self.step.data.application.command, "configurationPickle", "PSet.pkl")
        workingDir = self.stepSpace.location
        try:
            with open("%s/%s" % (workingDir, configPickle), 'wb') as pHandle:
                pickle.dump(self.process, pHandle)

            with open("%s/%s" % (workingDir, configFile), 'w') as handle:
                handle.write("import FWCore.ParameterSet.Config as cms\n")
                handle.write("import pickle\n")
                handle.write("with open('%s', 'rb') as handle:\n" % configPickle)
                handle.write("    process = pickle.load(handle)\n")
        except Exception as ex:
            logging.exception("Error writing out PSet:")
            raise ex

        return 0