def makeProcess(numEvents = 200):
    """
    _makeProcess_

    Create a new Process instance

    """
    
    proc = Process("HLT")
    proc.include("FWCore/MessageLogger/data/MessageLogger.cfi")
    


    configName =  "fake-streamer-config"
    configVersion = timestamp()
    configAnnot =  "auto generated fake streamer  config"

    proc.configurationMetadata = CmsTypes.untracked(CmsTypes.PSet())
    proc.configurationMetadata.name = CmsTypes.untracked(CmsTypes.string(
        configName))
    proc.configurationMetadata.version = CmsTypes.untracked(CmsTypes.string(
        configVersion))
        
    proc.configurationMetadata.annotation = CmsTypes.untracked(CmsTypes.string(
        configAnnot))

    
    proc.options = CmsTypes.untracked(CmsTypes.PSet())
    proc.options.wantSummary = CmsTypes.untracked(CmsTypes.bool(True))

    proc.source = Source("EmptySource")

    proc.maxEvents = CmsTypes.untracked(CmsTypes.PSet())
    proc.maxEvents.input = CmsTypes.untracked(CmsTypes.int32(numEvents))
    

    proc.prod = EDProducer("StreamThingProducer")
    proc.prod.array_size = CmsTypes.int32(2500)
    proc.prod.instance_count = CmsTypes.int32(150)
    proc.prod.apply_bit_mask = CmsTypes.untracked(CmsTypes.bool(True))
    proc.prod.bit_mask = CmsTypes.untracked( CmsTypes.uint32( 16777215))

    proc.add_(Service("RandomNumberGeneratorService"))
    
    svc = proc.services["RandomNumberGeneratorService"]
    svc.moduleSeeds = CmsTypes.PSet()
    
    proc.makeData = Path(proc.prod)
    
    return proc
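A minimal usage sketch for makeProcess; the dump file name below is hypothetical, and dumpConfig() is the same old-style dump used in the merge examples further down:

proc = makeProcess(numEvents = 500)
cfgDump = open("FakeStreamerCfgDump.log", 'w')
cfgDump.write(proc.dumpConfig())
cfgDump.close()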
Example 2
def mergeProcess(*inputFiles, **options):
    """
    _mergeProcess_

    Creates and returns a merge process that will merge the provided
    filenames

    supported options:

    - process_name : name of the process, defaults to Merge
    - output_file  : sets the output file name
    - output_lfn   : sets the output LFN

    """
    #  //
    # // process supported options
    #//
    processName = options.get("process_name", "Merge")
    outputFilename = options.get("output_file", "Merged.root")
    outputLFN = options.get("output_lfn", None)

    #  //
    # // build process
    #//
    process = Process(processName)

    #  //
    # // input source
    #// 
    process.source = Source("PoolSource")
    process.source.fileNames = CfgTypes.untracked(CfgTypes.vstring())
    for entry in inputFiles:
        process.source.fileNames.append(str(entry))

    #  //
    # // output module
    #//
    process.Merged = OutputModule("PoolOutputModule")
    process.Merged.fileName = CfgTypes.untracked(CfgTypes.string(
        outputFilename))

    if outputLFN != None:
        process.Merged.logicalFileName = CfgTypes.untracked(CfgTypes.string(
            outputLFN))
        
    
    process.outputPath = EndPath(process.Merged)
    return process
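A minimal usage sketch for mergeProcess; the input LFNs and output names below are hypothetical:

process = mergeProcess(
    "/store/data/sample/file1.root",
    "/store/data/sample/file2.root",
    process_name = "Merge",
    output_file = "Merged.root",
    output_lfn = "/store/results/sample/Merged.root")
print(process.dumpConfig())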
Example 3
    def setLogicalFileName(self, lfn):
        """
        _setLogicalFileName_

        """
        self.data.logicalFileName = CfgTypes.untracked(CfgTypes.string(lfn))
        return
Example 5
    def insertSeeds(self, *seeds):
        """
        _insertSeeds_

        Insert the list of seeds into the RandomNumber Service

        """
        seedList = list(seeds)
        if "RandomNumberGeneratorService" not in self.data.services.keys():
            return

        if self.hasOldSeeds():
            self.insertOldSeeds(*seeds)
            return

        #  //
        # // Use seed service utility to generate seeds on the fly
        #//
        svc = self.data.services["RandomNumberGeneratorService"]
        try:
            from IOMC.RandomEngine.RandomServiceHelper import RandomNumberServiceHelper
        except ImportError:
            msg = "Unable to import RandomeNumberServiceHelper"
            print msg
            raise RuntimeError, msg
        randHelper = RandomNumberServiceHelper(svc)
        randHelper.populate()
        svc.saveFileName = CfgTypes.untracked(
            CfgTypes.string("RandomEngineState.log"))

        return
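A minimal usage sketch, assuming cfg is an instance of the configuration wrapper this method belongs to; the seed values are arbitrary:

cfg.insertSeeds(12345, 67890, 13579)
# With a modern RandomNumberGeneratorService the explicit seeds are not used here:
# RandomNumberServiceHelper.populate() assigns per-module seeds and the engine
# state is written to RandomEngineState.log. With an old-style seeds PSet the
# call is forwarded to insertOldSeeds() instead.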
Example 6
    def insertSeeds(self, *seeds):
        """
        _insertSeeds_

        Insert the list of seeds into the RandomNumber Service

        """
        seedList = list(seeds)
        if "RandomNumberGeneratorService" not in self.data.services.keys():
            return


        if self.hasOldSeeds():
            self.insertOldSeeds(*seeds)
            return

        #  //
        # // Use seed service utility to generate seeds on the fly
        #//
        svc = self.data.services["RandomNumberGeneratorService"]
        try:
            from IOMC.RandomEngine.RandomServiceHelper import RandomNumberServiceHelper
        except ImportError:
            msg = "Unable to import RandomeNumberServiceHelper"
            print msg
            raise RuntimeError, msg
        randHelper = RandomNumberServiceHelper(svc)
        randHelper.populate()
        svc.saveFileName = CfgTypes.untracked(
            CfgTypes.string("RandomEngineState.log"))

        return
Example 7
    def createMergePSet(self):
        """
        _createMergePSet_

        Merges are a little different since we have to build the entire
        process object from scratch.

        """
        print "<<<<<<<<<<<<<<<<<<<<Merge>>>>>>>>>>>>>>>>>>>>>."
        cfgFile = self.config['Configuration'].get("CfgFile", "PSet.py")[0]
        cfgFile = str(cfgFile)
        self.jobSpecNode.loadConfiguration()
        cfgInt = self.jobSpecNode.cfgInterface

        from FWCore.ParameterSet.Config import Process, EndPath
        from FWCore.ParameterSet.Modules import OutputModule, Source
        import FWCore.ParameterSet.Types as CfgTypes

        process = Process("Merge")
        process.source = Source("PoolSource")
        process.source.fileNames = CfgTypes.untracked(CfgTypes.vstring())
        for entry in cfgInt.inputFiles:
            process.source.fileNames.append(str(entry))
                

        outMod = cfgInt.outputModules['Merged']
        process.Merged = OutputModule("PoolOutputModule")
        process.Merged.fileName = CfgTypes.untracked(CfgTypes.string(
            outMod['fileName']))

        process.Merged.logicalFileName = CfgTypes.untracked(CfgTypes.string(
            outMod['logicalFileName']))

        process.Merged.catalog = CfgTypes.untracked(CfgTypes.string(
            outMod['catalog']))
        process.outputPath = EndPath(process.Merged)
        cfgDump = open("CfgFileDump.log", 'w')
        cfgDump.write(process.dumpConfig())
        cfgDump.close()
        
        
        handle = open(cfgFile, 'w')
        handle.write("import pickle\n")
        handle.write("pickledCfg=\"\"\"%s\"\"\"\n" % pickle.dumps(process))
        handle.write("process = pickle.loads(pickledCfg)\n")
        handle.close()
        return
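The generated PSet.py simply unpickles the process at import time. A minimal sketch of reading it back outside cmsRun, assuming the same CMSSW python environment is available:

namespace = {}
exec(open("PSet.py").read(), namespace)   # runs the pickle.loads() line written above
process = namespace["process"]
print(process.dumpConfig())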
Example 8
    def setCatalog(self, newCatalog):
        """
        _setCatalog_

        Set the catalog name

        """
        self.data.catalog = CfgTypes.untracked(CfgTypes.string(newCatalog))
        return
def addOutputModule(process, fileName):
    """
    _addOutputModule_

    
    """
    outMod = OutputModule("EventStreamFileWriter")
    outMod.max_event_size = CmsTypes.untracked(CmsTypes.int32(7000000))
    outMod.max_queue_depth = CmsTypes.untracked(CmsTypes.int32(5))
    outMod.use_compression = CmsTypes.untracked(CmsTypes.bool(True))
    outMod.compression_level = CmsTypes.untracked(CmsTypes.int32(1))

    streamer = "%s.dat" % fileName
    indexFile = "%s.ind" % fileName
    outMod.fileName = CmsTypes.untracked(CmsTypes.string(streamer))
    outMod.indexFileName = CmsTypes.untracked(CmsTypes.string(indexFile))

    process.out = outMod
    process.o = EndPath(process.out)

    return
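A minimal sketch tying this to makeProcess above; the base file name is hypothetical:

proc = makeProcess(numEvents = 200)
addOutputModule(proc, "FakeStreamerData")
# proc.out now writes FakeStreamerData.dat plus the index file FakeStreamerData.ind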
Example 11
    def setConditionsTag(self, condTag):
        """
        _setConditionsTag_

        Set the conditions global tag

        """
        globalPSet = getattr(self.data, "GlobalTag", None)
        if globalPSet == None:
            return
        globalTag = getattr(globalPSet, "globaltag", None)
        if globalTag == None:
            globalPSet.globaltag = CfgTypes.string(condTag)
        else:
            globalPSet.globaltag = condTag
        return
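A minimal usage sketch; the wrapper instance and the global tag value are hypothetical:

cfgWrapper.setConditionsTag("GLOBALTAG_V1::All")
# If the process has a GlobalTag PSet its globaltag parameter is updated;
# if there is no GlobalTag PSet the call is a no-op.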
Example 12
    def setConditionsTag(self, condTag):
        """
        _setConditionsTag_

        Set the conditions global tag

        """
        globalPSet = getattr(
            self.data, "GlobalTag", None)
        if globalPSet == None:
            return
        globalTag = getattr(globalPSet, "globaltag", None)
        if globalTag == None:
            globalPSet.globaltag = CfgTypes.string(condTag)
        else:
            globalPSet.globaltag = condTag
        return
Example 13
    def createPSet(self):
        """
        _createPSet_

        Create the PSet cfg File

        """
        cfgFile = self.config['Configuration'].get("CfgFile", "PSet.py")[0]
        cfgFile = str(cfgFile)
        self.jobSpecNode.loadConfiguration()
        self.jobSpecNode.cfgInterface.rawCfg = self.workflowNode.cfgInterface.rawCfg

        # taken from cmssw environment
        # pylint: disable-msg=F0401
        import FWCore.ParameterSet.Types as CfgTypes
        # pylint: enable-msg=F0401

        workingDir = os.path.join(os.getcwd(), 'prestage')
        if os.path.exists(workingDir + '/prestageTFC.xml'):

           rawCfg = pickle.loads(self.jobSpecNode.cfgInterface.rawCfg)
           rawCfg.source.overrideCatalog = CfgTypes.untracked(CfgTypes.string('trivialcatalog_file:%s/prestageTFC.xml?protocol=local-stage-in' % workingDir))

           self.jobSpecNode.cfgInterface.rawCfg = pickle.dumps(rawCfg)

        # Apply site specific customizations
        self.localCustomization(self.jobSpecNode.cfgInterface)

        for inpLink in self.jobSpecNode._InputLinks:
            #  //
            # // We have in-job input links to be resolved
            #//
            self.handleInputLink(self.jobSpecNode.cfgInterface, inpLink)

        cmsProcess = self.jobSpecNode.cfgInterface.makeConfiguration()



        pycfgDump = open("PyCfgFileDump.log", 'w')
        try:
            pycfgDump.write(cmsProcess.dumpPython())
        except Exception, ex:
            msg = "Error writing python format cfg dump:\n"
            msg += "%s\n" % str(ex)
            msg += "This needs to be reported to the framework team"
            pycfgDump.write(msg)
Example 14
    def ioCustomization(self, config, custom_config, merge = False):
        """
        Apply site specific io customizations
        """
        # Don't do anything if no customization or job has no input files
        if not custom_config or (merge is False and not config.inputFiles):
            return

        import re
        version = lambda x: tuple(int(x) for x in re.compile('(\d+)').findall(x))
        cmssw_version = version(os.environ['CMSSW_VERSION'])

        # Only implemented in CMSSW_2_1_8 and above
        if cmssw_version < (2, 1, 8):
            return

        print "Site specific IO parameters will be used:"

        # cacheSize is a property of InputSource
        cache_size = custom_config.get('cacheSize', None)
        if cache_size:
            # Merge pset creates process on fly so can't use CMSSWConfig object
            if merge:
                from ProdCommon.CMSConfigTools.ConfigAPI.InputSource import InputSource
                inputSource = InputSource(config.source)
                inputSource.setCacheSize(cache_size)
            else:
                config.sourceParams['cacheSize'] = cache_size

        if merge:
            from FWCore.ParameterSet.Modules import Service
            config.add_(Service('AdaptorConfig'))

        for param in custom_config:
            print "  %s %s" % (param, custom_config[param])
            if param == 'cacheSize':
                continue

            if merge:
                import FWCore.ParameterSet.Types as CfgTypes
                adaptor = config.services['AdaptorConfig']
                setattr(adaptor, param,
                        CfgTypes.untracked(CfgTypes.string(str(custom_config[param]))))
            else:
                config.tFileAdaptorConfig[param] = custom_config[param]
        return
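For illustration, a minimal standalone sketch of the AdaptorConfig service that ioCustomization builds for a merge job; the parameter names and values in custom_config are hypothetical:

import FWCore.ParameterSet.Config as cms
import FWCore.ParameterSet.Types as CfgTypes
from FWCore.ParameterSet.Modules import Service

custom_config = {'cacheSize': '20000000', 'readHint': 'auto-detect'}

process = cms.Process("Merge")
process.add_(Service('AdaptorConfig'))
adaptor = process.services['AdaptorConfig']
for param in custom_config:
    if param == 'cacheSize':
        continue   # cacheSize is applied to the input source, not to AdaptorConfig
    setattr(adaptor, param,
            CfgTypes.untracked(CfgTypes.string(str(custom_config[param]))))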
Example 15
    def addDataset(self, **datasetParams):
        """
        _addDataset_

        Add Dataset information to this cfg file. Each key, value pair
        provided in dataset Params is added as an untracked string to
        the datasets PSet in a PSet named entryName

        """
        datasetPSet = getattr(self.data, "dataset", None)
        if datasetPSet == None:
            self.data.dataset = CfgTypes.untracked(CfgTypes.PSet())
            datasetPSet = self.data.dataset

        for key, value in datasetParams.items():
            setattr(datasetPSet, key,
                    CfgTypes.untracked(CfgTypes.string(str(value))))

        return
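A minimal usage sketch; the wrapper instance and the dataset parameters are hypothetical:

cfg.addDataset(primaryDataset = "TestPrimary",
               processedDataset = "TestProcessed-v1",
               dataTier = "GEN-SIM")
# self.data.dataset is now an untracked PSet holding each value as an
# untracked string parameter.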
Example 16
    def addDataset(self, **datasetParams):
        """
        _addDataset_

        Add Dataset information to this cfg file. Each key, value pair
        provided in dataset Params is added as an untracked string to
        the datasets PSet in a PSet named entryName

        """
        datasetPSet = getattr(self.data, "dataset", None)
        if datasetPSet == None:
            self.data.dataset = CfgTypes.untracked(CfgTypes.PSet())
            datasetPSet = self.data.dataset
        
        
        for key, value in datasetParams.items():
            setattr(
                datasetPSet, key,
                CfgTypes.untracked(CfgTypes.string(str(value)))
                )
            
        return
Example 17
#  //
# // build process
#//
process = Process(processName)

#  //
# // input source
#//
process.source = Source("PoolSource",
    fileNames = cms.untracked.vstring()
    )
inputFiles = [
    "lstore://cms-lstore.vampre/test/file2.root",
    "lstore://cms-lstore.vampire/test/file1.root"
]
for entry in inputFiles:
    process.source.fileNames.append(str(entry))
if dropDQM:
    process.source.inputCommands = CfgTypes.untracked.vstring(
        'keep *', 'drop *_EDMtoMEConverter_*_*')

#  //
# // output module
#//
process.Merged = OutputModule("PoolOutputModule")
process.Merged.fileName = CfgTypes.untracked(CfgTypes.string(outputFilename))

if outputLFN != None:
    process.Merged.logicalFileName = CfgTypes.untracked(
        CfgTypes.string(outputLFN))


process.outputPath = EndPath(process.Merged)

Example 18
    def setOverrideCatalog(self, catalog, protocol):
        """set overrideCatalog to allow local files to be read via LFN"""
        uri = "trivialcatalog_file:%s?protocol=%s" % (catalog, protocol)
        self.data.overrideCatalog = CfgTypes.untracked(CfgTypes.string(uri))
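A minimal usage sketch; the catalog path is hypothetical:

cfg.setOverrideCatalog("/some/site/trivial_catalog.xml", "local-stage-in")
# overrideCatalog becomes:
#   trivialcatalog_file:/some/site/trivial_catalog.xml?protocol=local-stage-in
# which is the same form used for the prestage TFC override in createPSet above.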
Example 19
    def setFileMatchMode(self, matchMode):
        """set file match mode for reading files in same job"""
        self.data.fileMatchMode = CfgTypes.untracked(
            CfgTypes.string(matchMode))
                                                cacheHint = cms.untracked.string("application-only"),
                                                readHint = cms.untracked.string("direct-unbuffered"))
            
        process.RECO = cms.OutputModule("PoolOutputModule")
        process.RECO.dataset = cms.untracked(cms.PSet())
        process.RECO.dataset.dataTier = cms.untracked(cms.string("RECO"))
        process.RECO.fileName = cms.untracked.string("NOTSET")
        process.RECO.logicalFileName = cms.untracked.string("NOTSET")
        process.RECO.fastCloning = cms.untracked.bool(False)
        process.outpath = cms.EndPath(process.RECO)

        configName = "prompt-reco-config"
        configVersion = "%s-%s-%s" % (self.cmssw["CMSSWVersion"], self.run,
                                      self.primaryDataset)
        configAnnot = "auto generated prompt reco config"

        process.configurationMetadata = CmsTypes.untracked(CmsTypes.PSet())
        process.configurationMetadata.name = CmsTypes.untracked(CmsTypes.string(configName))
        process.configurationMetadata.version = CmsTypes.untracked(CmsTypes.string(configVersion))
        process.configurationMetadata.annotation = CmsTypes.untracked(CmsTypes.string(configAnnot))

        cfgInt = cfgWrapper.loadConfiguration(process)
        cfgWrapper.conditionsTag = "NOTSET"
        cfgInt.validateForProduction()

        setattr(self.workflow.payload, "cfgInterface", cfgWrapper)

        loader.unload()
        
        return
Example 21
    def setFileName(self, fname):
        """
        _setFileName_

        """
        self.data.fileName = CfgTypes.untracked(CfgTypes.string(fname))
Example 23
    def createMergePSet(self):
        """
        _createMergePSet_

        Merges are a little different since we have to build the entire
        process object from scratch.

        """
        print "<<<<<<<<<<<<<<<<<<<<Merge>>>>>>>>>>>>>>>>>>>>>."
        cfgFile = self.config['Configuration'].get("CfgFile", "PSet.py")[0]
        cfgFile = str(cfgFile)
        self.jobSpecNode.loadConfiguration()
        cfgInt = self.jobSpecNode.cfgInterface

        # taken from cmssw environment
        # pylint: disable-msg=F0401
        from FWCore.ParameterSet.Config import Process, EndPath
        from FWCore.ParameterSet.Modules import OutputModule, Source
        import FWCore.ParameterSet.Types as CfgTypes
        import FWCore.ParameterSet.Config as cms
        # pylint: enable-msg=F0401

        processName = "Merge"
        process = Process(processName)
        process.source = Source("PoolSource")
        process.source.fileNames = CfgTypes.untracked(CfgTypes.vstring())
        for entry in cfgInt.inputFiles:
            process.source.fileNames.append(str(entry))

        # For StoreResults, drop DQM products as they are sometimes left over
        if cfgInt.configMetadata.get("annotation",None) \
            == "AutoGenerated By StoreResults":
            inputCommands = getattr(process.source,'inputCommands',
                                    cms.untracked.vstring('keep *'))
            inputCommands.append('drop *_EDMtoMEConverter_*_*')
            process.source.inputCommands = inputCommands

        outMod = cfgInt.outputModules['Merged']
        process.Merged = OutputModule("PoolOutputModule")
        process.Merged.fileName = CfgTypes.untracked(CfgTypes.string(
            outMod['fileName']))

        process.Merged.logicalFileName = CfgTypes.untracked(CfgTypes.string(
            outMod['logicalFileName']))

        process.Merged.catalog = CfgTypes.untracked(CfgTypes.string(
            outMod['catalog']))

        process.outputPath = EndPath(process.Merged)

        # Apply site specific customizations
        self.localCustomization(process, merge=True)

    

        pycfgDump = open("PyCfgFileDump.log", 'w')
        try:
            pycfgDump.write(process.dumpPython())    
        except Exception, ex:
            msg = "Error writing python format cfg dump:\n"
            msg += "%s\n" % str(ex)
            msg += "This needs to be reported to the framework team"
            pycfgDump.write(msg)
Example 24
#  //
# // build process
#//
process = Process(processName)

#  //
# // input source
#//
process.source = Source("PoolSource", fileNames=cms.untracked.vstring())
inputFiles = [
    "lstore://cms-lstore.vampre/test/file2.root",
    "lstore://cms-lstore.vampire/test/file1.root"
]
for entry in inputFiles:
    process.source.fileNames.append(str(entry))
if dropDQM:
    process.source.inputCommands = CfgTypes.untracked.vstring(
        'keep *', 'drop *_EDMtoMEConverter_*_*')

#  //
# // output module
#//
process.Merged = OutputModule("PoolOutputModule")
process.Merged.fileName = CfgTypes.untracked(CfgTypes.string(outputFilename))

if outputLFN != None:
    process.Merged.logicalFileName = CfgTypes.untracked(
        CfgTypes.string(outputLFN))

process.outputPath = EndPath(process.Merged)
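
Once assembled, the merge process can be dumped for inspection in the same way as in the createMergePSet examples above; a minimal sketch:

cfgDump = open("CfgFileDump.log", 'w')
cfgDump.write(process.dumpConfig())
cfgDump.close()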