Example #1
0
    def testF_TaskChain(self):
        """
        _TaskChain_

        Test the monstrous TaskChain workflow.
        This will be a long one.

        NOTE: This test is so complicated that all it does is
        take code from TaskChain_t and make sure it still
        produces an actual request.
        """
        couch = CouchServer(os.environ["COUCHURL"])
        configDb = couch.connectDatabase(self.couchDBName)
        genDoc = makeGeneratorConfig(configDb)
        procDocs = makeProcessingConfigs(configDb)

        userName = '******'
        groupName = 'Li'
        teamName = 'Tang'
        schema = utils.getSchema(userName=userName)
        schema["CouchURL"] = os.environ["COUCHURL"]
        schema["CouchDBName"] = self.couchDBName
        schema["SiteWhitelist"] = ["T1_CH_CERN", "T1_US_FNAL"]
        schema["TaskChain"] = 5

        def fileBasedTask(name, inputTask, outputModule, filesPerJob):
            # Shared shape for the file-based processing steps of the chain.
            # The task name doubles as the key into the processing config docs.
            return {"TaskName": name,
                    "InputTask": inputTask,
                    "InputFromOutputModule": outputModule,
                    "ConfigCacheID": procDocs[name],
                    "SplittingAlgorithm": "FileBased",
                    "SplittingArguments": {"files_per_job": filesPerJob}}

        schema.update({
            "Task1": {"TaskName": "GenSim",
                      "ConfigCacheID": genDoc,
                      "SplittingAlgorithm": "EventBased",
                      "SplittingArguments": {"events_per_job": 250},
                      "RequestNumEvents": 10000,
                      "Seeding": "Automatic",
                      "PrimaryDataset": "RelValTTBar"},
            "Task2": fileBasedTask("DigiHLT", "GenSim", "writeGENSIM", 1),
            "Task3": fileBasedTask("Reco", "DigiHLT", "writeRAWDIGI", 1),
            "Task4": fileBasedTask("ALCAReco", "Reco", "writeALCA", 1),
            "Task5": fileBasedTask("Skims", "Reco", "writeRECO", 10),
        })
        schema.update(utils.getAndSetupSchema(self,
                                              userName=userName,
                                              groupName=groupName,
                                              teamName=teamName))

        # These assignments must come after every other schema update,
        # otherwise they would get overwritten.
        schema['RequestType'] = "TaskChain"
        schema["CouchDBName"] = self.couchDBName
        schema["CouchURL"] = os.environ.get("COUCHURL")

        result = self.jsonSender.put('request/testRequest', schema)

        requestName = result[0]['RequestName']
        request = self.jsonSender.get('request/%s' % requestName)[0]
        self.assertEqual(request['CMSSWVersion'], schema['CMSSWVersion'])
        self.assertEqual(request['Group'], groupName)
        self.assertEqual(request['Requestor'], userName)
        self.assertEqual(request['DbsUrl'], None)

        # Round-trip through the stored workload and check the splitting
        # arguments of the generator task survived intact.
        workload = self.loadWorkload(requestName)
        self.assertEqual(workload.data.request.schema.Task1.SplittingArguments,
                         {'events_per_job': 250})
Example #2
0
    def testF_TaskChain(self):
        """
        _TaskChain_

        Test the monstrous TaskChain workflow.
        This will be a long one.

        NOTE: This test is so complicated that all it does is
        take code from TaskChain_t and make sure it still
        produces an actual request.
        """
        couch = CouchServer(os.environ["COUCHURL"])
        configDb = couch.connectDatabase(self.couchDBName)
        genDoc = makeGeneratorConfig(configDb)
        procDocs = makeProcessingConfigs(configDb)

        userName = '******'
        groupName = 'Li'
        teamName = 'Tang'
        schema = utils.getSchema(userName=userName)
        schema["CouchURL"] = os.environ["COUCHURL"]
        schema["CouchDBName"] = self.couchDBName
        schema["CouchWorkloadDBName"] = self.couchDBName
        schema["SiteWhitelist"] = ["T1_CH_CERN", "T1_US_FNAL"]
        schema["TaskChain"] = 5

        def processingTask(name, inputTask, outputModule, **extra):
            # Common shape of the file-based processing steps; the task
            # name doubles as the key into the processing config docs.
            task = {"TaskName": name,
                    "InputTask": inputTask,
                    "InputFromOutputModule": outputModule,
                    "ConfigCacheID": procDocs[name],
                    "SplittingAlgo": "FileBased"}
            task.update(extra)
            return task

        schema.update({
            "Task1": {"TaskName": "GenSim",
                      "ConfigCacheID": genDoc,
                      "SplittingAlgo": "EventBased",
                      "EventsPerJob": 250,
                      "RequestNumEvents": 10000,
                      "PrimaryDataset": "RelValTTBar"},
            "Task2": processingTask("DigiHLT", "GenSim", "writeGENSIM"),
            "Task3": processingTask("Reco", "DigiHLT", "writeRAWDIGI"),
            "Task4": processingTask("ALCAReco", "Reco", "writeALCA"),
            "Task5": processingTask("Skims", "Reco", "writeRECO", FilesPerJob=10),
        })
        schema.update(utils.getAndSetupSchema(self,
                                              userName=userName,
                                              groupName=groupName,
                                              teamName=teamName))

        # These assignments must come after every other schema update,
        # otherwise they would get overwritten.
        schema['RequestType'] = "TaskChain"
        schema["CouchDBName"] = self.couchDBName
        schema["CouchURL"] = os.environ.get("COUCHURL")
        schema["CouchWorkloadDBName"] = self.couchDBName

        result = self.jsonSender.put('request', schema)

        requestName = result[0]['RequestName']
        request = self.jsonSender.get('request/%s' % requestName)[0]
        self.assertEqual(request['CMSSWVersion'], schema['CMSSWVersion'])
        self.assertEqual(request['Group'], groupName)
        self.assertEqual(request['Requestor'], userName)

        # Round-trip through the stored workload and check the generator
        # task's splitting setting survived intact.
        workload = self.loadWorkload(requestName)
        self.assertEqual(workload.data.request.schema.Task1["EventsPerJob"],
                         250)
Example #3
0
    def testF_TaskChain(self):
        """
        _TaskChain_

        Test the monstrous TaskChain workflow.
        This will be a long one.

        NOTE: This test is so complicated that all it does is
        take code from TaskChain_t and make sure it still
        produces an actual request.
        """

        from WMCore_t.WMSpec_t.StdSpecs_t.TaskChain_t import makeGeneratorConfig, makeProcessingConfigs
        couch = CouchServer(os.environ["COUCHURL"])
        configDb = couch.connectDatabase(self.couchDBName)
        genDoc = makeGeneratorConfig(configDb)
        procDocs = makeProcessingConfigs(configDb)

        def fileBasedTask(name, inputTask, outputModule, filesPerJob):
            # Shared shape of the file-based processing steps; the task
            # name doubles as the key into the processing config docs.
            return {"TaskName": name,
                    "InputTask": inputTask,
                    "InputFromOutputModule": outputModule,
                    "ConfigCacheID": procDocs[name],
                    "SplittingAlgorithm": "FileBased",
                    "SplittingArguments": {"files_per_job": filesPerJob}}

        schema = {
            "AcquisitionEra": "ReleaseValidation",
            "Requestor": "*****@*****.**",
            "CMSSWVersion": "CMSSW_3_5_8",
            "ScramArch": "slc5_ia32_gcc434",
            "ProcessingVersion": "v1",
            "GlobalTag": "GR10_P_v4::All",
            "CouchURL": os.environ["COUCHURL"],
            "CouchDBName": self.couchDBName,
            "SiteWhitelist": ["T1_CH_CERN", "T1_US_FNAL"],
            "TaskChain": 5,
            "Task1": {"TaskName": "GenSim",
                      "ConfigCacheID": genDoc,
                      "SplittingAlgorithm": "EventBased",
                      "SplittingArguments": {"events_per_job": 250},
                      "RequestNumEvents": 10000,
                      "Seeding": "Automatic",
                      "PrimaryDataset": "RelValTTBar"},
            "Task2": fileBasedTask("DigiHLT", "GenSim", "writeGENSIM", 1),
            "Task3": fileBasedTask("Reco", "DigiHLT", "writeRAWDIGI", 1),
            "Task4": fileBasedTask("ALCAReco", "Reco", "writeALCA", 1),
            "Task5": fileBasedTask("Skims", "Reco", "writeRECO", 10),
        }

        userName = '******'
        groupName = 'Li'
        teamName = 'Tang'
        CMSSWVersion = 'CMSSW_3_5_8'
        schema.update(self.setupSchema(userName=userName,
                                       groupName=groupName,
                                       teamName=teamName,
                                       CMSSWVersion=CMSSWVersion,
                                       typename="TaskChain"))
        # Re-apply after setupSchema so these values win.
        schema["CouchDBName"] = self.couchDBName
        schema["CouchURL"] = os.environ.get("COUCHURL")

        result = self.jsonSender.put('request/testRequest', schema)
        requestName = result[0]['RequestName']

        request = self.jsonSender.get('request/%s' % requestName)[0]
        self.assertEqual(request['CMSSWVersion'], CMSSWVersion)
        self.assertEqual(request['Group'], groupName)
        self.assertEqual(request['Requestor'], userName)

        # Round-trip through the stored workload and check the splitting
        # arguments of the generator task survived intact.
        workload = self.loadWorkload(requestName)
        self.assertEqual(workload.data.request.schema.Task1.SplittingArguments,
                         {'events_per_job': 250})

        return