Example #1
0
    def testMultiTaskProcessingWorkload(self):
        """Multi Task Processing Workflow

        Block-level splitting of a multi-task workload must produce the
        expected number of work units per task, and each unit must carry
        the spec/task it was split from.
        """
        datasets = []
        tasks, count = 0, 0
        for task in MultiTaskProcessingWorkload.taskIterator():
            tasks += 1
            inputDataset = task.inputDataset()
            datasets.append("/%s/%s/%s" %
                            (inputDataset.primary, inputDataset.processed,
                             inputDataset.tier))
        # NOTE(review): relies on `inputDataset` leaking from the loop above,
        # i.e. assumes every task shares the same DBS url — confirm.
        dbs = {inputDataset.dbsurl: DBSReader(inputDataset.dbsurl)}

        for task in MultiTaskProcessingWorkload.taskIterator():
            units, _ = Block(**self.splitArgs)(MultiTaskProcessingWorkload,
                                               task)
            self.assertEqual(58, len(units))

            for unit in units:
                # every unit must provide at least one job
                self.assertTrue(1 <= unit['Jobs'])
                self.assertEqual(MultiTaskProcessingWorkload, unit['WMSpec'])
                self.assertEqual(task, unit['Task'])
            # one work unit per file block reported by DBS
            self.assertEqual(
                len(units),
                len(dbs[inputDataset.dbsurl].getFileBlocksInfo(datasets[0])))
            count += 1
        self.assertEqual(tasks, count)
Example #2
0
    def testMultiTaskProcessingWorkload(self):
        """Multi Task Processing Workflow

        Block-level splitting of a multi-task workload must produce the
        expected number of work units per task, and each unit must carry
        the spec/task it was split from.
        """
        datasets = []
        tasks, count = 0, 0
        for task in MultiTaskProcessingWorkload.taskIterator():
            tasks += 1
            inputDataset = task.inputDataset()
            datasets.append("/%s/%s/%s" % (inputDataset.primary,
                                           inputDataset.processed,
                                           inputDataset.tier))
        # NOTE(review): relies on `inputDataset` leaking from the loop above,
        # i.e. assumes every task shares the same DBS url — confirm.
        dbs = {inputDataset.dbsurl: DBSReader(inputDataset.dbsurl)}

        for task in MultiTaskProcessingWorkload.taskIterator():
            units, _ = Block(**self.splitArgs)(MultiTaskProcessingWorkload, task)
            self.assertEqual(58, len(units))

            for unit in units:
                # every unit must provide at least one job
                self.assertTrue(1 <= unit['Jobs'])
                self.assertEqual(MultiTaskProcessingWorkload, unit['WMSpec'])
                self.assertEqual(task, unit['Task'])
            # one work unit per file block reported by DBS
            self.assertEqual(len(units),
                             len(dbs[inputDataset.dbsurl].getFileBlocksInfo(datasets[0])))
            count += 1
        self.assertEqual(tasks, count)
Example #3
0
 def testMultiTaskProcessingWorkload(self):
     """Multi Task Processing Workflow

     Dataset-level splitting must yield exactly one unit per task, keyed
     by that task's input dataset path.
     """
     splitArgs = dict(SliceType='NumberOfFiles', SliceSize=5)
     datasets = []
     tasks, count = 0, 0
     for task in MultiTaskProcessingWorkload.taskIterator():
         tasks += 1
         datasets.append(task.getInputDatasetPath())
     for task in MultiTaskProcessingWorkload.taskIterator():
         units, _, _ = Dataset(**splitArgs)(MultiTaskProcessingWorkload, task)
         self.assertEqual(1, len(units))
         for unit in units:
             self.assertEqual(22, unit['Jobs'])
             self.assertEqual(MultiTaskProcessingWorkload, unit['WMSpec'])
             self.assertEqual(task, unit['Task'])
             # list() so the comparison also holds on Python 3, where
             # dict.keys() returns a view that never equals a list
             self.assertEqual(list(unit['Inputs'].keys()), [datasets[count]])
         count += 1
     self.assertEqual(tasks, count)
Example #4
0
 def testMultiTaskProcessingWorkload(self):
     """Multi Task Processing Workflow

     Dataset-level splitting must yield exactly one unit per task, keyed
     by that task's input dataset path.
     """
     datasets = []
     tasks, count = 0, 0
     for task in MultiTaskProcessingWorkload.taskIterator():
         tasks += 1
         inputDataset = task.inputDataset()
         datasets.append("/%s/%s/%s" % (inputDataset.primary,
                                        inputDataset.processed,
                                        inputDataset.tier))
     for task in MultiTaskProcessingWorkload.taskIterator():
         units, _ = Dataset(**self.splitArgs)(MultiTaskProcessingWorkload, task)
         self.assertEqual(1, len(units))
         for unit in units:
             self.assertEqual(22, unit['Jobs'])
             self.assertEqual(MultiTaskProcessingWorkload, unit['WMSpec'])
             self.assertEqual(task, unit['Task'])
             # list() so the comparison also holds on Python 3, where
             # dict.keys() returns a view that never equals a list
             self.assertEqual(list(unit['Inputs'].keys()), [datasets[count]])
         count += 1
     self.assertEqual(tasks, count)
Example #5
0
 def testMultiTaskProcessingWorkload(self):
     """Multi Task Processing Workflow

     Dataset-level splitting must yield exactly one unit per task, keyed
     by that task's input dataset path.
     """
     datasets = []
     tasks, count = 0, 0
     for task in MultiTaskProcessingWorkload.taskIterator():
         tasks += 1
         inputDataset = task.inputDataset()
         datasets.append("/%s/%s/%s" % (inputDataset.primary,
                                        inputDataset.processed,
                                        inputDataset.tier))
     for task in MultiTaskProcessingWorkload.taskIterator():
         units, _ = Dataset(**self.splitArgs)(MultiTaskProcessingWorkload, task)
         self.assertEqual(1, len(units))
         for unit in units:
             self.assertEqual(22, unit['Jobs'])
             self.assertEqual(MultiTaskProcessingWorkload, unit['WMSpec'])
             self.assertEqual(task, unit['Task'])
             # list() so the comparison also holds on Python 3, where
             # dict.keys() returns a view that never equals a list
             self.assertEqual(list(unit['Inputs'].keys()), [datasets[count]])
         count += 1
     self.assertEqual(tasks, count)
Example #6
0
    def testPileupData(self):
        """
        _testPileupData_

        Check that every workqueue element split contains the pile up data
        if it is present in the workload.
        """
        for task in MultiTaskProcessingWorkload.taskIterator():
            units, _ = Block(**self.splitArgs)(MultiTaskProcessingWorkload, task)
            self.assertEqual(58, len(units))
            for unit in units:
                pileupData = unit["PileupData"]
                # exactly one pileup dataset is expected in this workload
                self.assertEqual(len(pileupData), 1)
                # list() so indexing works on Python 3, where dict.values()
                # returns a non-subscriptable view
                self.assertItemsEqual(list(pileupData.values())[0],
                                      ['T2_XX_SiteA', 'T2_XX_SiteB', 'T2_XX_SiteC'])
        return
Example #7
0
    def testPileupData(self):
        """
        _testPileupData_

        Check that every workqueue element split contains the pile up data
        if it is present in the workload.
        """
        for task in MultiTaskProcessingWorkload.taskIterator():
            units, _ = Block(**self.splitArgs)(MultiTaskProcessingWorkload, task)
            self.assertEqual(Globals.GlobalParams.numOfBlocksPerDataset(), len(units))
            for unit in units:
                pileupData = unit["PileupData"]
                # exactly one pileup dataset is expected in this workload
                self.assertEqual(len(pileupData), 1)
                # list() so indexing works on Python 3, where dict.values()
                # returns a non-subscriptable view
                self.assertEqual(list(pileupData.values())[0], ["T2_XX_SiteC"])
        return
Example #8
0
    def testPileupData(self):
        """
        _testPileupData_

        Check that every workqueue element split contains the pile up data
        if it is present in the workload.
        """
        for task in MultiTaskProcessingWorkload.taskIterator():
            units, _, _ = Block(**self.splitArgs)(MultiTaskProcessingWorkload, task)
            self.assertEqual(58, len(units))
            for unit in units:
                pileupData = unit["PileupData"]
                # exactly one pileup dataset is expected in this workload
                self.assertEqual(len(pileupData), 1)
                # list() so indexing works on Python 3, where dict.values()
                # returns a non-subscriptable view
                self.assertItemsEqual(list(pileupData.values())[0],
                                      ['T2_XX_SiteA', 'T2_XX_SiteB', 'T2_XX_SiteC'])
        return