def injectFilesFromDBS(inputFileset, datasetPath):
    """
    _injectFilesFromDBS_

    Query DBS for the files in the given dataset path and inject them into the
    supplied WMBS fileset and the DBS buffer.
    """
    print("injecting files from %s into %s, please wait..." %
          (datasetPath, inputFileset.name))
    args = {}
    args["url"] = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
    args["version"] = "DBS_2_0_9"
    args["mode"] = "GET"
    dbsApi = DbsApi(args)
    # Note: "retriveList"/"retrive_*" is the argument spelling used by the DBS2 client API
    dbsResults = dbsApi.listFileArray(
        path=datasetPath, retriveList=["retrive_lumi", "retrive_run"])
    # Only inject the first ten files returned
    dbsResults = dbsResults[0:10]
    print("  found %d files, inserting into wmbs..." % (len(dbsResults)))

    for dbsResult in dbsResults:
        myFile = File(lfn=dbsResult["LogicalFileName"],
                      size=dbsResult["FileSize"],
                      events=dbsResult["NumberOfEvents"],
                      checksums={"cksum": dbsResult["Checksum"]},
                      locations="cmssrm.fnal.gov",
                      merged=True)
        myRun = Run(runNumber=dbsResult["LumiList"][0]["RunNumber"])
        for lumi in dbsResult["LumiList"]:
            myRun.lumis.append(lumi["LumiSectionNumber"])
        myFile.addRun(myRun)
        myFile.create()
        inputFileset.addFile(myFile)

        dbsFile = DBSBufferFile(lfn=dbsResult["LogicalFileName"],
                                size=dbsResult["FileSize"],
                                events=dbsResult["NumberOfEvents"],
                                checksums={"cksum": dbsResult["Checksum"]},
                                locations="cmssrm.fnal.gov",
                                status="LOCAL")
        dbsFile.setDatasetPath(datasetPath)
        dbsFile.setAlgorithm(appName="cmsRun",
                             appVer="Unknown",
                             appFam="Unknown",
                             psetHash="Unknown",
                             configContent="Unknown")
        dbsFile.create()

    inputFileset.commit()
    inputFileset.markOpen(False)
    return
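
A minimal usage sketch for the helper above, assuming a WMBS database connection has already been initialised; the fileset name and dataset path are placeholders:

from WMCore.WMBS.Fileset import Fileset

inputFileset = Fileset(name="Cosmics-RAW-input")
inputFileset.create()
injectFilesFromDBS(inputFileset, "/Cosmics/Run2010A-v1/RAW")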
Example #2
    def createParentFiles(self,
                          acqEra,
                          nFiles=10,
                          workflowName='TestWorkload',
                          taskPath='/TestWorkload/DataTest'):
        """
        _createParentFiles_

        Create several parentless files in DBSBuffer.  This simulates raw files
        in the T0.
        """
        workflowId = self.injectWorkflow(workflowName=workflowName,
                                         taskPath=taskPath)
        parentlessFiles = []

        baseLFN = "/store/data/%s/Cosmics/RAW/v1/000/143/316/" % (acqEra)
        for i in range(nFiles):
            testFile = DBSBufferFile(lfn=baseLFN + makeUUID() + ".root",
                                     size=1024,
                                     events=20,
                                     checksums={"cksum": 1},
                                     workflowId=workflowId)
            testFile.setAlgorithm(appName="cmsRun",
                                  appVer="CMSSW_3_1_1",
                                  appFam="RAW",
                                  psetHash="GIBBERISH",
                                  configContent="MOREGIBBERISH")
            testFile.setDatasetPath("/Cosmics/%s-v1/RAW" % (acqEra))

            # Block-closing thresholds are set very high so that blocks are not
            # closed during the test
            testFile['block_close_max_wait_time'] = 1000000
            testFile['block_close_max_events'] = 1000000
            testFile['block_close_max_size'] = 1000000
            testFile['block_close_max_files'] = 1000000

            # Ten consecutive lumi sections per file, offset by the file index
            # so that files do not share lumi sections
            lumis = []
            for j in range(10):
                lumis.append((i * 10) + j)
            testFile.addRun(Run(143316, *lumis))

            testFile.setAcquisitionEra(acqEra)
            testFile.setProcessingVer("1")
            testFile.setGlobalTag("START54::All")
            testFile.create()
            testFile.setLocation("malpaquet")
            parentlessFiles.append(testFile)

        return parentlessFiles
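
A hypothetical call from a test method (the acquisition era string and the file count are illustrative only):

        parentFiles = self.createParentFiles("CommissioningWinter10", nFiles=5)
        assert len(parentFiles) == 5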
Example #3
    def addFileToDBS(self, jobReportFile, task, errorDataset=False):
        """
        _addFileToDBS_

        Add a file that was output from a job to the DBS buffer.
        """
        datasetInfo = jobReportFile["dataset"]

        dbsFile = DBSBufferFile(lfn=jobReportFile["lfn"],
                                size=jobReportFile["size"],
                                events=jobReportFile["events"],
                                checksums=jobReportFile["checksums"],
                                status="NOTUPLOADED",
                                inPhedex=0)
        dbsFile.setAlgorithm(appName=datasetInfo["applicationName"],
                             appVer=datasetInfo["applicationVersion"],
                             appFam=jobReportFile["module_label"],
                             psetHash="GIBBERISH",
                             configContent=jobReportFile.get('configURL'))

        if errorDataset:
            dbsFile.setDatasetPath(
                "/%s/%s/%s" %
                (datasetInfo["primaryDataset"] + "-Error",
                 datasetInfo["processedDataset"], datasetInfo["dataTier"]))
        else:
            dbsFile.setDatasetPath(
                "/%s/%s/%s" %
                (datasetInfo["primaryDataset"],
                 datasetInfo["processedDataset"], datasetInfo["dataTier"]))

        dbsFile.setValidStatus(
            validStatus=jobReportFile.get("validStatus", None))
        dbsFile.setProcessingVer(ver=jobReportFile.get('processingVer', None))
        dbsFile.setAcquisitionEra(
            era=jobReportFile.get('acquisitionEra', None))
        dbsFile.setGlobalTag(globalTag=jobReportFile.get('globalTag', None))
        # TODO need to find where to get the prep id
        dbsFile.setPrepID(prep_id=jobReportFile.get('prep_id', None))
        dbsFile['task'] = task
        dbsFile['runs'] = jobReportFile['runs']

        dbsFile.setLocation(pnn=list(jobReportFile["locations"])[0],
                            immediateSave=False)
        self.dbsFilesToCreate.append(dbsFile)
        return
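
For orientation, a hypothetical jobReportFile entry carrying the keys that addFileToDBS() reads could look like the dict below; the values are purely illustrative and the real framework job report file objects are richer than a plain dict:

jobReportFile = {
    "lfn": "/store/unmerged/Cosmics/RECO/v1/000/143/316/file.root",
    "size": 1024,
    "events": 10,
    "checksums": {"adler32": "1234", "cksum": "5678"},
    "module_label": "RECOoutput",
    "configURL": None,
    "validStatus": "VALID",
    "processingVer": "1",
    "acquisitionEra": "Run2010A",
    "globalTag": "START54::All",
    "prep_id": None,
    "locations": {"T1_US_FNAL_Disk"},
    "runs": [],
    "dataset": {"applicationName": "cmsRun",
                "applicationVersion": "CMSSW_2_1_8",
                "primaryDataset": "Cosmics",
                "processedDataset": "CRUZET09-PromptReco-v1",
                "dataTier": "RECO"},
}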
Example #4
    def testXSetBlock(self):
        """
        _testSetBlock_

        Verify that the [Set|Get]Block DAOs work correctly.
        """
        myThread = threading.currentThread()

        dataset = "/Cosmics/CRUZET09-PromptReco-v1/RECO"

        uploadFactory = DAOFactory(package = "WMComponent.DBS3Buffer",
                                   logger = myThread.logger,
                                   dbinterface = myThread.dbi)

        datasetAction = uploadFactory(classname = "NewDataset")
        createAction = uploadFactory(classname = "CreateBlocks")

        datasetAction.execute(datasetPath = dataset)

        newBlock = DBSBlock(name = "someblockname",
                            location = "se1.cern.ch",
                            das = None, workflow = None)
        newBlock.setDataset(dataset, 'data', 'VALID')

        createAction.execute(blocks = [newBlock])

        setBlockAction = self.daoFactory(classname = "DBSBufferFiles.SetBlock")
        getBlockAction = self.daoFactory(classname = "DBSBufferFiles.GetBlock")

        testFile = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024, events = 10,
                                 locations = "se1.fnal.gov")
        testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                              appFam = "RECO", psetHash = "GIBBERISH",
                              configContent = "MOREGIBBERISH")
        testFile.setDatasetPath(dataset)

        testFile.create()

        setBlockAction.execute(lfn = testFile["lfn"], blockName = "someblockname")
        blockName = getBlockAction.execute(lfn = testFile["lfn"])

        assert blockName[0][0] == "someblockname", \
               "Error: Incorrect block returned: %s" % blockName[0][0]
        return
Example #5
    def _addToDBSBuffer(self, dbsFile, checksums, locations):
        """
        This step is just for increase the performance for
        Accountant doesn't neccessary to check the parentage
        """
        dbsBuffer = DBSBufferFile(lfn=dbsFile["LogicalFileName"],
                                  size=dbsFile["FileSize"],
                                  events=dbsFile["NumberOfEvents"],
                                  checksums=checksums,
                                  locations=locations,
                                  status="GLOBAL",
                                  inPhedex=1)
        dbsBuffer.setDatasetPath('bogus')
        dbsBuffer.setAlgorithm(appName="cmsRun", appVer="Unknown",
                               appFam="Unknown", psetHash="Unknown",
                               configContent="Unknown")

        self.dbsFilesToCreate.add(dbsBuffer)
        return
Example #6
    def addFileToDBS(self, jobReportFile, task):
        """
        _addFileToDBS_

        Add a file that was output from a job to the DBS buffer.
        """
        datasetInfo = jobReportFile["dataset"]

        dbsFile = DBSBufferFile(lfn=jobReportFile["lfn"],
                                size=jobReportFile["size"],
                                events=jobReportFile["events"],
                                checksums=jobReportFile["checksums"],
                                status="NOTUPLOADED")
        dbsFile.setAlgorithm(appName=datasetInfo["applicationName"],
                             appVer=datasetInfo["applicationVersion"],
                             appFam=jobReportFile["module_label"],
                             psetHash="GIBBERISH",
                             configContent=jobReportFile.get('configURL'))

        dbsFile.setDatasetPath(
            "/%s/%s/%s" %
            (datasetInfo["primaryDataset"], datasetInfo["processedDataset"],
             datasetInfo["dataTier"]))
        dbsFile.setValidStatus(
            validStatus=jobReportFile.get("validStatus", None))
        dbsFile.setProcessingVer(ver=jobReportFile.get('processingVer', None))
        dbsFile.setAcquisitionEra(
            era=jobReportFile.get('acquisitionEra', None))
        dbsFile.setGlobalTag(globalTag=jobReportFile.get('globalTag', None))
        dbsFile.setCustodialSite(
            custodialSite=jobReportFile.get('custodialSite', None))
        dbsFile['task'] = task

        for run in jobReportFile["runs"]:
            newRun = Run(runNumber=run.run)
            newRun.extend(run.lumis)
            dbsFile.addRun(newRun)

        dbsFile.setLocation(se=list(jobReportFile["locations"])[0],
                            immediateSave=False)
        self.dbsFilesToCreate.append(dbsFile)
        return
Example #7
    def addFile(self, file, dataset=0):
        """
        Add the file to the buffer
        """

        myThread = threading.currentThread()

        existingTransaction = self.beginTransaction()

        bufferFile = DBSBufferFile(lfn=file['LFN'],
                                   size=file['Size'],
                                   events=file['TotalEvents'],
                                   cksum=file['Checksum'],
                                   dataset=dataset)

        runLumiList = file.getLumiSections()
        runList = [x['RunNumber'] for x in runLumiList]

        for runNumber in runList:
            lumis = [
                int(y['LumiSectionNumber']) for y in runLumiList
                if y['RunNumber'] == runNumber
            ]
            run = Run(runNumber, *lumis)
            bufferFile.addRun(run)

        if bufferFile.exists() == False:
            bufferFile.create()
            bufferFile.setLocation(se=file['SEName'], immediateSave=True)
        else:
            bufferFile.load()
        # The file is now in the DBS buffer; update the dataset's file count

        self.updateDSFileCount(dataset=dataset)

        #Parent files
        bufferFile.addParents(file.inputFiles)

        self.commitTransaction(existingTransaction)

        return
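
The file argument here is not a plain dict: besides the keys accessed above it must provide a getLumiSections() method and an inputFiles attribute. A minimal stand-in sketching that interface (all names and values are illustrative) might be:

class FakeDBSFile(dict):
    # Mimics only the accesses made by addFile() above
    def __init__(self):
        super(FakeDBSFile, self).__init__()
        self.update({"LFN": "/store/data/fake.root",
                     "Size": 1024,
                     "TotalEvents": 100,
                     "Checksum": "1234",
                     "SEName": "cmssrm.fnal.gov"})
        self.inputFiles = []  # parents of this file, if any

    def getLumiSections(self):
        # a single run with two lumi sections
        return [{"RunNumber": 143316, "LumiSectionNumber": 1},
                {"RunNumber": 143316, "LumiSectionNumber": 2}]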
Example #8
    def testAddRunSet(self):
        """
        _testAddRunSet_

        Test the ability to add run and lumi information to a file.
        """
        testFile = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024, events = 10,
                                 locations = "se1.fnal.gov")
        testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                              appFam = "RECO", psetHash = "GIBBERISH",
                              configContent = "MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

        testFile.create()
        runSet = set()
        runSet.add(Run( 1, *[45]))
        runSet.add(Run( 2, *[67, 68]))
        testFile.addRunSet(runSet)

        assert (runSet - testFile["runs"]) == set(), \
            "Error: addRunSet is not updating set correctly"
Example #9
    def testProperties(self):
        """
        _testProperties_

        Test added tags that use DBSBuffer to transfer from workload to DBS
        """


        testFileA = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024, events = 10)
        testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
        testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileA.setValidStatus(validStatus = "VALID")
        testFileA.setProcessingVer(ver = "ProcVer")
        testFileA.setAcquisitionEra(era = "AcqEra")
        testFileA.setGlobalTag(globalTag = "GlobalTag")
        testFileA.setDatasetParent(datasetParent = "Parent")
        testFileA.create()

        return
Example #10
    def testDeleteTransaction(self):
        """
        _testDeleteTransaction_

        Create a file and commit it to the database.  Start a new transaction
        and delete the file.  Roll back the transaction after the file has been
        deleted.  Use the file class's exists() method to verify that the file
        does not exist after it has been deleted but does exist after the
        transaction is rolled back.
        """
        testFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
        testFile.setAlgorithm(appName="cmsRun",
                              appVer="CMSSW_2_1_8",
                              appFam="RECO",
                              psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

        assert testFile.exists() == False, \
            "ERROR: File exists before it was created"

        testFile.addRun(Run(1, *[45]))
        testFile.create()

        assert testFile.exists() > 0, \
            "ERROR: File does not exist after it was created"

        myThread = threading.currentThread()
        myThread.transaction.begin()

        testFile.delete()

        assert testFile.exists() == False, \
            "ERROR: File exists after it has been deleted"

        myThread.transaction.rollback()

        assert testFile.exists() > 0, \
            "ERROR: File does not exist after transaction was rolled back."
        return
Example #11
    def createTestFiles(self):
        """
        _createTestFiles_

        Create some dbsbuffer test files with different statuses.
        """
        phedexStatus = self.dbsbufferFactory(
            classname="DBSBufferFiles.SetPhEDExStatus")

        for i in range(0, 4):

            lfn = "/path/to/some/lfn" + str(i)

            # Two files should be InDBS, two files should be NOTUPLOADED
            if i in [0, 2]:
                status = 'InDBS'
            else:
                status = 'NOTUPLOADED'

            testDBSFile = DBSBufferFile(lfn=lfn,
                                        size=600000,
                                        events=60000,
                                        status=status,
                                        workflowId=1)

            testDBSFile.setAlgorithm(appName="cmsRun",
                                     appVer="UNKNOWN",
                                     appFam="RECO",
                                     psetHash="SOMEHASH" + str(i),
                                     configContent="SOMECONTENT")
            testDBSFile.setDatasetPath("/path/to/some/dataset")
            testDBSFile.create()

            # Together with the status chosen above, this yields all four
            # combinations of status (InDBS, NOTUPLOADED) and in_phedex (1, 0):
            # i=0: InDBS/1, i=1: NOTUPLOADED/1, i=2: InDBS/0, i=3: NOTUPLOADED/0
            if i in [0, 1]:
                phedexStatus.execute(lfn, 1)
Example #12
    def testProperties(self):
        """
        _testProperties_

        Test added tags that use DBSBuffer to transfer from workload to DBS
        """

        testFileA = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
        testFileA.setAlgorithm(appName="cmsRun",
                               appVer="CMSSW_2_1_8",
                               appFam="RECO",
                               psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileA.setValidStatus(validStatus="VALID")
        testFileA.setProcessingVer(ver="ProcVer")
        testFileA.setAcquisitionEra(era="AcqEra")
        testFileA.setGlobalTag(globalTag="GlobalTag")
        testFileA.setDatasetParent(datasetParent="Parent")
        testFileA.setCustodialSite(custodialSite='testCustody')
        testFileA.create()

        # There are no accessors for these things because load is never called
        action = self.daoFactory2(classname="LoadInfoFromDAS")
        das = action.execute(ids=[1])[0]
        self.assertEqual(das['Parent'], 'Parent')
        self.assertEqual(das['GlobalTag'], 'GlobalTag')
        self.assertEqual(das['ValidStatus'], 'VALID')

        # Test things with no accessors whatsoever
        myThread = threading.currentThread()
        self.assertEqual(
            myThread.dbi.processData(
                "SELECT custodial_site FROM dbsbuffer_dataset")[0].fetchall()
            [0][0], 'testCustody')
        return
Example #13
    def testReportHandling(self):
        """
        _testReportHandling_

        Verify that we're able to parse a CMSSW report, convert it to a Report()
        style report, pickle it and then have the accountant process it.
        """
        self.procPath = os.path.join(WMCore.WMBase.getTestBase(),
                                    "WMCore_t/FwkJobReport_t/CMSSWProcessingReport.xml")

        myReport = Report("cmsRun1")
        myReport.parse(self.procPath)

        # Fake some metadata that should be added by the stageout scripts.
        for fileRef in myReport.getAllFileRefsFromStep("cmsRun1"):
            fileRef.size = 1024
            fileRef.location = "cmssrm.fnal.gov"

        fwjrPath = os.path.join(self.tempDir, "ProcReport.pkl")
        cmsRunStep = myReport.retrieveStep("cmsRun1")
        cmsRunStep.status = 0
        myReport.setTaskName('/TestWF/None')
        myReport.persist(fwjrPath)

        self.setFWJRAction.execute(jobID = self.testJob["id"], fwjrPath = fwjrPath)

        pFile = DBSBufferFile(lfn = "/path/to/some/lfn", size = 600000, events = 60000)
        pFile.setAlgorithm(appName = "cmsRun", appVer = "UNKNOWN",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
        pFile.setDatasetPath("/bogus/dataset/path")
        #pFile.addRun(Run(1, *[45]))
        pFile.create()

        config = self.createConfig(workerThreads = 1)
        accountant = JobAccountantPoller(config)
        accountant.setup()
        accountant.algorithm()

        self.verifyJobSuccess(self.testJob["id"])
        self.verifyFileMetaData(self.testJob["id"], myReport.getAllFilesFromStep("cmsRun1"))

        inputFile = File(lfn = "/store/backfill/2/unmerged/WMAgentCommissioining10/MinimumBias/RECO/rereco_GR09_R_34X_V5_All_v1/0000/outputRECORECO.root")
        inputFile.load()
        self.testMergeJob = Job(name = "testMergeJob", files = [inputFile])
        self.testMergeJob.create(group = self.mergeJobGroup)
        self.testMergeJob["state"] = "complete"
        self.stateChangeAction.execute(jobs = [self.testMergeJob])

        self.mergePath = os.path.join(WMCore.WMBase.getTestBase(),
                                         "WMCore_t/FwkJobReport_t/CMSSWMergeReport.xml")

        myReport = Report("mergeReco")
        myReport.parse(self.mergePath)

        # Fake some metadata that should be added by the stageout scripts.
        for fileRef in myReport.getAllFileRefsFromStep("mergeReco"):
            fileRef.size = 1024
            fileRef.location = "cmssrm.fnal.gov"
            fileRef.dataset = {"applicationName": "cmsRun", "applicationVersion": "CMSSW_3_4_2_patch1",
                               "primaryDataset": "MinimumBias", "processedDataset": "Rereco-v1",
                               "dataTier": "RECO"}

        fwjrPath = os.path.join(self.tempDir, "MergeReport.pkl")
        myReport.setTaskName('/MergeWF/None')
        cmsRunStep = myReport.retrieveStep("mergeReco")
        cmsRunStep.status = 0
        myReport.persist(fwjrPath)

        self.setFWJRAction.execute(jobID = self.testMergeJob["id"], fwjrPath = fwjrPath)
        accountant.algorithm()

        self.verifyJobSuccess(self.testMergeJob["id"])
        self.verifyFileMetaData(self.testMergeJob["id"], myReport.getAllFilesFromStep("mergeReco"))

        return
Example #14
    def testGetChildrenDAO(self):
        """
        _testGetChildrenDAO_

        Verify that the GetChildren DAO correctly returns the LFNs of a file's
        children.
        """
        testFileChildA = DBSBufferFile(lfn="/this/is/a/child/lfnA",
                                       size=1024,
                                       events=20)
        testFileChildA.setAlgorithm(appName="cmsRun",
                                    appVer="CMSSW_2_1_8",
                                    appFam="RECO",
                                    psetHash="GIBBERISH",
                                    configContent="MOREGIBBERISH")
        testFileChildA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileChildB = DBSBufferFile(lfn="/this/is/a/child/lfnB",
                                       size=1024,
                                       events=20)
        testFileChildB.setAlgorithm(appName="cmsRun",
                                    appVer="CMSSW_2_1_8",
                                    appFam="RECO",
                                    psetHash="GIBBERISH",
                                    configContent="MOREGIBBERISH")
        testFileChildB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileChildC = DBSBufferFile(lfn="/this/is/a/child/lfnC",
                                       size=1024,
                                       events=20)
        testFileChildC.setAlgorithm(appName="cmsRun",
                                    appVer="CMSSW_2_1_8",
                                    appFam="RECO",
                                    psetHash="GIBBERISH",
                                    configContent="MOREGIBBERISH")
        testFileChildC.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

        testFileChildA.create()
        testFileChildB.create()
        testFileChildC.create()

        testFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
        testFile.setAlgorithm(appName="cmsRun",
                              appVer="CMSSW_2_1_8",
                              appFam="RECO",
                              psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFile.create()

        testFileChildA.addParents([testFile["lfn"]])
        testFileChildB.addParents([testFile["lfn"]])
        testFileChildC.addParents([testFile["lfn"]])

        getChildrenAction = self.daoFactory(
            classname="DBSBufferFiles.GetChildren")
        childLFNs = getChildrenAction.execute(testFile["lfn"])

        assert len(childLFNs) == 3, \
            "ERROR: Parent does not have the right amount of children."

        goldenLFNs = [
            "/this/is/a/child/lfnA", "/this/is/a/child/lfnB",
            "/this/is/a/child/lfnC"
        ]
        for childLFN in childLFNs:
            assert childLFN in goldenLFNs, \
                "ERROR: Unknown child lfn"
            goldenLFNs.remove(childLFN)

        return
Example #15
    def testBulkLoad(self):
        """
        _testBulkLoad_

        Can we load in bulk?
        """

        addToBuffer = DBSBufferUtil()

        testFileChildA = DBSBufferFile(lfn="/this/is/a/child/lfnA",
                                       size=1024,
                                       events=20)
        testFileChildA.setAlgorithm(appName="cmsRun",
                                    appVer="CMSSW_2_1_8",
                                    appFam="RECO",
                                    psetHash="GIBBERISH",
                                    configContent="MOREGIBBERISH")
        testFileChildA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileChildB = DBSBufferFile(lfn="/this/is/a/child/lfnB",
                                       size=1024,
                                       events=20)
        testFileChildB.setAlgorithm(appName="cmsRun",
                                    appVer="CMSSW_2_1_8",
                                    appFam="RECO",
                                    psetHash="GIBBERISH",
                                    configContent="MOREGIBBERISH")
        testFileChildB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileChildC = DBSBufferFile(lfn="/this/is/a/child/lfnC",
                                       size=1024,
                                       events=20)
        testFileChildC.setAlgorithm(appName="cmsRun",
                                    appVer="CMSSW_2_1_8",
                                    appFam="RECO",
                                    psetHash="GIBBERISH",
                                    configContent="MOREGIBBERISH")
        testFileChildC.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

        testFileChildA.create()
        testFileChildB.create()
        testFileChildC.create()

        testFileChildA.setLocation(["se1.fnal.gov", "se1.cern.ch"])
        testFileChildB.setLocation(["se1.fnal.gov", "se1.cern.ch"])
        testFileChildC.setLocation(["se1.fnal.gov", "se1.cern.ch"])

        runSet = set()
        runSet.add(Run(1, *[45]))
        runSet.add(Run(2, *[67, 68]))
        testFileChildA.addRunSet(runSet)
        testFileChildB.addRunSet(runSet)
        testFileChildC.addRunSet(runSet)

        testFileChildA.save()
        testFileChildB.save()
        testFileChildC.save()

        setCksumAction = self.daoFactory(
            classname="DBSBufferFiles.AddChecksumByLFN")
        binds = [{
            'lfn': "/this/is/a/child/lfnA",
            'cktype': 'adler32',
            'cksum': 201
        }, {
            'lfn': "/this/is/a/child/lfnA",
            'cktype': 'cksum',
            'cksum': 101
        }, {
            'lfn': "/this/is/a/child/lfnB",
            'cktype': 'adler32',
            'cksum': 201
        }, {
            'lfn': "/this/is/a/child/lfnB",
            'cktype': 'cksum',
            'cksum': 101
        }, {
            'lfn': "/this/is/a/child/lfnC",
            'cktype': 'adler32',
            'cksum': 201
        }, {
            'lfn': "/this/is/a/child/lfnC",
            'cktype': 'cksum',
            'cksum': 101
        }]
        setCksumAction.execute(bulkList=binds)

        testFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
        testFile.setAlgorithm(appName="cmsRun",
                              appVer="CMSSW_2_1_8",
                              appFam="RECO",
                              psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFile.create()

        testFileChildA.addParents([testFile["lfn"]])
        testFileChildB.addParents([testFile["lfn"]])
        testFileChildC.addParents([testFile["lfn"]])

        binds = [{
            'id': testFileChildA.exists()
        }, {
            'id': testFileChildB.exists()
        }, {
            'id': testFileChildC.exists()
        }]

        listOfFiles = addToBuffer.loadDBSBufferFilesBulk(fileObjs=binds)

        # print listOfFiles

        compareList = [
            'locations', 'psetHash', 'configContent', 'appName', 'appVer',
            'appFam', 'events', 'datasetPath', 'runs'
        ]

        for f in listOfFiles:
            self.assertTrue(
                f['lfn'] in [
                    "/this/is/a/child/lfnA", "/this/is/a/child/lfnB",
                    "/this/is/a/child/lfnC"
                ], "Unknown file in loaded results")
            self.assertEqual(f['checksums'], {
                'adler32': '201',
                'cksum': '101'
            })
            for parent in f['parents']:
                self.assertEqual(parent['lfn'], testFile['lfn'])
            for key in compareList:
                self.assertEqual(f[key], testFileChildA[key])
Example #16
    def testAddChildTransaction(self):
        """
        _testAddChildTransaction_

        Add a child to some parent files and make sure that all the parentage
        information is loaded/stored correctly from the database.  Roll back the
        addition of one of the children and then verify that it does in fact
        only have one parent.
        """
        testFileParentA = DBSBufferFile(lfn="/this/is/a/parent/lfnA",
                                        size=1024,
                                        events=20)
        testFileParentA.setAlgorithm(appName="cmsRun",
                                     appVer="CMSSW_2_1_8",
                                     appFam="RECO",
                                     psetHash="GIBBERISH",
                                     configContent="MOREGIBBERISH")
        testFileParentA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileParentA.addRun(Run(1, *[45]))

        testFileParentB = DBSBufferFile(lfn="/this/is/a/parent/lfnB",
                                        size=1024,
                                        events=20)
        testFileParentB.setAlgorithm(appName="cmsRun",
                                     appVer="CMSSW_2_1_8",
                                     appFam="RECO",
                                     psetHash="GIBBERISH",
                                     configContent="MOREGIBBERISH")
        testFileParentB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileParentB.addRun(Run(1, *[45]))
        testFileParentA.create()
        testFileParentB.create()

        testFileA = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
        testFileA.setAlgorithm(appName="cmsRun",
                               appVer="CMSSW_2_1_8",
                               appFam="RECO",
                               psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileA.addRun(Run(1, *[45]))
        testFileA.create()

        testFileParentA.addChildren("/this/is/a/lfn")

        myThread = threading.currentThread()
        myThread.transaction.begin()

        testFileParentB.addChildren("/this/is/a/lfn")

        testFileB = DBSBufferFile(id=testFileA["id"])
        testFileB.load(parentage=1)

        goldenFiles = [testFileParentA, testFileParentB]
        for parentFile in testFileB["parents"]:
            assert parentFile in goldenFiles, \
                "ERROR: Unknown parent file"
            goldenFiles.remove(parentFile)

        assert len(goldenFiles) == 0, \
            "ERROR: Some parents are missing"

        myThread.transaction.rollback()
        testFileB.load(parentage=1)

        goldenFiles = [testFileParentA]
        for parentFile in testFileB["parents"]:
            assert parentFile in goldenFiles, \
                "ERROR: Unknown parent file"
            goldenFiles.remove(parentFile)

        assert len(goldenFiles) == 0, \
            "ERROR: Some parents are missing"

        return
Example #17
    def testAddParents(self):
        """
        _testAddParents_

        Verify that the addParents() method works correctly even if the parents
        do not already exist in the database.
        """
        myThread = threading.currentThread()

        testFile = DBSBufferFile(lfn="/this/is/a/lfnA",
                                 size=1024,
                                 events=10,
                                 locations="se1.fnal.gov")
        testFile.setAlgorithm(appName="cmsRun",
                              appVer="CMSSW_2_1_8",
                              appFam="RECO",
                              psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFile.create()

        testParent = DBSBufferFile(lfn="/this/is/a/lfnB",
                                   size=1024,
                                   events=10,
                                   locations="se1.fnal.gov")
        testParent.setAlgorithm(appName="cmsRun",
                                appVer="CMSSW_2_1_8",
                                appFam="RECO",
                                psetHash="GIBBERISH",
                                configContent="MOREGIBBERISH")
        testParent.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RAW")
        testParent.create()

        goldenLFNs = ["lfn1", "lfn2", "lfn3", "/this/is/a/lfnB"]
        testFile.addParents(goldenLFNs)

        verifyFile = DBSBufferFile(id=testFile["id"])
        verifyFile.load(parentage=1)
        parentLFNs = verifyFile.getParentLFNs()

        for parentLFN in parentLFNs:
            self.assertTrue(parentLFN in goldenLFNs,
                            "Error: unknown lfn %s" % parentLFN)
            goldenLFNs.remove(parentLFN)

        self.assertEqual(len(goldenLFNs), 0, "Error: missing LFNs...")

        # Check that the bogus dataset is listed as inDBS
        sqlCommand = """SELECT in_dbs FROM dbsbuffer_algo_dataset_assoc das
                          INNER JOIN dbsbuffer_dataset ds ON das.dataset_id = ds.id
                          WHERE ds.path = 'bogus'"""

        status = myThread.dbi.processData(sqlCommand)[0].fetchall()[0][0]

        self.assertEqual(status, 1)

        # Now make sure the dummy files are listed as being in DBS
        sqlCommand = """SELECT status FROM dbsbuffer_file df
                          INNER JOIN dbsbuffer_algo_dataset_assoc das ON das.id = df.dataset_algo
                          INNER JOIN dbsbuffer_dataset ds ON das.dataset_id = ds.id
                          WHERE ds.path = '/bogus/dataset/path' """

        status = myThread.dbi.processData(sqlCommand)[0].fetchall()

        for entry in status:
            self.assertEqual(entry, ('AlreadyInDBS', ))

        return
Example #18
    def stuffDatabase(self, tier0Mode = False):
        """
        _stuffDatabase_

        Fill the dbsbuffer with some files and blocks.  We'll insert a total
        of 5 files spanning two blocks.  There will be a total of two datasets
        inserted into the database.

        All files will already be in GLOBAL status and flagged as in_phedex.
        """

        myThread = threading.currentThread()
        buffer3Factory = DAOFactory(package = "WMComponent.DBS3Buffer",
                                   logger = myThread.logger,
                                   dbinterface = myThread.dbi)
        insertWorkflow = buffer3Factory(classname = "InsertWorkflow")
        insertWorkflow.execute("BogusRequestA", "BogusTask",
                               0, 0, 0, 0)
        insertWorkflow.execute("BogusRequestB", "BogusTask",
                               0, 0, 0, 0)

        checksums = {"adler32": "1234", "cksum": "5678"}
        testFileA = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                                  checksums = checksums,
                                  locations = set(["srm-cms.cern.ch"]))
        testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
        testFileA.setDatasetPath(self.testDatasetA)
        testFileA.addRun(Run(2, *[45]))
        testFileA.create()

        testFileB = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                                  checksums = checksums,
                                  locations = set(["srm-cms.cern.ch"]))
        testFileB.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
        testFileB.setDatasetPath(self.testDatasetA)
        testFileB.addRun(Run(2, *[45]))
        testFileB.create()

        testFileC = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                                  checksums = checksums,
                                  locations = set(["srm-cms.cern.ch"]))
        testFileC.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
        testFileC.setDatasetPath(self.testDatasetA)
        testFileC.addRun(Run(2, *[45]))
        testFileC.create()

        self.testFilesA.append(testFileA)
        self.testFilesA.append(testFileB)
        self.testFilesA.append(testFileC)

        testFileD = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                                  checksums = checksums,
                                  locations = set(["srm-cms.cern.ch"]))
        testFileD.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
        testFileD.setDatasetPath(self.testDatasetB)
        testFileD.addRun(Run(2, *[45]))
        testFileD.create()

        testFileE = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                                  checksums = checksums,
                                  locations = set(["srm-cms.cern.ch"]))
        testFileE.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
        testFileE.setDatasetPath(self.testDatasetB)
        testFileE.addRun(Run(2, *[45]))
        testFileE.create()

        self.testFilesB.append(testFileD)
        self.testFilesB.append(testFileE)

        uploadFactory = DAOFactory(package = "WMComponent.DBSUpload.Database",
                                   logger = myThread.logger,
                                   dbinterface = myThread.dbi)
        createBlock = uploadFactory(classname = "SetBlockStatus")

        self.blockAName = self.testDatasetA + "#" + makeUUID()
        self.blockBName = self.testDatasetB + "#" + makeUUID()
        createBlock.execute(block = self.blockAName, locations = ["srm-cms.cern.ch"], open_status = 'Closed')
        createBlock.execute(block = self.blockBName, locations = ["srm-cms.cern.ch"], open_status = 'Closed')

        bufferFactory = DAOFactory(package = "WMComponent.DBS3Buffer",
                                   logger = myThread.logger,
                                   dbinterface = myThread.dbi)

        setBlock = bufferFactory(classname = "DBSBufferFiles.SetBlock")
        setBlock.execute(testFileA["lfn"], self.blockAName)
        setBlock.execute(testFileB["lfn"], self.blockAName)
        setBlock.execute(testFileC["lfn"], self.blockAName)
        setBlock.execute(testFileD["lfn"], self.blockBName)
        setBlock.execute(testFileE["lfn"], self.blockBName)

        fileStatus = bufferFactory(classname = "DBSBufferFiles.SetStatus")
        fileStatus.execute(testFileA["lfn"], "GLOBAL")
        fileStatus.execute(testFileB["lfn"], "GLOBAL")
        fileStatus.execute(testFileC["lfn"], "GLOBAL")
        fileStatus.execute(testFileD["lfn"], "GLOBAL")
        fileStatus.execute(testFileE["lfn"], "GLOBAL")

        phedexStatus = bufferFactory(classname = "DBSBufferFiles.SetPhEDExStatus")
        phedexStatus.execute(testFileA["lfn"], 1)
        phedexStatus.execute(testFileB["lfn"], 1)
        phedexStatus.execute(testFileC["lfn"], 1)
        phedexStatus.execute(testFileD["lfn"], 1)
        phedexStatus.execute(testFileE["lfn"], 1)

        associateWorkflow = buffer3Factory(classname = "DBSBufferFiles.AssociateWorkflowToFile")
        associateWorkflow.execute(testFileA["lfn"], "BogusRequestA", "BogusTask")
        associateWorkflow.execute(testFileB["lfn"], "BogusRequestA", "BogusTask")
        associateWorkflow.execute(testFileC["lfn"], "BogusRequestA", "BogusTask")
        associateWorkflow.execute(testFileD["lfn"], "BogusRequestB", "BogusTask")
        associateWorkflow.execute(testFileE["lfn"], "BogusRequestB", "BogusTask")

        # Make the desired subscriptions
        insertSubAction = buffer3Factory(classname = "NewSubscription")
        datasetA = DBSBufferDataset(path = self.testDatasetA)
        datasetB = DBSBufferDataset(path = self.testDatasetB)
        workload = WMWorkloadHelper()
        workload.load(os.path.join(getTestBase(), 'WMComponent_t/PhEDExInjector_t/specs/TestWorkload.pkl'))
        if tier0Mode:
            # Override the settings
            workload.setSubscriptionInformation(custodialSites = ["T0_CH_CERN", "T1_US_FNAL"],
                                                nonCustodialSites = ["T3_CO_Uniandes"],
                                                priority = "Normal", custodialSubType = "Replica",
                                                autoApproveSites = ["T0_CH_CERN"],
                                                dataTier = "RECO")
            workload.setSubscriptionInformation(custodialSites = ["T0_CH_CERN", "T1_UK_RAL"],
                                                nonCustodialSites = [],
                                                autoApproveSites = [],
                                                priority = "High", custodialSubType = "Replica",
                                                dataTier = "RAW")
        insertSubAction.execute(datasetA.exists(), workload.getSubscriptionInformation()[self.testDatasetA])
        insertSubAction.execute(datasetB.exists(), workload.getSubscriptionInformation()[self.testDatasetB])

        return
Example #19
    def testGetParentLFNs(self):
        """
        _testGetParentLFNs_

        Create three files and set them to be parents of a fourth file.  Check
        to make sure that getParentLFNs() on the child file returns the correct
        LFNs.
        """
        testFileParentA = DBSBufferFile(lfn="/this/is/a/parent/lfnA",
                                        size=1024,
                                        events=20)
        testFileParentA.setAlgorithm(appName="cmsRun",
                                     appVer="CMSSW_2_1_8",
                                     appFam="RECO",
                                     psetHash="GIBBERISH",
                                     configContent="MOREGIBBERISH")
        testFileParentA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileParentA.addRun(Run(1, *[45]))

        testFileParentB = DBSBufferFile(lfn="/this/is/a/parent/lfnB",
                                        size=1024,
                                        events=20)
        testFileParentB.setAlgorithm(appName="cmsRun",
                                     appVer="CMSSW_2_1_8",
                                     appFam="RECO",
                                     psetHash="GIBBERISH",
                                     configContent="MOREGIBBERISH")
        testFileParentB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileParentB.addRun(Run(1, *[45]))

        testFileParentC = DBSBufferFile(lfn="/this/is/a/parent/lfnC",
                                        size=1024,
                                        events=20)
        testFileParentC.setAlgorithm(appName="cmsRun",
                                     appVer="CMSSW_2_1_8",
                                     appFam="RECO",
                                     psetHash="GIBBERISH",
                                     configContent="MOREGIBBERISH")
        testFileParentC.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileParentC.addRun(Run(1, *[45]))

        testFileParentA.create()
        testFileParentB.create()
        testFileParentC.create()

        testFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
        testFile.setAlgorithm(appName="cmsRun",
                              appVer="CMSSW_2_1_8",
                              appFam="RECO",
                              psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFile.addRun(Run(1, *[45]))
        testFile.create()

        testFile.addParents([
            testFileParentA["lfn"], testFileParentB["lfn"],
            testFileParentC["lfn"]
        ])

        parentLFNs = testFile.getParentLFNs()

        assert len(parentLFNs) == 3, \
            "ERROR: Child does not have the right amount of parents"

        goldenLFNs = [
            "/this/is/a/parent/lfnA", "/this/is/a/parent/lfnB",
            "/this/is/a/parent/lfnC"
        ]
        for parentLFN in parentLFNs:
            assert parentLFN in goldenLFNs, \
                "ERROR: Unknown parent lfn"
            goldenLFNs.remove(parentLFN)

        testFile.delete()
        testFileParentA.delete()
        testFileParentB.delete()
        testFileParentC.delete()
        return
Example #20
    def stuffDatabase(self):
        """
        _stuffDatabase_

        Fill the dbsbuffer with some files and blocks.  We'll insert a total
        of 5 files spanning two blocks.  There will be a total of two datasets
        inserted into the database, both from the same workflow.

        All files will already be in GLOBAL status and flagged as in_phedex.
        """

        myThread = threading.currentThread()
        buffer3Factory = DAOFactory(package = "WMComponent.DBS3Buffer",
                                   logger = myThread.logger,
                                   dbinterface = myThread.dbi)
        insertWorkflow = buffer3Factory(classname = "InsertWorkflow")
        insertWorkflow.execute("BogusRequest", "BogusTask", os.path.join(getTestBase(),
                                                                         "WMComponent_t/PhEDExInjector_t/specs/TestWorkload.pkl"))

        checksums = {"adler32": "1234", "cksum": "5678"}
        testFileA = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                                  checksums = checksums,
                                  locations = set(["srm-cms.cern.ch"]))
        testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
        testFileA.setDatasetPath(self.testDatasetA)
        testFileA.addRun(Run(2, *[45]))
        testFileA.create()

        testFileB = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                                  checksums = checksums,
                                  locations = set(["srm-cms.cern.ch"]))
        testFileB.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
        testFileB.setDatasetPath(self.testDatasetA)
        testFileB.addRun(Run(2, *[45]))
        testFileB.create()

        testFileC = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                                  checksums = checksums,
                                  locations = set(["srm-cms.cern.ch"]))
        testFileC.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
        testFileC.setDatasetPath(self.testDatasetA)
        testFileC.addRun(Run(2, *[45]))
        testFileC.create()

        self.testFilesA.append(testFileA)
        self.testFilesA.append(testFileB)
        self.testFilesA.append(testFileC)

        testFileD = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                                  checksums = checksums,
                                  locations = set(["srm-cms.cern.ch"]))
        testFileD.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
        testFileD.setDatasetPath(self.testDatasetB)
        testFileD.addRun(Run(2, *[45]))
        testFileD.create()

        testFileE = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                                  checksums = checksums,
                                  locations = set(["srm-cms.cern.ch"]))
        testFileE.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
        testFileE.setDatasetPath(self.testDatasetB)
        testFileE.addRun(Run(2, *[45]))
        testFileE.create()

        self.testFilesB.append(testFileD)
        self.testFilesB.append(testFileE)

        uploadFactory = DAOFactory(package = "WMComponent.DBSUpload.Database",
                                   logger = myThread.logger,
                                   dbinterface = myThread.dbi)
        createBlock = uploadFactory(classname = "SetBlockStatus")

        self.blockAName = self.testDatasetA + "#" + makeUUID()
        self.blockBName = self.testDatasetB + "#" + makeUUID()
        createBlock.execute(block = self.blockAName, locations = ["srm-cms.cern.ch"], open_status = 0)
        createBlock.execute(block = self.blockBName, locations = ["srm-cms.cern.ch"], open_status = 0)

        bufferFactory = DAOFactory(package = "WMComponent.DBSBuffer.Database",
                                   logger = myThread.logger,
                                   dbinterface = myThread.dbi)

        setBlock = bufferFactory(classname = "DBSBufferFiles.SetBlock")
        setBlock.execute(testFileA["lfn"], self.blockAName)
        setBlock.execute(testFileB["lfn"], self.blockAName)
        setBlock.execute(testFileC["lfn"], self.blockAName)
        setBlock.execute(testFileD["lfn"], self.blockBName)
        setBlock.execute(testFileE["lfn"], self.blockBName)

        fileStatus = bufferFactory(classname = "DBSBufferFiles.SetStatus")
        fileStatus.execute(testFileA["lfn"], "GLOBAL")
        fileStatus.execute(testFileB["lfn"], "GLOBAL")
        fileStatus.execute(testFileC["lfn"], "GLOBAL")
        fileStatus.execute(testFileD["lfn"], "GLOBAL")
        fileStatus.execute(testFileE["lfn"], "GLOBAL")

        phedexStatus = bufferFactory(classname = "DBSBufferFiles.SetPhEDExStatus")
        phedexStatus.execute(testFileA["lfn"], 1)
        phedexStatus.execute(testFileB["lfn"], 1)
        phedexStatus.execute(testFileC["lfn"], 1)
        phedexStatus.execute(testFileD["lfn"], 1)
        phedexStatus.execute(testFileE["lfn"], 1)

        associateWorkflow = buffer3Factory(classname = "DBSBufferFiles.AssociateWorkflowToFile")
        associateWorkflow.execute(testFileA["lfn"], "BogusRequest", "BogusTask")
        associateWorkflow.execute(testFileB["lfn"], "BogusRequest", "BogusTask")
        associateWorkflow.execute(testFileC["lfn"], "BogusRequest", "BogusTask")
        associateWorkflow.execute(testFileD["lfn"], "BogusRequest", "BogusTask")
        associateWorkflow.execute(testFileE["lfn"], "BogusRequest", "BogusTask")

        return
Example #21
    def stuffDatabase(self):
        """
        Fill the dbsbuffer tables with some files and blocks.  We'll insert a total
        of 5 files spanning two blocks.  There will be a total of two datasets
        inserted into the database.
        We'll inject files with the location set both as an SE name and as a
        PhEDEx node name.
        """
        myThread = threading.currentThread()

        # Create the DAOs factory and the relevant instances
        buffer3Factory = DAOFactory(package="WMComponent.DBS3Buffer",
                                    logger=myThread.logger,
                                    dbinterface=myThread.dbi)
        setBlock = buffer3Factory(classname="DBSBufferFiles.SetBlock")
        fileStatus = buffer3Factory(classname="DBSBufferFiles.SetStatus")
        associateWorkflow = buffer3Factory(classname="DBSBufferFiles.AssociateWorkflowToFile")
        insertWorkflow = buffer3Factory(classname="InsertWorkflow")
        datasetAction = buffer3Factory(classname="NewDataset")
        createAction = buffer3Factory(classname="CreateBlocks")

        # Create workflow in the database
        insertWorkflow.execute("BogusRequest", "BogusTask", 0, 0, 0, 0)

        # First file on first block
        checksums = {"adler32": "1234", "cksum": "5678"}
        testFileA = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                                  checksums=checksums,
                                  locations=set(["T2_CH_CERN"]))
        testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                               appFam="RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileA.setDatasetPath(self.testDatasetA)
        testFileA.addRun(Run(2, *[45]))
        testFileA.create()

        # Second file on first block
        testFileB = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                                  checksums=checksums,
                                  locations=set(["T2_CH_CERN"]))
        testFileB.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                               appFam="RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileB.setDatasetPath(self.testDatasetA)
        testFileB.addRun(Run(2, *[45]))
        testFileB.create()

        # Third file on first block
        testFileC = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                                  checksums=checksums,
                                  locations=set(["T2_CH_CERN"]))
        testFileC.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                               appFam="RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileC.setDatasetPath(self.testDatasetA)
        testFileC.addRun(Run(2, *[45]))
        testFileC.create()

        self.testFilesA.append(testFileA)
        self.testFilesA.append(testFileB)
        self.testFilesA.append(testFileC)

        # First file on second block
        testFileD = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                                  checksums=checksums,
                                  locations=set(["T1_US_FNAL_Disk"]))
        testFileD.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                               appFam="RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileD.setDatasetPath(self.testDatasetB)
        testFileD.addRun(Run(2, *[45]))
        testFileD.create()

        # Second file on second block
        testFileE = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                                  checksums=checksums,
                                  locations=set(["T1_US_FNAL_Disk"]))
        testFileE.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                               appFam="RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileE.setDatasetPath(self.testDatasetB)
        testFileE.addRun(Run(2, *[45]))
        testFileE.create()

        self.testFilesB.append(testFileD)
        self.testFilesB.append(testFileE)

        # insert datasets in the dbsbuffer table
        datasetAction.execute(datasetPath=self.testDatasetA)
        datasetAction.execute(datasetPath=self.testDatasetB)

        self.blockAName = self.testDatasetA + "#" + makeUUID()
        self.blockBName = self.testDatasetB + "#" + makeUUID()
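        # Block names above follow the DBS convention <dataset path>#<GUID>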

        # create and insert blocks into dbsbuffer table
        newBlockA = DBSBufferBlock(name=self.blockAName,
                                   location="T2_CH_CERN",
                                   datasetpath=None)
        newBlockA.setDataset(self.testDatasetA, 'data', 'VALID')
        newBlockA.status = 'Closed'

        newBlockB = DBSBufferBlock(name=self.blockBName,
                                   location="T1_US_FNAL_Disk",
                                   datasetpath=None)
        newBlockB.setDataset(self.testDatasetB, 'data', 'VALID')
        newBlockB.status = 'Closed'

        createAction.execute(blocks=[newBlockA, newBlockB])

        # associate files with their corresponding block id
        setBlock.execute(testFileA["lfn"], self.blockAName)
        setBlock.execute(testFileB["lfn"], self.blockAName)
        setBlock.execute(testFileC["lfn"], self.blockAName)
        setBlock.execute(testFileD["lfn"], self.blockBName)
        setBlock.execute(testFileE["lfn"], self.blockBName)

        # set file status to LOCAL
        fileStatus.execute(testFileA["lfn"], "LOCAL")
        fileStatus.execute(testFileB["lfn"], "LOCAL")
        fileStatus.execute(testFileC["lfn"], "LOCAL")
        fileStatus.execute(testFileD["lfn"], "LOCAL")
        fileStatus.execute(testFileE["lfn"], "LOCAL")

        # associate files to a given workflow
        associateWorkflow.execute(testFileA["lfn"], "BogusRequest", "BogusTask")
        associateWorkflow.execute(testFileB["lfn"], "BogusRequest", "BogusTask")
        associateWorkflow.execute(testFileC["lfn"], "BogusRequest", "BogusTask")
        associateWorkflow.execute(testFileD["lfn"], "BogusRequest", "BogusTask")
        associateWorkflow.execute(testFileE["lfn"], "BogusRequest", "BogusTask")

        return

    def stuffDatabase(self):
        """
        _stuffDatabase_

        Fill the dbsbuffer with some files and blocks.  We'll insert a total
        of 5 files spanning two blocks.  There will be a total of two datasets
        inserted into the database.

        All files will already be in GLOBAL status and flagged as in_phedex.
        """
        myThread = threading.currentThread()

        buffer3Factory = DAOFactory(package="WMComponent.DBS3Buffer",
                                    logger=myThread.logger,
                                    dbinterface=myThread.dbi)
        insertWorkflow = buffer3Factory(classname="InsertWorkflow")
        insertWorkflow.execute("BogusRequestA", "BogusTask",
                               0, 0, 0, 0)
        insertWorkflow.execute("BogusRequestB", "BogusTask",
                               0, 0, 0, 0)

        checksums = {"adler32": "1234", "cksum": "5678"}
        testFileA = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                                  checksums=checksums,
                                  locations=set(["srm-cms.cern.ch"]))
        testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                               appFam="RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileA.setDatasetPath(self.testDatasetA)
        testFileA.addRun(Run(2, *[45]))
        testFileA.create()

        testFileB = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                                  checksums=checksums,
                                  locations=set(["srm-cms.cern.ch"]))
        testFileB.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                               appFam="RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileB.setDatasetPath(self.testDatasetA)
        testFileB.addRun(Run(2, *[45]))
        testFileB.create()

        testFileC = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                                  checksums=checksums,
                                  locations=set(["srm-cms.cern.ch"]))
        testFileC.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                               appFam="RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileC.setDatasetPath(self.testDatasetA)
        testFileC.addRun(Run(2, *[45]))
        testFileC.create()

        self.testFilesA.append(testFileA)
        self.testFilesA.append(testFileB)
        self.testFilesA.append(testFileC)

        testFileD = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                                  checksums=checksums,
                                  locations=set(["srm-cms.cern.ch"]))
        testFileD.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                               appFam="RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileD.setDatasetPath(self.testDatasetB)
        testFileD.addRun(Run(2, *[45]))
        testFileD.create()

        testFileE = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                                  checksums=checksums,
                                  locations=set(["srm-cms.cern.ch"]))
        testFileE.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                               appFam="RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileE.setDatasetPath(self.testDatasetB)
        testFileE.addRun(Run(2, *[45]))
        testFileE.create()

        self.testFilesB.append(testFileD)
        self.testFilesB.append(testFileE)

        uploadFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                                   logger=myThread.logger,
                                   dbinterface=myThread.dbi)
        datasetAction = uploadFactory(classname="NewDataset")
        createAction = uploadFactory(classname="CreateBlocks")

        datasetAction.execute(datasetPath=self.testDatasetA)
        datasetAction.execute(datasetPath=self.testDatasetB)

        self.blockAName = self.testDatasetA + "#" + makeUUID()
        self.blockBName = self.testDatasetB + "#" + makeUUID()

        newBlockA = DBSBufferBlock(name=self.blockAName,
                                   location="srm-cms.cern.ch",
                                   datasetpath=None)
        newBlockA.setDataset(self.testDatasetA, 'data', 'VALID')
        newBlockA.status = 'Closed'

        newBlockB = DBSBufferBlock(name=self.blockBName,
                                   location="srm-cms.cern.ch",
                                   datasetpath=None)
        newBlockB.setDataset(self.testDatasetB, 'data', 'VALID')
        newBlockB.status = 'Closed'

        createAction.execute(blocks=[newBlockA, newBlockB])

        bufferFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                                   logger=myThread.logger,
                                   dbinterface=myThread.dbi)

        setBlock = bufferFactory(classname="DBSBufferFiles.SetBlock")
        setBlock.execute(testFileA["lfn"], self.blockAName)
        setBlock.execute(testFileB["lfn"], self.blockAName)
        setBlock.execute(testFileC["lfn"], self.blockAName)
        setBlock.execute(testFileD["lfn"], self.blockBName)
        setBlock.execute(testFileE["lfn"], self.blockBName)

        fileStatus = bufferFactory(classname="DBSBufferFiles.SetStatus")
        fileStatus.execute(testFileA["lfn"], "GLOBAL")
        fileStatus.execute(testFileB["lfn"], "GLOBAL")
        fileStatus.execute(testFileC["lfn"], "GLOBAL")
        fileStatus.execute(testFileD["lfn"], "GLOBAL")
        fileStatus.execute(testFileE["lfn"], "GLOBAL")

        phedexStatus = bufferFactory(classname="DBSBufferFiles.SetPhEDExStatus")
        phedexStatus.execute(testFileA["lfn"], 1)
        phedexStatus.execute(testFileB["lfn"], 1)
        phedexStatus.execute(testFileC["lfn"], 1)
        phedexStatus.execute(testFileD["lfn"], 1)
        phedexStatus.execute(testFileE["lfn"], 1)

        associateWorkflow = buffer3Factory(classname="DBSBufferFiles.AssociateWorkflowToFile")
        associateWorkflow.execute(testFileA["lfn"], "BogusRequestA", "BogusTask")
        associateWorkflow.execute(testFileB["lfn"], "BogusRequestA", "BogusTask")
        associateWorkflow.execute(testFileC["lfn"], "BogusRequestA", "BogusTask")
        associateWorkflow.execute(testFileD["lfn"], "BogusRequestB", "BogusTask")
        associateWorkflow.execute(testFileE["lfn"], "BogusRequestB", "BogusTask")

        # Make the desired subscriptions
        insertSubAction = buffer3Factory(classname="NewSubscription")
        datasetA = DBSBufferDataset(path=self.testDatasetA)
        datasetB = DBSBufferDataset(path=self.testDatasetB)
        workload = WMWorkloadHelper()
        workload.load(os.path.join(getTestBase(), 'WMComponent_t/PhEDExInjector_t/specs/TestWorkload.pkl'))
        insertSubAction.execute(datasetA.exists(), workload.getSubscriptionInformation()[self.testDatasetA])
        insertSubAction.execute(datasetB.exists(), workload.getSubscriptionInformation()[self.testDatasetB])

        return
Example #23
    def testPublishJSONCreate(self):
        """
        Re-run testA_BasicFunctionTest with data in DBSBuffer
        Make sure files are generated
        """

        # Enable uploading of the publish info and write the files elsewhere, since the test deletes them.
        self.uploadPublishInfo = True
        self.uploadPublishDir = self.testDir

        # Insert some DBSFiles
        testFileChildA = DBSBufferFile(lfn="/this/is/a/child/lfnA",
                                       size=1024,
                                       events=20)
        testFileChildA.setAlgorithm(appName="cmsRun",
                                    appVer="CMSSW_2_1_8",
                                    appFam="RECO",
                                    psetHash="GIBBERISH",
                                    configContent="MOREGIBBERISH")
        testFileChildB = DBSBufferFile(lfn="/this/is/a/child/lfnB",
                                       size=1024,
                                       events=20)
        testFileChildB.setAlgorithm(appName="cmsRun",
                                    appVer="CMSSW_2_1_8",
                                    appFam="RECO",
                                    psetHash="GIBBERISH",
                                    configContent="MOREGIBBERISH")
        testFileChildC = DBSBufferFile(lfn="/this/is/a/child/lfnC",
                                       size=1024,
                                       events=20)
        testFileChildC.setAlgorithm(appName="cmsRun",
                                    appVer="CMSSW_2_1_8",
                                    appFam="RECO",
                                    psetHash="GIBBERISH",
                                    configContent="MOREGIBBERISH")

        testFileChildA.setDatasetPath("/Cosmics/USER-DATASET1-v1/USER")
        testFileChildB.setDatasetPath("/Cosmics/USER-DATASET1-v1/USER")
        testFileChildC.setDatasetPath("/Cosmics/USER-DATASET2-v1/USER")

        testFileChildA.create()
        testFileChildB.create()
        testFileChildC.create()

        testFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
        testFile.setAlgorithm(appName="cmsRun",
                              appVer="CMSSW_2_1_8",
                              appFam="RECO",
                              psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFile.create()

        testFileChildA.addParents([testFile["lfn"]])
        testFileChildB.addParents([testFile["lfn"]])
        testFileChildC.addParents([testFile["lfn"]])

        myThread = threading.currentThread()
        self.dbsDaoFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                                        logger=myThread.logger,
                                        dbinterface=myThread.dbi)
        self.insertWorkflow = self.dbsDaoFactory(classname="InsertWorkflow")
        workflowID = self.insertWorkflow.execute(
            requestName='TestWorkload', taskPath='TestWorkload/Analysis')
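        # Attach the three child files (ids 1-3) to the workflow inserted
        # above; the raw SQL below assumes that workflow received id 1 in
        # this freshly initialised schema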
        myThread.dbi.processData(
            "update dbsbuffer_file set workflow=1 where id < 4")

        # Run the test again
        self.testA_BasicFunctionTest()

        # Reset default values
        self.uploadPublishInfo = False
        self.uploadPublishDir = None

        # Make sure the files are there
        self.assertTrue(
            os.path.exists(
                os.path.join(self.testDir, 'TestWorkload_publish.json')))
        self.assertTrue(
            os.path.getsize(
                os.path.join(self.testDir, 'TestWorkload_publish.json')) > 100)
        self.assertTrue(
            os.path.exists(
                os.path.join(self.testDir, 'TestWorkload_publish.tgz')))
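
        # One could also parse the JSON to confirm it is well formed; a
        # minimal additional check (a sketch, not part of the original test):
        #
        #     import json
        #     with open(os.path.join(self.testDir, 'TestWorkload_publish.json')) as fp:
        #         publishData = json.load(fp)
        #     self.assertTrue(publishData)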

        return
Example #24
    def testB_AlgoMigration(self):
        """
        _AlgoMigration_

        Test our ability to migrate multiple algos to global.

        Do this by creating, mid-poll, two separate batches of files:
        one with the same dataset but a different algo, and one with the
        same algo but a different dataset.  Check that both reach global DBS.
        """
        #raise nose.SkipTest
        myThread = threading.currentThread()
        config = self.createConfig()
        self.injectWorkflow(MaxWaitTime=20)
        name = "ThisIsATest_%s" % (makeUUID())
        tier = "RECO"
        nFiles = 12
        files = self.getFiles(name=name, tier=tier, nFiles=nFiles)
        datasetPath = '/%s/%s/%s' % (name, name, tier)

        # Load components that are necessary to check status
        factory = WMFactory("dbsUpload",
                            "WMComponent.DBSUpload.Database.Interface")
        dbinterface = factory.loadObject("UploadToDBS")

        dbsInterface = DBSInterface(config=config)
        localAPI = dbsInterface.getAPIRef()
        globeAPI = dbsInterface.getAPIRef(globalRef=True)
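        # getAPIRef() returns a handle to the local DBS instance; with
        # globalRef=True it returns the global one, used below to verify
        # that blocks actually migrated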

        testDBSUpload = DBSUploadPoller(config=config)
        testDBSUpload.algorithm()

        # There should now be one block
        result = listBlocks(apiRef=globeAPI, datasetPath=datasetPath)
        self.assertEqual(len(result), 1)

        # Okay, by now, the first migration should have gone through.
        # Now create a second batch of files with the same dataset
        # but a different algo.
        for i in range(0, nFiles):
            testFile = DBSBufferFile(lfn='%s-batch2-%i' % (name, i),
                                     size=1024,
                                     events=20,
                                     checksums={'cksum': 1},
                                     locations="malpaquet")
            testFile.setAlgorithm(appName="cmsRun",
                                  appVer="CMSSW_3_1_1",
                                  appFam=tier,
                                  psetHash="GIBBERISH_PART2",
                                  configContent=self.configURL)
            testFile.setDatasetPath(datasetPath)
            testFile.addRun(Run(1, *[46]))
            testFile.create()

        # Have to do things twice to get parents
        testDBSUpload.algorithm()
        testDBSUpload.algorithm()

        # There should now be two blocks
        result = listBlocks(apiRef=globeAPI, datasetPath=datasetPath)
        self.assertEqual(len(result), 2)

        # Now create another batch of files with the original algo
        # But in a different dataset
        for i in range(0, nFiles):
            testFile = DBSBufferFile(lfn='%s-batch3-%i' % (name, i),
                                     size=1024,
                                     events=20,
                                     checksums={'cksum': 1},
                                     locations="malpaquet")
            testFile.setAlgorithm(appName=name,
                                  appVer="CMSSW_3_1_1",
                                  appFam=tier,
                                  psetHash="GIBBERISH",
                                  configContent=self.configURL)
            testFile.setDatasetPath('/%s/%s_3/%s' % (name, name, tier))
            testFile.addRun(Run(1, *[46]))
            testFile.create()

        # Do it twice for parentage.
        testDBSUpload.algorithm()
        testDBSUpload.algorithm()

        # There should now be one block in the new dataset
        result = listBlocks(apiRef=globeAPI,
                            datasetPath='/%s/%s_3/%s' % (name, name, tier))
        self.assertEqual(len(result), 1)

        # Well, all the blocks got there, so we're done
        return
Example #25
    def createFilesWithChildren(self, moreParentFiles, acqEra):
        """
        _createFilesWithChildren_

        Create several parentless files and then create child files.
        """
        parentFiles = []
        childFiles = []

        baseLFN = "/store/data/%s/Cosmics/RAW/v1/000/143/316/" % (acqEra)
        for i in range(10):
            testFile = DBSBufferFile(lfn=baseLFN + makeUUID() + ".root", size=1024,
                                     events=20, checksums={"cksum": 1})
            testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_3_1_1",
                                  appFam="RAW", psetHash="GIBBERISH",
                                  configContent="MOREGIBBERISH")
            testFile.setDatasetPath("/Cosmics/%s-v1/RAW" % (acqEra))

            testFile['block_close_max_wait_time'] = 1000000
            testFile['block_close_max_events'] = 1000000
            testFile['block_close_max_size'] = 1000000
            testFile['block_close_max_files'] = 1000000

            lumis = []
            for j in range(10):
                lumis.append((i * 10) + j)
            testFile.addRun(Run(143316, *lumis))

            testFile.setAcquisitionEra(acqEra)
            testFile.setProcessingVer("1")
            testFile.setGlobalTag("START54::All")
            testFile.create()
            testFile.setLocation("malpaquet")
            parentFiles.append(testFile)

        baseLFN = "/store/data/%s/Cosmics/RECO/v1/000/143/316/" % (acqEra)
        for i in range(5):
            testFile = DBSBufferFile(lfn=baseLFN + makeUUID() + ".root", size=1024,
                                     events=20, checksums={"cksum": 1})
            testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_3_1_1",
                                  appFam="RECO", psetHash="GIBBERISH",
                                  configContent="MOREGIBBERISH")
            testFile.setDatasetPath("/Cosmics/%s-v1/RECO" % (acqEra))

            testFile['block_close_max_wait_time'] = 1000000
            testFile['block_close_max_events'] = 1000000
            testFile['block_close_max_size'] = 1000000
            testFile['block_close_max_files'] = 1000000

            lumis = []
            for j in range(20):
                lumis.append((i * 20) + j)
            testFile.addRun(Run(143316, *lumis))

            testFile.setAcquisitionEra(acqEra)
            testFile.setProcessingVer("1")
            testFile.setGlobalTag("START54::All")
            testFile.create()
            testFile.setLocation("malpaquet")
            testFile.addParents([parentFiles[i * 2]["lfn"],
                                 parentFiles[i * 2 + 1]["lfn"]])
            testFile.addParents([moreParentFiles[i * 2]["lfn"],
                                 moreParentFiles[i * 2 + 1]["lfn"]])
            childFiles.append(testFile)

        return (parentFiles, childFiles)
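
    # A usage sketch for the helper above (hypothetical call sequence,
    # assuming a companion createParentFiles() helper returns the parentless
    # files it creates):
    #
    #     moreParents = self.createParentFiles("Era2012A")
    #     parents, children = self.createFilesWithChildren(moreParents, "Era2012A")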
Example #26
    def stuffDatabase(self, spec="TestWorkload.pkl"):
        """
        _stuffDatabase_

        Fill the dbsbuffer with some files and blocks.  We'll insert a total
        of 5 files spanning two blocks.  There will be a total of two datasets
        inserted into the database.

        We'll inject files with the location set both to an SE name and to a
        PhEDEx node name.
        """
        myThread = threading.currentThread()

        buffer3Factory = DAOFactory(package="WMComponent.DBS3Buffer",
                                    logger=myThread.logger,
                                    dbinterface=myThread.dbi)
        insertWorkflow = buffer3Factory(classname="InsertWorkflow")
        insertWorkflow.execute("BogusRequest", "BogusTask", 0, 0, 0, 0)

        checksums = {"adler32": "1234", "cksum": "5678"}
        testFileA = DBSBufferFile(lfn=makeUUID(),
                                  size=1024,
                                  events=10,
                                  checksums=checksums,
                                  locations=set(["srm-cms.cern.ch"]))
        testFileA.setAlgorithm(appName="cmsRun",
                               appVer="CMSSW_2_1_8",
                               appFam="RECO",
                               psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileA.setDatasetPath(self.testDatasetA)
        testFileA.addRun(Run(2, *[45]))
        testFileA.create()

        testFileB = DBSBufferFile(lfn=makeUUID(),
                                  size=1024,
                                  events=10,
                                  checksums=checksums,
                                  locations=set(["srm-cms.cern.ch"]))
        testFileB.setAlgorithm(appName="cmsRun",
                               appVer="CMSSW_2_1_8",
                               appFam="RECO",
                               psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileB.setDatasetPath(self.testDatasetA)
        testFileB.addRun(Run(2, *[45]))
        testFileB.create()

        testFileC = DBSBufferFile(lfn=makeUUID(),
                                  size=1024,
                                  events=10,
                                  checksums=checksums,
                                  locations=set(["srm-cms.cern.ch"]))
        testFileC.setAlgorithm(appName="cmsRun",
                               appVer="CMSSW_2_1_8",
                               appFam="RECO",
                               psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileC.setDatasetPath(self.testDatasetA)
        testFileC.addRun(Run(2, *[45]))
        testFileC.create()

        self.testFilesA.append(testFileA)
        self.testFilesA.append(testFileB)
        self.testFilesA.append(testFileC)

        testFileD = DBSBufferFile(lfn=makeUUID(),
                                  size=1024,
                                  events=10,
                                  checksums=checksums,
                                  locations=set(["srm-cms.cern.ch"]))
        testFileD.setAlgorithm(appName="cmsRun",
                               appVer="CMSSW_2_1_8",
                               appFam="RECO",
                               psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileD.setDatasetPath(self.testDatasetB)
        testFileD.addRun(Run(2, *[45]))
        testFileD.create()

        testFileE = DBSBufferFile(lfn=makeUUID(),
                                  size=1024,
                                  events=10,
                                  checksums=checksums,
                                  locations=set(["srm-cms.cern.ch"]))
        testFileE.setAlgorithm(appName="cmsRun",
                               appVer="CMSSW_2_1_8",
                               appFam="RECO",
                               psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileE.setDatasetPath(self.testDatasetB)
        testFileE.addRun(Run(2, *[45]))
        testFileE.create()

        self.testFilesB.append(testFileD)
        self.testFilesB.append(testFileE)

        uploadFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                                   logger=myThread.logger,
                                   dbinterface=myThread.dbi)
        datasetAction = uploadFactory(classname="NewDataset")
        createAction = uploadFactory(classname="CreateBlocks")

        datasetAction.execute(datasetPath=self.testDatasetA)
        datasetAction.execute(datasetPath=self.testDatasetB)

        self.blockAName = self.testDatasetA + "#" + makeUUID()
        self.blockBName = self.testDatasetB + "#" + makeUUID()
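        # Note: the blocks below are built with DBSBlock (das and workflow
        # constructor arguments) rather than the DBSBufferBlock used in the
        # earlier examples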

        newBlockA = DBSBlock(name=self.blockAName,
                             location="srm-cms.cern.ch",
                             das=None,
                             workflow=None)
        newBlockA.setDataset(self.testDatasetA, 'data', 'VALID')
        newBlockA.status = 'Closed'

        newBlockB = DBSBlock(name=self.blockBName,
                             location="srm-cms.cern.ch",
                             das=None,
                             workflow=None)
        newBlockB.setDataset(self.testDatasetB, 'data', 'VALID')
        newBlockB.status = 'Closed'

        createAction.execute(blocks=[newBlockA, newBlockB])

        bufferFactory = DAOFactory(package="WMComponent.DBSBuffer.Database",
                                   logger=myThread.logger,
                                   dbinterface=myThread.dbi)

        setBlock = bufferFactory(classname="DBSBufferFiles.SetBlock")
        setBlock.execute(testFileA["lfn"], self.blockAName)
        setBlock.execute(testFileB["lfn"], self.blockAName)
        setBlock.execute(testFileC["lfn"], self.blockAName)
        setBlock.execute(testFileD["lfn"], self.blockBName)
        setBlock.execute(testFileE["lfn"], self.blockBName)

        fileStatus = bufferFactory(classname="DBSBufferFiles.SetStatus")
        fileStatus.execute(testFileA["lfn"], "LOCAL")
        fileStatus.execute(testFileB["lfn"], "LOCAL")
        fileStatus.execute(testFileC["lfn"], "LOCAL")
        fileStatus.execute(testFileD["lfn"], "LOCAL")
        fileStatus.execute(testFileE["lfn"], "LOCAL")

        associateWorkflow = buffer3Factory(
            classname="DBSBufferFiles.AssociateWorkflowToFile")
        associateWorkflow.execute(testFileA["lfn"], "BogusRequest",
                                  "BogusTask")
        associateWorkflow.execute(testFileB["lfn"], "BogusRequest",
                                  "BogusTask")
        associateWorkflow.execute(testFileC["lfn"], "BogusRequest",
                                  "BogusTask")
        associateWorkflow.execute(testFileD["lfn"], "BogusRequest",
                                  "BogusTask")
        associateWorkflow.execute(testFileE["lfn"], "BogusRequest",
                                  "BogusTask")

        return