Example #1
0
    def testLoad(self):
        """
        _testLoad_

        Test the loading of file meta data using the ID of a file and the
        LFN of a file.
        """
        testFileA = File(lfn="/this/is/a/lfn", size=1024, events=10,
                         checksums={'cksum': 101}, first_event=2, merged=True)
        testFileA.create()

        testFileB = File(lfn=testFileA["lfn"])
        testFileB.load()
        testFileC = File(id=testFileA["id"])
        testFileC.load()

        self.assertEqual(testFileA, testFileB, "File load by LFN didn't work")
        self.assertEqual(testFileA, testFileC, "File load by ID didn't work")

        self.assertTrue(isinstance(testFileB["id"], int), "File id is not an integer type.")
        self.assertTrue(isinstance(testFileB["size"], int), "File size is not an integer type.")
        self.assertTrue(isinstance(testFileB["events"], int), "File events is not an integer type.")
        self.assertTrue(isinstance(testFileB["checksums"], dict), "File cksum is not a string type.")
        self.assertTrue(isinstance(testFileB["first_event"], int), "File first_event is not an integer type.")

        self.assertTrue(isinstance(testFileC["id"], int), "File id is not an integer type.")
        self.assertTrue(isinstance(testFileC["size"], int), "File size is not an integer type.")
        self.assertTrue(isinstance(testFileC["events"], int), "File events is not an integer type.")
        self.assertTrue(isinstance(testFileC["checksums"], dict), "File cksum is not an string type.")
        self.assertTrue(isinstance(testFileC["first_event"], int), "File first_event is not an integer type.")

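        # The checksum was created above as the integer 101; after load() it is
        # expected back as the string '101', as the assertion below shows.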
        self.assertEqual(testFileC['checksums'], {'cksum': '101'})

        testFileA.delete()
        return
Example #2
0
    def testSetLocationOrder(self):
        """
        _testSetLocationOrder_

        This tests that you can specify a location before creating the file,
        instead of having to do it afterwards.
        """
        myThread = threading.currentThread()

        testFileA = File(lfn="/this/is/a/lfn", size=1024, events=10)
        testFileA.setLocation("se1.cern.ch")
        testFileA.create()

        testFileB = File(lfn=testFileA["lfn"])
        testFileB.load()

        daoFactory = DAOFactory(package="WMCore.WMBS",
                                logger=logging,
                                dbinterface=myThread.dbi)

        locationFac = daoFactory(classname="Files.GetLocation")
        location = locationFac.execute(testFileB['lfn']).pop()

        self.assertEqual(location, 'se1.cern.ch')

        return
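
The DAO lookup in testSetLocationOrder follows the generic WMCore DAOFactory pattern: bind a factory to a package and a database interface, then instantiate individual DAO classes by name. A minimal sketch of that pattern, assuming the standard WMCore.DAOFactory import and a thread-attached dbi as in the test above:

    import logging
    import threading

    from WMCore.DAOFactory import DAOFactory  # assumed standard import path

    myThread = threading.currentThread()
    daoFactory = DAOFactory(package="WMCore.WMBS",
                            logger=logging,
                            dbinterface=myThread.dbi)

    # DAOs are looked up by dotted classname relative to the package.
    locationFac = daoFactory(classname="Files.GetLocation")
    locations = locationFac.execute("/this/is/a/lfn")  # a collection of location names; the test pops one entry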
Example #3
0
    def testLoad(self):
        """
        _testLoad_

        Test the loading of file meta data using the ID of a file and the
        LFN of a file.
        """
        testFileA = File(lfn="/this/is/a/lfn",
                         size=1024,
                         events=10,
                         checksums={'cksum': 101},
                         first_event=2,
                         merged=True)
        testFileA.create()

        testFileB = File(lfn=testFileA["lfn"])
        testFileB.load()
        testFileC = File(id=testFileA["id"])
        testFileC.load()

        assert testFileA == testFileB, \
               "ERROR: File load by LFN didn't work"

        assert testFileA == testFileC, \
               "ERROR: File load by ID didn't work"

        assert type(testFileB["id"]) == int, \
               "ERROR: File id is not an integer type."
        assert type(testFileB["size"]) == int, \
               "ERROR: File size is not an integer type."
        assert type(testFileB["events"]) == int, \
               "ERROR: File events is not an integer type."
        assert type(testFileB["checksums"]) == dict, \
               "ERROR: File cksum is not a string type."
        assert type(testFileB["first_event"]) == int, \
               "ERROR: File first_event is not an integer type."

        assert type(testFileC["id"]) == int, \
               "ERROR: File id is not an integer type."
        assert type(testFileC["size"]) == int, \
               "ERROR: File size is not an integer type."
        assert type(testFileC["events"]) == int, \
               "ERROR: File events is not an integer type."
        assert type(testFileC["checksums"]) == dict, \
               "ERROR: File cksum is not an string type."
        assert type(testFileC["first_event"]) == int, \
               "ERROR: File first_event is not an integer type."

        self.assertEqual(testFileC['checksums'], {'cksum': '101'})

        testFileA.delete()
        return
Example #4
0
    def testLoad(self):
        """
        _testLoad_

        Test the loading of file meta data using the ID of a file and the
        LFN of a file.
        """
        testFileA = File(
            lfn="/this/is/a/lfn",
            size=1024,
            events=10,
            checksums={"cksum": 101},
            first_event=2,
            last_event=3,
            merged=True,
        )
        testFileA.create()

        testFileB = File(lfn=testFileA["lfn"])
        testFileB.load()
        testFileC = File(id=testFileA["id"])
        testFileC.load()

        assert testFileA == testFileB, "ERROR: File load by LFN didn't work"

        assert testFileA == testFileC, "ERROR: File load by ID didn't work"

        assert type(testFileB["id"]) == int, "ERROR: File id is not an integer type."
        assert type(testFileB["size"]) == int, "ERROR: File size is not an integer type."
        assert type(testFileB["events"]) == int, "ERROR: File events is not an integer type."
        assert type(testFileB["checksums"]) == dict, "ERROR: File cksum is not a string type."
        assert type(testFileB["first_event"]) == int, "ERROR: File first_event is not an integer type."
        assert type(testFileB["last_event"]) == int, "ERROR: File last_event is not an integer type."

        assert type(testFileC["id"]) == int, "ERROR: File id is not an integer type."
        assert type(testFileC["size"]) == int, "ERROR: File size is not an integer type."
        assert type(testFileC["events"]) == int, "ERROR: File events is not an integer type."
        assert type(testFileC["checksums"]) == dict, "ERROR: File cksum is not an string type."
        assert type(testFileC["first_event"]) == int, "ERROR: File first_event is not an integer type."
        assert type(testFileC["last_event"]) == int, "ERROR: File last_event is not an integer type."

        self.assertEqual(testFileC["checksums"], {"cksum": "101"})

        testFileA.delete()
        return
Example #5
0
    def filesOfStatusByRun(self, status, runID):
        """
        _filesOfStatusByRun_
        
        Return all the files in the given subscription and the given run which
        have the given status.
        """
        existingTransaction = self.beginTransaction()

        files = []
        action = self.daofactory(classname = "Subscriptions.Get%sFilesByRun" % status)
        for f in action.execute(self["id"], runID, conn = self.getDBConn(),
                                transaction = self.existingTransaction()):
            fl = File(id = f["file"])
            fl.load()
            files.append(fl)

        self.commitTransaction(existingTransaction)
        return files 
Example #6
0
    def filesOfStatusByRun(self, status, runID):
        """
        _filesOfStatusByRun_

        Return all the files in the given subscription and the given run which
        have the given status.
        """
        existingTransaction = self.beginTransaction()

        files = []
        action = self.daofactory(classname="Subscriptions.Get%sFilesByRun" % status)
        for f in action.execute(self["id"], runID, conn=self.getDBConn(),
                                transaction=self.existingTransaction()):
            fl = File(id=f["file"])
            fl.load()
            files.append(fl)

        self.commitTransaction(existingTransaction)
        return files
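
Note how the DAO classname is composed from the status argument: for example, a status of "Available" resolves to the DAO Subscriptions.GetAvailableFilesByRun. A hypothetical caller might look like this (the subscription instance, the status name, and the run number are assumptions for illustration):

    # Hypothetical usage: files of a given status for one run of this subscription.
    availableFiles = subscription.filesOfStatusByRun("Available", 160000)
    lfns = [wmbsFile["lfn"] for wmbsFile in availableFiles]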
Example #7
0
    def testSetLocationOrder(self):
        """
        _testSetLocationOrder_

        This tests that you can specify a location before creating the file,
        instead of having to do it afterwards.
        """
        myThread = threading.currentThread()

        testFileA = File(lfn = "/this/is/a/lfn", size = 1024, events = 10)
        testFileA.setLocation("se1.cern.ch")
        testFileA.create()

        testFileB = File(lfn = testFileA["lfn"])
        testFileB.load()

        daoFactory = DAOFactory(package = "WMCore.WMBS", logger = logging, dbinterface = myThread.dbi)

        locationFac = daoFactory(classname = "Files.GetLocation")
        location  = locationFac.execute(testFileB['lfn']).pop()

        self.assertEqual(location, 'se1.cern.ch')

        return
Example #8
0
    def testReportHandling(self):
        """
        _testReportHandling_

        Verify that we're able to parse a CMSSW report, convert it to a Report()
        style report, pickle it and then have the accountant process it.
        """
        self.procPath = os.path.join(WMCore.WMBase.getTestBase(),
                                    "WMCore_t/FwkJobReport_t/CMSSWProcessingReport.xml")

        myReport = Report("cmsRun1")
        myReport.parse(self.procPath)

        # Fake some metadata that should be added by the stageout scripts.
        for fileRef in myReport.getAllFileRefsFromStep("cmsRun1"):
            fileRef.size = 1024
            fileRef.location = "cmssrm.fnal.gov"

        fwjrPath = os.path.join(self.tempDir, "ProcReport.pkl")
        cmsRunStep = myReport.retrieveStep("cmsRun1")
        cmsRunStep.status = 0
        myReport.setTaskName('/TestWF/None')
        myReport.persist(fwjrPath)

        self.setFWJRAction.execute(jobID = self.testJob["id"], fwjrPath = fwjrPath)

        pFile = DBSBufferFile(lfn = "/path/to/some/lfn", size = 600000, events = 60000)
        pFile.setAlgorithm(appName = "cmsRun", appVer = "UNKNOWN",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
        pFile.setDatasetPath("/bogus/dataset/path")
        #pFile.addRun(Run(1, *[45]))
        pFile.create()

        config = self.createConfig(workerThreads = 1)
        accountant = JobAccountantPoller(config)
        accountant.setup()
        accountant.algorithm()

        self.verifyJobSuccess(self.testJob["id"])
        self.verifyFileMetaData(self.testJob["id"], myReport.getAllFilesFromStep("cmsRun1"))

        inputFile = File(lfn = "/store/backfill/2/unmerged/WMAgentCommissioining10/MinimumBias/RECO/rereco_GR09_R_34X_V5_All_v1/0000/outputRECORECO.root")
        inputFile.load()
        self.testMergeJob = Job(name = "testMergeJob", files = [inputFile])
        self.testMergeJob.create(group = self.mergeJobGroup)
        self.testMergeJob["state"] = "complete"
        self.stateChangeAction.execute(jobs = [self.testMergeJob])

        self.mergePath = os.path.join(WMCore.WMBase.getTestBase(),
                                         "WMCore_t/FwkJobReport_t/CMSSWMergeReport.xml")

        myReport = Report("mergeReco")
        myReport.parse(self.mergePath)

        # Fake some metadata that should be added by the stageout scripts.
        for fileRef in myReport.getAllFileRefsFromStep("mergeReco"):
            fileRef.size = 1024
            fileRef.location = "cmssrm.fnal.gov"
            fileRef.dataset = {"applicationName": "cmsRun", "applicationVersion": "CMSSW_3_4_2_patch1",
                               "primaryDataset": "MinimumBias", "processedDataset": "Rereco-v1",
                               "dataTier": "RECO"}

        fwjrPath = os.path.join(self.tempDir, "MergeReport.pkl")
        myReport.setTaskName('/MergeWF/None')
        cmsRunStep = myReport.retrieveStep("mergeReco")
        cmsRunStep.status = 0
        myReport.persist(fwjrPath)

        self.setFWJRAction.execute(jobID = self.testMergeJob["id"], fwjrPath = fwjrPath)
        accountant.algorithm()

        self.verifyJobSuccess(self.testMergeJob["id"])
        self.verifyFileMetaData(self.testMergeJob["id"], myReport.getAllFilesFromStep("mergeReco"))

        return
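
Both halves of this test exercise the same report round trip: parse a framework job report, set the task name and step status, then pickle it for the accountant. Reduced to its essentials (the import path and the unpersist() call are assumptions about the Report API; the paths are placeholders):

    from WMCore.FwkJobReport.Report import Report  # assumed import path

    myReport = Report("cmsRun1")
    myReport.parse("/some/path/FrameworkJobReport.xml")   # placeholder path
    myReport.setTaskName('/TestWF/None')
    myReport.retrieveStep("cmsRun1").status = 0
    myReport.persist("/some/path/Report.pkl")             # pickled form handed to the accountant

    # The accountant side reads the pickle back into a Report object.
    recovered = Report("cmsRun1")
    recovered.unpersist("/some/path/Report.pkl")          # assumption: matching load method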
Example #9
0
    def testReportHandling(self):
        """
        _testReportHandling_

        Verify that we're able to parse a CMSSW report, convert it to a Report()
        style report, pickle it and then have the accountant process it.
        """
        self.procPath = os.path.join(WMCore.WMBase.getTestBase(),
                                    "WMCore_t/FwkJobReport_t/CMSSWProcessingReport.xml")

        myReport = Report("cmsRun1")
        myReport.parse(self.procPath)

        # Fake some metadata that should be added by the stageout scripts.
        for fileRef in myReport.getAllFileRefsFromStep("cmsRun1"):
            fileRef.size = 1024
            fileRef.location = "cmssrm.fnal.gov"

        fwjrPath = os.path.join(self.tempDir, "ProcReport.pkl")
        cmsRunStep = myReport.retrieveStep("cmsRun1")
        cmsRunStep.status = 0
        myReport.setTaskName('/TestWF/None')
        myReport.persist(fwjrPath)

        self.setFWJRAction.execute(jobID = self.testJob["id"], fwjrPath = fwjrPath)

        pFile = DBSBufferFile(lfn = "/path/to/some/lfn", size = 600000, events = 60000)
        pFile.setAlgorithm(appName = "cmsRun", appVer = "UNKNOWN",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
        pFile.setDatasetPath("/bogus/dataset/path")
        #pFile.addRun(Run(1, *[45]))
        pFile.create()

        config = self.createConfig(workerThreads = 1)
        accountant = JobAccountantPoller(config)
        accountant.setup()
        accountant.algorithm()

        self.verifyJobSuccess(self.testJob["id"])
        self.verifyFileMetaData(self.testJob["id"], myReport.getAllFilesFromStep("cmsRun1"))

        inputFile = File(lfn = "/store/backfill/2/unmerged/WMAgentCommissioining10/MinimumBias/RECO/rereco_GR09_R_34X_V5_All_v1/0000/outputRECORECO.root")
        inputFile.load()
        self.testMergeJob = Job(name = "testMergeJob", files = [inputFile])
        self.testMergeJob.create(group = self.mergeJobGroup)
        self.testMergeJob["state"] = "complete"
        self.stateChangeAction.execute(jobs = [self.testMergeJob])

        self.mergePath = os.path.join(WMCore.WMBase.getTestBase(),
                                         "WMCore_t/FwkJobReport_t/CMSSWMergeReport.xml")

        myReport = Report("mergeReco")
        myReport.parse(self.mergePath)

        # Fake some metadata that should be added by the stageout scripts.
        for fileRef in myReport.getAllFileRefsFromStep("mergeReco"):
            fileRef.size = 1024
            fileRef.location = "cmssrm.fnal.gov"
            fileRef.dataset = {"applicationName": "cmsRun", "applicationVersion": "CMSSW_3_4_2_patch1",
                               "primaryDataset": "MinimumBias", "processedDataset": "Rereco-v1",
                               "dataTier": "RECO"}

        fwjrPath = os.path.join(self.tempDir, "MergeReport.pkl")
        myReport.setTaskName('/MergeWF/None')
        cmsRunStep = myReport.retrieveStep("mergeReco")
        cmsRunStep.status = 0
        myReport.persist(fwjrPath)

        self.setFWJRAction.execute(jobID = self.testMergeJob["id"], fwjrPath = fwjrPath)
        accountant.algorithm()

        self.verifyJobSuccess(self.testMergeJob["id"])
        self.verifyFileMetaData(self.testMergeJob["id"], myReport.getAllFilesFromStep("mergeReco"))

        return
Example #10
0
    def printBlocksAndFiles(self, location, blockList, type = "missing", verbose = 1):
        """
        Print blocks and files: verbose = 1 prints only blocks, verbose > 1
        prints blocks and files.
        """
        for block in blockList:
            print "\n"
            print "#################################################################"
            print "Block ID: %s Name: %s" % (block["BLOCK_ID"], block["BLOCK_NAME"])

            if verbose < 2:
                continue

            if location == "global":
                print "\n"
                print "List all files from %s block from Global DBS: Get info from local DBS" % type
                for dbsFile in self.localDbsApi.listFiles(blockName=block["BLOCK_NAME"]):
                    print "parent list: %s" % dbsFile["ParentList"]
                    print "file LFN: %s" % dbsFile["LogicalFileName"]

                print "\n"
                fileIDs = ListFiles.listFileIDsByBlockID(self.t0astDBConn, block["BLOCK_ID"])
                print "List all files from %s block from Global DBS: Get info from T0AST" % type
                print "====================================================="
                for fileID in fileIDs:
                    wmbsFile = WMBSFile(id = fileID)
                    wmbsFile.load()
                    print "--------------------------------------"
                    print "Info from: T0AST"
                    print "file LFN: %s" % wmbsFile["lfn"]
                    print ""

                    t0astFile = T0ASTFile(wmbsFile)
                    t0astFile.datasetPathID = \
                        ListDatasets.listDatasetIDForWMBSFile(self.t0astDBConn, wmbsFile["id"])

                    datasetNames = \
                        ListDatasets.listDatasetNamesForWMBSFile(self.t0astDBConn, wmbsFile["id"])
                    t0astFile["PRIMARY_DATASET"] = datasetNames["PRIMARY"]
                    t0astFile["PROCESSED_DATASET"] = datasetNames["PROCESSED"]
                    t0astFile["DATA_TIER"] = datasetNames["TIER"]

                    if t0astFile["DATA_TIER"] == "RECO":
                        t0ParentFileList = t0astFile.getParentList(type="file")

                        for parentFile in t0ParentFileList:
                            t0File = T0ASTFile(parentFile)
                            t0File["BLOCK_ID"] = ListBlock.getBlockIDByFileID(self.t0astDBConn, parentFile["id"])

                            print "Block ID: %s : Parent File: %s" % (t0File["BLOCK_ID"], t0File["LFN"])
                            if t0File["BLOCK_ID"] is not None:
                                blockInfo = ListBlock.getBlockInfoByID(self.t0astDBConn, t0File["BLOCK_ID"])
                                print "Block Name: %s \nStatus: %s" % (blockInfo["BLOCK_NAME"], blockInfo["STATUS"])

                                if blockInfo["STATUS"] == "InFlight" or blockInfo["MIGRATE_STATUS"] == "Migrated":
                                    dbsBlock = self.localDbsApi.listBlocks(block_name=blockInfo["BLOCK_NAME"])
                                    if dbsBlock == []:
                                        print "Block doesn't exist in Local DBS: something is wrong"
                                    else:
                                        print "Block: %s exists in Local DBS" % blockInfo["BLOCK_NAME"]

                                    try:
                                        for dbsFile in self.localDbsApi.listFiles(patternLFN=t0File["LFN"]):
                                            print "File: %s exists in Local DBS" % dbsFile["LogicalFileName"]
                                    except Exception:
                                        print "File doesn't exist in Local DBS"

                                if blockInfo["MIGRATE_STATUS"] == "Migrated":
                                    dbsBlock = self.globalDbsApi.listBlocks(block_name=blockInfo["BLOCK_NAME"])
                                    if dbsBlock == []:
                                        print "Block doesn't exist in Global DBS: something is wrong"
                                    else:
                                        print "Block: %s exists in Global DBS" % blockInfo["BLOCK_NAME"]

                                    try:
                                        for dbsFile in self.globalDbsApi.listFiles(patternLFN=t0File["LFN"]):
                                            print "File: %s exists in Global DBS" % dbsFile["LogicalFileName"]
                                    except Exception:
                                        print "File doesn't exist in Global DBS"

                        print "\n"
                        print "Info from Local DBS: List all parent files from %s block:" % type

                        try:
                            for pfile in self.localDbsApi.listFileParents(t0astFile["LFN"]):
                                print "Parent Block: %s" % pfile["Block"]["NAME"]
                                print "Parent File: %s" % pfile["LogicalFileName"]

                                print "Info from Global DBS: parent block for %s block:" % type
                                globalBlockList = self.globalDbsApi.listBlocks(block_name=block["BLOCK_NAME"])
                                if globalBlockList == []:
                                    print "Global DBS doesn't have block %s" % pfile["Block"]["NAME"]
                                else:
                                    for dbsBlock in globalBlockList:
                                        print "Global DBS Parent block %s exists" % dbsBlock["NAME"]

                        except Exception, ex:
                            print "No parent files found in Local DBS"

                        print "====================================================="