Example #1
    def testDataStructsFile(self):
        """
        _testDataStructsFile_

        Tests our ability to create a WMBS file from a DataStructs File and vice versa
        """

        myThread = threading.currentThread()

        testLFN = "lfn1"
        testSize = 1024
        testEvents = 100
        testCksum = {"cksum": '1'}
        testParents = set(["lfn2"])
        testRun = Run(1, 45)
        testSE = "se1.cern.ch"

        parentFile = File(lfn="lfn2")
        parentFile.create()

        testFile = File()

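        # Build the DataStructs file that will be converted into the WMBS file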
        inputFile = WMFile(lfn=testLFN,
                           size=testSize,
                           events=testEvents,
                           checksums=testCksum,
                           parents=testParents)
        inputFile.addRun(testRun)
        inputFile.setLocation(se=testSE)

        testFile.loadFromDataStructsFile(file=inputFile)
        testFile.create()
        testFile.save()

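        # Reload the file from the database and verify the persisted attributes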
        loadFile = File(lfn="lfn1")
        loadFile.loadData(parentage=1)

        self.assertEqual(loadFile['size'], testSize)
        self.assertEqual(loadFile['events'], testEvents)
        self.assertEqual(loadFile['checksums'], testCksum)
        self.assertEqual(loadFile['locations'], set([testSE]))
        #self.assertEqual(loadFile['parents'].pop()['lfn'], 'lfn2')

        wmFile = loadFile.returnDataStructsFile()
        self.assertTrue(wmFile == inputFile)

        return
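The test above relies on the surrounding unittest class for its imports and database setup. A minimal sketch of the same round trip is shown below; the WMCore import paths follow the usual package layout, and the WMBS database session is assumed to have been initialised beforehand (in the test suite this is typically handled by WMQuality.TestInit), so treat it as an illustration of the conversion path rather than the exact test fixture.

from WMCore.DataStructs.File import File as WMFile
from WMCore.DataStructs.Run import Run
from WMCore.WMBS.File import File


def roundTripFile():
    # Build the DataStructs file, attach a run/lumi and a location
    inputFile = WMFile(lfn="lfn1", size=1024, events=100,
                       checksums={"cksum": "1"})
    inputFile.addRun(Run(1, 45))
    inputFile.setLocation(se="se1.cern.ch")

    # Convert it into a WMBS file and persist it
    wmbsFile = File()
    wmbsFile.loadFromDataStructsFile(file=inputFile)
    wmbsFile.create()
    wmbsFile.save()

    # Load it back from the database and convert to DataStructs again
    loadFile = File(lfn="lfn1")
    loadFile.loadData(parentage=1)
    return loadFile.returnDataStructsFile() == inputFile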
Example #2
    def __call__(self, filesets):
        """
        The algorithm itself
        """
        # Update run list
        self.getNewRuns()

        # Do per fileset work, abandon fileset processing on exception
        for fileset in filesets:
            ds = fileset.name
            try:
                # Do per run work
                watchCompleteFiles = []

                for watch in self.watchedRuns:
                    # Ensure watcher has dataset listed
                    watch.addDatasetOfInterest(ds)

                    # Query DBS to find all blocks for this run / dataset
                    (files, blocks, fileInfoMap) = \
                        self.dbsHelper.getFileInfo(watch.run, ds)

                    # Now determine all required parent blocks
                    parentBlocks = set()
                    if fileset.requireParents:
                        parentDs = self.dbsHelper.getParentDataset(ds)
                        parentBlocks = self.dbsHelper.getBlockInfo(
                            watch.run, parentDs)

                    # Final list of all required blocks
                    allBlocks = set(blocks)
                    allBlocks.update(parentBlocks)

                    # Find all sites where all blocks are complete
                    sites = self.phedexHelper.getCompleteSites(blocks)

                    # Get sites with newly completed transfers
                    newSites = watch.getNewSites(ds, sites)

                    if len(newSites) > 0:
                        # Add the files for these blocks to the fileset
                        for file in fileInfoMap:
                            fi = fileInfoMap[file]

                            # First add parent file
                            if fileset.requireParents:
                                parentFile = File(lfn=fi["file.parent"])
                                parentFile.save()
                                parentFile.setLocation(newSites)

                            # Add actual file
                            fileToAdd = File(lfn=file,
                                             size=fi["file.size"],
                                             events=fi["file.events"],
                                             run=watch.run,
                                             lumi=fi["file.lumi"])
                            if not fileToAdd.exists(
                            ) and fileset.requireParents:
                                fileToAdd.addParent(fi["file.parent"])

                            # Add new locations but don't persist immediately
                            fileToAdd.setLocations(newSites,
                                                   immediateSave=False)

                            # Add the file to the new file list
                            fileset.addFile(fileToAdd)

                    # Add the site info to the watcher list
                    watchCompleteFiles.append([watch, ds, newSites])

                # Commit the fileset
                fileset.commit()

                # Add the watched runs
                for a in watchCompleteFiles:
                    a[0].addCompletedNodes(a[1], a[2])

            except Exception:
                # Reset the watch list so we re-evaluate next call
                watchCompleteFiles = []

        # Purge old runs
        self.purgeWatchedRuns()
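For context, a feeder like this is driven by handing it the filesets it should keep populated; each call is one polling pass. The sketch below only illustrates that calling pattern. The feeder instance and the fileset objects are assumed to have been constructed elsewhere (their constructors are not shown above), so the names here are placeholders.

def pollOnce(feeder, filesets):
    # A single pass: __call__ refreshes the watched run list, adds files from
    # newly completed blocks to each fileset, commits the filesets, records the
    # completed sites on each watched run and finally purges stale runs.
    feeder(filesets)

# Hypothetical usage, assuming 'feeder' and 'watchedFilesets' exist:
# pollOnce(feeder, watchedFilesets)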