Example #1
    def testB_DONOTUSE(self):
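        # Note: the bare return below disables this test; the remaining body never runs.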
        return
        config = self.getConfig()
        config.DBSUpload.DBSBlockMaxFiles = 1
        config.DBSUpload.copyBlock = True

        name = "ThisIsATest_%s" % (makeUUID())
        tier = "RECO"
        nFiles = 10
        name = name.replace('-', '_')
        name = '%s-v0' % name
        files = self.getFiles(name = name, tier = tier, nFiles = nFiles, nLumis = 3)
        datasetPath = '/%s/%s/%s' % (name, name, tier)

        from WMComponent.DBS3Buffer.DBSUploadPoller import DBSUploadPoller
        dbsUploader = DBSUploadPoller(config = config)
        dbsUtil     = DBSBufferUtil()
        from dbs.apis.dbsClient import DbsApi
        dbsApi      = DbsApi(url = config.DBSUpload.dbsUrl)

        # This should do nothing
        # Just making sure we don't crash
        try:
            dbsUploader.algorithm()
        except:
            dbsUploader.close()
            raise
Example #2
    def __init__(self):
        """
        __init__

        Init the DB
        """

        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection(destroyAllDatabase=True)
        self.testInit.setSchema(customModules=["WMComponent.DBS3Buffer"],
                                useDefault=False)
        self.configFile = EmulatorSetup.setupWMAgentConfig()

        myThread = threading.currentThread()
        self.bufferFactory = DAOFactory(
            package="WMComponent.DBSBuffer.Database",
            logger=myThread.logger,
            dbinterface=myThread.dbi)

        locationAction = self.bufferFactory(
            classname="DBSBufferFiles.AddLocation")
        locationAction.execute(siteName="se1.cern.ch")
        locationAction.execute(siteName="se1.fnal.gov")
        locationAction.execute(siteName="malpaquet")

        config = self.getConfig()
        self.dbsUploader = DBSUploadPoller(config=config)

        return
Example #3
    def __init__(self):
        """
        __init__

        Init the DB
        """

        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection(destroyAllDatabase = True)
        self.testInit.setSchema(customModules = ["WMComponent.DBS3Buffer"],
                                useDefault = False)
        self.configFile = EmulatorSetup.setupWMAgentConfig()

        myThread = threading.currentThread()
        self.bufferFactory = DAOFactory(package = "WMComponent.DBS3Buffer",
                                        logger = myThread.logger,
                                        dbinterface = myThread.dbi)

        locationAction = self.bufferFactory(classname = "DBSBufferFiles.AddLocation")
        locationAction.execute(siteName = "se1.cern.ch")
        locationAction.execute(siteName = "se1.fnal.gov")
        locationAction.execute(siteName = "malpaquet")

        config = self.getConfig()
        self.dbsUploader = DBSUploadPoller(config = config)

        return
Example #4
    def preInitialization(self):
        print("DBS3Upload.preInitialization")

        # Add event loop to worker manager
        myThread = threading.currentThread()

        pollInterval = self.config.DBS3Upload.pollInterval
        logging.info("Setting poll interval to %s seconds", pollInterval)
        myThread.workerThreadManager.addWorker(DBSUploadPoller(self.config), pollInterval)

        return
Example #5
    def testB_DONOTUSE(self):

        config = self.getConfig()
        config.DBSUpload.DBSBlockMaxFiles = 1
        config.DBSUpload.copyBlock = True

        name = "ThisIsATest_%s" % (makeUUID())
        tier = "RECO"
        nFiles = 10
        files = self.getFiles(name = name, tier = tier, nFiles = nFiles, nLumis = 3)
        datasetPath = '/%s/%s/%s' % (name, name, tier)

        dbsUploader = DBSUploadPoller(config = config)
        dbsUtil     = DBSBufferUtil()
        dbsApi      = DbsApi(url = config.DBSUpload.dbsUrl)

        # This should do nothing
        # Just making sure we don't crash
        try:
            dbsUploader.algorithm()
        except:
            dbsUploader.close()
            raise
Example #6
    def testDualUpload(self):
        """
        _testDualUpload_

        Verify that the dual upload mode works correctly.
        """
        self.dbsApi = DbsApi(url = self.dbsUrl)
        config = self.getConfig()
        dbsUploader = DBSUploadPoller(config = config)
        dbsUtil = DBSBufferUtil()

        # First test verifies that uploader will poll and then not do anything
        # as the database is empty.
        dbsUploader.algorithm()

        acqEra = "Summer%s" % (int(time.time()))
        parentFiles = self.createParentFiles(acqEra)
        (moreParentFiles, childFiles) = \
                          self.createFilesWithChildren(parentFiles, acqEra)

        allFiles = parentFiles + moreParentFiles
        allBlocks = []
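        # Spread the parent files across four blocks of five files each; only the
        # first two blocks are flagged "InDBS", so only they are eligible for upload
        # on the first pass.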
        for i in range(4):
            DBSBufferDataset(parentFiles[0]["datasetPath"]).create()
            blockName = parentFiles[0]["datasetPath"] + "#" + makeUUID()
            dbsBlock = DBSBufferBlock(blockName,
                                      location = "malpaquet",
                                      datasetpath = None)
            dbsBlock.status = "Open"
            dbsBlock.setDataset(parentFiles[0]["datasetPath"], 'data', 'VALID')
            dbsUtil.createBlocks([dbsBlock])
            for file in allFiles[i * 5 : (i * 5) + 5]:
                dbsBlock.addFile(file, 'data', 'VALID')
                dbsUtil.setBlockFiles({"block": blockName, "filelfn": file["lfn"]})
                if i < 2:
                    dbsBlock.status = "InDBS"
                dbsUtil.updateBlocks([dbsBlock])
            dbsUtil.updateFileStatus([dbsBlock], "InDBS")
            allBlocks.append(dbsBlock)

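        # All of the child files go into a single block that is created as "InDBS".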
        DBSBufferDataset(childFiles[0]["datasetPath"]).create()
        blockName = childFiles[0]["datasetPath"] + "#" + makeUUID()
        dbsBlock = DBSBufferBlock(blockName,
                                  location = "malpaquet",
                                  datasetpath = None)
        dbsBlock.status = "InDBS"
        dbsBlock.setDataset(childFiles[0]["datasetPath"], 'data', 'VALID')
        dbsUtil.createBlocks([dbsBlock])
        for file in childFiles:
            dbsBlock.addFile(file, 'data', 'VALID')
            dbsUtil.setBlockFiles({"block": blockName, "filelfn": file["lfn"]})

        dbsUtil.updateFileStatus([dbsBlock], "InDBS")

        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(parentFiles[0]["datasetPath"], parentFiles)

        # Change the status of the rest of the parent blocks so we can upload
        # them and the children.
        for dbsBlock in allBlocks:
            dbsBlock.status = "InDBS"
            dbsUtil.updateBlocks([dbsBlock])

        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(parentFiles[0]["datasetPath"], parentFiles + moreParentFiles)

        # Run the uploader one more time to upload the children.
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(childFiles[0]["datasetPath"], childFiles)
        return
Example #7
    def testBasicUpload(self):
        """
        _testBasicUpload_

        Verify that we can successfully upload to DBS3.  Also verify that the
        uploader correctly handles file parentage when uploading.
        """
        self.dbsApi = DbsApi(url = self.dbsUrl)
        config = self.getConfig()
        dbsUploader = DBSUploadPoller(config = config)

        # First test verifies that uploader will poll and then not do anything
        # as the database is empty.
        dbsUploader.algorithm()

        acqEra = "Summer%s" % (int(time.time()))
        parentFiles = self.createParentFiles(acqEra)

        # The algorithm needs to be run twice.  On the first iteration it will
        # create all the blocks and upload one.  On the second iteration it will
        # timeout and upload the second block.
        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        # Verify the files made it into DBS3.
        self.verifyData(parentFiles[0]["datasetPath"], parentFiles)

        # Inject some more parent files and some child files into DBSBuffer.
        # Run the uploader twice, only the parent files should be added to DBS3.
        (moreParentFiles, childFiles) = \
                          self.createFilesWithChildren(parentFiles, acqEra)
        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(parentFiles[0]["datasetPath"],
                        parentFiles + moreParentFiles)

        # Run the uploader another two times to upload the child files.  Verify
        # that the child files were uploaded.
        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(childFiles[0]["datasetPath"], childFiles)
        return
Example #8
    def testDualUpload(self):
        """
        _testDualUpload_

        Verify that the dual upload mode works correctly.
        """
        self.dbsApi = DbsApi(url = self.dbsUrl)
        config = self.getConfig(dbs3UploadOnly = True)
        dbsUploader = DBSUploadPoller(config = config)
        dbsUtil = DBSBufferUtil()

        # First test verifies that uploader will poll and then not do anything
        # as the database is empty.
        dbsUploader.algorithm()

        acqEra = "Summer%s" % (int(time.time()))
        parentFiles = self.createParentFiles(acqEra)
        (moreParentFiles, childFiles) = \
                          self.createFilesWithChildren(parentFiles, acqEra)

        allFiles = parentFiles + moreParentFiles
        allBlocks = []
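        # Spread the parent files across four blocks of five files each; only the
        # first two blocks are flagged "InDBS", so only they are eligible for upload
        # on the first pass.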
        for i in range(4):
            blockName = parentFiles[0]["datasetPath"] + "#" + makeUUID()
            dbsBlock = DBSBlock(blockName, "malpaquet", 1)
            dbsBlock.status = "Open"
            dbsUtil.createBlocks([dbsBlock])
            for file in allFiles[i * 5 : (i * 5) + 5]:
                dbsBlock.addFile(file)
                dbsUtil.setBlockFiles({"block": blockName, "filelfn": file["lfn"]})
                if i < 2:
                    dbsBlock.status = "InDBS"
                dbsUtil.updateBlocks([dbsBlock])
            dbsUtil.updateFileStatus([dbsBlock], "InDBS")
            allBlocks.append(dbsBlock)

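        # All of the child files go into a single block that is created as "InDBS".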
        blockName = childFiles[0]["datasetPath"] + "#" + makeUUID()
        dbsBlock = DBSBlock(blockName, "malpaquet", 1)
        dbsBlock.status = "InDBS"
        dbsUtil.createBlocks([dbsBlock])
        for file in childFiles:
            dbsBlock.addFile(file)
            dbsUtil.setBlockFiles({"block": blockName, "filelfn": file["lfn"]})

        dbsUtil.updateFileStatus([dbsBlock], "InDBS")

        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(parentFiles[0]["datasetPath"], parentFiles)

        # Change the status of the rest of the parent blocks so we can upload
        # them and the children.
        for dbsBlock in allBlocks:
            dbsBlock.status = "InDBS"
            dbsUtil.updateBlocks([dbsBlock])

        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(parentFiles[0]["datasetPath"], parentFiles + moreParentFiles)

        # Run the uploader one more time to upload the children.
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(childFiles[0]["datasetPath"], childFiles)
        return
Example #9
    def testBasicUpload(self):
        """
        _testBasicUpload_

        Verify that we can successfully upload to DBS3.  Also verify that the
        uploader correctly handles file parentage when uploading.
        """
        self.dbsApi = DbsApi(url = self.dbsUrl)
        config = self.getConfig()
        dbsUploader = DBSUploadPoller(config = config)

        # First test verifies that uploader will poll and then not do anything
        # as the database is empty.
        dbsUploader.algorithm()

        acqEra = "Summer%s" % (int(time.time()))
        parentFiles = self.createParentFiles(acqEra)

        # The algorithm needs to be run twice.  On the first iteration it will
        # create all the blocks and upload one.  On the second iteration it will
        # timeout and upload the second block.
        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        # Verify the files made it into DBS3.
        self.verifyData(parentFiles[0]["datasetPath"], parentFiles)

        # Inject some more parent files and some child files into DBSBuffer.
        # Run the uploader twice, only the parent files should be added to DBS3.
        (moreParentFiles, childFiles) = \
                          self.createFilesWithChildren(parentFiles, acqEra)
        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(parentFiles[0]["datasetPath"],
                        parentFiles + moreParentFiles)

        # Run the uploader another two times to upload the child files.  Verify
        # that the child files were uploaded.
        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(childFiles[0]["datasetPath"], childFiles)
        return
Example #10
class scaleTestFiller:
    """
    _scaleTestFiller_

    Initializes the DB and the DBSUploader
    On __call__() it creates data and uploads it.
    """

    def __init__(self):
        """
        __init__

        Init the DB
        """

        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection(destroyAllDatabase = True)
        self.testInit.setSchema(customModules = ["WMComponent.DBS3Buffer"],
                                useDefault = False)
        self.configFile = EmulatorSetup.setupWMAgentConfig()

        myThread = threading.currentThread()
        self.bufferFactory = DAOFactory(package = "WMComponent.DBS3Buffer",
                                        logger = myThread.logger,
                                        dbinterface = myThread.dbi)

        locationAction = self.bufferFactory(classname = "DBSBufferFiles.AddLocation")
        locationAction.execute(siteName = "se1.cern.ch")
        locationAction.execute(siteName = "se1.fnal.gov")
        locationAction.execute(siteName = "malpaquet")

        config = self.getConfig()
        self.dbsUploader = DBSUploadPoller(config = config)

        return


    def __call__(self):
        """
        __call__

        Generate some random data
        """

        # Generate somewhere between one and a thousand files
        name = "ThisIsATest_%s" % (makeUUID())
        nFiles = random.randint(10, 2000)
        name = name.replace('-', '_')
        name = '%s-v0' % name
        files = self.getFiles(name = name, nFiles = nFiles)

        print("Inserting %i files for dataset %s" % (nFiles * 2, name))

        try:
            self.dbsUploader.algorithm()
        except:
            self.dbsUploader.close()
            raise

        # Repeat just to make sure
        try:
            self.dbsUploader.algorithm()
        except:
            self.dbsUploader.close()
            raise


        return



    def getConfig(self):
        """
        _getConfig_

        This creates the actual config file used by the component

        """


        config = self.testInit.getConfiguration()
        self.testInit.generateWorkDir(config)

        #First the general stuff
        config.section_("General")
        config.General.workDir = os.getenv("TESTDIR", os.getcwd())

        config.section_("Agent")
        config.Agent.componentName = 'DBSUpload'
        config.Agent.useHeartbeat  = False

        #Now the CoreDatabase information
        #This should be the dialect, dburl, etc
        config.section_("CoreDatabase")
        config.CoreDatabase.connectUrl = os.getenv("DATABASE")
        config.CoreDatabase.socket     = os.getenv("DBSOCK")


        config.component_("DBS3Upload")
        config.DBS3Upload.pollInterval     = 10
        config.DBS3Upload.logLevel         = 'DEBUG'
        config.DBS3Upload.DBSBlockMaxFiles = 500
        config.DBS3Upload.DBSBlockMaxTime  = 600
        config.DBS3Upload.DBSBlockMaxSize  = 999999999999
        config.DBS3Upload.dbsUrl           = 'http://cms-xen40.fnal.gov:8787/dbs/prod/global/DBSWriter'
        config.DBS3Upload.namespace        = 'WMComponent.DBS3Buffer.DBS3Upload'
        config.DBS3Upload.componentDir     = os.path.join(os.getcwd(), 'Components')
        config.DBS3Upload.nProcesses       = 1
        config.DBS3Upload.dbsWaitTime      = 1

        return config



    def getFiles(self, name, tier = 'RECO', nFiles = 12, site = "malpaquet", nLumis = 1):
        """
        Create some quick dummy test files

        """

        files = []

        for f in range(nFiles):
            testFile = DBSBufferFile(lfn = '/data/store/random/random/RANDOM/test/0/%s-%s-%i.root' % (name, site, f), size = 1024,
                                     events = 20, checksums = {'cksum': 1})
            testFile.setAlgorithm(appName = name, appVer = "CMSSW_3_1_1",
                                  appFam = "RECO", psetHash = "GIBBERISH",
                                  configContent = "MOREGIBBERISH")
            testFile.setDatasetPath("/%s/%s/%s" % (name, name, tier))
            lumis = []
            for i in range(nLumis):
                lumis.append((f * 100000) + i)
            testFile.addRun(Run( 1, *lumis))
            testFile.setAcquisitionEra(name.split('-')[0])
            testFile.setProcessingVer("0")
            testFile.setGlobalTag("Weird")
            testFile.create()
            testFile.setLocation(site)
            files.append(testFile)

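        # Create one child file per parent in a second dataset and record the
        # parentage; only the parent files are returned.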
        count = 0
        for f in files:
            count += 1
            testFileChild = DBSBufferFile(lfn = '/data/store/random/random/RANDOM/test/0/%s-%s-%i-child.root' %(name, site, count), size = 1024,
                                          events = 10, checksums = {'cksum': 1})
            testFileChild.setAlgorithm(appName = name, appVer = "CMSSW_3_1_1",
                                       appFam = "RECO", psetHash = "GIBBERISH",
                                       configContent = "MOREGIBBERISH")
            testFileChild.setDatasetPath("/%s/%s_2/RECO" %(name, name))
            testFileChild.addRun(Run( 1, *[45]))
            testFileChild.create()
            testFileChild.setLocation(site)

            testFileChild.addParents([f['lfn']])


        return files
Example #11
    def testA_basicFunction(self):
        """
        _basicFunction_

        See if I can make the damn thing work.
        """
        myThread = threading.currentThread()

        config = self.getConfig()

        from WMComponent.DBS3Buffer.DBSUploadPoller import DBSUploadPoller
        dbsUploader = DBSUploadPoller(config = config)
        dbsUtil     = DBSBufferUtil()
        from dbs.apis.dbsClient import DbsApi
        dbsApi      = DbsApi(url = config.DBSUpload.dbsUrl)

        # This should do nothing
        # Just making sure we don't crash
        try:
            dbsUploader.algorithm()
        except:
            dbsUploader.close()
            raise

        name = "ThisIsATest%s" % (int(time.time()))
        tier = "RECO"
        nFiles = 12
        name = name.replace('-', '_')
        name = '%s-v0' % name
        files = self.getFiles(name = name, tier = tier, nFiles = nFiles)
        datasetPath = "/Cosmics/%s/%s" % (name, tier)

        try:
            dbsUploader.algorithm()
        except:
            dbsUploader.close()
            raise

        time.sleep(5)

        # Now look in DBS
        try:
            result = dbsApi.listDatasets(dataset = datasetPath, detail = True,
                                         dataset_access_type = 'PRODUCTION')
            self.assertEqual(len(result), 1)
            self.assertEqual(result[0]['data_tier_name'], 'RECO')
            self.assertEqual(result[0]['processing_version'], 0)
            self.assertEqual(result[0]['acquisition_era_name'], name.split('-')[0])

            result = dbsApi.listFiles(dataset=datasetPath)
            self.assertEqual(len(result), 11)
        except:
            dbsUploader.close()
            raise

        # All the blocks except for the last one should
        # now be there
        result = myThread.dbi.processData("SELECT id FROM dbsbuffer_block")[0].fetchall()
        self.assertEqual(len(result), 12)

        # The last block should still be open
        self.assertEqual(len(dbsUtil.findOpenBlocks()), 1)

        try:
            dbsUploader.algorithm()
        except:
            raise
        finally:
            dbsUploader.close()

        # All files should now be available
        result = dbsApi.listFiles(dataset=datasetPath)
        self.assertEqual(len(result), 12)


        # The last block should now be closed
        self.assertEqual(len(dbsUtil.findOpenBlocks()), 0)

        result = myThread.dbi.processData("SELECT status FROM dbsbuffer_block")[0].fetchall()
        for res in result:
            self.assertEqual(res.values()[0], 'InDBS')

        return
Example #12
class scaleTestFiller:
    """
    _scaleTestFiller_

    Initializes the DB and the DBSUploader
    On __call__() it creates data and uploads it.
    """
    def __init__(self):
        """
        __init__

        Init the DB
        """

        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection(destroyAllDatabase=True)
        self.testInit.setSchema(customModules=["WMComponent.DBS3Buffer"],
                                useDefault=False)
        self.configFile = EmulatorSetup.setupWMAgentConfig()

        myThread = threading.currentThread()
        self.bufferFactory = DAOFactory(
            package="WMComponent.DBSBuffer.Database",
            logger=myThread.logger,
            dbinterface=myThread.dbi)

        locationAction = self.bufferFactory(
            classname="DBSBufferFiles.AddLocation")
        locationAction.execute(siteName="se1.cern.ch")
        locationAction.execute(siteName="se1.fnal.gov")
        locationAction.execute(siteName="malpaquet")

        config = self.getConfig()
        self.dbsUploader = DBSUploadPoller(config=config)

        return

    def __call__(self):
        """
        __call__

        Generate some random data
        """

        # Generate somewhere between one and a thousand files
        name = "ThisIsATest_%s" % (makeUUID())
        nFiles = random.randint(10, 2000)
        name = name.replace('-', '_')
        name = '%s-v0' % name
        files = self.getFiles(name=name, nFiles=nFiles)

        print "Inserting %i files for dataset %s" % (nFiles * 2, name)

        try:
            self.dbsUploader.algorithm()
        except:
            self.dbsUploader.close()
            raise

        # Repeat just to make sure
        try:
            self.dbsUploader.algorithm()
        except:
            self.dbsUploader.close()
            raise

        return

    def getConfig(self):
        """
        _getConfig_

        This creates the actual config file used by the component

        """

        config = self.testInit.getConfiguration()
        self.testInit.generateWorkDir(config)

        #First the general stuff
        config.section_("General")
        config.General.workDir = os.getenv("TESTDIR", os.getcwd())

        config.section_("Agent")
        config.Agent.componentName = 'DBSUpload'
        config.Agent.useHeartbeat = False

        #Now the CoreDatabase information
        #This should be the dialect, dburl, etc
        config.section_("CoreDatabase")
        config.CoreDatabase.connectUrl = os.getenv("DATABASE")
        config.CoreDatabase.socket = os.getenv("DBSOCK")

        config.component_("DBS3Upload")
        config.DBS3Upload.pollInterval = 10
        config.DBS3Upload.logLevel = 'DEBUG'
        config.DBS3Upload.DBSBlockMaxFiles = 500
        config.DBS3Upload.DBSBlockMaxTime = 600
        config.DBS3Upload.DBSBlockMaxSize = 999999999999
        config.DBS3Upload.dbsUrl = 'http://cms-xen40.fnal.gov:8787/dbs/prod/global/DBSWriter'
        config.DBS3Upload.namespace = 'WMComponent.DBS3Buffer.DBSUpload'
        config.DBS3Upload.componentDir = os.path.join(os.getcwd(),
                                                      'Components')
        config.DBS3Upload.nProcesses = 1
        config.DBS3Upload.dbsWaitTime = 1

        return config

    def getFiles(self,
                 name,
                 tier='RECO',
                 nFiles=12,
                 site="malpaquet",
                 nLumis=1):
        """
        Create some quick dummy test files

        """

        files = []

        for f in range(nFiles):
            testFile = DBSBufferFile(
                lfn='/data/store/random/random/RANDOM/test/0/%s-%s-%i.root' %
                (name, site, f),
                size=1024,
                events=20,
                checksums={'cksum': 1})
            testFile.setAlgorithm(appName=name,
                                  appVer="CMSSW_3_1_1",
                                  appFam="RECO",
                                  psetHash="GIBBERISH",
                                  configContent="MOREGIBBERISH")
            testFile.setDatasetPath("/%s/%s/%s" % (name, name, tier))
            lumis = []
            for i in range(nLumis):
                lumis.append((f * 100000) + i)
            testFile.addRun(Run(1, *lumis))
            testFile.setAcquisitionEra(name.split('-')[0])
            testFile.setProcessingVer("0")
            testFile.setGlobalTag("Weird")
            testFile.create()
            testFile.setLocation(site)
            files.append(testFile)

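        # Create one child file per parent in a second dataset and record the
        # parentage; only the parent files are returned.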
        count = 0
        for f in files:
            count += 1
            testFileChild = DBSBufferFile(
                lfn=
                '/data/store/random/random/RANDOM/test/0/%s-%s-%i-child.root' %
                (name, site, count),
                size=1024,
                events=10,
                checksums={'cksum': 1})
            testFileChild.setAlgorithm(appName=name,
                                       appVer="CMSSW_3_1_1",
                                       appFam="RECO",
                                       psetHash="GIBBERISH",
                                       configContent="MOREGIBBERISH")
            testFileChild.setDatasetPath("/%s/%s_2/RECO" % (name, name))
            testFileChild.addRun(Run(1, *[45]))
            testFileChild.create()
            testFileChild.setLocation(site)

            testFileChild.addParents([f['lfn']])

        return files
Example #13
    def testA_basicFunction(self):
        """
        _basicFunction_

        See if I can make the damn thing work.
        """
        myThread = threading.currentThread()

        config = self.getConfig()

        from WMComponent.DBS3Buffer.DBSUploadPoller import DBSUploadPoller
        dbsUploader = DBSUploadPoller(config=config)
        dbsUtil = DBSBufferUtil()
        from dbs.apis.dbsClient import DbsApi
        dbsApi = DbsApi(url=config.DBSUpload.dbsUrl)

        # This should do nothing
        # Just making sure we don't crash
        try:
            dbsUploader.algorithm()
        except:
            dbsUploader.close()
            raise

        name = "ThisIsATest%s" % (int(time.time()))
        tier = "RECO"
        nFiles = 12
        name = name.replace('-', '_')
        name = '%s-v0' % name
        files = self.getFiles(name=name, tier=tier, nFiles=nFiles)
        datasetPath = "/Cosmics/%s/%s" % (name, tier)

        try:
            dbsUploader.algorithm()
        except:
            dbsUploader.close()
            raise

        time.sleep(5)

        # Now look in DBS
        try:
            result = dbsApi.listDatasets(dataset=datasetPath,
                                         detail=True,
                                         dataset_access_type='PRODUCTION')
            self.assertEqual(len(result), 1)
            self.assertEqual(result[0]['data_tier_name'], 'RECO')
            self.assertEqual(result[0]['processing_version'], 0)
            self.assertEqual(result[0]['acquisition_era_name'],
                             name.split('-')[0])

            result = dbsApi.listFiles(dataset=datasetPath)
            self.assertEqual(len(result), 11)
        except:
            dbsUploader.close()
            raise

        # All the blocks except for the last one should
        # now be there
        result = myThread.dbi.processData(
            "SELECT id FROM dbsbuffer_block")[0].fetchall()
        self.assertEqual(len(result), 12)

        # The last block should still be open
        self.assertEqual(len(dbsUtil.findOpenBlocks()), 1)

        try:
            dbsUploader.algorithm()
        except:
            raise
        finally:
            dbsUploader.close()

        # All files should now be available
        result = dbsApi.listFiles(dataset=datasetPath)
        self.assertEqual(len(result), 12)

        # The last block should now be closed
        self.assertEqual(len(dbsUtil.findOpenBlocks()), 0)

        result = myThread.dbi.processData(
            "SELECT status FROM dbsbuffer_block")[0].fetchall()
        for res in result:
            self.assertEqual(res.values()[0], 'InDBS')

        return
Example #14
    def testA_basicFunction(self):
        """
        _basicFunction_

        See if I can make the damn thing work.
        """
        return
        myThread = threading.currentThread()

        config = self.getConfig()

        dbsUploader = DBSUploadPoller(config = config)
        dbsUtil     = DBSBufferUtil()
        dbsApi      = DbsApi(url = config.DBSUpload.dbsUrl)

        # This should do nothing
        # Just making sure we don't crash
        try:
            dbsUploader.algorithm()
        except:
            dbsUploader.close()
            raise


        name = "ThisIsATest_%s" % (makeUUID())
        tier = "RECO"
        nFiles = 12
        files = self.getFiles(name = name, tier = tier, nFiles = nFiles)
        datasetPath = '/%s/%s/%s' % (name, name, tier)
        shortPath   = '/%s/%s' % (name, name)


        try:
            dbsUploader.algorithm()
        except:
            dbsUploader.close()
            raise

        time.sleep(3)

        # Now look in DBS
        try:
            result = dbsApi.listPrimaryDatasets(dataset = name)
            self.assertEqual(len(result), 1)
            self.assertEqual(result[0]['primary_ds_name'], name)
            result = dbsApi.listDatasets(dataset = datasetPath, detail = True,
                                         dataset_access_type = 'PROCESSING')
            self.assertEqual(len(result), 1)
            self.assertEqual(result[0]['data_tier_name'], u'RECO')
            self.assertEqual(result[0]['processing_version'], u'V0')
            self.assertEqual(result[0]['acquisition_era_name'], u"DBS3TEST")
            result = dbsApi.listFiles(dataset=datasetPath)
            self.assertEqual(len(result), 11)
        except:
            dbsUploader.close()
            raise

        # All the blocks except for the last one should
        # now be there
        result = myThread.dbi.processData("SELECT id FROM dbsbuffer_block")[0].fetchall()
        self.assertEqual(len(result), 12)

        # The last block should still be open
        self.assertEqual(len(dbsUtil.findOpenBlocks()), 1)

        try:
            dbsUploader.algorithm()
        except:
            raise
        finally:
            dbsUploader.close()

        # All files should now be available
        result = dbsApi.listFiles(dataset=datasetPath)
        self.assertEqual(len(result), 12)

        # The last block should now be closed
        self.assertEqual(len(dbsUtil.findOpenBlocks()), 0)

        result = myThread.dbi.processData("SELECT status FROM dbsbuffer_block")[0].fetchall()
        for res in result:
            self.assertEqual(res.values()[0], 'InDBS')

        return