Code example #1
    def testProperties(self):
        """
        _testProperties_

        Test added tags that use DBSBuffer to transfer from workload to DBS
        """

        testFileA = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
        testFileA.setAlgorithm(appName="cmsRun",
                               appVer="CMSSW_2_1_8",
                               appFam="RECO",
                               psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileA.setValidStatus(validStatus="VALID")
        testFileA.setProcessingVer(ver="ProcVer")
        testFileA.setAcquisitionEra(era="AcqEra")
        testFileA.setGlobalTag(globalTag="GlobalTag")
        testFileA.setDatasetParent(datasetParent="Parent")
        testFileA.create()

        # There are no accessors for these things because load is never called
        action = self.daoFactory2(classname="LoadInfoFromDAS")
        das = action.execute(ids=[1])[0]
        self.assertEqual(das['Parent'], 'Parent')
        self.assertEqual(das['GlobalTag'], 'GlobalTag')
        self.assertEqual(das['ValidStatus'], 'VALID')

        return
Code example #2
File: DBSBufferFile_t.py    Project: zhiwenuil/WMCore
    def testProperties(self):
        """
        _testProperties_

        Test added tags that use DBSBuffer to transfer from workload to DBS
        """

        testFileA = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024, events = 10)
        testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
        testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileA.setValidStatus(validStatus = "VALID")
        testFileA.setProcessingVer(ver = "ProcVer")
        testFileA.setAcquisitionEra(era = "AcqEra")
        testFileA.setGlobalTag(globalTag = "GlobalTag")
        testFileA.setDatasetParent(datasetParent = "Parent")
        testFileA.create()

        # There are no accessors for these things because load is never called
        action = self.daoFactory2(classname = "LoadInfoFromDAS")
        das    = action.execute(ids = [1])[0]
        self.assertEqual(das['Parent'], 'Parent')
        self.assertEqual(das['GlobalTag'], 'GlobalTag')
        self.assertEqual(das['ValidStatus'], 'VALID')
Code example #3
    def prepareDBSFiles(self):
        """
        _prepareDBSFiles_

        Retrieve the information from the JSON input data
        and create DBSFile objects that can be registered in the
        database.
        """
        timestamp = time.strftime('%m%d%y_%H%M%S')
        for fileEntry in self.inputData:
            # Get all the info out of a standard named dataset
            datasetInfo = str(fileEntry["dataset"])
            tokens = datasetInfo.split('/')
            primDs = tokens[1]
            procDs = tokens[2]
            dataTier = tokens[3]
            procDsTokens = procDs.split('-')
            acqEra = procDsTokens[0]
            procVer = procDsTokens[-1][1:]

            ckSumInfo = fileEntry["checksums"]
            for entry in ckSumInfo:
                ckSumInfo[entry] = str(ckSumInfo[entry])

            # Build the basic dbsBuffer file
            dbsFile = DBSBufferFile(lfn = str(fileEntry["lfn"]),
                                    size = int(fileEntry.get("size", 0)),
                                    events = int(fileEntry.get("events", 0)),
                                    checksums = ckSumInfo,
                                    status = "NOTUPLOADED")
            dbsFile.setAlgorithm(appName = "cmsRun",
                                 appVer = str(fileEntry.get("cmssw", "LEGACY")),
                                 appFam = "Legacy",
                                 psetHash = "GIBBERISH",
                                 configContent = "None;;None;;None")

            dbsFile.setDatasetPath("/%s/%s/%s" % (primDs,
                                                  procDs,
                                                  dataTier))
            dbsFile.setValidStatus(validStatus = "PRODUCTION")
            dbsFile.setProcessingVer(ver = procVer)
            dbsFile.setAcquisitionEra(era = acqEra)
            dbsFile.setGlobalTag(globalTag = str(fileEntry.get('globalTag', "LEGACY")))

            # Build a representative task name
            dbsFile['task'] = '/LegacyInsertionTask_%s/Insertion' % timestamp

            # Get the runs and lumis
            runsAndLumis = fileEntry.get("runsAndLumis", {})
            for run in runsAndLumis:
                newRun = Run(runNumber = int(run))
                newRun.extend([int(x) for x in runsAndLumis[run]])
                dbsFile.addRun(newRun)

            # Complete the file information with the location and queue it
            dbsFile.setLocation(se = str(fileEntry["location"]), immediateSave = False)
            self.dbsFilesToCreate.append(dbsFile)
        self.inputData = None
        return
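
For reference, below is a minimal sketch of one entry in the JSON input data that prepareDBSFiles above expects, built only from the keys the method actually reads; every concrete value (LFN, dataset name, checksum, run number, site name) is a hypothetical placeholder, not taken from the original project.

    # Hypothetical input entry for prepareDBSFiles (placeholder values, assumed shape):
    inputEntry = {
        "lfn": "/store/example/file.root",             # placeholder LFN
        "dataset": "/PrimaryDS/AcqEra-Filter-v2/RECO", # /<primary>/<processed>/<tier>
        "size": 1024,
        "events": 10,
        "checksums": {"adler32": "abc123"},
        "cmssw": "CMSSW_2_1_8",
        "globalTag": "GlobalTag",
        "runsAndLumis": {"1": [1, 2, 3]},              # run number -> list of lumi sections
        "location": "T1_Example_Buffer"                # storage element passed to setLocation(se=...)
    }
    # With this dataset string the parsing above yields:
    #   primDs = "PrimaryDS", procDs = "AcqEra-Filter-v2", dataTier = "RECO",
    #   acqEra = "AcqEra", procVer = "2"   (the leading 'v' is stripped)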
Code example #4
    def addFileToDBS(self, jobReportFile, task, errorDataset=False):
        """
        _addFileToDBS_

        Add a file that was output from a job to the DBS buffer.
        """
        datasetInfo = jobReportFile["dataset"]

        dbsFile = DBSBufferFile(lfn=jobReportFile["lfn"],
                                size=jobReportFile["size"],
                                events=jobReportFile["events"],
                                checksums=jobReportFile["checksums"],
                                status="NOTUPLOADED")
        dbsFile.setAlgorithm(appName=datasetInfo["applicationName"],
                             appVer=datasetInfo["applicationVersion"],
                             appFam=jobReportFile["module_label"],
                             psetHash="GIBBERISH",
                             configContent=jobReportFile.get('configURL'))

        if errorDataset:
            dbsFile.setDatasetPath(
                "/%s/%s/%s" %
                (datasetInfo["primaryDataset"] + "-Error",
                 datasetInfo["processedDataset"], datasetInfo["dataTier"]))
        else:
            dbsFile.setDatasetPath(
                "/%s/%s/%s" %
                (datasetInfo["primaryDataset"],
                 datasetInfo["processedDataset"], datasetInfo["dataTier"]))

        dbsFile.setValidStatus(
            validStatus=jobReportFile.get("validStatus", None))
        dbsFile.setProcessingVer(ver=jobReportFile.get('processingVer', None))
        dbsFile.setAcquisitionEra(
            era=jobReportFile.get('acquisitionEra', None))
        dbsFile.setGlobalTag(globalTag=jobReportFile.get('globalTag', None))
        #TODO need to find where to get the prep id
        dbsFile.setPrepID(prep_id=jobReportFile.get('prep_id', None))
        dbsFile['task'] = task

        for run in jobReportFile["runs"]:
            newRun = Run(runNumber=run.run)
            newRun.extend(run.lumis)
            dbsFile.addRun(newRun)

        dbsFile.setLocation(pnn=list(jobReportFile["locations"])[0],
                            immediateSave=False)
        self.dbsFilesToCreate.append(dbsFile)
        return
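
As a quick illustration of the errorDataset branch in addFileToDBS above, the only behavioral difference is the "-Error" suffix appended to the primary dataset name; the dataset values below are placeholders borrowed from the test examples, not real accountant output.

    # Hypothetical datasetInfo (placeholder values) and the resulting dataset paths:
    datasetInfo = {"primaryDataset": "Cosmics",
                   "processedDataset": "CRUZET09-PromptReco-v1",
                   "dataTier": "RECO"}
    # errorDataset=False -> "/Cosmics/CRUZET09-PromptReco-v1/RECO"
    # errorDataset=True  -> "/Cosmics-Error/CRUZET09-PromptReco-v1/RECO"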
Code example #5
File: AccountantWorker.py    Project: huohuo21/WMCore
    def addFileToDBS(self, jobReportFile, task, errorDataset = False):
        """
        _addFileToDBS_

        Add a file that was output from a job to the DBS buffer.
        """
        datasetInfo = jobReportFile["dataset"]

        dbsFile = DBSBufferFile(lfn = jobReportFile["lfn"],
                                size = jobReportFile["size"],
                                events = jobReportFile["events"],
                                checksums = jobReportFile["checksums"],
                                status = "NOTUPLOADED")
        dbsFile.setAlgorithm(appName = datasetInfo["applicationName"],
                             appVer = datasetInfo["applicationVersion"],
                             appFam = jobReportFile["module_label"],
                             psetHash = "GIBBERISH",
                             configContent = jobReportFile.get('configURL'))

        if errorDataset:
            dbsFile.setDatasetPath("/%s/%s/%s" % (datasetInfo["primaryDataset"] + "-Error",
                                                  datasetInfo["processedDataset"],
                                                  datasetInfo["dataTier"]))
        else:
            dbsFile.setDatasetPath("/%s/%s/%s" % (datasetInfo["primaryDataset"],
                                                  datasetInfo["processedDataset"],
                                                  datasetInfo["dataTier"]))

        dbsFile.setValidStatus(validStatus = jobReportFile.get("validStatus", None))
        dbsFile.setProcessingVer(ver = jobReportFile.get('processingVer', None))
        dbsFile.setAcquisitionEra(era = jobReportFile.get('acquisitionEra', None))
        dbsFile.setGlobalTag(globalTag = jobReportFile.get('globalTag', None))
        #TODO need to find where to get the prep id
        dbsFile.setPrepID(prep_id = jobReportFile.get('prep_id', None))
        dbsFile['task'] = task

        for run in jobReportFile["runs"]:
            newRun = Run(runNumber = run.run)
            newRun.extend(run.lumis)
            dbsFile.addRun(newRun)

        dbsFile.setLocation(pnn = list(jobReportFile["locations"])[0], immediateSave = False)
        self.dbsFilesToCreate.append(dbsFile)
        return
Code example #6
    def testProperties(self):
        """
        _testProperties_

        Test added tags that use DBSBuffer to transfer from workload to DBS
        """

        testFileA = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
        testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                               appFam="RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
        testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileA.setValidStatus(validStatus="VALID")
        testFileA.setProcessingVer(ver="ProcVer")
        testFileA.setAcquisitionEra(era="AcqEra")
        testFileA.setGlobalTag(globalTag="GlobalTag")
        testFileA.setDatasetParent(datasetParent="Parent")
        testFileA.create()

        return
Code example #7
    def testProperties(self):
        """
        _testProperties_

        Test added tags that use DBSBuffer to transfer from workload to DBS
        """

        testFileA = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024, events = 10)
        testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
        testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
        testFileA.setValidStatus(validStatus = "VALID")
        testFileA.setProcessingVer(ver = "ProcVer")
        testFileA.setAcquisitionEra(era = "AcqEra")
        testFileA.setGlobalTag(globalTag = "GlobalTag")
        testFileA.setDatasetParent(datasetParent = "Parent")
        testFileA.create()

        return