def testProperties(self):
    """
    _testProperties_

    Test added tags that use DBSBuffer to transfer from workload to DBS
    """
    # Build a buffer file carrying every DBS-bound property.
    bufferFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    bufferFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                            appFam="RECO", psetHash="GIBBERISH",
                            configContent="MOREGIBBERISH")
    bufferFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    bufferFile.setValidStatus(validStatus="VALID")
    bufferFile.setProcessingVer(ver="ProcVer")
    bufferFile.setAcquisitionEra(era="AcqEra")
    bufferFile.setGlobalTag(globalTag="GlobalTag")
    bufferFile.setDatasetParent(datasetParent="Parent")
    bufferFile.create()

    # There are no accessors for these things because load is never called
    action = self.daoFactory2(classname="LoadInfoFromDAS")
    das = action.execute(ids=[1])[0]
    for dasKey, expected in (('Parent', 'Parent'),
                             ('GlobalTag', 'GlobalTag'),
                             ('ValidStatus', 'VALID')):
        self.assertEqual(das[dasKey], expected)
def testProperties(self):
    """
    _testProperties_

    Test added tags that use DBSBuffer to transfer from workload to DBS
    """
    # Create a file and attach all the workload-level DBS tags to it.
    dbsFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    dbsFile.setAlgorithm(appName="cmsRun",
                         appVer="CMSSW_2_1_8",
                         appFam="RECO",
                         psetHash="GIBBERISH",
                         configContent="MOREGIBBERISH")
    dbsFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    # Tags transferred from the workload into DBSBuffer.
    dbsFile.setValidStatus(validStatus="VALID")
    dbsFile.setProcessingVer(ver="ProcVer")
    dbsFile.setAcquisitionEra(era="AcqEra")
    dbsFile.setGlobalTag(globalTag="GlobalTag")
    dbsFile.setDatasetParent(datasetParent="Parent")

    # Persisting must succeed with all the extra tags in place.
    dbsFile.create()
    return
def getFiles(self, name, tier='RECO', nFiles=12, site="malpaquet", nLumis=1):
    """
    _getFiles_

    Create some quick dummy test files: nFiles parent files in the
    /name/name/tier dataset, plus one child file per parent in a second
    dataset (/name/name_2/RECO), each child parented to its corresponding
    parent file.

    :param name: base name used for LFNs, app name and dataset path
    :param tier: data tier of the parent dataset
    :param nFiles: number of parent files to create
    :param site: location assigned to every file
    :param nLumis: number of lumis per file
    :return: list of the parent DBSBufferFile objects (children are
             created in the database but not returned)
    """
    files = []
    for f in range(nFiles):
        testFile = DBSBufferFile(
            lfn='/data/store/random/random/RANDOM/test/0/%s-%s-%i.root' % (name, site, f),
            size=1024, events=20, checksums={'cksum': 1})
        testFile.setAlgorithm(appName=name, appVer="CMSSW_3_1_1",
                              appFam="RECO", psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/%s/%s/%s" % (name, name, tier))
        # Offset lumis per file so lumi numbers never collide between files.
        lumis = [(f * 100000) + i for i in range(nLumis)]
        testFile.addRun(Run(1, *lumis))
        testFile.setAcquisitionEra(name.split('-')[0])
        testFile.setProcessingVer("0")
        testFile.setGlobalTag("Weird")
        testFile.create()
        testFile.setLocation(site)
        files.append(testFile)

    # Child LFN indices start at 1 (the original incremented its manual
    # counter before first use); enumerate(start=1) preserves that numbering.
    for count, parent in enumerate(files, start=1):
        testFileChild = DBSBufferFile(
            lfn='/data/store/random/random/RANDOM/test/0/%s-%s-%i-child.root' % (name, site, count),
            size=1024, events=10, checksums={'cksum': 1})
        testFileChild.setAlgorithm(appName=name, appVer="CMSSW_3_1_1",
                                   appFam="RECO", psetHash="GIBBERISH",
                                   configContent="MOREGIBBERISH")
        testFileChild.setDatasetPath("/%s/%s_2/RECO" % (name, name))
        testFileChild.addRun(Run(1, *[45]))
        testFileChild.create()
        testFileChild.setLocation(site)
        testFileChild.addParents([parent['lfn']])
    return files
def prepareDBSFiles(self):
    """
    _prepareDBSFiles_

    Retrieve the information from the JSON input data and
    create DBSFile objects that can be registered in the database.
    """
    timestamp = time.strftime('%m%d%y_%H%M%S')
    for fileEntry in self.inputData:
        # Decompose the standard dataset path: /<primary>/<processed>/<tier>
        datasetPath = str(fileEntry["dataset"])
        pathParts = datasetPath.split('/')
        primDs = pathParts[1]
        procDs = pathParts[2]
        dataTier = pathParts[3]

        # Processed dataset name encodes the acquisition era (first token)
        # and the processing version (last token, minus its leading char).
        procDsParts = procDs.split('-')
        acqEra = procDsParts[0]
        procVer = procDsParts[-1][1:]

        # Normalize all checksum values to strings, in place.
        ckSumInfo = fileEntry["checksums"]
        for ckType in ckSumInfo:
            ckSumInfo[ckType] = str(ckSumInfo[ckType])

        # Build the basic dbsBuffer file
        dbsFile = DBSBufferFile(lfn=str(fileEntry["lfn"]),
                                size=int(fileEntry.get("size", 0)),
                                events=int(fileEntry.get("events", 0)),
                                checksums=ckSumInfo,
                                status="NOTUPLOADED")
        dbsFile.setAlgorithm(appName="cmsRun",
                             appVer=str(fileEntry.get("cmssw", "LEGACY")),
                             appFam="Legacy",
                             psetHash="GIBBERISH",
                             configContent="None;;None;;None")
        dbsFile.setDatasetPath("/%s/%s/%s" % (primDs, procDs, dataTier))
        dbsFile.setValidStatus(validStatus="PRODUCTION")
        dbsFile.setProcessingVer(ver=procVer)
        dbsFile.setAcquisitionEra(era=acqEra)
        dbsFile.setGlobalTag(globalTag=str(fileEntry.get('globalTag', "LEGACY")))

        # Build a representative task name
        dbsFile['task'] = '/LegacyInsertionTask_%s/Insertion' % timestamp

        # Attach every run with its lumi list.
        for runNumber, lumiList in fileEntry.get("runsAndLumis", {}).items():
            newRun = Run(runNumber=int(runNumber))
            newRun.extend([int(lumi) for lumi in lumiList])
            dbsFile.addRun(newRun)

        # Complete the file information with the location and queue it
        dbsFile.setLocation(se=str(fileEntry["location"]), immediateSave=False)
        self.dbsFilesToCreate.append(dbsFile)

    # Input payload is consumed; drop it.
    self.inputData = None
    return
def createFilesWithChildren(self, moreParentFiles, acqEra):
    """
    _createFilesWithChildren_

    Create several parentless files and then create child files.

    Creates 10 parentless RAW files and 5 RECO child files; each child is
    parented to two of the RAW files and two entries of moreParentFiles.

    :param moreParentFiles: list of at least 10 file objects (with 'lfn')
                            used as additional parents
    :param acqEra: acquisition era used in LFNs and dataset paths
    :return: tuple (parentFiles, childFiles)
    """
    parentFiles = []
    childFiles = []

    baseLFN = "/store/data/%s/Cosmics/RAW/v1/000/143/316/" % (acqEra)
    for i in range(10):
        testFile = DBSBufferFile(lfn=baseLFN + makeUUID() + ".root",
                                 size=1024, events=20,
                                 checksums={"cksum": 1})
        testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_3_1_1",
                              appFam="RAW", psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/%s-v1/RAW" % (acqEra))
        # 10 lumis per file, disjoint across files.
        lumis = [(i * 10) + j for j in range(10)]
        testFile.addRun(Run(143316, *lumis))
        testFile.setAcquisitionEra(acqEra)
        testFile.setProcessingVer("1")
        testFile.setGlobalTag("START54::All")
        testFile.create()
        testFile.setLocation("malpaquet")
        parentFiles.append(testFile)

    baseLFN = "/store/data/%s/Cosmics/RECO/v1/000/143/316/" % (acqEra)
    for i in range(5):
        testFile = DBSBufferFile(lfn=baseLFN + makeUUID() + ".root",
                                 size=1024, events=20,
                                 checksums={"cksum": 1})
        testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_3_1_1",
                              appFam="RECO", psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/%s-v1/RECO" % (acqEra))
        # 20 lumis per child, disjoint across children.
        lumis = [(i * 20) + j for j in range(20)]
        testFile.addRun(Run(143316, *lumis))
        testFile.setAcquisitionEra(acqEra)
        testFile.setProcessingVer("1")
        testFile.setGlobalTag("START54::All")
        testFile.create()
        testFile.setLocation("malpaquet")
        # Each child gets two freshly created parents and two external ones.
        testFile.addParents([parentFiles[i * 2]["lfn"],
                             parentFiles[i * 2 + 1]["lfn"]])
        testFile.addParents([moreParentFiles[i * 2]["lfn"],
                             moreParentFiles[i * 2 + 1]["lfn"]])
        childFiles.append(testFile)

    return (parentFiles, childFiles)
def getFiles(self, name, tier, nFiles=12, site="malpaquet", nLumis=1):
    """
    _getFiles_

    Create some dummy test files.
    """
    files = []
    # name is expected as "<acqEra>-<procVer>".
    (acqEra, procVer) = name.split("-")

    recoLFNBase = "/store/data/%s/Cosmics/RECO/%s/000/143/316/" % (acqEra, procVer)
    for fileNum in range(nFiles):
        recoFile = DBSBufferFile(lfn=recoLFNBase + makeUUID() + ".root",
                                 size=1024,
                                 events=20,
                                 checksums={"cksum": 1})
        recoFile.setAlgorithm(appName="cmsRun",
                              appVer="CMSSW_3_1_1",
                              appFam="RECO",
                              psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        recoFile.setDatasetPath("/Cosmics/%s-%s/RECO" % (acqEra, procVer))
        # Disjoint lumi ranges per file within run 1.
        recoFile.addRun(Run(1, *[(fileNum * 1000000) + lumi
                                 for lumi in range(nLumis)]))
        recoFile.setAcquisitionEra(acqEra)
        recoFile.setProcessingVer("0")
        recoFile.setGlobalTag("START54::All")
        recoFile.create()
        recoFile.setLocation(site)
        files.append(recoFile)

    # One RAW-RECO child parented to every file created above.
    childLFNBase = "/store/data/%s/Cosmics/RAW-RECO/%s/000/143/316/" % (acqEra, procVer)
    childFile = DBSBufferFile(lfn=childLFNBase + makeUUID() + ".root",
                              size=1024,
                              events=10,
                              checksums={'cksum': 1})
    childFile.setAlgorithm(appName="cmsRun",
                           appVer="CMSSW_3_1_1",
                           appFam="RAW-RECO",
                           psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    childFile.setDatasetPath("/Cosmics/%s-%s/RAW-RECO" % (acqEra, procVer))
    childFile.addRun(Run(1, *[45]))
    childFile.create()
    childFile.setLocation(site)
    childFile.addParents([parent['lfn'] for parent in files])

    return files
def addFileToDBS(self, jobReportFile, task, errorDataset=False):
    """
    _addFileToDBS_

    Add a file that was output from a job to the DBS buffer.
    """
    dsInfo = jobReportFile["dataset"]

    bufferFile = DBSBufferFile(lfn=jobReportFile["lfn"],
                               size=jobReportFile["size"],
                               events=jobReportFile["events"],
                               checksums=jobReportFile["checksums"],
                               status="NOTUPLOADED")
    bufferFile.setAlgorithm(appName=dsInfo["applicationName"],
                            appVer=dsInfo["applicationVersion"],
                            appFam=jobReportFile["module_label"],
                            psetHash="GIBBERISH",
                            configContent=jobReportFile.get('configURL'))

    # Error datasets get a "-Error" suffix on the primary dataset name.
    primaryName = dsInfo["primaryDataset"]
    if errorDataset:
        primaryName += "-Error"
    bufferFile.setDatasetPath("/%s/%s/%s" % (primaryName,
                                             dsInfo["processedDataset"],
                                             dsInfo["dataTier"]))

    bufferFile.setValidStatus(validStatus=jobReportFile.get("validStatus", None))
    bufferFile.setProcessingVer(ver=jobReportFile.get('processingVer', None))
    bufferFile.setAcquisitionEra(era=jobReportFile.get('acquisitionEra', None))
    bufferFile.setGlobalTag(globalTag=jobReportFile.get('globalTag', None))
    #TODO need to find where to get the prep id
    bufferFile.setPrepID(prep_id=jobReportFile.get('prep_id', None))
    bufferFile['task'] = task

    for run in jobReportFile["runs"]:
        newRun = Run(runNumber=run.run)
        newRun.extend(run.lumis)
        bufferFile.addRun(newRun)

    bufferFile.setLocation(pnn=list(jobReportFile["locations"])[0],
                           immediateSave=False)
    self.dbsFilesToCreate.append(bufferFile)
    return
def createParentFiles(self, acqEra, nFiles=10,
                      workflowName='TestWorkload',
                      taskPath='/TestWorkload/DataTest'):
    """
    _createParentFiles_

    Create several parentless files in DBSBuffer.  This simulates raw files
    in the T0.
    """
    workflowId = self.injectWorkflow(workflowName=workflowName,
                                     taskPath=taskPath)
    parentlessFiles = []

    baseLFN = "/store/data/%s/Cosmics/RAW/v1/000/143/316/" % (acqEra)
    for fileNum in range(nFiles):
        rawFile = DBSBufferFile(lfn=baseLFN + makeUUID() + ".root",
                                size=1024, events=20,
                                checksums={"cksum": 1},
                                workflowId=workflowId)
        rawFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_3_1_1",
                             appFam="RAW", psetHash="GIBBERISH",
                             configContent="MOREGIBBERISH")
        rawFile.setDatasetPath("/Cosmics/%s-v1/RAW" % (acqEra))

        # Block-closure thresholds set high enough that they never trigger.
        for closeSetting in ('block_close_max_wait_time',
                             'block_close_max_events',
                             'block_close_max_size',
                             'block_close_max_files'):
            rawFile[closeSetting] = 1000000

        # 10 disjoint lumis per file within run 143316.
        rawFile.addRun(Run(143316, *[(fileNum * 10) + j for j in range(10)]))
        rawFile.setAcquisitionEra(acqEra)
        rawFile.setProcessingVer("1")
        rawFile.setGlobalTag("START54::All")
        rawFile.create()
        rawFile.setLocation("malpaquet")
        parentlessFiles.append(rawFile)

    return parentlessFiles
def addFileToDBS(self, jobReportFile, task, errorDataset=False):
    """
    _addFileToDBS_

    Add a file that was output from a job to the DBS buffer.
    """
    datasetInfo = jobReportFile["dataset"]

    dbsFile = DBSBufferFile(lfn=jobReportFile["lfn"],
                            size=jobReportFile["size"],
                            events=jobReportFile["events"],
                            checksums=jobReportFile["checksums"],
                            status="NOTUPLOADED")
    dbsFile.setAlgorithm(appName=datasetInfo["applicationName"],
                         appVer=datasetInfo["applicationVersion"],
                         appFam=jobReportFile["module_label"],
                         psetHash="GIBBERISH",
                         configContent=jobReportFile.get('configURL'))

    # For error datasets, tag the primary dataset name with "-Error".
    primDs = (datasetInfo["primaryDataset"] + "-Error" if errorDataset
              else datasetInfo["primaryDataset"])
    dbsFile.setDatasetPath("/%s/%s/%s" % (primDs,
                                          datasetInfo["processedDataset"],
                                          datasetInfo["dataTier"]))

    dbsFile.setValidStatus(validStatus=jobReportFile.get("validStatus", None))
    dbsFile.setProcessingVer(ver=jobReportFile.get('processingVer', None))
    dbsFile.setAcquisitionEra(era=jobReportFile.get('acquisitionEra', None))
    dbsFile.setGlobalTag(globalTag=jobReportFile.get('globalTag', None))
    #TODO need to find where to get the prep id
    dbsFile.setPrepID(prep_id=jobReportFile.get('prep_id', None))
    dbsFile['task'] = task

    for run in jobReportFile["runs"]:
        newRun = Run(runNumber=run.run)
        newRun.extend(run.lumis)
        dbsFile.addRun(newRun)

    dbsFile.setLocation(pnn=list(jobReportFile["locations"])[0],
                        immediateSave=False)
    self.dbsFilesToCreate.append(dbsFile)
    return
def getFiles(self, name, tier='RECO', nFiles=12, site="malpaquet", nLumis=1):
    """
    _getFiles_

    Create some quick dummy test files: nFiles parent files in the
    /name/name/tier dataset, plus one child per parent in /name/name_2/RECO,
    each child parented to its corresponding parent file.

    :param name: base name used for LFNs, app name and dataset path
    :param tier: data tier of the parent dataset
    :param nFiles: number of parent files to create
    :param site: location assigned to every file
    :param nLumis: number of lumis per file
    :return: list of the parent DBSBufferFile objects
    """
    files = []
    for f in range(nFiles):
        testFile = DBSBufferFile(
            lfn='/data/store/random/random/RANDOM/test/0/%s-%s-%i.root' % (name, site, f),
            size=1024, events=20, checksums={'cksum': 1})
        testFile.setAlgorithm(appName=name, appVer="CMSSW_3_1_1",
                              appFam="RECO", psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/%s/%s/%s" % (name, name, tier))
        # Offset lumis per file so lumi numbers never collide between files.
        lumis = [(f * 100000) + i for i in range(nLumis)]
        testFile.addRun(Run(1, *lumis))
        testFile.setAcquisitionEra(name.split('-')[0])
        testFile.setProcessingVer("0")
        testFile.setGlobalTag("Weird")
        testFile.create()
        testFile.setLocation(site)
        files.append(testFile)

    # Child LFN indices start at 1 (the original incremented its manual
    # counter before first use); enumerate(start=1) preserves that numbering.
    for count, parent in enumerate(files, start=1):
        testFileChild = DBSBufferFile(
            lfn='/data/store/random/random/RANDOM/test/0/%s-%s-%i-child.root' % (name, site, count),
            size=1024, events=10, checksums={'cksum': 1})
        testFileChild.setAlgorithm(appName=name, appVer="CMSSW_3_1_1",
                                   appFam="RECO", psetHash="GIBBERISH",
                                   configContent="MOREGIBBERISH")
        testFileChild.setDatasetPath("/%s/%s_2/RECO" % (name, name))
        testFileChild.addRun(Run(1, *[45]))
        testFileChild.create()
        testFileChild.setLocation(site)
        testFileChild.addParents([parent['lfn']])
    return files
def createParentFiles(self, acqEra, nFiles=10,
                      workflowName="TestWorkload",
                      taskPath="/TestWorkload/DataTest"):
    """
    _createParentFiles_

    Create several parentless files in DBSBuffer.  This simulates raw files
    in the T0.
    """
    workflowId = self.injectWorkflow(workflowName=workflowName,
                                     taskPath=taskPath)
    rawLFNBase = "/store/data/%s/Cosmics/RAW/v1/000/143/316/" % (acqEra)

    parentlessFiles = []
    for index in range(nFiles):
        parentFile = DBSBufferFile(lfn=rawLFNBase + makeUUID() + ".root",
                                   size=1024,
                                   events=20,
                                   checksums={"cksum": 1},
                                   workflowId=workflowId)
        parentFile.setAlgorithm(appName="cmsRun",
                                appVer="CMSSW_3_1_1",
                                appFam="RAW",
                                psetHash="GIBBERISH",
                                configContent="MOREGIBBERISH")
        parentFile.setDatasetPath("/Cosmics/%s-v1/RAW" % (acqEra))

        # Block-closure thresholds are set so high they never fire in tests.
        parentFile["block_close_max_wait_time"] = 1000000
        parentFile["block_close_max_events"] = 1000000
        parentFile["block_close_max_size"] = 1000000
        parentFile["block_close_max_files"] = 1000000

        # Ten disjoint lumis per file in run 143316.
        parentFile.addRun(Run(143316,
                              *[(index * 10) + offset for offset in range(10)]))
        parentFile.setAcquisitionEra(acqEra)
        parentFile.setProcessingVer("1")
        parentFile.setGlobalTag("START54::All")
        parentFile.create()
        parentFile.setLocation("malpaquet")
        parentlessFiles.append(parentFile)

    return parentlessFiles
def testProperties(self):
    """
    _testProperties_

    Test added tags that use DBSBuffer to transfer from workload to DBS
    """
    # File under test, with every workload-to-DBS tag set before create().
    fileUnderTest = DBSBufferFile(lfn="/this/is/a/lfn",
                                  size=1024,
                                  events=10)
    fileUnderTest.setAlgorithm(appName="cmsRun",
                               appVer="CMSSW_2_1_8",
                               appFam="RECO",
                               psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
    fileUnderTest.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    fileUnderTest.setValidStatus(validStatus="VALID")
    fileUnderTest.setProcessingVer(ver="ProcVer")
    fileUnderTest.setAcquisitionEra(era="AcqEra")
    fileUnderTest.setGlobalTag(globalTag="GlobalTag")
    fileUnderTest.setDatasetParent(datasetParent="Parent")
    fileUnderTest.create()
    return
def getFiles(self, name, tier, nFiles=12, site="malpaquet", nLumis=1):
    """
    _getFiles_

    Create some dummy test files.

    Creates nFiles RECO files plus a single RAW-RECO child parented to all
    of them.  The name argument must be of the form "<acqEra>-<procVer>".

    :param name: "<acqEra>-<procVer>" string used to build LFNs and paths
    :param tier: unused here; kept for signature compatibility with callers
    :param nFiles: number of RECO files to create
    :param site: location assigned to every file
    :param nLumis: number of lumis per RECO file
    :return: list of the RECO DBSBufferFile objects
    """
    files = []
    (acqEra, procVer) = name.split("-")
    baseLFN = "/store/data/%s/Cosmics/RECO/%s/000/143/316/" % (acqEra, procVer)
    for f in range(nFiles):
        testFile = DBSBufferFile(lfn=baseLFN + makeUUID() + ".root",
                                 size=1024, events=20,
                                 checksums={"cksum": 1})
        testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_3_1_1",
                              appFam="RECO", psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/%s-%s/RECO" % (acqEra, procVer))
        # Disjoint lumi ranges per file within run 1.
        lumis = [(f * 1000000) + i for i in range(nLumis)]
        testFile.addRun(Run(1, *lumis))
        testFile.setAcquisitionEra(acqEra)
        testFile.setProcessingVer("0")
        testFile.setGlobalTag("START54::All")
        testFile.create()
        testFile.setLocation(site)
        files.append(testFile)

    # Single RAW-RECO child parented to every RECO file above.
    baseLFN = "/store/data/%s/Cosmics/RAW-RECO/%s/000/143/316/" % (acqEra, procVer)
    testFileChild = DBSBufferFile(lfn=baseLFN + makeUUID() + ".root",
                                  size=1024, events=10,
                                  checksums={'cksum': 1})
    testFileChild.setAlgorithm(appName="cmsRun", appVer="CMSSW_3_1_1",
                               appFam="RAW-RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
    testFileChild.setDatasetPath("/Cosmics/%s-%s/RAW-RECO" % (acqEra, procVer))
    testFileChild.addRun(Run(1, *[45]))
    testFileChild.create()
    testFileChild.setLocation(site)
    testFileChild.addParents([x['lfn'] for x in files])
    return files
def testProperties(self):
    """
    _testProperties_

    Test added tags that use DBSBuffer to transfer from workload to DBS
    """
    taggedFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    taggedFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                            appFam="RECO", psetHash="GIBBERISH",
                            configContent="MOREGIBBERISH")
    taggedFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    # Workload-level tags transferred through DBSBuffer.
    taggedFile.setValidStatus(validStatus="VALID")
    taggedFile.setProcessingVer(ver="ProcVer")
    taggedFile.setAcquisitionEra(era="AcqEra")
    taggedFile.setGlobalTag(globalTag="GlobalTag")
    taggedFile.setDatasetParent(datasetParent="Parent")

    # create() must succeed with all tags present.
    taggedFile.create()
    return
def createFilesWithChildren(self, moreParentFiles, acqEra):
    """
    _createFilesWithChildren_

    Create several parentless files and then create child files.

    Creates 10 parentless RAW files and 5 RECO children; each child is
    parented to two of the RAW files and two entries of moreParentFiles.
    All files get block-closure thresholds set high enough to never fire.

    :param moreParentFiles: list of at least 10 file objects (with 'lfn')
                            used as additional parents
    :param acqEra: acquisition era used in LFNs and dataset paths
    :return: tuple (parentFiles, childFiles)
    """
    # Thresholds large enough that block closure never triggers in tests.
    blockCloseSettings = {'block_close_max_wait_time': 1000000,
                          'block_close_max_events': 1000000,
                          'block_close_max_size': 1000000,
                          'block_close_max_files': 1000000}

    parentFiles = []
    baseLFN = "/store/data/%s/Cosmics/RAW/v1/000/143/316/" % (acqEra)
    for i in range(10):
        testFile = DBSBufferFile(lfn=baseLFN + makeUUID() + ".root",
                                 size=1024, events=20,
                                 checksums={"cksum": 1})
        testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_3_1_1",
                              appFam="RAW", psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/%s-v1/RAW" % (acqEra))
        for settingKey, settingValue in blockCloseSettings.items():
            testFile[settingKey] = settingValue
        # 10 disjoint lumis per file.
        lumis = [(i * 10) + j for j in range(10)]
        testFile.addRun(Run(143316, *lumis))
        testFile.setAcquisitionEra(acqEra)
        testFile.setProcessingVer("1")
        testFile.setGlobalTag("START54::All")
        testFile.create()
        testFile.setLocation("malpaquet")
        parentFiles.append(testFile)

    childFiles = []
    baseLFN = "/store/data/%s/Cosmics/RECO/v1/000/143/316/" % (acqEra)
    for i in range(5):
        testFile = DBSBufferFile(lfn=baseLFN + makeUUID() + ".root",
                                 size=1024, events=20,
                                 checksums={"cksum": 1})
        testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_3_1_1",
                              appFam="RECO", psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/%s-v1/RECO" % (acqEra))
        for settingKey, settingValue in blockCloseSettings.items():
            testFile[settingKey] = settingValue
        # 20 disjoint lumis per child.
        lumis = [(i * 20) + j for j in range(20)]
        testFile.addRun(Run(143316, *lumis))
        testFile.setAcquisitionEra(acqEra)
        testFile.setProcessingVer("1")
        testFile.setGlobalTag("START54::All")
        testFile.create()
        testFile.setLocation("malpaquet")
        # Two freshly created parents plus two external parents per child.
        testFile.addParents([parentFiles[i * 2]["lfn"],
                             parentFiles[i * 2 + 1]["lfn"]])
        testFile.addParents([moreParentFiles[i * 2]["lfn"],
                             moreParentFiles[i * 2 + 1]["lfn"]])
        childFiles.append(testFile)

    return (parentFiles, childFiles)