def createAlgorithmForInsert(datasetInfo):
    """
    _createAlgorithmForInsert_

    Create an Algorithm instance that uses the minimal info needed
    to insert a file.

    :param datasetInfo: dict-like object providing ApplicationName,
        ApplicationVersion, ApplicationFamily and optionally
        PSetContent / PSetHash.
    :returns: a DbsAlgorithm instance (constructed only, not inserted).
    """
    exeName = datasetInfo['ApplicationName']
    appVersion = datasetInfo['ApplicationVersion']
    appFamily = datasetInfo["ApplicationFamily"]

    # Repacker jobs have no PSetContent/PSetHash, so fall back to
    # placeholder values.
    # NOTE(review): psetContent is computed but never passed on (the
    # parameter set below carries only the Hash) — confirm whether the
    # Content kwarg was meant to be supplied, as in createAlgorithm().
    psetContent = datasetInfo.get('PSetContent', None)
    if psetContent is None:
        psetContent = "PSET_CONTENT_NOT_AVAILABLE"

    psetHash = datasetInfo.get('PSetHash', None)
    if psetHash is None:
        psetHash = "NO_PSET_HASH"
    elif ";" in psetHash:
        # Strip the legacy "hash=<value>;..." form; no need for a fake
        # hash in the new schema.  Bug fix: the original tested
        # psetHash.find(";"), which is truthy for -1 ("not found") too.
        psetHash = psetHash.split(";")[0]
        psetHash = psetHash.replace("hash=", "")

    psetInstance = DbsQueryableParameterSet(Hash=psetHash)
    algorithmInstance = DbsAlgorithm(ExecutableName=exeName,
                                     ApplicationVersion=appVersion,
                                     ApplicationFamily=appFamily,
                                     ParameterSetID=psetInstance)
    return algorithmInstance
def createAlgorithm(apiRef, appName, appVer, appFam,
                    PSetHash=None, PSetContent=None):
    """
    _createAlgorithm_

    Create a new DBS Algorithm, explicitly passing in the arguments.
    We don't use configs.

    :param apiRef: DBS API handle; if truthy, the algorithm is inserted
        into DBS, otherwise it is only constructed and returned.
    :param appName: executable name.
    :param appVer: application version.
    :param appFam: application family.
    :param PSetHash: optional parameter-set hash; a placeholder is used
        when absent.
    :param PSetContent: optional parameter-set content.
    :returns: the DbsAlgorithm instance.
    :raises DBSInterfaceError: if the DBS insert fails.
    """
    # Take care of PSetHash
    if not PSetHash:
        PSetHash = "NO_PSET_HASH2"
    elif ";" in PSetHash:
        # Strip the legacy "hash=<value>;..." form; no need for a fake
        # hash in the new schema.  Bug fix: the original tested
        # PSetHash.find(";"), which is truthy for -1 ("not found") too.
        PSetHash = PSetHash.split(";")[0]
        PSetHash = PSetHash.replace("hash=", "")

    # dbsApi tries to base64-encode the value of PSetContent, which
    # blows up if it's None — substitute an empty string.
    if not PSetContent:
        PSetContent = ""

    psetInstance = DbsQueryableParameterSet(Hash=PSetHash,
                                            Content=PSetContent)
    algorithmInstance = DbsAlgorithm(ExecutableName=appName,
                                     ApplicationVersion=appVer,
                                     ApplicationFamily=appFam,
                                     ParameterSetID=psetInstance)
    if apiRef:
        try:
            apiRef.insertAlgorithm(algorithmInstance)
        except DbsException as ex:
            msg = "Error in DBSInterface.createAlgorithm(%s)\n" % appVer
            msg += formatEx(ex)
            logging.error(msg)
            raise DBSInterfaceError(msg)
    return algorithmInstance
# Run the insertPrimaryDataset test (pri built earlier) and log the header.
apiObj.run(pri,"", excep = True)
f.write("\n***********************insertPrimaryDataset API tests***************************")

# Re-point the unit-test harness at the insertAlgorithm API call.
apiObj = DbsUnitTestApi(api.insertAlgorithm,f)
apiObj.setVerboseLevel(opts.verbose)
f.write("\n\n***********************insertAlgorithm API tests***************************")

# Fully-populated algorithm: every parameter-set field supplied;
# the insert is expected to succeed (excep = False).
algo1 = DbsAlgorithm (ExecutableName="TestExe01",
                      ApplicationVersion= "TestVersion01" + mytime,
                      ApplicationFamily="AppFamily01",
                      ParameterSetID=DbsQueryableParameterSet(
                          Hash="001234565798685",
                          Name="MyFirstParam01",
                          Version="V001",
                          Type="test",
                          Annotation="This is test",
                          Content="int a= {}, b={c=1, d=33}, f={}, x, y, x"
                          )
                      )
apiObj.run(algo1, excep = False)

# Second algorithm: same hash/name but without Version/Type on the
# parameter set.  NOTE: this expression continues past the end of
# this view (the DbsAlgorithm call is closed later).
algo2 = DbsAlgorithm (ExecutableName="TestExe011",
                      ApplicationVersion= "TestVersion011" + mytime,
                      ApplicationFamily="AppFamily011",
                      ParameterSetID=DbsQueryableParameterSet(
                          Hash="001234565798685",
                          Name="MyFirstParam01",
                          Annotation="This is test",
                          Content="int a= {}, b={c=1, d=33}, f={}, x, y, x"
                          )
fileList = [] #Insert Primary apiObj = DbsUnitTestApi(api.insertPrimaryDataset, f) primary = 'StressTestPrimary' + mytime pri1 = DbsPrimaryDataset (Name = primary, Type='MC') apiObj.run(pri1, excep = False) #Insert Algorithm apiObj = DbsUnitTestApi(api.insertAlgorithm,f) algo1 = DbsAlgorithm (ExecutableName="StressTestExe01", ApplicationVersion= "StressTestVersion01" + mytime, ApplicationFamily="StressTestAppFamily01", ParameterSetID=DbsQueryableParameterSet(Hash="001234565798685", Name="StressTestParam01", Version="V001", Type="test", Annotation="This is a stress test param", Content="int a= {}, b={c=1, d=33}, f={}, x, y, x" ) ) apiObj.run(algo1, excep = False) #Insert Tier apiObj = DbsUnitTestApi(api.insertTier, f) tierName1 = "GEN" tierName2 = "SIM" apiObj.run(tierName1, excep = False) apiObj.run(tierName2, excep = False) tierList = [tierName1, tierName2]
def createAlgorithm(datasetInfo, configMetadata=None, apiRef=None):
    """
    _createAlgorithm_

    Create an algorithm assuming that datasetInfo is a
    ProdCommon.MCPayloads.DatasetInfo like dictionary.

    :param datasetInfo: dict with ApplicationName, ApplicationVersion,
        ApplicationFamily and optionally PSetContent / PSetHash.
    :param configMetadata: optional dict with name / version / Type /
        annotation keys; when given, a fully-populated parameter set
        is built, otherwise only the hash is recorded.
    :param apiRef: optional DBS API handle; when given, the algorithm
        is inserted via insertAlgorithm.
    :returns: the DbsAlgorithm instance.
    """
    exeName = datasetInfo['ApplicationName']
    appVersion = datasetInfo['ApplicationVersion']
    appFamily = datasetInfo["ApplicationFamily"]

    # Repacker jobs have no PSetContent/PSetHash; substitute placeholders.
    psetContent = datasetInfo.get('PSetContent', None)
    if psetContent is None:
        psetContent = "PSET_CONTENT_NOT_AVAILABLE"

    psetHash = datasetInfo.get('PSetHash', None)
    if psetHash is None:
        psetHash = "NO_PSET_HASH"
    elif ";" in psetHash:
        # Strip the legacy "hash=<value>;..." form; no need for a fake
        # hash in the new schema.  Bug fix: the original tested
        # psetHash.find(";"), which is truthy for -1 ("not found") too.
        psetHash = psetHash.split(";")[0]
        psetHash = psetHash.replace("hash=", "")

    if configMetadata is not None:
        # HACK: DBS imposes a 100 char limit on the cfg file name.
        cfgName = configMetadata['name']
        if len(cfgName) > 100:
            msg = ">>>>>>>>>>>>>>>>>>>>>>>>>>>>\n"
            msg += "TEST HACK USED FOR Config File Name"
            msg += ">>>>>>>>>>>>>>>>>>>>>>>>>>>>"
            logging.warning(msg)
            print(msg)
            # Bug fix: keep the LAST 99 characters.  The original used
            # cfgName[-99] — a single character — instead of a slice.
            configMetadata['name'] = cfgName[-99:]

        psetInstance = DbsQueryableParameterSet(
            Hash=psetHash,
            Name=configMetadata['name'],
            Version=configMetadata['version'],
            Type=configMetadata['Type'],
            Annotation=configMetadata['annotation'],
            Content=psetContent,
            )
    else:
        # Minimal parameter set: hash only.
        psetInstance = DbsQueryableParameterSet(Hash=psetHash)

    algorithmInstance = DbsAlgorithm(ExecutableName=exeName,
                                     ApplicationVersion=appVersion,
                                     ApplicationFamily=appFamily,
                                     ParameterSetID=psetInstance)

    if apiRef is not None:
        apiRef.insertAlgorithm(algorithmInstance)

    return algorithmInstance
from DBSAPI.dbsOptions import DbsOptionParser

# Parse command-line options and build the DBS API handle from them.
optManager = DbsOptionParser()
(opts,args) = optManager.getOpt()
api = DbsApi(opts.__dict__)

# Timestamp suffix keeps object names unique across test runs.
mytime = time.strftime("_%Y%m%d_%Hh%Mm%Ss",time.localtime())

primary = DbsPrimaryDataset (Name = "test_primary_001" + mytime, Type="test")

# Algorithm with a hash-only parameter set.
algo = DbsAlgorithm (
    ExecutableName="TestExe01" + mytime,
    ApplicationVersion= "TestVersion01" + mytime,
    ApplicationFamily="AppFamily01" + mytime,
    ParameterSetID=DbsQueryableParameterSet(
        Hash="001234565798685",
        )
    )

# Parent processed dataset spanning two tiers.
proc = DbsProcessedDataset (
    PrimaryDataset=primary,
    Name="TestProcessedDS001" + mytime,
    PhysicsGroup="BPositive",
    Status="Valid",
    TierList=['SIM', 'GEN'],
    AlgoList=[algo],
    )

# Child processed dataset — this expression continues past the end of
# this view (the DbsProcessedDataset call is closed later).
procChild = DbsProcessedDataset (
    PrimaryDataset=primary,
    Name="TestProcessedDS002" + mytime,
    # NOTE(review): this fragment starts inside an enclosing loop over
    # blockName (not visible here) — indentation of this leading block
    # is inferred; confirm against the full file.
    # Compare local vs global file counts for the block and record any
    # locally-known files missing from the global (DBS) listing.
    if len(blockFiles) != len(blocks[blockName]):
        print "\tFile count mismatch: %s local, %s global" % (len( blocks[blockName]), len(blockFiles))
        if blockName not in badBlocks:
            badBlocks.append(blockName)
        for blockFile in blocks[blockName]:
            if blockFile not in blockFiles:
                if not badFiles.has_key(blockName):
                    badFiles[blockName] = []
                badFiles[blockName].append(blockFile)

#sys.exit(0)

# Placeholder parameter set used for the re-inserted blocks.
psetInstance = DbsQueryableParameterSet(Hash="GIBBERISH")

# Re-create each block that had missing files and re-insert those files.
for newBlockName in badFiles.keys():
    seName = blockLocation[newBlockName]
    # Block names look like "<dataset path>#<uuid>".
    (datasetPath, junk) = newBlockName.split("#", 1)
    dbsApi.insertBlock(datasetPath, newBlockName, storage_element_list=[seName])
    blockRef = dbsApi.listBlocks(dataset=datasetPath, block_name=newBlockName)[0]
    print blockRef

    newFiles = []
    # Loop body continues past the end of this view.
    for newFileLFN in badFiles[newBlockName]:
        localFile = DBSBufferFile(lfn=newFileLFN)
# Global defaults applied to the files created below.
fileValidStatusG = validStatus
fileTypeG = 'STREAMER'

# Quality-information monitor names used in this test.
qim_name1 = "Tracker_Global"
qim_name2 = "TIB_Local"
qim_name3 = "TIB_Power"
qim_int = "TIB_Percentage"

primObj = DbsPrimaryDataset(Name=primName, Type=primType)

# First algorithm with its fully-populated parameter set.
algoObj1 = DbsAlgorithm(ExecutableName=algoExe1,
                        ApplicationVersion=algoVer1,
                        ApplicationFamily=algoFam1,
                        ParameterSetID=DbsQueryableParameterSet(
                            Hash=psHash1,
                            Name=psName1,
                            Version=psVer1,
                            Type=psType1,
                            Annotation=psAnno1,
                            Content=psCon1))

# Second algorithm, same structure with its own metadata.
algoObj2 = DbsAlgorithm(ExecutableName=algoExe2,
                        ApplicationVersion=algoVer2,
                        ApplicationFamily=algoFam2,
                        ParameterSetID=DbsQueryableParameterSet(
                            Hash=psHash2,
                            Name=psName2,
                            Version=psVer2,
                            Type=psType2,
                            Annotation=psAnno2,
                            Content=psCon2))

# Merge algorithm — this expression continues past the end of this view.
algoObjM = DbsAlgorithm(ExecutableName=algoExeM,