Example #1
def createAlgorithmForInsert(datasetInfo):
    """
    _createAlgorithmForInsert_

    Create an Algorithm instance that uses the minimal info needed
    to insert a file

    """
    exeName = datasetInfo['ApplicationName']
    appVersion = datasetInfo['ApplicationVersion']
    appFamily = datasetInfo["ApplicationFamily"]

    #
    # Repacker jobs have no PsetContent/PSetHash
    #
    psetContent = datasetInfo.get('PSetContent', None)
    if psetContent is None:
        psetContent = "PSET_CONTENT_NOT_AVAILABLE"
    psetHash = datasetInfo.get('PSetHash', None)
    if psetHash is None:
        psetHash = "NO_PSET_HASH"
    elif ";" in psetHash:
        # no need for fake hash in new schema
        psetHash = psetHash.split(";")[0]
        psetHash = psetHash.replace("hash=", "")

    psetInstance = DbsQueryableParameterSet(Hash=psetHash)
    algorithmInstance = DbsAlgorithm(ExecutableName=exeName,
                                     ApplicationVersion=appVersion,
                                     ApplicationFamily=appFamily,
                                     ParameterSetID=psetInstance)
    return algorithmInstance
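
A minimal usage sketch, assuming hypothetical datasetInfo values; the keys are the ones the function reads:

# Hypothetical input; 'PSetHash' and 'PSetContent' may be omitted, in which
# case the placeholder values above are substituted.
datasetInfo = {
    'ApplicationName': 'cmsRun',
    'ApplicationVersion': 'CMSSW_1_2_3',
    'ApplicationFamily': 'Output',
    'PSetHash': 'hash=001234565798685;fakehash',
}
algo = createAlgorithmForInsert(datasetInfo)   # builds a DbsAlgorithm, nothing is inserted yet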
Example #2
def createAlgorithm(apiRef,
                    appName,
                    appVer,
                    appFam,
                    PSetHash=None,
                    PSetContent=None):
    """
    _createAlgorithm_

    Create a new DBS Algorithm, explicitly passing in the arguments.
    We don't use configs.
    The algorithm is only written to DBS when an apiRef is passed.
    """

    # Take care of PSetHash
    if not PSetHash:
        PSetHash = "NO_PSET_HASH2"
    elif ";" in PSetHash:
        # no need for fake hash in new schema
        PSetHash = PSetHash.split(";")[0]
        PSetHash = PSetHash.replace("hash=", "")

    # Take care of PSetContent: dbsApi tries to base64-encode it,
    # which blows up if it's None
    if not PSetContent:
        PSetContent = ""
    psetInstance = DbsQueryableParameterSet(Hash=PSetHash, Content=PSetContent)
    algorithmInstance = DbsAlgorithm(ExecutableName=appName,
                                     ApplicationVersion=appVer,
                                     ApplicationFamily=appFam,
                                     ParameterSetID=psetInstance)

    if apiRef:
        try:
            apiRef.insertAlgorithm(algorithmInstance)
        except DbsException as ex:
            msg = "Error in DBSInterface.createAlgorithm(%s)\n" % appVer
            msg += formatEx(ex)
            logging.error(msg)
            raise DBSInterfaceError(msg)
    return algorithmInstance
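
A usage sketch with assumed values; with apiRef=None the DbsAlgorithm is only built locally, while passing a DbsApi instance would also insert it:

# Dry run: apiRef is None, so nothing is written to DBS
algo = createAlgorithm(apiRef=None,
                       appName="cmsRun",
                       appVer="CMSSW_1_2_3",
                       appFam="Output",
                       PSetHash="hash=001234565798685;fakehash",
                       PSetContent=None)
# The legacy "hash=<hash>;<fake>" form is reduced to the bare hash before use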
Example #3
def createMergeAlgorithm(datasetInfo, apiRef=None):
    """
    _createMergeAlgorithm_

    Create a DbsAlgorithm for a merge dataset

    """
    exeName = datasetInfo['ApplicationName']
    version = datasetInfo['ApplicationVersion']
    family = datasetInfo.get('ApplicationFamily', None)
    if not family:
        family = datasetInfo['OutputModuleName']

    mergeAlgo = DbsAlgorithm(
        ExecutableName=exeName,
        ApplicationVersion=version,
        ApplicationFamily=family,
    )

    if apiRef is not None:
        apiRef.insertAlgorithm(mergeAlgo)
    return mergeAlgo
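
A short call sketch with assumed values; ApplicationFamily falls back to OutputModuleName when it is missing or empty:

mergeInfo = {
    'ApplicationName': 'EdmFastMerge',
    'ApplicationVersion': 'v101',
    'OutputModuleName': 'Merged',   # used because 'ApplicationFamily' is absent
}
mergeAlgo = createMergeAlgorithm(mergeInfo)   # apiRef defaults to None, so no DBS insert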
Example #4
from DBSAPI.dbsApi import DbsApi
from DBSAPI.dbsAlgorithm import DbsAlgorithm
from DBSAPI.dbsFileBlock import DbsFileBlock
from DBSAPI.dbsFile import DbsFile
from DBSAPI.dbsLumiSection import DbsLumiSection
from DBSAPI.dbsQueryableParameterSet import DbsQueryableParameterSet
from DBSAPI.dbsPrimaryDataset import DbsPrimaryDataset
from DBSAPI.dbsProcessedDataset import DbsProcessedDataset
from DBSAPI.dbsOptions import DbsOptionParser

optManager = DbsOptionParser()
(opts, args) = optManager.getOpt()
api = DbsApi(opts.__dict__)

try:
    merge_algo = DbsAlgorithm(
        ExecutableName="EdmFastMerge",
        ApplicationVersion="v101",
        ApplicationFamily="Merge",
    )
    path = "/test_primary_001/TestProcessedDS001/SIM"
    merge_proc = api.insertMergedDataset(path, "ThisISMergedDataset001",
                                         merge_algo)

    # File will go into THIS Block
    block = DbsFileBlock(StorageElement=['test1', 'test3'],
                         Name="/test_primary_001/TestProcessedDS001/SIM#12345")

    merged_file = DbsFile(
        Checksum='00000',
        LogicalFileName='MERGEDFILE_001',
        NumberOfEvents=10000,
        FileSize=1000000,
Example #5
pri = DbsPrimaryDataset (Name = primary, Type="test")
apiObj.run(pri,"", excep = True)

f.write("\n***********************insertPrimaryDataset API tests***************************")



apiObj = DbsUnitTestApi(api.insertAlgorithm,f)
apiObj.setVerboseLevel(opts.verbose)
f.write("\n\n***********************insertAlgorithm API tests***************************")
algo1 = DbsAlgorithm(ExecutableName="TestExe01",
                     ApplicationVersion="TestVersion01" + mytime,
                     ApplicationFamily="AppFamily01",
                     ParameterSetID=DbsQueryableParameterSet(
                         Hash="001234565798685",
                         Name="MyFirstParam01",
                         Version="V001",
                         Type="test",
                         Annotation="This is test",
                         Content="int a= {}, b={c=1, d=33}, f={}, x, y, x"))
apiObj.run(algo1, excep = False)


algo2 = DbsAlgorithm(ExecutableName="TestExe011",
                     ApplicationVersion="TestVersion011" + mytime,
                     ApplicationFamily="AppFamily011",
                     ParameterSetID=DbsQueryableParameterSet(
                         Hash="001234565798685",
                         Name="MyFirstParam01",
                         Annotation="This is test",
                         Content="int a= {}, b={c=1, d=33}, f={}, x, y, x"))
Example #6
        mytime += str(i)
        fileList = []
	#Insert Primary
	apiObj = DbsUnitTestApi(api.insertPrimaryDataset, f)
	primary = 'StressTestPrimary' + mytime
	pri1 = DbsPrimaryDataset (Name = primary, Type='MC')
	apiObj.run(pri1, excep = False)

	#Insert Algorithm
	apiObj = DbsUnitTestApi(api.insertAlgorithm,f)
	algo1 = DbsAlgorithm (ExecutableName="StressTestExe01", 
		ApplicationVersion= "StressTestVersion01" + mytime, 
		ApplicationFamily="StressTestAppFamily01", 
		ParameterSetID=DbsQueryableParameterSet(Hash="001234565798685", 
							Name="StressTestParam01", 
							Version="V001", 
							Type="test", 
							Annotation="This is a stress test param", 
							Content="int a= {}, b={c=1, d=33}, f={}, x, y, x"
			                              )
	)
	apiObj.run(algo1, excep = False)
	
	#Insert Tier
	apiObj = DbsUnitTestApi(api.insertTier, f)
	tierName1 = "GEN"
	tierName2 = "SIM"
	apiObj.run(tierName1, excep = False)
	apiObj.run(tierName2, excep = False)

	tierList = [tierName1, tierName2]
Example #7
def createAlgorithm(datasetInfo, configMetadata=None, apiRef=None):
    """
    _createAlgorithm_

    Create an algorithm assuming that datasetInfo is a
    ProdCommon.MCPayloads.DatasetInfo like dictionary

    """

    exeName = datasetInfo['ApplicationName']
    appVersion = datasetInfo['ApplicationVersion']
    appFamily = datasetInfo["ApplicationFamily"]

    #
    # HACK:  Problem with large PSets (is this still relevant ?)
    #
    # Repacker jobs have no PSetContent/PSetHash
    #
    psetContent = datasetInfo.get('PSetContent', None)
    if psetContent is None:
        psetContent = "PSET_CONTENT_NOT_AVAILABLE"
    psetHash = datasetInfo.get('PSetHash', None)
    if psetHash is None:
        psetHash = "NO_PSET_HASH"
    elif ";" in psetHash:
        # no need for fake hash in new schema
        psetHash = psetHash.split(";")[0]
        psetHash = psetHash.replace("hash=", "")

    ## No more hacks
    #msg = ">>>>>>>>>>>>>>>>>>>>>>>>>>>>\n"
    #msg += "TEST HACK USED FOR PSetContent\n"
    #msg += ">>>>>>>>>>>>>>>>>>>>>>>>>>>>"
    #logging.warning(msg)
    #print msg
    #psetContent = "This is not a PSet"

    #
    # HACK: 100 char limit on cfg file name
    if configMetadata is not None:
        cfgName = configMetadata['name']
        if len(cfgName) > 100:
            msg = ">>>>>>>>>>>>>>>>>>>>>>>>>>>>\n"
            msg += "TEST HACK USED FOR Config File Name\n"
            msg += ">>>>>>>>>>>>>>>>>>>>>>>>>>>>"
            logging.warning(msg)
            print(msg)
            # keep only the last 99 characters of the config name
            configMetadata['name'] = cfgName[-99:]

        psetInstance = DbsQueryableParameterSet(
            Hash=psetHash,
            Name=configMetadata['name'],
            Version=configMetadata['version'],
            Type=configMetadata['Type'],
            Annotation=configMetadata['annotation'],
            Content=psetContent,
        )

        algorithmInstance = DbsAlgorithm(ExecutableName=exeName,
                                         ApplicationVersion=appVersion,
                                         ApplicationFamily=appFamily,
                                         ParameterSetID=psetInstance)
    else:
        psetInstance = DbsQueryableParameterSet(Hash=psetHash)
        algorithmInstance = DbsAlgorithm(ExecutableName=exeName,
                                         ApplicationVersion=appVersion,
                                         ApplicationFamily=appFamily,
                                         ParameterSetID=psetInstance)

    if apiRef is not None:
        apiRef.insertAlgorithm(algorithmInstance)
    return algorithmInstance
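
A usage sketch with hypothetical values exercising the configMetadata branch; note the capitalized 'Type' key next to the lowercase ones:

datasetInfo = {
    'ApplicationName': 'cmsRun',
    'ApplicationVersion': 'CMSSW_1_2_3',
    'ApplicationFamily': 'Output',
    'PSetHash': 'hash=001234565798685;fakehash',
    'PSetContent': 'int a= {}, b={c=1, d=33}, f={}, x, y, x',
}
configMetadata = {
    'name': 'MyConfig.cfg',
    'version': 'V001',
    'Type': 'test',
    'annotation': 'This is test',
}
# apiRef=None keeps the object local; pass a DbsApi handle to also insert it
algo = createAlgorithm(datasetInfo, configMetadata=configMetadata, apiRef=None)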
Example #8
import time

from DBSAPI.dbsApi import DbsApi
from DBSAPI.dbsAlgorithm import DbsAlgorithm
from DBSAPI.dbsQueryableParameterSet import DbsQueryableParameterSet
from DBSAPI.dbsPrimaryDataset import DbsPrimaryDataset
from DBSAPI.dbsProcessedDataset import DbsProcessedDataset
from DBSAPI.dbsOptions import DbsOptionParser

optManager = DbsOptionParser()
(opts, args) = optManager.getOpt()
api = DbsApi(opts.__dict__)


mytime = time.strftime("_%Y%m%d_%Hh%Mm%Ss",time.localtime())
primary = DbsPrimaryDataset (Name = "test_primary_001" + mytime, Type="test")

algo = DbsAlgorithm (
         ExecutableName="TestExe01" + mytime,
         ApplicationVersion= "TestVersion01" + mytime,
         ApplicationFamily="AppFamily01" + mytime,
         ParameterSetID=DbsQueryableParameterSet(
           Hash="001234565798685",
           )
         )

proc = DbsProcessedDataset (
        PrimaryDataset=primary, 
        Name="TestProcessedDS001" + mytime, 
        PhysicsGroup="BPositive",
        Status="Valid",
        TierList=['SIM', 'GEN'],
        AlgoList=[algo],
        )

procChild = DbsProcessedDataset (
        PrimaryDataset=primary, 
Example #9
                       newBlockName,
                       storage_element_list=[seName])

    blockRef = dbsApi.listBlocks(dataset=datasetPath,
                                 block_name=newBlockName)[0]
    print(blockRef)

    newFiles = []
    for newFileLFN in badFiles[newBlockName]:
        localFile = DBSBufferFile(lfn=newFileLFN)
        localFile.load(parentage=1)

        (primaryDS, procDS, tier) = datasetPath[1:].split("/", 3)
        primary = DbsPrimaryDataset(Name=primaryDS, Type="mc")
        algo = DbsAlgorithm(ExecutableName=localFile["appName"],
                            ApplicationVersion=localFile["appVer"],
                            ApplicationFamily=localFile["appFam"],
                            ParameterSetID=psetInstance)
        processed = DbsProcessedDataset(PrimaryDataset=primary,
                                        AlgoList=[algo],
                                        Name=procDS,
                                        TierList=[tier],
                                        ParentList=[],
                                        PhysicsGroup="NoGroup",
                                        Status="VALID",
                                        GlobalTag="")
        newFiles.append(
            DBSInterface.createDBSFileFromBufferFile(localFile, processed))

    dbsApi.insertFiles(datasetPath, newFiles, blockRef)
    dbsApi.closeBlock(block=newBlockName)
Example #10
    optManager = DbsOptionParser()
    (opts, args) = optManager.getOpt()
    api = DbsApi(opts.__dict__)

    import pdb

    ##Primary Dataset
    primary = DbsPrimaryDataset(Name="test_primary_001", Type="TEST")
    api.insertPrimaryDataset(primary)

    #Algorithm used by Parent and Child Datasets for our test
    algo = DbsAlgorithm(ExecutableName="TestExe01",
                        ApplicationVersion="TestVersion01",
                        ApplicationFamily="AppFamily01",
                        ParameterSetID=DbsQueryableParameterSet(
                            Hash="001234565798685",
                            Name="MyFirstParam01",
                            Version="V001",
                            Type="test",
                            Annotation="This is test",
                            Content="int a= {}, b={c=1, d=33}, f={}, x, y, x"))

    api.insertAlgorithm(algo)

    # Parent Dataset
    parent_procds = DbsProcessedDataset(
        PrimaryDataset=primary,
        Name="TestProcessedDS001-Parent",
        PhysicsGroup="BPositive",
        Status="VALID",
        TierList=['GEN', 'SIM'],
        AlgoList=[algo],
Example #11
fileSizeG = 563226500
fileStatusG = validStatus
fileValidStatusG = validStatus
fileTypeG = 'STREAMER'

qim_name1 = "Tracker_Global"
qim_name2 = "TIB_Local"
qim_name3 = "TIB_Power"
qim_int = "TIB_Percentage"

primObj = DbsPrimaryDataset(Name=primName, Type=primType)
algoObj1 = DbsAlgorithm(ExecutableName=algoExe1,
                        ApplicationVersion=algoVer1,
                        ApplicationFamily=algoFam1,
                        ParameterSetID=DbsQueryableParameterSet(
                            Hash=psHash1,
                            Name=psName1,
                            Version=psVer1,
                            Type=psType1,
                            Annotation=psAnno1,
                            Content=psCon1))
algoObj2 = DbsAlgorithm(ExecutableName=algoExe2,
                        ApplicationVersion=algoVer2,
                        ApplicationFamily=algoFam2,
                        ParameterSetID=DbsQueryableParameterSet(
                            Hash=psHash2,
                            Name=psName2,
                            Version=psVer2,
                            Type=psType2,
                            Annotation=psAnno2,
                            Content=psCon2))
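
As in the earlier examples, these DbsAlgorithm objects would then be registered through the API; a minimal sketch, assuming api is the DbsApi handle used by the surrounding script:

api.insertAlgorithm(algoObj1)   # sketch: registers the first algorithm
api.insertAlgorithm(algoObj2)   # sketch: registers the second algorithm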