# NOTE(review): Whitespace-collapsed fragment of a Python 2 CMS DBS client script.
# What the visible tokens show: it imports DBS API dataset classes, builds a DbsApi
# from command-line options (DbsOptionParser/getOpt), defines an "EdmFastMerge"
# DbsAlgorithm, calls api.insertMergedDataset(...) for path
# "/test_primary_001/TestProcessedDS001/SIM", creates a DbsFileBlock, and then
# begins constructing a DbsFile(...) — the call is TRUNCATED here (no closing
# paren, no insert call visible), so the fragment cannot run as-is.
# NOTE(review): the original multi-line layout was destroyed by the collapse; the
# inline "# File will go into THIS Block" comment now comments out everything
# after it on this single line. Restore the original formatting from upstream
# (presumably a DBS API example script) rather than editing this line in place.
from DBSAPI.dbsPrimaryDataset import DbsPrimaryDataset from DBSAPI.dbsProcessedDataset import DbsProcessedDataset from DBSAPI.dbsOptions import DbsOptionParser optManager = DbsOptionParser() (opts, args) = optManager.getOpt() api = DbsApi(opts.__dict__) try: merge_algo = DbsAlgorithm( ExecutableName="EdmFastMerge", ApplicationVersion="v101", ApplicationFamily="Merge", ) path = "/test_primary_001/TestProcessedDS001/SIM" merge_proc = api.insertMergedDataset(path, "ThisISMergedDataset001", merge_algo) # File will go into THIS Block block = DbsFileBlock(StorageElement=['test1', 'test3'], Name="/test_primary_001/TestProcessedDS001/SIM#12345") merged_file = DbsFile( Checksum='00000', LogicalFileName='MERGEDFILE_001', NumberOfEvents=10000, FileSize=1000000, Status='VALID', ValidationStatus='VALID', FileType='EVD', Dataset=merge_proc, Block=block,
# NOTE(review): Second whitespace-collapsed fragment — a near-duplicate of the
# previous script with different test data: path
# "/test_primary_anzar_001/SIM/TestProcessedDS001", block name "/this/hahah#12345",
# LogicalFileName 'MERGEDFILE_006'. Also TRUNCATED inside the DbsFile(...) call
# (no closing paren, no Dataset/Block kwargs visible), so it cannot run as-is.
# NOTE(review): `FileSize= 000000` is a leading-zero literal — octal 0 in
# Python 2, a SyntaxError in Python 3, and almost certainly meant to be a real
# size (the sibling fragment uses 1000000). Confirm against the upstream example
# before fixing; left byte-identical here because the call's tail is not visible.
from DBSAPI.dbsPrimaryDataset import DbsPrimaryDataset from DBSAPI.dbsProcessedDataset import DbsProcessedDataset from DBSAPI.dbsOptions import DbsOptionParser optManager = DbsOptionParser() (opts,args) = optManager.getOpt() api = DbsApi(opts.__dict__) try: merge_algo = DbsAlgorithm ( ExecutableName="EdmFastMerge", ApplicationVersion= "v101", ApplicationFamily="Merge", ) path = "/test_primary_anzar_001/SIM/TestProcessedDS001" merge_proc = api.insertMergedDataset(path, "ThisISMergedDataset001", merge_algo) # File will go into THIS Block block = DbsFileBlock ( StorageElement=['test1', 'test3'], Name="/this/hahah#12345" ) merged_file = DbsFile ( Checksum= '00000', LogicalFileName= 'MERGEDFILE_006', NumberOfEvents= 10000, FileSize= 000000, Status= 'VALID', ValidationStatus = 'VALID', FileType= 'EVD',
# NOTE(review): Collapsed fragment that STARTS mid-expression — the leading
# tokens (Status/TierList/AlgoList/RunsList/ParentList) are keyword arguments of
# a constructor whose opening (presumably DbsProcessedDataset(...) bound to
# child_procds) lies outside this view. The visible flow: insert child_procds
# via api.insertProcessedDataset, build an "EdmFastMerge" DbsAlgorithm, create a
# merged dataset from child_procds with api.insertMergedDataset, then verify via
# Python 2 print statements: listProcessedDatasets("test_primary_001", ...),
# listRuns(merged_ds), listDatasetParents(merged_ds). Ends at a TRUNCATED
# Python 2 `except DbsApiException, ex:` clause (handler body not visible).
# NOTE(review): Python 2 only — `print` statements and comma except syntax.
# Do not edit in place; recover the original line structure first.
Status="VALID", TierList=['GEN', 'SIM'], AlgoList=[algo], RunsList=[1, 2], # Provide a Run Number List that goes with this ProcDS ParentList=[parent_procds] #parent_procds as its parent ) api.insertProcessedDataset(child_procds) # Lets us create Merged Dataset for Child Dataset merge_algo = DbsAlgorithm ( ExecutableName="EdmFastMerge", ApplicationVersion= "v101", ApplicationFamily="Merge", ) merged_ds = api.insertMergedDataset(child_procds, "ThisISMergedDataset001", merge_algo) # Now we should be able to # See that there is a Processed Dataset with Name: ThisISMergedDataset001 print "\nThere is a Processed Dataset with Name: ThisISMergedDataset001 ???" print api.listProcessedDatasets("test_primary_001", "*", "ThisISMergedDataset001") # It has Runs from child_procds print "\nIt has Runs (1,2) from child_procds ???" print api.listRuns(merged_ds) # And its Parent is parent_procds print "\nAnd its Parent is parent_procds" print api.listDatasetParents(merged_ds) except DbsApiException, ex:
# NOTE(review): Collapsed fragment, near-duplicate of the previous one, also
# STARTING mid-expression: AlgoList/RunsList/ParentList are trailing keyword
# arguments of a constructor (child_procds) whose opening is outside this view.
# Visible flow is identical: insertProcessedDataset(child_procds), build the
# "EdmFastMerge" DbsAlgorithm, insertMergedDataset(child_procds, ...), then
# Python 2 print checks (listProcessedDatasets / listRuns / listDatasetParents).
# The enclosing try's except clause is NOT visible here — the fragment ends
# inside the try body, so it cannot run as-is.
# NOTE(review): presumably the same upstream example as the previous fragment;
# if the two are meant to be one file, deduplicate after restoring formatting.
AlgoList=[algo], RunsList=[ 1, 2 ], # Provide a Run Number List that goes with this ProcDS ParentList=[parent_procds] #parent_procds as its parent ) api.insertProcessedDataset(child_procds) # Lets us create Merged Dataset for Child Dataset merge_algo = DbsAlgorithm( ExecutableName="EdmFastMerge", ApplicationVersion="v101", ApplicationFamily="Merge", ) merged_ds = api.insertMergedDataset(child_procds, "ThisISMergedDataset001", merge_algo) # Now we should be able to # See that there is a Processed Dataset with Name: ThisISMergedDataset001 print "\nThere is a Processed Dataset with Name: ThisISMergedDataset001 ???" print api.listProcessedDatasets("test_primary_001", "*", "ThisISMergedDataset001") # It has Runs from child_procds print "\nIt has Runs (1,2) from child_procds ???" print api.listRuns(merged_ds) # And its Parent is parent_procds print "\nAnd its Parent is parent_procds" print api.listDatasetParents(merged_ds)