def updateFileStatus(self):
    bo = DBSFile(self.logger, self.dbi, self.owner)
    # Drop into the interactive debugger before exercising updateStatus.
    import pdb
    pdb.set_trace()
    bo.updateStatus(
        '/store/mc/Winter09/TTbar-madgraph/GEN-SIM-DIGI-RECO/IDEAL_V11_FastSim_v1/0060/0A83790D-71E1-DD11-9732-001EC9AAA058.root',
        1)
def test02(self):
    """business.DBSFile.listFileParents: Basic"""
    dburl = os.environ["DBS_TEST_DBURL_READER"]
    dbowner = os.environ["DBS_TEST_DBOWNER_READER"]
    dbi = DBFactory(self.logger, dburl).connect()
    bo = DBSFile(self.logger, dbi, dbowner)
    result = bo.listFileParents(logical_file_name='%')
    self.assertTrue(isinstance(result, list))
    self.assertEqual(len(result), 0)
def testFiles(self):
    """
    This method tests the business logic (and DAO) for the insertFiles() API
    """
    bo = DBSFile(self.logger, self.dbi, self.owner)
    binput = [{
        'adler32': u'NOTSET',
        'file_type': 'EDM',
        'file_output_config_list': [{
            'release_version': 'CMSSW_1_2_3',
            'pset_hash': '76e303993a1c2f842159dbfeeed9a0dd',
            'app_name': 'cmsRun',
            'output_module_label': 'Merged'
        }],
        'dataset': '/unittest_web_primary_ds_name_684/unittest_web_dataset_684/GEN-SIM-RAW',
        'file_size': u'2012211901',
        'auto_cross_section': 0.0,
        'check_sum': u'1504266448',
        'file_lumi_list': [
            {'lumi_section_num': u'27414', 'run_num': u'1'},
            {'lumi_section_num': u'26422', 'run_num': u'1'},
            {'lumi_section_num': u'29838', 'run_num': u'1'}
        ],
        'file_parent_list': [],
        'event_count': u'1619',
        'logical_file_name': '/store/mc/parent_684/0.root',
        'block': '/unittest_web_primary_ds_name_684/unittest_web_dataset_684/GEN-SIM-RAW#684',
        'creation_date': 1234,
        'create_by': 'anzar',
        'last_modification_date': 1234,
        'last_modified_by': 'anzar',
    }]
    bo.insertFile(binput)
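A hedged verification sketch to go with the insert test above (not part of the original test suite): after insertFile has run against the same database, the payload's LFN should be visible through the business-layer listing API, following the pattern of the reader tests in this section. The method name is hypothetical.

def testFilesReadBack(self):
    """Hypothetical follow-up: the file inserted above should be listed."""
    bo = DBSFile(self.logger, self.dbi, self.owner)
    # The LFN matches the 'logical_file_name' of the payload inserted above.
    result = bo.listFiles(logical_file_name='/store/mc/parent_684/0.root')
    self.assertTrue(isinstance(result, list))
    self.assertTrue(len(result) >= 1)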
def test04(self):
    """DBSFile.insertFile"""
    from dbs.business.DBSFile import DBSFile
    bo = DBSFile(self.logger, self.dbi)
    binput = []
    for k in range(100):
        # 'file' would shadow the builtin, so use a more descriptive name.
        file_spec = {
            "logicalfilename": "/store/but_file_%s_%s.root" % (IC, str(k)),
            "isfilevalid": True,
            "dataset": "/BUT_%s/BUT_PROCESSED_DATASET_V%s/GEN-SIM-RECO" % (IC, IC),
            "block": "/BUT_%s/BUT_PROCESSED_DATASET_V%s/GEN-SIM-RECO#BUT_BLOCK_%s" % (IC, IC, IC),
            "filetype": "EDM",
            "checksum": "999",
            "eventcount": 1000,
            "filesize": 1024,
            "branchhash": "TEST",
            "adler32": "adler32",
            "md5": "md5",
            "autocrosssection": 12345.,
            "creationdate": 1234,
            "createby": "*****@*****.**",
            "lastmodificationdate": 12345,
            "lastmodifiedby": "*****@*****.**"
        }
        binput.append(file_spec)
    bo.insertFile(binput)
class DBS3BusinessList(threading.Thread):
    def __init__(self, dburl, owner):
        threading.Thread.__init__(self)
        logger = logging.getLogger("dbs test logger")
        dbi = DBFactory(logger, dburl).connect()
        self.bo = DBSFile(logger, dbi, owner)

    def run(self):
        t = time.time()
        print(self.bo.listFiles("/GlobalAug07-C/Online/RAW"))
        print("Time: %s " % (time.time() - t))
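A minimal sketch of how the timing thread above might be driven, assuming the DBS_TEST_DBURL_READER and DBS_TEST_DBOWNER_READER environment variables used by the other tests in this section are set; the thread count of 5 is arbitrary.

import os

threads = [DBS3BusinessList(os.environ["DBS_TEST_DBURL_READER"],
                            os.environ["DBS_TEST_DBOWNER_READER"])
           for _ in range(5)]
# Fire all listing threads concurrently, then wait for each to finish.
for t in threads:
    t.start()
for t in threads:
    t.join()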
def testDBSFileList(self):
    """business.DBSFile.listFiles: Basic"""
    dburl = os.environ["DBS_TEST_DBURL_READER"]
    dbowner = os.environ["DBS_TEST_DBOWNER_READER"]
    dbi = DBFactory(self.logger, dburl).connect()
    bo = DBSFile(self.logger, dbi, dbowner)
    result = bo.listFiles('NoSuchFile%')
    self.assertTrue(isinstance(result, list))
    self.assertEqual(len(result), 0)
    result = bo.listFiles(dataset='NoSuchDataset%')
    self.assertTrue(isinstance(result, list))
    self.assertEqual(len(result), 0)
    result = bo.listFiles(block_name='NoSuchBlock%')
    self.assertTrue(isinstance(result, list))
    self.assertEqual(len(result), 0)
    result = bo.listFiles(logical_file_name='NoSuchLFN%')
    self.assertTrue(isinstance(result, list))
    self.assertEqual(len(result), 0)
def test08(self):
    """DBSFile.listFiles"""
    from dbs.business.DBSFile import DBSFile
    bo = DBSFile(self.logger, self.dbi)
    bo.listFiles("/BUT_%s/BUT_PROCESSED_DATASET_V%s/GEN-SIM-RECO" % (IC, IC))
    bo.listFiles(block="/BUT_%s/BUT_PROCESSED_DATASET_V%s/GEN-SIM-RECO#BUT_BLOCK_%s" % (IC, IC, IC))
def testDBSFileList(self):
    """business.DBSFile.listFiles: Basic"""
    dburl = os.environ["DBS_TEST_DBURL_READER"]
    dbowner = os.environ["DBS_TEST_DBOWNER_READER"]
    dbi = DBFactory(self.logger, dburl).connect()
    bo = DBSFile(self.logger, dbi, dbowner)
    result = bo.listFiles('NoSuchFile')
    self.assertTrue(isinstance(result, list))
    self.assertEqual(len(result), 0)
    result = bo.listFiles(dataset='NoSuchDataset')
    self.assertTrue(isinstance(result, list))
    self.assertEqual(len(result), 0)
    result = bo.listFiles(block_name='NoSuchBlock')
    self.assertTrue(isinstance(result, list))
    self.assertEqual(len(result), 0)
    result = bo.listFiles(logical_file_name='NoSuchLFN')
    self.assertTrue(isinstance(result, list))
    self.assertEqual(len(result), 0)
class DBSReaderModel(RESTModel):
    """
    DBS3 Server API Documentation
    """

    def __init__(self, config, dbi=None):
        """
        All parameters are provided through the DBSConfig module
        """
        config.__dict__['default_expires'] = config.dbs.default_expires
        RESTModel.__init__(self, config)
        dbowner = config.database.dbowner
        if dbi:
            self.dbi = dbi
        self.logger = logger
        self.dbsDataset = DBSDataset(self.logger, self.dbi, dbowner)
        self.dbsFile = DBSFile(self.logger, self.dbi, dbowner)
        self.dbsBlock = DBSBlock(self.logger, self.dbi, dbowner)

    def listDatasets(self, dataset="", parent_dataset="", is_dataset_valid=1,
                     release_version="", pset_hash="", app_name="",
                     output_module_label="", global_tag="", processing_version=0,
                     acquisition_era_name="", run_num=-1, physics_group_name="",
                     logical_file_name="", primary_ds_name="", primary_ds_type="",
                     processed_ds_name='', data_tier_name="",
                     dataset_access_type="VALID", prep_id='', create_by="",
                     last_modified_by="", min_cdate='0', max_cdate='0',
                     min_ldate='0', max_ldate='0', cdate='0', ldate='0',
                     detail=False, dataset_id=-1):
        """
        API to list dataset(s) in DBS.

        * You can use ANY combination of these parameters in this API.
        * In the absence of parameters, all valid datasets known to the DBS instance will be returned.

        :param dataset: Full dataset (path) of the dataset
        :type dataset: str
        :param parent_dataset: Full dataset (path) of the parent dataset
        :type parent_dataset: str
        :param release_version: cmssw version
        :type release_version: str
        :param pset_hash: pset hash
        :type pset_hash: str
        :param app_name: Application name (generally it is cmsRun)
        :type app_name: str
        :param output_module_label: output_module_label
        :type output_module_label: str
        :param global_tag: global_tag
        :type global_tag: str
        :param processing_version: Processing Version
        :type processing_version: str
        :param acquisition_era_name: Acquisition Era
        :type acquisition_era_name: str
        :param run_num: Specify a specific run number or range. Possible formats are: run_num, 'run_min-run_max' or ['run_min-run_max', run1, run2, ...].
        :type run_num: int, list, str
        :param physics_group_name: List only datasets having this physics_group_name attribute
        :type physics_group_name: str
        :param logical_file_name: List datasets containing the logical_file_name
        :type logical_file_name: str
        :param primary_ds_name: Primary Dataset Name
        :type primary_ds_name: str
        :param primary_ds_type: Primary Dataset Type (type of data, MC/DATA)
        :type primary_ds_type: str
        :param processed_ds_name: List datasets having this processed dataset name
        :type processed_ds_name: str
        :param data_tier_name: Data Tier
        :type data_tier_name: str
        :param dataset_access_type: Dataset Access Type (PRODUCTION, DEPRECATED, etc.)
        :type dataset_access_type: str
        :param prep_id: prep_id
        :type prep_id: str
        :param create_by: Creator of the dataset
        :type create_by: str
        :param last_modified_by: Last modifier of the dataset
        :type last_modified_by: str
        :param min_cdate: Lower limit for the creation date (unixtime) (Optional)
        :type min_cdate: int, str
        :param max_cdate: Upper limit for the creation date (unixtime) (Optional)
        :type max_cdate: int, str
        :param min_ldate: Lower limit for the last modification date (unixtime) (Optional)
        :type min_ldate: int, str
        :param max_ldate: Upper limit for the last modification date (unixtime) (Optional)
        :type max_ldate: int, str
        :param cdate: creation date (unixtime) (Optional)
        :type cdate: int, str
        :param ldate: last modification date (unixtime) (Optional)
        :type ldate: int, str
        :param detail: List all details of a dataset
        :type detail: bool
        :param dataset_id: dataset table primary key, used by CMS Computing Analytics
        :type dataset_id: int, long, str
        :returns: List of dictionaries containing the following keys (dataset). If the detail option is used, the dictionaries contain the following keys (primary_ds_name, physics_group_name, acquisition_era_name, create_by, dataset_access_type, data_tier_name, last_modified_by, creation_date, processing_version, processed_ds_name, xtcrosssection, last_modification_date, dataset_id, dataset, prep_id, primary_ds_type)
        :rtype: list of dicts
        """
        # Translate client-side '*' wildcards into SQL '%' wildcards.
        dataset = dataset.replace("*", "%")
        parent_dataset = parent_dataset.replace("*", "%")
        release_version = release_version.replace("*", "%")
        pset_hash = pset_hash.replace("*", "%")
        app_name = app_name.replace("*", "%")
        output_module_label = output_module_label.replace("*", "%")
        global_tag = global_tag.replace("*", "%")
        logical_file_name = logical_file_name.replace("*", "%")
        physics_group_name = physics_group_name.replace("*", "%")
        primary_ds_name = primary_ds_name.replace("*", "%")
        primary_ds_type = primary_ds_type.replace("*", "%")
        data_tier_name = data_tier_name.replace("*", "%")
        dataset_access_type = dataset_access_type.replace("*", "%")
        processed_ds_name = processed_ds_name.replace("*", "%")
        acquisition_era_name = acquisition_era_name.replace("*", "%")
        #processing_version = processing_version.replace("*", "%")
        # create_by and last_modified_by have to be fully spelled out; no wildcards are allowed.
        # We get them from the request head, so they can be either an HN account name or a DN.
        # This depends on how the user's account is set up.
        try:
            dataset_id = int(dataset_id)
        except Exception:
            dbsExceptionHandler("dbsException-invalid-input2",
                                "Invalid Input for dataset_id that has to be an int.",
                                self.logger.exception,
                                'dataset_id has to be an int.')
        if create_by.find('*') != -1 or create_by.find('%') != -1 \
                or last_modified_by.find('*') != -1 or last_modified_by.find('%') != -1:
            dbsExceptionHandler("dbsException-invalid-input2",
                                "Invalid Input for create_by or last_modified_by. No wildcard allowed.",
                                self.logger.exception,
                                'No wildcards allowed for create_by or last_modified_by')
        try:
            # A date argument carrying a wildcard is treated as "not set" (0);
            # otherwise it must parse as an int (unixtime).
            if isinstance(min_cdate, basestring) and ('*' in min_cdate or '%' in min_cdate):
                min_cdate = 0
            else:
                try:
                    min_cdate = int(min_cdate)
                except Exception:
                    dbsExceptionHandler("dbsException-invalid-input", "invalid input for min_cdate")
            if isinstance(max_cdate, basestring) and ('*' in max_cdate or '%' in max_cdate):
                max_cdate = 0
            else:
                try:
                    max_cdate = int(max_cdate)
                except Exception:
                    dbsExceptionHandler("dbsException-invalid-input", "invalid input for max_cdate")
            if isinstance(min_ldate, basestring) and ('*' in min_ldate or '%' in min_ldate):
                min_ldate = 0
            else:
                try:
                    min_ldate = int(min_ldate)
                except Exception:
                    dbsExceptionHandler("dbsException-invalid-input", "invalid input for min_ldate")
            if isinstance(max_ldate, basestring) and ('*' in max_ldate or '%' in max_ldate):
                max_ldate = 0
            else:
                try:
                    max_ldate = int(max_ldate)
                except Exception:
                    dbsExceptionHandler("dbsException-invalid-input", "invalid input for max_ldate")
            if isinstance(cdate, basestring) and ('*' in cdate or '%' in cdate):
                cdate = 0
            else:
                try:
                    cdate = int(cdate)
                except Exception:
                    dbsExceptionHandler("dbsException-invalid-input", "invalid input for cdate")
            if isinstance(ldate, basestring) and ('*' in ldate or '%' in ldate):
                ldate = 0
            else:
                try:
                    ldate = int(ldate)
                except Exception:
                    dbsExceptionHandler("dbsException-invalid-input", "invalid input for ldate")
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
        except Exception as ex:
            sError = "DBSReaderModel/listDatasets. %s \n. Exception trace: \n %s" \
                % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',
                                dbsExceptionCode['dbsException-server-error'],
                                self.logger.exception, sError)
        detail = detail in (True, 1, "True", "1", 'true')
        try:
            return self.dbsDataset.listDatasets(dataset, parent_dataset, is_dataset_valid,
                                                release_version, pset_hash, app_name,
                                                output_module_label, global_tag,
                                                processing_version, acquisition_era_name,
                                                run_num, physics_group_name,
                                                logical_file_name, primary_ds_name,
                                                primary_ds_type, processed_ds_name,
                                                data_tier_name, dataset_access_type,
                                                prep_id, create_by, last_modified_by,
                                                min_cdate, max_cdate, min_ldate, max_ldate,
                                                cdate, ldate, detail, dataset_id)
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
        except Exception as ex:
            sError = "DBSReaderModel/listDatasets. %s.\n Exception trace: \n %s" % (
                ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',
                                dbsExceptionCode['dbsException-server-error'],
                                self.logger.exception, sError)

    def listBlocks(self, dataset="", block_name="", data_tier_name="",
                   origin_site_name="", logical_file_name="", run_num=-1,
                   min_cdate='0', max_cdate='0', min_ldate='0', max_ldate='0',
                   cdate='0', ldate='0', open_for_writing=-1, detail=False):
        """
        API to list a block in DBS. At least one of the parameters block_name,
        dataset, data_tier_name or logical_file_name is required. If
        data_tier_name is provided, min_cdate and max_cdate have to be specified
        and the difference in time has to be less than 31 days.
        :param block_name: name of the block
        :type block_name: str
        :param dataset: dataset
        :type dataset: str
        :param data_tier_name: data tier
        :type data_tier_name: str
        :param logical_file_name: Logical File Name
        :type logical_file_name: str
        :param origin_site_name: Origin Site Name (Optional)
        :type origin_site_name: str
        :param open_for_writing: Open for Writing (Optional)
        :type open_for_writing: int (0 or 1)
        :param run_num: run numbers (Optional). Possible formats are: run_num, 'run_min-run_max' or ['run_min-run_max', run1, run2, ...].
        :type run_num: int, list of runs or list of run ranges
        :param min_cdate: Lower limit for the creation date (unixtime) (Optional)
        :type min_cdate: int, str
        :param max_cdate: Upper limit for the creation date (unixtime) (Optional)
        :type max_cdate: int, str
        :param min_ldate: Lower limit for the last modification date (unixtime) (Optional)
        :type min_ldate: int, str
        :param max_ldate: Upper limit for the last modification date (unixtime) (Optional)
        :type max_ldate: int, str
        :param cdate: creation date (unixtime) (Optional)
        :type cdate: int, str
        :param ldate: last modification date (unixtime) (Optional)
        :type ldate: int, str
        :param detail: Get detailed information of a block (Optional)
        :type detail: bool
        :returns: List of dictionaries containing the following keys (block_name). If option detail is used, the dictionaries contain the following keys (block_id, create_by, creation_date, open_for_writing, last_modified_by, dataset, block_name, file_count, origin_site_name, last_modification_date, dataset_id and block_size)
        :rtype: list of dicts
        """
        dataset = dataset.replace("*", "%")
        block_name = block_name.replace("*", "%")
        logical_file_name = logical_file_name.replace("*", "%")
        origin_site_name = origin_site_name.replace("*", "%")

        if isinstance(min_cdate, basestring) and ('*' in min_cdate or '%' in min_cdate):
            min_cdate = 0
        else:
            try:
                min_cdate = int(min_cdate)
            except Exception:
                dbsExceptionHandler("dbsException-invalid-input", "invalid input for min_cdate")

        if isinstance(max_cdate, basestring) and ('*' in max_cdate or '%' in max_cdate):
            max_cdate = 0
        else:
            try:
                max_cdate = int(max_cdate)
            except Exception:
                dbsExceptionHandler("dbsException-invalid-input", "invalid input for max_cdate")

        if isinstance(min_ldate, basestring) and ('*' in min_ldate or '%' in min_ldate):
            min_ldate = 0
        else:
            try:
                min_ldate = int(min_ldate)
            except Exception:
                dbsExceptionHandler("dbsException-invalid-input", "invalid input for min_ldate")

        if isinstance(max_ldate, basestring) and ('*' in max_ldate or '%' in max_ldate):
            max_ldate = 0
        else:
            try:
                max_ldate = int(max_ldate)
            except Exception:
                dbsExceptionHandler("dbsException-invalid-input", "invalid input for max_ldate")

        if isinstance(cdate, basestring) and ('*' in cdate or '%' in cdate):
            cdate = 0
        else:
            try:
                cdate = int(cdate)
            except Exception:
                dbsExceptionHandler("dbsException-invalid-input", "invalid input for cdate")

        if isinstance(ldate, basestring) and ('*' in ldate or '%' in ldate):
            ldate = 0
        else:
            try:
                ldate = int(ldate)
            except Exception:
                dbsExceptionHandler("dbsException-invalid-input", "invalid input for ldate")

        detail = detail in (True, 1, "True", "1", 'true')
        try:
            b = self.dbsBlock.listBlocks(dataset, block_name, data_tier_name,
                                         origin_site_name, logical_file_name,
                                         run_num, min_cdate, max_cdate, min_ldate,
                                         max_ldate, cdate, ldate, open_for_writing,
                                         detail)
            return b
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
        except Exception as ex:
            sError = "DBSReaderModel/listBlocks. %s\n. Exception trace: \n %s" \
                % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',
                                dbsExceptionCode['dbsException-server-error'],
                                self.logger.exception, sError)

    def listFiles(self, dataset="", block_name="", logical_file_name="",
                  release_version="", pset_hash="", app_name="",
                  output_module_label="", run_num=-1, origin_site_name="",
                  lumi_list="", detail=False, validFileOnly=0):
        """
        API to list files in DBS. Either a non-wildcarded logical_file_name, a
        non-wildcarded dataset or a non-wildcarded block_name is required. The
        combination of a non-wildcarded dataset or block_name with a wildcarded
        logical_file_name is supported.

        * lumi_list can be either a list of lumi section numbers, as [a1, a2, a3],
          or a list of lumi section ranges, as [[a, b], [c, d]]. The two formats
          cannot be mixed.
        * If lumi_list is provided, only run_num=single-run-number is allowed.
        * When an lfn list is present, no run or lumi list is allowed.

        :param logical_file_name: logical_file_name of the file
        :type logical_file_name: str
        :param dataset: dataset
        :type dataset: str
        :param block_name: block name
        :type block_name: str
        :param release_version: release version
        :type release_version: str
        :param pset_hash: parameter set hash
        :type pset_hash: str
        :param app_name: Name of the application
        :type app_name: str
        :param output_module_label: name of the used output module
        :type output_module_label: str
        :param run_num: run, run ranges, and run list. Possible formats are: run_num, 'run_min-run_max' or ['run_min-run_max', run1, run2, ...].
        :type run_num: int, list, string
        :param origin_site_name: site where the file was created
        :type origin_site_name: str
        :param lumi_list: List containing luminosity sections
        :type lumi_list: list
        :param detail: Get detailed information about a file
        :type detail: bool
        :param validFileOnly: default=0, return all files. When =1, only return files with is_file_valid=1 or dataset_access_type=PRODUCTION or VALID
        :type validFileOnly: int
        :returns: List of dictionaries containing the following keys (logical_file_name).
            If the detail parameter is true, the dictionaries contain the following keys (check_sum, branch_hash_id, adler32, block_id, event_count, file_type, create_by, logical_file_name, creation_date, last_modified_by, dataset, block_name, file_id, file_size, last_modification_date, dataset_id, file_type_id, auto_cross_section, md5, is_file_valid)
        :rtype: list of dicts
        """
        logical_file_name = logical_file_name.replace("*", "%")
        release_version = release_version.replace("*", "%")
        pset_hash = pset_hash.replace("*", "%")
        app_name = app_name.replace("*", "%")
        block_name = block_name.replace("*", "%")
        origin_site_name = origin_site_name.replace("*", "%")
        dataset = dataset.replace("*", "%")
        if lumi_list:
            if run_num == -1 or not run_num:
                dbsExceptionHandler("dbsException-invalid-input",
                                    "When lumi_list is given, a single run_num is required.",
                                    self.logger.exception)
            else:
                try:
                    lumi_list = self.dbsUtils2.decodeLumiIntervals(lumi_list)
                except Exception as de:
                    dbsExceptionHandler("dbsException-invalid-input",
                                        "Invalid lumi_list input: " + str(de),
                                        self.logger.exception)
        else:
            if not isinstance(run_num, list):
                if run_num == 1 or run_num == '1':
                    dbsExceptionHandler("dbsException-invalid-input",
                                        "files API does not support run_num=1 when no lumi.",
                                        self.logger.exception)
            else:
                if 1 in run_num or '1' in run_num:
                    dbsExceptionHandler("dbsException-invalid-input",
                                        "files API does not support run_num=1 when no lumi.",
                                        self.logger.exception)
        detail = detail in (True, 1, "True", "1", 'true')
        output_module_label = output_module_label.replace("*", "%")
        try:
            result = self.dbsFile.listFiles(dataset, block_name, logical_file_name,
                                            release_version, pset_hash, app_name,
                                            output_module_label, run_num,
                                            origin_site_name, lumi_list, detail,
                                            validFileOnly)
            for item in result:
                yield item
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
        except Exception as ex:
            sError = "DBSReaderModel/listFiles. %s \n Exception trace: \n %s" % (
                ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error', ex.message,
                                self.logger.exception, sError)
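A minimal usage sketch for the reader APIs above, assuming a DBSReaderModel built from a test DBSConfig (`config` is hypothetical, as is the wildcard pattern; the dataset path, run number and lumi sections are taken from the insert test earlier in this section). It illustrates the conventions spelled out in the docstrings: client-side '*' wildcards, and the single-run constraint when lumi_list is given.

model = DBSReaderModel(config)

# Client-side '*' wildcards are translated to SQL '%' by the model.
datasets = model.listDatasets(dataset='/unittest_web_primary_ds_name_*/*/GEN-SIM-RAW',
                              detail=True)

# listFiles is a generator. With a lumi_list, exactly one run_num is required;
# the list may be flat ([a1, a2, a3]) or ranges ([[a, b], [c, d]]), not mixed.
files = list(model.listFiles(
    dataset='/unittest_web_primary_ds_name_684/unittest_web_dataset_684/GEN-SIM-RAW',
    run_num=1,
    lumi_list=[26422, 27414, 29838]))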