def setUp(self):
    """Prepare clients, a fresh dataset name and the RSE id for each test."""
    self.account = 'root'
    self.scope = 'mock'
    self.rse = 'MOCK5'
    self.file_sizes = 2
    self.dataset = 'dataset_%s' % generate_uuid()
    self.rule_client = RuleClient()
    self.did_client = DIDClient()
    self.replica_client = ReplicaClient()
    self.upload_client = UploadClient()
    # Resolve the VO kwargs when running in multi-VO mode, else no VO filter.
    if config_get_bool('common', 'multi_vo', raise_exception=False, default=False):
        self.vo = {'vo': config_get('client', 'vo', raise_exception=False, default='tst')}
    else:
        self.vo = {}
    self.rse_id = get_rse_id(rse=self.rse, **self.vo)
def main(argv):
    """Register or attach files listed in a manifest to a Rucio dataset.

    Each manifest line holds: "<dataset> <file_name> <file_size> <account>".
    Files unknown to the DB are registered into the dataset; known files
    are attached.  Fixed: Python 2 print/except syntax, md5 of str (needs
    bytes on Python 3), SQL injection via %-interpolation, and connections
    left open on error.
    """
    try:
        did_client = DIDClient()
        openfile_name = sys.argv[1]
        scope_name = 'twgrid-user-wchang'
        mysql_engine = create_engine("mysql://*****:*****@rucio-db01.grid.sinica.edu.tw/rucio")
        with open(openfile_name) as manifest:
            for line in manifest:
                dataset, file_name, size_str, account = line.split()[:4]
                file_size = int(size_str)
                pre_md5 = 'twgrid-user-wchang:' + file_name
                # hashlib requires bytes on Python 3
                md5_sum = hashlib.md5(pre_md5.encode()).hexdigest()
                files = [{'scope': scope_name, 'name': file_name, 'md5': md5_sum,
                          'bytes': file_size, 'adler32': '0cc737eb'}]
                # context manager guarantees the connection is released;
                # parameterized query avoids SQL injection
                with mysql_engine.connect() as connection:
                    contact_db = connection.execute(
                        "select * from dids where scope=%s and name=%s",
                        (scope_name, file_name))
                    num_rows = contact_db.rowcount
                if num_rows == 0:
                    print("Register File : %s " % file_name)
                    did_client.add_files_to_dataset(scope=scope_name, name=dataset,
                                                    files=files,
                                                    rse='TW-DPM01_TWGRIDSCRATCHDISK')
                else:
                    print("Attach File : %s To %s " % (file_name, dataset))
                    did_client.attach_dids(scope=scope_name, name=dataset, dids=files)
    except SubscriptionDuplicate as e:
        print(e)
class TestDIDClients(unittest.TestCase):

    def setUp(self):
        self.did_client = DIDClient()
        self.lifetime_client = LifetimeClient()

    def test_create_and_check_lifetime_exception(self):
        """ LIFETIME (CLIENT): Test the creation of a Lifetime Model exception """
        tmp_scope = 'mock'
        tmp_dsn1 = 'dsn_%s' % generate_uuid()
        self.did_client.add_did(scope=tmp_scope, name=tmp_dsn1, type=DIDType.DATASET)
        dids = [{'scope': tmp_scope, 'name': tmp_dsn1, 'did_type': DIDType.DATASET}]
        # initial listing kept as in the original (exercises the endpoint)
        exceptions = self.lifetime_client.list_exceptions()
        exception_id = self.lifetime_client.add_exception(dids,
                                                          account='root',
                                                          pattern='wekhewfk',
                                                          comments='This is a comment',
                                                          expires_at=datetime.now())
        exceptions = [exception['id'] for exception in self.lifetime_client.list_exceptions()]
        assert exception_id in exceptions
def setup(self):
    """Create clients, fetch an auth token, and seed a dataset with replicas."""
    self.did_client = DIDClient()
    self.replica_client = ReplicaClient()
    self.base_client = BaseClient(account='root',
                                  ca_cert=config_get('client', 'ca_cert'),
                                  auth_type='x509')
    self.token = self.base_client.headers['X-Rucio-Auth-Token']
    self.fname = generate_uuid()
    rses = ['MOCK', 'MOCK3', 'MOCK4']
    dsn = generate_uuid()
    self.files = [{'scope': 'mock', 'name': self.fname, 'bytes': 1, 'adler32': '0cc737eb'}]
    self.did_client.add_dataset(scope='mock', name=dsn)
    self.did_client.add_files_to_dataset('mock', name=dsn, files=self.files, rse='MOCK')
    # replicate the file on every test RSE
    for rse_name in rses:
        self.replica_client.add_replicas(rse_name, self.files)
def test_list_datasets_per_rse(self):
    """ REPLICA (CLIENT): List datasets in RSE."""
    rule_client = RuleClient()
    did_client = DIDClient()
    scope = 'mock'
    dataset = 'dataset_' + str(generate_uuid())
    did_client.add_dataset(scope=scope, name=dataset)
    rule_client.add_replication_rule(dids=[{'scope': scope, 'name': dataset}],
                                     account='root',
                                     copies=1,
                                     rse_expression='MOCK',
                                     grouping='DATASET')
    # the wildcard filter must match the freshly created dataset
    replicas = list(list_datasets_per_rse(rse_id=get_rse_id(rse='MOCK', **self.vo),
                                          filters={'scope': InternalScope(scope, **self.vo),
                                                   'name': 'data*'}))
    assert replicas != []
class TestMetaDIDClient(unittest.TestCase):
    """ Test the metadata DID client """

    def setUp(self):
        """ Setup the Test Case """
        self.did_client = DIDClient()
        self.meta_client = MetaClient()
        self.rse_client = RSEClient()
        self.scope_client = ScopeClient()

    def test_add_list_meta(self):
        """ META DID (CLIENTS): Add metadata to a data identifier"""
        # Add a scope
        tmp_scope = 'mock'
        # Add a dataset
        tmp_dataset = 'dsn_%s' % uuid()
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dataset)
        # Set a key/value pair and read it back
        key = 'project'
        value = 'data13_hip'
        self.did_client.set_metadata(scope=tmp_scope, name=tmp_dataset, key=key, value=value)
        meta = self.did_client.get_metadata(scope=tmp_scope, name=tmp_dataset)
        assert key in meta
        assert meta[key] == value
def setup(self):
    """Instantiate every client this test suite needs."""
    self.account_client = AccountClient()
    self.scope_client = ScopeClient()
    self.meta_client = MetaClient()
    self.did_client = DIDClient()
    self.replica_client = ReplicaClient()
    self.rse_client = RSEClient()
class TestMetaDIDClient():

    def setup(self):
        """Instantiate the clients used by the metadata tests."""
        self.did_client = DIDClient()
        self.meta_client = MetaClient()
        self.rse_client = RSEClient()
        self.scope_client = ScopeClient()

    def test_add_list_meta(self):
        """ META DID (CLIENTS): Add metadata to a data identifier"""
        # Add a scope
        tmp_scope = 'mock'
        # Add a dataset
        tmp_dataset = 'dsn_%s' % uuid()
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dataset)
        # Add a key
        key = 'project'
        value = 'data13_hip'
        self.did_client.set_metadata(scope=tmp_scope, name=tmp_dataset, key=key, value=value)
        meta = self.did_client.get_metadata(scope=tmp_scope, name=tmp_dataset)
        # plain asserts instead of nose assert_in/assert_equal: consistent
        # with the unittest variant of this class elsewhere in the file
        assert key in meta
        assert meta[key] == value
def setup(self):
    """Create a DID client and register one fresh DATASET did."""
    self.did_client = DIDClient()
    self.tmp_scope = 'mock'
    self.tmp_name = 'name_%s' % uuid()
    self.did_client.add_did(scope=self.tmp_scope, name=self.tmp_name, type="DATASET")
def __init__(self, dataset, site, rse=None, scope=DEFAULT_SCOPE,
             uuid=None, check=True, lifetime=None, dry_run=False):
    """Hold sync state for one dataset/site pair; rse defaults to the site name."""
    self.dataset = dataset
    self.site = site
    self.rse = site if rse is None else rse
    self.scope = scope
    self.uuid = uuid
    self.check = check
    self.lifetime = lifetime
    self.dry_run = dry_run
    self.blocks = []
    self.url = ''
    # fetch metadata and storage URL before creating the Rucio clients
    self.getmetadata()
    self.get_global_url()
    self.didc = DIDClient()
    self.repc = ReplicaClient()
    self.gfal = Gfal2Context()
def __init__(self):
    """Load data-type categories, RSE configuration and DB/Rucio handles."""
    # Take all data type categories
    self.RAW_RECORDS_TPC_TYPES = helper.get_hostconfig()['raw_records_tpc_types']
    self.RAW_RECORDS_MV_TYPES = helper.get_hostconfig()['raw_records_mv_types']
    self.RAW_RECORDS_NV_TYPES = helper.get_hostconfig()['raw_records_nv_types']
    self.LIGHT_RAW_RECORDS_TPC_TYPES = helper.get_hostconfig()['light_raw_records_tpc_types']
    self.LIGHT_RAW_RECORDS_MV_TYPES = helper.get_hostconfig()['light_raw_records_mv_types']
    self.LIGHT_RAW_RECORDS_NV_TYPES = helper.get_hostconfig()['light_raw_records_nv_types']
    self.HIGH_LEVEL_TYPES = helper.get_hostconfig()['high_level_types']
    self.RECORDS_TYPES = helper.get_hostconfig()['records_types']
    # Choose which data types to treat: the concatenation of every category
    self.DTYPES = (self.RAW_RECORDS_TPC_TYPES + self.RAW_RECORDS_MV_TYPES
                   + self.RAW_RECORDS_NV_TYPES + self.LIGHT_RAW_RECORDS_TPC_TYPES
                   + self.LIGHT_RAW_RECORDS_MV_TYPES + self.LIGHT_RAW_RECORDS_NV_TYPES
                   + self.HIGH_LEVEL_TYPES + self.RECORDS_TYPES)
    # The list of all XENON RSEs
    self.RSES = helper.get_hostconfig()['rses']
    # RSE used to perform the upload
    self.UPLOAD_TO = helper.get_hostconfig()['upload_to']
    # Directory from which the data manager uploads data
    self.DATADIR = helper.get_hostconfig()['path_data_to_upload']
    # Sequence of rules to be created according to the data type
    self.RAW_RECORDS_TPC_RSES = helper.get_hostconfig()["raw_records_tpc_rses"]
    self.RAW_RECORDS_MV_RSES = helper.get_hostconfig()["raw_records_mv_rses"]
    self.RAW_RECORDS_NV_RSES = helper.get_hostconfig()["raw_records_nv_rses"]
    self.LIGHT_RAW_RECORDS_TPC_RSES = helper.get_hostconfig()["light_raw_records_tpc_rses"]
    self.LIGHT_RAW_RECORDS_MV_RSES = helper.get_hostconfig()["light_raw_records_mv_rses"]
    self.LIGHT_RAW_RECORDS_NV_RSES = helper.get_hostconfig()["light_raw_records_nv_rses"]
    self.HIGH_LEVEL_RSES = helper.get_hostconfig()["high_level_rses"]
    self.RECORDS_RSES = helper.get_hostconfig()["records_rses"]
    # Init the runDB
    self.db = ConnectMongoDB()
    # Init Rucio for later uploads and handling
    self.rc = RucioSummoner()
    self.didclient = DIDClient()
    self.replicaclient = ReplicaClient()
    # Rucio rule assignment priority
    self.priority = 3
    # Parameters to write warnings
    self.minimum_number_acceptable_rses = 2
    self.minimum_deltadays_allowed = 3
def __init__(self):
    """Create DID/replica clients and resolve the VO (multi-VO aware)."""
    self.dc = DIDClient()
    self.rc = ReplicaClient()
    if config_get_bool('common', 'multi_vo', raise_exception=False, default=False):
        self.vo = {'vo': config_get('client', 'vo', raise_exception=False, default='tst')}
    else:
        self.vo = {}
def setUp(self):
    """Prepare clients and a fresh dataset name on MOCK5."""
    self.account = 'root'
    self.scope = 'mock'
    self.rule_client = RuleClient()
    self.did_client = DIDClient()
    self.replica_client = ReplicaClient()
    self.upload_client = UploadClient()
    self.file_sizes = 2
    self.dataset = 'dataset_%s' % generate_uuid()
    self.rse = 'MOCK5'
    self.rse_id = get_rse_id(rse=self.rse)
def setUp(self):
    """ Constructor."""
    # Resolve the VO kwargs when in multi-VO mode, else no VO filter.
    if config_get_bool('common', 'multi_vo', raise_exception=False, default=False):
        self.vo = {'vo': get_vo()}
    else:
        self.vo = {}
    self.did_client = DIDClient()
def scan_experiment_directory(path):
    """ Register all xtc file in a directory to RUCIO

    Collects the files under *path* that are not yet known to Rucio and
    returns a list of dicts (pfn/scope/name/run) ready for registration;
    each data file also gets a companion small-data ('.smd.xtc') entry.
    Removed unused locals (exper_per_scope, instr, dtype, stream).
    """
    # experiment name doubles as the Rucio scope:
    # path layout assumed /.../<instr>/<exper>/<dtype>/... -- TODO confirm
    exper = os.path.normpath(path).strip('/').split('/')[4]
    scope = exper
    # collect all dids already known for this scope
    known_dids = set()
    cl = DIDClient()
    for did in cl.list_dids(scope, {'name': "xtc.files.*"}, type='file'):
        print(did)
        known_dids.add(did)
    print("Know dids for ", scope, "xtc", known_dids)
    to_add = []
    for fn in os.listdir(path):
        if not os.path.isfile(os.path.join(path, fn)):
            print("not a file, skip", fn)
            continue
        # expected name: <tag>_<name>_rNNNN_sNNNN.xtc
        try:
            _, name, runtok, streamtok = os.path.splitext(fn)[0].split('_')
        except ValueError:
            print("Wrong file format", fn)
            continue
        name = "xtc.files.{}".format(fn)  # xtc.e_mfxte1234_0002_s0002.xtc
        smdname = "xtc.files.{}".format(fn.replace('.xtc', '.smd.xtc'))
        if name in known_dids:
            print("File alredy in dataset")
            continue
        run_number = int(runtok[1:])
        to_add.append({'pfn': os.path.join(path, fn),
                       'scope': scope,
                       'name': name,
                       'run': run_number})
        # companion small-data file lives in the 'smalldata' subdirectory
        smdpfn = os.path.join(path, 'smalldata', fn.replace('.xtc', '.smd.xtc'))
        to_add.append({'pfn': smdpfn,
                       'scope': scope,
                       'name': smdname,
                       'run': run_number})
    return to_add
def setUp(self):
    """Resolve the VO and instantiate every client the suite needs."""
    if config_get_bool('common', 'multi_vo', raise_exception=False, default=False):
        self.vo = {'vo': config_get('client', 'vo', raise_exception=False, default='tst')}
    else:
        self.vo = {}
    self.account_client = AccountClient()
    self.scope_client = ScopeClient()
    self.meta_client = MetaClient()
    self.did_client = DIDClient()
    self.replica_client = ReplicaClient()
    self.rse_client = RSEClient()
def test_client_temporary_dids():
    """ TMP DATA IDENTIFIERS (CLIENT): """
    client = DIDClient()
    # ten unique temporary objects on MOCK
    temporary_dids = [{'scope': 'mock',
                       'name': 'object_%s' % generate_uuid(),
                       'rse': 'MOCK',
                       'bytes': 1,
                       'path': None} for _ in range(10)]
    client.add_temporary_dids(dids=temporary_dids)
def setUp(self):
    """Resolve the VO and wire up upload/download clients with a debug logger."""
    if config_get_bool('common', 'multi_vo', raise_exception=False, default=False):
        self.vo = {'vo': config_get('client', 'vo', raise_exception=False, default='tst')}
    else:
        self.vo = {}
    logger = logging.getLogger('dlul_client')
    logger.addHandler(logging.StreamHandler())
    logger.setLevel(logging.DEBUG)
    self.client = Client()
    self.did_client = DIDClient()
    self.upload_client = UploadClient(_client=self.client, logger=logger)
    self.download_client = DownloadClient(client=self.client, logger=logger)
def test_list_dataset_replicas(self):
    """ REPLICA (CLIENT): List dataset replicas."""
    replica_client = ReplicaClient()
    rule_client = RuleClient()
    did_client = DIDClient()
    scope = 'mock'
    dataset = 'dataset_' + str(generate_uuid())
    did_client.add_dataset(scope=scope, name=dataset)
    rule_client.add_replication_rule(dids=[{'scope': scope, 'name': dataset}],
                                     account='root',
                                     copies=1,
                                     rse_expression='MOCK',
                                     grouping='DATASET')
    # exactly one replica is expected: the rule on MOCK
    replicas = list(replica_client.list_dataset_replicas(scope=scope, name=dataset))
    assert len(replicas) == 1
def setup(self):
    """Drop any cached root token and prepare clients for CLI tests."""
    try:
        remove('/tmp/.rucio_root/auth_token_root')
    except OSError as error:
        # errno 2 means the token file did not exist, which is fine
        if error.args[0] != 2:
            raise error
    self.marker = '$> '
    self.host = config_get('client', 'rucio_host')
    self.auth_host = config_get('client', 'auth_host')
    self.user = '******'
    self.def_rse = 'MOCK4'
    self.did_client = DIDClient()
    self.replica_client = ReplicaClient()
    self.account_client = AccountLimitClient()
    self.account_client.set_account_limit('root', self.def_rse, -1)
def cmsBlocksInContainer(self, container, scope='cms'):
    """Return the names of DATASET children of *container* (empty if the did is not a container)."""
    did_client = DIDClient(account=self.account, auth_type=self.auth_type)
    block_names = []
    response = did_client.get_did(scope=scope, name=container)
    if response['type'].upper() != 'CONTAINER':
        return block_names
    for item in did_client.list_content(scope=scope, name=container):
        if item['type'].upper() == 'DATASET':
            block_names.append(item['name'])
    return block_names
def setUpClass(cls): cls.sub_client = SubscriptionClient() cls.did_client = DIDClient() cls.projects = [ 'data12_900GeV', 'data12_8TeV', 'data13_900GeV', 'data13_8TeV' ] cls.pattern1 = r'(_tid|physics_(Muons|JetTauEtmiss|Egamma)\..*\.ESD|express_express(?!.*NTUP|.*\.ESD|.*RAW)|(physics|express)(?!.*NTUP).* \
def setup(self):
    """Register a fresh DATASET and detect DB support (requires Oracle >= 12, not sqlite)."""
    self.did_client = DIDClient()
    self.tmp_scope = 'mock'
    self.tmp_name = 'name_%s' % uuid()
    self.did_client.add_did(scope=self.tmp_scope, name=self.tmp_name, type="DATASET")
    self.implemented = True
    session = get_session()
    dialect = session.bind.dialect.name
    if dialect == 'oracle':
        oracle_version = int(session.connection().connection.version.split('.')[0])
        if oracle_version < 12:
            self.implemented = False
    elif dialect == 'sqlite':
        self.implemented = False
def reg_file_list(dids):
    """ add files for a run to rucio. attach the files to a run data set:
    filenames: <scope>:xtc.file.<fn>
    dataset: <scope>:xtc.runNNNNN
    """
    print("Add files to RUCIO")
    if not dids:
        print("No files to add")
        return 0
    dids_to_register = []
    scopes = set()
    for did in dids:
        entry = {'pfn': "file:{}".format(did['pfn']),
                 'bytes': os.path.getsize(did['pfn']),
                 'adler32': adler32(did['pfn']),
                 'name': did['name'],
                 'scope': did['scope']}
        dids_to_register.append(entry)
        scopes.add(did['scope'])
        print(entry)
    print(scopes)
    # all files must belong to a single scope
    if len(scopes) != 1:
        print("Wrong number of scopes", len(scopes))
        return 2
    # register files to xtc.files
    replica_client = ReplicaClient()
    replica_client.add_replicas('LCLS_REGD', dids_to_register)
    # add files to run dataset, creating the dataset on first sight of a run
    known_run_ds = {}
    did_client = DIDClient()
    for did in dids:
        run = did['run']
        scope = did['scope']
        if run not in known_run_ds:
            run_ds = "xtc.run%05d" % run
            try:
                did_client.get_did(scope, run_ds)
            except DataIdentifierNotFound:
                print("Create new dataset", run_ds)
                did_client.add_dataset(scope, run_ds)
                did_client.add_datasets_to_container(scope, 'xtc',
                                                     [{'scope': scope, 'name': run_ds}])
            known_run_ds[run] = run_ds
        ds = known_run_ds[run]
        try:
            did_client.attach_dids(scope, ds, [{'scope': scope, 'name': did['name']}])
        except FileAlreadyExists:
            print("File already exists", did['name'])
        else:
            print("attached", ds, did['name'])
def LIST_DIDS_WILDCARD(self, scope, wildcard):
    """Time a wildcard list_dids call as 'jdoe' and report monitor metrics.

    Fixed: Python 2 print statements converted to the print() function.
    :returns: dict with the number of datasets found.
    """
    jdoe_account = 'jdoe'
    client = DIDClient(account=jdoe_account)
    print('run with: ' + str(wildcard))
    start = time()
    with monitor.record_timer_block('jdoe.list_dids_wildcard'):
        dids = [did for did in client.list_dids(scope=scope, filters=wildcard, type='dataset')]
    duration = time() - start
    cnt = len(dids)
    print('got %d dids' % cnt)
    monitor.record_counter('jdoe.list_dids_wildcard.num_results', cnt)
    if cnt != 0:
        # average time per returned did
        monitor.record_counter('jdoe.list_dids_wildcard.time_per_did', duration / cnt)
    return {'no_datasets': cnt}
class TestNamingConventionCore(unittest.TestCase):
    ''' Class to test naming convention enforcement. '''

    def setUp(self):
        """ Constructor."""
        if config_get_bool('common', 'multi_vo', raise_exception=False, default=False):
            self.vo = {'vo': get_vo()}
        else:
            self.vo = {}
        self.did_client = DIDClient()

    def test_naming_convention(self):
        """ NAMING_CONVENTION(CORE): Add and validate naming convention."""
        conventions = {convention['scope']: convention['regexp']
                       for convention in list_naming_conventions()}
        scope = InternalScope('mock', **self.vo)
        if scope not in conventions:
            add_naming_convention(scope=scope,
                                  regexp=r'^(?P<project>mock)\.(?P<datatype>\w+)\.\w+$',
                                  convention_type=KeyType.DATASET)
        # a scope without a convention yields no metadata
        meta = validate_name(scope=InternalScope('mck', **self.vo),
                             name='mock.DESD.yipeeee', did_type='D')
        assert meta is None
        # a matching name is decomposed into its convention fields
        meta = validate_name(scope=scope, name='mock.DESD.yipeeee', did_type='D')
        assert meta == {u'project': 'mock', u'datatype': 'DESD'}
        with pytest.raises(InvalidObject):
            validate_name(scope=scope, name='mockyipeeee', did_type='D')
        # Register a dataset
        tmp_dataset = 'mock.AD.' + str(generate_uuid())
        with pytest.raises(InvalidObject):
            self.did_client.add_dataset(scope='mock', name=tmp_dataset,
                                        meta={'datatype': 'DESD'})
        with pytest.raises(InvalidObject):
            self.did_client.add_dataset(scope='mock', name=tmp_dataset)
        tmp_dataset = 'mock.AOD.' + str(generate_uuid())
        self.did_client.add_dataset(scope='mock', name=tmp_dataset)
        observed_datatype = self.did_client.get_metadata(scope='mock',
                                                         name=tmp_dataset)['datatype']
        assert observed_datatype == 'AOD'
        delete_naming_convention(scope=scope, convention_type=KeyType.DATASET)
def __init__(self, run, originrse=DEFAULT_ORIGIN_RSE, destrse=None, scope=DEFAULT_SCOPE,
             check=True, lifetime=None, dry_run=False, container=None):
    """
    :param dataset: Name of the PhEDEx dataset to synchronize with Rucio.
    :param pnn: PhEDEx node name to filter on for replica information.
    """
    self.run = run
    self.originrse = originrse
    self.destrse = destrse
    self.scope = scope
    self.check = check
    self.lifetime = lifetime
    self.dry_run = dry_run
    self.container = container
    self.rucio_datasets = {}
    self.run_files = {}
    self.existent_replica_files = {}
    self.url = ''
    self.gfal = Gfal2Context()
    self.run_Number = None
    self.get_run_Number()
    self.files_storage = {}
    self.get_global_url()
    self.didc = DIDClient()
    self.repc = ReplicaClient()
    self.rulesClient = RuleClient()
    # Right now obtaining the Metadata from the storage at WIPAC
    # Hopefully in the future from JADE
    # TODO
    self.get_run_Files()
    self.get_rucio_metadata()
    self.update_run_Files()
    self.get_files_metadata()
def setUpClass(cls): if config_get_bool('common', 'multi_vo', raise_exception=False, default=False): cls.vo = {'vo': get_vo()} else: cls.vo = {} cls.sub_client = SubscriptionClient() cls.did_client = DIDClient() cls.projects = ['data12_900GeV', 'data12_8TeV', 'data13_900GeV', 'data13_8TeV'] cls.pattern1 = r'(_tid|physics_(Muons|JetTauEtmiss|Egamma)\..*\.ESD|express_express(?!.*NTUP|.*\.ESD|.*RAW)|(physics|express)(?!.*NTUP).* \
def rucio_register(self, filenames):
    """Register *filenames* as replicas on self.rse and attach them to self.dataset."""
    files = []
    for filename in filenames:
        size = os.stat(str(filename)).st_size
        adler = adler32(str(filename))
        files.append({'scope': self.scope,
                      'name': str(filename.parts[-1]),
                      'bytes': size,
                      'adler32': adler,
                      'pfn': self.pfn + str(filename.parts[-1])})
    replica_client = ReplicaClient()
    replica_client.add_replicas(rse=self.rse, files=files)
    did_client = DIDClient()
    did_client.add_files_to_dataset(self.scope, self.dataset, files)
def setup(self):
    """Drop any cached root token, prepare clients, and tag MOCK4 as non-tape."""
    try:
        remove('/tmp/.rucio_root/auth_token_root')
    except OSError as error:
        # errno 2 means the token file did not exist, which is fine
        if error.args[0] != 2:
            raise error
    self.marker = '$> '
    self.host = config_get('client', 'rucio_host')
    self.auth_host = config_get('client', 'auth_host')
    self.user = '******'
    self.def_rse = 'MOCK4'
    self.did_client = DIDClient()
    self.replica_client = ReplicaClient()
    self.account_client = AccountLimitClient()
    self.account_client.set_account_limit('root', self.def_rse, -1)
    add_rse_attribute(self.def_rse, 'istape', 'False')
    self.upload_success_str = 'Successfully uploaded file %s'
def test_did_set_metadata_bulk_multi_client(testdid):
    """ DID (CLIENT) : Test setting metadata in bulk with multiple key-values """
    skip_without_json()
    testkeys = ['testkey' + str(i) for i in range(3)]
    testmeta = {key: key + 'value' for key in testkeys}
    # let two keys have the same value
    testmeta[testkeys[1]] = testmeta[testkeys[0]]
    didclient = DIDClient()
    # the client API takes the external scope representation
    external_testdid = testdid.copy()
    external_testdid['scope'] = testdid['scope'].external
    result = didclient.set_metadata_bulk(meta=testmeta, recursive=False, **external_testdid)
    assert result is True
    meta = get_metadata(plugin="ALL", **testdid)
    print('Metadata:', meta)
    for testkey in testkeys:
        assert testkey in meta and meta[testkey] == testmeta[testkey]
class TestDIDClients(object):

    def __init__(self):
        self.did_client = DIDClient()
        self.replica_client = ReplicaClient()

    def test_add_and_list_archive(self):
        """ ARCHIVE (CLIENT): Add files to archive and list the content."""
        scope, rse = 'mock', 'MOCK'
        archive_file = 'file_' + generate_uuid() + '.zip'
        # ten files to be placed inside the archive
        files = [{'scope': scope,
                  'name': 'lfn.%s' % str(generate_uuid()),
                  'bytes': 724963570,
                  'adler32': '0cc737eb',
                  'type': 'FILE',
                  'meta': {'guid': str(generate_uuid())}} for _ in range(10)]
        # register the archive itself as a replica
        self.replica_client.add_replicas(rse=rse,
                                         files=[{'scope': scope,
                                                 'name': archive_file,
                                                 'bytes': 1,
                                                 'adler32': '0cc737eb'}])
        self.did_client.add_files_to_archive(scope=scope, name=archive_file, files=files)
        content = list(self.did_client.list_archive_content(scope=scope, name=archive_file))
        assert_equal(len(content), 10)
def test_list_dataset_replicas_bulk(self):
    """ REPLICA (CLIENT): List dataset replicas bulk."""
    replica_client = ReplicaClient()
    rule_client = RuleClient()
    did_client = DIDClient()
    scope = 'mock'
    did1 = {'scope': scope, 'name': 'dataset_' + str(generate_uuid())}
    did_client.add_dataset(**did1)
    did2 = {'scope': scope, 'name': 'dataset_' + str(generate_uuid())}
    did_client.add_dataset(**did2)
    dids = [did1, did2]
    rule_client.add_replication_rule(dids=dids,
                                     account='root',
                                     copies=1,
                                     rse_expression='MOCK',
                                     grouping='DATASET')
    # malformed did dict must be rejected
    with pytest.raises(InvalidObject):
        replica_client.list_dataset_replicas_bulk(dids=[{'type': "I'm Different"}])
    replicas = list(replica_client.list_dataset_replicas_bulk(dids=dids))
    assert len(replicas) == 2
    for did in dids:
        def replica_contains_did(rep):
            return all(k in rep and did[k] == rep[k] for k in did)
        assert any(map(replica_contains_did, replicas)), \
            "%s must be in returned replicas" % (did, )
class TestReplicationRuleClient():

    @classmethod
    def setUpClass(cls):
        # Add test RSEs
        cls.rse1 = 'MOCK'
        cls.rse3 = 'MOCK3'
        cls.rse4 = 'MOCK4'
        cls.rse5 = 'MOCK5'
        cls.rse1_id = get_rse(cls.rse1).id
        cls.rse3_id = get_rse(cls.rse3).id
        cls.rse4_id = get_rse(cls.rse4).id
        cls.rse5_id = get_rse(cls.rse5).id
        # Add Tags
        cls.T1 = tag_generator()
        cls.T2 = tag_generator()
        add_rse_attribute(cls.rse1, cls.T1, True)
        add_rse_attribute(cls.rse3, cls.T1, True)
        add_rse_attribute(cls.rse4, cls.T2, True)
        add_rse_attribute(cls.rse5, cls.T1, True)
        # Add fake weights
        add_rse_attribute(cls.rse1, "fakeweight", 10)
        add_rse_attribute(cls.rse3, "fakeweight", 0)
        add_rse_attribute(cls.rse4, "fakeweight", 0)
        add_rse_attribute(cls.rse5, "fakeweight", 0)

    def setup(self):
        self.rule_client = RuleClient()
        self.did_client = DIDClient()
        self.subscription_client = SubscriptionClient()
        self.account_client = AccountClient()
        self.lock_client = LockClient()

    def test_add_rule(self):
        """ REPLICATION RULE (CLIENT): Add a replication rule """
        scope = 'mock'
        files = create_files(3, scope, self.rse1)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')
        ret = self.rule_client.add_replication_rule(dids=[{'scope': scope, 'name': dataset}],
                                                    account='jdoe', copies=2,
                                                    rse_expression=self.T1, grouping='NONE')
        assert_is_instance(ret, list)

    def test_delete_rule(self):
        """ REPLICATION RULE (CLIENT): Delete a replication rule """
        scope = 'mock'
        files = create_files(3, scope, self.rse1)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')
        rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe',
                           copies=1, rse_expression=self.rse1, grouping='NONE',
                           weight='fakeweight', lifetime=None, locked=False,
                           subscription_id=None)[0]
        ret = self.rule_client.delete_replication_rule(rule_id=rule_id)
        assert ret is True
        # a second delete of the same rule must fail
        assert_raises(RuleNotFound, self.rule_client.delete_replication_rule, rule_id)

    def test_list_rules_by_did(self):
        """ DID (CLIENT): List Replication Rules per DID """
        scope = 'mock'
        files = create_files(3, scope, self.rse1)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')
        rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe',
                             copies=1, rse_expression=self.rse1, grouping='NONE',
                             weight='fakeweight', lifetime=None, locked=False,
                             subscription_id=None)[0]
        rule_id_2 = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe',
                             copies=1, rse_expression=self.rse3, grouping='NONE',
                             weight='fakeweight', lifetime=None, locked=False,
                             subscription_id=None)[0]
        ret = self.did_client.list_did_rules(scope=scope, name=dataset)
        ids = [rule['id'] for rule in ret]
        assert_in(rule_id_1, ids)
        assert_in(rule_id_2, ids)

    def test_get_rule(self):
        """ REPLICATION RULE (CLIENT): Get Replication Rule by id """
        scope = 'mock'
        files = create_files(3, scope, self.rse1)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')
        ret = self.rule_client.add_replication_rule(dids=[{'scope': scope, 'name': dataset}],
                                                    account='jdoe', copies=2,
                                                    rse_expression=self.T1, grouping='NONE')
        get = self.rule_client.get_replication_rule(ret[0])
        assert ret[0] == get['id']

    def test_get_rule_by_account(self):
        """ ACCOUNT (CLIENT): Get Replication Rule by account """
        scope = 'mock'
        files = create_files(3, scope, self.rse1)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')
        ret = self.rule_client.add_replication_rule(dids=[{'scope': scope, 'name': dataset}],
                                                    account='jdoe', copies=2,
                                                    rse_expression=self.T1, grouping='NONE')
        get = self.account_client.list_account_rules('jdoe')
        rules = [rule['id'] for rule in get]
        assert_in(ret[0], rules)

    def test_locked_rule(self):
        """ REPLICATION RULE (CLIENT): Delete a locked replication rule"""
        scope = 'mock'
        files = create_files(3, scope, self.rse1)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')
        rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe',
                             copies=1, rse_expression=self.rse1, grouping='NONE',
                             weight='fakeweight', lifetime=None, locked=True,
                             subscription_id=None)[0]
        # deleting while locked is forbidden; unlock first, then delete
        assert_raises(AccessDenied, delete_rule, rule_id_1)
        self.rule_client.update_replication_rule(rule_id=rule_id_1, options={'locked': False})
        delete_rule(rule_id=rule_id_1)

    def test_dataset_lock(self):
        """ DATASETLOCK (CLIENT): Get a datasetlock for a specific dataset"""
        scope = 'mock'
        files = create_files(3, scope, self.rse1)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')
        rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe',
                             copies=1, rse_expression=self.rse1, grouping='DATASET',
                             weight='fakeweight', lifetime=None, locked=True,
                             subscription_id=None)[0]
        rule_ids = [lock['rule_id']
                    for lock in self.lock_client.get_dataset_locks(scope=scope, name=dataset)]
        assert_in(rule_id_1, rule_ids)

    def test_change_rule_lifetime(self):
        """ REPLICATION RULE (CLIENT): Change rule lifetime"""
        scope = 'mock'
        files = create_files(3, scope, self.rse1)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')
        rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe',
                             copies=1, rse_expression=self.rse1, grouping='DATASET',
                             weight='fakeweight', lifetime=150, locked=True,
                             subscription_id=None)[0]
        get = self.rule_client.get_replication_rule(rule_id_1)
        self.rule_client.update_replication_rule(rule_id_1, options={'lifetime': 10000})
        get2 = self.rule_client.get_replication_rule(rule_id_1)
        assert get['expires_at'] != get2['expires_at']
def setup(self):
    """Instantiate every client the replication-rule tests need."""
    self.rule_client = RuleClient()
    self.did_client = DIDClient()
    self.subscription_client = SubscriptionClient()
    self.account_client = AccountClient()
    self.lock_client = LockClient()
def setup(self):
    """Instantiate the DID, metadata, RSE and scope clients."""
    self.did_client = DIDClient()
    self.meta_client = MetaClient()
    self.rse_client = RSEClient()
    self.scope_client = ScopeClient()
from rucio.db import models
from rucio.db.session import read_session, transactional_session, stream_session
from rucio.db.constants import DIDType
from rucio.common import exception
from rucio.client.didclient import DIDClient
import sys

dc = DIDClient()

# remove client rucio path and use /opt/rucio/lib as server path
# sys.path.insert(0, '/opt/rucio/lib/')
# sys.path.remove('/opt/rucio/.venv/lib/python2.6/site-packages/rucio-0.2.5_15_g2289bda_dev1416935547-py2.6.egg')
# del sys.modules['rucio']
#
# import imp
#
# rucio = imp.load_source('rucio','/opt/rucio/lib/rucio/__init__.py')
# from rucio.api.did import get_did
# print(get_did('ams-user-chenghsi', 'd2'))
# dc.detach_dids('ams-user-chenghsi', 'd1', ['file3'])
# dc.non_attached('ams-user-chenghsi', [{'scope':'ams-user-chenghsi', 'name':'file1'}, {'scope':'ams-user-chenghsi', 'name':'file2'}])

# Python 3: print is a function, not a statement
print(dc.non_attached('twgrid-user-chenghsi',
                      [{'scope': 'ams-user-chenghsi', 'name': 'file1'},
                       {'scope': 'twgrid-user-chenghsi', 'name': 'file2'},
                       {'scope': 'twgrid-user-chenghsi', 'name': 'file6'},
                       {'scope': 'twgrid-user-chenghsi', 'name': 'file1'}]))


# @stream_session
@read_session
def non_attached(scope, names, session=None):
    """
    List data identifier contents.

    :param scope: The scope name.
    :param names: The data identifier name.
    :param session: The database session in use.
    """
''' This is a script to test the difference of list-dids and list-rules '''
from rucio.db import models
from rucio.db.session import read_session, transactional_session, stream_session
from rucio.db.constants import DIDType
from rucio.common import exception
from rucio.client.didclient import DIDClient
import sys

dc = DIDClient()

# del sys.modules['rucio']
#
# import imp
#
# rucio = imp.load_source('rucio','/opt/rucio/lib/rucio/__init__.py')

filename = sys.argv[1]
# Python 3: print is a function, not a statement
for x in dc.list_did_rules('ams-user-chenghsi', filename):
    print(x[u'name'])

# print(get_did('ams-user-chenghsi', 'd2'))
# dc.detach_dids('ams-user-chenghsi', 'd1', ['file3'])
# print(non_attached('ams-user-chenghsi', ['file1', 'file2', 'file3', 'file4']))
class TestDIDClients: def setup(self): self.account_client = AccountClient() self.scope_client = ScopeClient() self.meta_client = MetaClient() self.did_client = DIDClient() self.replica_client = ReplicaClient() self.rse_client = RSEClient() def test_list_dids(self): """ DATA IDENTIFIERS (CLIENT): List dids by pattern.""" tmp_scope = scope_name_generator() tmp_files = [] tmp_files.append('file_a_1%s' % generate_uuid()) tmp_files.append('file_a_2%s' % generate_uuid()) tmp_files.append('file_b_1%s' % generate_uuid()) tmp_rse = 'MOCK' self.scope_client.add_scope('jdoe', tmp_scope) for tmp_file in tmp_files: self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1L, '0cc737eb') results = [] for result in self.did_client.list_dids(tmp_scope, {'name': 'file\_a\_*'}, type='file'): results.append(result) assert_equal(len(results), 2) results = [] for result in self.did_client.list_dids(tmp_scope, {'name': 'file\_a\_1*'}, type='file'): results.append(result) assert_equal(len(results), 1) results = [] for result in self.did_client.list_dids(tmp_scope, {'name': 'file\__\_1*'}, type='file'): results.append(result) assert_equal(len(results), 2) results = [] for result in self.did_client.list_dids(tmp_scope, {'name': 'file*'}, type='file'): results.append(result) assert_equal(len(results), 3) results = [] for result in self.did_client.list_dids(tmp_scope, {'name': 'file*'}): results.append(result) assert_equal(len(results), 0) with assert_raises(UnsupportedOperation): self.did_client.list_dids(tmp_scope, {'name': 'file*'}, type='whateverytype') def test_list_by_metadata(self): """ DATA IDENTIFIERS (CLIENT): List did with metadata""" dsns = [] tmp_scope = 'mock' tmp_dsn1 = 'dsn_%s' % generate_uuid() dsns.append(tmp_dsn1) dataset_meta = {'project': 'data12_8TeV', 'run_number': 400000, 'stream_name': 'physics_CosmicCalo', 'prod_step': 'merge', 'datatype': 'NTUP_TRIG', 'version': 'f392_m920', } self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn1, meta=dataset_meta) 
tmp_dsn2 = 'dsn_%s' % generate_uuid() dsns.append(tmp_dsn2) dataset_meta['run_number'] = 400001 self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn2, meta=dataset_meta) tmp_dsn3 = 'dsn_%s' % generate_uuid() dsns.append(tmp_dsn3) dataset_meta['stream_name'] = 'physics_Egamma' dataset_meta['datatype'] = 'NTUP_SMWZ' self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn3, meta=dataset_meta) dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'version': 'f392_m920'}) results = [] for d in dids: results.append(d) for dsn in dsns: assert_in(dsn, results) dsns.remove(tmp_dsn1) dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'run_number': 400001}) results = [] for d in dids: results.append(d) for dsn in dsns: assert_in(dsn, results) dsns.remove(tmp_dsn2) dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'stream_name': 'physics_Egamma', 'datatype': 'NTUP_SMWZ'}) results = [] for d in dids: results.append(d) for dsn in dsns: assert_in(dsn, results) with assert_raises(KeyNotFound): self.did_client.list_dids(tmp_scope, {'NotReallyAKey': 'NotReallyAValue'}) def test_add_did(self): """ DATA IDENTIFIERS (CLIENT): Add, populate and list did content""" tmp_scope = 'mock' tmp_rse = 'MOCK' tmp_dsn = 'dsn_%s' % generate_uuid() # PFN example: rfio://castoratlas.cern.ch/castor/cern.ch/grid/atlas/tzero/xx/xx/xx/filename dataset_meta = {'project': 'data13_hip', 'run_number': 300000, 'stream_name': 'physics_CosmicCalo', 'prod_step': 'merge', 'datatype': 'NTUP_TRIG', 'version': 'f392_m927', } rules = [{'copies': 1, 'rse_expression': 'MOCK', 'account': 'root'}] with assert_raises(ScopeNotFound): self.did_client.add_dataset(scope='Nimportnawak', name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules) self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules) with assert_raises(DataIdentifierNotFound): 
self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn, files=[{'scope': tmp_scope, 'name': 'lfn.%(tmp_dsn)s.' % locals() + str(generate_uuid()), 'bytes': 724963570L, 'adler32': '0cc737eb'}, ]) files = [] for i in xrange(5): lfn = 'lfn.%(tmp_dsn)s.' % locals() + str(generate_uuid()) pfn = 'mock://localhost/tmp/rucio_rse/%(project)s/%(version)s/%(prod_step)s' % dataset_meta # it doesn't work with mock: TBF # pfn = 'srm://mock2.com:2880/pnfs/rucio/disk-only/scratchdisk/rucio_tests/%(project)s/%(version)s/%(prod_step)s' % dataset_meta pfn += '%(tmp_dsn)s/%(lfn)s' % locals() file_meta = {'guid': str(generate_uuid()), 'events': 10} files.append({'scope': tmp_scope, 'name': lfn, 'bytes': 724963570L, 'adler32': '0cc737eb', 'pfn': pfn, 'meta': file_meta}) rules = [{'copies': 1, 'rse_expression': 'CERN-PROD_TZERO', 'lifetime': timedelta(days=2)}] self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn, files=files, rse=tmp_rse) files = [] for i in xrange(5): lfn = '%(tmp_dsn)s.' 
% locals() + str(generate_uuid()) pfn = 'mock://localhost/tmp/rucio_rse/%(project)s/%(version)s/%(prod_step)s' % dataset_meta # it doesn't work with mock: TBF # pfn = 'srm://mock2.com:2880/pnfs/rucio/disk-only/scratchdisk/rucio_tests/%(project)s/%(version)s/%(prod_step)s' % dataset_meta pfn += '%(tmp_dsn)s/%(lfn)s' % locals() file_meta = {'guid': str(generate_uuid()), 'events': 100} files.append({'scope': tmp_scope, 'name': lfn, 'bytes': 724963570L, 'adler32': '0cc737eb', 'pfn': pfn, 'meta': file_meta}) rules = [{'copies': 1, 'rse_expression': 'CERN-PROD_TZERO', 'lifetime': timedelta(days=2)}] self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn, files=files, rse=tmp_rse) self.did_client.close(scope=tmp_scope, name=tmp_dsn) def test_attach_dids_to_dids(self): """ DATA IDENTIFIERS (CLIENT): Attach dids to dids""" tmp_scope = 'mock' tmp_rse = 'MOCK' nb_datasets = 5 nb_files = 5 attachments, dsns = list(), list() guid_to_query = None dsn = {} for i in xrange(nb_datasets): attachment = {} attachment['scope'] = tmp_scope attachment['name'] = 'dsn.%s' % str(generate_uuid()) attachment['rse'] = tmp_rse files = [] for i in xrange(nb_files): files.append({'scope': tmp_scope, 'name': 'lfn.%s' % str(generate_uuid()), 'bytes': 724963570L, 'adler32': '0cc737eb', 'meta': {'guid': str(generate_uuid()), 'events': 100}}) attachment['dids'] = files guid_to_query = files[0]['meta']['guid'] dsn = {'scope': tmp_scope, 'name': attachment['name']} dsns.append(dsn) attachments.append(attachment) self.did_client.add_datasets(dsns=dsns) self.did_client.attach_dids_to_dids(attachments=attachments) l = [i for i in self.did_client.get_dataset_by_guid(guid_to_query)] assert_equal([dsn], l) cnt_name = 'cnt_%s' % generate_uuid() self.did_client.add_container(scope='mock', name=cnt_name) with assert_raises(UnsupportedOperation): self.did_client.attach_dids_to_dids([{'scope': 'mock', 'name': cnt_name, 'rse': tmp_rse, 'dids': attachment['dids']}]) def test_add_dataset(self): """ DATA 
IDENTIFIERS (CLIENT): Add dataset """ tmp_scope = 'mock' tmp_dsn = 'dsn_%s' % generate_uuid() self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, meta={'project': 'data13_hip'}) did = self.did_client.get_did(tmp_scope, tmp_dsn) assert_equal(did['scope'], tmp_scope) assert_equal(did['name'], tmp_dsn) with assert_raises(DataIdentifierNotFound): self.did_client.get_did('i_dont_exist', 'neither_do_i') def test_add_datasets(self): """ DATA IDENTIFIERS (CLIENT): Bulk add datasets """ tmp_scope = 'mock' dsns = list() for i in xrange(500): tmp_dsn = {'name': 'dsn_%s' % generate_uuid(), 'scope': tmp_scope, 'meta': {'project': 'data13_hip'}} dsns.append(tmp_dsn) self.did_client.add_datasets(dsns) def test_exists(self): """ DATA IDENTIFIERS (CLIENT): Check if data identifier exists """ tmp_scope = 'mock' tmp_file = 'file_%s' % generate_uuid() tmp_rse = 'MOCK' self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1L, '0cc737eb') did = self.did_client.get_did(tmp_scope, tmp_file) assert_equal(did['scope'], tmp_scope) assert_equal(did['name'], tmp_file) with assert_raises(DataIdentifierNotFound): self.did_client.get_did('i_dont_exist', 'neither_do_i') def test_did_hierarchy(self): """ DATA IDENTIFIERS (CLIENT): Check did hierarchy rule """ account = 'jdoe' rse = 'MOCK' scope = scope_name_generator() file = ['file_%s' % generate_uuid() for i in range(10)] dst = ['dst_%s' % generate_uuid() for i in range(4)] cnt = ['cnt_%s' % generate_uuid() for i in range(4)] self.scope_client.add_scope(account, scope) for i in range(10): self.replica_client.add_replica(rse, scope, file[i], 1, '0cc737eb') for i in range(4): self.did_client.add_did(scope, dst[i], 'DATASET', statuses=None, meta=None, rules=None) for i in range(4): self.did_client.add_did(scope, cnt[i], 'CONTAINER', statuses=None, meta=None, rules=None) for i in range(4): self.did_client.add_files_to_dataset(scope, dst[i], [{'scope': scope, 'name': file[2 * i], 'bytes': 1L, 'adler32': '0cc737eb'}, {'scope': scope, 
'name': file[2 * i + 1], 'bytes': 1L, 'adler32': '0cc737eb'}]) self.did_client.add_containers_to_container(scope, cnt[1], [{'scope': scope, 'name': cnt[2]}, {'scope': scope, 'name': cnt[3]}]) self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': dst[2]}]) result = self.did_client.scope_list(scope, recursive=True) for r in result: pass # TODO: fix, fix, fix # if r['name'] == cnt[1]: # assert_equal(r['type'], 'container') # assert_equal(r['level'], 0) # if (r['name'] == cnt[0]) or (r['name'] == dst[0]) or (r['name'] == file[8]) or (r['name'] == file[9]): # assert_equal(r['level'], 0) # else: # assert_equal(r['level'], 1) def test_detach_did(self): """ DATA IDENTIFIERS (CLIENT): Detach dids from a did""" account = 'jdoe' rse = 'MOCK' scope = scope_name_generator() file = ['file_%s' % generate_uuid() for i in range(10)] dst = ['dst_%s' % generate_uuid() for i in range(4)] cnt = ['cnt_%s' % generate_uuid() for i in range(2)] self.scope_client.add_scope(account, scope) for i in range(10): self.replica_client.add_replica(rse, scope, file[i], 1L, '0cc737eb') for i in range(4): self.did_client.add_dataset(scope, dst[i], statuses=None, meta=None, rules=None) for i in range(2): self.did_client.add_container(scope, cnt[i], statuses=None, meta=None, rules=None) for i in range(4): self.did_client.add_files_to_dataset(scope, dst[i], [{'scope': scope, 'name': file[2 * i], 'bytes': 1L, 'adler32': '0cc737eb'}, {'scope': scope, 'name': file[2 * i + 1], 'bytes': 1L, 'adler32': '0cc737eb'}]) self.did_client.add_containers_to_container(scope, cnt[1], [{'scope': scope, 'name': dst[2]}, {'scope': scope, 'name': dst[3]}]) with assert_raises(UnsupportedOperation): self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': cnt[1]}]) self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': dst[2]}]) 
self.did_client.detach_dids(scope, cnt[0], [{'scope': scope, 'name': dst[1]}]) self.did_client.detach_dids(scope, dst[3], [{'scope': scope, 'name': file[6]}, {'scope': scope, 'name': file[7]}]) result = self.did_client.scope_list(scope, recursive=True) for r in result: if r['name'] == dst[1]: assert_equal(r['level'], 0) if r['type'] is 'file': if (r['name'] in file[6:9]): assert_equal(r['level'], 0) else: assert_not_equal(r['level'], 0) with assert_raises(UnsupportedOperation): self.did_client.detach_dids(scope=scope, name=cnt[0], dids=[{'scope': scope, 'name': cnt[0]}]) def test_scope_list(self): """ DATA IDENTIFIERS (CLIENT): Add, aggregate, and list data identifiers in a scope """ # create some dummy data self.tmp_accounts = ['jdoe' for i in xrange(3)] self.tmp_scopes = [scope_name_generator() for i in xrange(3)] self.tmp_rses = [rse_name_generator() for i in xrange(3)] self.tmp_files = ['file_%s' % generate_uuid() for i in xrange(3)] self.tmp_datasets = ['dataset_%s' % generate_uuid() for i in xrange(3)] self.tmp_containers = ['container_%s' % generate_uuid() for i in xrange(3)] # add dummy data to the catalogue for i in xrange(3): self.scope_client.add_scope(self.tmp_accounts[i], self.tmp_scopes[i]) self.rse_client.add_rse(self.tmp_rses[i]) self.replica_client.add_replica(self.tmp_rses[i], self.tmp_scopes[i], self.tmp_files[i], 1L, '0cc737eb') # put files in datasets for i in xrange(3): for j in xrange(3): files = [{'scope': self.tmp_scopes[j], 'name': self.tmp_files[j], 'bytes': 1L, 'adler32': '0cc737eb'}] self.did_client.add_dataset(self.tmp_scopes[i], self.tmp_datasets[j]) self.did_client.add_files_to_dataset(self.tmp_scopes[i], self.tmp_datasets[j], files)
####from client#### from rucio.client.replicaclient import ReplicaClient repCli = ReplicaClient() #did = 'ams-user-chenghsi:Acceptance_Form.jpg'.split(':') #did = 'ams-2011B-ISS.B620-pass4:1368923945.00000001.root' import sys from rucio.client.didclient import DIDClient didCli = DIDClient() from rucio.client.ruleclient import RuleClient ruleCli = RuleClient() from rucio.client.rseclient import RSEClient rseCli = RSEClient() from rucio.common import exception argv_file =str(sys.argv[1]) with open(argv_file, 'r') as dids: for did in dids: did = did.rstrip('\n') print did if ':' in did: did_list = did.split(':') scope = did_list[0] filename = did_list[1] else: filename = did scope = 'ams-user-testuser1' rse_name = 'TW-EOS00_AMS02DATADISK' adler32 = '' md5 = '' bytes = 0 #print 'before:' for x in repCli.list_replicas([{'scope': scope, 'name': filename}]):