class TestMetaDIDClient():
    """Tests for adding and reading metadata on a DID through the client.

    Modernized to plain ``assert`` statements (instead of the unmaintained
    ``nose.tools`` helpers) for consistency with the pytest-style duplicate
    of this class elsewhere in the file.
    """

    def setup(self):
        # nose-style per-test setup hook: build all clients used by the tests.
        self.did_client = DIDClient()
        self.meta_client = MetaClient()
        self.rse_client = RSEClient()
        self.scope_client = ScopeClient()

    def test_add_list_meta(self):
        """ META DID (CLIENTS): Add metadata to a data identifier"""
        # Add a scope
        tmp_scope = 'mock'
        # Add a dataset
        tmp_dataset = 'dsn_%s' % uuid()
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dataset)
        # Add a key
        key = 'project'
        value = 'data13_hip'
        self.did_client.set_metadata(scope=tmp_scope, name=tmp_dataset, key=key, value=value)
        # Read the metadata back and confirm the pair round-trips.
        meta = self.did_client.get_metadata(scope=tmp_scope, name=tmp_dataset)
        assert key in meta
        assert meta[key] == value
def test_list_datasets_per_rse(self):
    """ REPLICA (CLIENT): List datasets in RSE."""
    rule_client = RuleClient()
    did_client = DIDClient()
    scope_name = 'mock'
    dataset_name = 'dataset_' + str(generate_uuid())
    did_client.add_dataset(scope=scope_name, name=dataset_name)
    # Pin one copy of the dataset on MOCK so it appears in the per-RSE listing.
    rule_client.add_replication_rule(
        dids=[{'scope': scope_name, 'name': dataset_name}],
        account='root',
        copies=1,
        rse_expression='MOCK',
        grouping='DATASET')
    mock_rse_id = get_rse_id(rse='MOCK', **self.vo)
    listing = list_datasets_per_rse(
        rse_id=mock_rse_id,
        filters={'scope': InternalScope(scope_name, **self.vo), 'name': 'data*'})
    found = list(listing)
    assert found != []
class TestMetaDIDClient(unittest.TestCase):
    """ Test the metadata DID client """

    def setUp(self):
        """ Setup the Test Case """
        self.did_client = DIDClient()
        self.meta_client = MetaClient()
        self.rse_client = RSEClient()
        self.scope_client = ScopeClient()

    def test_add_list_meta(self):
        """ META DID (CLIENTS): Add metadata to a data identifier"""
        # Create a dataset inside the mock scope.
        dataset_scope = 'mock'
        dataset_name = 'dsn_%s' % uuid()
        self.did_client.add_dataset(scope=dataset_scope, name=dataset_name)
        # Attach one key/value pair, then read the metadata back.
        meta_key, meta_value = 'project', 'data13_hip'
        self.did_client.set_metadata(scope=dataset_scope, name=dataset_name,
                                     key=meta_key, value=meta_value)
        fetched = self.did_client.get_metadata(scope=dataset_scope, name=dataset_name)
        assert meta_key in fetched
        assert fetched[meta_key] == meta_value
def test_list_dataset_replicas_bulk(self):
    """ REPLICA (CLIENT): List dataset replicas bulk."""
    replica_client = ReplicaClient()
    rule_client = RuleClient()
    did_client = DIDClient()
    scope = 'mock'
    # Create two datasets and collect their DID descriptors.
    dids = []
    for _ in range(2):
        did = {'scope': scope, 'name': 'dataset_' + str(generate_uuid())}
        did_client.add_dataset(**did)
        dids.append(did)
    rule_client.add_replication_rule(dids=dids, account='root', copies=1,
                                     rse_expression='MOCK', grouping='DATASET')
    # A malformed DID descriptor must be rejected up front.
    with pytest.raises(InvalidObject):
        replica_client.list_dataset_replicas_bulk(dids=[{'type': "I'm Different"}])
    replicas = list(replica_client.list_dataset_replicas_bulk(dids=dids))
    assert len(replicas) == 2
    # Every requested DID must match at least one returned replica
    # on all of its keys.
    for did in dids:
        matches = [rep for rep in replicas
                   if all(key in rep and rep[key] == did[key] for key in did)]
        assert matches, "%s must be in returned replicas" % (did, )
def test_list_dataset_replicas(self):
    """ REPLICA (CLIENT): List dataset replicas."""
    replica_client = ReplicaClient()
    rule_client = RuleClient()
    did_client = DIDClient()
    scope = 'mock'
    dataset = 'dataset_' + str(generate_uuid())
    did_client.add_dataset(scope=scope, name=dataset)
    # A single-copy rule on MOCK should produce exactly one dataset replica.
    rule_client.add_replication_rule(
        dids=[{'scope': scope, 'name': dataset}],
        account='root',
        copies=1,
        rse_expression='MOCK',
        grouping='DATASET')
    found = list(replica_client.list_dataset_replicas(scope=scope, name=dataset))
    assert len(found) == 1
class TestNamingConventionCore(unittest.TestCase):
    ''' Class to test naming convention enforcement. '''

    def setUp(self):
        """ Constructor."""
        # Multi-VO deployments need the VO passed through to scope helpers;
        # single-VO deployments use an empty kwargs dict.
        if config_get_bool('common', 'multi_vo', raise_exception=False, default=False):
            self.vo = {'vo': get_vo()}
        else:
            self.vo = {}
        self.did_client = DIDClient()

    def test_naming_convention(self):
        """ NAMING_CONVENTION(CORE): Add and validate naming convention."""
        # Snapshot the registered conventions so ours is only added once.
        conventions = {}
        for convention in list_naming_conventions():
            conventions[convention['scope']] = convention['regexp']
        scope = InternalScope('mock', **self.vo)
        if scope not in conventions:
            add_naming_convention(scope=scope,
                                  regexp=r'^(?P<project>mock)\.(?P<datatype>\w+)\.\w+$',
                                  convention_type=KeyType.DATASET)
        # A scope with no registered convention validates to None.
        meta = validate_name(scope=InternalScope('mck', **self.vo), name='mock.DESD.yipeeee', did_type='D')
        assert meta is None
        # A matching name returns the regexp's named groups as metadata.
        meta = validate_name(scope=scope, name='mock.DESD.yipeeee', did_type='D')
        assert meta == {u'project': 'mock', u'datatype': 'DESD'}
        # A name that does not match the convention is rejected.
        with pytest.raises(InvalidObject):
            validate_name(scope=scope, name='mockyipeeee', did_type='D')

        # Register a dataset
        tmp_dataset = 'mock.AD.' + str(generate_uuid())
        with pytest.raises(InvalidObject):
            self.did_client.add_dataset(scope='mock', name=tmp_dataset, meta={'datatype': 'DESD'})
        with pytest.raises(InvalidObject):
            self.did_client.add_dataset(scope='mock', name=tmp_dataset)
        # A conforming name is accepted and the datatype is extracted
        # from the name into the DID metadata.
        tmp_dataset = 'mock.AOD.' + str(generate_uuid())
        self.did_client.add_dataset(scope='mock', name=tmp_dataset)
        observed_datatype = self.did_client.get_metadata(scope='mock', name=tmp_dataset)['datatype']
        assert observed_datatype == 'AOD'
        # Clean up so other tests see the original convention set.
        delete_naming_convention(scope=scope, convention_type=KeyType.DATASET)
class TestNamingConventionCore:
    ''' Class to test naming convention enforcement. '''

    def __init__(self):
        """ Constructor."""
        self.did_client = DIDClient()

    def test_naming_convention(self):
        """ NAMING_CONVENTION(CORE): Add and validate naming convention."""
        # Snapshot the registered conventions so ours is only added once.
        conventions = {}
        for convention in list_naming_conventions():
            conventions[convention['scope']] = convention['regexp']
        scope = InternalScope('mock')
        if scope not in conventions:
            # Raw string: '\.' and '\w' are invalid escape sequences in a
            # plain string literal (DeprecationWarning since Python 3.6).
            add_naming_convention(scope=scope,
                                  regexp=r'^(?P<project>mock)\.(?P<datatype>\w+)\.\w+$',
                                  convention_type=KeyType.DATASET)
        # A scope with no registered convention validates to None.
        meta = validate_name(scope=InternalScope('mck'), name='mock.DESD.yipeeee', did_type='D')
        assert_equal(meta, None)
        # A matching name returns the regexp's named groups as metadata.
        meta = validate_name(scope=scope, name='mock.DESD.yipeeee', did_type='D')
        assert_equal(meta, {u'project': 'mock', u'datatype': 'DESD'})
        # A name that does not match the convention is rejected.
        with assert_raises(InvalidObject):
            validate_name(scope=scope, name='mockyipeeee', did_type='D')

        # Register a dataset
        tmp_dataset = 'mock.AD.' + str(generate_uuid())
        with assert_raises(InvalidObject):
            self.did_client.add_dataset(scope='mock', name=tmp_dataset, meta={'datatype': 'DESD'})
        with assert_raises(InvalidObject):
            self.did_client.add_dataset(scope='mock', name=tmp_dataset)
        # A conforming name is accepted and the datatype is extracted
        # from the name into the DID metadata.
        tmp_dataset = 'mock.AOD.' + str(generate_uuid())
        self.did_client.add_dataset(scope='mock', name=tmp_dataset)
        observed_datatype = self.did_client.get_metadata(scope='mock', name=tmp_dataset)['datatype']
        assert_equal(observed_datatype, 'AOD')
        # Clean up so other tests see the original convention set
        # (raw string for the same escape-sequence reason as above).
        delete_naming_convention(scope=scope,
                                 regexp=r'(?P<project>mock)\.(\w+)$',
                                 convention_type=KeyType.DATASET)
class TestDidMetaClient(unittest.TestCase):
    """Tests for DID metadata set/get/delete and metadata-based DID listing.

    JSON-plugin assertions are skipped when the database backend does not
    support the JSON metadata store (``self.json_implemented``).
    """

    def setUp(self):
        self.did_client = DIDClient()
        self.tmp_scope = 'mock'
        self.session = get_session()
        # Whether the JSON metadata plugin is usable on this DB backend.
        self.json_implemented = JSONDidMeta().json_implemented(self.session)

    def tearDown(self):
        self.session.commit()  # pylint: disable=no-member

    def test_set_metadata(self):
        """ META (CLIENTS) : Adds a fully set json column to a did, updates if some keys present """
        tmp_name = 'name_%s' % generate_uuid()
        self.did_client.add_did(scope=self.tmp_scope, name=tmp_name, type="DATASET")

        # Test JSON case
        if self.json_implemented:
            value1 = "value_" + str(generate_uuid())
            value2 = "value_" + str(generate_uuid())
            value3 = "value_" + str(generate_uuid())
            self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key="key1", value=value1)
            self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key="key2", value=value2)
            self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key="key3", value=value3)
            # All three keys must be visible through the JSON plugin.
            metadata = self.did_client.get_metadata(scope=self.tmp_scope, name=tmp_name, plugin="JSON")
            assert len(metadata) == 3
            assert metadata['key1'] == value1
            assert metadata['key2'] == value2
            assert metadata['key3'] == value3

        # Test DID_COLUMNS case
        self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key='project', value='data12_12TeV')
        assert self.did_client.get_metadata(scope=self.tmp_scope, name=tmp_name)['project'] == 'data12_12TeV'

    def test_delete_metadata(self):
        """ META (CLIENTS) : Deletes metadata key """
        tmp_name = 'name_%s' % generate_uuid()
        self.did_client.add_did(scope=self.tmp_scope, name=tmp_name, type="DATASET")

        # Test JSON case
        if self.json_implemented:
            value1 = "value_" + str(generate_uuid())
            value2 = "value_" + str(generate_uuid())
            value3 = "value_" + str(generate_uuid())
            self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key="key1", value=value1)
            self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key="key2", value=value2)
            self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key="key3", value=value3)
            # Deleting key2 must leave key1 and key3 untouched.
            self.did_client.delete_metadata(scope=self.tmp_scope, name=tmp_name, key='key2')
            metadata = self.did_client.get_metadata(scope=self.tmp_scope, name=tmp_name, plugin="JSON")
            assert len(metadata) == 2
            assert metadata['key1'] == value1
            assert metadata['key3'] == value3
            # Deleting a key that was never set raises KeyNotFound.
            with pytest.raises(KeyNotFound):
                self.did_client.delete_metadata(scope=self.tmp_scope, name=tmp_name, key="key9")

    def test_get_metadata(self):
        """ META (CLIENTS) : Gets all metadata for the given did """
        tmp_name = 'name_%s' % generate_uuid()
        self.did_client.add_did(scope=self.tmp_scope, name=tmp_name, type="DATASET")

        # Test JSON case
        if self.json_implemented:
            value1 = "value_" + str(generate_uuid())
            value2 = "value_" + str(generate_uuid())
            self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key="key1", value=value1)
            self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key="key2", value=value2)
            metadata = self.did_client.get_metadata(scope=self.tmp_scope, name=tmp_name, plugin="JSON")
            assert len(metadata) == 2
            assert metadata['key1'] == value1
            assert metadata['key2'] == value2

        # Test DID_COLUMNS case
        self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key='project', value='data12_14TeV')
        assert self.did_client.get_metadata(scope=self.tmp_scope, name=tmp_name)['project'] == 'data12_14TeV'

        # Test Mixed case: plugin="ALL" merges JSON keys and DID columns.
        if self.json_implemented:
            all_metadata = self.did_client.get_metadata(scope=self.tmp_scope, name=tmp_name, plugin="ALL")
            assert all_metadata['key1'] == value1
            assert all_metadata['key2'] == value2
            assert all_metadata['project'] == "data12_14TeV"

    def test_list_dids_extended(self):
        """ META (CLIENTS) : Get all dids matching the values of the provided metadata keys """

        # Test did Columns use case
        dsns = []
        tmp_scope = 'mock'
        tmp_dsn1 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn1)
        dataset_meta = {'project': 'data12_8TeV',
                        'run_number': 400000,
                        'stream_name': 'physics_CosmicCalo',
                        'prod_step': 'merge',
                        'datatype': 'NTUP_TRIG',
                        'version': 'f392_m920',
                        }
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn1, meta=dataset_meta)
        tmp_dsn2 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn2)
        dataset_meta['run_number'] = 400001
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn2, meta=dataset_meta)
        tmp_dsn3 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn3)
        dataset_meta['stream_name'] = 'physics_Egamma'
        dataset_meta['datatype'] = 'NTUP_SMWZ'
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn3, meta=dataset_meta)

        # All three datasets share project and version.
        dids = self.did_client.list_dids_extended(tmp_scope, {'project': 'data12_8TeV', 'version': 'f392_m920'})
        results = []
        for d in dids:
            results.append(d)
        for dsn in dsns:
            assert dsn in results
        dsns.remove(tmp_dsn1)

        # Only dsn2 and dsn3 carry run_number 400001.
        dids = self.did_client.list_dids_extended(tmp_scope, {'project': 'data12_8TeV', 'run_number': 400001})
        results = []
        for d in dids:
            results.append(d)
        for dsn in dsns:
            assert dsn in results
        dsns.remove(tmp_dsn2)

        # Only dsn3 matches the Egamma/NTUP_SMWZ combination.
        dids = self.did_client.list_dids_extended(tmp_scope, {'project': 'data12_8TeV', 'stream_name': 'physics_Egamma', 'datatype': 'NTUP_SMWZ'})
        results = []
        for d in dids:
            results.append(d)
        for dsn in dsns:
            assert dsn in results

        # Test JSON use case
        if self.json_implemented:
            did1 = 'name_%s' % generate_uuid()
            did2 = 'name_%s' % generate_uuid()
            did3 = 'name_%s' % generate_uuid()
            did4 = 'name_%s' % generate_uuid()

            key1 = 'key_1_%s' % generate_uuid()
            key2 = 'key_2_%s' % generate_uuid()
            key3 = 'key_3_%s' % generate_uuid()

            value1 = 'value_1_%s' % generate_uuid()
            value2 = 'value_2_%s' % generate_uuid()
            value3 = 'value_3_%s' % generate_uuid()
            value_not_1 = 'value_not_1_%s' % generate_uuid()
            value_not_2 = 'value_not_1_%s' % generate_uuid()
            value_unique = 'value_unique_%s' % generate_uuid()

            self.did_client.add_did(scope=tmp_scope, name=did1, type="DATASET")
            self.did_client.add_did(scope=tmp_scope, name=did2, type="DATASET")
            self.did_client.add_did(scope=tmp_scope, name=did3, type="DATASET")
            self.did_client.add_did(scope=tmp_scope, name=did4, type="DATASET")

            # did1: key1=value1, key2=value2
            # did2: key1=value1, key2=value_not_2, key3=value3
            # did3: key1=value_not_1, key2=value2, key3=value3
            # did4: key1=value1, key2=value2, key3=value_unique
            self.did_client.set_metadata(scope=tmp_scope, name=did1, key=key1, value=value1)
            self.did_client.set_metadata(scope=tmp_scope, name=did1, key=key2, value=value2)
            self.did_client.set_metadata(scope=tmp_scope, name=did2, key=key1, value=value1)
            self.did_client.set_metadata(scope=tmp_scope, name=did2, key=key2, value=value_not_2)
            self.did_client.set_metadata(scope=tmp_scope, name=did2, key=key3, value=value3)
            self.did_client.set_metadata(scope=tmp_scope, name=did3, key=key1, value=value_not_1)
            self.did_client.set_metadata(scope=tmp_scope, name=did3, key=key2, value=value2)
            self.did_client.set_metadata(scope=tmp_scope, name=did3, key=key3, value=value3)
            self.did_client.set_metadata(scope=tmp_scope, name=did4, key=key1, value=value1)
            self.did_client.set_metadata(scope=tmp_scope, name=did4, key=key2, value=value2)
            self.did_client.set_metadata(scope=tmp_scope, name=did4, key=key3, value=value_unique)

            # Key not there
            dids = self.did_client.list_dids_extended(tmp_scope, {'key45': 'value'})
            results = []
            for d in dids:
                results.append(d)
            assert len(results) == 0

            # Value not there
            dids = self.did_client.list_dids_extended(tmp_scope, {key1: 'value_not_there'})
            results = []
            for d in dids:
                results.append(d)
            assert len(results) == 0

            # key1 = value1
            dids = self.did_client.list_dids_extended(tmp_scope, {key1: value1})
            results = []
            for d in dids:
                results.append(d)
            assert len(results) == 3
            assert did1 in results
            assert did2 in results
            assert did4 in results

            # key1, key2
            dids = self.did_client.list_dids_extended(tmp_scope, {key1: value1, key2: value2})
            results = []
            for d in dids:
                results.append(d)
            assert len(results) == 2
            assert did1 in results
            assert did4 in results

            # key1, key2, key 3
            dids = self.did_client.list_dids_extended(tmp_scope, {key1: value1, key2: value2, key3: value3})
            results = []
            for d in dids:
                results.append(d)
            assert len(results) == 0

            # key3 = unique value
            dids = self.did_client.list_dids_extended(tmp_scope, {key3: value_unique})
            results = []
            for d in dids:
                results.append(d)
            assert len(results) == 1
            assert did4 in results
class TestDIDClients:
    """Client-level DID tests: listing by name pattern and by metadata.

    Fixed for Python 3: the Python-2 long literals ``1L`` and ``724963570L``
    are syntax errors on Python 3 and are replaced with plain ints
    (ints are unbounded on Python 3, so behavior is unchanged).
    """

    def setup(self):
        # nose-style per-test setup hook: build all clients used by the tests.
        self.account_client = AccountClient()
        self.scope_client = ScopeClient()
        self.meta_client = MetaClient()
        self.did_client = DIDClient()
        self.replica_client = ReplicaClient()
        self.rse_client = RSEClient()

    def test_list_dids(self):
        """ DATA IDENTIFIERS (CLIENT): List dids by pattern."""
        tmp_scope = scope_name_generator()
        tmp_files = []
        tmp_files.append('file_a_1%s' % generate_uuid())
        tmp_files.append('file_a_2%s' % generate_uuid())
        tmp_files.append('file_b_1%s' % generate_uuid())
        tmp_rse = 'MOCK'
        self.scope_client.add_scope('jdoe', tmp_scope)
        for tmp_file in tmp_files:
            # py3 fix: 1 instead of the py2 long literal 1L.
            self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb')

        # Wildcard patterns match the expected subsets of the three files.
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': 'file_a_*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 2)
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': 'file_a_1*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 1)
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': 'file_*_1*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 2)
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': 'file*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 3)

        # NOTE(review): with this time filter and no explicit type the call is
        # expected to return nothing — presumably list_dids defaults to a
        # non-file DID type here; confirm against the client implementation.
        results = []
        filters = {'name': 'file*', 'created_after': datetime.utcnow() - timedelta(hours=1)}
        for result in self.did_client.list_dids(tmp_scope, filters):
            results.append(result)
        assert_equal(len(results), 0)

        # An unknown DID type must be rejected.
        with assert_raises(UnsupportedOperation):
            self.did_client.list_dids(tmp_scope, {'name': 'file*'}, type='whateverytype')

    def test_list_by_metadata(self):
        """ DATA IDENTIFIERS (CLIENT): List did with metadata"""
        dsns = []
        tmp_scope = 'mock'
        tmp_dsn1 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn1)

        dataset_meta = {'project': 'data12_8TeV',
                        'run_number': 400000,
                        'stream_name': 'physics_CosmicCalo',
                        'prod_step': 'merge',
                        'datatype': 'NTUP_TRIG',
                        'version': 'f392_m920',
                        }
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn1, meta=dataset_meta)
        tmp_dsn2 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn2)
        dataset_meta['run_number'] = 400001
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn2, meta=dataset_meta)
        tmp_dsn3 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn3)
        dataset_meta['stream_name'] = 'physics_Egamma'
        dataset_meta['datatype'] = 'NTUP_SMWZ'
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn3, meta=dataset_meta)

        # All three datasets share project and version.
        dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'version': 'f392_m920'})
        results = []
        for d in dids:
            results.append(d)
        for dsn in dsns:
            assert_in(dsn, results)
        dsns.remove(tmp_dsn1)

        # Only dsn2 and dsn3 carry run_number 400001.
        dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'run_number': 400001})
        results = []
        for d in dids:
            results.append(d)
        for dsn in dsns:
            assert_in(dsn, results)
        dsns.remove(tmp_dsn2)

        # Only dsn3 matches the Egamma/NTUP_SMWZ combination.
        dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'stream_name': 'physics_Egamma', 'datatype': 'NTUP_SMWZ'})
        results = []
        for d in dids:
            results.append(d)
        for dsn in dsns:
            assert_in(dsn, results)

        # Filtering on an unknown metadata key must raise.
        with assert_raises(KeyNotFound):
            self.did_client.list_dids(tmp_scope, {'NotReallyAKey': 'NotReallyAValue'})

    def test_add_did(self):
        """ DATA IDENTIFIERS (CLIENT): Add, populate, list did content and create a sample"""
        tmp_scope = 'mock'
        tmp_rse = 'MOCK'
        tmp_dsn = 'dsn_%s' % generate_uuid()
        set_account_limit('root', get_rse_id('MOCK'), -1)
        set_account_limit('root', get_rse_id('CERN-PROD_TZERO'), -1)

        # PFN example: rfio://castoratlas.cern.ch/castor/cern.ch/grid/atlas/tzero/xx/xx/xx/filename
        dataset_meta = {'project': 'data13_hip',
                        'run_number': 300000,
                        'stream_name': 'physics_CosmicCalo',
                        'prod_step': 'merge',
                        'datatype': 'NTUP_TRIG',
                        'version': 'f392_m927',
                        }
        rules = [{'copies': 1, 'rse_expression': 'MOCK', 'account': 'root'}]

        # Adding a dataset in a non-existent scope must fail.
        with assert_raises(ScopeNotFound):
            self.did_client.add_dataset(scope='Nimportnawak', name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules)

        # py3 fix: 724963570 instead of the py2 long literal 724963570L.
        files = [{'scope': tmp_scope, 'name': 'lfn.%(tmp_dsn)s.' % locals() + str(generate_uuid()), 'bytes': 724963570, 'adler32': '0cc737eb'}, ]
        # NOTE(review): this snippet appears truncated here — the remainder of
        # test_add_did is not visible in this view.
class DatasetInjector(object):
    """ General Class for injecting a cms dataset in rucio

    Fixes two log-message typos: ``Expected %s git %s`` -> ``Expected %s got %s``
    and ``Initalization`` -> ``Initialization``. No logic changes.
    """

    def __init__(self, dataset, site, rse=None, scope=DEFAULT_SCOPE,
                 uuid=None, check=True, lifetime=None, dry_run=False):
        """Collect block/file metadata and prepare the Rucio clients.

        :param dataset: CMS dataset name to inject.
        :param site: PhEDEx node whose block replicas are considered.
        :param rse: target RSE; defaults to the site name.
        :param scope: Rucio scope for all created DIDs.
        :param uuid: if given, only the block with this uuid is injected.
        :param check: verify size/checksum on storage before registering replicas.
        :param lifetime: lifetime for created containers/datasets.
        :param dry_run: print intended actions without contacting Rucio.
        """
        self.dataset = dataset
        self.site = site
        if rse is None:
            rse = site
        self.rse = rse
        self.scope = scope
        self.uuid = uuid
        self.check = check
        self.lifetime = lifetime
        self.dry_run = dry_run
        self.blocks = []
        self.url = ''
        self.getmetadata()
        self.get_global_url()
        self.didc = DIDClient()
        self.repc = ReplicaClient()
        self.gfal = Gfal2Context()

    def get_file_url(self, lfn):
        """ Return the rucio url of a file. """
        return self.url + '/' + lfn

    def get_global_url(self):
        """ Return the base path of the rucio url """
        print("Getting parameters for rse %s" % self.rse)
        rse = rsemgr.get_rse_info(self.rse)
        proto = rse['protocols'][0]
        schema = proto['scheme']
        prefix = proto['prefix'] + '/' + self.scope.replace('.', '/')
        # srm endpoints prepend their web-service path to the prefix.
        if schema == 'srm':
            prefix = proto['extended_attributes']['web_service_path'] + prefix
        url = schema + '://' + proto['hostname']
        if proto['port'] != 0:
            url = url + ':' + str(proto['port'])
        self.url = url + prefix
        print("Determined base url %s" % self.url)

    def getmetadata(self):
        """ Gets the list of blocks at a site, their files and their metadata """
        print("Initializing... getting the list of blocks and files")
        blocks = das_go_client("block dataset=%s site=%s system=phedex" % (self.dataset, self.site))
        for item in blocks:
            uuid = item['block'][0]['name'].split('#')[1]
            # Inject every block unless a specific block uuid was requested.
            if (self.uuid is None) or (uuid == self.uuid):
                block = {'name': item['block'][0]['name'], 'files': []}
                files = das_go_client("file block=%s site=%s system=phedex" % (block['name'], self.site))
                for item2 in files:
                    cksum = re.match(r"adler32:([^,]+)", item2['file'][0]['checksum'])
                    cksum = cksum.group(0).split(':')[1]
                    # Normalize to a zero-padded 8-digit hex adler32.
                    cksum = "{0:0{1}x}".format(int(cksum, 16), 8)
                    block['files'].append({'name': item2['file'][0]['name'],
                                           'checksum': cksum,
                                           'size': item2['file'][0]['size']})
                self.blocks.append(block)
        print("Initialization done.")

    def register(self):
        """ Create the container, the datasets and attach them to the container. """
        print("Registering...")
        self.register_container()
        for block in self.blocks:
            self.register_dataset(block['name'])
            for filemd in block['files']:
                self.register_replica(filemd)
                self.attach_file(filemd['name'], block['name'])
        print("All datasets, blocks and files registered")

    def register_container(self):
        """ Create the container. """
        print("registering container %s" % self.dataset)
        if self.dry_run:
            print(' Dry run only. Not creating container.')
            return
        try:
            self.didc.add_container(scope=self.scope, name=self.dataset, lifetime=self.lifetime)
        except DataIdentifierAlreadyExists:
            print(" Container %s already exists" % self.dataset)

    def register_dataset(self, block):
        """ Create the dataset and attach them to teh container """
        print("registering dataset %s" % block)
        if self.dry_run:
            print(' Dry run only. Not creating dataset.')
            return
        try:
            self.didc.add_dataset(scope=self.scope, name=block, lifetime=self.lifetime)
        except DataIdentifierAlreadyExists:
            print(" Dataset %s already exists" % block)
        try:
            print("attaching dataset %s to container %s" % (block, self.dataset))
            self.didc.attach_dids(scope=self.scope, name=self.dataset,
                                  dids=[{'scope': self.scope, 'name': block}])
        except RucioException:
            # Attachment is idempotent from our point of view.
            print(" Dataset already attached")

    def attach_file(self, lfn, block):
        """ Attach the file to the container """
        if self.dry_run:
            print(' Dry run only. Not attaching files.')
            return
        try:
            print("attaching file %s" % lfn)
            self.didc.attach_dids(scope=self.scope, name=block,
                                  dids=[{'scope': self.scope, 'name': lfn}])
        except FileAlreadyExists:
            print("File already attached")

    def register_replica(self, filemd):
        """ Register file replica. """
        print("registering file %s" % filemd['name'])
        if self.dry_run:
            print(' Dry run only. Not registering files.')
            return
        if self.check:
            self.check_storage(filemd)
        if not self.check_replica(filemd['name']):
            self.repc.add_replicas(rse=self.rse,
                                   files=[{'scope': self.scope,
                                           'name': filemd['name'],
                                           'adler32': filemd['checksum'],
                                           'bytes': filemd['size'],
                                           'pfn': self.get_file_url(filemd['name'])}])

    def check_storage(self, filemd):
        """ Check size and checksum of a file on storage """
        url = self.get_file_url(filemd['name'])
        print("checking url %s" % url)
        try:
            size = self.gfal.stat(str(url)).st_size
            checksum = self.gfal.checksum(str(url), 'adler32')
            print("got size and checksum of file: pfn=%s size=%s checksum=%s" % (url, size, checksum))
        except GError:
            print("no file found at %s" % url)
            return False
        if str(size) != str(filemd['size']):
            print("wrong size for file %s. Expected %s got %s" % (filemd['name'], filemd['size'], size))
            return False
        if str(checksum) != str(filemd['checksum']):
            # typo fix: 'git' -> 'got'
            print("wrong checksum for file %s. Expected %s got %s" % (filemd['name'], filemd['checksum'], checksum))
            return False
        print("size and checksum are ok")
        return True

    def check_replica(self, lfn):
        """ Check if a replica of the given file at the site already exists. """
        print("checking if file %s with scope %s has already a replica at %s" % (lfn, self.scope, self.rse))
        replicas = list(self.repc.list_replicas([{'scope': self.scope, 'name': lfn}]))
        if replicas:
            replicas = replicas[0]
            if 'rses' in replicas:
                if self.rse in replicas['rses']:
                    print("file %s with scope %s has already a replica at %s" % (lfn, self.scope, self.rse))
                    return True
        print("no existing replicas")
        return False
class TestReplicaClients:
    """Client and REST tests for replica management: bad/suspicious
    declaration, listing, state updates and deletion."""

    def setup(self):
        # nose-style per-test setup hook.
        self.replica_client = ReplicaClient()
        self.did_client = DIDClient()

    def test_add_list_bad_replicas(self):
        """ REPLICA (CLIENT): Add bad replicas"""
        tmp_scope = 'mock'
        nbfiles = 5
        # Adding replicas to deterministic RSE
        files = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb', 'meta': {'events': 10}} for i in range(nbfiles)]
        rse_info = rsemgr.get_rse_info('MOCK')
        rse_id1 = rse_info['id']
        self.replica_client.add_replicas(rse='MOCK', files=files)

        # Listing replicas on deterministic RSE
        replicas, list_rep = [], []
        for replica in self.replica_client.list_replicas(dids=[{'scope': f['scope'], 'name': f['name']} for f in files], schemes=['srm'], unavailable=True):
            replicas.extend(replica['rses']['MOCK'])
            list_rep.append(replica)
        # An empty dict result means every PFN was declared bad successfully.
        r = self.replica_client.declare_bad_file_replicas(replicas, 'This is a good reason')
        assert_equal(r, {})
        # Every declared replica must show up in the bad-replica listing.
        bad_replicas = list_bad_replicas()
        nbbadrep = 0
        for rep in list_rep:
            for badrep in bad_replicas:
                if badrep['rse_id'] == rse_id1:
                    if badrep['scope'] == rep['scope'] and badrep['name'] == rep['name']:
                        nbbadrep += 1
        assert_equal(len(replicas), nbbadrep)

        # Run necromancer once
        run(threads=1, bulk=10000, once=True)

        # Try to attach a lost file
        tmp_dsn = 'dataset_%s' % generate_uuid()
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn)
        with assert_raises(UnsupportedOperation):
            self.did_client.add_files_to_dataset(tmp_scope, name=tmp_dsn, files=files, rse='MOCK')

        # Adding replicas to non-deterministic RSE
        files = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb',
                  'pfn': 'srm://mock2.com:8443/srm/managerv2?SFN=/rucio/tmpdisk/rucio_tests/%s/%s' % (tmp_scope, generate_uuid()), 'meta': {'events': 10}} for i in range(nbfiles)]
        rse_info = rsemgr.get_rse_info('MOCK2')
        rse_id2 = rse_info['id']
        self.replica_client.add_replicas(rse='MOCK2', files=files)

        # Listing replicas on non-deterministic RSE
        replicas, list_rep = [], []
        for replica in self.replica_client.list_replicas(dids=[{'scope': f['scope'], 'name': f['name']} for f in files], schemes=['srm'], unavailable=True):
            replicas.extend(replica['rses']['MOCK2'])
            list_rep.append(replica)
        print(replicas, list_rep)
        r = self.replica_client.declare_bad_file_replicas(replicas, 'This is a good reason')
        print(r)
        assert_equal(r, {})
        bad_replicas = list_bad_replicas()
        nbbadrep = 0
        for rep in list_rep:
            for badrep in bad_replicas:
                if badrep['rse_id'] == rse_id2:
                    if badrep['scope'] == rep['scope'] and badrep['name'] == rep['name']:
                        nbbadrep += 1
        assert_equal(len(replicas), nbbadrep)

        # Now adding non-existing bad replicas
        files = ['srm://mock2.com/rucio/tmpdisk/rucio_tests/%s/%s' % (tmp_scope, generate_uuid()), ]
        r = self.replica_client.declare_bad_file_replicas(files, 'This is a good reason')
        output = ['%s Unknown replica' % rep for rep in files]
        assert_equal(r, {'MOCK2': output})

    def test_add_suspicious_replicas(self):
        """ REPLICA (CLIENT): Add suspicious replicas"""
        tmp_scope = 'mock'
        nbfiles = 5
        # Adding replicas to deterministic RSE
        files = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb', 'meta': {'events': 10}} for i in range(nbfiles)]
        self.replica_client.add_replicas(rse='MOCK', files=files)

        # Listing replicas on deterministic RSE
        replicas = []
        list_rep = []
        for replica in self.replica_client.list_replicas(dids=[{'scope': f['scope'], 'name': f['name']} for f in files], schemes=['srm'], unavailable=True):
            replicas.extend(replica['rses']['MOCK'])
            list_rep.append(replica)
        r = self.replica_client.declare_suspicious_file_replicas(replicas, 'This is a good reason')
        assert_equal(r, {})

        # Adding replicas to non-deterministic RSE
        files = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb',
                  'pfn': 'srm://mock2.com:8443/srm/managerv2?SFN=/rucio/tmpdisk/rucio_tests/%s/%s' % (tmp_scope, generate_uuid()), 'meta': {'events': 10}} for i in range(nbfiles)]
        self.replica_client.add_replicas(rse='MOCK2', files=files)

        # Listing replicas on non-deterministic RSE
        replicas = []
        list_rep = []
        for replica in self.replica_client.list_replicas(dids=[{'scope': f['scope'], 'name': f['name']} for f in files], schemes=['srm'], unavailable=True):
            replicas.extend(replica['rses']['MOCK2'])
            list_rep.append(replica)
        r = self.replica_client.declare_suspicious_file_replicas(replicas, 'This is a good reason')
        assert_equal(r, {})

        # Now adding non-existing bad replicas
        files = ['srm://mock2.com/rucio/tmpdisk/rucio_tests/%s/%s' % (tmp_scope, generate_uuid()), ]
        r = self.replica_client.declare_suspicious_file_replicas(files, 'This is a good reason')
        output = ['%s Unknown replica' % rep for rep in files]
        assert_equal(r, {'MOCK2': output})

    def test_bad_replica_methods_for_UI(self):
        """ REPLICA (REST): Test the listing of bad and suspicious replicas """
        # Authenticate once and reuse the token for the replica endpoints.
        mw = []
        headers1 = {'X-Rucio-Account': 'root', 'X-Rucio-Username': '******', 'X-Rucio-Password': '******'}
        r1 = TestApp(auth_app.wsgifunc(*mw)).get('/userpass', headers=headers1, expect_errors=True)
        assert_equal(r1.status, 200)
        token = str(r1.header('X-Rucio-Auth-Token'))
        headers2 = {'X-Rucio-Auth-Token': str(token)}

        # Total number of bad+suspicious replicas (no state filter).
        data = dumps({})
        r2 = TestApp(rep_app.wsgifunc(*mw)).get('/bad/states', headers=headers2, params=data, expect_errors=True)
        assert_equal(r2.status, 200)
        tot_files = []
        for line in r2.body.split('\n'):
            if line != '':
                tot_files.append(dumps(line))
        nb_tot_files = len(tot_files)

        # Bad only.
        data = dumps({'state': 'B'})
        r2 = TestApp(rep_app.wsgifunc(*mw)).get('/bad/states', headers=headers2, params=data, expect_errors=True)
        assert_equal(r2.status, 200)
        tot_bad_files = []
        for line in r2.body.split('\n'):
            if line != '':
                tot_bad_files.append(dumps(line))
        nb_tot_bad_files1 = len(tot_bad_files)

        # Suspicious only (with PFNs).
        data = dumps({'state': 'S', 'list_pfns': 'True'})
        r2 = TestApp(rep_app.wsgifunc(*mw)).get('/bad/states', headers=headers2, params=data, expect_errors=True)
        assert_equal(r2.status, 200)
        tot_suspicious_files = []
        for line in r2.body.split('\n'):
            if line != '':
                tot_suspicious_files.append(dumps(line))
        nb_tot_suspicious_files = len(tot_suspicious_files)

        # Bad + suspicious must add up to the unfiltered total.
        assert_equal(nb_tot_files, nb_tot_bad_files1 + nb_tot_suspicious_files)

        # Nothing can have been declared bad later than tomorrow.
        tomorrow = datetime.utcnow() + timedelta(days=1)
        data = dumps({'state': 'B', 'younger_than': tomorrow.isoformat()})
        r2 = TestApp(rep_app.wsgifunc(*mw)).get('/bad/states', headers=headers2, params=data, expect_errors=True)
        assert_equal(r2.status, 200)
        tot_bad_files = []
        for line in r2.body.split('\n'):
            if line != '':
                tot_bad_files.append(dumps(line))
        nb_tot_bad_files = len(tot_bad_files)
        assert_equal(nb_tot_bad_files, 0)

        # The per-RSE summary must account for the same number of bad files.
        data = dumps({})
        r2 = TestApp(rep_app.wsgifunc(*mw)).get('/bad/summary', headers=headers2, params=data, expect_errors=True)
        assert_equal(r2.status, 200)
        nb_tot_bad_files2 = 0
        for line in r2.body.split('\n'):
            if line != '':
                line = loads(line)
                nb_tot_bad_files2 += int(line['BAD'])
        assert_equal(nb_tot_bad_files1, nb_tot_bad_files2)

    def test_add_list_replicas(self):
        """ REPLICA (CLIENT): Add, change state and list file replicas """
        tmp_scope = 'mock'
        nbfiles = 5

        files1 = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb', 'meta': {'events': 10}} for i in range(nbfiles)]
        self.replica_client.add_replicas(rse='MOCK', files=files1)

        files2 = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb', 'meta': {'events': 10}} for i in range(nbfiles)]
        self.replica_client.add_replicas(rse='MOCK3', files=files2)

        replicas = [r for r in self.replica_client.list_replicas(dids=[{'scope': i['scope'], 'name': i['name']} for i in files1])]
        assert_equal(len(replicas), len(files1))

        replicas = [r for r in self.replica_client.list_replicas(dids=[{'scope': i['scope'], 'name': i['name']} for i in files2], schemes=['file'])]
        assert_equal(len(replicas), 5)

        replicas = [r for r in self.replica_client.list_replicas(dids=[{'scope': i['scope'], 'name': i['name']} for i in files2], schemes=['srm'])]
        assert_equal(len(replicas), 5)

        # Replicas added in state 'U' (unavailable) are hidden by default.
        files3 = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb', 'state': 'U', 'meta': {'events': 10}} for i in range(nbfiles)]
        self.replica_client.add_replicas(rse='MOCK3', files=files3)
        replicas = [r for r in self.replica_client.list_replicas(dids=[{'scope': i['scope'], 'name': i['name']} for i in files3], schemes=['file'])]
        for i in range(nbfiles):
            assert_equal(replicas[i]['rses'], {})

        # Flip them to 'A' (available); they become visible with unavailable=True.
        files4 = []
        for file in files3:
            file['state'] = 'A'
            files4.append(file)
        self.replica_client.update_replicas_states('MOCK3', files=files4)
        replicas = [r for r in self.replica_client.list_replicas(dids=[{'scope': i['scope'], 'name': i['name']} for i in files3], schemes=['file'], unavailable=True)]
        assert_equal(len(replicas), 5)
        for i in range(nbfiles):
            assert_in('MOCK3', replicas[i]['rses'])

    def test_delete_replicas(self):
        """ REPLICA (CLIENT): Add and delete file replicas """
        tmp_scope = 'mock'
        nbfiles = 5
        files = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb', 'meta': {'events': 10}} for i in range(nbfiles)]
        self.replica_client.add_replicas(rse='MOCK', files=files)
        # Plain accounts are not allowed to delete replicas directly.
        with assert_raises(AccessDenied):
            self.replica_client.delete_replicas(rse='MOCK', files=files)
class TestBinRucio(unittest.TestCase):
    """ Test the rucio/rucio-admin command-line clients by shelling out. """

    def setUp(self):
        """ Setup the Test Case """
        try:
            remove('/tmp/.rucio_root/auth_token_root')
        except OSError as e:
            if e.args[0] != 2:
                raise e
        self.marker = '$> '
        self.host = config_get('client', 'rucio_host')
        self.auth_host = config_get('client', 'auth_host')
        self.user = '******'
        self.def_rse = 'MOCK4'
        self.did_client = DIDClient()
        self.replica_client = ReplicaClient()
        self.account_client = AccountLimitClient()
        # unlimited quota on the default RSE so uploads always succeed
        self.account_client.set_account_limit('root', self.def_rse, -1)
        add_rse_attribute(self.def_rse, 'istape', 'False')
        self.upload_success_str = 'Successfully uploaded file %s'

    def test_rucio_version(self):
        """CLIENT(USER): Rucio version"""
        cmd = 'bin/rucio --version'
        exitcode, out, err = execute(cmd)
        assert 'rucio' in err

    def test_rucio_ping(self):
        """CLIENT(USER): Rucio ping"""
        cmd = 'rucio --host %s ping' % self.host
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)

    def test_add_account(self):
        """CLIENT(ADMIN): Add account"""
        tmp_val = account_name_generator()
        cmd = 'rucio-admin account add %s' % tmp_val
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        assert 'Added new account: %s\n' % tmp_val == out

    def test_whoami(self):
        """CLIENT(USER): Rucio whoami"""
        cmd = 'rucio whoami'
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        assert re.search('.*account.*', out) is not None

    def test_add_identity(self):
        """CLIENT(ADMIN): Add identity"""
        tmp_val = account_name_generator()
        cmd = 'rucio-admin account add %s' % tmp_val
        exitcode, out, err = execute(cmd)
        assert 'Added new account: %s\n' % tmp_val == out
        cmd = 'rucio-admin identity add --account %s --type GSS --id [email protected] --email [email protected]' % tmp_val
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        assert 'Added new identity to account: [email protected]%s\n' % tmp_val == out

    def test_del_identity(self):
        """CLIENT(ADMIN): Test del identity"""
        tmp_acc = account_name_generator()
        # create account
        cmd = 'rucio-admin account add %s' % tmp_acc
        exitcode, out, err = execute(cmd)
        # add identity to account
        cmd = 'rucio-admin identity add --account %s --type GSS --id [email protected] --email [email protected]' % tmp_acc
        exitcode, out, err = execute(cmd)
        # delete identity from account
        cmd = 'rucio-admin identity delete --account %s --type GSS --id [email protected]' % tmp_acc
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert 'Deleted identity: [email protected]\n' == out
        # list identities for account
        cmd = 'rucio-admin account list-identities %s' % (tmp_acc)
        print(self.marker + cmd)
        print(cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert '' == out

    def test_attributes(self):
        """CLIENT(ADMIN): Add/List/Delete attributes"""
        tmp_acc = account_name_generator()
        # create account
        cmd = 'rucio-admin account add %s' % tmp_acc
        exitcode, out, err = execute(cmd)
        # add attribute to the account
        cmd = 'rucio-admin account add-attribute {0} --key test_attribute_key --value true'.format(
            tmp_acc)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        assert 0 == exitcode
        # list attributes
        cmd = 'rucio-admin account list-attributes {0}'.format(tmp_acc)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        assert 0 == exitcode
        # delete attribute to the account
        cmd = 'rucio-admin account delete-attribute {0} --key test_attribute_key'.format(
            tmp_acc)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        assert 0 == exitcode

    def test_add_scope(self):
        """CLIENT(ADMIN): Add scope"""
        tmp_scp = scope_name_generator()
        tmp_acc = account_name_generator()
        cmd = 'rucio-admin account add %s' % tmp_acc
        exitcode, out, err = execute(cmd)
        cmd = 'rucio-admin scope add --account %s --scope %s' % (tmp_acc,
                                                                 tmp_scp)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert 'Added new scope to account: %s-%s\n' % (tmp_scp, tmp_acc) == out

    def test_add_rse(self):
        """CLIENT(ADMIN): Add RSE"""
        tmp_val = rse_name_generator()
        cmd = 'rucio-admin rse add %s' % tmp_val
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        assert 'Added new deterministic RSE: %s\n' % tmp_val == out

    def test_add_rse_nondet(self):
        """CLIENT(ADMIN): Add non-deterministic RSE"""
        tmp_val = rse_name_generator()
        cmd = 'rucio-admin rse add --non-deterministic %s' % tmp_val
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        assert 'Added new non-deterministic RSE: %s\n' % tmp_val == out

    def test_list_rses(self):
        """CLIENT(ADMIN): List RSEs"""
        tmp_val = rse_name_generator()
        cmd = 'rucio-admin rse add %s' % tmp_val
        exitcode, out, err = execute(cmd)
        cmd = 'rucio-admin rse list'
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        assert re.search('.*%s.*' % tmp_val, out) is not None

    def test_upload(self):
        """CLIENT(USER): Upload"""
        tmp_val = rse_name_generator()
        cmd = 'rucio-admin rse add %s' % tmp_val
        exitcode, out, err = execute(cmd)
        cmd = 'rucio upload'
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)

    def test_download(self):
        """CLIENT(USER): Download"""
        cmd = 'rucio download'
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)

    def test_upload_file(self):
        """CLIENT(USER): Rucio upload files"""
        tmp_file1 = file_generator()
        tmp_file2 = file_generator()
        tmp_file3 = file_generator()
        cmd = 'rucio -v upload --rse {0} --scope {1} {2} {3} {4}'.format(
            self.def_rse, self.user, tmp_file1, tmp_file2, tmp_file3)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        remove(tmp_file1)
        remove(tmp_file2)
        remove(tmp_file3)
        assert (self.upload_success_str % path.basename(tmp_file1)) in out
        assert (self.upload_success_str % path.basename(tmp_file2)) in out
        assert (self.upload_success_str % path.basename(tmp_file3)) in out

    def test_upload_file_guid(self):
        """CLIENT(USER): Rucio upload file with guid"""
        tmp_file1 = file_generator()
        tmp_guid = generate_uuid()
        cmd = 'rucio -v upload --rse {0} --guid {1} --scope {2} {3}'.format(
            self.def_rse, tmp_guid, self.user, tmp_file1)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        remove(tmp_file1)
        assert (self.upload_success_str % path.basename(tmp_file1)) in out

    def test_upload_repeated_file(self):
        """CLIENT(USER): Rucio upload repeated files"""
        # One of the files to upload is already catalogued but was removed
        tmp_file1 = file_generator()
        tmp_file2 = file_generator()
        tmp_file3 = file_generator()
        tmp_file1_name = path.basename(tmp_file1)
        cmd = 'rucio -v upload --rse {0} --scope {1} {2}'.format(
            self.def_rse, self.user, tmp_file1)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        # get the rule for the file
        cmd = "rucio list-rules {0}:{1} | grep {0}:{1} | cut -f1 -d\ ".format(
            self.user, tmp_file1_name)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        rule = out
        # delete the file from the catalog
        cmd = "rucio delete-rule {0}".format(rule)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # delete the physical file
        cmd = "find /tmp/rucio_rse/ -name {0} |xargs rm".format(tmp_file1_name)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        cmd = 'rucio -v upload --rse {0} --scope {1} {2} {3} {4}'.format(
            self.def_rse, self.user, tmp_file1, tmp_file2, tmp_file3)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        remove(tmp_file1)
        remove(tmp_file2)
        remove(tmp_file3)
        assert (self.upload_success_str % tmp_file1_name) in out

    def test_upload_repeated_file_dataset(self):
        """CLIENT(USER): Rucio upload repeated files to dataset"""
        # One of the files to upload is already in the dataset
        tmp_file1 = file_generator()
        tmp_file2 = file_generator()
        tmp_file3 = file_generator()
        tmp_file1_name = path.basename(tmp_file1)
        tmp_file3_name = path.basename(tmp_file3)
        tmp_dsn = self.user + ':DSet' + rse_name_generator(
        )  # something like mock:DSetMOCK_S0M37HING
        # Adding files to a new dataset
        cmd = 'rucio -v upload --rse {0} --scope {1} {2} {3}'.format(
            self.def_rse, self.user, tmp_file1, tmp_dsn)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        # upload the files to the dataset
        cmd = 'rucio -v upload --rse {0} --scope {1} {2} {3} {4} {5}'.format(
            self.def_rse, self.user, tmp_file1, tmp_file2, tmp_file3, tmp_dsn)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        remove(tmp_file1)
        remove(tmp_file2)
        remove(tmp_file3)
        # searching for the file in the new dataset
        cmd = 'rucio list-files {0}'.format(tmp_dsn)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        # tmp_file1 must be in the dataset
        assert re.search("{0}:{1}".format(self.user, tmp_file1_name), out) is not None
        # tmp_file3 must be in the dataset
        assert re.search("{0}:{1}".format(self.user, tmp_file3_name), out) is not None

    def test_upload_file_dataset(self):
        """CLIENT(USER): Rucio upload files to dataset"""
        tmp_file1 = file_generator()
        tmp_file2 = file_generator()
        tmp_file3 = file_generator()
        tmp_file1_name = path.basename(tmp_file1)
        tmp_dsn = self.user + ':DSet' + rse_name_generator(
        )  # something like mock:DSetMOCK_S0M37HING
        # Adding files to a new dataset
        cmd = 'rucio -v upload --rse {0} --scope {1} {2} {3} {4} {5}'.format(
            self.def_rse, self.user, tmp_file1, tmp_file2, tmp_file3, tmp_dsn)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        remove(tmp_file1)
        remove(tmp_file2)
        remove(tmp_file3)
        # searching for the file in the new dataset
        cmd = 'rucio list-files {0}'.format(tmp_dsn)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        assert re.search("{0}:{1}".format(self.user, tmp_file1_name), out) is not None

    def test_upload_adds_md5digest(self):
        """CLIENT(USER): Upload Checksums"""
        # user has a file to upload
        filename = file_generator()
        tmp_file1_name = path.basename(filename)
        file_md5 = md5(filename)
        # user uploads file
        cmd = 'rucio -v upload --rse {0} --scope {1} {2}'.format(
            self.def_rse, self.user, filename)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        # When inspecting the metadata of the new file the user finds the md5 checksum
        meta = self.did_client.get_metadata(scope=self.user,
                                            name=tmp_file1_name)
        assert 'md5' in meta
        assert meta['md5'] == file_md5
        remove(filename)

    def test_create_dataset(self):
        """CLIENT(USER): Rucio add dataset"""
        tmp_name = self.user + ':DSet' + rse_name_generator(
        )  # something like mock:DSetMOCK_S0M37HING
        cmd = 'rucio add-dataset ' + tmp_name
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search('Added ' + tmp_name, out) is not None

    def test_add_files_to_dataset(self):
        """CLIENT(USER): Rucio add files to dataset"""
        tmp_file1 = file_generator()
        tmp_file2 = file_generator()
        tmp_dataset = self.user + ':DSet' + rse_name_generator(
        )  # something like mock:DSetMOCK_S0M37HING
        # add files
        cmd = 'rucio upload --rse {0} --scope {1} {2} {3}'.format(
            self.def_rse, self.user, tmp_file1, tmp_file2)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # create dataset
        cmd = 'rucio add-dataset ' + tmp_dataset
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # add files to dataset
        cmd = 'rucio attach {0} {3}:{1} {3}:{2}'.format(
            tmp_dataset, tmp_file1[5:], tmp_file2[5:],
            self.user)  # triming '/tmp/' from filename
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # find the added files
        cmd = 'rucio list-files ' + tmp_dataset
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(tmp_file1[5:], out) is not None

    def test_download_file(self):
        """CLIENT(USER): Rucio download files"""
        tmp_file1 = file_generator()
        # add files
        cmd = 'rucio upload --rse {0} --scope {1} {2}'.format(
            self.def_rse, self.user, tmp_file1)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # download files
        cmd = 'rucio -v download --dir /tmp {0}:{1}'.format(
            self.user, tmp_file1[5:])  # triming '/tmp/' from filename
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # search for the files with ls
        cmd = 'ls /tmp/'  # search in /tmp/
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(tmp_file1[5:], out) is not None
        tmp_file1 = file_generator()
        # add files
        cmd = 'rucio upload --rse {0} --scope {1} {2}'.format(
            self.def_rse, self.user, tmp_file1)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # download files with a wildcarded name
        cmd = 'rucio -v download --dir /tmp {0}:{1}'.format(
            self.user, tmp_file1[5:-2] + '*')  # triming '/tmp/' from filename
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # search for the files with ls
        cmd = 'ls /tmp/'  # search in /tmp/
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(tmp_file1[5:], out) is not None
        # best-effort cleanup of the scope download directory
        try:
            for i in listdir('data13_hip'):
                unlink('data13_hip/%s' % i)
            rmdir('data13_hip')
        except Exception:
            pass

    def test_download_filter(self):
        """CLIENT(USER): Rucio download with filter options"""
        # Use filter option to download file with wildcarded name
        tmp_file1 = file_generator()
        uuid = generate_uuid()
        cmd = 'rucio upload --rse {0} --scope {1} --guid {2} {3}'.format(
            self.def_rse, self.user, uuid, tmp_file1)
        exitcode, out, err = execute(cmd)
        print(out, err)
        remove(tmp_file1)
        wrong_guid = generate_uuid()
        cmd = 'rucio -v download --dir /tmp {0}:{1} --filter guid={2}'.format(
            self.user, '*', wrong_guid)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        cmd = 'ls /tmp/{0}'.format(self.user)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # a non-matching guid filter must not download the file
        assert re.search(tmp_file1[5:], out) is None
        cmd = 'rucio -v download --dir /tmp {0}:{1} --filter guid={2}'.format(
            self.user, '*', uuid)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        cmd = 'ls /tmp/{0}'.format(self.user)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(tmp_file1[5:], out) is not None
        # Only use filter option to download file
        tmp_file1 = file_generator()
        uuid = generate_uuid()
        cmd = 'rucio upload --rse {0} --scope {1} --guid {2} {3}'.format(
            self.def_rse, self.user, uuid, tmp_file1)
        exitcode, out, err = execute(cmd)
        print(out, err)
        remove(tmp_file1)
        wrong_guid = generate_uuid()
        cmd = 'rucio -v download --dir /tmp --scope {0} --filter guid={1}'.format(
            self.user, wrong_guid)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        cmd = 'ls /tmp/{0}'.format(self.user)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(tmp_file1[5:], out) is None
        cmd = 'rucio -v download --dir /tmp --scope {0} --filter guid={1}'.format(
            self.user, uuid)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        cmd = 'ls /tmp/{0}'.format(self.user)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(tmp_file1[5:], out) is not None
        # Only use filter option to download dataset
        tmp_file1 = file_generator()
        dataset_name = 'dataset_%s' % generate_uuid()
        cmd = 'rucio upload --rse {0} --scope {1} {2} {1}:{3}'.format(
            self.def_rse, self.user, tmp_file1, dataset_name)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        remove(tmp_file1)
        # force a known dataset length directly in the DB so the filter can match it
        db_session = session.get_session()
        db_session.query(models.DataIdentifier).filter_by(
            scope=self.user, name=dataset_name).one().length = 15
        db_session.commit()
        cmd = 'rucio download --dir /tmp --scope {0} --filter length=100'.format(
            self.user)
        exitcode, out, err = execute(cmd)
        cmd = 'ls /tmp/{0}'.format(dataset_name)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(tmp_file1[5:], out) is None
        cmd = 'rucio download --dir /tmp --scope {0} --filter length=15'.format(
            self.user)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        cmd = 'ls /tmp/{0}'.format(dataset_name)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(tmp_file1[5:], out) is not None
        # Use filter option to download dataset with wildcarded name
        tmp_file1 = file_generator()
        dataset_name = 'dataset_%s' % generate_uuid()
        cmd = 'rucio upload --rse {0} --scope {1} {2} {1}:{3}'.format(
            self.def_rse, self.user, tmp_file1, dataset_name)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        remove(tmp_file1)
        db_session = session.get_session()
        db_session.query(models.DataIdentifier).filter_by(
            scope=self.user, name=dataset_name).one().length = 1
        db_session.commit()
        cmd = 'rucio download --dir /tmp {0}:{1} --filter length=10'.format(
            self.user, dataset_name[0:-1] + '*')
        exitcode, out, err = execute(cmd)
        cmd = 'ls /tmp/{0}'.format(dataset_name)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(tmp_file1[5:], out) is None
        cmd = 'rucio download --dir /tmp {0}:{1} --filter length=1'.format(
            self.user, dataset_name[0:-1] + '*')
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        cmd = 'ls /tmp/{0}'.format(dataset_name)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(tmp_file1[5:], out) is not None

    def test_download_succeeds_md5only(self):
        """CLIENT(USER): Rucio download succeeds MD5 only"""
        # user has a file to upload
        filename = file_generator()
        file_md5 = md5(filename)
        filesize = stat(filename).st_size
        lfn = {
            'name': filename[5:],
            'scope': self.user,
            'bytes': filesize,
            'md5': file_md5
        }
        # user uploads file
        self.replica_client.add_replicas(files=[lfn], rse=self.def_rse)
        rse_settings = rsemgr.get_rse_info(self.def_rse)
        protocol = rsemgr.create_protocol(rse_settings, 'write')
        protocol.connect()
        pfn = protocol.lfns2pfns(lfn).values()[0]
        protocol.put(filename[5:], pfn, filename[:5])
        protocol.close()
        remove(filename)
        # download files
        cmd = 'rucio -v download --dir /tmp {0}:{1}'.format(
            self.user, filename[5:])
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # search for the files with ls
        cmd = 'ls /tmp/{0}'.format(self.user)  # search in /tmp/
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(filename[5:], out) is not None
        # best-effort cleanup of the scope download directory
        try:
            for i in listdir('data13_hip'):
                unlink('data13_hip/%s' % i)
            rmdir('data13_hip')
        except Exception:
            pass

    def test_download_fails_badmd5(self):
        """CLIENT(USER): Rucio download fails on MD5 mismatch"""
        # user has a file to upload
        filename = file_generator()
        file_md5 = md5(filename)
        filesize = stat(filename).st_size
        # register the replica with a deliberately wrong checksum
        lfn = {
            'name': filename[5:],
            'scope': self.user,
            'bytes': filesize,
            'md5': '0123456789abcdef0123456789abcdef'
        }
        # user uploads file
        self.replica_client.add_replicas(files=[lfn], rse=self.def_rse)
        rse_settings = rsemgr.get_rse_info(self.def_rse)
        protocol = rsemgr.create_protocol(rse_settings, 'write')
        protocol.connect()
        pfn = protocol.lfns2pfns(lfn).values()[0]
        protocol.put(filename[5:], pfn, filename[:5])
        protocol.close()
        remove(filename)
        # download file
        cmd = 'rucio -v download --dir /tmp {0}:{1}'.format(
            self.user, filename[5:])
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # raw string: the pattern contains regex escapes (\ , \:)
        report = r'Local\ checksum\:\ {0},\ Rucio\ checksum\:\ 0123456789abcdef0123456789abcdef'.format(
            file_md5)
        print('searching', report, 'in', err)
        assert re.search(report, err) is not None
        # The file should not exist
        cmd = 'ls /tmp/'  # search in /tmp/
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(filename[5:], out) is None
        # best-effort cleanup of the scope download directory
        try:
            for i in listdir('data13_hip'):
                unlink('data13_hip/%s' % i)
            rmdir('data13_hip')
        except Exception:
            pass

    def test_download_dataset(self):
        """CLIENT(USER): Rucio download dataset"""
        tmp_file1 = file_generator()
        tmp_dataset = self.user + ':DSet' + rse_name_generator(
        )  # something like mock:DSetMOCK_S0M37HING
        # add files
        cmd = 'rucio upload --rse {0} --scope {1} {2}'.format(
            self.def_rse, self.user, tmp_file1)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # create dataset
        cmd = 'rucio add-dataset ' + tmp_dataset
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # add files to dataset
        cmd = 'rucio attach {0} {1}:{2}'.format(
            tmp_dataset, self.user,
            tmp_file1[5:])  # triming '/tmp/' from filename
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # download dataset
        cmd = 'rucio -v download --dir /tmp {0}'.format(
            tmp_dataset)  # triming '/tmp/' from filename
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        search = '{0} successfully downloaded'.format(
            tmp_file1[5:])  # triming '/tmp/' from filename
        assert re.search(search, err) is not None

    def test_create_rule(self):
        """CLIENT(USER): Rucio add rule"""
        tmp_file1 = file_generator()
        # add files
        cmd = 'rucio upload --rse {0} --scope {1} {2}'.format(
            self.def_rse, self.user, tmp_file1)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # add rse
        tmp_rse = rse_name_generator()
        cmd = 'rucio-admin rse add {0}'.format(tmp_rse)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        # add quota
        self.account_client.set_account_limit('root', tmp_rse, -1)
        # add rse atributes
        cmd = 'rucio-admin rse set-attribute --rse {0} --key spacetoken --value ATLASSCRATCHDISK'.format(
            tmp_rse)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # add rse
        tmp_rse = rse_name_generator()
        cmd = 'rucio-admin rse add {0}'.format(tmp_rse)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # add quota
        self.account_client.set_account_limit('root', tmp_rse, -1)
        # add rse atributes
        cmd = 'rucio-admin rse set-attribute --rse {0} --key spacetoken --value ATLASSCRATCHDISK'.format(
            tmp_rse)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # add rse
        tmp_rse = rse_name_generator()
        cmd = 'rucio-admin rse add {0}'.format(tmp_rse)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # add quota
        self.account_client.set_account_limit('root', tmp_rse, -1)
        # add rse atributes
        cmd = 'rucio-admin rse set-attribute --rse {0} --key spacetoken --value ATLASSCRATCHDISK'.format(
            tmp_rse)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # add rules
        cmd = "rucio add-rule {0}:{1} 3 'spacetoken=ATLASSCRATCHDISK'".format(
            self.user, tmp_file1[5:])
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        rule = out[:-1]  # triming new line character
        # check if rule exist for the file
        cmd = "rucio list-rules {0}:{1}".format(self.user, tmp_file1[5:])
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(rule, out) is not None

    def test_delete_rule(self):
        """CLIENT(USER): rule deletion"""
        self.account_client.set_account_limit('root', self.def_rse, -1)
        tmp_file1 = file_generator()
        # add files
        cmd = 'rucio upload --rse {0} --scope {1} {2}'.format(
            self.def_rse, self.user, tmp_file1)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # add rse
        tmp_rse = rse_name_generator()
        cmd = 'rucio-admin rse add {0}'.format(tmp_rse)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        self.account_client.set_account_limit('root', tmp_rse, -1)
        # add rse atributes
        cmd = 'rucio-admin rse set-attribute --rse {0} --key spacetoken --value ATLASSCRATCHDISK'.format(
            tmp_rse)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # add rules
        cmd = "rucio add-rule {0}:{1} 1 'spacetoken=ATLASSCRATCHDISK'".format(
            self.user, tmp_file1[5:])
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(err)
        print(out)
        # get the rules for the file
        cmd = "rucio list-rules {0}:{1} | grep {0}:{1} | cut -f1 -d\ ".format(
            self.user, tmp_file1[5:])
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        (rule1, rule2) = out.split()
        # delete the rules for the file
        cmd = "rucio delete-rule {0}".format(rule1)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        cmd = "rucio delete-rule {0}".format(rule2)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # search for the file
        cmd = "rucio list-dids --filter type=all {0}:{1}".format(
            self.user, tmp_file1[5:])
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert 5 == len(out.splitlines())

    def test_add_file_twice(self):
        """CLIENT(USER): Add file twice"""
        tmp_file1 = file_generator()
        # add file twice
        cmd = 'rucio upload --rse {0} --scope {1} {2}'.format(
            self.def_rse, self.user, tmp_file1)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        cmd = 'rucio upload --rse {0} --scope {1} {2}'.format(
            self.def_rse, self.user, tmp_file1)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        # the second upload must not report a successful storage upload
        assert re.search(
            "File {0}:{1} successfully uploaded on the storage".format(
                self.user, tmp_file1[5:]), out) is None

    def test_add_delete_add_file(self):
        """CLIENT(USER): Add/Delete/Add"""
        tmp_file1 = file_generator()
        # add file
        cmd = 'rucio upload --rse {0} --scope {1} {2}'.format(
            self.def_rse, self.user, tmp_file1)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # get the rule for the file
        cmd = "rucio list-rules {0}:{1} | grep {0}:{1} | cut -f1 -d\ ".format(
            self.user, tmp_file1[5:])
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        rule = out
        # delete the file from the catalog
        cmd = "rucio delete-rule {0}".format(rule)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # delete the fisical file
        cmd = "find /tmp/rucio_rse/ -name {0} |xargs rm".format(tmp_file1[5:])
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # modify the file to avoid same checksum
        cmd = "echo 'delta' >> {0}".format(tmp_file1)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        # add the same file
        cmd = 'rucio upload --rse {0} --scope {1} {2}'.format(
            self.def_rse, self.user, tmp_file1)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out, err)
        assert re.search(
            "File {0}:{1} successfully uploaded on the storage".format(
                self.user, tmp_file1[5:]), out) is None

    def test_attach_files_dataset(self):
        """CLIENT(USER): Rucio attach files to dataset"""
        # Attach files to a dataset using the attach method
        tmp_file1 = file_generator()
        tmp_file2 = file_generator()
        tmp_file3 = file_generator()
        tmp_dsn = self.user + ':DSet' + rse_name_generator(
        )  # something like mock:DSetMOCK_S0M37HING
        # Adding files to a new dataset
        cmd = 'rucio upload --rse {0} --scope {1} {2} {3}'.format(
            self.def_rse, self.user, tmp_file1, tmp_dsn)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        # upload the files
        cmd = 'rucio upload --rse {0} --scope {1} {2} {3}'.format(
            self.def_rse, self.user, tmp_file2, tmp_file3)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        remove(tmp_file1)
        remove(tmp_file2)
        remove(tmp_file3)
        # attach the files to the dataset
        cmd = 'rucio attach {0} {1}:{2} {1}:{3}'.format(
            tmp_dsn, self.user, tmp_file2[5:],
            tmp_file3[5:])  # triming '/tmp/' from filenames
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        # searching for the file in the new dataset
        cmd = 'rucio list-files {0}'.format(tmp_dsn)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        # tmp_file2 must be in the dataset
        assert re.search("{0}:{1}".format(self.user, tmp_file2[5:]), out) is not None
        # tmp_file3 must be in the dataset
        assert re.search("{0}:{1}".format(self.user, tmp_file3[5:]), out) is not None

    def test_detach_files_dataset(self):
        """CLIENT(USER): Rucio detach files to dataset"""
        # Attach files to a dataset using the attach method
        tmp_file1 = file_generator()
        tmp_file2 = file_generator()
        tmp_file3 = file_generator()
        tmp_dsn = self.user + ':DSet' + rse_name_generator(
        )  # something like mock:DSetMOCK_S0M37HING
        # Adding files to a new dataset
        cmd = 'rucio upload --rse {0} --scope {1} {2} {3} {4} {5}'.format(
            self.def_rse, self.user, tmp_file1, tmp_file2, tmp_file3, tmp_dsn)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        remove(tmp_file1)
        remove(tmp_file2)
        remove(tmp_file3)
        # detach the files to the dataset
        cmd = 'rucio detach {0} {1}:{2} {1}:{3}'.format(
            tmp_dsn, self.user, tmp_file2[5:],
            tmp_file3[5:])  # triming '/tmp/' from filenames
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        # searching for the file in the new dataset
        cmd = 'rucio list-files {0}'.format(tmp_dsn)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        # tmp_file1 must be in the dataset
        assert re.search("{0}:{1}".format(self.user, tmp_file1[5:]), out) is not None
        # tmp_file3 must not be in the dataset any more
        assert re.search("{0}:{1}".format(self.user, tmp_file3[5:]), out) is None

    def test_attach_file_twice(self):
        """CLIENT(USER): Rucio attach a file twice"""
        # Attach files to a dataset using the attach method
        tmp_file1 = file_generator()
        tmp_dsn = self.user + ':DSet' + rse_name_generator(
        )  # something like mock:DSetMOCK_S0M37HING
        # Adding files to a new dataset
        cmd = 'rucio upload --rse {0} --scope {1} {2} {3}'.format(
            self.def_rse, self.user, tmp_file1, tmp_dsn)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        remove(tmp_file1)
        # attach the files to the dataset
        cmd = 'rucio attach {0} {1}:{2}'.format(
            tmp_dsn, self.user,
            tmp_file1[5:])  # triming '/tmp/' from filenames
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        assert re.search("The file already exists", err) is not None

    def test_attach_dataset_twice(self):
        """ CLIENT(USER): Rucio attach a dataset twice """
        container = 'container_%s' % generate_uuid()
        dataset = 'dataset_%s' % generate_uuid()
        self.did_client.add_container(scope=self.user, name=container)
        self.did_client.add_dataset(scope=self.user, name=dataset)
        # Attach dataset to container
        cmd = 'rucio attach {0}:{1} {0}:{2}'.format(self.user, container,
                                                    dataset)
        exitcode, out, err = execute(cmd)
        # Attach again
        cmd = 'rucio attach {0}:{1} {0}:{2}'.format(self.user, container,
                                                    dataset)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        assert re.search(
            "Data identifier already added to the destination content",
            err) is not None

    def test_detach_non_existing_file(self):
        """CLIENT(USER): Rucio detach a non existing file"""
        tmp_file1 = file_generator()
        tmp_dsn = self.user + ':DSet' + rse_name_generator(
        )  # something like mock:DSetMOCK_S0M37HING
        # Adding files to a new dataset
        cmd = 'rucio upload --rse {0} --scope {1} {2} {3}'.format(
            self.def_rse, self.user, tmp_file1, tmp_dsn)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        remove(tmp_file1)
        # attach the files to the dataset
        cmd = 'rucio detach {0} {1}:{2}'.format(
            tmp_dsn, self.user,
            'file_ghost')  # triming '/tmp/' from filenames
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        assert re.search("Data identifier not found.", err) is not None

    def test_list_did_recursive(self):
        """ CLIENT(USER): List did recursive """
        # Setup nested collections
        tmp_scope = 'mock'
        tmp_container_1 = 'container_%s' % generate_uuid()
        cmd = 'rucio add-container {0}:{1}'.format(tmp_scope, tmp_container_1)
        exitcode, out, err = execute(cmd)
        tmp_container_2 = 'container_%s' % generate_uuid()
        cmd = 'rucio add-container {0}:{1}'.format(tmp_scope, tmp_container_2)
        exitcode, out, err = execute(cmd)
        tmp_container_3 = 'container_%s' % generate_uuid()
        cmd = 'rucio add-container {0}:{1}'.format(tmp_scope, tmp_container_3)
        exitcode, out, err = execute(cmd)
        cmd = 'rucio attach {0}:{1} {0}:{2}'.format(tmp_scope,
                                                    tmp_container_1,
                                                    tmp_container_2)
        exitcode, out, err = execute(cmd)
        cmd = 'rucio attach {0}:{1} {0}:{2}'.format(tmp_scope,
                                                    tmp_container_2,
                                                    tmp_container_3)
        exitcode, out, err = execute(cmd)
        # All attached DIDs are expected
        cmd = 'rucio list-dids {0}:{1} --recursive'.format(
            tmp_scope, tmp_container_1)
        exitcode, out, err = execute(cmd)
        assert re.search(tmp_container_1, out) is not None
        assert re.search(tmp_container_2, out) is not None
        assert re.search(tmp_container_3, out) is not None
        # Wildcards are not allowed to use with --recursive
        cmd = 'rucio list-dids {0}:* --recursive'.format(tmp_scope)
        exitcode, out, err = execute(cmd)
        assert re.search("Option recursive cannot be used with wildcards",
                         err) is not None

    def test_attach_many_dids(self):
        """ CLIENT(USER): Rucio attach many (>1000) DIDs """
        # Setup data for CLI check
        tmp_dsn_name = 'Container' + rse_name_generator()
        tmp_dsn_did = self.user + ':' + tmp_dsn_name
        self.did_client.add_did(scope=self.user,
                                name=tmp_dsn_name,
                                type='CONTAINER')
        files = [{
            'name': 'dsn_%s' % generate_uuid(),
            'scope': self.user,
            'type': 'DATASET'
        } for i in range(0, 1500)]
        # bulk-add in two chunks (server-side bulk limit)
        self.did_client.add_dids(files[:1000])
        self.did_client.add_dids(files[1000:])
        # Attaching over 1000 DIDs with CLI
        cmd = 'rucio attach {0}'.format(tmp_dsn_did)
        for tmp_file in files:
            cmd += ' {0}:{1}'.format(tmp_file['scope'], tmp_file['name'])
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        # Checking if the execution was successfull and if the DIDs belong together
        assert re.search('DIDs successfully attached', out) is not None
        cmd = 'rucio list-content {0}'.format(tmp_dsn_did)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        # first dataset must be in the container
        assert re.search("{0}:{1}".format(self.user, files[0]['name']), out) is not None
        # last dataset must be in the container
        assert re.search("{0}:{1}".format(self.user, files[-1]['name']), out) is not None
        # Setup data with file
        did_file_path = 'list_dids.txt'
        files = [{
            'name': 'dsn_%s' % generate_uuid(),
            'scope': self.user,
            'type': 'DATASET'
        } for i in range(0, 1500)]
        self.did_client.add_dids(files[:1000])
        self.did_client.add_dids(files[1000:])
        # the with-statement closes the file; no explicit close() needed
        with open(did_file_path, 'w') as did_file:
            for file in files:
                did_file.write(file['scope'] + ':' + file['name'] + '\n')
        # Attaching over 1000 files per file
        cmd = 'rucio attach {0} -f {1}'.format(tmp_dsn_did, did_file_path)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        print(out)
        print(err)
        remove(did_file_path)
        # Checking if the execution was successfull and if the DIDs belong together
        assert re.search('DIDs successfully attached', out) is not None
        cmd = 'rucio list-content {0}'.format(tmp_dsn_did)
        print(self.marker + cmd)
        exitcode, out, err = execute(cmd)
        # first file must be in the dataset
        assert re.search("{0}:{1}".format(self.user, files[0]['name']), out) is not None
        # last file must be in the dataset
        assert re.search("{0}:{1}".format(self.user, files[-1]['name']), out) is not None

    def test_attach_many_dids_twice(self):
        """ CLIENT(USER) Attach many (>1000) DIDs twice """
        # Setup data for CLI check
        container_name = 'container' + generate_uuid()
        container = self.user + ':' + container_name
        self.did_client.add_did(scope=self.user,
                                name=container_name,
                                type='CONTAINER')
        datasets = [{
            'name': 'dsn_%s' % generate_uuid(),
            'scope': self.user,
            'type': 'DATASET'
        } for i in range(0, 1500)]
        self.did_client.add_dids(datasets[:1000])
        self.did_client.add_dids(datasets[1000:])
        # Attaching over 1000 DIDs with CLI
        cmd = 'rucio attach {0}'.format(container)
        for dataset in datasets:
            cmd += ' {0}:{1}'.format(dataset['scope'], dataset['name'])
        exitcode, out, err = execute(cmd)
        # Attaching twice
        cmd = 'rucio attach {0}'.format(container)
        for dataset in datasets:
            cmd += ' {0}:{1}'.format(dataset['scope'], dataset['name'])
        exitcode, out, err = execute(cmd)
        assert re.search("DIDs successfully attached", out) is not None
        # Attaching twice plus one DID that is not already attached
        new_dataset = {
            'name': 'dsn_%s' % generate_uuid(),
            'scope': self.user,
            'type': 'DATASET'
        }
        datasets.append(new_dataset)
        self.did_client.add_did(scope=self.user,
                                name=new_dataset['name'],
                                type='DATASET')
        cmd = 'rucio attach {0}'.format(container)
        for dataset in datasets:
            cmd += ' {0}:{1}'.format(dataset['scope'], dataset['name'])
        exitcode, out, err = execute(cmd)
        assert re.search("DIDs successfully attached", out) is not None
        cmd = 'rucio list-content {0}'.format(container)
        exitcode, out, err = execute(cmd)
        # the only new DID must now be attached
        assert re.search("{0}:{1}".format(self.user, new_dataset['name']),
                         out) is not None
class TestDIDClients:
    """Integration tests for DIDClient against a running Rucio server.

    Ported off Python-2-only syntax: long literals (``1L``) and ``xrange``
    are syntax/name errors under Python 3; wildcard patterns containing
    ``\_`` are now raw strings (same bytes, no invalid-escape warning);
    the ``is 'file'`` identity comparison is fixed to ``==``.
    """

    def setup(self):
        """Create one client per Rucio subsystem used by the tests."""
        self.account_client = AccountClient()
        self.scope_client = ScopeClient()
        self.meta_client = MetaClient()
        self.did_client = DIDClient()
        self.replica_client = ReplicaClient()
        self.rse_client = RSEClient()

    def test_list_dids(self):
        """ DATA IDENTIFIERS (CLIENT): List dids by pattern."""
        tmp_scope = scope_name_generator()
        tmp_files = []
        tmp_files.append('file_a_1%s' % generate_uuid())
        tmp_files.append('file_a_2%s' % generate_uuid())
        tmp_files.append('file_b_1%s' % generate_uuid())
        tmp_rse = 'MOCK'
        self.scope_client.add_scope('jdoe', tmp_scope)
        for tmp_file in tmp_files:
            self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb')
        # The '\_' escapes an SQL-LIKE underscore; raw strings keep the
        # exact same pattern bytes sent to the server.
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': r'file\_a\_*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 2)
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': r'file\_a\_1*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 1)
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': r'file\__\_1*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 2)
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': 'file*'}, type='file'):
            results.append(result)
        assert_equal(len(results), 3)
        # Default type is 'collection': bare files must not match.
        results = []
        for result in self.did_client.list_dids(tmp_scope, {'name': 'file*'}):
            results.append(result)
        assert_equal(len(results), 0)
        with assert_raises(UnsupportedOperation):
            self.did_client.list_dids(tmp_scope, {'name': 'file*'}, type='whateverytype')

    def test_list_by_metadata(self):
        """ DATA IDENTIFIERS (CLIENT): List did with metadata"""
        dsns = []
        tmp_scope = 'mock'
        tmp_dsn1 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn1)
        dataset_meta = {'project': 'data12_8TeV',
                        'run_number': 400000,
                        'stream_name': 'physics_CosmicCalo',
                        'prod_step': 'merge',
                        'datatype': 'NTUP_TRIG',
                        'version': 'f392_m920',
                        }
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn1, meta=dataset_meta)
        tmp_dsn2 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn2)
        dataset_meta['run_number'] = 400001
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn2, meta=dataset_meta)
        tmp_dsn3 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn3)
        dataset_meta['stream_name'] = 'physics_Egamma'
        dataset_meta['datatype'] = 'NTUP_SMWZ'
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn3, meta=dataset_meta)
        # All three datasets share project/version.
        dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'version': 'f392_m920'})
        results = list(dids)
        for dsn in dsns:
            assert_in(dsn, results)
        dsns.remove(tmp_dsn1)
        # Only dsn2 and dsn3 carry run_number 400001.
        dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'run_number': 400001})
        results = list(dids)
        for dsn in dsns:
            assert_in(dsn, results)
        dsns.remove(tmp_dsn2)
        # Only dsn3 matches the Egamma/NTUP_SMWZ combination.
        dids = self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'stream_name': 'physics_Egamma', 'datatype': 'NTUP_SMWZ'})
        results = list(dids)
        for dsn in dsns:
            assert_in(dsn, results)
        with assert_raises(KeyNotFound):
            self.did_client.list_dids(tmp_scope, {'NotReallyAKey': 'NotReallyAValue'})

    def test_add_did(self):
        """ DATA IDENTIFIERS (CLIENT): Add, populate and list did content"""
        tmp_scope = 'mock'
        tmp_rse = 'MOCK'
        tmp_dsn = 'dsn_%s' % generate_uuid()
        # PFN example: rfio://castoratlas.cern.ch/castor/cern.ch/grid/atlas/tzero/xx/xx/xx/filename
        dataset_meta = {'project': 'data13_hip',
                        'run_number': 300000,
                        'stream_name': 'physics_CosmicCalo',
                        'prod_step': 'merge',
                        'datatype': 'NTUP_TRIG',
                        'version': 'f392_m927',
                        }
        rules = [{'copies': 1, 'rse_expression': 'MOCK', 'account': 'root'}]
        with assert_raises(ScopeNotFound):
            self.did_client.add_dataset(scope='Nimportnawak', name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules)
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules)
        # Attaching files that were never registered must fail.
        with assert_raises(DataIdentifierNotFound):
            self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn,
                                                 files=[{'scope': tmp_scope,
                                                         'name': 'lfn.%(tmp_dsn)s.' % locals() + str(generate_uuid()),
                                                         'bytes': 724963570,
                                                         'adler32': '0cc737eb'}, ])
        files = []
        for i in range(5):
            lfn = 'lfn.%(tmp_dsn)s.' % locals() + str(generate_uuid())
            pfn = 'mock://localhost/tmp/rucio_rse/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            # it doesn't work with mock: TBF
            # pfn = 'srm://mock2.com:2880/pnfs/rucio/disk-only/scratchdisk/rucio_tests/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            pfn += '%(tmp_dsn)s/%(lfn)s' % locals()
            file_meta = {'guid': str(generate_uuid()), 'events': 10}
            files.append({'scope': tmp_scope, 'name': lfn, 'bytes': 724963570, 'adler32': '0cc737eb',
                          'pfn': pfn, 'meta': file_meta})
        # NOTE: a dead 'rules = [... CERN-PROD_TZERO ...]' reassignment was
        # removed here; it was never passed to any call.
        self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn, files=files, rse=tmp_rse)
        files = []
        for i in range(5):
            lfn = '%(tmp_dsn)s.' % locals() + str(generate_uuid())
            pfn = 'mock://localhost/tmp/rucio_rse/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            # it doesn't work with mock: TBF
            # pfn = 'srm://mock2.com:2880/pnfs/rucio/disk-only/scratchdisk/rucio_tests/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            pfn += '%(tmp_dsn)s/%(lfn)s' % locals()
            file_meta = {'guid': str(generate_uuid()), 'events': 100}
            files.append({'scope': tmp_scope, 'name': lfn, 'bytes': 724963570, 'adler32': '0cc737eb',
                          'pfn': pfn, 'meta': file_meta})
        self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn, files=files, rse=tmp_rse)
        self.did_client.close(scope=tmp_scope, name=tmp_dsn)

    def test_attach_dids_to_dids(self):
        """ DATA IDENTIFIERS (CLIENT): Attach dids to dids"""
        tmp_scope = 'mock'
        tmp_rse = 'MOCK'
        nb_datasets = 5
        nb_files = 5
        attachments, dsns = list(), list()
        guid_to_query = None
        dsn = {}
        for i in range(nb_datasets):
            attachment = {}
            attachment['scope'] = tmp_scope
            attachment['name'] = 'dsn.%s' % str(generate_uuid())
            attachment['rse'] = tmp_rse
            files = []
            # inner index renamed from 'i' -- it shadowed the outer loop variable
            for j in range(nb_files):
                files.append({'scope': tmp_scope, 'name': 'lfn.%s' % str(generate_uuid()),
                              'bytes': 724963570, 'adler32': '0cc737eb',
                              'meta': {'guid': str(generate_uuid()), 'events': 100}})
            attachment['dids'] = files
            guid_to_query = files[0]['meta']['guid']
            dsn = {'scope': tmp_scope, 'name': attachment['name']}
            dsns.append(dsn)
            attachments.append(attachment)
        self.did_client.add_datasets(dsns=dsns)
        self.did_client.attach_dids_to_dids(attachments=attachments)
        # Looking up by GUID must return exactly the last dataset created.
        matched = [d for d in self.did_client.get_dataset_by_guid(guid_to_query)]
        assert_equal([dsn], matched)
        cnt_name = 'cnt_%s' % generate_uuid()
        self.did_client.add_container(scope='mock', name=cnt_name)
        # Files cannot be attached directly to a container.
        with assert_raises(UnsupportedOperation):
            self.did_client.attach_dids_to_dids([{'scope': 'mock', 'name': cnt_name, 'rse': tmp_rse, 'dids': attachment['dids']}])

    def test_add_dataset(self):
        """ DATA IDENTIFIERS (CLIENT): Add dataset """
        tmp_scope = 'mock'
        tmp_dsn = 'dsn_%s' % generate_uuid()
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, meta={'project': 'data13_hip'})
        did = self.did_client.get_did(tmp_scope, tmp_dsn)
        assert_equal(did['scope'], tmp_scope)
        assert_equal(did['name'], tmp_dsn)
        with assert_raises(DataIdentifierNotFound):
            self.did_client.get_did('i_dont_exist', 'neither_do_i')

    def test_add_datasets(self):
        """ DATA IDENTIFIERS (CLIENT): Bulk add datasets """
        tmp_scope = 'mock'
        dsns = list()
        for i in range(500):
            tmp_dsn = {'name': 'dsn_%s' % generate_uuid(), 'scope': tmp_scope, 'meta': {'project': 'data13_hip'}}
            dsns.append(tmp_dsn)
        self.did_client.add_datasets(dsns)

    def test_exists(self):
        """ DATA IDENTIFIERS (CLIENT): Check if data identifier exists """
        tmp_scope = 'mock'
        tmp_file = 'file_%s' % generate_uuid()
        tmp_rse = 'MOCK'
        self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb')
        did = self.did_client.get_did(tmp_scope, tmp_file)
        assert_equal(did['scope'], tmp_scope)
        assert_equal(did['name'], tmp_file)
        with assert_raises(DataIdentifierNotFound):
            self.did_client.get_did('i_dont_exist', 'neither_do_i')

    def test_did_hierarchy(self):
        """ DATA IDENTIFIERS (CLIENT): Check did hierarchy rule """
        account = 'jdoe'
        rse = 'MOCK'
        scope = scope_name_generator()
        file = ['file_%s' % generate_uuid() for i in range(10)]
        dst = ['dst_%s' % generate_uuid() for i in range(4)]
        cnt = ['cnt_%s' % generate_uuid() for i in range(4)]
        self.scope_client.add_scope(account, scope)
        for i in range(10):
            self.replica_client.add_replica(rse, scope, file[i], 1, '0cc737eb')
        for i in range(4):
            self.did_client.add_did(scope, dst[i], 'DATASET', statuses=None, meta=None, rules=None)
        for i in range(4):
            self.did_client.add_did(scope, cnt[i], 'CONTAINER', statuses=None, meta=None, rules=None)
        for i in range(4):
            self.did_client.add_files_to_dataset(scope, dst[i], [{'scope': scope, 'name': file[2 * i], 'bytes': 1, 'adler32': '0cc737eb'},
                                                                 {'scope': scope, 'name': file[2 * i + 1], 'bytes': 1, 'adler32': '0cc737eb'}])
        self.did_client.add_containers_to_container(scope, cnt[1], [{'scope': scope, 'name': cnt[2]}, {'scope': scope, 'name': cnt[3]}])
        self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': dst[2]}])
        result = self.did_client.scope_list(scope, recursive=True)
        for r in result:
            pass
            # TODO: fix, fix, fix
            # if r['name'] == cnt[1]:
            #     assert_equal(r['type'], 'container')
            #     assert_equal(r['level'], 0)
            # if (r['name'] == cnt[0]) or (r['name'] == dst[0]) or (r['name'] == file[8]) or (r['name'] == file[9]):
            #     assert_equal(r['level'], 0)
            # else:
            #     assert_equal(r['level'], 1)

    def test_detach_did(self):
        """ DATA IDENTIFIERS (CLIENT): Detach dids from a did"""
        account = 'jdoe'
        rse = 'MOCK'
        scope = scope_name_generator()
        file = ['file_%s' % generate_uuid() for i in range(10)]
        dst = ['dst_%s' % generate_uuid() for i in range(4)]
        cnt = ['cnt_%s' % generate_uuid() for i in range(2)]
        self.scope_client.add_scope(account, scope)
        for i in range(10):
            self.replica_client.add_replica(rse, scope, file[i], 1, '0cc737eb')
        for i in range(4):
            self.did_client.add_dataset(scope, dst[i], statuses=None, meta=None, rules=None)
        for i in range(2):
            self.did_client.add_container(scope, cnt[i], statuses=None, meta=None, rules=None)
        for i in range(4):
            self.did_client.add_files_to_dataset(scope, dst[i], [{'scope': scope, 'name': file[2 * i], 'bytes': 1, 'adler32': '0cc737eb'},
                                                                 {'scope': scope, 'name': file[2 * i + 1], 'bytes': 1, 'adler32': '0cc737eb'}])
        self.did_client.add_containers_to_container(scope, cnt[1], [{'scope': scope, 'name': dst[2]}, {'scope': scope, 'name': dst[3]}])
        # Mixing datasets and containers in one bulk add must be refused.
        with assert_raises(UnsupportedOperation):
            self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': cnt[1]}])
        self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': dst[2]}])
        self.did_client.detach_dids(scope, cnt[0], [{'scope': scope, 'name': dst[1]}])
        self.did_client.detach_dids(scope, dst[3], [{'scope': scope, 'name': file[6]}, {'scope': scope, 'name': file[7]}])
        result = self.did_client.scope_list(scope, recursive=True)
        for r in result:
            if r['name'] == dst[1]:
                assert_equal(r['level'], 0)
            # BUG FIX: was "r['type'] is 'file'" -- identity comparison with a
            # string literal is (practically) always False, so these
            # assertions never executed.
            if r['type'] == 'file':
                if (r['name'] in file[6:9]):
                    assert_equal(r['level'], 0)
                else:
                    assert_not_equal(r['level'], 0)
        # A DID cannot be detached from itself.
        with assert_raises(UnsupportedOperation):
            self.did_client.detach_dids(scope=scope, name=cnt[0], dids=[{'scope': scope, 'name': cnt[0]}])

    def test_scope_list(self):
        """ DATA IDENTIFIERS (CLIENT): Add, aggregate, and list data identifiers in a scope """
        # create some dummy data
        self.tmp_accounts = ['jdoe' for i in range(3)]
        self.tmp_scopes = [scope_name_generator() for i in range(3)]
        self.tmp_rses = [rse_name_generator() for i in range(3)]
        self.tmp_files = ['file_%s' % generate_uuid() for i in range(3)]
        self.tmp_datasets = ['dataset_%s' % generate_uuid() for i in range(3)]
        self.tmp_containers = ['container_%s' % generate_uuid() for i in range(3)]
        # add dummy data to the catalogue
        for i in range(3):
            self.scope_client.add_scope(self.tmp_accounts[i], self.tmp_scopes[i])
            self.rse_client.add_rse(self.tmp_rses[i])
            self.replica_client.add_replica(self.tmp_rses[i], self.tmp_scopes[i], self.tmp_files[i], 1, '0cc737eb')
        # put files in datasets
        for i in range(3):
            for j in range(3):
                files = [{'scope': self.tmp_scopes[j], 'name': self.tmp_files[j], 'bytes': 1, 'adler32': '0cc737eb'}]
                self.did_client.add_dataset(self.tmp_scopes[i], self.tmp_datasets[j])
                self.did_client.add_files_to_dataset(self.tmp_scopes[i], self.tmp_datasets[j], files)
class RunSync(object):
    """
    Synchronize the replica of a given run at WIPAC-ORIG
    the corresponding Rucio site.

    The constructor eagerly performs all discovery: it derives the run
    number, builds the storage base URL, lists the run's files on storage,
    fetches what Rucio already knows, drops already-registered files and
    finally stats the remaining ones. Call register() afterwards to create
    the dataset/container, replicas and the replication rule.
    """

    def __init__(self, run, originrse=DEFAULT_ORIGIN_RSE, destrse=None, scope=DEFAULT_SCOPE,
                 check=True, lifetime=None, dry_run=False, container=None):
        """
        :param dataset: Name of the PhEDEx dataset to synchronize with Rucio.
        :param pnn: PhEDEx node name to filter on for replica information.
        """
        # NOTE(review): the docstring parameters above do not match the actual
        # signature (run/originrse/destrse/...) -- looks copied from a CMS
        # PhEDEx sync script; confirm and update.
        self.run = run
        self.originrse = originrse
        self.destrse = destrse
        self.scope = scope
        self.check = check              # not read anywhere in this class
        self.lifetime = lifetime        # forwarded to add_container/add_dataset
        self.dry_run = dry_run
        self.container = container
        self.rucio_datasets = {}
        self.run_files = {}             # filename -> {} then {'size','adler32','name'}
        self.existent_replica_files = {}  # replaced by a set in get_rucio_metadata()
        self.url = ''
        self.gfal = Gfal2Context()
        self.run_Number = None
        self.get_run_Number()
        self.files_storage = {}
        self.get_global_url()
        self.didc = DIDClient()
        self.repc = ReplicaClient()
        self.rulesClient = RuleClient()
        # Right now obtaining the Metadata from the storage at WIPAC
        # Hopefully in the future from JADE
        # TODO
        self.get_run_Files()
        self.get_rucio_metadata()
        self.update_run_Files()
        self.get_files_metadata()

    def update_run_Files(self):
        """
        Updating the run files with only the files that have not been registered
        """
        for f in self.existent_replica_files:
            # keep only the basename of the replica path
            file_name = f.split('/')[-1:][0]
            if file_name in self.run_files:
                print("File: %s already registered. Skipping it" % file_name)
                self.run_files.pop(file_name)

    def get_files_metadata(self):
        """Stat every still-unregistered run file on storage."""
        for f in self.run_files:
            if self.run + '/' + f not in self.existent_replica_files:
                self.obtain_metadata(f)
        print("Metadat initialization done")

    def obtain_metadata(self, filename):
        """
        Get the size and checksum for every file in the run from the gftp server
        """
        url = self.get_file_url(filename)
        print("checking metadata for url %s" % url)
        try:
            size = self.gfal.stat(str(url)).st_size
            adler32 = self.gfal.checksum(str(url), 'adler32')
            print("got size and adler 32checksum of file: pfn=%s size=%s checksum=%s" % (url, size, adler32))
            # 'name' is the DID name relative to the scope: "<run>/<file>"
            self.run_files[filename] = {'size': size, 'adler32': adler32, 'name': self.run + '/' + filename}
        except GError:
            # file vanished or storage error; leave the placeholder entry as-is
            print("no file found at %s" % url)
            return False

    def get_file_url(self, filename):
        # <base url>/<run path>/<filename>
        return self.url + '/' + self.run + '/' + filename

    def get_global_url(self):
        """
        Return the base path of the rucio url
        """
        print("Getting parameters for rse %s" % self.originrse)
        rse = rsemgr.get_rse_info(self.originrse)
        proto = rse['protocols'][0]
        schema = proto['scheme']
        prefix = proto['prefix'] + self.scope.replace('.', '/')
        if schema == 'srm':
            # SRM endpoints need the web service path prepended to the prefix
            prefix = proto['extended_attributes']['web_service_path'] + prefix
        url = schema + '://' + proto['hostname']
        if proto['port'] != 0:
            url = url + ':' + str(proto['port'])
        self.url = url + prefix
        print("Determined base url %s" % self.url)

    def get_run_Number(self):
        """
        Obtain the run number out of whole run IceCube/2016/filtered/level2pass2/0101/Run00127347
        """
        print("Obtaining run number out of run(dataset): %s" % self.run)
        # last path component, e.g. "Run00127347"
        self.run_Number = self.run.split("/")[-1]
        print("Run number (dataset): %s" % self.run_Number)

    def get_run_Files(self):
        """
        Gets the list of files for a given run and their checksums from the storage
        """
        self.run_url = self.url + '/' + self.run
        print("Listin files from url : %s" % self.run_url)
        run_files = []
        try:
            run_files = self.gfal.listdir(str(self.run_url))
        except GError:
            print("No files found at %s" % str(self.run_url))
        print("Files found in storage:")
        count = 0
        for f in run_files:
            # len > 3 skips '.'/'..'-style entries; cap at 5000 files per run
            if len(f) > 3:
                if count < 5000:
                    self.run_files[f] = {}
                    count = count + 1
                else:
                    break

    def get_rucio_metadata(self):
        """
        Gets the list of datasets at the Rucio RSE, the files, and the metadata.
        """
        print("Initializing Rucio... getting the list of blocks and files at %s" % self.originrse)
        registered_datasets = self.repc.list_datasets_per_rse(self.originrse)
        for dataset in registered_datasets:
            self.rucio_datasets[dataset] = {}
        replica_info = self.repc.list_replicas([{"scope": self.scope, "name": '/' + self.run_Number}],
                                               rse_expression="rse=%s" % self.originrse)
        replica_files = set()
        for file_info in replica_info:
            name = file_info['name']
            # only count replicas actually present at the origin RSE
            if self.originrse in file_info['rses']:
                replica_files.add(name)
        self.existent_replica_files = replica_files
        print("Rucio initialization done.")

    def register(self):
        """
        Create the container, the datasets and attach them to the container.
        """
        print("Registering...")
        self.register_dataset(self.run_Number)
        self.register_replicas(self.run_files)
        self.register_container(self.container)
        self.attach_dataset_to_container(self.run_Number, self.container)
        self.add_replica_rule(dataset=self.run_Number, destRSE=self.destrse)

    def register_container(self, container):
        """
        Registering the container
        """
        print("Registering the container %s with scope: %s" % (container, self.scope))
        if container is None:
            print('No container added, not registering any container')
            return
        if self.dry_run:
            print('Dry run only, not registering the container')
            return
        try:
            self.didc.add_container(scope=self.scope, name=container, lifetime=self.lifetime)
        except DataIdentifierAlreadyExists:
            print("Container %s already exists" % container)
        except InvalidObject:
            print("Problem with container name: %s" % container)

    def attach_dataset_to_container(self, dataset, container):
        """
        Attaching the dataset to a container
        """
        print("Attaching dataset %s, to container: %s" % (dataset, container))
        if container is None:
            print('No container added, not registering dataset in container')
            return
        if self.dry_run:
            print('Dry run only, not attaching dataset container')
            return
        try:
            self.didc.attach_dids(scope=self.scope, name=container,
                                  dids=[{'scope': self.scope, 'name': '/' + dataset}])
        except RucioException:
            # NOTE(review): RucioException is broad; this also swallows
            # unrelated server errors, not just duplicate attachments.
            print("dataset already attached to container")
        return

    def register_dataset(self, run):
        """
        Registering a dataset in the rucio database
        """
        print("registering dataset %s" % run)
        if self.dry_run:
            print(' Dry run only. Not creating dataset.')
            return
        try:
            self.didc.add_dataset(scope=self.scope, name=run, lifetime=self.lifetime)
        except DataIdentifierAlreadyExists:
            print(" Dataset %s already exists" % run)

    def register_replicas(self, replicas):
        """
        Register file replica.
        """
        if not replicas:
            return
        print("registering files in Rucio: %s" % ", ".join([replicas[filemd]['name'] for filemd in replicas]))
        if self.dry_run:
            print(' Dry run only. Not registering files.')
            return
        try:
            self.repc.add_replicas(rse=self.originrse,
                                   files=[{'scope': self.scope,
                                           'name': replicas[filemd]['name'],
                                           'adler32': replicas[filemd]['adler32'],
                                           'bytes': replicas[filemd]['size'],
                                           } for filemd in replicas])
            print("Adding files to dataset: %s" % self.run_Number)
        except InvalidObject:
            print("Problem with file name does not match pattern")
        # Attach each replica to the run dataset; duplicates are tolerated.
        for filemd in replicas:
            try:
                self.didc.attach_dids(scope=self.scope, name=self.run_Number,
                                      dids=[{'scope': self.scope, 'name': replicas[filemd]['name']}])
            except FileAlreadyExists:
                print("File already attached")

    def add_replica_rule(self, destRSE, dataset):
        """
        Create a replication rule for one dataset "Run" at an RSE
        """
        print("Creating replica rule for dataset %s at rse: %s" % (dataset, destRSE))
        if self.dry_run:
            print(' Dry run only. Not creating rules')
            return
        if destRSE:
            try:
                self.rulesClient.add_replication_rule([{"scope": self.scope, "name": "/" + dataset}],
                                                      copies=1, rse_expression=destRSE)
            except DuplicateRule:
                print('Rule already exists')
    def test_list_dataset_replicas_archive(self):
        """ REPLICA (CLIENT): List dataset replicas with archives.

        Registers a zip archive on two RSEs, registers its constituent files
        on the second RSE only, puts the constituents into a dataset ruled to
        the first RSE, then checks that a shallow listing sees one UNAVAILABLE
        dataset replica while a deep listing resolves the archive contents to
        three AVAILABLE entries.
        """
        replica_client = ReplicaClient()
        did_client = DIDClient()
        rule_client = RuleClient()
        scope = 'mock'
        # First RSE: will hold only the archive (constituents unavailable here
        # until resolved through the archive).
        rse = 'APERTURE_%s' % rse_name_generator()
        rse_id = add_rse(rse, **self.vo)
        add_protocol(rse_id=rse_id, parameter={'scheme': 'root',
                                               'hostname': 'root.aperture.com',
                                               'port': 1409,
                                               'prefix': '//test/chamber/',
                                               'impl': 'rucio.rse.protocols.xrootd.Default',
                                               'domains': {'lan': {'read': 1, 'write': 1, 'delete': 1},
                                                           'wan': {'read': 1, 'write': 1, 'delete': 1}}})
        # Second RSE: holds the archive and the unpacked constituent files.
        rse2 = 'BLACKMESA_%s' % rse_name_generator()
        rse2_id = add_rse(rse2, **self.vo)
        add_protocol(rse_id=rse2_id, parameter={'scheme': 'root',
                                                'hostname': 'root.blackmesa.com',
                                                'port': 1409,
                                                'prefix': '//underground/facility',
                                                'impl': 'rucio.rse.protocols.xrootd.Default',
                                                'domains': {'lan': {'read': 1, 'write': 1, 'delete': 1},
                                                            'wan': {'read': 1, 'write': 1, 'delete': 1}}})
        # register archive
        archive = {'scope': scope, 'name': 'another.%s.zip' % generate_uuid(),
                   'type': 'FILE', 'bytes': 2596, 'adler32': 'deedbeaf'}
        replica_client.add_replicas(rse=rse, files=[archive])
        replica_client.add_replicas(rse=rse2, files=[archive])
        # Files packed inside the archive; replicas exist only at rse2.
        archived_files = [{'scope': scope, 'name': 'zippedfile-%i-%s' % (i, str(generate_uuid())),
                           'type': 'FILE', 'bytes': 4322, 'adler32': 'deaddead'} for i in range(2)]
        replica_client.add_replicas(rse=rse2, files=archived_files)
        did_client.add_files_to_archive(scope=scope, name=archive['name'], files=archived_files)
        # Dataset containing the constituents, ruled to the archive-only RSE.
        dataset_name = 'find_me.' + str(generate_uuid())
        did_client.add_dataset(scope=scope, name=dataset_name)
        did_client.attach_dids(scope=scope, name=dataset_name, dids=archived_files)
        rule_client.add_replication_rule(dids=[{'scope': scope, 'name': dataset_name}],
                                         account='root', copies=1, rse_expression=rse,
                                         grouping='DATASET')
        # Shallow listing: one replica entry, not yet transferred -> UNAVAILABLE.
        res = [r for r in replica_client.list_dataset_replicas(scope=scope, name=dataset_name)]
        assert len(res) == 1
        assert res[0]['state'] == 'UNAVAILABLE'
        # Deep listing resolves archive constituents -> three AVAILABLE entries.
        res = [r for r in replica_client.list_dataset_replicas(scope=scope, name=dataset_name, deep=True)]
        assert len(res) == 3
        assert res[0]['state'] == 'AVAILABLE'
        assert res[1]['state'] == 'AVAILABLE'
        assert res[2]['state'] == 'AVAILABLE'
        # NOTE(review): only rse_id is cleaned up; rse2_id is leaked -- confirm
        # whether the test fixture tears it down elsewhere.
        del_rse(rse_id)
class TestReplicaMetalink:
    """Tests for metalink replica listing and PFN -> DID resolution.

    Fixed for Python 3: ``dict.keys()``/``dict.values()`` return view objects
    that are not indexable, so ``result.keys()[0]`` raised TypeError; the
    views are now materialized with ``list()``. The local ``input`` dict was
    renamed (it shadowed the builtin), and an inner loop variable that
    shadowed the outer ``rse`` was renamed.
    """

    def setup(self):
        """Register one file in a dataset and replicate it on three RSEs."""
        self.did_client = DIDClient()
        self.replica_client = ReplicaClient()
        self.base_client = BaseClient(account='root',
                                      ca_cert=config_get('client', 'ca_cert'),
                                      auth_type='x509')
        self.token = self.base_client.headers['X-Rucio-Auth-Token']
        self.fname = generate_uuid()
        rses = ['MOCK', 'MOCK3', 'MOCK4']
        dsn = generate_uuid()
        self.files = [{'scope': 'mock', 'name': self.fname, 'bytes': 1, 'adler32': '0cc737eb'}]
        self.did_client.add_dataset(scope='mock', name=dsn)
        self.did_client.add_files_to_dataset('mock', name=dsn, files=self.files, rse='MOCK')
        for r in rses:
            self.replica_client.add_replicas(r, self.files)

    def test_list_replicas_metalink_4(self):
        """ REPLICA (METALINK): List replicas as metalink version 4 """
        ml = xmltodict.parse(self.replica_client.list_replicas(self.files,
                                                               metalink=4,
                                                               unavailable=True,
                                                               schemes=['https', 'sftp', 'file']),
                             xml_attribs=False)
        # one <url> per RSE the file was replicated to
        assert_equal(3, len(ml['metalink']['file']['url']))

    def test_get_did_from_pfns_nondeterministic(self):
        """ REPLICA (CLIENT): Get list of DIDs associated to PFNs for non-deterministic sites"""
        rse = 'MOCK2'
        tmp_scope = 'mock'
        nbfiles = 3
        pfns = []
        # renamed from 'input': it shadowed the builtin
        expected = {}
        rse_info = rsemgr.get_rse_info(rse)
        assert_equal(rse_info['deterministic'], False)
        # Non-deterministic RSEs require explicit PFNs at registration time.
        files = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(), 'bytes': 1,
                  'adler32': '0cc737eb',
                  'pfn': 'srm://mock2.com:8443/srm/managerv2?SFN=/rucio/tmpdisk/rucio_tests/%s/%s' % (tmp_scope, generate_uuid()),
                  'meta': {'events': 10}} for _ in range(nbfiles)]
        for f in files:
            expected[f['pfn']] = {'scope': f['scope'], 'name': f['name']}
        add_replicas(rse=rse, files=files, account='root', ignore_availability=True)
        for replica in list_replicas(dids=[{'scope': f['scope'], 'name': f['name'], 'type': DIDType.FILE} for f in files],
                                     schemes=['srm'],
                                     ignore_availability=True):
            # BUG FIX: the loop variable was named 'rse', clobbering the outer
            # 'rse' that is passed to get_did_from_pfns() below.
            for rse_key in replica['rses']:
                pfns.extend(replica['rses'][rse_key])
        for result in self.replica_client.get_did_from_pfns(pfns, rse):
            # each result maps exactly one PFN to its DID
            pfn = list(result.keys())[0]
            assert_equal(expected[pfn], list(result.values())[0])

    def test_get_did_from_pfns_deterministic(self):
        """ REPLICA (CLIENT): Get list of DIDs associated to PFNs for deterministic sites"""
        tmp_scope = 'mock'
        rse = 'MOCK3'
        nbfiles = 3
        pfns = []
        expected = {}
        rse_info = rsemgr.get_rse_info(rse)
        assert_equal(rse_info['deterministic'], True)
        files = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(), 'bytes': 1,
                  'adler32': '0cc737eb', 'meta': {'events': 10}} for _ in range(nbfiles)]
        # Deterministic RSEs derive the PFN from the LFN via the protocol.
        p = rsemgr.create_protocol(rse_info, 'read', scheme='srm')
        for f in files:
            pfn = list(p.lfns2pfns(lfns={'scope': f['scope'], 'name': f['name']}).values())[0]
            pfns.append(pfn)
            expected[pfn] = {'scope': f['scope'], 'name': f['name']}
        add_replicas(rse=rse, files=files, account='root', ignore_availability=True)
        for result in self.replica_client.get_did_from_pfns(pfns, rse):
            pfn = list(result.keys())[0]
            assert_equal(expected[pfn], list(result.values())[0])
class TestReplicaClients:
    """ Test replica declaration, bad/suspicious states and tombstones via the client. """

    def setup(self):
        """ Instantiate the clients used by every test. """
        self.replica_client = ReplicaClient()
        self.did_client = DIDClient()

    def test_add_list_bad_replicas(self):
        """ REPLICA (CLIENT): Add bad replicas"""
        tmp_scope = 'mock'
        nbfiles = 5
        # Adding replicas to deterministic RSE
        files = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(),
                  'bytes': 1, 'adler32': '0cc737eb',
                  'meta': {'events': 10}} for _ in range(nbfiles)]
        rse_info = rsemgr.get_rse_info('MOCK')
        rse_id1 = rse_info['id']
        self.replica_client.add_replicas(rse='MOCK', files=files)

        # Listing replicas on deterministic RSE
        replicas, list_rep = [], []
        for replica in self.replica_client.list_replicas(dids=[{'scope': f['scope'], 'name': f['name']} for f in files],
                                                         schemes=['srm'],
                                                         unavailable=True):
            replicas.extend(replica['rses']['MOCK'])
            list_rep.append(replica)
        r = self.replica_client.declare_bad_file_replicas(replicas, 'This is a good reason')
        # Empty dict means every PFN was recognised and declared
        assert_equal(r, {})
        bad_replicas = list_bad_replicas()
        nbbadrep = 0
        for rep in list_rep:
            for badrep in bad_replicas:
                if badrep['rse_id'] == rse_id1:
                    if badrep['scope'] == rep['scope'] and badrep['name'] == rep['name']:
                        nbbadrep += 1
        assert_equal(len(replicas), nbbadrep)

        # Run necromancer once
        necromancer_run(threads=1, bulk=10000, once=True)

        # Try to attach a lost file: must be refused
        tmp_dsn = 'dataset_%s' % generate_uuid()
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn)
        with assert_raises(UnsupportedOperation):
            self.did_client.add_files_to_dataset(tmp_scope, name=tmp_dsn, files=files, rse='MOCK')

        # Adding replicas to non-deterministic RSE
        files = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(),
                  'bytes': 1, 'adler32': '0cc737eb',
                  'pfn': 'srm://mock2.com:8443/srm/managerv2?SFN=/rucio/tmpdisk/rucio_tests/%s/%s' % (tmp_scope, generate_uuid()),
                  'meta': {'events': 10}} for _ in range(nbfiles)]
        rse_info = rsemgr.get_rse_info('MOCK2')
        rse_id2 = rse_info['id']
        self.replica_client.add_replicas(rse='MOCK2', files=files)

        # Listing replicas on non-deterministic RSE
        replicas, list_rep = [], []
        for replica in self.replica_client.list_replicas(dids=[{'scope': f['scope'], 'name': f['name']} for f in files],
                                                         schemes=['srm'],
                                                         unavailable=True):
            replicas.extend(replica['rses']['MOCK2'])
            list_rep.append(replica)
        print(replicas, list_rep)
        r = self.replica_client.declare_bad_file_replicas(replicas, 'This is a good reason')
        print(r)
        assert_equal(r, {})
        bad_replicas = list_bad_replicas()
        nbbadrep = 0
        for rep in list_rep:
            for badrep in bad_replicas:
                if badrep['rse_id'] == rse_id2:
                    if badrep['scope'] == rep['scope'] and badrep['name'] == rep['name']:
                        nbbadrep += 1
        assert_equal(len(replicas), nbbadrep)

        # Now adding non-existing bad replicas: reported back as unknown
        files = ['srm://mock2.com/rucio/tmpdisk/rucio_tests/%s/%s' % (tmp_scope, generate_uuid()), ]
        r = self.replica_client.declare_bad_file_replicas(files, 'This is a good reason')
        output = ['%s Unknown replica' % rep for rep in files]
        assert_equal(r, {'MOCK2': output})

    def test_add_suspicious_replicas(self):
        """ REPLICA (CLIENT): Add suspicious replicas"""
        tmp_scope = 'mock'
        nbfiles = 5
        # Adding replicas to deterministic RSE
        files = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(),
                  'bytes': 1, 'adler32': '0cc737eb',
                  'meta': {'events': 10}} for _ in range(nbfiles)]
        self.replica_client.add_replicas(rse='MOCK', files=files)

        # Listing replicas on deterministic RSE
        replicas = []
        list_rep = []
        for replica in self.replica_client.list_replicas(dids=[{'scope': f['scope'], 'name': f['name']} for f in files],
                                                         schemes=['srm'],
                                                         unavailable=True):
            replicas.extend(replica['rses']['MOCK'])
            list_rep.append(replica)
        r = self.replica_client.declare_suspicious_file_replicas(replicas, 'This is a good reason')
        assert_equal(r, {})

        # Adding replicas to non-deterministic RSE
        files = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(),
                  'bytes': 1, 'adler32': '0cc737eb',
                  'pfn': 'srm://mock2.com:8443/srm/managerv2?SFN=/rucio/tmpdisk/rucio_tests/%s/%s' % (tmp_scope, generate_uuid()),
                  'meta': {'events': 10}} for _ in range(nbfiles)]
        self.replica_client.add_replicas(rse='MOCK2', files=files)

        # Listing replicas on non-deterministic RSE
        replicas = []
        list_rep = []
        for replica in self.replica_client.list_replicas(dids=[{'scope': f['scope'], 'name': f['name']} for f in files],
                                                         schemes=['srm'],
                                                         unavailable=True):
            replicas.extend(replica['rses']['MOCK2'])
            list_rep.append(replica)
        r = self.replica_client.declare_suspicious_file_replicas(replicas, 'This is a good reason')
        assert_equal(r, {})

        # Now adding non-existing bad replicas: reported back as unknown
        files = ['srm://mock2.com/rucio/tmpdisk/rucio_tests/%s/%s' % (tmp_scope, generate_uuid()), ]
        r = self.replica_client.declare_suspicious_file_replicas(files, 'This is a good reason')
        output = ['%s Unknown replica' % rep for rep in files]
        assert_equal(r, {'MOCK2': output})

    def test_bad_replica_methods_for_UI(self):
        """ REPLICA (REST): Test the listing of bad and suspicious replicas """
        mw = []
        headers1 = {'X-Rucio-Account': 'root', 'X-Rucio-Username': '******', 'X-Rucio-Password': '******'}
        result = TestApp(auth_app.wsgifunc(*mw)).get('/userpass', headers=headers1, expect_errors=True)
        assert_equal(result.status, 200)
        token = str(result.header('X-Rucio-Auth-Token'))
        headers2 = {'X-Rucio-Auth-Token': str(token)}

        # No filter: all states
        data = dumps({})
        result = TestApp(rep_app.wsgifunc(*mw)).get('/bad/states', headers=headers2, params=data, expect_errors=True)
        assert_equal(result.status, 200)
        tot_files = []
        for line in result.body.split('\n'):
            if line != '':
                tot_files.append(dumps(line))
        nb_tot_files = len(tot_files)

        # Bad replicas only
        data = dumps({'state': 'B'})
        result = TestApp(rep_app.wsgifunc(*mw)).get('/bad/states', headers=headers2, params=data, expect_errors=True)
        assert_equal(result.status, 200)
        tot_bad_files = []
        for line in result.body.split('\n'):
            if line != '':
                tot_bad_files.append(dumps(line))
        nb_tot_bad_files1 = len(tot_bad_files)

        # Suspicious replicas only
        data = dumps({'state': 'S', 'list_pfns': 'True'})
        result = TestApp(rep_app.wsgifunc(*mw)).get('/bad/states', headers=headers2, params=data, expect_errors=True)
        assert_equal(result.status, 200)
        tot_suspicious_files = []
        for line in result.body.split('\n'):
            if line != '':
                tot_suspicious_files.append(dumps(line))
        nb_tot_suspicious_files = len(tot_suspicious_files)

        # Temporary unavailable replicas only
        data = dumps({'state': 'T', 'list_pfns': 'True'})
        result = TestApp(rep_app.wsgifunc(*mw)).get('/bad/states', headers=headers2, params=data, expect_errors=True)
        assert_equal(result.status, 200)
        tot_temporary_unavailable_files = []
        for line in result.body.split('\n'):
            if line != '':
                tot_temporary_unavailable_files.append(dumps(line))
        nb_tot_temporary_unavailable_files = len(tot_temporary_unavailable_files)

        # The three states must partition the full listing
        assert_equal(nb_tot_files,
                     nb_tot_bad_files1 + nb_tot_suspicious_files + nb_tot_temporary_unavailable_files)

        # Nothing can have been declared bad after "tomorrow"
        tomorrow = datetime.utcnow() + timedelta(days=1)
        data = dumps({'state': 'B', 'younger_than': tomorrow.isoformat()})
        result = TestApp(rep_app.wsgifunc(*mw)).get('/bad/states', headers=headers2, params=data, expect_errors=True)
        assert_equal(result.status, 200)
        tot_bad_files = []
        for line in result.body.split('\n'):
            if line != '':
                tot_bad_files.append(dumps(line))
        nb_tot_bad_files = len(tot_bad_files)
        assert_equal(nb_tot_bad_files, 0)

        # The per-RSE summary must agree with the flat listing
        data = dumps({})
        result = TestApp(rep_app.wsgifunc(*mw)).get('/bad/summary', headers=headers2, params=data, expect_errors=True)
        assert_equal(result.status, 200)
        nb_tot_bad_files2 = 0
        for line in result.body.split('\n'):
            if line != '':
                line = loads(line)
                nb_tot_bad_files2 += int(line.get('BAD', 0))
        assert_equal(nb_tot_bad_files1, nb_tot_bad_files2)

    def test_add_list_replicas(self):
        """ REPLICA (CLIENT): Add, change state and list file replicas """
        tmp_scope = 'mock'
        nbfiles = 5

        files1 = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(),
                   'bytes': 1, 'adler32': '0cc737eb',
                   'meta': {'events': 10}} for _ in range(nbfiles)]
        self.replica_client.add_replicas(rse='MOCK', files=files1)

        files2 = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(),
                   'bytes': 1, 'adler32': '0cc737eb',
                   'meta': {'events': 10}} for _ in range(nbfiles)]
        self.replica_client.add_replicas(rse='MOCK3', files=files2)

        replicas = [r for r in self.replica_client.list_replicas(dids=[{'scope': i['scope'], 'name': i['name']} for i in files1])]
        assert_equal(len(replicas), len(files1))

        replicas = [r for r in self.replica_client.list_replicas(dids=[{'scope': i['scope'], 'name': i['name']} for i in files2],
                                                                 schemes=['file'])]
        assert_equal(len(replicas), 5)

        replicas = [r for r in self.replica_client.list_replicas(dids=[{'scope': i['scope'], 'name': i['name']} for i in files2],
                                                                 schemes=['srm'])]
        assert_equal(len(replicas), 5)

        # Replicas registered in state 'U' (unavailable) are hidden by default
        files3 = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(),
                   'bytes': 1, 'adler32': '0cc737eb', 'state': 'U',
                   'meta': {'events': 10}} for _ in range(nbfiles)]
        self.replica_client.add_replicas(rse='MOCK3', files=files3)
        replicas = [r for r in self.replica_client.list_replicas(dids=[{'scope': i['scope'], 'name': i['name']} for i in files3],
                                                                 schemes=['file'])]
        for i in range(nbfiles):
            assert_equal(replicas[i]['rses'], {})

        # Switch them to 'A' (available) and they become visible
        files4 = []
        for file in files3:
            file['state'] = 'A'
            files4.append(file)
        self.replica_client.update_replicas_states('MOCK3', files=files4)
        replicas = [r for r in self.replica_client.list_replicas(dids=[{'scope': i['scope'], 'name': i['name']} for i in files3],
                                                                 schemes=['file'],
                                                                 unavailable=True)]
        assert_equal(len(replicas), 5)
        for i in range(nbfiles):
            assert_in('MOCK3', replicas[i]['rses'])

    def test_delete_replicas(self):
        """ REPLICA (CLIENT): Add and delete file replicas """
        tmp_scope = 'mock'
        nbfiles = 5
        files = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(),
                  'bytes': 1, 'adler32': '0cc737eb',
                  'meta': {'events': 10}} for _ in range(nbfiles)]
        self.replica_client.add_replicas(rse='MOCK', files=files)
        # Deletion via the client is refused for a regular account
        with assert_raises(AccessDenied):
            self.replica_client.delete_replicas(rse='MOCK', files=files)
        # replicas = [r for r in self.replica_client.list_replicas(dids=[{'scope': i['scope'], 'name': i['name']} for i in files])]
        # assert_equal(len(replicas), 0)

    def test_add_temporary_unavailable_pfns(self):
        """ REPLICA (CLIENT): Add temporary unavailable PFNs"""
        tmp_scope = 'mock'
        nbfiles = 5
        # Adding replicas to deterministic RSE
        files = [{'scope': tmp_scope, 'name': 'file_%s' % generate_uuid(),
                  'bytes': 1, 'adler32': '0cc737eb',
                  'meta': {'events': 10}} for _ in range(nbfiles)]
        self.replica_client.add_replicas(rse='MOCK', files=files)

        # Listing replicas on deterministic RSE
        list_rep = []
        for replica in self.replica_client.list_replicas(dids=[{'scope': f['scope'], 'name': f['name']} for f in files],
                                                         schemes=['srm'],
                                                         unavailable=True):
            # Fix: dict views are not subscriptable on Python 3 -- materialise first
            pfn = list(replica['pfns'])[0]
            list_rep.append(pfn)

        # Submit bad PFNs
        now = datetime.utcnow()
        reason_str = generate_uuid()
        self.replica_client.add_bad_pfns(pfns=list_rep, reason=str(reason_str),
                                         state='TEMPORARY_UNAVAILABLE',
                                         expires_at=now.isoformat())
        result = get_bad_pfns(limit=10000, thread=None, total_threads=None, session=None)
        bad_pfns = {}
        for res in result:
            bad_pfns[res['pfn']] = (res['state'], res['reason'], res['expires_at'])

        for pfn in list_rep:
            pfn = str(clean_surls([pfn])[0])
            assert_in(pfn, bad_pfns)
            assert_equal(str(bad_pfns[pfn][0]), 'TEMPORARY_UNAVAILABLE')
            assert_equal(bad_pfns[pfn][1], reason_str)

        # Submit with wrong state
        with assert_raises(RucioException):
            self.replica_client.add_bad_pfns(pfns=list_rep, reason=str(reason_str),
                                             state='BADSTATE',
                                             expires_at=now.isoformat())

        # Run minos once: the submitted PFNs are consumed from the queue
        minos_run(threads=1, bulk=10000, once=True)
        result = get_bad_pfns(limit=10000, thread=None, total_threads=None, session=None)
        pfns = [res['pfn'] for res in result]
        res_pfns = []
        for replica in list_rep:
            if replica in pfns:
                res_pfns.append(replica)
        assert_equal(res_pfns, [])

        # Check the state in the replica table
        for did in files:
            rep = get_replicas_state(scope=did['scope'], name=did['name'])
            # Fix: dict views are not subscriptable on Python 3
            assert_equal(str(list(rep)[0]), 'TEMPORARY_UNAVAILABLE')

        rep = []
        for did in files:
            did['state'] = ReplicaState.from_sym('TEMPORARY_UNAVAILABLE')
            rep.append(did)

        # Run the minos expiration
        minos_temp_run(threads=1, once=True)
        # Check the state in the replica table: back to AVAILABLE after expiry
        for did in files:
            rep = get_replicas_state(scope=did['scope'], name=did['name'])
            assert_equal(str(list(rep)[0]), 'AVAILABLE')

    def test_set_tombstone(self):
        """ REPLICA (CLIENT): set tombstone on replica """
        # Set tombstone on one replica
        rse = 'MOCK4'
        scope = 'mock'
        user = '******'
        name = generate_uuid()
        add_replica(rse, scope, name, 4, user)
        assert_equal(get_replica(rse, scope, name)['tombstone'], None)
        self.replica_client.set_tombstone([{'rse': rse, 'scope': scope, 'name': name}])
        assert_equal(get_replica(rse, scope, name)['tombstone'], OBSOLETE)

        # Set tombstone on locked replica: must be refused
        name = generate_uuid()
        add_replica(rse, scope, name, 4, user)
        RuleClient().add_replication_rule([{'name': name, 'scope': scope}], 1, rse, locked=True)
        with assert_raises(ReplicaIsLocked):
            self.replica_client.set_tombstone([{'rse': rse, 'scope': scope, 'name': name}])

        # Set tombstone on not found replica
        name = generate_uuid()
        with assert_raises(ReplicaNotFound):
            self.replica_client.set_tombstone([{'rse': rse, 'scope': scope, 'name': name}])
class TestDIDClients:
    """ Test DID listing and metadata filters via the client. """

    def setup(self):
        """ Instantiate every client used by the DID tests. """
        self.account_client = AccountClient()
        self.scope_client = ScopeClient()
        self.meta_client = MetaClient()
        self.did_client = DIDClient()
        self.replica_client = RplicaClientAlias = ReplicaClient()
        self.rse_client = RSEClient()

    def test_list_dids(self):
        """ DATA IDENTIFIERS (CLIENT): List dids by pattern."""
        tmp_scope = scope_name_generator()
        tmp_rse = 'MOCK'
        tmp_files = ['file_a_1%s' % generate_uuid(),
                     'file_a_2%s' % generate_uuid(),
                     'file_b_1%s' % generate_uuid()]
        self.scope_client.add_scope('jdoe', tmp_scope)
        for tmp_file in tmp_files:
            self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb')

        # Wildcard matching on the name filter
        results = [entry for entry in self.did_client.list_dids(tmp_scope, {'name': 'file_a_*'}, type='file')]
        assert_equal(len(results), 2)
        results = [entry for entry in self.did_client.list_dids(tmp_scope, {'name': 'file_a_1*'}, type='file')]
        assert_equal(len(results), 1)
        results = [entry for entry in self.did_client.list_dids(tmp_scope, {'name': 'file_*_1*'}, type='file')]
        assert_equal(len(results), 2)
        results = [entry for entry in self.did_client.list_dids(tmp_scope, {'name': 'file*'}, type='file')]
        assert_equal(len(results), 3)

        # NOTE(review): this expects zero matches with created_after one hour
        # in the past -- confirm the intended server-side semantics.
        filters = {'name': 'file*', 'created_after': datetime.utcnow() - timedelta(hours=1)}
        results = [entry for entry in self.did_client.list_dids(tmp_scope, filters)]
        assert_equal(len(results), 0)

        # An unknown did type must be rejected
        with assert_raises(UnsupportedOperation):
            self.did_client.list_dids(tmp_scope, {'name': 'file*'}, type='whateverytype')

    def test_list_recursive(self):
        """ DATA IDENTIFIERS (CLIENT): List did recursive """
        # Create nested containers and datast
        tmp_scope_1 = 'list-did-recursive'
        tmp_scope_2 = 'list-did-recursive-2'
        self.scope_client.add_scope('root', tmp_scope_1)
        self.scope_client.add_scope('root', tmp_scope_2)

        tmp_container_1 = 'container_%s' % generate_uuid()
        self.did_client.add_container(scope=tmp_scope_1, name=tmp_container_1)
        tmp_container_2 = 'container_%s' % generate_uuid()
        self.did_client.add_container(scope=tmp_scope_1, name=tmp_container_2)
        tmp_dataset_1 = 'dataset_%s' % generate_uuid()
        self.did_client.add_dataset(scope=tmp_scope_2, name=tmp_dataset_1)
        tmp_dataset_2 = 'dataset_%s' % generate_uuid()
        self.did_client.add_dataset(scope=tmp_scope_1, name=tmp_dataset_2)

        # container_1 -> dataset_1 (other scope), container_1 -> container_2 -> dataset_2
        self.did_client.attach_dids(scope=tmp_scope_1, name=tmp_container_1,
                                    dids=[{'scope': tmp_scope_2, 'name': tmp_dataset_1}])
        self.did_client.attach_dids(scope=tmp_scope_1, name=tmp_container_2,
                                    dids=[{'scope': tmp_scope_1, 'name': tmp_dataset_2}])
        self.did_client.attach_dids(scope=tmp_scope_1, name=tmp_container_1,
                                    dids=[{'scope': tmp_scope_1, 'name': tmp_container_2}])

        # List DIDs not recursive - only the first container is expected
        found = [str(did) for did in self.did_client.list_dids(scope=tmp_scope_1, recursive=False,
                                                               type='all', filters={'name': tmp_container_1})]
        assert_equal(found, [tmp_container_1])

        # List DIDs recursive - first container and all attached collections are expected
        found = [str(did) for did in self.did_client.list_dids(scope=tmp_scope_1, recursive=True,
                                                               type='all', filters={'name': tmp_container_1})]
        for expected in (tmp_container_1, tmp_container_2, tmp_dataset_1, tmp_dataset_2):
            assert_true(expected in found)
        assert_equal(len(found), 4)

        # List DIDs recursive - only containers are expected
        found = [str(did) for did in self.did_client.list_dids(scope=tmp_scope_1, recursive=True,
                                                               type='container', filters={'name': tmp_container_1})]
        for expected in (tmp_container_1, tmp_container_2):
            assert_true(expected in found)
        for unexpected in (tmp_dataset_1, tmp_dataset_2):
            assert_true(unexpected not in found)
        assert_equal(len(found), 2)

    def test_list_by_length(self):
        """ DATA IDENTIFIERS (CLIENT): List did with length """
        tmp_scope = 'mock'
        tmp_dsn = 'dsn_%s' % generate_uuid()
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn)

        # At least one did has a positive length
        matches = list(self.did_client.list_dids(tmp_scope, {'length.gt': 0}))
        assert_not_equal(len(matches), 0)

        # The freshly created dataset is empty, so (-1, 1) exclusive yields nothing
        matches = list(self.did_client.list_dids(tmp_scope, {'length.gt': -1, 'length.lt': 1}))
        assert_equal(len(matches), 0)

        matches = list(self.did_client.list_dids(tmp_scope, {'length': 0}))
        assert_equal(len(matches), 0)

    def test_list_by_metadata(self):
        """ DATA IDENTIFIERS (CLIENT): List did with metadata"""
        tmp_scope = 'mock'
        dsns = []

        dataset_meta = {'project': 'data12_8TeV',
                        'run_number': 400000,
                        'stream_name': 'physics_CosmicCalo',
                        'prod_step': 'merge',
                        'datatype': 'NTUP_TRIG',
                        'version': 'f392_m920',
                        }
        tmp_dsn1 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn1)
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn1, meta=dataset_meta)

        tmp_dsn2 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn2)
        dataset_meta['run_number'] = 400001
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn2, meta=dataset_meta)

        tmp_dsn3 = 'dsn_%s' % generate_uuid()
        dsns.append(tmp_dsn3)
        dataset_meta['stream_name'] = 'physics_Egamma'
        dataset_meta['datatype'] = 'NTUP_SMWZ'
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn3, meta=dataset_meta)

        # All three datasets share project + version
        matches = list(self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'version': 'f392_m920'}))
        for dsn in dsns:
            assert_in(dsn, matches)
        dsns.remove(tmp_dsn1)

        # dsn2 and dsn3 share the bumped run number
        matches = list(self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV', 'run_number': 400001}))
        for dsn in dsns:
            assert_in(dsn, matches)
        dsns.remove(tmp_dsn2)

        # Only dsn3 carries the Egamma stream + SMWZ datatype
        matches = list(self.did_client.list_dids(tmp_scope, {'project': 'data12_8TeV',
                                                             'stream_name': 'physics_Egamma',
                                                             'datatype': 'NTUP_SMWZ'}))
        for dsn in dsns:
            assert_in(dsn, matches)

        # Filtering on an undeclared key must be rejected
        with assert_raises(KeyNotFound):
            self.did_client.list_dids(tmp_scope, {'NotReallyAKey': 'NotReallyAValue'})
    def test_add_did(self):
        """ DATA IDENTIFIERS (CLIENT): Add, populate, list did content and create a sample"""
        tmp_scope = 'mock'
        tmp_rse = 'MOCK'
        tmp_dsn = 'dsn_%s' % generate_uuid()
        root = InternalAccount('root')
        set_local_account_limit(root, get_rse_id('MOCK'), -1)
        set_local_account_limit(root, get_rse_id('CERN-PROD_TZERO'), -1)

        # PFN example: rfio://castoratlas.cern.ch/castor/cern.ch/grid/atlas/tzero/xx/xx/xx/filename
        dataset_meta = {'project': 'data13_hip',
                        'run_number': 300000,
                        'stream_name': 'physics_CosmicCalo',
                        'prod_step': 'merge',
                        'datatype': 'NTUP_TRIG',
                        'version': 'f392_m927',
                        }
        rules = [{'copies': 1, 'rse_expression': 'MOCK', 'account': 'root'}]

        # Creating a dataset in an unknown scope must fail
        with assert_raises(ScopeNotFound):
            self.did_client.add_dataset(scope='Nimportnawak', name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules)

        # Attaching files that were never registered must fail
        files = [{'scope': tmp_scope, 'name': 'lfn.%(tmp_dsn)s.' % locals() + str(generate_uuid()), 'bytes': 724963570, 'adler32': '0cc737eb'}, ]
        with assert_raises(DataIdentifierNotFound):
            self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules, files=files)

        with assert_raises(DataIdentifierNotFound):
            self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn, files=files)

        files = []
        for i in range(5):
            lfn = 'lfn.%(tmp_dsn)s.' % locals() + str(generate_uuid())
            pfn = 'mock://localhost/tmp/rucio_rse/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            # it doesn't work with mock: TBF
            # pfn = 'srm://mock2.com:2880/pnfs/rucio/disk-only/scratchdisk/rucio_tests/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            pfn += '%(tmp_dsn)s/%(lfn)s' % locals()
            file_meta = {'guid': str(generate_uuid()), 'events': 10}
            files.append({'scope': tmp_scope, 'name': lfn,
                          'bytes': 724963570, 'adler32': '0cc737eb',
                          'pfn': pfn, 'meta': file_meta})

        rules = [{'copies': 1, 'rse_expression': 'CERN-PROD_TZERO', 'lifetime': timedelta(days=2), 'account': 'root'}]

        # PFNs are not acceptable on a deterministic RSE
        with assert_raises(InvalidPath):
            self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules, files=files, rse=tmp_rse)

        files_without_pfn = [{'scope': i['scope'], 'name': i['name'], 'bytes': i['bytes'], 'adler32': i['adler32'], 'meta': i['meta']} for i in files]
        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, statuses={'monotonic': True}, meta=dataset_meta, rules=rules, files=files_without_pfn, rse=tmp_rse)

        # Re-creating the same dataset must fail
        with assert_raises(DataIdentifierAlreadyExists):
            self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, files=files, rse=tmp_rse)

        files = []
        for i in range(5):
            lfn = '%(tmp_dsn)s.' % locals() + str(generate_uuid())
            pfn = 'mock://localhost/tmp/rucio_rse/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            # it doesn't work with mock: TBF
            # pfn = 'srm://mock2.com:2880/pnfs/rucio/disk-only/scratchdisk/rucio_tests/%(project)s/%(version)s/%(prod_step)s' % dataset_meta
            pfn += '%(tmp_dsn)s/%(lfn)s' % locals()
            file_meta = {'guid': str(generate_uuid()), 'events': 100}
            files.append({'scope': tmp_scope, 'name': lfn,
                          'bytes': 724963570, 'adler32': '0cc737eb',
                          'pfn': pfn, 'meta': file_meta})
        rules = [{'copies': 1, 'rse_expression': 'CERN-PROD_TZERO', 'lifetime': timedelta(days=2)}]

        with assert_raises(InvalidPath):
            self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn, files=files, rse=tmp_rse)
        files_without_pfn = [{'scope': i['scope'], 'name': i['name'], 'bytes': i['bytes'], 'adler32': i['adler32'], 'meta': i['meta']} for i in files]
        self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dsn, files=files_without_pfn, rse=tmp_rse)

        self.did_client.close(scope=tmp_scope, name=tmp_dsn)

        # Sample a two-file subset from the closed dataset
        tmp_dsn_output = 'dsn_%s' % generate_uuid()
        self.did_client.create_did_sample(input_scope=tmp_scope, input_name=tmp_dsn, output_scope=tmp_scope, output_name=tmp_dsn_output, nbfiles=2)
        files = [f for f in self.did_client.list_files(scope=tmp_scope, name=tmp_dsn_output)]
        assert_equal(len(files), 2)

    def test_attach_dids_to_dids(self):
        """ DATA IDENTIFIERS (CLIENT): Attach dids to dids"""
        tmp_scope = 'mock'
        tmp_rse = 'MOCK'
        nb_datasets = 5
        nb_files = 5
        attachments, dsns = list(), list()
        guid_to_query = None
        dsn = {}
        for i in range(nb_datasets):
            attachment = {}
            attachment['scope'] = tmp_scope
            attachment['name'] = 'dsn.%s' % str(generate_uuid())
            attachment['rse'] = tmp_rse
            files = []
            # NOTE: inner loop reuses index name `i` and shadows the outer one
            for i in range(nb_files):
                files.append({'scope': tmp_scope, 'name': 'lfn.%s' % str(generate_uuid()),
                              'bytes': 724963570, 'adler32': '0cc737eb',
                              'meta': {'guid': str(generate_uuid()), 'events': 100}})
            attachment['dids'] = files
            guid_to_query = files[0]['meta']['guid']
            dsn = {'scope': tmp_scope, 'name': attachment['name']}
            dsns.append(dsn)
            attachments.append(attachment)

        self.did_client.add_datasets(dsns=dsns)
        self.did_client.attach_dids_to_dids(attachments=attachments)
        # Lookup by GUID must resolve to the last dataset created above
        dsns_l = [i for i in self.did_client.get_dataset_by_guid(guid_to_query)]
        assert_equal([dsn], dsns_l)

        # Attaching files directly to a container is not allowed
        cnt_name = 'cnt_%s' % generate_uuid()
        self.did_client.add_container(scope='mock', name=cnt_name)
        with assert_raises(UnsupportedOperation):
            self.did_client.attach_dids_to_dids([{'scope': 'mock', 'name': cnt_name, 'rse': tmp_rse, 'dids': attachment['dids']}])

    def test_add_files_to_datasets(self):
        """ DATA IDENTIFIERS (CLIENT): Add files to Datasets"""
        tmp_scope = 'mock'
        tmp_rse = 'MOCK'
        dsn1 = 'dsn.%s' % str(generate_uuid())
        dsn2 = 'dsn.%s' % str(generate_uuid())
        meta = {'transient': True}
        files1, files2, nb_files = [], [], 5
        for i in range(nb_files):
            files1.append({'scope': tmp_scope, 'name': 'lfn.%s' % str(generate_uuid()),
                           'bytes': 724963570, 'adler32': '0cc737eb',
                           'meta': {'guid': str(generate_uuid()), 'events': 100}})
            files2.append({'scope': tmp_scope, 'name': 'lfn.%s' % str(generate_uuid()),
                           'bytes': 724963570, 'adler32': '0cc737eb',
                           'meta': {'guid': str(generate_uuid()), 'events': 100}})

        self.did_client.add_dataset(scope=tmp_scope, name=dsn1, files=files1, rse=tmp_rse, meta=meta)
        self.did_client.add_dataset(scope=tmp_scope, name=dsn2, files=files2, rse=tmp_rse, meta=meta)

        # Cross-attach: each dataset receives the other's files
        attachments = [{'scope': tmp_scope, 'name': dsn1, 'dids': files2, 'rse': tmp_rse},
                       {'scope': tmp_scope, 'name': dsn2, 'dids': files1, 'rse': tmp_rse}]

        self.did_client.add_files_to_datasets(attachments)
        files = [f for f in self.did_client.list_files(scope=tmp_scope, name=dsn1)]
        assert_equal(len(files), 10)

        # Re-attaching the same files must fail...
        with assert_raises(FileAlreadyExists):
            self.did_client.add_files_to_datasets(attachments)

        for attachment in attachments:
            for i in range(nb_files):
                attachment['dids'].append({'scope': tmp_scope,
                                           'name': 'lfn.%s' % str(generate_uuid()),
                                           'bytes': 724963570,
                                           'adler32': '0cc737eb',
                                           'meta': {'guid': str(generate_uuid()),
                                                    'events': 100}})

        # ...unless duplicates are explicitly ignored
        self.did_client.add_files_to_datasets(attachments, ignore_duplicate=True)
        files = [f for f in self.did_client.list_files(scope=tmp_scope, name=dsn1)]
        assert_equal(len(files), 15)

        # Corrupt meta-data
        files = []
        for attachment in attachments:
            for file in attachment['dids']:
                file['bytes'] = 1000
                break

        with assert_raises(FileConsistencyMismatch):
            self.did_client.add_files_to_datasets(attachments, ignore_duplicate=True)

    def test_add_dataset(self):
        """ DATA IDENTIFIERS (CLIENT): Add dataset """
        tmp_scope = 'mock'
        tmp_dsn = 'dsn_%s' % generate_uuid()

        self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn, meta={'project': 'data13_hip'})

        did = self.did_client.get_did(tmp_scope, tmp_dsn)
        assert_equal(did['scope'], tmp_scope)
        assert_equal(did['name'], tmp_dsn)

        with assert_raises(DataIdentifierNotFound):
            self.did_client.get_did('i_dont_exist', 'neither_do_i')

    def test_add_datasets(self):
        """ DATA IDENTIFIERS (CLIENT): Bulk add datasets """
        tmp_scope = 'mock'
        dsns = list()
        for i in range(500):
            tmp_dsn = {'name': 'dsn_%s' % generate_uuid(), 'scope': tmp_scope, 'meta': {'project': 'data13_hip'}}
            dsns.append(tmp_dsn)
        self.did_client.add_datasets(dsns)

    def test_exists(self):
        """ DATA IDENTIFIERS (CLIENT): Check if data identifier exists """
        tmp_scope = 'mock'
        tmp_file = 'file_%s' % generate_uuid()
        tmp_rse = 'MOCK'

        self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb')

        did = self.did_client.get_did(tmp_scope, tmp_file)
        assert_equal(did['scope'], tmp_scope)
        assert_equal(did['name'], tmp_file)

        with assert_raises(DataIdentifierNotFound):
            self.did_client.get_did('i_dont_exist', 'neither_do_i')

    def test_did_hierarchy(self):
        """ DATA IDENTIFIERS (CLIENT): Check did hierarchy rule """
        account = 'jdoe'
        rse = 'MOCK'
        scope = scope_name_generator()
        file = ['file_%s' % generate_uuid() for i in range(10)]
        dst = ['dst_%s' % generate_uuid() for i in range(4)]
        cnt = ['cnt_%s' % generate_uuid() for i in range(4)]

        self.scope_client.add_scope(account, scope)
        for i in range(10):
            self.replica_client.add_replica(rse, scope, file[i], 1, '0cc737eb')
        for i in range(4):
            self.did_client.add_did(scope, dst[i], 'DATASET', statuses=None, meta=None, rules=None)
        for i in range(4):
            self.did_client.add_did(scope, cnt[i], 'CONTAINER', statuses=None, meta=None, rules=None)

        # Two files per dataset
        for i in range(4):
            self.did_client.add_files_to_dataset(scope, dst[i], [{'scope': scope, 'name': file[2 * i], 'bytes': 1, 'adler32': '0cc737eb'},
                                                                 {'scope': scope, 'name': file[2 * i + 1], 'bytes': 1, 'adler32': '0cc737eb'}])

        self.did_client.add_containers_to_container(scope, cnt[1], [{'scope': scope, 'name': cnt[2]}, {'scope': scope, 'name': cnt[3]}])
        self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': dst[2]}])

        result = self.did_client.scope_list(scope, recursive=True)
        for r in result:
            pass
            # TODO: fix, fix, fix
            # if r['name'] == cnt[1]:
            #     assert_equal(r['type'], 'container')
            #     assert_equal(r['level'], 0)
            # if (r['name'] == cnt[0]) or (r['name'] == dst[0]) or (r['name'] == file[8]) or (r['name'] == file[9]):
            #     assert_equal(r['level'], 0)
            # else:
            #     assert_equal(r['level'], 1)

    def test_detach_did(self):
        """ DATA IDENTIFIERS (CLIENT): Detach dids from a did"""
        account = 'jdoe'
        rse = 'MOCK'
        scope = scope_name_generator()
        file = ['file_%s' % generate_uuid() for i in range(10)]
        dst = ['dst_%s' % generate_uuid() for i in range(5)]
        cnt = ['cnt_%s' % generate_uuid() for i in range(2)]

        self.scope_client.add_scope(account, scope)
        for i in range(10):
            self.replica_client.add_replica(rse, scope, file[i], 1, '0cc737eb')
        for i in range(5):
            self.did_client.add_dataset(scope, dst[i], statuses=None, meta=None, rules=None)
        for i in range(2):
            self.did_client.add_container(scope, cnt[i], statuses=None, meta=None, rules=None)

        # Two files per dataset
        for i in range(5):
            self.did_client.add_files_to_dataset(scope, dst[i], [{'scope': scope, 'name': file[2 * i], 'bytes': 1, 'adler32': '0cc737eb'},
                                                                 {'scope': scope, 'name': file[2 * i + 1], 'bytes': 1, 'adler32': '0cc737eb'}])

        self.did_client.add_containers_to_container(scope, cnt[1], [{'scope': scope, 'name': dst[2]}, {'scope': scope, 'name': dst[3]}])

        # Mixing datasets and containers in one attach must be refused
        with assert_raises(UnsupportedOperation):
            self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': cnt[1]}])

        self.did_client.add_datasets_to_container(scope, cnt[0], [{'scope': scope, 'name': dst[1]}, {'scope': scope, 'name': dst[2]}])

        self.did_client.detach_dids(scope, cnt[0], [{'scope': scope, 'name': dst[1]}])
        self.did_client.detach_dids(scope, dst[3], [{'scope': scope, 'name': file[6]}, {'scope': scope, 'name': file[7]}])

        result = self.did_client.scope_list(scope, recursive=True)
        for r in result:
            if r['name'] == dst[1]:
                assert_equal(r['level'], 0)
            if r['type'] == 'file':
                if (r['name'] in file[6:9]):
                    assert_equal(r['level'], 0)
                else:
                    assert_not_equal(r['level'], 0)

        # A did cannot be detached from itself
        with assert_raises(UnsupportedOperation):
            self.did_client.detach_dids(scope=scope, name=cnt[0], dids=[{'scope': scope, 'name': cnt[0]}])

        # Detaching from a closed dataset must update its bytes/length accounting
        self.did_client.close(scope, dst[4])
        metadata = self.did_client.get_metadata(scope, dst[4])
        i_bytes, i_length = metadata['bytes'], metadata['length']
        metadata = self.did_client.get_metadata(scope, file[8])
        file1_bytes = metadata['bytes']
        metadata = self.did_client.get_metadata(scope, file[9])
        file2_bytes = metadata['bytes']
        self.did_client.detach_dids(scope, dst[4], [{'scope': scope, 'name': file[8]}, {'scope': scope, 'name': file[9]}])
        metadata = self.did_client.get_metadata(scope, dst[4])
        f_bytes, f_length = metadata['bytes'], metadata['length']
        assert_equal(i_bytes, f_bytes + file1_bytes + file2_bytes)
        assert_equal(i_length, f_length + 1 + 1)

    def test_scope_list(self):
        """ DATA IDENTIFIERS (CLIENT): Add, aggregate, and list data identifiers in a scope """
        # create some dummy data
        self.tmp_accounts = ['jdoe' for i in range(3)]
        self.tmp_scopes = [scope_name_generator() for i in range(3)]
        self.tmp_rses = [rse_name_generator() for i in range(3)]
        self.tmp_files = ['file_%s' % generate_uuid() for i in range(3)]
        self.tmp_datasets = ['dataset_%s' % generate_uuid() for i in range(3)]
        self.tmp_containers = ['container_%s' % generate_uuid() for i in range(3)]

        # add dummy data to the catalogue
        for i in range(3):
            self.scope_client.add_scope(self.tmp_accounts[i], self.tmp_scopes[i])
            self.rse_client.add_rse(self.tmp_rses[i])
            self.replica_client.add_replica(self.tmp_rses[i], self.tmp_scopes[i], self.tmp_files[i], 1, '0cc737eb')

        # put files in datasets
        for i in range(3):
            for j in range(3):
                files = [{'scope': self.tmp_scopes[j], 'name': self.tmp_files[j], 'bytes': 1, 'adler32': '0cc737eb'}]
                self.did_client.add_dataset(self.tmp_scopes[i], self.tmp_datasets[j])
                self.did_client.add_files_to_dataset(self.tmp_scopes[i], self.tmp_datasets[j], files)

        # put datasets in containers
        for i in range(3):
            for j in range(3):
                datasets = [{'scope': self.tmp_scopes[j], 'name': self.tmp_datasets[j]}]
                self.did_client.add_container(self.tmp_scopes[i], self.tmp_containers[j])
                self.did_client.add_datasets_to_container(self.tmp_scopes[i], self.tmp_containers[j], datasets)

        # reverse check if everything is in order
        for i in range(3):
            result = self.did_client.scope_list(self.tmp_scopes[i], recursive=True)

            r_topdids = []
            r_otherscopedids = []
            r_scope = []
            for r in result:
                if r['level'] == 0:
                    r_topdids.append(r['scope'] + ':' + r['name'])
                    r_scope.append(r['scope'])
                if r['scope'] != self.tmp_scopes[i]:
                    r_otherscopedids.append(r['scope'] + ':' + r['name'])
                    assert_in(r['level'], [1, 2])

            for j in range(3):
                assert_equal(self.tmp_scopes[i], r_scope[j])
                if j != i:
                    assert_in(self.tmp_scopes[j] + ':' + self.tmp_files[j], r_otherscopedids)
            assert_not_in(self.tmp_scopes[i] + ':' + self.tmp_files[i], r_topdids)

    def test_get_did(self):
        """ DATA IDENTIFIERS (CLIENT): add a new data identifier and try to retrieve it back"""
        rse = 'MOCK'
        scope = 'mock'
        file = generate_uuid()
        dsn = generate_uuid()

        self.replica_client.add_replica(rse, scope, file, 1, '0cc737eb')

        did = self.did_client.get_did(scope, file)
        assert_equal(did['scope'], scope)
        assert_equal(did['name'], file)

        # A dataset with a lifetime gets an expiration timestamp
        self.did_client.add_dataset(scope=scope, name=dsn, lifetime=10000000)
        did2 = self.did_client.get_did(scope, dsn)
        assert_equal(type(did2['expired_at']), datetime)

    def test_get_meta(self):
        """ DATA IDENTIFIERS (CLIENT): add a new meta data for an identifier and try to retrieve it back"""
        rse = 'MOCK'
        scope = 'mock'
        file = generate_uuid()
        keys = ['project', 'run_number']
        values = ['data13_hip', 12345678]

        self.replica_client.add_replica(rse, scope, file, 1, '0cc737eb')
        for i in range(2):
            self.did_client.set_metadata(scope, file, keys[i], values[i])

        meta = self.did_client.get_metadata(scope, file)
        for i in range(2):
            assert_equal(meta[keys[i]], values[i])

    def test_list_content(self):
        """ DATA IDENTIFIERS (CLIENT): test to list contents for an identifier"""
        rse = 'MOCK'
        scope = 'mock'
        nbfiles = 5
        dataset1 = generate_uuid()
        dataset2 = generate_uuid()
        container = generate_uuid()
        files1 = [{'scope': scope, 'name': generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb'} for i in range(nbfiles)]
        files2 = [{'scope': scope, 'name': generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb'} for i in range(nbfiles)]

        self.did_client.add_dataset(scope, dataset1)

        with assert_raises(DataIdentifierAlreadyExists):
            self.did_client.add_dataset(scope, dataset1)

        self.did_client.add_files_to_dataset(scope, dataset1, files1, rse=rse)

        self.did_client.add_dataset(scope, dataset2)
        self.did_client.add_files_to_dataset(scope, dataset2, files2, rse=rse)

        self.did_client.add_container(scope, container)
        datasets = [{'scope': scope, 'name': dataset1}, {'scope': scope, 'name': dataset2}]
        self.did_client.add_datasets_to_container(scope, container, datasets)

        contents = self.did_client.list_content(scope, container)

        datasets_s = [d['name'] for d in contents]
        assert_in(dataset1, datasets_s)
        assert_in(dataset2, datasets_s)

    def test_list_files(self):
        """ DATA IDENTIFIERS (CLIENT): List files for a container"""
        rse = 'MOCK'
        scope = 'mock'
        dataset1 = generate_uuid()
        dataset2 = generate_uuid()
        container = generate_uuid()
        files1 = []
        files2 = []

        for i in range(10):
            files1.append({'scope': scope, 'name': generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb'})
            files2.append({'scope': scope, 'name': generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb'})

        for i in range(10):
            self.replica_client.add_replica(rse, scope, files1[i]['name'], 1, '0cc737eb')
            self.replica_client.add_replica(rse, scope, files2[i]['name'], 1, '0cc737eb')

        self.did_client.add_dataset(scope, dataset1)
        self.did_client.add_files_to_dataset(scope, dataset1, files1)

        self.did_client.add_dataset(scope, dataset2)
        self.did_client.add_files_to_dataset(scope, dataset2, files2)
        datasets = [{'scope': scope, 'name': dataset1}, {'scope': scope, 'name': dataset2}]
        self.did_client.add_container(scope, container)
        self.did_client.add_datasets_to_container(scope, container, datasets)

        # List file content
        # NOTE: relies on loop variable `i` keeping its last value (9) here
        content = self.did_client.list_files(scope, files1[i]['name'])
        assert_true(content is not None)
        for d in content:
            assert_true(d['name'] == files1[i]['name'])

        # List container content
        for d in [{'name': x['name'], 'scope': x['scope'], 'bytes': x['bytes'], 'adler32': x['adler32']} for x in self.did_client.list_files(scope, container)]:
            assert_in(d, files1 + files2)

        # List non-existing data identifier content
        with assert_raises(DataIdentifierNotFound):
            self.did_client.list_files(scope, 'Nimportnawak')

    def test_list_replicas(self):
        """ DATA IDENTIFIERS (CLIENT): List replicas for a container"""
        rse = 'MOCK'
        scope = 'mock'
        dsn1 = generate_uuid()
        dsn2 = generate_uuid()
        cnt = generate_uuid()
        files1 = []
        files2 = []
        for i in range(10):
            files1.append({'scope': scope, 'name': generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb'})
            files2.append({'scope': scope, 'name': generate_uuid(), 'bytes': 1, 'adler32': '0cc737eb'})

        self.did_client.add_dataset(scope, dsn1)
        # NOTE(review): this method continues beyond the visible chunk
self.did_client.add_files_to_dataset(scope, dsn1, files1, rse=rse) self.did_client.add_dataset(scope, dsn2) self.did_client.add_files_to_dataset(scope, dsn2, files2, rse=rse) self.did_client.add_container(scope, cnt) self.did_client.add_datasets_to_container(scope, cnt, [{'scope': scope, 'name': dsn1}, {'scope': scope, 'name': dsn2}]) replicas = self.replica_client.list_replicas(dids=[{'scope': scope, 'name': dsn1}]) assert_true(replicas is not None) replicas = self.replica_client.list_replicas(dids=[{'scope': scope, 'name': cnt}]) assert_true(replicas is not None) @raises(UnsupportedOperation) def test_close(self): """ DATA IDENTIFIERS (CLIENT): test to close data identifiers""" tmp_rse = 'MOCK' tmp_scope = 'mock' # Add dataset tmp_dataset = 'dsn_%s' % generate_uuid() # Add file replica tmp_file = 'file_%s' % generate_uuid() self.replica_client.add_replica(rse=tmp_rse, scope=tmp_scope, name=tmp_file, bytes=1, adler32='0cc737eb') # Add dataset self.did_client.add_dataset(scope=tmp_scope, name=tmp_dataset) # Add files to dataset files = [{'scope': tmp_scope, 'name': tmp_file, 'bytes': 1, 'adler32': '0cc737eb'}, ] self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dataset, files=files) # Add a second file replica tmp_file = 'file_%s' % generate_uuid() self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb') # Add files to dataset files = [{'scope': tmp_scope, 'name': tmp_file, 'bytes': 1, 'adler32': '0cc737eb'}, ] self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dataset, files=files) # Close dataset with assert_raises(UnsupportedStatus): self.did_client.set_status(scope=tmp_scope, name=tmp_dataset, close=False) self.did_client.set_status(scope=tmp_scope, name=tmp_dataset, open=False) # Add a third file replica tmp_file = 'file_%s' % generate_uuid() self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb') # Add files to dataset files = [{'scope': tmp_scope, 'name': tmp_file, 'bytes': 1, 'adler32': 
'0cc737eb'}, ] self.did_client.attach_dids(scope=tmp_scope, name=tmp_dataset, dids=files) @raises def test_open(self): """ DATA IDENTIFIERS (CLIENT): test to re-open data identifiers for priv account""" tmp_rse = 'MOCK' tmp_scope = 'mock' # Add dataset tmp_dataset = 'dsn_%s' % generate_uuid() # Add file replica tmp_file = 'file_%s' % generate_uuid() self.replica_client.add_replica(rse=tmp_rse, scope=tmp_scope, name=tmp_file, bytes=1, adler32='0cc737eb') # Add dataset self.did_client.add_dataset(scope=tmp_scope, name=tmp_dataset) # Add files to dataset files = [{'scope': tmp_scope, 'name': tmp_file, 'bytes': 1, 'adler32': '0cc737eb'}, ] self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dataset, files=files) # Add a second file replica tmp_file = 'file_%s' % generate_uuid() self.replica_client.add_replica(tmp_rse, tmp_scope, tmp_file, 1, '0cc737eb') # Add files to dataset files = [{'scope': tmp_scope, 'name': tmp_file, 'bytes': 1, 'adler32': '0cc737eb'}, ] self.did_client.add_files_to_dataset(scope=tmp_scope, name=tmp_dataset, files=files) # Close dataset with assert_raises(UnsupportedStatus): self.did_client.set_status(scope=tmp_scope, name=tmp_dataset, close=False) self.did_client.set_status(scope=tmp_scope, name=tmp_dataset, open=False) # Add a third file replica self.did_client.set_status(scope=tmp_scope, name=tmp_dataset, open=True) def test_bulk_get_meta(self): """ DATA IDENTIFIERS (CLIENT): Add a new meta data for a list of DIDs and try to retrieve them back""" key = 'project' rse = 'MOCK' scope = 'mock' files = ['file_%s' % generate_uuid() for _ in range(4)] dst = ['dst_%s' % generate_uuid() for _ in range(4)] cnt = ['cnt_%s' % generate_uuid() for _ in range(4)] meta_mapping = {} list_dids = [] for idx in range(4): self.replica_client.add_replica(rse, scope, files[idx], 1, '0cc737eb') self.did_client.set_metadata(scope, files[idx], key, 'file_%s' % idx) list_dids.append({'scope': scope, 'name': files[idx]}) meta_mapping['%s:%s' % (scope, 
files[idx])] = (key, 'file_%s' % idx) for idx in range(4): self.did_client.add_did(scope, dst[idx], 'DATASET', statuses=None, meta={key: 'dsn_%s' % idx}, rules=None) list_dids.append({'scope': scope, 'name': dst[idx]}) meta_mapping['%s:%s' % (scope, dst[idx])] = (key, 'dsn_%s' % idx) for idx in range(4): self.did_client.add_did(scope, cnt[idx], 'CONTAINER', statuses=None, meta={key: 'cnt_%s' % idx}, rules=None) list_dids.append({'scope': scope, 'name': cnt[idx]}) meta_mapping['%s:%s' % (scope, cnt[idx])] = (key, 'cnt_%s' % idx) list_meta = [_ for _ in self.did_client.get_metadata_bulk(list_dids)] res_list_dids = [{'scope': entry['scope'], 'name': entry['name']} for entry in list_meta] res_list_dids.sort() list_dids.sort() assert_equal(list_dids, res_list_dids) for meta in list_meta: did = '%s:%s' % (meta['scope'], meta['name']) met = meta_mapping[did] assert_equal((key, meta[key]), met) cnt = ['cnt_%s' % generate_uuid() for _ in range(4)] for idx in range(4): list_dids.append({'scope': scope, 'name': cnt[idx]}) list_meta = [_ for _ in self.did_client.get_metadata_bulk(list_dids)] assert_equal(len(list_meta), 12) list_dids = [] for idx in range(4): list_dids.append({'scope': scope, 'name': cnt[idx]}) list_meta = [_ for _ in self.did_client.get_metadata_bulk(list_dids)] assert_equal(len(list_meta), 0)
for blockObj in blocks: block = blockObj['block'][0]['name'] dasOutput = subprocess.check_output(DAS + [DAS_SITE % block]) sites = json.loads(dasOutput) firstSite = sites[0]['site'][0]['name'] if firstSite != DUMMY_RSE: # Skip things not at Nebraska for now print("Skipping block %s at %s" % (block, firstSite)) continue # Make Rucio dataset to correspond to CMS blocks. Attach this dataset to the container representing CMS dataset rucio_block_ds = block.replace('/', '', 1).replace('/', PATTERN) rucio_block_ds = block block_datasets.append({'scope':'cms', 'name':rucio_block_ds}) try: print(" Adding (block) dataset %s" % rucio_block_ds) status = dClient.add_dataset(scope='cms', name=rucio_block_ds, lifetime=DAYS_TO_LIVE*24*3600) print(' Status for add_dataset', status) except DataIdentifierAlreadyExists: print('Dataset already exists') try: status = dClient.attach_dids(scope='cms', name=RUCIO_CONTAINER, dids=[{'scope': 'cms', 'name': rucio_block_ds}]) print(' Status for attach dataset', status) except RucioException: print("Attach faild, probabably already done.") print(' Creating files for block %s' % block) phedex_files = json.loads(subprocess.check_output(DAS + [DAS_FILE_PHEDEX % block])) dbs_files = json.loads(subprocess.check_output(DAS + [DAS_FILE_DBS % block]).strip())