def test_judge_evaluate_detach_datasetlock(self):
    """ JUDGE EVALUATOR: Test if a datasetlock is detached correctly when removing a dataset from a container"""
    # Flush any pending re-evaluations so the test starts from a clean state.
    re_evaluator(once=True)

    scope = 'mock'
    files = create_files(3, scope, self.rse1, bytes=100)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset, files, 'jdoe')

    container = 'container_' + str(uuid())
    add_did(scope, container, DIDType.from_sym('CONTAINER'), 'jdoe')
    attach_dids(scope, container, [{'scope': scope, 'name': dataset}], 'jdoe')

    # Add a rule to the Container
    add_rule(dids=[{'scope': scope, 'name': container}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)

    # Check if the datasetlock is there
    locks = [ds_lock for ds_lock in get_dataset_locks(scope=scope, name=dataset)]
    assert(len(locks) > 0)

    detach_dids(scope, container, [{'scope': scope, 'name': dataset}])

    # Fake judge
    re_evaluator(once=True)

    # The rule no longer covers the dataset, so its datasetlock must be gone.
    locks = [ds_lock for ds_lock in get_dataset_locks(scope=scope, name=dataset)]
    assert(len(locks) == 0)
def test_add_rule_container_dataset_with_weights(self):
    """ REPLICATION RULE (CORE): Add a replication rule on a container, DATASET Grouping, WEIGHTS"""
    scope = 'mock'
    container = 'container_' + str(uuid())
    add_did(scope, container, DIDType.from_sym('CONTAINER'), 'jdoe')
    all_files = []
    dataset_files = []
    # Build three datasets of three files each and attach them to the container.
    for i in xrange(3):
        files = create_files(3, scope, self.rse1)
        all_files.extend(files)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')
        attach_dids(scope, container, [{'scope': scope, 'name': dataset}], 'jdoe')
        dataset_files.append({'scope': scope, 'name': dataset, 'files': files})

    add_rule(dids=[{'scope': scope, 'name': container}], account='jdoe', copies=2, rse_expression=self.T1, grouping='DATASET', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)

    # Candidate RSEs for the weighted selection -- presumably the subset of T1
    # carrying the 'fakeweight' attribute; confirm against the test fixture.
    t1 = set([self.rse1_id, self.rse3_id, self.rse5_id])
    for dataset in dataset_files:
        first_locks = None
        for file in dataset['files']:
            if first_locks is None:
                first_locks = set([lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])])
            rse_locks = set([lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])])
            # Both requested copies must land inside the weighted candidate set.
            assert(len(t1.intersection(rse_locks)) == 2)
            # DATASET grouping: every file of a dataset shares the same two RSEs.
            assert(len(first_locks.intersection(rse_locks)) == 2)
            assert_in(self.rse1_id, rse_locks)
def test_list_rules_states(self):
    """ SUBSCRIPTION (API): Test listing of rule states for subscription """
    tmp_scope = 'mock_' + uuid()[:8]
    add_scope(tmp_scope, 'root')
    site_a = 'RSE%s' % uuid().upper()
    site_b = 'RSE%s' % uuid().upper()
    add_rse(site_a)
    add_rse(site_b)

    # add a new dataset
    dsn = 'dataset-%s' % uuid()
    add_did(scope=tmp_scope, name=dsn, type=DIDType.DATASET, account='root')

    subscription_name = uuid()
    add_subscription(name=subscription_name, account='root', filter={'account': 'root'}, replication_rules=[(1, 'T1_DATADISK', False, True)], lifetime=100000, retroactive=0, dry_run=0, comments='This is a comment')

    # workaround until add_subscription returns the id
    sub_id = None
    for subscription in list_subscriptions(name=subscription_name, account='root'):
        sub_id = subscription['id']

    # Attach two rules to the subscription, one per site.
    add_rule(dids=[{'scope': tmp_scope, 'name': dsn}], account='root', copies=1, rse_expression=site_a, grouping='NONE', weight=None, lifetime=None, locked=False, subscription_id=sub_id)
    add_rule(dids=[{'scope': tmp_scope, 'name': dsn}], account='root', copies=1, rse_expression=site_b, grouping='NONE', weight=None, lifetime=None, locked=False, subscription_id=sub_id)

    # Each reported state row must count both rules.
    for state_row in list_subscription_rule_states(account='root', name=subscription_name):
        assert_equal(state_row[3], 2)
def xtest_add_value_with_type(self):
    """ META (CLIENTS): Add a new value to a key with a type constraint"""
    # 'x' prefix: test is disabled / skipped by the runner.
    key = 'key_' + str(uuid())[:20]
    value = 'value_' + str(uuid())
    self.meta_client.add_key(key=key, key_type='ALL', value_type=unicode)
    self.meta_client.add_value(key=key, value=value)
    values = self.meta_client.list_values(key=key)
    assert_in(value, values)
    # 1234 violates the unicode value_type constraint -- presumably expected to
    # raise; NOTE(review): no assertion here, likely relies on a @raises
    # decorator outside this view -- confirm.
    self.meta_client.add_value(key=key, value=1234)
def test_get_rule(self):
    """ REPLICATION RULE (CORE): Test to get a previously created rule"""
    scope = 'mock'
    replicas = create_files(3, scope, self.rse1)
    ds_name = 'dataset_' + str(uuid())
    add_did(scope, ds_name, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, ds_name, replicas, 'jdoe')

    # Create a rule, then read it back and compare identifiers.
    created = add_rule(dids=[{'scope': scope, 'name': ds_name}], account='jdoe', copies=2, rse_expression=self.T1, grouping='NONE', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)
    rule_id = created[0]
    fetched = get_rule(rule_id)
    assert(rule_id == fetched['id'].replace('-', '').lower())

    # A random identifier must not resolve to any rule.
    assert_raises(RuleNotFound, get_rule, uuid())
def xtest_add_value_with_regexp(self):
    """ META (CORE): Add a new value to a key with a regexp constraint"""
    # 'x' prefix: test is disabled / skipped by the runner.
    key = 'guid' + str(uuid())[:20]
    value = str(uuid())
    # regexp for uuid
    regexp = '[a-f0-9]{8}[a-f0-9]{4}[a-f0-9]{4}[a-f0-9]{4}[a-f0-9]{12}'
    self.meta_client.add_key(key=key, key_type='ALL', value_regexp=regexp)
    self.meta_client.add_value(key=key, value=value)
    values = self.meta_client.list_values(key=key)
    assert_in(value, values)
    # 'Nimportnawak' does not match the regexp -- presumably expected to raise;
    # NOTE(review): no assertion here, likely relies on a @raises decorator
    # outside this view -- confirm.
    self.meta_client.add_value(key=key, value='Nimportnawak')
def xtest_add_and_list_values(self):
    """ META (CLIENTS): Add a value and List all values."""
    # Disabled test (x-prefix): exercises value creation plus listing.
    meta_key = 'key_' + str(uuid())[:20]
    meta_value = 'value_' + str(uuid())
    assert_true(self.meta_client.add_key(key=meta_key, key_type='ALL'))
    self.meta_client.add_value(key=meta_key, value=meta_value)
    listed = self.meta_client.list_values(key=meta_key)
    assert_is_instance(listed, list)
    assert_in(meta_value, listed)
def test_delete_rule(self):
    """ REPLICATION RULE (CORE): Test to delete a previously created rule"""
    scope = 'mock'
    file_dids = create_files(3, scope, self.rse1)
    ds_name = 'dataset_' + str(uuid())
    add_did(scope, ds_name, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, ds_name, file_dids, 'jdoe')

    created_rule = add_rule(dids=[{'scope': scope, 'name': ds_name}], account='jdoe', copies=2, rse_expression=self.T1, grouping='DATASET', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)[0]
    delete_rule(created_rule)

    # After deletion no replica lock may survive on any file.
    for did in file_dids:
        assert(len(get_replica_locks(scope=did['scope'], name=did['name'])) == 0)

    # Deleting an unknown rule id must raise.
    assert_raises(RuleNotFound, delete_rule, uuid())
def test_account_counter_judge_evaluate_detach(self):
    """ JUDGE EVALUATOR: Test if the account counter is updated correctly when a file is removed from a DS"""
    # Flush pending evaluations and counter updates so the baseline is stable.
    re_evaluator(once=True)
    account_update(once=True)

    scope = 'mock'
    files = create_files(3, scope, self.rse1, bytes=100)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset, files, 'jdoe')

    # Add a first rule to the DS
    add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None)

    account_update(once=True)
    account_counter_before = get_counter(self.rse1_id, 'jdoe')

    detach_dids(scope, dataset, [files[0]])

    # Fake judge
    re_evaluator(once=True)
    account_update(once=True)

    # One 100-byte file was detached, so both counters must drop accordingly.
    account_counter_after = get_counter(self.rse1_id, 'jdoe')
    assert(account_counter_before['bytes'] - 100 == account_counter_after['bytes'])
    assert(account_counter_before['files'] - 1 == account_counter_after['files'])
def test_upload_download(self): """CLIENT(USER): rucio upload files to dataset/download dataset""" tmp_file1 = file_generator() tmp_file2 = file_generator() tmp_file3 = file_generator() tmp_dsn = 'tests.rucio_client_test_server_' + uuid() # Adding files to a new dataset cmd = 'rucio upload --rse {0} --scope {1} --files {2} {3} {4} --did {1}:{5}'.format(self.rse, self.scope, tmp_file1, tmp_file2, tmp_file3, tmp_dsn) print self.marker + cmd exitcode, out, err = execute(cmd) print out print err remove(tmp_file1) remove(tmp_file2) remove(tmp_file3) nose.tools.assert_equal(0, exitcode) # Downloading dataset cmd = 'rucio download --dir /tmp/ {0}:{1}'.format(self.scope, tmp_dsn) print self.marker + cmd exitcode, out, err = execute(cmd) print out print err # The files should be there cmd = 'ls /tmp/{0}/rucio_testfile_*'.format(self.scope) print self.marker + cmd exitcode, out, err = execute(cmd) print err, out nose.tools.assert_equal(0, exitcode) # cleaning remove('/tmp/{0}/'.format(self.scope) + tmp_file1[5:]) remove('/tmp/{0}/'.format(self.scope) + tmp_file2[5:]) remove('/tmp/{0}/'.format(self.scope) + tmp_file3[5:]) self.generated_dids + '{0}:{1} {0}:{2} {0}:{3} {0}:{4}'.format(self.scope, tmp_file1, tmp_file2, tmp_file3, tmp_dsn).split(' ')
def test_repair_a_rule_with_source_replica_expression(self):
    """ JUDGE EVALUATOR: Test the judge with two rules with source_replica_expression"""
    scope = 'mock'
    files = create_files(3, scope, self.rse4)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset, files, 'jdoe')

    # Add a first rule to the DS
    rule_id1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0]
    # The second rule may only source from rse1, where no replica exists yet,
    # so it must start out STUCK.
    rule_id2 = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=1, rse_expression=self.rse3, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None, source_replica_expression=self.rse1)[0]

    assert(RuleState.REPLICATING == get_rule(rule_id1)['state'])
    assert(RuleState.STUCK == get_rule(rule_id2)['state'])

    # Complete the first rule's transfers to rse1.
    successful_transfer(scope=scope, name=files[0]['name'], rse_id=self.rse1_id, nowait=False)
    successful_transfer(scope=scope, name=files[1]['name'], rse_id=self.rse1_id, nowait=False)
    successful_transfer(scope=scope, name=files[2]['name'], rse_id=self.rse1_id, nowait=False)

    # Also make replicas AVAILABLE
    session = get_session()
    replica = session.query(models.RSEFileAssociation).filter_by(scope=scope, name=files[0]['name'], rse_id=self.rse1_id).one()
    replica.state = ReplicaState.AVAILABLE
    replica = session.query(models.RSEFileAssociation).filter_by(scope=scope, name=files[1]['name'], rse_id=self.rse1_id).one()
    replica.state = ReplicaState.AVAILABLE
    replica = session.query(models.RSEFileAssociation).filter_by(scope=scope, name=files[2]['name'], rse_id=self.rse1_id).one()
    replica.state = ReplicaState.AVAILABLE
    session.commit()

    # With sources now available on rse1, the repairer can unstick rule 2.
    rule_repairer(once=True)

    assert(RuleState.OK == get_rule(rule_id1)['state'])
    assert(RuleState.REPLICATING == get_rule(rule_id2)['state'])
def test_create_and_update_and_list_subscription(self):
    """ SUBSCRIPTION (REST): Test the creation of a new subscription, update it, list it """
    mw = []

    # Authenticate via userpass to obtain a token for the subsequent calls.
    headers1 = {'X-Rucio-Account': 'root', 'X-Rucio-Username': '******', 'X-Rucio-Password': '******'}
    r1 = TestApp(auth_app.wsgifunc(*mw)).get('/userpass', headers=headers1, expect_errors=True)
    assert_equal(r1.status, 200)
    token = str(r1.header('X-Rucio-Auth-Token'))

    subscription_name = uuid()
    headers2 = {'X-Rucio-Auth-Token': str(token)}
    data = dumps({'name': subscription_name, 'filter': {'project': ['data12_900GeV', 'data12_8TeV', 'data13_900GeV', 'data13_8TeV'], 'datatype': ['AOD', ], 'excluded_pattern': '(_tid|physics_(Muons|JetTauEtmiss|Egamma)\..*\.ESD|express_express(?!.*NTUP|.*\.ESD|.*RAW)|(physics|express)(?!.*NTUP).* \ \.x|physics_WarmStart|calibration(?!_PixelBeam.merge.(NTUP_IDVTXLUMI|AOD))|merge.HIST|NTUP_MUONCALIB|NTUP_TRIG)', 'account': 'tier0'}, 'replication_rules': [(2, 'T1_DATATAPE', True, True), (1, 'T1_DATADISK', False, True)], 'lifetime': 100000, 'retroactive': 0, 'dry_run': 0, 'comments': 'blahblah'})
    # Create the subscription.
    r2 = TestApp(subs_app.wsgifunc(*mw)).post('/root/%s' % (subscription_name), headers=headers2, params=data, expect_errors=True)
    assert_equal(r2.status, 201)

    # Update its filter.
    data = dumps({'filter': {'project': ['toto', ]}})
    r3 = TestApp(subs_app.wsgifunc(*mw)).put('/root/%s' % (subscription_name), headers=headers2, params=data, expect_errors=True)
    assert_equal(r3.status, 201)

    # Read it back and verify the updated filter took effect.
    r4 = TestApp(subs_app.wsgifunc(*mw)).get('/root/%s' % (subscription_name), headers=headers2, expect_errors=True)
    print r4
    print type(loads(r4.body))
    assert_equal(r4.status, 200)
    # The filter is stored as a JSON string inside the JSON body, hence the double loads.
    assert_equal(loads(loads(r4.body)['filter'])['project'][0], 'toto')
def test_to_repair_a_rule_with_only_1_rse_whose_transfers_failed(self):
    """ JUDGE REPAIRER: Test to repair a rule with only 1 rse whose transfers failed (lock)"""
    rule_repairer(once=True)  # Clean out the repairer
    scope = 'mock'
    files = create_files(4, scope, self.rse4, bytes=100)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset, files, 'jdoe')

    rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0]

    # NOTE(review): files[2]'s lock is reused to resolve the destination RSE for
    # other files below -- presumably safe because DATASET grouping places all
    # locks on one RSE; confirm before "fixing" the indices.
    successful_transfer(scope=scope, name=files[0]['name'], rse_id=get_replica_locks(scope=files[0]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
    successful_transfer(scope=scope, name=files[1]['name'], rse_id=get_replica_locks(scope=files[1]['scope'], name=files[2]['name'])[0].rse_id, nowait=False)
    failed_transfer(scope=scope, name=files[2]['name'], rse_id=get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id)
    failed_transfer(scope=scope, name=files[3]['name'], rse_id=get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)
    cancel_request_did(scope=scope, name=files[2]['name'], dest_rse_id=get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id)
    cancel_request_did(scope=scope, name=files[3]['name'], dest_rse_id=get_replica_locks(scope=files[3]['scope'], name=files[2]['name'])[0].rse_id)

    assert(rule_id == get_rule(rule_id)['id'].replace('-', '').lower())
    assert(RuleState.STUCK == get_rule(rule_id)['state'])
    rule_repairer(once=True)

    # Still assert STUCK because of delays:
    assert(RuleState.STUCK == get_rule(rule_id)['state'])
    # The repairer must keep both failed locks grouped on the same RSE.
    assert(get_replica_locks(scope=files[2]['scope'], name=files[2]['name'])[0].rse_id == get_replica_locks(scope=files[3]['scope'], name=files[3]['name'])[0].rse_id)
def xtest_add_and_list_keys(self):
    """ META (CLIENTS): Add a key and List all keys."""
    # Disabled test (x-prefix): the new key must show up in the full listing.
    new_key = 'key_' + str(uuid())[:20]
    assert_true(self.meta_client.add_key(key=new_key, key_type='ALL'))
    all_keys = self.meta_client.list_keys()
    assert_is_instance(all_keys, list)
    assert_in(new_key, all_keys)
def test_list_rules_states(self):
    """ SUBSCRIPTION (REST): Test listing of rule states for subscription """
    tmp_scope = 'mock_' + uuid()[:8]
    add_scope(tmp_scope, 'root')
    mw = []
    site_a = 'RSE%s' % uuid().upper()
    site_b = 'RSE%s' % uuid().upper()
    add_rse(site_a)
    add_rse(site_b)

    # add a new dataset
    dsn = 'dataset-%s' % uuid()
    add_did(scope=tmp_scope, name=dsn, type=DIDType.DATASET, account='root')

    subscription_name = uuid()
    id = add_subscription(name=subscription_name, account='root', filter={'account': 'root'}, replication_rules=[(1, 'T1_DATADISK', False, True)], lifetime=100000, retroactive=0, dry_run=0, comments='We want a shrubbery')
    subscriptions = list_subscriptions(name=subscription_name, account='root')

    # workaround until add_subscription returns the id
    id = None
    for s in subscriptions:
        id = s['id']

    # Add two rules
    add_rule(dids=[{'scope': tmp_scope, 'name': dsn}], account='root', copies=1, rse_expression=site_a, grouping='NONE', weight=None, lifetime=None, locked=False, subscription_id=id)
    add_rule(dids=[{'scope': tmp_scope, 'name': dsn}], account='root', copies=1, rse_expression=site_b, grouping='NONE', weight=None, lifetime=None, locked=False, subscription_id=id)

    # Authenticate to query the REST endpoint.
    headers1 = {'X-Rucio-Account': 'root', 'X-Rucio-Username': '******', 'X-Rucio-Password': '******'}
    r1 = TestApp(auth_app.wsgifunc(*mw)).get('/userpass', headers=headers1, expect_errors=True)
    assert_equal(r1.status, 200)
    token = str(r1.header('X-Rucio-Auth-Token'))
    headers2 = {'X-Rucio-Auth-Token': str(token)}
    r2 = TestApp(subs_app.wsgifunc(*mw)).get('/%s/%s/Rules/States' % ('root', subscription_name), headers=headers2, expect_errors=True)

    # The body is newline-separated JSON rows; find the row for this subscription.
    for line in r2.body.split('\n'):
        print line
        rs = loads(line)
        if rs[1] == subscription_name:
            break
    # Both rules share one state, so the count for it must be 2.
    assert_equal(rs[3], 2)
def file_generator(size=2048, namelen=10):
    """ Create a bogus file and returns its name.

        :param size: size in bytes
        :param namelen: currently unused -- kept for backward compatibility
        :returns: The name of the generated file.
    """
    fn = '/tmp/rucio_testfile_' + uuid()
    # dd with bs=1 writes exactly `size` random bytes to the file.
    execute('dd if=/dev/urandom of={0} count={1} bs=1'.format(fn, size))
    return fn
def test_create_list_subscription_by_id(self):
    """ SUBSCRIPTION (API): Test the creation of a new subscription and list it by id """
    subscription_name = uuid()
    subscription_id = add_subscription(name=subscription_name, account='root', filter={'project': ['data12_900GeV', 'data12_8TeV', 'data13_900GeV', 'data13_8TeV'], 'datatype': ['AOD', ], 'excluded_pattern': '(_tid|physics_(Muons|JetTauEtmiss|Egamma)\..*\.ESD|express_express(?!.*NTUP|.*\.ESD|.*RAW)|(physics|express)(?!.*NTUP).* \ \.x|physics_WarmStart|calibration(?!_PixelBeam.merge.(NTUP_IDVTXLUMI|AOD))|merge.HIST|NTUP_MUONCALIB|NTUP_TRIG)', 'account': 'tier0'}, replication_rules=[(2, 'T1_DATATAPE', True, True), (1, 'T1_DATADISK', False, True)], lifetime=100000, retroactive=0, dry_run=0, comments='This is a comment')
    subscription_info = get_subscription_by_id(subscription_id)
    # The filter is stored as a JSON string, so decode it before comparing.
    assert_equal(loads(subscription_info['filter'])['project'], ['data12_900GeV', 'data12_8TeV', 'data13_900GeV', 'data13_8TeV'])
def test_delete_rule_and_cancel_transfers(self):
    """ REPLICATION RULE (CORE): Test to delete a previously created rule and do not cancel overlapping transfers"""
    scope = 'mock'
    files = create_files(3, scope, self.rse1)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset, files, 'jdoe')

    rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='NONE', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)[0]
    # Two overlapping rules whose locks must survive the deletion of rule 1.
    add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=2, rse_expression=self.T1, grouping='NONE', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)[0]
    add_rule(dids=[{'scope': scope, 'name': dataset}], account='jdoe', copies=3, rse_expression=self.T1, grouping='NONE', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)[0]

    delete_rule(rule_id_1)

    for file in files:
        rse_locks = get_replica_locks(scope=file['scope'], name=file['name'])
        # 2 + 3 copies remain from the two surviving rules.
        assert(len(rse_locks) == 5)
        # TODO Need to check transfer queue here, this is actually not the check of this test case
    assert_raises(RuleNotFound, delete_rule, uuid())
def test_add_rule(self):
    """ REPLICATION RULE (CLIENT): Add a replication rule """
    scope = 'mock'
    file_dids = create_files(3, scope, self.rse1)
    ds_name = 'dataset_' + str(uuid())
    add_did(scope, ds_name, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, ds_name, file_dids, 'jdoe')
    # The client call must hand back a list of created rule ids.
    result = self.rule_client.add_replication_rule(dids=[{'scope': scope, 'name': ds_name}], account='jdoe', copies=2, rse_expression=self.T1, grouping='NONE')
    assert_is_instance(result, list)
def test_add_rule_container_none(self):
    """ REPLICATION RULE (CORE): Add a replication rule on a container, NONE Grouping"""
    scope = 'mock'
    container = 'container_' + str(uuid())
    add_did(scope, container, DIDType.from_sym('CONTAINER'), 'jdoe')
    all_files = []
    # Three datasets of three files each, all attached to the container.
    for i in xrange(3):
        files = create_files(3, scope, self.rse1)
        all_files.extend(files)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')
        attach_dids(scope, container, [{'scope': scope, 'name': dataset}], 'jdoe')

    add_rule(dids=[{'scope': scope, 'name': container}], account='jdoe', copies=1, rse_expression=self.T2, grouping='NONE', weight=None, lifetime=None, locked=False, subscription_id=None)
    for file in all_files:
        rse_locks = set([lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])])
        # Presumably rse4 is inside the T2 expression and rse5 is not --
        # confirm against the test fixture setup.
        assert_in(self.rse4_id, rse_locks)
        assert_not_in(self.rse5_id, rse_locks)
def test_create_existing_subscription(self):
    """ SUBSCRIPTION (CLIENT): Test the creation of a existing subscription """
    subscription_name = uuid()
    # First creation must succeed.
    result = self.client.add_subscription(name=subscription_name, account='root', filter={'project': ['data12_900GeV', 'data12_8TeV', 'data13_900GeV', 'data13_8TeV'], 'datatype': ['AOD', ], 'excluded_pattern': '(_tid|physics_(Muons|JetTauEtmiss|Egamma)\..*\.ESD|express_express(?!.*NTUP|.*\.ESD|.*RAW)|(physics|express)(?!.*NTUP).* \ \.x|physics_WarmStart|calibration(?!_PixelBeam.merge.(NTUP_IDVTXLUMI|AOD))|merge.HIST|NTUP_MUONCALIB|NTUP_TRIG)', 'account': 'tier0'}, replication_rules=[(2, 'T1_DATATAPE', True, True), (1, 'T1_DATADISK', False, True)], lifetime=100000, retroactive=0, dry_run=0, comments='Ni ! Ni!')
    assert_true(result)
    # NOTE(review): the second identical creation is presumably expected to
    # raise a duplicate-subscription error asserted by a @raises decorator
    # outside this view -- confirm; no assertion is present here.
    result = self.client.add_subscription(name=subscription_name, account='root', filter={'project': ['data12_900GeV', 'data12_8TeV', 'data13_900GeV', 'data13_8TeV'], 'datatype': ['AOD', ], 'excluded_pattern': '(_tid|physics_(Muons|JetTauEtmiss|Egamma)\..*\.ESD|express_express(?!.*NTUP|.*\.ESD|.*RAW)|(physics|express)(?!.*NTUP).* \ \.x|physics_WarmStart|calibration(?!_PixelBeam.merge.(NTUP_IDVTXLUMI|AOD))|merge.HIST|NTUP_MUONCALIB|NTUP_TRIG)', 'account': 'tier0'}, replication_rules=[(2, 'T1_DATATAPE', True, True), (1, 'T1_DATADISK', False, True)], lifetime=100000, retroactive=0, dry_run=0, comments='Ni ! Ni!')
def populateDB():
    # Populate the catalogue with a realistic mc12_8TeV-like dataset
    # distribution spread over the T1 RSEs. Python 2 code (print statement,
    # xrange, `except X, e` syntax).
    listrses = list_rses({'T1': '1'})
    print len(listrses), listrses
    # listrses = list_rses()
    # print len(listrses), listrses
    # sys.exit()
    account = 'root'
    project = 'mc12_8TeV'

    # Per-datatype production profile: average file count, total size and
    # replica count per dataset.
    dictDistrib = [{'datatype': 'HITS', 'prodstep': 'merge', 'nbfiles': 302, 'totfilesize': 225394185112, 'nbreplicas': 1},
                   {'datatype': 'HITS', 'prodstep': 'simul', 'nbfiles': 620, 'totfilesize': 97930909866, 'nbreplicas': 1},
                   {'datatype': 'EVNT', 'prodstep': 'evgen', 'nbfiles': 324, 'totfilesize': 7809298802, 'nbreplicas': 3},
                   {'datatype': 'AOD', 'prodstep': 'merge', 'nbfiles': 52, 'totfilesize': 106942334943, 'nbreplicas': 4},
                   {'datatype': 'AOD', 'prodstep': 'recon', 'nbfiles': 858, 'totfilesize': 182186965627, 'nbreplicas': 1}]

    for d in dictDistrib:
        for day in xrange(0, 180):
            for i in xrange(0, 30):
                scope = project
                prod_step = d['prodstep']
                datatype = d['datatype']
                nbfiles = int(d['nbfiles'])
                filesize = int(int(d['totfilesize'])/float(nbfiles))
                # Jitter file count and size by ~10% around the profile values.
                nbfiles = int(random.gauss(nbfiles, nbfiles/10))
                filesize = int(random.gauss(filesize, filesize/10))
                nbreplicas = int(d['nbreplicas'])
                dataset_meta = {'project': project, 'stream_name': 'dummy', 'prod_step': prod_step, 'datatype': datatype}
                source_rses = []
                if nbreplicas:
                    # Pick nbreplicas distinct random sites; give up after 100 tries.
                    iter = 0
                    while (len(source_rses) != nbreplicas and iter != 100):
                        rnd_site = random.choice(listrses)
                        iter += 1
                        if rnd_site not in source_rses:
                            source_rses.append(rnd_site)
                try:
                    dsn = '%s.%s.%s.%i.%i' % (project, prod_step, datatype, day, i)
                    print '%i Creating %s with %i files of size %i located at %i sites' % (i, dsn, nbfiles, filesize, len(source_rses))
                    add_identifier(scope=scope, name=dsn, type='dataset', issuer=account, statuses={'monotonic': True}, meta=dataset_meta)
                    files = ['file_%s' % uuid() for i in xrange(nbfiles)]
                    listfiles = []
                    for file in files:
                        listfiles.append({'scope': scope, 'name': file, 'size': filesize})
                        # Register a physical replica of the file at every chosen site.
                        for source_rse in source_rses:
                            add_file_replica(source_rse, scope, file, filesize, issuer=account)
                    attach_identifier(scope, name=dsn, dids=listfiles, issuer=account)
                    # Pin the dataset at each site with a replication rule;
                    # rule errors are logged but do not abort the population.
                    for source_rse in source_rses:
                        try:
                            add_replication_rule(dids=[{'scope': scope, 'name': dsn}], account=account, copies=1, rse_expression=source_rse, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None, issuer='root')
                        except InvalidReplicationRule, e:
                            print e
                except RucioException, e:
                    print e
def test_dataset_lock(self):
    """ DATASETLOCK (CLIENT): Get a datasetlock for a specific dataset"""
    scope = 'mock'
    file_dids = create_files(3, scope, self.rse1)
    ds_name = 'dataset_' + str(uuid())
    add_did(scope, ds_name, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, ds_name, file_dids, 'jdoe')

    locked_rule = add_rule(dids=[{'scope': scope, 'name': ds_name}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='DATASET', weight='fakeweight', lifetime=None, locked=True, subscription_id=None)[0]

    # The client listing must report the rule that holds the dataset lock.
    reported_rules = []
    for lock in self.lock_client.get_dataset_locks(scope=scope, name=ds_name):
        reported_rules.append(lock['rule_id'])
    assert_in(locked_rule, reported_rules)
def createRSEs():
    # Add test RSEs: three T1 pairs (one DISK, one TAPE each),
    # ten T2 disk endpoints and a single T0 source endpoint.
    for _ in xrange(0, 3):
        disk_rse = str(uuid())
        tape_rse = str(uuid())
        add_rse(disk_rse, issuer='root')
        add_rse(tape_rse, issuer='root')
        add_rse_attribute(disk_rse, "T1", True, issuer='root')
        add_rse_attribute(tape_rse, "T1", True, issuer='root')
        add_rse_attribute(disk_rse, "DISK", True, issuer='root')
        add_rse_attribute(tape_rse, "TAPE", True, issuer='root')

    for _ in xrange(0, 10):
        t2_rse = str(uuid())
        add_rse(t2_rse, issuer='root')
        add_rse_attribute(t2_rse, "T2", True, issuer='root')
        add_rse_attribute(t2_rse, "DISK", True, issuer='root')

    source_rse = str(uuid())
    add_rse(source_rse, issuer='root')
    add_rse_attribute(source_rse, "T0", True, issuer='root')
def test_add_rules_datasets_none(self):
    """ REPLICATION RULE (CORE): Add replication rules to multiple datasets, NONE Grouping"""
    scope = 'mock'
    files1 = create_files(3, scope, self.rse4)
    dataset1 = 'dataset_' + str(uuid())
    add_did(scope, dataset1, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset1, files1, 'jdoe')

    files2 = create_files(3, scope, self.rse4)
    dataset2 = 'dataset_' + str(uuid())
    add_did(scope, dataset2, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset2, files2, 'jdoe')

    # Add the rules to both DS
    add_rules(dids=[{'scope': scope, 'name': dataset1}, {'scope': scope, 'name': dataset2}], rules=[{'account': 'jdoe', 'copies': 1, 'rse_expression': self.T1, 'grouping': 'NONE', 'weight': None, 'lifetime': None, 'locked': False, 'subscription_id': None}, {'account': 'root', 'copies': 1, 'rse_expression': self.T1, 'grouping': 'NONE', 'weight': 'fakeweight', 'lifetime': None, 'locked': False, 'subscription_id': None}])

    # Check if the Locks are created properly
    # Both rules request one copy each; the two locks of a file are expected to
    # land on the same RSE -- presumably because replicas already exist on rse4;
    # confirm against the rule-engine placement logic.
    for file in files1:
        rse_locks = [lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])]
        assert(rse_locks[0] == rse_locks[1])
    for file in files2:
        rse_locks = [lock['rse_id'] for lock in get_replica_locks(scope=file['scope'], name=file['name'])]
        assert(rse_locks[0] == rse_locks[1])
def test_add_rule_duplicate(self):
    """ REPLICATION RULE (CORE): Add a replication rule duplicate"""
    scope = 'mock'
    file_dids = create_files(3, scope, self.rse1)
    ds_name = 'dataset_' + str(uuid())
    add_did(scope, ds_name, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, ds_name, file_dids, 'jdoe')

    # The first rule on the dataset succeeds.
    add_rule(dids=[{'scope': scope, 'name': ds_name}], account='jdoe', copies=2, rse_expression=self.T1, grouping='NONE', weight=None, lifetime=None, locked=False, subscription_id=None)

    # An identical second rule must be rejected as a duplicate.
    assert_raises(DuplicateRule, add_rule, dids=[{'scope': scope, 'name': ds_name}], account='jdoe', copies=2, rse_expression=self.T1, grouping='NONE', weight=None, lifetime=None, locked=False, subscription_id=None)
def test_delete_rule(self):
    """ REPLICATION RULE (CLIENT): Delete a replication rule """
    scope = 'mock'
    file_dids = create_files(3, scope, self.rse1)
    ds_name = 'dataset_' + str(uuid())
    add_did(scope, ds_name, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, ds_name, file_dids, 'jdoe')

    created_rule = add_rule(dids=[{'scope': scope, 'name': ds_name}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='NONE', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)[0]

    # First deletion through the client succeeds; a second attempt must raise.
    assert(self.rule_client.delete_replication_rule(rule_id=created_rule) is True)
    assert_raises(RuleNotFound, self.rule_client.delete_replication_rule, created_rule)
def test_locked_rule(self):
    """ REPLICATION RULE (CLIENT): Delete a locked replication rule"""
    scope = 'mock'
    file_dids = create_files(3, scope, self.rse1)
    ds_name = 'dataset_' + str(uuid())
    add_did(scope, ds_name, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, ds_name, file_dids, 'jdoe')

    locked_rule = add_rule(dids=[{'scope': scope, 'name': ds_name}], account='jdoe', copies=1, rse_expression=self.rse1, grouping='NONE', weight='fakeweight', lifetime=None, locked=True, subscription_id=None)[0]

    # A locked rule may not be deleted...
    assert_raises(AccessDenied, delete_rule, locked_rule)
    # ...until it has been unlocked through the client.
    self.rule_client.update_replication_rule(rule_id=locked_rule, options={'locked': False})
    delete_rule(rule_id=locked_rule)
def test_get_rule_by_account(self):
    """ ACCOUNT (CLIENT): Get Replication Rule by account """
    scope = 'mock'
    file_dids = create_files(3, scope, self.rse1)
    ds_name = 'dataset_' + str(uuid())
    add_did(scope, ds_name, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, ds_name, file_dids, 'jdoe')

    created = self.rule_client.add_replication_rule(dids=[{'scope': scope, 'name': ds_name}], account='jdoe', copies=2, rse_expression=self.T1, grouping='NONE')

    # Listing the account's rules must include the one just created.
    account_rule_ids = [entry['id'] for entry in self.account_client.list_account_rules('jdoe')]
    assert_in(created[0], account_rule_ids)
def test_judge_add_dataset_to_container(self):
    """ JUDGE EVALUATOR: Test the judge when adding dataset to container"""
    scope = 'mock'
    files = create_files(3, scope, self.rse1)
    dataset = 'dataset_' + str(uuid())
    add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
    attach_dids(scope, dataset, files, 'jdoe')

    # Fix: use the 'container_' prefix for the container DID (was 'dataset_'),
    # consistent with the other container tests in this file and less
    # misleading when reading failure output. The name is freshly generated,
    # so nothing else depends on the prefix.
    parent_container = 'container_' + str(uuid())
    add_did(scope, parent_container, DIDType.from_sym('CONTAINER'), 'jdoe')

    # Add a first rule to the DS
    add_rule(dids=[{'scope': scope, 'name': parent_container}], account='jdoe', copies=2, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)
    attach_dids(scope, parent_container, [{'scope': scope, 'name': dataset}], 'jdoe')

    # Fake judge
    re_evaluator(once=True)

    # Check if the Locks are created properly
    for file in files:
        assert(len(get_replica_locks(scope=file['scope'], name=file['name'])) == 2)

    # Check if the DatasetLocks are created properly
    dataset_locks = [lock for lock in get_dataset_locks(scope=scope, name=dataset)]
    assert(len(dataset_locks) == 2)
def scope_name_generator():
    """ Generate random scope name.

    :returns: A random scope name
    """
    # First 16 lowercase characters of a fresh uuid, prefixed for readability.
    suffix = str(uuid()).lower()[:16]
    return 'mock_%s' % suffix
def test_create_scope_no_account(self):
    """ SCOPE (CLIENTS): try to create scope for not existing account."""
    fake_account = str(uuid()).lower()[:30]
    new_scope = scope_name_generator()
    # NOTE(review): expected to fail server-side; the raise is presumably
    # asserted by a decorator outside this view -- confirm.
    self.scope_client.add_scope(fake_account, new_scope)
def test_update_nonexisting_subscription(self):
    """ SUBSCRIPTION (CLIENT): Test the update of a non-existing subscription """
    missing_name = uuid()
    # Updating a subscription that was never created must raise.
    with pytest.raises(SubscriptionNotFound):
        self.sub_client.update_subscription(name=missing_name, filter_={'project': ['toto', ]})
def test_update_nonexisting_subscription(self):
    """ SUBSCRIPTION (CLIENT): Test the update of a non-existing subscription """
    subscription_name = uuid()
    # NOTE(review): no assertion here -- the SubscriptionNotFound raise is
    # presumably checked by a @raises decorator outside this view; confirm.
    self.sub_client.update_subscription(name=subscription_name, filter={'project': ['toto', ]})
def test_add_key(self):
    """ META (CORE): Add a new key """
    # Every accepted spelling of a key type (DIDType, KeyType, string, short
    # code) mapped to the KeyType expected to be persisted.
    types = [{ 'type': DIDType.FILE, 'expected': KeyType.FILE }, { 'type': DIDType.CONTAINER, 'expected': KeyType.CONTAINER }, { 'type': DIDType.DATASET, 'expected': KeyType.DATASET }, { 'type': KeyType.ALL, 'expected': KeyType.ALL }, { 'type': KeyType.DERIVED, 'expected': KeyType.DERIVED }, { 'type': KeyType.FILE, 'expected': KeyType.FILE }, { 'type': KeyType.COLLECTION, 'expected': KeyType.COLLECTION }, { 'type': KeyType.CONTAINER, 'expected': KeyType.CONTAINER }, { 'type': KeyType.DATASET, 'expected': KeyType.DATASET }, { 'type': 'FILE', 'expected': KeyType.FILE }, { 'type': 'ALL', 'expected': KeyType.ALL }, { 'type': 'COLLECTION', 'expected': KeyType.COLLECTION }, { 'type': 'DATASET', 'expected': KeyType.DATASET }, { 'type': 'D', 'expected': KeyType.DATASET }, { 'type': 'FILE', 'expected': KeyType.FILE }, { 'type': 'F', 'expected': KeyType.FILE }, { 'type': 'DERIVED', 'expected': KeyType.DERIVED }, { 'type': 'C', 'expected': KeyType.CONTAINER }]
    for key_type in types:
        key_name = 'datatype%s' % str(uuid())
        add_key(key_name, key_type['type'])
        stored_key_type = session.get_session().query(
            models.DIDKey).filter_by(key=key_name).one()['key_type']
        # Fix: the original called assert_true(stored_key_type, expected),
        # which treats the expected value as the failure *message* and never
        # compares -- the check passed for any truthy stored type. Compare
        # explicitly instead.
        assert_true(stored_key_type == key_type['expected'])

    # Unsupported key types must be rejected.
    with assert_raises(UnsupportedKeyType):
        add_key('datatype', DIDType.ARCHIVE)

    with assert_raises(UnsupportedKeyType):
        add_key('datatype', 'A')
def test_get_account_notfound(self):
    """ ACCOUNT (CLIENTS): try to get information about not existing account."""
    # A random UUID cannot match any registered account.
    account = str(uuid())
    # Bug fix: the lookup must be asserted to fail with AccountNotFound;
    # the original let the exception error the test instead.
    with pytest.raises(AccountNotFound):
        self.client.get_account(account)
def test_create_and_list_subscription_by_id(self):
    """ SUBSCRIPTION (REST): Test the creation of a new subscription and get by subscription id """
    middleware = []
    # Authenticate with userpass to obtain a token for the REST calls.
    login_headers = {'X-Rucio-Account': 'root', 'X-Rucio-Username': '******', 'X-Rucio-Password': '******'}
    login = TestApp(auth_app.wsgifunc(*middleware)).get('/userpass', headers=login_headers, expect_errors=True)
    assert_equal(login.status, 200)
    token = str(login.header('X-Rucio-Auth-Token'))
    name = uuid()
    auth = {'X-Rucio-Auth-Token': str(token)}
    payload = dumps({'options': {'filter': {'project': self.projects,
                                            'datatype': ['AOD', ],
                                            'excluded_pattern': self.pattern1,
                                            'account': ['tier0', ]},
                                 'replication_rules': [{'lifetime': 86400,
                                                        'rse_expression': 'MOCK|MOCK2',
                                                        'copies': 2,
                                                        'activity': 'Data Brokering'}],
                                 'lifetime': 100000,
                                 'retroactive': 0,
                                 'dry_run': 0,
                                 'comments': 'blahblah'}})
    # Create the subscription; the response body carries its id.
    created = TestApp(subs_app.wsgifunc(*middleware)).post('/root/%s' % (name), headers=auth, params=payload, expect_errors=True)
    assert_equal(created.status, 201)
    subscription_id = created.body
    # Fetch the subscription back by id and verify the stored filter.
    fetched = TestApp(subs_app.wsgifunc(*middleware)).get('/Id/%s' % (subscription_id), headers=auth, expect_errors=True)
    assert_equal(fetched.status, 200)
    assert_equal(loads(loads(fetched.body)['filter'])['project'][0], 'data12_900GeV')
def test_create_existing_subscription(self):
    """ SUBSCRIPTION (REST): Test the creation of a existing subscription """
    middleware = []
    # Authenticate with userpass to obtain a token for the REST calls.
    login_headers = {'X-Rucio-Account': 'root', 'X-Rucio-Username': '******', 'X-Rucio-Password': '******'}
    login = TestApp(auth_app.wsgifunc(*middleware)).get('/userpass', headers=login_headers, expect_errors=True)
    assert_equal(login.status, 200)
    token = str(login.header('X-Rucio-Auth-Token'))
    name = uuid()
    auth = {'X-Rucio-Auth-Token': str(token)}
    payload = dumps({'options': {'name': name,
                                 'filter': {'project': self.projects,
                                            'datatype': ['AOD', ],
                                            'excluded_pattern': self.pattern1,
                                            'account': ['tier0', ]},
                                 'replication_rules': [{'lifetime': 86400,
                                                        'rse_expression': 'MOCK|MOCK2',
                                                        'copies': 2,
                                                        'activity': 'Data Brokering'}],
                                 'lifetime': 100000,
                                 'retroactive': 0,
                                 'dry_run': 0,
                                 'comments': 'We are the knights who say Ni !'}})
    # The first creation succeeds.
    first = TestApp(subs_app.wsgifunc(*middleware)).post('/root/' + name, headers=auth, params=payload, expect_errors=True)
    assert_equal(first.status, 201)
    # Posting the identical subscription again must be rejected as a duplicate.
    second = TestApp(subs_app.wsgifunc(*middleware)).post('/root/' + name, headers=auth, params=payload, expect_errors=True)
    assert_equal(second.header('ExceptionClass'), 'SubscriptionDuplicate')
    assert_equal(second.status, 409)
def test_list_rules_states(self):
    """ SUBSCRIPTION (REST): Test listing of rule states for subscription """
    tmp_scope = 'mock_' + uuid()[:8]
    add_scope(tmp_scope, 'root')
    mw = []
    site_a = 'RSE%s' % uuid().upper()
    site_b = 'RSE%s' % uuid().upper()
    site_a_id = add_rse(site_a)
    site_b_id = add_rse(site_b)
    # Add quota
    set_account_limit('root', site_a_id, -1)
    set_account_limit('root', site_b_id, -1)
    # add a new dataset
    dsn = 'dataset-%s' % uuid()
    add_did(scope=tmp_scope, name=dsn, type=DIDType.DATASET, account='root')
    subscription_name = uuid()
    subid = add_subscription(name=subscription_name, account='root',
                             filter={'account': ['root', ], 'scope': [tmp_scope, ]},
                             replication_rules=[{'lifetime': 86400, 'rse_expression': 'MOCK|MOCK2',
                                                 'copies': 2, 'activity': 'Data Brokering'}],
                             lifetime=100000, retroactive=0, dry_run=0,
                             comments='We want a shrubbery', issuer='root')
    # Add two rules
    add_rule(dids=[{'scope': tmp_scope, 'name': dsn}], account='root', copies=1,
             rse_expression=site_a, grouping='NONE', weight=None, lifetime=None,
             locked=False, subscription_id=subid)
    add_rule(dids=[{'scope': tmp_scope, 'name': dsn}], account='root', copies=1,
             rse_expression=site_b, grouping='NONE', weight=None, lifetime=None,
             locked=False, subscription_id=subid)
    headers1 = {'X-Rucio-Account': 'root', 'X-Rucio-Username': '******', 'X-Rucio-Password': '******'}
    res1 = TestApp(auth_app.wsgifunc(*mw)).get('/userpass', headers=headers1, expect_errors=True)
    assert_equal(res1.status, 200)
    token = str(res1.header('X-Rucio-Auth-Token'))
    headers2 = {'X-Rucio-Auth-Token': str(token)}
    res2 = TestApp(subs_app.wsgifunc(*mw)).get('/%s/%s/Rules/States' % ('root', subscription_name),
                                               headers=headers2, expect_errors=True)
    rs = None
    for line in res2.body.split('\n'):
        # Bug fix: a trailing newline yields an empty final element and
        # loads('') raises ValueError — skip blank lines.
        if not line:
            continue
        print(line)
        rs = loads(line)
        if rs[1] == subscription_name:
            break
    # Bug fix: fail explicitly when the subscription never appeared instead
    # of asserting on whatever line was parsed last (or hitting a NameError).
    assert rs is not None and rs[1] == subscription_name
    # Both rules attached to the subscription must be reported.
    assert_equal(rs[3], 2)
def xtest_add_unsupported_type(self):
    """ META (CLIENTS): Add an unsupported value for type """
    # Disabled test (x-prefixed): registers a key whose value type is the
    # plain Python ``str`` class.
    new_key = 'key_' + str(uuid())[:20]
    self.meta_client.add_key(key=new_key, key_type='ALL', value_type=str)
def test_judge_evaluate_detach(self):
    """ JUDGE EVALUATOR: Test if the detach is done correctly"""
    re_evaluator(once=True)
    scope = 'mock'
    container = 'container_' + str(uuid())
    add_did(scope, container, DIDType.from_sym('CONTAINER'), 'jdoe')
    # Attach three datasets of three files each to the container. The
    # original copy-pasted this setup block three times verbatim; the loop
    # is behavior-identical, and after it `dataset`/`files` refer to the
    # last attached dataset, just as in the original.
    for _ in range(3):
        files = create_files(3, scope, self.rse1_id, bytes=100)
        dataset = 'dataset_' + str(uuid())
        add_did(scope, dataset, DIDType.from_sym('DATASET'), 'jdoe')
        attach_dids(scope, dataset, files, 'jdoe')
        attach_dids(scope, container, [{'scope': scope, 'name': dataset}], 'jdoe')
    # Add a first rule to the Container
    rule_id = add_rule(dids=[{'scope': scope, 'name': container}], account='jdoe',
                       copies=1, rse_expression=self.rse1, grouping='ALL',
                       weight=None, lifetime=None, locked=False,
                       subscription_id=None)[0]
    # Fake judge
    re_evaluator(once=True)
    # All 3 datasets x 3 files are locked.
    assert(9 == get_rule(rule_id)['locks_ok_cnt'])
    # Detach one file of the last dataset; the evaluator must drop its lock.
    detach_dids(scope, dataset, [files[0]])
    # Fake judge
    re_evaluator(once=True)
    assert(8 == get_rule(rule_id)['locks_ok_cnt'])
def account_name_generator():
    """ Generate random account name.

    :returns: A random account name
    """
    # 16 lowercase characters of a fresh UUID keep the name unique and short.
    random_part = str(uuid()).lower()[:16]
    return 'jdoe-' + random_part
def test_create_and_update_and_list_subscription(self):
    """ SUBSCRIPTION (CLIENT): Test the creation of a new subscription, update it, list it """
    sub_name = uuid()
    # Creation with an unknown activity in the replication rules is rejected.
    with assert_raises(InvalidObject):
        subid = self.sub_client.add_subscription(name=sub_name, account='root',
                                                 filter={'project': self.projects,
                                                         'datatype': ['AOD', ],
                                                         'excluded_pattern': self.pattern1,
                                                         'account': ['tier0', ]},
                                                 replication_rules=[{'lifetime': 86400,
                                                                     'rse_expression': 'MOCK|MOCK2',
                                                                     'copies': 2,
                                                                     'activity': 'noactivity'}],
                                                 lifetime=100000, retroactive=0, dry_run=0,
                                                 comments='Ni ! Ni!')
    # Creation with a valid activity succeeds.
    subid = self.sub_client.add_subscription(name=sub_name, account='root',
                                             filter={'project': self.projects,
                                                     'datatype': ['AOD', ],
                                                     'excluded_pattern': self.pattern1,
                                                     'account': ['tier0', ]},
                                             replication_rules=[{'lifetime': 86400,
                                                                 'rse_expression': 'MOCK|MOCK2',
                                                                 'copies': 2,
                                                                 'activity': 'Data Brokering'}],
                                             lifetime=100000, retroactive=0, dry_run=0,
                                             comments='Ni ! Ni!')
    # The listed subscription id matches the one returned at creation.
    result = [sub['id'] for sub in list_subscriptions(name=sub_name, account='root')]
    assert_equal(subid, result[0])
    # A non-dictionary filter is rejected.
    with assert_raises(TypeError):
        result = self.sub_client.update_subscription(name=sub_name, account='root', filter='toto')
    # A valid filter update succeeds...
    result = self.sub_client.update_subscription(name=sub_name, account='root',
                                                 filter={'project': ['toto', ]})
    assert_true(result)
    # ...and the stored filter reflects the new project value.
    matches = []
    for entry in list_subscriptions(name=sub_name, account='root'):
        matches.append(entry)
    assert_equal(len(matches), 1)
    assert_equal(loads(matches[0]['filter'])['project'][0], 'toto')
def test_list_rules_states(self):
    """ SUBSCRIPTION (API): Test listing of rule states for subscription """
    tmp_scope = 'mock_' + uuid()[:8]
    add_scope(tmp_scope, 'root')
    rse_a = 'RSE%s' % uuid().upper()
    rse_b = 'RSE%s' % uuid().upper()
    rse_a_id = add_rse(rse_a)
    rse_b_id = add_rse(rse_b)
    # Lift the quota restriction on both sites.
    set_account_limit('root', rse_a_id, -1)
    set_account_limit('root', rse_b_id, -1)
    # Register a fresh dataset for the subscription to match.
    dsn = 'dataset-%s' % uuid()
    add_did(scope=tmp_scope, name=dsn, type=DIDType.DATASET, account='root')
    sub_name = uuid()
    subid = add_subscription(name=sub_name, account='root',
                             filter={'account': ['root', ], 'scope': [tmp_scope, ]},
                             replication_rules=[{'lifetime': 86400,
                                                 'rse_expression': 'MOCK|MOCK2',
                                                 'copies': 2,
                                                 'activity': 'Data Brokering'}],
                             lifetime=100000, retroactive=0, dry_run=0,
                             comments='This is a comment', issuer='root')
    # Attach one rule per site to the subscription.
    for expression in (rse_a, rse_b):
        add_rule(dids=[{'scope': tmp_scope, 'name': dsn}], account='root', copies=1,
                 rse_expression=expression, grouping='NONE', weight=None,
                 lifetime=None, locked=False, subscription_id=subid)
    # Both rules must be reported in the per-subscription state counts.
    for rule in list_subscription_rule_states(account='root', name=sub_name):
        assert_equal(rule[3], 2)
def test_get_account_notfound(self):
    """ ACCOUNT (CLIENTS): try to get information about not existing account."""
    # A random UUID cannot match any registered account name.
    missing_account = str(uuid())
    # The lookup must fail with AccountNotFound.
    with pytest.raises(AccountNotFound):
        self.client.get_account(missing_account)
def test_lifetime_creation_core(root_account, rse_factory, mock_scope, did_factory):
    """ Test the creation of a lifetime exception on the core side """
    nb_datatype = 3
    nb_datasets = 2 * nb_datatype
    yesterday = datetime.now() - timedelta(days=1)
    tomorrow = datetime.now() + timedelta(days=1)
    rse, rse_id = rse_factory.make_posix_rse()
    datasets = [did_factory.make_dataset() for _ in range(nb_datasets)]
    metadata = [str(uuid()) for _ in range(nb_datatype)]

    # Give each pair of datasets one datatype; every pair except the last
    # also gets an 'eol_at' in the past so the lifetime model applies to it.
    list_dids = []
    for idx, datatype_value in enumerate(metadata):
        pair = []
        for dataset in datasets[2 * idx:2 * (idx + 1)]:
            set_metadata(dataset['scope'], dataset['name'], 'datatype', datatype_value)
            if idx < nb_datatype - 1:
                set_metadata(dataset['scope'], dataset['name'], 'eol_at', yesterday)
            pair.append((dataset['scope'], dataset['name']))
        pair.sort()
        list_dids.append(pair)
    # Append two DIDs that were never registered, to exercise the
    # "unknown" classification.
    datasets.extend([{'scope': mock_scope, 'name': 'dataset_%s' % str(uuid()), 'did_type': DIDType.DATASET} for _ in range(2)])

    # Test with cutoff_date not defined
    try:
        config_core.remove_option('lifetime_model', 'cutoff_date')
    except (ConfigNotFound, NoSectionError):
        pass
    with pytest.raises(UnsupportedOperation):
        add_exception(datasets, root_account, pattern='wekhewfk', comments='This is a comment', expires_at=datetime.now())

    # Test with cutoff_date wrongly defined
    config_core.set(section='lifetime_model', option='cutoff_date', value='wrong_value')
    config_core.get(section='lifetime_model', option='cutoff_date', default=None, use_cache=False)
    with pytest.raises(UnsupportedOperation):
        add_exception(datasets, root_account, pattern='wekhewfk', comments='This is a comment', expires_at=datetime.now())

    # Test with cutoff_date properly defined
    tomorrow = tomorrow.strftime('%Y-%m-%d')
    config_core.set(section='lifetime_model', option='cutoff_date', value=tomorrow)
    config_core.get(section='lifetime_model', option='cutoff_date', default=None, use_cache=False)
    result = add_exception(datasets, root_account, pattern='wekhewfk', comments='This is a comment', expires_at=datetime.now())

    # Check if the Not Existing DIDs are identified
    result_unknown = sorted((entry['scope'], entry['name']) for entry in result['unknown'])
    unknown = sorted((entry['scope'], entry['name']) for entry in datasets[nb_datasets:nb_datasets + 2])
    assert result_unknown == unknown

    # Check if the DIDs not affected by the Lifetime Model are identified
    result_not_affected = sorted((entry['scope'], entry['name']) for entry in result['not_affected'])
    assert result_not_affected == list_dids[-1]

    # Check if an exception was done for each datatype
    list_exceptions = []
    for exception_id in result['exceptions']:
        exception_dids = sorted((entry['scope'], entry['name']) for entry in result['exceptions'][exception_id])
        list_exceptions.append(exception_dids)
    for did in list_dids[:nb_datatype - 1]:
        assert did in list_exceptions
def test_update_nonexisting_subscription(self):
    """ SUBSCRIPTION (API): Test the update of a non-existing subscription """
    subscription_name = uuid()
    # Bug fix: updating an unknown subscription raises SubscriptionNotFound;
    # the original issued the call without asserting the expected failure,
    # so the exception simply errored the test.
    with pytest.raises(SubscriptionNotFound):
        update_subscription(name=subscription_name, account='root', metadata={'filter': {'project': ['toto', ]}}, issuer='root')
def test_create_and_update_and_list_subscription(self):
    """ SUBSCRIPTION (API): Test the creation of a new subscription, update it, list it """
    sub_name = uuid()
    # Creation with an unknown activity in the replication rules is rejected.
    with assert_raises(InvalidObject):
        result = add_subscription(name=sub_name, account='root',
                                  filter={'project': self.projects,
                                          'datatype': ['AOD', ],
                                          'excluded_pattern': self.pattern1,
                                          'account': ['tier0', ]},
                                  replication_rules=[{'lifetime': 86400,
                                                      'rse_expression': 'MOCK|MOCK2',
                                                      'copies': 2,
                                                      'activity': 'noactivity'}],
                                  lifetime=100000, retroactive=0, dry_run=0,
                                  comments='This is a comment', issuer='root')
    # Creation with a valid activity succeeds.
    result = add_subscription(name=sub_name, account='root',
                              filter={'project': self.projects,
                                      'datatype': ['AOD', ],
                                      'excluded_pattern': self.pattern1,
                                      'account': ['tier0', ]},
                              replication_rules=[{'lifetime': 86400,
                                                  'rse_expression': 'MOCK|MOCK2',
                                                  'copies': 2,
                                                  'activity': 'Data Brokering'}],
                              lifetime=100000, retroactive=0, dry_run=0,
                              comments='This is a comment', issuer='root')
    # A non-dict filter is a type error.
    with assert_raises(TypeError):
        result = update_subscription(name=sub_name, account='root', metadata={'filter': 'toto'}, issuer='root')
    # A filter whose values are not lists is an invalid object.
    with assert_raises(InvalidObject):
        result = update_subscription(name=sub_name, account='root', metadata={'filter': {'project': 'toto'}}, issuer='root')
    # A well-formed update succeeds and returns None.
    result = update_subscription(name=sub_name, account='root', metadata={'filter': {'project': ['toto', ]}}, issuer='root')
    assert_equal(result, None)
    # The stored filter now reflects the updated project value.
    listed = []
    for entry in list_subscriptions(name=sub_name, account='root'):
        listed.append(entry)
    assert_equal(len(listed), 1)
    assert_equal(loads(listed[0]['filter'])['project'][0], 'toto')
def test_upload_download(self):
    """CLIENT(USER): rucio upload files to dataset/download dataset"""
    tmp_file1 = file_generator()
    tmp_file2 = file_generator()
    tmp_file3 = file_generator()
    tmp_dsn = 'tests.rucio_client_test_server_' + uuid()

    def check_exitcode(exitcode):
        # Honour both execution modes (the same check was repeated inline
        # four times in the original).
        if self.running_with_unittest:
            self.assertEqual(exitcode, 0)
        else:
            assert exitcode == 0

    # Adding files to a new dataset
    cmd = 'rucio upload --rse {0} --scope {1} {2} {3} {4} {1}:{5}'.format(self.rse, self.scope, tmp_file1, tmp_file2, tmp_file3, tmp_dsn)
    print(self.marker + cmd)
    exitcode, out, err = execute(cmd)
    print(out)
    print(err)
    remove(tmp_file1)
    remove(tmp_file2)
    remove(tmp_file3)
    check_exitcode(exitcode)
    # List the files
    cmd = 'rucio list-files {0}:{1}'.format(self.scope, tmp_dsn)
    print(self.marker + cmd)
    exitcode, out, err = execute(cmd)
    print(out)
    print(err)
    check_exitcode(exitcode)
    # List the replicas
    cmd = 'rucio list-file-replicas {0}:{1}'.format(self.scope, tmp_dsn)
    print(self.marker + cmd)
    exitcode, out, err = execute(cmd)
    print(out)
    print(err)
    check_exitcode(exitcode)
    # Downloading dataset
    cmd = 'rucio download --dir /tmp/ {0}:{1}'.format(self.scope, tmp_dsn)
    print(self.marker + cmd)
    exitcode, out, err = execute(cmd)
    print(out)
    print(err)
    # The files should be there
    cmd = 'ls /tmp/{0}/rucio_testfile_*'.format(tmp_dsn)
    print(self.marker + cmd)
    exitcode, out, err = execute(cmd)
    print(err, out)
    check_exitcode(exitcode)
    # cleaning
    remove('/tmp/{0}/'.format(tmp_dsn) + tmp_file1[5:])
    remove('/tmp/{0}/'.format(tmp_dsn) + tmp_file2[5:])
    remove('/tmp/{0}/'.format(tmp_dsn) + tmp_file3[5:])
    # Bug fix: the original used '+' and discarded the result, so the created
    # DIDs were never recorded for teardown. Append them in place.
    self.generated_dids += '{0}:{1} {0}:{2} {0}:{3} {0}:{4}'.format(self.scope, tmp_file1, tmp_file2, tmp_file3, tmp_dsn).split(' ')
def xtest_add_value_to_bad_key(self):
    """ META (CLIENTS): Add a new value to a non existing key """
    # Disabled test (x-prefixed): tries to attach a value to a key that was
    # never registered.
    new_value = 'value_' + str(uuid())
    self.meta_client.add_value(key="Nimportnawak", value=new_value)