def check_correct_api(params, expected_requests):
    """Query /requests/list with *params* and verify the returned request tuples.

    Uses ``rest_client``/``auth_token``/``vo`` from the enclosing scope.
    """
    extra_headers = {'X-Rucio-Type': 'user', 'X-Rucio-Account': 'root'}
    response = rest_client.get(
        '/requests/list',
        query_string=params,
        headers=headers(auth(auth_token), vohdr(vo), hdrdict(extra_headers)),
    )
    assert response.status_code == 200
    # Last split element is the empty string after the trailing newline — drop it.
    parsed = (parse_response(line) for line in response.get_data(as_text=True).split('\n')[:-1])
    found = {(req['state'], req['source_rse_id'], req['dest_rse_id'], req['name']) for req in parsed}
    assert found == expected_requests
def test_userpass(rest_client, auth_token):
    """ ACCOUNT (REST): send a POST to add an identity to an account."""
    username = uuid()

    # normal addition
    identity_headers = {
        'X-Rucio-Username': username,
        'X-Rucio-Password': '******',
        'X-Rucio-Email': 'email',
    }
    response = rest_client.put(
        '/identities/root/userpass',
        headers=headers(auth(auth_token), hdrdict(identity_headers)),
    )
    assert response.status_code == 201
def test_update_nonexisting_subscription(rest_client, auth_token):
    """ SUBSCRIPTION (REST): Test the update of a non-existing subscription """
    name = uuid()
    payload = {'options': {'filter': {'project': ['toto']}}}
    response = rest_client.put('/subscriptions/root/' + name,
                               headers=headers(auth(auth_token)),
                               json=payload)
    # The subscription was never created, so the update must fail.
    assert response.status_code == 404
    assert response.headers.get('ExceptionClass') == 'SubscriptionNotFound'
def test_sort_geoip_address_not_found_error(vo, rest_client, auth_token, protocols_setup, content_type):
    """Replicas: test sorting via geoip with ignoring geoip errors."""

    class MockedGeoIPError(Exception):
        def __init__(self, *args):
            super(MockedGeoIPError, self).__init__(*args)

    def fake_get_geoip_db(*args, **kwargs):
        raise MockedGeoIPError()

    payload = {
        'dids': [{'scope': f['scope'].external, 'name': f['name'], 'type': 'FILE'}
                 for f in protocols_setup['files']],
        'schemes': schemes,
        'sort': 'geoip',
    }

    # invalidate cache for __get_distance so that __get_geoip_db is called
    replica_sorter.REGION.invalidate()

    with mock.patch('rucio.core.replica_sorter.__get_geoip_db', side_effect=fake_get_geoip_db) as get_geoip_db_mock:
        response = rest_client.post(
            '/replicas/list',
            headers=headers(auth(auth_token), vohdr(vo), accept(content_type)),
            json=payload,
        )
        # The geoip failure must be swallowed and a normal listing returned.
        assert response.status_code == 200
        assert response.get_data(as_text=True)
        get_geoip_db_mock.assert_called()
def check_error_api(params, exception_class, exception_message, code):
    """Query /requests/history/list with *params* and verify the error response.

    Uses ``rest_client``/``auth_token``/``vo`` from the enclosing scope.
    """
    extra_headers = {'X-Rucio-Type': 'user', 'X-Rucio-Account': 'root'}
    response = rest_client.get(
        '/requests/history/list',
        query_string=params,
        headers=headers(auth(auth_token), vohdr(vo), hdrdict(extra_headers)),
    )
    assert response.status_code == code
    body = parse_response(response.get_data(as_text=True))
    assert (body['ExceptionClass'], body['ExceptionMessage']) == (exception_class, exception_message)
def test_list_scope_account_not_found(rest_client, auth_token):
    """ SCOPE (REST): send a GET list all scopes for a not existing account """
    response = rest_client.get('/accounts/testaccount/scopes/',
                               headers=headers(auth(auth_token)))
    assert response.status_code == 404
    assert response.headers.get('ExceptionClass') == 'AccountNotFound'
def test_whoami_account(rest_client, auth_token):
    """ ACCOUNT (REST): Test the whoami method."""
    response = rest_client.get('/accounts/whoami', headers=headers(auth(auth_token)))
    # whoami answers with a redirect to the actual account resource.
    assert response.status_code == 303
def test_create_and_update_and_list_subscription(rse_factory, rest_client, auth_token):
    """ SUBSCRIPTION (REST): Test the creation of a new subscription, update it, list it """
    name = uuid()
    rse1, _ = rse_factory.make_mock_rse()
    rse2, _ = rse_factory.make_mock_rse()
    rse_expression = '%s|%s' % (rse1, rse2)
    projects = ['data12_900GeV', 'data12_8TeV', 'data13_900GeV', 'data13_8TeV']
    pattern1 = r'(_tid|physics_(Muons|JetTauEtmiss|Egamma)\..*\.ESD|express_express(?!.*NTUP|.*\.ESD|.*RAW)|(physics|express)(?!.*NTUP).* \ \.x|physics_WarmStart|calibration(?!_PixelBeam.merge.(NTUP_IDVTXLUMI|AOD))|merge.HIST|NTUP_MUONCALIB|NTUP_TRIG)'

    create_payload = {
        'options': {
            'filter': {
                'project': projects,
                'datatype': ['AOD'],
                'excluded_pattern': pattern1,
                'account': ['tier0'],
            },
            'replication_rules': [{
                'lifetime': 86400,
                'rse_expression': rse_expression,
                'copies': 2,
                'activity': 'Data Brokering',
            }],
            'lifetime': 100000,
            'retroactive': 0,
            'dry_run': 0,
            'comments': 'blahblah',
        }
    }
    response = rest_client.post('/subscriptions/root/' + name,
                                headers=headers(auth(auth_token)),
                                json=create_payload)
    assert response.status_code == 201

    # Update the subscription's filter to a single project.
    update_payload = {'options': {'filter': {'project': ['toto']}}}
    response = rest_client.put('/subscriptions/root/' + name,
                               headers=headers(auth(auth_token)),
                               json=update_payload)
    assert response.status_code == 201

    # List it back and check the stored filter reflects the update.
    response = rest_client.get('/subscriptions/root/' + name,
                               headers=headers(auth(auth_token)))
    assert response.status_code == 200
    # The 'filter' field is itself JSON-encoded inside the JSON body.
    stored_filter = loads(loads(response.get_data(as_text=True))['filter'])
    assert stored_filter['project'][0] == 'toto'
def test_not_sorting_lan_replicas(vo, rest_client, auth_token, protocols_setup, content_type):
    """Replicas: test not sorting only LANs."""
    # yes, this is rather a hack (but works on the API as well). I would like to have an rse_expression parameter instead.
    site_expression = '|site='.join(info['site'] for info in protocols_setup['rse_info'])
    payload = {
        'dids': [{'scope': f['scope'].external, 'name': f['name'], 'type': 'FILE'}
                 for f in protocols_setup['files']],
        'client_location': {'site': site_expression},
        'schemes': schemes,
    }

    def fake_sort_replicas(dictreplica, *args, **kwargs):
        # test that nothing is passed to sort_replicas
        assert not dictreplica
        return []

    # invalidate cache for parse_expression('site=…')
    rse_expression_parser.REGION.invalidate()
    with mock.patch('rucio.web.rest.flaskapi.v1.replicas.sort_replicas', side_effect=fake_sort_replicas):
        response = rest_client.post(
            '/replicas/list',
            headers=headers(auth(auth_token), vohdr(vo), accept(content_type)),
            json=payload,
        )

    assert response.status_code == 200
    body = response.get_data(as_text=True)
    assert body

    if content_type == Mime.METALINK:
        replicas = parse_replicas_from_string(body)
        print(replicas)
        assert len(replicas) == 1
        sources = replicas[0]['sources']
        print(sources)
        # 4 for lan, since one is blocked for lan for each site
        assert len(sources) == 4
    elif content_type == Mime.JSON_STREAM:
        replicas = [json.loads(stripped)
                    for stripped in map(str.strip, body.splitlines(keepends=False))
                    if stripped]
        print(replicas)
        assert len(replicas) == 1
        # 4 for lan, since one is blocked for lan for each site
        assert len(replicas[0]['pfns']) == 4
def test_create_user_success(rest_client, auth_token):
    """ ACCOUNT (REST): send a POST to create a new user """
    new_account = account_name_generator()
    payload = {'type': 'USER', 'email': '*****@*****.**'}
    response = rest_client.post('/accounts/' + new_account,
                                headers=headers(auth(auth_token)),
                                json=payload)
    assert response.status_code == 201
def test_delete_identity_of_account(vo, rest_client):
    """ ACCOUNT (REST): send a DELETE to remove an identity of an account."""
    account = account_name_generator()
    identity = uuid()
    password = '******'
    add_account(account, 'USER', '*****@*****.**', 'root', vo=vo)
    add_identity(identity, IdentityType.USERPASS, '*****@*****.**', password)
    add_account_identity(identity, IdentityType.USERPASS, InternalAccount(account, vo=vo), '*****@*****.**')

    # Log in with the freshly created identity to obtain a token for that account.
    login_response = rest_client.get('/auth/userpass',
                                     headers=headers(loginhdr(account, identity, password), vohdr(vo)))
    assert login_response.status_code == 200
    assert 'X-Rucio-Auth-Token' in login_response.headers
    token = str(login_response.headers.get('X-Rucio-Auth-Token'))
    assert len(token) != 0

    # normal deletion
    payload = {'authtype': 'USERPASS', 'identity': identity}
    response = rest_client.delete('/accounts/' + account + '/identities',
                                  headers=headers(auth(token)),
                                  json=payload)
    assert response.status_code == 200

    # unauthorized deletion: the token belongs to `account`, not `other_account`
    other_account = account_name_generator()
    response = rest_client.delete('/accounts/' + other_account + '/identities',
                                  headers=headers(auth(token)),
                                  json=payload)
    assert response.status_code == 401
def test_create_user_missing_parameter(rest_client, auth_token):
    """ ACCOUNT (REST): send a POST with a missing parameter"""
    response = rest_client.post('/accounts/account', headers=headers(auth(auth_token)), json={})
    assert response.status_code == 400
    assert response.headers.get('ExceptionClass') == 'KeyError'
    expected_body = {"ExceptionMessage": "\'type\' not defined", "ExceptionClass": "KeyError"}
    assert loads(response.get_data(as_text=True)) == expected_body
def test_create_user_non_json_body(rest_client, auth_token):
    """ ACCOUNT (REST): send a POST with a non json body"""
    response = rest_client.post('/accounts/testuser',
                                headers=headers(auth(auth_token)),
                                data="unfug")
    assert response.status_code == 400
    assert response.headers.get('ExceptionClass') == 'ValueError'
    expected_body = {"ExceptionMessage": "cannot decode json parameter dictionary", "ExceptionClass": "ValueError"}
    assert loads(response.get_data(as_text=True)) == expected_body
def test_sort_geoip_wan_client_location(vo, rest_client, auth_token, protocols_setup, content_type, mock_geoip_db, mock_get_lat_long):
    """Replicas: test sorting a few WANs via geoip."""
    data = {
        'dids': [{'scope': f['scope'].external, 'name': f['name'], 'type': 'FILE'} for f in protocols_setup['files']],
        'schemes': schemes,
        'sort': 'geoip',
    }

    # Two expected hostname orders; presumably aperture.com hosts correspond to the
    # "aut" site and blackmesa.com to the "jpn" site (inferred from the variable
    # names — TODO confirm against protocols_setup fixture).
    first_aut_then_jpn = ['root.aperture.com', 'davs.aperture.com', 'gsiftp.aperture.com', 'gsiftp.blackmesa.com', 'davs.blackmesa.com', 'root.blackmesa.com']
    first_jpn_then_aut = ['gsiftp.blackmesa.com', 'davs.blackmesa.com', 'root.blackmesa.com', 'root.aperture.com', 'davs.aperture.com', 'gsiftp.aperture.com']

    for client_location, expected_order in (
            ('Switzerland', first_aut_then_jpn),
            ('Romania', first_aut_then_jpn),
            ('Austria', first_aut_then_jpn),
            ('United Kingdom', first_aut_then_jpn),
            ('Libya', first_aut_then_jpn),
            ('China', first_jpn_then_aut),
            ('United States', first_jpn_then_aut),
            ('Japan', first_jpn_then_aut),
            ('Taiwan', first_jpn_then_aut),
            ('Israel', first_aut_then_jpn),
            ('Finland', first_aut_then_jpn),
            ('United Arab Emirates', first_aut_then_jpn),
    ):
        # Spoof the client's country via an X-Forwarded-For IP from that location.
        response = rest_client.post(
            '/replicas/list',
            headers=headers(auth(auth_token), vohdr(vo), accept(content_type), [('X-Forwarded-For', LOCATION_TO_IP[client_location])]),
            json=data)
        assert response.status_code == 200
        replicas_response = response.get_data(as_text=True)
        assert replicas_response

        replicas = []
        pfns = []
        if content_type == Mime.METALINK:
            replicas = parse_replicas_from_string(replicas_response)
            pfns = [s['pfn'] for s in replicas[0]['sources']]
        elif content_type == Mime.JSON_STREAM:
            # One JSON document per non-empty line.
            replicas = list(map(json.loads, filter(bool, map(str.strip, replicas_response.splitlines(keepends=False)))))
            pfns = list(replicas[0]['pfns'])
        print(client_location, pfns)
        assert len(replicas) == 1
        # The geoip sorter must order hosts by proximity to the spoofed location.
        assert [urlparse(pfn).hostname for pfn in pfns] == expected_order
def test_scope_failure(rest_client, auth_token):
    """ SCOPE (REST): send a POST to create a new scope for a not existing account to test the error"""
    scopeusr = scope_name_generator()
    # `scopeusr` is used as both the (never-created) account name and the scope
    # name, so the account lookup must fail with 404.
    # NOTE: a stray `account_name_generator()` call whose result was discarded
    # has been removed — it had no effect on the test.
    response = rest_client.post('/accounts/%s/scopes/%s' % (scopeusr, scopeusr),
                                headers=headers(auth(auth_token)))
    assert response.status_code == 404
def test_sort_geoip_wan(vo, rest_client, auth_token, protocols_setup, content_type):
    """Replicas: test sorting a few WANs via geoip."""
    n = 10
    nmap = {}

    def fake_get_distance(se1, se2, *args, **kwargs):
        # Hand out a strictly decreasing "distance" per call and remember which
        # value each source host got, so the expected ordering is deterministic.
        nonlocal n, nmap
        n = n - 1
        print("fake_get_distance", {'se1': se1, 'se2': se2, 'n': n})
        assert se1, 'pfn host must be se1 for this test'
        nmap[se1] = n
        return n

    data = {
        'dids': [{'scope': f['scope'].external, 'name': f['name'], 'type': 'FILE'} for f in protocols_setup['files']],
        'schemes': schemes,
        'sort': 'geoip',
    }

    with mock.patch('rucio.core.replica_sorter.__get_distance', side_effect=fake_get_distance):
        response = rest_client.post('/replicas/list', headers=headers(auth(auth_token), vohdr(vo), accept(content_type)), json=data)
        assert response.status_code == 200
        replicas_response = response.get_data(as_text=True)
        assert replicas_response

    # because urlparse hostname result is lower case
    sorted_hosts = list(map(str.lower, sorted(nmap, key=nmap.get)))

    if content_type == Mime.METALINK:
        replicas = parse_replicas_from_string(replicas_response)
        print(replicas)
        assert len(replicas) == 1
        sources_list = replicas[0]['sources']
        print(sources_list)
        assert len(sources_list) == 6
        # Order sources by the priority assigned by the server, then compare the
        # host sequence against the order implied by the faked distances.
        sorted_replica_hosts = list(sorted(sources_list, key=lambda source: source['priority']))
        sorted_replica_hosts = list(map(lambda source: urlparse(source['pfn']).hostname, sorted_replica_hosts))
        assert sorted_hosts == sorted_replica_hosts, 'assert sorting of result as distance suggested'
    elif content_type == Mime.JSON_STREAM:
        # One JSON document per non-empty line.
        replicas = list(map(json.loads, filter(bool, map(str.strip, replicas_response.splitlines(keepends=False)))))
        print(replicas)
        assert len(replicas) == 1
        sources_dict = replicas[0]['pfns']
        assert len(sources_dict) == 6
        sorted_replica_hosts = list(sorted(sources_dict, key=lambda pfn: sources_dict[pfn]['priority']))
        sorted_replica_hosts = list(map(lambda source: urlparse(source).hostname, sorted_replica_hosts))
        assert sorted_hosts == sorted_replica_hosts, 'assert sorting of result as distance suggested'
def test_sort_geoip_lan_before_wan(vo, rest_client, auth_token, protocols_setup, content_type, info_id):
    """Replicas: test sorting LAN sites before WANs via geoip."""
    n = 2
    nmap = {}

    def fake_get_distance(se1, se2, *args, **kwargs):
        # Hand out a strictly decreasing "distance" per call and remember which
        # value each source host got, so the expected WAN ordering is deterministic.
        nonlocal n, nmap
        n = n - 1
        print("fake_get_distance", {'se1': se1, 'se2': se2, 'n': n})
        assert se1, 'pfn host must be se1 for this test'
        nmap[se1] = n
        return n

    data = {
        'dids': [{'scope': f['scope'].external, 'name': f['name'], 'type': 'FILE'} for f in protocols_setup['files']],
        # Declaring the client's site makes that site's replicas LAN-local.
        'client_location': {'site': protocols_setup['rse_info'][info_id]['site']},
        'schemes': schemes,
        'sort': 'geoip',
    }

    # invalidate cache for parse_expression('site=…')
    rse_expression_parser.REGION.invalidate()

    with mock.patch('rucio.core.replica_sorter.__get_distance', side_effect=fake_get_distance):
        response = rest_client.post('/replicas/list', headers=headers(auth(auth_token), vohdr(vo), accept(content_type)), json=data)
        assert response.status_code == 200
        replicas_response = response.get_data(as_text=True)
        assert replicas_response

    # because urlparse hostname result is lower case
    sorted_wan_hosts = list(map(str.lower, sorted(nmap, key=nmap.get)))

    if content_type == Mime.METALINK:
        replicas = parse_replicas_from_string(replicas_response)
        print(replicas)
        assert len(replicas) == 1
        sources_list = replicas[0]['sources']
        print(sources_list)
        # 3 for wan and 2 for lan, since one is blocked for lan for each site
        assert len(sources_list) == 5
        sorted_replica_hosts = list(sorted(sources_list, key=lambda source: source['priority']))
        print(sorted_replica_hosts)
        # All LAN sources must belong to the client's own site.
        lan_pfns = list(filter(lambda source: source['domain'] == 'lan', sorted_replica_hosts))
        assert len(lan_pfns) == 2
        for lanpfn in lan_pfns:
            assert protocols_setup['rse_info'][info_id]['name'] == lanpfn['rse']
        # The remaining (WAN) sources must follow the faked-distance ordering.
        sorted_replica_wan_hosts = list(map(lambda source: urlparse(source['pfn']).hostname,
                                            filter(lambda source: source['domain'] != 'lan', sorted_replica_hosts)))
        assert sorted_wan_hosts == sorted_replica_wan_hosts
    elif content_type == Mime.JSON_STREAM:
        # One JSON document per non-empty line.
        replicas = list(map(json.loads, filter(bool, map(str.strip, replicas_response.splitlines(keepends=False)))))
        print(replicas)
        assert len(replicas) == 1
        sources_dict = replicas[0]['pfns']
        # 3 for wan and 2 for lan, since one is blocked for lan for each site
        assert len(sources_dict) == 5
        sorted_replica_hosts = list(sorted(sources_dict, key=lambda pfn: sources_dict[pfn]['priority']))
        # All LAN sources must belong to the client's own site (by rse_id here).
        lan_pfns = list(filter(lambda pfn: sources_dict[pfn]['domain'] == 'lan', sorted_replica_hosts))
        assert len(lan_pfns) == 2
        for lanpfn in lan_pfns:
            assert protocols_setup['rse_info'][info_id]['id'] == sources_dict[lanpfn]['rse_id']
        # The remaining (WAN) sources must follow the faked-distance ordering.
        wan_pfns = filter(lambda pfn: sources_dict[pfn]['domain'] != 'lan', sorted_replica_hosts)
        sorted_replica_wan_hosts = list(map(lambda pfn: urlparse(pfn).hostname, wan_pfns))
        assert sorted_wan_hosts == sorted_replica_wan_hosts
def __test_rest_bad_replica_methods_for_ui(rest_client, auth_token, list_pfns):
    """ REPLICA (REST): Test the listing of bad and suspicious replicas """
    if list_pfns:
        common_data = {'list_pfns': 'True'}
    else:
        common_data = {}

    # Count all declared replicas regardless of state.
    data = {**common_data}
    response = rest_client.get('/replicas/bad/states', headers=headers(auth(auth_token)), query_string=data)
    assert response.status_code == 200
    tot_files = []
    for line in response.get_data(as_text=True).split('\n'):
        if line != '':
            tot_files.append(dumps(line))
    nb_tot_files = len(tot_files)

    # Count replicas in state 'B' (bad).
    data = {'state': 'B', **common_data}
    response = rest_client.get('/replicas/bad/states', headers=headers(auth(auth_token)), query_string=data)
    assert response.status_code == 200
    tot_bad_files = []
    for line in response.get_data(as_text=True).split('\n'):
        if line != '':
            tot_bad_files.append(dumps(line))
    nb_tot_bad_files1 = len(tot_bad_files)

    # Count replicas in state 'S' (suspicious).
    data = {'state': 'S', **common_data}
    response = rest_client.get('/replicas/bad/states', headers=headers(auth(auth_token)), query_string=data)
    assert response.status_code == 200
    tot_suspicious_files = []
    for line in response.get_data(as_text=True).split('\n'):
        if line != '':
            tot_suspicious_files.append(dumps(line))
    nb_tot_suspicious_files = len(tot_suspicious_files)

    # Count replicas in state 'T' (temporary unavailable).
    data = {'state': 'T', **common_data}
    response = rest_client.get('/replicas/bad/states', headers=headers(auth(auth_token)), query_string=data)
    assert response.status_code == 200
    tot_temporary_unavailable_files = []
    for line in response.get_data(as_text=True).split('\n'):
        if line != '':
            tot_temporary_unavailable_files.append(dumps(line))
    nb_tot_temporary_unavailable_files = len(tot_temporary_unavailable_files)

    # The unfiltered total must be the sum of the three state-filtered counts.
    assert nb_tot_files == nb_tot_bad_files1 + nb_tot_suspicious_files + nb_tot_temporary_unavailable_files

    # No replica can have been declared bad later than tomorrow.
    tomorrow = datetime.utcnow() + timedelta(days=1)
    data = {'state': 'B', 'younger_than': tomorrow.isoformat(), **common_data}
    response = rest_client.get('/replicas/bad/states', headers=headers(auth(auth_token)), query_string=data)
    assert response.status_code == 200
    tot_bad_files = []
    for line in response.get_data(as_text=True).split('\n'):
        if line != '':
            tot_bad_files.append(dumps(line))
    nb_tot_bad_files = len(tot_bad_files)
    assert nb_tot_bad_files == 0

    if not list_pfns:
        # The summary endpoint's per-line 'BAD' counters must add up to the
        # state='B' count obtained above.
        response = rest_client.get('/replicas/bad/summary', headers=headers(auth(auth_token)))
        assert response.status_code == 200
        nb_tot_bad_files2 = 0
        for line in response.get_data(as_text=True).split('\n'):
            if line != '':
                line = loads(line)
                nb_tot_bad_files2 += int(line.get('BAD', 0))
        assert nb_tot_bad_files1 == nb_tot_bad_files2
def test_get_user_failure(rest_client, auth_token):
    """ ACCOUNT (REST): send a GET with a wrong user test the error """
    response = rest_client.get('/accounts/wronguser', headers=headers(auth(auth_token)))
    assert response.status_code == 404
def test_del_user_failure(rest_client, auth_token):
    """ ACCOUNT (REST): send a DELETE with a wrong user to test the error """
    response = rest_client.delete('/accounts/wronguser',
                                  headers=headers(auth(auth_token)))
    assert response.status_code == 404
def test_list_rules_states(vo, rest_client, auth_token):
    """ SUBSCRIPTION (REST): Test listing of rule states for subscription """
    tmp_scope = InternalScope('mock_' + uuid()[:8], vo=vo)
    root = InternalAccount('root', vo=vo)
    add_scope(tmp_scope, root)
    site_a = 'RSE%s' % uuid().upper()
    site_b = 'RSE%s' % uuid().upper()
    site_a_id = add_rse(site_a, vo=vo)
    site_b_id = add_rse(site_b, vo=vo)

    # Add quota (-1: presumably unlimited — TODO confirm against account-limit API)
    set_local_account_limit(root, site_a_id, -1)
    set_local_account_limit(root, site_b_id, -1)

    # add a new dataset
    dsn = 'dataset-%s' % uuid()
    add_did(scope=tmp_scope, name=dsn, type=DIDType.DATASET, account=root)

    subscription_name = uuid()
    subid = add_subscription(name=subscription_name,
                             account='root',
                             filter={'account': ['root', ], 'scope': [tmp_scope.external, ]},
                             replication_rules=[{'lifetime': 86400, 'rse_expression': 'MOCK|MOCK2', 'copies': 2, 'activity': 'Data Brokering'}],
                             lifetime=100000,
                             retroactive=0,
                             dry_run=0,
                             comments='We want a shrubbery',
                             issuer='root',
                             vo=vo)

    # Add two rules attached to the subscription, one per site.
    add_rule(dids=[{'scope': tmp_scope, 'name': dsn}], account=root, copies=1,
             rse_expression=site_a, grouping='NONE', weight=None, lifetime=None,
             locked=False, subscription_id=subid)
    add_rule(dids=[{'scope': tmp_scope, 'name': dsn}], account=root, copies=1,
             rse_expression=site_b, grouping='NONE', weight=None, lifetime=None,
             locked=False, subscription_id=subid)

    response = rest_client.get('/subscriptions/%s/%s/Rules/States' % ('root', subscription_name),
                               headers=headers(auth(auth_token)))
    assert response.status_code == 200
    # Find the row for our subscription; rows are indexable — index 1 is the
    # subscription name and index 3 the rule count (presumably; verify against
    # the Rules/States endpoint schema).
    rulestates = None
    for line in response.get_data(as_text=True).split('\n'):
        if line:
            rulestates = loads(line)
            if rulestates[1] == subscription_name:
                break
    assert rulestates is not None
    # Both rules added above must be accounted for.
    assert rulestates[3] == 2
def test_create_existing_subscription(rest_client, auth_token):
    """ SUBSCRIPTION (REST): Test the creation of a existing subscription """
    name = uuid()
    projects = ['data12_900GeV', 'data12_8TeV', 'data13_900GeV', 'data13_8TeV']
    pattern1 = r'(_tid|physics_(Muons|JetTauEtmiss|Egamma)\..*\.ESD|express_express(?!.*NTUP|.*\.ESD|.*RAW)|(physics|express)(?!.*NTUP).* \ \.x|physics_WarmStart|calibration(?!_PixelBeam.merge.(NTUP_IDVTXLUMI|AOD))|merge.HIST|NTUP_MUONCALIB|NTUP_TRIG)'
    payload = {
        'options': {
            'name': name,
            'filter': {
                'project': projects,
                'datatype': ['AOD', ],
                'excluded_pattern': pattern1,
                'account': ['tier0', ],
            },
            'replication_rules': [{
                'lifetime': 86400,
                'rse_expression': 'MOCK|MOCK2',
                'copies': 2,
                'activity': 'Data Brokering',
            }],
            'lifetime': 100000,
            'retroactive': 0,
            'dry_run': 0,
            'comments': 'We are the knights who say Ni !',
        }
    }

    # First creation succeeds...
    response = rest_client.post('/subscriptions/root/' + name,
                                headers=headers(auth(auth_token)),
                                json=payload)
    assert response.status_code == 201

    # ...while re-posting the identical subscription is rejected as a duplicate.
    response = rest_client.post('/subscriptions/root/' + name,
                                headers=headers(auth(auth_token)),
                                json=payload)
    assert response.status_code == 409
    assert response.headers.get('ExceptionClass') == 'SubscriptionDuplicate'