def test_same_request_different_users(dispatcher_live_fixture):
    """The same request issued by two different users must produce two
    distinct job ids, each backed by the same number of job directories."""
    server = dispatcher_live_fixture
    logger.info("constructed server: %s", server)

    job_ids = []
    # issue the identical request once per user token
    for sub in ("*****@*****.**", "*****@*****.**"):
        token = jwt.encode({**default_token_payload, "sub": sub},
                           secret_key, algorithm='HS256')
        jdata = ask(
            server,
            {
                **default_params,
                'product_type': 'dummy',
                'query_type': "Dummy",
                'instrument': 'empty',
                'token': token
            },
            expected_query_status=["done"],
            max_time_s=50,
        )
        for field in ("debug_message", "error_message", "message"):
            assert jdata["exit_status"][field] == ""
        job_ids.append(jdata['job_monitor']['job_id'])

    job_id_1, job_id_2 = job_ids
    # different users => different job ids
    assert job_id_1 != job_id_2

    # both jobs must have left behind the same number of matching directories
    dir_list_1 = glob.glob('*_jid_%s*' % job_id_1)
    dir_list_2 = glob.glob('*_jid_%s*' % job_id_2)
    assert len(dir_list_1) == len(dir_list_2)
def test_numerical_authorization_user_roles(dispatcher_live_fixture, roles):
    """A numerical product with p=55 needs the 'unige-hpc-full' role; any
    other role set must be rejected with a 403 and a message listing the
    roles that are missing."""
    server = dispatcher_live_fixture
    logger.info("constructed server: %s", server)

    # valid token carrying the roles under test
    encoded_token = jwt.encode({**default_token_payload, "roles": roles},
                               secret_key, algorithm='HS256')

    params = {
        **default_params,
        'product_type': 'numerical',
        'query_type': "Dummy",
        'instrument': 'empty',
        'p': 55,
        'token': encoded_token
    }

    # normalize the comma-separated roles string into a stripped list
    roles = [r.strip() for r in roles.split(',')]

    if 'unige-hpc-full' in roles:
        jdata = ask(
            server,
            params,
            expected_query_status=["done"],
            max_time_s=150,
        )
        for field in ("debug_message", "error_message", "message"):
            assert jdata["exit_status"][field] == ""
    else:
        # an empty role list means we make a public (token-less) request
        if len(roles) == 0:
            params.pop('token')
        jdata = ask(
            server,
            params,
            expected_query_status=["failed"],
            max_time_s=150,
            expected_status_code=403,
        )
        assert jdata["exit_status"]["debug_message"] == ""
        assert jdata["exit_status"]["error_message"] == ""
        # NOTE: this text (including its spelling) must match the server
        # response byte-for-byte
        expected_message = (
            "Unfortunately, your priviledges are not sufficient to make the request for this particular product and parameter combination.\n"
            f"- Your priviledge roles include {roles}\n- "
            "You are lacking all of the following roles:\n"
            + (" - general: general role is needed for p>50\n" if "general" not in roles else "")
            + " - unige-hpc-full: unige-hpc-full role is needed for p>50 as well\n"
            "You can request support if you think you should be able to make this request."
        )
        assert jdata["exit_status"]["message"] == expected_message

    logger.info("Json output content")
    logger.info(json.dumps(jdata, indent=4))
def test_dummy_authorization_user_roles(dispatcher_live_fixture, roles):
    """A dummy product request must succeed whatever roles the token carries."""
    server = dispatcher_live_fixture
    logger.info("constructed server: %s", server)

    # valid token carrying the roles under test
    encoded_token = jwt.encode({**default_token_payload, "roles": roles},
                               secret_key, algorithm='HS256')

    jdata = ask(
        server,
        {
            **default_params,
            'product_type': "dummy",
            'query_type': "Dummy",
            'instrument': 'empty',
            'token': encoded_token
        },
        expected_query_status=["done"],
        max_time_s=150,
    )

    for field in ("debug_message", "error_message", "message"):
        assert jdata["exit_status"][field] == ""

    logger.info("Json output content")
    logger.info(json.dumps(jdata, indent=4))
def test_scws_list_file(dispatcher_live_fixture):
    """An uploaded scw list file ('use_scws=user_file') must be converted to a
    'p_list' parameter, must not be echoed back as 'use_scws', and must be
    part of the restricted parameter dictionary hashed into the job id.

    Fix: read the uploaded file inside a ``with`` block so the file handle is
    closed even when ``ask()`` raises (the original ``open()``/``close()``
    pair leaked the handle on failure).
    """
    server = dispatcher_live_fixture
    logger.info("constructed server: %s", server)

    # valid token carrying the roles required by the numerical product
    token_payload = {
        **default_token_payload,
        "roles": "unige-hpc-full, general",
    }
    encoded_token = jwt.encode(token_payload, secret_key, algorithm='HS256')

    params = {
        **default_params,
        'product_type': 'numerical',
        'query_type': "Dummy",
        'instrument': 'empty',
        'p': 5,
        'use_scws': 'user_file',
        'token': encoded_token
    }

    file_path = DispatcherJobState.create_p_value_file(p_value=5)
    # context manager guarantees the file is closed even if ask() raises
    with open(file_path) as list_file:
        scw_list_content = list_file.read()

    jdata = ask(server,
                params,
                expected_query_status=["done"],
                max_time_s=150,
                method='post',
                files={"user_scw_list_file": scw_list_content})

    assert 'p_list' in jdata['products']['analysis_parameters']
    # the original 'use_scws' request parameter must not be echoed back
    assert 'use_scws' not in jdata['products']['analysis_parameters']
    assert jdata['products']['analysis_parameters']['p_list'] == ['5']

    # test job_id: it must equal the hash of the restricted parameter dict
    job_id = jdata['products']['job_id']
    params.pop('use_scws', None)
    # adapting some values to string
    for k, v in params.items():
        params[k] = str(v)
    restricted_par_dic = InstrumentQueryBackEnd.restricted_par_dic({
        **params,
        "p_list": ["5"],
        "sub": "*****@*****.**"
    })
    calculated_job_id = make_hash(restricted_par_dic)
    assert job_id == calculated_job_id
def test_value_range(dispatcher_long_living_fixture):
    """A p value inside the allowed range completes; an out-of-range value
    must be rejected with a 400 and an explanatory error message."""
    server = dispatcher_long_living_fixture
    logger.info("constructed server: %s", server)

    # valid token with the roles needed for the numerical product
    token_payload = {
        **default_token_payload,
        "roles": "unige-hpc-full, general",
    }
    encoded_token = jwt.encode(token_payload, secret_key, algorithm='HS256')

    for is_ok, p in [(True, 10), (False, 1000)]:
        params = {
            **default_params,
            'p': p,
            'product_type': 'numerical',
            'query_type': "Dummy",
            'instrument': 'empty',
            'token': encoded_token
        }

        # expectations differ depending on whether p is within range
        if is_ok:
            expectations = dict(expected_query_status='done',
                                expected_job_status='done',
                                expected_status_code=200)
        else:
            expectations = dict(expected_query_status=None,
                                expected_job_status=None,
                                expected_status_code=400)

        logger.info("constructed server: %s", server)
        jdata = ask(server,
                    params,
                    max_time_s=50,
                    **expectations)

        logger.info(list(jdata.keys()))
        logger.info(jdata)

        if not is_ok:
            assert jdata['error_message'] == 'p value is restricted to 800 W'
def test_invalid_token(dispatcher_live_fixture):
    """An expired token must be rejected with a 403 and a dedicated error
    message, must not leak session/job information, and must not create
    any new scratch directory."""
    server = dispatcher_live_fixture
    logger.info("constructed server: %s", server)

    # token that expired 500 seconds ago
    expired_payload = {**default_token_payload, "exp": int(time.time()) - 500}
    encoded_token = jwt.encode(expired_payload, secret_key, algorithm='HS256')

    params = {
        **default_params,
        'product_type': 'dummy',
        'query_type': "Dummy",
        'instrument': 'empty',
        'token': encoded_token
    }

    # snapshot of the scratch folders before issuing the request
    n_scratch_dirs_before = len(glob.glob('scratch_*'))

    jdata = ask(server,
                params,
                max_time_s=50,
                expected_query_status=None,
                expected_status_code=403)
    assert jdata['error_message'] == 'the token provided is expired, please try to logout and login again'
    logger.info("Json output content")
    logger.info(json.dumps(jdata, indent=4))

    # certain output information should not even be returned
    assert 'session_id' not in jdata
    assert 'job_monitor' not in jdata

    # a rejected request must not have created any scratch folder
    assert n_scratch_dirs_before == len(glob.glob('scratch_*'))
def test_empty_instrument_request(dispatcher_live_fixture):
    """A public (token-less) dummy request to the 'empty' instrument must
    complete with an empty exit status."""
    server = dispatcher_live_fixture
    print("constructed server:", server)

    jdata = ask(
        server,
        {
            **default_params,
            'product_type': 'dummy',
            'query_type': "Dummy",
            'instrument': 'empty',
        },
        expected_query_status=["done"],
        max_time_s=50,
    )

    logger.info("Json output content")
    logger.info(json.dumps(jdata, indent=4))

    for field in ("debug_message", "error_message", "message"):
        assert jdata["exit_status"][field] == ""
def test_user_catalog(dispatcher_live_fixture):
    """A user-supplied catalog must be echoed back verbatim in the analysis
    parameters and be part of the restricted parameter dictionary hashed
    into the job id."""
    server = dispatcher_live_fixture
    logger.info("constructed server: %s", server)

    # valid token with the required roles
    token_payload = {
        **default_token_payload,
        "roles": "unige-hpc-full, general",
    }
    encoded_token = jwt.encode(token_payload, secret_key, algorithm='HS256')

    # key order matters: json.dumps must serialize identically on both sides
    selected_catalog_dict = {
        "cat_lon_name": "ra",
        "cat_lat_name": "dec",
        "cat_frame": "fk5",
        "cat_coord_units": "deg",
        "cat_column_list": [[1], ["Test A"], [6], [5], [4], [3], [2], [1], [0]],
        "cat_column_names": [
            "meta_ID", "src_names", "significance", "ra", "dec",
            "NEW_SOURCE", "ISGRI_FLAG", "FLAG", "ERR_RAD"
        ],
        "cat_column_descr": [
            ["meta_ID", "<i8"], ["src_names", "<U6"],
            ["significance", "<i8"], ["ra", "<f8"], ["dec", "<f8"],
            ["NEW_SOURCE", "<i8"], ["ISGRI_FLAG", "<i8"],
            ["FLAG", "<i8"], ["ERR_RAD", "<i8"]
        ],
    }

    params = {
        **default_params,
        'product_type': 'dummy',
        'query_type': "Dummy",
        'instrument': 'empty',
        'selected_catalog': json.dumps(selected_catalog_dict),
        'token': encoded_token
    }

    jdata = ask(server,
                params,
                expected_query_status=["done"],
                max_time_s=150,
                method='post')

    analysis_parameters = jdata['products']['analysis_parameters']
    assert 'selected_catalog' in analysis_parameters
    assert analysis_parameters['selected_catalog'] == json.dumps(selected_catalog_dict)

    # test job_id: it must equal the hash of the restricted parameter dict
    job_id = jdata['products']['job_id']
    # adapting some values to string
    stringified_params = {k: str(v) for k, v in params.items()}
    restricted_par_dic = InstrumentQueryBackEnd.restricted_par_dic({
        **stringified_params,
        "sub": "*****@*****.**"
    })
    calculated_job_id = make_hash(restricted_par_dic)
    assert job_id == calculated_job_id
def test_consistency_parameters_json_dump_file(dispatcher_live_fixture, request_cred):
    """Re-submitting with the same session_id/job_id (simulating the Fit
    button) must not change the analysis_parameters.json that the first
    request dumped into the scratch folder.

    Fix: the original loaded the dump with ``json.load(open(...))`` four
    times, never closing the file handles; loading now goes through a
    helper that uses a ``with`` block.
    """
    DispatcherJobState.remove_scratch_folders()
    server = dispatcher_live_fixture
    logger.info("constructed server: %s", server)

    if request_cred == 'public':
        encoded_token = None
    else:
        token_payload = {**default_token_payload, "sub": "*****@*****.**"}
        encoded_token = jwt.encode(token_payload, secret_key, algorithm='HS256')

    # first request: creates the scratch folder and the parameters dump
    jdata = ask(
        server,
        {
            **default_params,
            'query_status': "new",
            'product_type': 'dummy',
            'query_type': "Dummy",
            'instrument': 'empty',
            'token': encoded_token
        },
        expected_query_status=["done"],
        max_time_s=50,
    )
    for field in ("debug_message", "error_message", "message"):
        assert jdata["exit_status"][field] == ""

    job_id = jdata['job_monitor']['job_id']
    session_id = jdata['session_id']

    # get the analysis_parameters json file
    analysis_parameters_json_fn = f'scratch_sid_{session_id}_jid_{job_id}/analysis_parameters.json'
    # the aliased version might have been created
    analysis_parameters_json_fn_aliased = f'scratch_sid_{session_id}_jid_{job_id}_aliased/analysis_parameters.json'
    assert os.path.exists(analysis_parameters_json_fn) or os.path.exists(
        analysis_parameters_json_fn_aliased)

    def _load_parameters_dump():
        # prefer the plain folder; fall back to the aliased one
        fn = analysis_parameters_json_fn
        if not os.path.exists(fn):
            fn = analysis_parameters_json_fn_aliased
        with open(fn) as f:
            return json.load(f)

    analysis_parameters_json_content_original = _load_parameters_dump()

    logger.info("starting query with the same session_id and job_id")

    # second call: different parameters but same job_id & session_id,
    # to simulate the Fit button
    ask(
        server,
        {
            **default_params,
            'xspec_model': 'powerlaw',
            'product_type': 'dummy',
            'query_type': "Dummy",
            'instrument': 'empty',
            'token': encoded_token,
            'session_id': session_id,
            'job_id': job_id,
            'query_status': "ready"
        },
        expected_query_status=["done"],
        max_time_s=50,
    )

    # the dump written by the first call must be untouched
    assert _load_parameters_dump() == analysis_parameters_json_content_original