def test_get_api_with_proxy_config(self):
    """Build an InstrumentsApi using proxy settings passed directly as factory kwargs.

    The secrets file carries only the non-proxy api settings; the proxy
    url/username/password come from the source config as parameters.
    """
    # Skip before doing any work if no proxy is configured
    # (also: the original skip message was an f-string with no placeholders).
    if source_config_details.get("proxy_address") is None:
        self.skipTest("missing proxy configuration")

    secrets = {
        "api": {
            config_keys[key]["config"]: value
            for key, value in source_config_details.items()
            if value is not None and "proxy" not in key
        }
    }
    secrets["api"].pop("clientCertificate", None)

    secrets_file = TempFileManager.create_temp_file(secrets)
    # Load the config with a cleared environment so only the file + kwargs apply
    with patch.dict('os.environ', {}, clear=True):
        factory = ApiClientFactory(
            api_secrets_filename=secrets_file.name,
            proxy_url=source_config_details["proxy_address"],
            proxy_username=source_config_details["proxy_username"],
            proxy_password=source_config_details["proxy_password"])
        # Close and thus delete the temporary file
        TempFileManager.delete_temp_file(secrets_file)
        api = factory.build(InstrumentsApi)
        self.validate_api(api)
def test_get_api_with_configuration(self):
    """A factory configured from the secrets file yields a working InstrumentsApi."""
    client_factory = ApiClientFactory(
        api_secrets_filename=CredentialsSource.secrets_path())
    instruments_api = client_factory.build(InstrumentsApi)
    self.assertIsInstance(instruments_api, InstrumentsApi)
    self.validate_api(instruments_api)
def test_get_api_with_proxy_file(self):
    """Build an InstrumentsApi with proxy settings supplied via the secrets file.

    The source config is split into an "api" section (non-proxy keys) and a
    "proxy" section (proxy keys) before being written to a temp secrets file.
    """
    secrets = {
        "api": {
            config_keys[key]["config"]: value
            for key, value in source_config_details.items()
            if value is not None and "proxy" not in key
        },
        "proxy": {
            config_keys[key]["config"]: value
            for key, value in source_config_details.items()
            if value is not None and "proxy" in key
        }
    }
    secrets["api"].pop("clientCertificate", None)

    # The original skip message was an f-string with no placeholders.
    if secrets["proxy"].get("address") is None:
        self.skipTest("missing proxy configuration")

    secrets_file = TempFileManager.create_temp_file(secrets)
    # Load the config
    factory = ApiClientFactory(api_secrets_filename=secrets_file.name)
    # Close and thus delete the temporary file
    TempFileManager.delete_temp_file(secrets_file)
    api = factory.build(InstrumentsApi)
    self.validate_api(api)
def test_get_unknown_api_throws_exception(self, _, api_to_build, error_message):
    """Parameterised: building an unrecognised api type raises TypeError
    with the expected message."""
    client_factory = ApiClientFactory(
        api_secrets_filename=CredentialsSource.secrets_path())
    with self.assertRaises(TypeError) as raised:
        client_factory.build(api_to_build)
    self.assertEqual(raised.exception.args[0], error_message)
def test_good_env_pat_but_no_param_pat(self):
    """A PAT supplied only via environment variables is enough to build a working api."""
    pat_environment = self.get_pat_env_var()
    with patch.dict(self.os_environ_dict_str, pat_environment, clear=True):
        instruments_api = ApiClientFactory().build(InstrumentsApi)
        self.assertIsInstance(instruments_api, InstrumentsApi)
        self.validate_api(instruments_api)
def test_get_api_with_token_url_as_env_var(self):
    """The api url can come from FBN_LUSID_API_URL while the token is passed directly."""
    token, _refresh_token = tu.get_okta_tokens(CredentialsSource.secrets_path())
    url_only_env = {"FBN_LUSID_API_URL": source_config_details["api_url"]}
    with patch.dict('os.environ', url_only_env, clear=True):
        client_factory = ApiClientFactory(
            token=token,
            app_name=source_config_details["app_name"])
        instruments_api = client_factory.build(InstrumentsApi)
        self.assertIsInstance(instruments_api, InstrumentsApi)
        self.validate_api(instruments_api)
def test_none_str_param_pat_but_good_secrets_envs(self):
    """A literal "None" token string falls back to the env-var secrets configuration."""
    with patch.dict(self.os_environ_dict_str, self.get_env_vars_without_pat(), clear=True):
        instruments_api = ApiClientFactory(token="None").build(InstrumentsApi)
        self.assertIsInstance(instruments_api, InstrumentsApi)
        self.validate_api(instruments_api)
def test_no_pat_but_good_secrets_file_as_param(self):
    """With no PAT anywhere, a secrets file passed as a parameter still works."""
    with patch.dict(self.os_environ_dict_str, self.get_env_vars_without_pat(), clear=True):
        instruments_api = ApiClientFactory(
            api_secrets_filename=self.secrets).build(InstrumentsApi)
        self.assertIsInstance(instruments_api, InstrumentsApi)
        self.validate_api(instruments_api)
def test_get_api_with_info(self):
    """call_info accepts a callable and the api call still returns a result."""
    scopes_api = ApiClientFactory(
        api_secrets_filename=CredentialsSource.secrets_path()).build(ScopesApi)
    self.assertIsInstance(scopes_api, ScopesApi)
    result = scopes_api.list_scopes(call_info=lambda r: print(r))
    self.assertIsNotNone(result)
def test_get_api_with_token(self):
    """An explicit okta token plus api url/app name builds a working api."""
    token, _refresh_token = tu.get_okta_tokens(CredentialsSource.secrets_path())
    client_factory = ApiClientFactory(
        token=token,
        api_url=source_config_details["api_url"],
        app_name=source_config_details["app_name"])
    instruments_api = client_factory.build(InstrumentsApi)
    self.assertIsInstance(instruments_api, InstrumentsApi)
    self.validate_api(instruments_api)
def test_no_env_pat_but_good_param_pat(self):
    """A PAT passed as the token parameter works when the env holds only the api url."""
    url_only_env = {"FBN_LUSID_API_URL": self.source_config_details["api_url"]}
    with patch.dict(self.os_environ_dict_str, url_only_env, clear=True):
        instruments_api = ApiClientFactory(token=self.pat_token).build(InstrumentsApi)
        self.assertIsInstance(instruments_api, InstrumentsApi)
        self.validate_api(instruments_api)
def test_get_api_with_str_none_token(self):
    """A RefreshingToken together with full secrets configuration builds a valid api.

    NOTE(review): the test name says "str none" token but the body passes
    RefreshingToken() — confirm the name matches the intended scenario.
    """
    client_factory = ApiClientFactory(
        token=RefreshingToken(),
        api_url=source_config_details["api_url"],
        app_name=source_config_details["app_name"],
        api_secrets_filename=CredentialsSource.secrets_path(),
    )
    instruments_api = client_factory.build(InstrumentsApi)
    self.assertIsInstance(instruments_api, InstrumentsApi)
    self.validate_api(instruments_api)
def test_wrapped_method(self):
    """A factory-built api carries the same metadata as one constructed directly
    from the same api client."""
    client_factory = ApiClientFactory(
        api_secrets_filename=CredentialsSource.secrets_path())
    wrapped_api = client_factory.build(InstrumentsApi)
    direct_api = InstrumentsApi(wrapped_api.api_client)
    self.assertEqual(direct_api.__doc__, wrapped_api.__doc__)
    self.assertEqual(direct_api.__module__, wrapped_api.__module__)
    self.assertDictEqual(direct_api.__dict__, wrapped_api.__dict__)
def test_get_info_with_invalid_param_throws_error(self):
    """Passing a non-callable call_info must raise ValueError with a fixed message."""
    instruments_api = ApiClientFactory(
        api_secrets_filename=CredentialsSource.secrets_path()).build(InstrumentsApi)
    self.assertIsInstance(instruments_api, InstrumentsApi)
    with self.assertRaises(ValueError) as raised:
        instruments_api.get_instrument_identifier_types(call_info="invalid param")
    self.assertEqual(raised.exception.args[0], "call_info value must be a lambda")
def test_api(self):
    """End-to-end smoke test: build a ScopesApi from secrets.json and list scopes."""
    # create an ApiFactory configured with the api credentials
    credentials_path = Path(__file__).parent.parent.joinpath('secrets.json')
    api_factory = ApiClientFactory(api_secrets_filename=credentials_path)
    # create an api and call a function on it
    scopes = api_factory.build(ScopesApi).list_scopes()
    # verify the call was successful
    self.assertGreater(len(scopes.values), 0)
def test_get_api_with_correlation_id_from_env_var(self):
    """FBN_CORRELATION_ID in the environment is sent as a CorrelationId header."""
    env_vars = {config_keys[key]["env"]: value
                for key, value in source_config_details.items()
                if value is not None}
    env_vars["FBN_CORRELATION_ID"] = "env-correlation-id"

    with patch.dict('os.environ', env_vars, clear=True):
        factory = ApiClientFactory()
        api = factory.build(InstrumentsApi)
        self.assertIsInstance(api, InstrumentsApi)
        self.validate_api(api)
        # assertIn gives a clearer failure than assertTrue(x in y)
        self.assertIn("CorrelationId", api.api_client.default_headers,
                      msg="CorrelationId not found in headers")
        # assertEquals is a deprecated alias (removed in Python 3.12)
        self.assertEqual(api.api_client.default_headers["CorrelationId"],
                         "env-correlation-id")
def main(argv):
    """Entry point: print every scope visible to the configured credentials."""
    # create an ApiFactory configured with the api credentials
    credentials_path = Path(__file__).parent.parent.joinpath('secrets.json')
    api_factory = ApiClientFactory(api_secrets_filename=credentials_path)
    # create an api and call a function on it
    scopes = api_factory.build(ScopesApi).list_scopes()
    for scope in scopes.values:
        print(scope)
def test_get_api_with_correlation_id_from_param(self):
    """A correlation_id factory parameter is sent as a CorrelationId header."""
    env_vars = {config_keys[key]["env"]: value
                for key, value in source_config_details.items()
                if value is not None}

    with patch.dict('os.environ', env_vars, clear=True):
        factory = ApiClientFactory(
            api_secrets_filename=CredentialsSource.secrets_path(),
            correlation_id="param-correlation-id"
        )
        api = factory.build(InstrumentsApi)
        self.assertIsInstance(api, InstrumentsApi)
        self.validate_api(api)
        # assertIn gives a clearer failure than assertTrue(x in y)
        self.assertIn("CorrelationId", api.api_client.default_headers,
                      msg="CorrelationId not found in headers")
        # assertEquals is a deprecated alias (removed in Python 3.12)
        self.assertEqual(api.api_client.default_headers["CorrelationId"],
                         "param-correlation-id")
def test_bad_pat_in_param_but_good_pat_in_env_vars(self):
    """An invalid PAT passed as the token parameter is expected to win over the
    valid one in the environment, producing a 401 from the api.

    NOTE(review): if no exception is raised the test passes silently; confirm
    whether that path should call self.fail() instead.
    """
    with patch.dict(self.os_environ_dict_str, self.get_pat_env_var(), clear=True):
        try:
            factory = ApiClientFactory(token="INVALID_TOKEN")
            api = factory.build(InstrumentsApi)
            api.get_instrument_identifier_types()
        except ApiException as e:
            # assertEquals is a deprecated alias (removed in Python 3.12)
            self.assertEqual(401, e.status)
def test_get_api_without_tcp_keep_alive(self):
    """By default tcp_keep_alive is off and a standard urllib3 pool manager is used."""
    api_factory = ApiClientFactory(
        api_secrets_filename=CredentialsSource.secrets_path())
    # Make sure tcp_keep_alive was passed through all of the layers
    self.assertFalse(api_factory.api_client.configuration.tcp_keep_alive)
    pool = api_factory.api_client.rest_client.pool_manager
    self.assertIsInstance(pool, (PoolManager, ProxyManager))
def load_instruments(args):
    """Load instruments from a delimited file and upsert them via the LUSID api.

    :param args: dict of CLI arguments (secrets_file, mapping, scope, dryrun, ...)
    :return: 0 on a dry run, otherwise the raw upsert response
    :raises ValueError: when property columns are mapped but no scope is given
    """
    file_type = "instruments"

    # create ApiFactory
    factory = ApiClientFactory(api_secrets_filename=args["secrets_file"])

    # get data
    if args["delimiter"]:
        logging.info(f"delimiter specified as {repr(args['delimiter'])}")
    logging.debug("Getting data")
    instruments = load_data_to_df_and_detect_delimiter(args)

    # get mappings
    mappings = load_json_file(args["mapping"])

    # properties can only be upserted into an explicit scope
    if "property_columns" in mappings[file_type].keys() and not args["scope"]:
        err = (
            r"properties must be upserted to a specified scope, but no scope was provided. "
            r"Please state what scope to upsert properties to using '-s'.")
        logging.error(err)
        raise ValueError(err)

    validate_mapping_file_structure(mappings, instruments.columns, file_type)

    if "cash_flag" in mappings.keys():
        instruments, mappings = identify_cash_items(
            instruments, mappings, file_type, True)

    if args["dryrun"]:
        logging.info(
            "--dryrun specified as True, exiting before upsert call is made")
        return 0

    instruments_response = load_from_data_frame(
        api_factory=factory,
        data_frame=instruments,
        scope=args["scope"],
        mapping_required=mappings[file_type]["required"],
        # dict.get for optional keys, consistent with load_quotes/load_holdings
        mapping_optional=mappings[file_type].get("optional", {}),
        file_type=file_type,
        identifier_mapping=mappings[file_type]["identifier_mapping"],
        batch_size=args["batch_size"],
        property_columns=mappings[file_type].get("property_columns", []),
    )

    succ, errors, failed = cocoon_printer.format_instruments_response(
        instruments_response)

    logging.info(f"number of successful upserts: {len(succ)}")
    logging.info(f"number of failed upserts : {len(failed)}")
    logging.info(f"number of errors : {len(errors)}")

    if args["display_response_head"]:
        logging.info(succ.head(40))
        logging.info(errors.head(40))
        logging.info(failed.head(40))

    return instruments_response
def load_quotes(args):
    """Load quotes from a delimited file and upsert them via the LUSID api.

    :param args: dict of CLI arguments (secrets_file, mapping, scope, dryrun, ...)
    :return: the quotes DataFrame on a dry run, otherwise the raw upsert response
    :raises ValueError: when property columns are mapped but no scope is given
    """
    file_type = "quotes"

    # create ApiFactory
    factory = ApiClientFactory(api_secrets_filename=args["secrets_file"])

    # get data
    if args["delimiter"]:
        logging.info(f"delimiter specified as {repr(args['delimiter'])}")
    logging.debug("Getting data")
    quotes = load_data_to_df_and_detect_delimiter(args)

    # get mappings
    mappings = load_json_file(args["mapping"])

    # check properties exist
    if "property_columns" in mappings[file_type].keys() and not args["scope"]:
        err = (
            r"properties must be upserted to a specified scope, but no scope was provided. "
            r"Please state what scope to upsert properties to using '-s'.")
        logging.error(err)
        raise ValueError(err)

    quotes, mappings = identify_cash_items(quotes, mappings, "quotes", True)
    validate_mapping_file_structure(mappings, quotes.columns, file_type)

    if "quote_scalar" in mappings[file_type].keys():
        quotes, mappings = scale_quote_of_type(quotes, mappings)

    if args["dryrun"]:
        return quotes

    quote_mapping = mappings[file_type]
    quotes_response = load_from_data_frame(
        api_factory=factory,
        data_frame=quotes,
        scope=args["scope"],
        properties_scope=args.get("property_scope", args["scope"]),
        identifier_mapping={},
        mapping_required=quote_mapping["required"],
        mapping_optional=quote_mapping.get("optional", {}),
        file_type=file_type,
        batch_size=args["batch_size"],
        property_columns=quote_mapping.get("property_columns", []),
    )

    succ, errors, failed = cocoon_printer.format_quotes_response(quotes_response)

    logging.info(f"number of successful upserts: {len(succ)}")
    logging.info(f"number of failed upserts : {len(failed)}")
    logging.info(f"number of errors : {len(errors)}")

    if args["display_response_head"]:
        logging.info(succ.head(40))
        logging.info(errors.head(40))
        logging.info(failed.head(40))

    return quotes_response
def test_use_apifactory_with_id_provider_response_handler(self):
    """
    Ensures that an id_provider_response handler that is passed to the
    ApiClientFactory can be used during communication with the id provider
    (if appropriate).
    """
    observed_status_codes = []

    # handler invoked by the factory with each id-provider response
    def record_response(id_provider_response):
        observed_status_codes.append(id_provider_response.status_code)

    api_factory = ApiClientFactory(
        api_secrets_filename=CredentialsSource.secrets_path(),
        id_provider_response_handler=record_response)
    self.validate_api(api_factory.build(InstrumentsApi))
    self.assertGreater(len(observed_status_codes), 0)
def test_use_apifactory_multiple_threads(self):
    """Three threads sharing one ApiClientFactory should cause exactly one
    POST to the identity provider, i.e. token acquisition is shared."""
    with patch.dict('os.environ', self.get_env_vars_without_pat(), clear=True):
        # Acquire a real access token up front so the mocked identity
        # endpoint below can hand back a syntactically valid token value.
        access_token = str(ApiClientFactory(
            api_secrets_filename=CredentialsSource.secrets_path()
        ).api_client.configuration.access_token)
        # The factory under test, shared by all three threads.
        api_factory = ApiClientFactory(
            api_secrets_filename=CredentialsSource.secrets_path()
        )

        def get_identifier_types(factory):
            # Each thread builds an api from the shared factory and makes a call.
            return factory.build(InstrumentsApi).get_instrument_identifier_types()

        thread1 = Thread(target=get_identifier_types, args=[api_factory])
        thread2 = Thread(target=get_identifier_types, args=[api_factory])
        thread3 = Thread(target=get_identifier_types, args=[api_factory])

        with patch("requests.post") as identity_mock:
            # Every POST to the id provider returns the same canned token response.
            identity_mock.side_effect = lambda *args, **kwargs: MockApiResponse(
                json_data={
                    "access_token": f"{access_token}",
                    "refresh_token": "mock_refresh_token",
                    "expires_in": 3600
                },
                status_code=200
            )

            thread1.start()
            thread2.start()
            thread3.start()

            thread1.join()
            thread2.join()
            thread3.join()

        # Ensure that we only got an access token once
        self.assertEqual(1, identity_mock.call_count)
def load_transactions(args):
    """Load transactions from a delimited file and upsert them via the LUSID api.

    :param args: dict of CLI arguments (secrets_file, mapping, scope, dryrun, ...)
    :return: 0 on a dry run, otherwise the raw upsert response
    """
    file_type = "transactions"

    factory = ApiClientFactory(api_secrets_filename=args["secrets_file"])

    if args["delimiter"]:
        logging.info(f"delimiter specified as {repr(args['delimiter'])}")
    logging.debug("Getting data")
    transactions = load_data_to_df_and_detect_delimiter(args)

    mappings = load_json_file(args["mapping"])

    if "cash_flag" in mappings.keys():
        # NOTE(review): the return value is discarded here, unlike the other
        # loaders which rebind (df, mappings) — confirm this is intentional.
        identify_cash_items(transactions, mappings, file_type)

    validate_mapping_file_structure(mappings, transactions.columns, file_type)

    if args["dryrun"]:
        logging.info("--dryrun specified as True, exiting before upsert call is made")
        return 0

    transactions_response = load_from_data_frame(
        api_factory=factory,
        data_frame=transactions,
        scope=args["scope"],
        identifier_mapping=mappings[file_type]["identifier_mapping"],
        mapping_required=mappings[file_type]["required"],
        # dict.get for optional keys, consistent with load_quotes/load_holdings
        mapping_optional=mappings[file_type].get("optional", {}),
        file_type=file_type,
        batch_size=args["batch_size"],
        property_columns=mappings[file_type].get("property_columns", []),
    )

    succ, errors = cocoon_printer.format_transactions_response(transactions_response)

    logging.info(f"number of successful upserts: {len(succ)}")
    logging.info(f"number of errors : {len(errors)}")

    if args["display_response_head"]:
        logging.info(succ.head(40))
        logging.info(errors.head(40))

    return transactions_response
def load_holdings(args):
    """Load holdings from a delimited file and upsert them via the LUSID api.

    :param args: dict of CLI arguments (secrets_file, mapping, scope, dryrun, ...)
    :return: 0 on a dry run, otherwise the raw upsert response
    """
    file_type = "holdings"

    factory = ApiClientFactory(api_secrets_filename=args["secrets_file"])

    if args["delimiter"]:
        logging.info(f"delimiter specified as {repr(args['delimiter'])}")
    logging.debug("Getting data")
    holdings = load_data_to_df_and_detect_delimiter(args)

    mappings = load_json_file(args["mapping"])
    if "cash_flag" in mappings.keys():
        holdings, mappings = identify_cash_items(holdings, mappings, file_type)

    validate_mapping_file_structure(mappings, holdings.columns, file_type)

    if args["dryrun"]:
        logging.info(
            "--dryrun specified as True, exiting before upsert call is made")
        return 0

    holdings_mapping = mappings[file_type]
    holdings_response = load_from_data_frame(
        api_factory=factory,
        data_frame=holdings,
        scope=args["scope"],
        properties_scope=args.get("property_scope", args["scope"]),
        identifier_mapping=holdings_mapping["identifier_mapping"],
        mapping_required=holdings_mapping["required"],
        mapping_optional=holdings_mapping.get("optional", {}),
        file_type=file_type,
        batch_size=args["batch_size"],
        property_columns=holdings_mapping.get("property_columns", []),
        sub_holding_keys=holdings_mapping.get("sub_holding_keys", []),
    )

    succ, errors = cocoon_printer.format_holdings_response(holdings_response)

    logging.info(f"number of successful upserts: {len(succ)}")
    logging.info(f"number of errors : {len(errors)}")

    if args["display_response_head"]:
        logging.info(succ.head(40))
        logging.info(errors.head(40))

    return holdings_response
def test_bad_secrets_file_in_param_but_good_pat_in_env_vars(self):
    """A missing secrets file is logged at DEBUG while the factory falls back
    to the PAT held in environment variables."""
    all_env_vars = self.get_env_vars_without_pat()
    all_env_vars["FBN_LUSID_ACCESS_TOKEN"] = self.pat_token

    with patch.dict(self.os_environ_dict_str, all_env_vars, clear=True):
        with self.assertLogs() as context_manager:
            import logging
            logger = logging.getLogger()
            logger.setLevel(logging.DEBUG)
            ApiClientFactory(
                api_secrets_filename="secrets_file_not_exist.json")
            # plain string: the original was an f-string with no placeholders
            self.assertEqual(
                "DEBUG:root:Provided secrets file of secrets_file_not_exist.json can not be found, please ensure you have correctly specified the full path to the file or don't provide a secrets file to use environment variables instead.",
                context_manager.output[1],
            )
from pathlib import Path

from lusid.utilities import ApiClientFactory

# Resolve the secrets file relative to this script's grandparent directory.
secrets_file = Path(__file__).parent.parent.joinpath("secrets.json")

# Module-level factory shared by the rest of this script; the api_url
# parameter here takes precedence over any url held in the secrets file
# (assumed from the explicit override — TODO confirm against factory docs).
api_factory = ApiClientFactory(api_secrets_filename=secrets_file,
                               api_url="https://fbn-ci.lusid.com/api",
                               app_name="PerformanceDataSetPersistence")
def setUpClass(cls):
    """Install MockApi on the lusid.api module and build a shared factory for the tests."""
    # add mock to the module
    lusid.api.MockApi = MockApi
    # shared factory reused by every test in this class
    cls.factory = ApiClientFactory(api_secrets_filename=CredentialsSource.secrets_path())