def test_a_taxonomies_request_is_executed(self):
    """
    ...then verify that whole flow works as expected
    """
    response_json = {
        "success": True,
        "taxonomies": {
            "iptc": {
                "description": "IPTC Media Topics",
                "languages": [
                    {"description": "English", "name": "en"},
                    {"description": "German", "name": "de"},
                    {"description": "Spanish", "name": "es"},
                    {"description": "French", "name": "fr"},
                    {"description": "Italian", "name": "it"},
                ],
            }
        },
    }

    # fake a successful HTTP response carrying the taxonomies payload
    response = MagicMock()
    response.status_code = 200
    response.ok = True
    response.json.return_value = response_json
    self.patched_get.return_value = response

    client = ExpertAiClient()
    dm = client.iptc_taxonomies()
    self.assertEqual(
        dm.iptc.languages[0].get_language_by_description, "en"
    )


class ExpertAiClientTestCase(ExpertAiTestCase):
    def setUp(self):
        super().setUp()
        self.expert_client = ExpertAiClient()
        self.test_body = {"document": {"text": "text"}}
        self.test_endpoint_path = "endpoint/{language}/{resource}"

    @patch(
        "expertai.nlapi.v1.client.ExpertAiClient.get_method_name_for_endpoint")
    def test_a_request_is_created(self, patched_get_method_name_for_endpoint):
        """
        ...then the proper HTTP method should be set
        """
        def fake_get_method(self):
            return {url: "GET"}.get(url)

        url = self.endpoint_path
        expert_client = ExpertAiClient()
        patched_get_method_name_for_endpoint.side_effect = fake_get_method
        new_request = expert_client.create_request(self.endpoint_path)
        self.assertEqual(new_request.string_method, "GET")
        patched_get_method_name_for_endpoint.assert_called_once_with(
            self.endpoint_path)

    @patch("expertai.nlapi.v1.validate.ExpertAiValidation.check_parameters")
    def test_a_request_is_verified(self, patched_check_parameters):
        """
        ...then check_parameters method should be called
        """
        self.expert_client.verify_request(
            endpoint_path="path/{language}", params={"language": "en"})
        patched_check_parameters.assert_called_once_with(
            params={"language": "en"})

    @patch("expertai.nlapi.v1.validate.ExpertAiValidation.check_parameters")
    def test_parameters_are_not_required(self, patched_check_parameters):
        """
        ...then the check_parameters method should not be called
        """
        self.expert_client.verify_request(endpoint_path="/path", params=None)
        patched_check_parameters.assert_not_called()

    def test_required_parameters_are_not_provided(self):
        """
        ...then an error should be raised, indicating which parameter
        is missing
        """
        self.assertRaises(
            MissingParametersError,
            self.expert_client.verify_request,
            endpoint_path="path/{lang}",
        )

    def test_a_parameterized_urlpath(self):
        """
        ...then keywords should be extracted
        """
        self.assertEqual(
            self.expert_client.urlpath_keywords("path/{language}/{resource}"),
            ["language", "resource"],
        )

    @patch("expertai.nlapi.v1.client.ExpertAiClient.verify_request")
    def test_create_request_method_is_called(self, patched_verify_request):
        """
        ...then the verify_request() should also be invoked with the
        correct arguments
        """
        expert_client = ExpertAiClient()
        expert_client.create_request(
            endpoint_path="resource_urlpath",
            params={"language": "en"},
            body={"text": "text"},
        )
        patched_verify_request.assert_called_with(
            "resource_urlpath",
            params={"language": "en"},
        )

    @patch("expertai.nlapi.v1.client.ObjectMapper")
    def test_a_bad_request_is_received(self, patched_object_mapper):
        """
        ...then the ObjectMapper should not be called
        """
        def response_json():
            return {
                "errors": [{
                    "code": "PREPARE_DOCUMENT_FAILED",
                    "message": "missing layout key in json",
                }],
                "success": False,
            }

        fake_response = MagicMock(
            status_code=constants.HTTP_SUCCESSFUL, json=response_json)
        expert_client = ExpertAiClient()
        expert_client.process_response(fake_response)
        patched_object_mapper.assert_not_called()


def test_a_full_analysis_request_is_executed(self):
    """
    ...then verify that whole flow works as expected
    """
    response_json = {
        "success": True,
        "data": {
            "content": "Facebook is looking at buying U.S. startup for $6 million",
            "language": "en",
            "version": "sensei: 3.1.0; disambiguator: 15.0-QNTX-2016",
            "knowledge": [{
                "label": "organization.company",
                "properties": [{"type": "WikiDataId", "value": "Q380"}],
                "syncon": 288110,
            }],
            "phrases": [
                {"tokens": [0], "type": "PP", "start": 54, "end": 65},
            ],
            "tokens": [{
                "syncon": 62653,
                "start": 74,
                "end": 83,
                "type": "NOU",
                "lemma": "long time",
                "pos": "NOUN",
                "dependency": {"id": 11, "head": 7, "label": "nmod"},
                "morphology": "Number=Sing",
                "paragraph": 0,
                "sentence": 0,
                "phrase": 4,
                "atoms": [
                    {
                        "start": 74,
                        "end": 78,
                        "type": "ADJ",
                        "lemma": "long",
                    },
                ],
            }],
            "mainSentences": [],
            "mainPhrases": [],
            "mainLemmas": [],
            "mainSyncons": [],
            "entities": [],
            "topics": [],
            "sentences": [{"phrases": [0], "start": 0, "end": 100}],
            "paragraphs": [],
        },
    }

    response = MagicMock(text="e@i")
    response.status_code = 200
    response.json.return_value = response_json
    self.patched_post.return_value = response

    client = ExpertAiClient()
    request_body = {"document": {"text": "text"}}
    data_model = client.full_analysis(
        body=request_body, params={"language": "es"})

    # two POST requests are made, one for the token and one for analysis
    self.assertEqual(self.patched_post.call_count, 2)
    self.assertEqual(data_model.sentences[0].phrases[0].type_.key, "PP")
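

# ---------------------------------------------------------------------------
# NOTE: the tests above inherit from an ExpertAiTestCase base class defined
# elsewhere in the test suite. The class below is a hypothetical stand-in,
# sketched only to illustrate the fixtures those tests assume
# (self.patched_get, self.patched_post, self.endpoint_path); the real base
# class may patch different targets or perform additional setup.
# ---------------------------------------------------------------------------
from unittest import TestCase
from unittest.mock import patch


class _ExampleExpertAiTestCase(TestCase):
    """Illustrative stand-in for the ExpertAiTestCase fixture (assumption)."""

    def setUp(self):
        super().setUp()
        # endpoint template referenced by the request-creation tests
        self.endpoint_path = "endpoint/{language}/{resource}"
        # stub out the HTTP layer so no real network calls are made
        get_patcher = patch("requests.get")
        post_patcher = patch("requests.post")
        self.patched_get = get_patcher.start()
        self.patched_post = post_patcher.start()
        self.addCleanup(get_patcher.stop)
        self.addCleanup(post_patcher.stop)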