def test_send_events(self):
    """Send a batch of two identical click events and expect HTTP 200 / OK."""
    event = {
        "eventType": "click",
        "eventName": "foo",
        "index": self.index.name,
        "userToken": "bar",
        "objectIDs": ["one", "two"],
        "timestamp": F.two_days_ago_timestamp(),
    }
    # The original sent two identical event payloads; dict(event) keeps
    # them as two distinct-but-equal objects.
    response = self.client.send_events([event, dict(event)])

    # assertEqual reports both values on failure, unlike assertTrue(x == y).
    self.assertEqual(response['status'], 200)
    self.assertEqual(response['message'], 'OK')
def test_cross_app_copy_index(self):
    """Copy an index (records, rules, synonyms, settings) to another app."""
    rule = F.rule(object_id='one')
    synonym = F.synonym(object_id='one')
    responses = [
        self.index.save_object({'objectID': 'one'}),
        self.index.save_rule(rule),
        self.index.save_synonym(synonym),
        self.index.set_settings({'searchableAttributes': ['objectID']})
    ]
    MultipleResponse(responses).wait()

    AccountClient.copy_index(self.index, self.index2).wait()

    # Assert objects got copied
    res = self.index2.search('')
    self.assertEqual(len(res['hits']), 1)
    self.assertEqual(res['hits'][0], {'objectID': 'one'})

    # Assert settings got copied
    settings = self.index2.get_settings()
    self.assertEqual(settings['searchableAttributes'], ['objectID'])

    # Assert rules got copied (comment fixed: this checks rules, not synonyms)
    self.assertEqual(self.index2.get_rule('one'), rule)

    # Assert synonyms got copied
    self.assertEqual(self.index2.get_synonym('one'), synonym)

    # Assert that copying again fails because the destination already exists
    with self.assertRaises(AlgoliaException) as _:
        AccountClient.copy_index(self.index, self.index2)
def test_can_not_copy_index_from_same_account(self):
    """AccountClient must refuse to copy between indices of the same app."""
    source = F.index('foo')
    destination = F.index('bar')

    with self.assertRaises(AlgoliaException) as _:
        AccountClient.copy_index(source, destination)
def setUp(self):
    """Build a retryable analytics client plus two scratch indices."""
    search_client = F.search_client()
    transient_errors = ['Too Many Requests', 'Index does not exist']
    self.client = RetryableClient(F.analytics_client(), transient_errors)
    self.index = F.index(search_client, self._testMethodName)
    self.index2 = F.index(search_client, '{}2'.format(self._testMethodName))  # noqa: E501
def test_can_not_copy_index_from_same_account(self):
    """Copying two indices that belong to the same application must fail."""
    client = F.search_client()
    source = F.index(client, "foo")
    destination = F.index(client, "bar")

    with self.assertRaises(AlgoliaException) as _:
        AccountClient.copy_index(source, destination)
def setUp(self):
    """Keep both raw clients on self; wrap analytics in a retry helper."""
    self.search_client = F.search_client()
    self.analytics_client = F.analytics_client()
    self.client = RetryableClient(
        self.analytics_client,
        ["Too Many Requests", "Index does not exist"],
    )
    self.index = F.index(self.search_client, self._testMethodName)
    self.index2 = F.index(self.search_client, "{}2".format(self._testMethodName))  # noqa: E501
def test_save_synonyms(self):
    """save_synonyms([]) is a waitable no-op; missing objectIDs are rejected."""
    # An empty batch must still return a response that can be waited on.
    self.index.save_synonyms([]).wait()

    # The batch endpoint validates every synonym has an objectID.
    with self.assertRaises(MissingObjectIdException) as _:
        self.index.save_synonyms([F.synonym(object_id=False)])

    # The single-synonym endpoint performs the same validation.
    with self.assertRaises(MissingObjectIdException) as _:
        self.index.save_synonym(F.synonym(object_id=False))
def test_secured_api_keys(self):
    """A secured key restricted to one index must not search another."""
    hosts = F.hosts(self.client.app_id)

    # Admin-ish client used to seed both indices.
    admin_config = SearchConfig(F.get_app_id(), F.get_api_key())
    admin_config.hosts = hosts
    admin_client = F.decide(SearchClient.create_with_config(admin_config))
    index1 = F.index(admin_client, self._testMethodName)
    index2 = F.index(admin_client, '{}_dev'.format(self._testMethodName))
    index1.save_object({'objectID': 'one'}).wait()
    index2.save_object({'objectID': 'one'}).wait()

    api_key = self.client.generate_secured_api_key(
        os.environ['ALGOLIA_SEARCH_KEY_1'],
        {
            "validUntil": int(round(time.time())) + (60 * 10),  # + 10 min
            "restrictIndices": index1.name
        })

    # Client authenticated with the restricted key.
    restricted_config = SearchConfig(F.get_app_id(), api_key)
    restricted_config.hosts = hosts
    restricted = F.decide(SearchClient.create_with_config(restricted_config))

    # Allowed index works; the other one is rejected by the API.
    restricted.init_index(index1.name).search('')
    with self.assertRaises(RequestException) as _:
        restricted.init_index(index2.name).search('')
def test_settings(self):
    """Settings updates merge: setting one key must not clobber another."""
    self.index.save_object(F.obj()).wait()

    self.index.set_settings({'searchableAttributes': ['name']}).wait()
    self.assertEqual(
        self.index.get_settings()['searchableAttributes'], ['name'])

    self.index.set_settings({'typoTolerance': 'min'}).wait()
    self.assertEqual(self.index.get_settings()['typoTolerance'], 'min')

    # Round-tripping the full settings object must preserve both values.
    self.index.set_settings(self.index.get_settings()).wait()
    settings = self.index.get_settings()
    self.assertEqual(settings['typoTolerance'], 'min')
    self.assertEqual(settings['searchableAttributes'], ['name'])
def test_compounds_dictionaries(self):
    """Save, search and delete a single 'compounds' dictionary entry."""
    client = F.search_client2()
    entry_id = "".join(
        random.choice(string.ascii_letters) for _ in range(10))

    # The random id must not exist yet.
    self.assertEqual(
        client.search_dictionary_entries("compounds", entry_id)["nbHits"],
        0)

    entry = {
        "objectID": entry_id,
        "language": "de",
        "word": "kopfschmerztablette",
        "decomposition": ["kopf", "schmerz", "tablette"],
    }
    client.save_dictionary_entries("compounds", [entry]).wait()

    found = client.search_dictionary_entries("compounds", entry_id)
    self.assertEqual(found["nbHits"], 1)
    hit = found["hits"][0]
    self.assertEqual(hit["objectID"], entry["objectID"])
    self.assertEqual(hit["word"], entry["word"])
    self.assertEqual(hit["decomposition"], entry["decomposition"])

    # Deleting the entry makes it unfindable again.
    client.delete_dictionary_entries("compounds", [entry_id]).wait()
    self.assertEqual(
        client.search_dictionary_entries("compounds", entry_id)["nbHits"],
        0)
def test_async_session(self):
    """close() must close the aiohttp session behind the async transporter."""
    client = SearchClient.create(Factory.get_app_id(), Factory.get_api_key())

    import asyncio
    loop = asyncio.get_event_loop()

    result = loop.run_until_complete(
        asyncio.gather(client.list_api_keys_async()))
    self.assertIsInstance(result, list)

    loop.run_until_complete(asyncio.gather(client.close()))
    self.assertTrue(client._transporter_async._requester._session.closed)
def test_browse_rules(self):
    """browse_rules must page until nbPages and yield every rule once."""

    def fake_send(req, **kwargs):
        # Pages 0-2 return 1000 hits, the last page (3) only 800,
        # for a total of 3800 rules.
        hits = [{"objectID": i, "_highlightResult": None}
                for i in range(0, 1000)]
        page = json.loads(req.body)["page"]
        if page == 3:
            hits = hits[0:800]
        response = Response()
        response.status_code = 200
        response._content = str.encode(json.dumps({
            "hits": hits,
            "nbHits": 3800,
            "page": page,
            "nbPages": 3,
        }))
        return response

    client = SearchClient.create("foo", "bar")
    session = requests.Session()
    client._transporter._requester._session = session
    session.send = mock.MagicMock(name="send")
    session.send.side_effect = fake_send

    index = F.index(client, "test")
    total = len(list(index.browse_rules()))
    self.assertEqual(total, 3800)
def test_settings(self):
    """Settings updates merge, and indexLanguages round-trips as well."""
    self.index.save_object(F.obj()).wait()

    self.index.set_settings({'searchableAttributes': ['name']}).wait()
    self.assertEqual(
        self.index.get_settings()['searchableAttributes'], ['name'])

    self.index.set_settings({'typoTolerance': 'min'}).wait()
    self.assertEqual(self.index.get_settings()['typoTolerance'], 'min')

    # Writing back the full settings object keeps earlier values intact.
    self.index.set_settings(self.index.get_settings()).wait()
    settings = self.index.get_settings()
    self.assertEqual(settings['typoTolerance'], 'min')
    self.assertEqual(settings['searchableAttributes'], ['name'])

    self.index.set_settings({'indexLanguages': ['ja']}).wait()
    self.assertEqual(self.index.get_settings()['indexLanguages'], ['ja'])
def test_secured_api_keys(self):
    """A key restricted to one index must be rejected on a second index."""
    self.index2 = F.index('{}_dev'.format(self._testMethodName))
    self.index.save_object({'objectID': 'one'}).wait()
    self.index2.save_object({'objectID': 'one'}).wait()

    ten_minutes = 60 * 10
    api_key = self.client.generate_secured_api_key(
        os.environ['ALGOLIA_SEARCH_KEY_1'],
        {
            "validUntil": int(round(time.time())) + ten_minutes,
            "restrictIndices": self.index.name
        })

    # Searching the allowed index succeeds.
    F.search_client(api_key=api_key).init_index(self.index.name).search('')

    # Searching the restricted-out index raises.
    with self.assertRaises(RequestException) as _:
        F.search_client(api_key=api_key).init_index(
            self.index2.name).search('')
def test_dns_timeout(self):
    """An unresolvable first host is marked down; the fallbacks stay up."""
    config = SearchConfig(F.get_app_id(), F.get_api_key())
    app_id = F.get_app_id()
    config.hosts = HostsCollection([
        # algolia.biz does not resolve: forces a DNS failure on host 0.
        Host('algolia.biz', 10),
        Host('{}-1.algolianet.com'.format(app_id)),
        Host('{}-2.algolianet.com'.format(app_id)),
        Host('{}-3.algolianet.com'.format(app_id)),
    ])

    client = SearchClient.create_with_config(config)
    client.list_indices()

    hosts = config.hosts.read()
    # Only the unreachable first host should be flagged as down.
    self.assertFalse(hosts[0].up)
    self.assertTrue(hosts[1].up)
    self.assertTrue(hosts[2].up)
    self.assertTrue(hosts[3].up)
def test_secured_api_keys(self):
    """restrictIndices limits a generated key to a single index."""
    self.index2 = F.index('{}_dev'.format(self._testMethodName))
    self.index.save_object({'objectID': 'one'}).wait()
    self.index2.save_object({'objectID': 'one'}).wait()

    restrictions = {
        "validUntil": int(round(time.time())) + (60 * 10),  # + 10 min
        "restrictIndices": self.index.name,
    }
    api_key = self.client.generate_secured_api_key(
        os.environ['ALGOLIA_SEARCH_KEY_1'], restrictions)

    # Allowed index: the search goes through.
    F.search_client(api_key=api_key).init_index(self.index.name).search('')

    # Restricted index: the API refuses the key.
    with self.assertRaises(RequestException) as _:
        F.search_client(api_key=api_key).init_index(
            self.index2.name).search('')
def test_stopwords_dictionaries(self):
    """CRUD, replace and settings round-trip for the stopwords dictionary."""
    client = F.search_client2()
    entry_id = "".join(
        random.choice(string.ascii_letters) for _ in range(10))

    self.assertEqual(
        client.search_dictionary_entries("stopwords", entry_id)["nbHits"],
        0)

    entry = {"objectID": entry_id, "language": "en", "word": "down"}
    client.save_dictionary_entries("stopwords", [entry]).wait()

    stopwords = client.search_dictionary_entries("stopwords", entry_id)
    self.assertEqual(stopwords["nbHits"], 1)
    self.assertEqual(stopwords["hits"][0]["objectID"], entry_id)
    self.assertEqual(stopwords["hits"][0]["word"], "down")

    client.delete_dictionary_entries("stopwords", [entry_id]).wait()
    self.assertEqual(
        client.search_dictionary_entries("stopwords", entry_id)["nbHits"],
        0)

    # Snapshot the current dictionary (minus the server-added "type"
    # field) so we can restore it with replace_dictionary_entries below.
    snapshot = client.search_dictionary_entries("stopwords", "")
    old_entries = []
    for hit in snapshot["hits"]:
        del hit["type"]
        old_entries.append(hit)

    client.save_dictionary_entries("stopwords", [entry]).wait()
    # NOTE(review): this re-asserts the `stopwords` response captured
    # earlier, not a fresh search — presumably intended as a fresh
    # lookup; kept as-is to preserve behavior.
    self.assertEqual(stopwords["nbHits"], 1)

    client.save_dictionary_entries("stopwords", [entry]).wait()
    self.assertEqual(
        client.search_dictionary_entries("stopwords", entry_id)["nbHits"],
        1)

    # replace_dictionary_entries restores the snapshot, dropping ours.
    client.replace_dictionary_entries("stopwords", old_entries).wait()
    self.assertEqual(
        client.search_dictionary_entries("stopwords", entry_id)["nbHits"],
        0)

    stopwords_settings = {
        "disableStandardEntries": {
            "stopwords": {"en": True, "fr": True}
        }
    }
    client.set_dictionary_settings(stopwords_settings).wait()
    self.assertEqual(client.get_dictionary_settings(), stopwords_settings)
def test_async_session(self):
    """Closing the client closes its async requester session."""
    client = SearchClient.create(
        Factory.get_app_id(), Factory.get_api_key())

    import asyncio
    loop = asyncio.get_event_loop()

    keys = loop.run_until_complete(
        asyncio.gather(client.list_api_keys_async()))
    self.assertIsInstance(keys, list)

    loop.run_until_complete(asyncio.gather(client.close()))
    self.assertTrue(
        client._transporter_async._requester._session.closed)
def test_replacing(self):
    """replace_all_* drops old objects/rules/synonyms and installs new ones."""
    responses = MultipleResponse()
    responses.push(self.index.save_object({"objectID": "one"}))
    responses.push(self.index.save_rule(F.rule(object_id="one")))
    responses.push(self.index.save_synonym(
        {"objectID": "one", "type": "synonym", "synonyms": ["one", "two"]}
    ))
    responses.wait()

    replacement_rule = {
        "objectID": "two",
        "condition": {"anchoring": "is", "pattern": "pattern"},
        "consequence": {
            "params": {
                "query": {
                    "edits": [
                        {"type": "remove", "delete": "pattern"}
                    ]
                }
            }
        }
    }
    responses.push(self.index.replace_all_objects([{"objectID": "two"}]))
    responses.push(self.index.replace_all_rules([replacement_rule]))
    responses.push(self.index.replace_all_synonyms([
        {"objectID": "two", "type": "synonym", "synonyms": ["one", "two"]}
    ]))
    responses.wait()

    # Record "one" is gone, "two" exists.
    with self.assertRaises(RequestException) as _:
        self.index.get_object('one')
    self.assertEqual(self.index.get_object('two')['objectID'], 'two')

    # Rule "one" is gone, "two" exists.
    with self.assertRaises(RequestException) as _:
        self.index.get_rule('one')
    self.assertEqual(self.index.get_rule('two')['objectID'], 'two')

    # Synonym "one" is gone, "two" exists.
    with self.assertRaises(RequestException) as _:
        self.index.get_synonym('one')
    self.assertEqual(self.index.get_synonym('two')['objectID'], 'two')
def test_cross_app_copy_index(self):
    """Copy records, rules, synonyms and settings to a second application."""
    rule = F.rule(object_id="one")
    synonym = F.synonym(object_id="one")
    responses = [
        self.index.save_object({"objectID": "one"}),
        self.index.save_rule(rule),
        self.index.save_synonym(synonym),
        self.index.set_settings({"searchableAttributes": ["objectID"]}),
    ]
    MultipleResponse(responses).wait()

    AccountClient.copy_index(self.index, self.index2).wait()

    # Assert objects got copied
    res = self.index2.search("")
    self.assertEqual(len(res["hits"]), 1)
    self.assertEqual(res["hits"][0], {"objectID": "one"})

    # Assert settings got copied
    settings = self.index2.get_settings()
    self.assertEqual(settings["searchableAttributes"], ["objectID"])

    # Assert rules got copied
    self.assertEqual(
        rule_without_metadata(self.index2.get_rule("one")), rule)

    # Assert synonyms got copied
    list_synonyms1 = [synonym for synonym in self.index.browse_synonyms()]
    list_synonyms2 = [synonym for synonym in self.index2.browse_synonyms()]
    self.assertEqual(list_synonyms1, list_synonyms2)

    # Assert synonyms are the same (typo "synomys" fixed)
    self.assertEqual(self.index2.get_synonym("one"), synonym)

    # Assert that copying again fails because index already exists
    with self.assertRaises(AlgoliaException) as _:
        AccountClient.copy_index(self.index, self.index2)
def test_replace_all_objects(self):
    """replace_all_objects issues copy -> batch -> move; safe adds 3 waits."""
    self.index._create_temporary_name = mock.Mock(
        name='_create_temporary_name')
    tmp_name = 'index-name_tmp_bar'
    self.index._create_temporary_name.return_value = tmp_name  # noqa: E501

    record = F.obj()
    self.index.replace_all_objects([record])

    # The three write calls: scoped copy, batch into the temp index,
    # then atomic move back over the original.
    self.transporter.write.assert_has_calls([
        mock.call(
            'POST',
            '1/indexes/index-name/operation',
            {'operation': 'copy', 'destination': 'index-name_tmp_bar'},
            {'scope': ['settings', 'synonyms', 'rules']},
        ),
        mock.call(
            'POST',
            '1/indexes/index-name_tmp_bar/batch',
            {'requests': [{'action': 'updateObject', 'body': record}]},
            None,
        ),
        mock.call(
            'POST',
            '1/indexes/index-name_tmp_bar/operation',
            {'operation': 'move', 'destination': 'index-name'},
            None,
        ),
    ])

    # Replace the transporter with counting mocks to check call volume.
    self.index._transporter.read = mock.Mock(name='read')
    self.index._transporter.read.return_value = {'status': 'published'}
    self.index._transporter.write = mock.Mock(name='write')
    self.index._transporter.write.return_value = {'taskID': 1}

    self.index.replace_all_objects([record])
    self.assertEqual(self.index._transporter.write.call_count, 3)

    self.index.replace_all_objects([record], {'safe': True})
    self.assertEqual(self.index._transporter.write.call_count, 6)  # 3+3
    self.assertEqual(self.index._transporter.read.call_count, 3)  # 3 waits
def test_send_event(self):
    """Send a single click event and expect HTTP 200 / OK."""
    response = self.client.send_event({
        "eventType": "click",
        "eventName": "foo",
        "index": self.index.name,
        "userToken": "bar",
        "objectIDs": ["one", "two"],
        "timestamp": F.two_days_ago_timestamp(),
    })

    # assertEqual reports both values on failure, unlike assertTrue(x == y).
    self.assertEqual(response["status"], 200)
    self.assertEqual(response["message"], "OK")
def test_replace_all_objects(self):
    """Safe mode waits on all three responses; default mode waits on none."""
    self.index._create_temporary_name = mock.Mock(
        name='_create_temporary_name')
    tmp_name = 'index-name_tmp_bar'
    self.index._create_temporary_name.return_value = tmp_name  # noqa: E501

    record = F.obj()
    self.index.replace_all_objects([record])

    # Expected sequence: scoped copy, batch into temp index, move back.
    self.transporter.write.assert_has_calls([
        mock.call('POST', '1/indexes/index-name/operation',
                  {'operation': 'copy',
                   'destination': 'index-name_tmp_bar'},
                  {'scope': ['settings', 'synonyms', 'rules']}),
        mock.call('POST', '1/indexes/index-name_tmp_bar/batch',
                  {'requests': [
                      {'action': 'updateObject', 'body': record}]},
                  None),
        mock.call('POST', '1/indexes/index-name_tmp_bar/operation',
                  {'operation': 'move', 'destination': 'index-name'},
                  None),
    ])

    # Stub the three sub-operations with a shared response whose wait()
    # call count tells us whether safe mode waited.
    response = NullResponse()
    response.wait = mock.Mock(name="wait")
    self.index.copy_to = mock.Mock(name="copy_to")
    self.index.copy_to.return_value = response
    self.index.move_to = mock.Mock(name="move_to")
    self.index.move_to.return_value = response
    self.index.save_objects = mock.Mock(name="save_objects")
    self.index.save_objects.return_value = response

    self.index.replace_all_objects([record])
    self.assertEqual(response.wait.call_count, 0)

    result = self.index.replace_all_objects([record], {'safe': True})
    self.assertEqual(response.wait.call_count, 3)
    self.assertEqual(len(result.responses), 3)
    self.assertEqual(len(result._waitable), 0)
def test_safe_replacing(self):
    """Safe replace with auto-generated IDs leaves exactly the new record."""
    # Seed the index with a throwaway record.
    self.index.save_object(F.obj()).wait()

    # Replace everything with a record that has no objectID, relying on
    # autoGenerateObjectIDIfNotExist plus the safe (blocking) mode.
    self.index.replace_all_objects(
        [{"name": "two"}],
        {"autoGenerateObjectIDIfNotExist": True, "safe": True},
    )

    response = self.index.search("")
    self.assertEqual(response["nbHits"], 1)

    hit = response["hits"][0]
    self.assertEqual(hit["name"], "two")
    self.assertIn("objectID", hit)
    self.assertIn("_highlightResult", hit)
def test_safe_replacing(self):
    """replace_all_objects(safe=True) swaps in the ID-less new record."""
    # Start from a non-empty index.
    self.index.save_object(F.obj()).wait()

    options = {
        'autoGenerateObjectIDIfNotExist': True,
        'safe': True,
    }
    self.index.replace_all_objects([{'name': 'two'}], options)

    result = self.index.search('')
    self.assertEqual(result['nbHits'], 1)

    only_hit = result['hits'][0]
    self.assertEqual(only_hit['name'], 'two')
    self.assertIn('objectID', only_hit)
    self.assertIn('_highlightResult', only_hit)
def test_close(self):
    """Sessions are created lazily and torn down by close()."""
    client = F.search_client()
    # TEST_TYPE is constant for the duration of a test run.
    is_async = os.environ.get("TEST_TYPE", False) == "async"

    # No request yet: no session exists on either transporter.
    self.assertIsNone(client._transporter._requester._session)
    if is_async:
        self.assertIsNone(client._transporter_async._requester._session)

    client.list_api_keys()

    if is_async:
        # The async version was already called.
        self.assertIsNotNone(
            client._transporter_async._requester._session)
        self.assertIsNone(client._transporter._requester._session)
        client._base.list_api_keys()  # Calls the sync version

    self.assertIsNotNone(client._transporter._requester._session)

    client.close()

    if is_async:
        # The async version was already called.
        self.assertIsNone(client._transporter_async._requester._session)
    self.assertIsNone(client._transporter._requester._session)
def test_plurals_dictionaries(self):
    """Save, search and delete one entry in the 'plurals' dictionary."""
    client = F.search_client2()
    entry_id = "".join(
        random.choice(string.ascii_letters) for _ in range(10))

    # The random id must not exist before we insert it.
    self.assertEqual(
        client.search_dictionary_entries("plurals", entry_id)["nbHits"],
        0)

    entry = {
        "objectID": entry_id,
        "language": "fr",
        "words": ["cheval", "chevaux"],
    }
    client.save_dictionary_entries("plurals", [entry]).wait()

    plurals = client.search_dictionary_entries("plurals", entry_id)
    self.assertEqual(plurals["nbHits"], 1)
    self.assertEqual(plurals["hits"][0]["objectID"], entry["objectID"])
    self.assertEqual(plurals["hits"][0]["words"], entry["words"])

    client.delete_dictionary_entries("plurals", [entry_id]).wait()
    self.assertEqual(
        client.search_dictionary_entries("plurals", entry_id)["nbHits"],
        0)
def test_replace_all_objects(self):
    """Verify the copy/batch/move sequence and the safe-mode wait count."""
    self.index._create_temporary_name = mock.Mock(
        name='_create_temporary_name')
    temporary = 'index-name_tmp_bar'
    self.index._create_temporary_name.return_value = temporary  # noqa: E501

    payload = F.obj()
    self.index.replace_all_objects([payload])

    # Asserts the operations of the replace all objects.
    self.transporter.write.assert_has_calls([
        mock.call('POST', '1/indexes/index-name/operation',
                  {'operation': 'copy',
                   'destination': 'index-name_tmp_bar'},
                  {'scope': ['settings', 'synonyms', 'rules']}),
        mock.call('POST', '1/indexes/index-name_tmp_bar/batch',
                  {'requests': [
                      {'action': 'updateObject', 'body': payload}]},
                  None),
        mock.call('POST', '1/indexes/index-name_tmp_bar/operation',
                  {'operation': 'move', 'destination': 'index-name'},
                  None),
    ])

    self.index._transporter.read = mock.Mock(name='read')
    self.index._transporter.read.return_value = {'status': 'published'}
    self.index._transporter.write = mock.Mock(name='write')
    self.index._transporter.write.return_value = {'taskID': 1}

    self.index.replace_all_objects([payload])
    self.assertEqual(self.index._transporter.write.call_count, 3)

    self.index.replace_all_objects([payload], {'safe': True})
    self.assertEqual(self.index._transporter.write.call_count, 6)  # 3+3
    self.assertEqual(self.index._transporter.read.call_count, 3)  # 3 waits
def setUp(self):
    """Create one scratch index named after the running test method."""
    self.index = F.index(self._testMethodName)
def setUp(self):
    """Create the recommendation client used by the tests below."""
    self.client = F.recommendation_client()
def test_copy_move_index(self):
    """Exercise scoped copies (settings/rules/synonyms), full copy, move."""
    objects = [
        {'objectID': 'one', 'company': 'apple'},
        {'objectID': 'two', 'company': 'algolia'}
    ]

    responses = MultipleResponse([
        self.index.save_objects(objects),
        self.index.set_settings({'attributesForFaceting': ['company']}),
        self.index.save_synonym({
            'objectID': 'google_placeholder',
            'type': 'placeholder',
            'placeholder': '<GOOG>',
            'replacements': ['Google', 'GOOG']
        }),
        self.index.save_rule({
            "objectID": "company_auto_faceting",
            "condition": {
                "anchoring": "contains",
                "pattern": "{facet:company}",
            },
            "consequence": {
                "params": {"automaticFacetFilters": ["company"]}
            }
        })
    ]).wait()

    self.index2 = F.index('{}_settings'.format(self._testMethodName))
    responses.push(self.client.copy_settings(
        self.index.name, self.index2.name))

    self.index3 = F.index('{}_rules'.format(self._testMethodName))
    responses.push(self.client.copy_rules(
        self.index.name, self.index3.name))

    self.index4 = F.index('{}_synonyms'.format(self._testMethodName))
    responses.push(self.client.copy_synonyms(
        self.index.name, self.index4.name))

    self.index5 = F.index('{}_full_copy'.format(self._testMethodName))
    responses.push(self.client.copy_index(
        self.index.name, self.index5.name))

    responses.wait()

    # Settings-only copy carries the faceting attributes.
    self.assertEqual(
        self.index2.get_settings()['attributesForFaceting'], ['company'])

    # Rules-only copy has the rule but not the synonym.
    self.index3.get_rule('company_auto_faceting')
    with self.assertRaises(RequestException) as cm:
        self.index3.get_synonym('google_placeholder')

    # Synonyms-only copy has the synonym but not the rule.
    self.index4.get_synonym('google_placeholder')
    with self.assertRaises(RequestException) as cm:
        self.index4.get_rule('company_auto_faceting')

    # A full copy has everything, including the records.
    self.index5.get_synonym('google_placeholder')
    self.index5.get_rule('company_auto_faceting')
    self.assertEqual(
        self.index5.get_settings()['attributesForFaceting'], ['company'])
    for obj in self.index5.browse_objects():
        self.assertIn(obj, objects)

    # After a move, the destination owns everything...
    self.index6 = F.index('{}_after_move'.format(self._testMethodName))
    self.client.move_index(self.index.name, self.index6.name).wait()

    self.index6.get_synonym('google_placeholder')
    self.index6.get_rule('company_auto_faceting')
    self.assertEqual(
        self.index6.get_settings()['attributesForFaceting'], ['company'])
    for obj in self.index6.browse_objects():
        self.assertIn(obj, objects)

    # ...and the source index no longer exists.
    with self.assertRaises(RequestException) as cm:
        self.client.init_index(self.index.name).search('')
    self.assertEqual(cm.exception.status_code, 404)
def test_multiple_operations(self):
    """multiple_batch / multiple_get_objects / multiple_queries round-trip."""
    index_name1 = self.index.name
    index_2 = F.index(self._testMethodName)
    index_name2 = index_2.name

    record = {"firstname": "Jimmie"}
    raw_response = self.client.multiple_batch([
        {"indexName": index_name1, "action": "addObject", "body": record},
        {"indexName": index_name1, "action": "addObject", "body": record},
        {"indexName": index_name2, "action": "addObject", "body": record},
        {"indexName": index_name2, "action": "addObject", "body": record},
    ]).wait().raw_response

    # Fixed: the original used map(lambda object_id: object_id, ...) — an
    # identity map; list() is equivalent and clearer.
    object_ids = list(raw_response['objectIDs'])

    objects = self.client.multiple_get_objects([
        {"indexName": index_name1, "objectID": object_ids[0]},
        {"indexName": index_name1, "objectID": object_ids[1]},
        {"indexName": index_name2, "objectID": object_ids[2]},
        {"indexName": index_name2, "objectID": object_ids[3]},
    ])['results']

    self.assertEqual(objects[0]['objectID'], object_ids[0])
    self.assertEqual(objects[1]['objectID'], object_ids[1])
    self.assertEqual(objects[2]['objectID'], object_ids[2])
    self.assertEqual(objects[3]['objectID'], object_ids[3])

    queries = [
        {
            "indexName": index_name1,
            "params": QueryParametersSerializer.serialize({
                "query": "",
                "hitsPerPage": 2
            })
        },
        {
            "indexName": index_name2,
            "params": QueryParametersSerializer.serialize({
                "query": "",
                "hitsPerPage": 2
            })
        },
    ]

    # strategy=none: both queries run fully.
    results = self.client.multiple_queries(
        queries, {'strategy': 'none'})['results']
    self.assertEqual(len(results), 2)
    self.assertEqual(len(results[0]['hits']), 2)
    self.assertEqual(results[0]['nbHits'], 4)
    self.assertEqual(len(results[1]['hits']), 2)
    self.assertEqual(results[1]['nbHits'], 4)

    # strategy=stopIfEnoughMatches: the second query is skipped once the
    # first returns enough hits.
    results = self.client.multiple_queries(
        queries, {'strategy': 'stopIfEnoughMatches'})['results']
    self.assertEqual(len(results), 2)
    self.assertEqual(len(results[0]['hits']), 2)
    self.assertEqual(results[0]['nbHits'], 4)
    self.assertEqual(len(results[1]['hits']), 0)
    self.assertEqual(results[1]['nbHits'], 0)

    index_2.delete()
def setUp(self):
    """Create a scratch index and a sample object for each test."""
    self.index = Factory.index(self._testMethodName)
    self.obj = Factory.obj()
def setUp(self):
    """Wrap the analytics client in a retry helper; make two indices."""
    self.client = RetryableClient(F.analytics_client())
    self.index = F.index(self._testMethodName)
    self.index2 = F.index('{}2'.format(self._testMethodName))
def test_synonyms(self):
    """Save, fetch, browse, delete and clear every synonym type."""
    responses = MultipleResponse()
    responses.push(self.index.save_objects([
        {"console": "Sony PlayStation <PLAYSTATIONVERSION>"},
        {"console": "Nintendo Switch"},
        {"console": "Nintendo Wii U"},
        {"console": "Nintendo Game Boy Advance"},
        {"console": "Microsoft Xbox"},
        {"console": "Microsoft Xbox 360"},
        {"console": "Microsoft Xbox One"}
    ], {'autoGenerateObjectIDIfNotExist': True}))

    # Regular multi-way synonym saved on its own.
    responses.push(self.index.save_synonym(F.synonym({
        'synonyms': ["gba", "gameboy advance", "game boy advance"]
    }, 'gba')))

    # One of each remaining synonym type, saved as a batch.
    one_way = {
        'objectID': 'wii_to_wii_u',
        'type': 'onewaysynonym',
        'input': 'wii',
        'synonyms': ['wii U']
    }
    placeholder = {
        'objectID': 'playstation_version_placeholder',
        'type': 'placeholder',
        'placeholder': '<PLAYSTATIONVERSION>',
        'replacements': ["1", "One", "2", "3", "4", "4 Pro"]
    }
    alt_correction1 = {
        'objectID': 'ps4',
        'type': 'altcorrection1',
        'word': 'ps4',
        'corrections': ['playstation4']
    }
    alt_correction2 = {
        'objectID': 'psone',
        'type': 'altcorrection2',
        'word': 'psone',
        'corrections': ['playstationone']
    }
    batch = [one_way, placeholder, alt_correction1, alt_correction2]
    responses.push(self.index.save_synonyms(batch))
    responses.wait()

    # Each synonym round-trips through get_synonym unchanged.
    for synonym in batch:
        self.assertEqual(
            self.index.get_synonym(synonym['objectID']), synonym)

    # 4 batch synonyms + the 'gba' one.
    self.assertEqual(self.index.search_synonyms('')['nbHits'], 5)

    # Browse all synonyms and check the batch ones are all present.
    browsed = []
    for item in self.index.browse_synonyms():
        browsed.append(item)
    for synonym in batch:
        self.assertIn(synonym, browsed)

    # Deleting 'gba' makes it unfetchable.
    self.index.delete_synonym('gba').wait()
    with self.assertRaises(RequestException) as _:
        self.index.get_synonym('gba')

    # Clearing removes everything that's left.
    self.index.clear_synonyms().wait()
    self.assertEqual(self.index.search_synonyms('')['nbHits'], 0)
def setUp(self):
    """Create the client and main index; extra indices start unset."""
    self.client = F.search_client()
    self.index = F.index(self._testMethodName)
    # Fixed: the original chained assignment repeated `self.index5` twice.
    self.index2 = self.index3 = self.index4 = self.index5 = self.index6 = None  # noqa: E501
def test_mcm(self):
    """Assign, look up, list and remove a userID across MCM clusters."""
    mcm = F.mcm()

    clusters = mcm.list_clusters()['clusters']
    self.assertEqual(len(clusters), 2)

    # Build a unique, CI-traceable userID.
    date = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    instance = os.environ.get('TRAVIS_JOB_NUMBER', 'unknown') \
        if 'TRAVIS' in os.environ else 'unknown'
    if 'TRAVIS' in os.environ:
        instance = os.environ['TRAVIS_JOB_NUMBER']
    python_version = platform.python_version().replace('.', '')[:2]
    python_version += os.environ.get('TEST_TYPE', '')
    user_id = 'python{}-{}-{}'.format(python_version, date, instance)

    mcm.assign_user_id(user_id, clusters[0]['clusterName'])

    # Poll until the assignment is visible (eventually consistent).
    result = None
    while result is None:
        try:
            result = mcm.get_user_id(user_id)
        except RequestException:
            pass

    matches = [
        user['userID']
        for user in mcm.search_user_ids(user_id)['hits']
    ]
    self.assertIn(user_id, matches)

    users = mcm.list_user_ids()
    self.assertIsInstance(users, dict)
    self.assertIsInstance(users['userIDs'], list)
    self.assertTrue(len(users['userIDs']) > 0)

    users = mcm.get_top_user_ids()
    self.assertIsInstance(users, dict)
    self.assertIsInstance(users['topUsers'], dict)
    self.assertTrue(len(users['topUsers']) > 0)

    # Poll until the removal is accepted.
    result = None
    while result is None:
        try:
            result = mcm.remove_user_id(user_id)
        except RequestException:
            pass

    # Garbage-collect userIDs left behind by previous days' runs.
    users = mcm.list_user_ids()
    today = datetime.datetime.now().strftime("%Y-%m-%d")
    prefix = 'python{}'.format(python_version)
    todays_prefix = '{}-{}'.format(prefix, today)
    for user in users['userIDs']:
        user_id = user['userID']
        if user_id.startswith(prefix) \
                and not user_id.startswith(todays_prefix):
            mcm.remove_user_id(user['userID'])
def test_mcm(self):
    """End-to-end userID lifecycle on a multi-cluster (MCM) setup."""
    mcm = F.mcm()

    clusters = mcm.list_clusters()['clusters']
    self.assertEqual(len(clusters), 2)

    # Unique userID encoding python version, timestamp and CI job.
    date = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    if 'TRAVIS' in os.environ:
        instance = os.environ['TRAVIS_JOB_NUMBER']
    else:
        instance = 'unknown'
    python_version = platform.python_version().replace('.', '')[:2]
    python_version += os.environ.get('TEST_TYPE', '')
    user_id = 'python{}-{}-{}'.format(python_version, date, instance)

    mcm.assign_user_id(user_id, clusters[0]['clusterName'])

    # The assignment is eventually consistent: poll until visible.
    result = None
    while result is None:
        try:
            result = mcm.get_user_id(user_id)
        except RequestException:
            pass

    found = [user['userID']
             for user in mcm.search_user_ids(user_id)['hits']]
    self.assertIn(user_id, found)

    users = mcm.list_user_ids()
    self.assertIsInstance(users, dict)
    self.assertIsInstance(users['userIDs'], list)
    self.assertTrue(len(users['userIDs']) > 0)

    users = mcm.get_top_user_ids()
    self.assertIsInstance(users, dict)
    self.assertIsInstance(users['topUsers'], dict)
    self.assertTrue(len(users['topUsers']) > 0)

    # Poll until the removal goes through.
    result = None
    while result is None:
        try:
            result = mcm.remove_user_id(user_id)
        except RequestException:
            pass

    # Clean up userIDs from earlier days that share our prefix.
    users = mcm.list_user_ids()
    date = datetime.datetime.now().strftime("%Y-%m-%d")
    prefix = 'python{}'.format(python_version)
    todays = '{}-{}'.format(prefix, date)
    for user in users['userIDs']:
        user_id = user['userID']
        if user_id.startswith(prefix) and not user_id.startswith(todays):
            mcm.remove_user_id(user['userID'])
def test_copy_move_index(self):
    """Verify scoped copies (settings / rules / synonyms), a full copy,
    and a move — including that the source index disappears after the
    move (search on it must 404).
    """
    objects = [
        {'objectID': 'one', 'company': 'apple'},
        {'objectID': 'two', 'company': 'algolia'},
    ]

    # Seed the source index with objects, settings, a synonym and a rule
    # so each copy variant has something distinctive to transfer.
    responses = MultipleResponse([
        self.index.save_objects(objects),
        self.index.set_settings({'attributesForFaceting': ['company']}),
        self.index.save_synonym({
            'objectID': 'google_placeholder',
            'type': 'placeholder',
            'placeholder': '<GOOG>',
            'replacements': ['Google', 'GOOG']
        }),
        self.index.save_rule({
            "objectID": "company_auto_faceting",
            "condition": {
                "anchoring": "contains",
                "pattern": "{facet:company}",
            },
            "consequence": {
                "params": {
                    "automaticFacetFilters": ["company"]
                }
            }
        })
    ]).wait()

    # Scoped copies: each destination should receive ONLY its resource.
    self.index2 = F.index('{}_settings'.format(self._testMethodName))
    responses.push(
        self.client.copy_settings(self.index.name, self.index2.name))

    self.index3 = F.index('{}_rules'.format(self._testMethodName))
    responses.push(
        self.client.copy_rules(self.index.name, self.index3.name))

    self.index4 = F.index('{}_synonyms'.format(self._testMethodName))
    responses.push(
        self.client.copy_synonyms(self.index.name, self.index4.name))

    self.index5 = F.index('{}_full_copy'.format(self._testMethodName))
    responses.push(
        self.client.copy_index(self.index.name, self.index5.name))

    responses.wait()

    self.assertEqual(self.index2.get_settings()['attributesForFaceting'],
                     ['company'])

    # Rules-only copy: the rule exists, the synonym must not.
    # (was: `with ... as cm` — the unused binding is dropped here)
    self.index3.get_rule('company_auto_faceting')
    with self.assertRaises(RequestException):
        self.index3.get_synonym('google_placeholder')

    # Synonyms-only copy: the synonym exists, the rule must not.
    self.index4.get_synonym('google_placeholder')
    with self.assertRaises(RequestException):
        self.index4.get_rule('company_auto_faceting')

    # Full copy: everything is present.
    self.index5.get_synonym('google_placeholder')
    self.index5.get_rule('company_auto_faceting')
    self.assertEqual(self.index5.get_settings()['attributesForFaceting'],
                     ['company'])
    for obj in self.index5.browse_objects():
        self.assertIn(obj, objects)

    # Move: the destination has everything, the source no longer exists.
    self.index6 = F.index('{}_after_move'.format(self._testMethodName))
    self.client.move_index(self.index.name, self.index6.name).wait()
    self.index6.get_synonym('google_placeholder')
    self.index6.get_rule('company_auto_faceting')
    self.assertEqual(self.index6.get_settings()['attributesForFaceting'],
                     ['company'])
    for obj in self.index6.browse_objects():
        self.assertIn(obj, objects)

    with self.assertRaises(RequestException) as cm:
        self.client.init_index(self.index.name).search('')
    self.assertEqual(cm.exception.status_code, 404)
def test_indexing(self):
    """Cover the full object lifecycle: save (with and without explicit
    objectIDs), batched saves, get, browse, partial update, and the
    various deletion endpoints.
    """
    responses = []

    # One object with an explicit objectID.
    obj1 = F.obj()
    responses.append(self.index.save_object(obj1))

    # One object without an objectID: the engine generates one.
    obj2 = F.obj(object_id=False)
    opts = {'autoGenerateObjectIDIfNotExist': True}
    responses.append(self.index.save_object(obj2, opts))

    # Two objects with explicit objectIDs, tagged for delete_by later.
    obj3 = F.obj({'_tags': ['algolia']})
    obj4 = F.obj({'_tags': ['algolia']})
    responses.append(self.index.save_objects([obj3, obj4]))

    # Two objects without objectIDs.
    obj5 = F.obj(object_id=False)
    obj6 = F.obj(object_id=False)
    opts = {'autoGenerateObjectIDIfNotExist': True}
    responses.append(self.index.save_objects([obj5, obj6], opts))

    object1_id = self.get_object_id(responses[0])
    object2_id = self.get_object_id(responses[1])
    object3_id = self.get_object_id(responses[2])
    object4_id = self.get_object_id(responses[2], 1)
    object5_id = self.get_object_id(responses[3])
    object6_id = self.get_object_id(responses[3], 1)

    # 1000 objects with explicit objectIDs, saved with a batch size of
    # 100 to exercise the client-side batching logic.
    # (was a manual append loop — rewritten as a comprehension)
    objects = [
        {'objectID': str(i), 'name': i}
        for i in range(1000)
    ]
    self.index._config.batch_size = 100
    responses.append(self.index.save_objects(objects))

    # Wait for all indexing tasks before asserting.
    MultipleResponse(responses).wait()

    # Check the first six records individually with get_object.
    self.assertEqual(obj1['name'],
                     self.index.get_object(object1_id)['name'])
    self.assertEqual(obj2['name'],
                     self.index.get_object(object2_id)['name'])
    self.assertEqual(obj3['name'],
                     self.index.get_object(object3_id)['name'])
    self.assertEqual(obj4['name'],
                     self.index.get_object(object4_id)['name'])
    self.assertEqual(obj5['name'],
                     self.index.get_object(object5_id)['name'])
    self.assertEqual(obj6['name'],
                     self.index.get_object(object6_id)['name'])

    # Check the 1000 batched records with get_objects.
    results = self.index.get_objects(range(1000))['results']
    for obj in results:
        self.assertIn(obj, objects)
    self.assertEqual(len(results), len(objects))

    # Browse everything: 1000 batched + 6 individual records.
    results = list(self.index.browse_objects())
    for obj in objects:
        self.assertIn(obj, results)
    for obj in [obj1, obj3, obj4]:
        self.assertIn(obj, results)
    self.assertEqual(len(results), 1006)

    responses = []

    # Alter 1 record with partial_update_object.
    obj1['name'] = 'This is an altered name'
    responses.append(self.index.partial_update_object(obj1))

    # Alter 2 records with partial_update_objects.
    obj3['bar'] = 40
    obj4['foo'] = 30
    responses.append(self.index.partial_update_objects([obj3, obj4]))

    MultipleResponse(responses).wait()

    self.assertEqual(self.index.get_object(object1_id), obj1)
    self.assertEqual(self.index.get_object(object3_id), obj3)
    self.assertEqual(self.index.get_object(object4_id), obj4)

    # Delete via every available endpoint: by id, by filter, by list of
    # ids, and finally clear_objects for whatever remains.
    responses = [
        self.index.delete_object(object1_id),
        self.index.delete_object(object2_id),
        self.index.delete_by({'tagFilters': ['algolia']}),
        self.index.delete_objects([object5_id, object6_id]),
        self.index.clear_objects(),
    ]

    MultipleResponse(responses).wait()

    # was: `[obj for obj in ...]` — an identity comprehension
    remaining = list(self.index.browse_objects())
    self.assertEqual(len(remaining), 0)
def test_multiple_operations(self):
    """Exercise the cross-index endpoints: multiple_batch,
    multiple_get_objects, and multiple_queries with both retrieval
    strategies ('none' and 'stopIfEnoughMatches').
    """
    index_name1 = self.index.name
    index_2 = F.index(self._testMethodName)
    index_name2 = index_2.name

    # Two adds per index through a single multi-index batch call.
    raw_response = self.client.multiple_batch([
        {"indexName": index_name1, "action": "addObject",
         "body": {"firstname": "Jimmie"}},
        {"indexName": index_name1, "action": "addObject",
         "body": {"firstname": "Jimmie"}},
        {"indexName": index_name2, "action": "addObject",
         "body": {"firstname": "Jimmie"}},
        {"indexName": index_name2, "action": "addObject",
         "body": {"firstname": "Jimmie"}},
    ]).wait().raw_response

    # was: list(map(lambda object_id: object_id, ...)) — an identity
    # map; a plain list() conversion is equivalent.
    object_ids = list(raw_response['objectIDs'])

    objects = self.client.multiple_get_objects([
        {"indexName": index_name1, "objectID": object_ids[0]},
        {"indexName": index_name1, "objectID": object_ids[1]},
        {"indexName": index_name2, "objectID": object_ids[2]},
        {"indexName": index_name2, "objectID": object_ids[3]},
    ])['results']

    for i in range(4):
        self.assertEqual(objects[i]['objectID'], object_ids[i])

    # Identical query payload for both indices; serialize it once.
    params = QueryParametersSerializer.serialize(
        {"query": "", "hitsPerPage": 2})

    # strategy 'none': both queries are executed in full.
    results = self.client.multiple_queries([
        {"indexName": index_name1, "params": params},
        {"indexName": index_name2, "params": params},
    ], {'strategy': 'none'})['results']

    self.assertEqual(len(results), 2)
    self.assertEqual(len(results[0]['hits']), 2)
    self.assertEqual(results[0]['nbHits'], 4)
    self.assertEqual(len(results[1]['hits']), 2)
    self.assertEqual(results[1]['nbHits'], 4)

    # strategy 'stopIfEnoughMatches': the second query is skipped once
    # the first has produced enough hits, so it reports zero results.
    results = self.client.multiple_queries([
        {"indexName": index_name1, "params": params},
        {"indexName": index_name2, "params": params},
    ], {'strategy': 'stopIfEnoughMatches'})['results']

    self.assertEqual(len(results), 2)
    self.assertEqual(len(results[0]['hits']), 2)
    self.assertEqual(results[0]['nbHits'], 4)
    self.assertEqual(len(results[1]['hits']), 0)
    self.assertEqual(results[1]['nbHits'], 0)

    index_2.delete()
def test_tasks(self): task_id = self.index.save_object(F.obj()).raw_responses[0]['taskID'] task = self.index.get_task(task_id + 1000000) self.assertEqual(task['status'], 'notPublished')