def test_secured_api_keys(self):
    """A generated secured key scoped to one index can search that
    index, while searching any other index is rejected by the API."""
    hosts = F.hosts(self.client.app_id)

    admin_config = SearchConfig(F.get_app_id(), F.get_api_key())
    admin_config.hosts = hosts
    admin_client = F.decide(SearchClient.create_with_config(admin_config))

    allowed_index = F.index(admin_client, self._testMethodName)
    blocked_index = F.index(
        admin_client, '{}_dev'.format(self._testMethodName))
    allowed_index.save_object({'objectID': 'one'}).wait()
    blocked_index.save_object({'objectID': 'one'}).wait()

    api_key = self.client.generate_secured_api_key(
        os.environ['ALGOLIA_SEARCH_KEY_1'], {
            "validUntil": int(round(time.time())) + (60 * 10),  # + 10 min
            "restrictIndices": allowed_index.name,
        })

    secured_config = SearchConfig(F.get_app_id(), api_key)
    secured_config.hosts = hosts
    secured_client = F.decide(SearchClient.create_with_config(secured_config))

    allowed_search = secured_client.init_index(allowed_index.name)
    blocked_search = secured_client.init_index(blocked_index.name)

    # Scoped index: the search goes through.
    allowed_search.search('')
    # Any other index: the restricted key is refused.
    with self.assertRaises(RequestException) as _:
        blocked_search.search('')
def test_can_not_copy_index_from_same_account(self):
    """The cross-account copy helper must refuse two indices that
    belong to the same application."""
    source = F.index('foo')
    destination = F.index('bar')

    # Same-application copies are the job of the regular copy_index,
    # so AccountClient rejects them.
    with self.assertRaises(AlgoliaException) as _:
        AccountClient.copy_index(source, destination)
def setUp(self):
    """Wrap the analytics client with retries and create two indices."""
    search_client = F.search_client()
    retryable_errors = ['Too Many Requests', 'Index does not exist']
    self.client = RetryableClient(F.analytics_client(), retryable_errors)
    self.index = F.index(search_client, self._testMethodName)
    self.index2 = F.index(search_client, '{}2'.format(self._testMethodName))  # noqa: E501
def test_can_not_copy_index_from_same_account(self):
    """AccountClient.copy_index must reject indices from one app."""
    client = F.search_client()
    source = F.index(client, "foo")
    destination = F.index(client, "bar")

    # Both indices live on the same application, which is invalid for
    # a cross-account copy.
    with self.assertRaises(AlgoliaException) as _:
        AccountClient.copy_index(source, destination)
def setUp(self):
    """Keep raw clients on self; analytics goes behind the retry wrapper."""
    self.search_client = F.search_client()
    self.analytics_client = F.analytics_client()
    retryable_errors = ["Too Many Requests", "Index does not exist"]
    self.client = RetryableClient(self.analytics_client, retryable_errors)
    self.index = F.index(self.search_client, self._testMethodName)
    self.index2 = F.index(self.search_client, "{}2".format(self._testMethodName))  # noqa: E501
def test_browse_rules(self):
    """browse_rules must aggregate hits across pages: three full pages
    of 1000 plus a final partial page of 800 yields 3800 rules."""

    def fake_send(request, **kwargs):
        # The requested page number is read back from the outgoing body.
        requested_page = json.loads(request.body)["page"]
        # Page 3 is the short final page; all others are full.
        page_size = 800 if requested_page == 3 else 1000
        page_hits = [
            {"objectID": i, "_highlightResult": None}
            for i in range(page_size)
        ]
        fake = Response()
        fake.status_code = 200
        fake._content = json.dumps({
            "hits": page_hits,
            "nbHits": 3800,
            "page": requested_page,
            "nbPages": 3,
        }).encode()
        return fake

    client = SearchClient.create("foo", "bar")
    # Swap the transport session for a mock so no network is touched.
    session = requests.Session()
    session.send = mock.MagicMock(name="send")
    session.send.side_effect = fake_send
    client._transporter._requester._session = session

    index = F.index(client, "test")
    self.assertEqual(len(list(index.browse_rules())), 3800)
def test_secured_api_keys(self):
    """A secured key restricted to one index must not search another."""
    self.index2 = F.index('{}_dev'.format(self._testMethodName))
    self.index.save_object({'objectID': 'one'}).wait()
    self.index2.save_object({'objectID': 'one'}).wait()

    restrictions = {
        "validUntil": int(round(time.time())) + (60 * 10),  # + 10 min
        "restrictIndices": self.index.name,
    }
    api_key = self.client.generate_secured_api_key(
        os.environ['ALGOLIA_SEARCH_KEY_1'], restrictions)

    # The scoped index accepts the restricted key...
    F.search_client(api_key=api_key).init_index(self.index.name).search('')
    # ...while any other index rejects it.
    with self.assertRaises(RequestException) as _:
        F.search_client(api_key=api_key).init_index(
            self.index2.name).search('')
def test_secured_api_keys(self):
    """Key scoped to self.index: allowed there, rejected elsewhere."""
    self.index2 = F.index('{}_dev'.format(self._testMethodName))
    self.index.save_object({'objectID': 'one'}).wait()
    self.index2.save_object({'objectID': 'one'}).wait()

    valid_until = int(round(time.time())) + (60 * 10)  # + 10 min
    api_key = self.client.generate_secured_api_key(
        os.environ['ALGOLIA_SEARCH_KEY_1'],
        {"validUntil": valid_until, "restrictIndices": self.index.name})

    # Allowed index: the search succeeds.
    F.search_client(api_key=api_key).init_index(self.index.name).search('')
    # Restricted key on any other index: rejected by the API.
    with self.assertRaises(RequestException) as _:
        F.search_client(api_key=api_key).init_index(
            self.index2.name).search('')
def test_multiple_operations(self):
    """Exercise multiple_batch, multiple_get_objects and multiple_queries
    across the two index names used by this test."""
    index_name1 = self.index.name
    index_2 = F.index(self._testMethodName)
    index_name2 = index_2.name

    # Two addObject operations per index name, identical payloads.
    operations = [
        {"indexName": name, "action": "addObject",
         "body": {"firstname": "Jimmie"}}
        for name in (index_name1, index_name1, index_name2, index_name2)
    ]
    raw_response = self.client.multiple_batch(operations).wait().raw_response
    object_ids = list(raw_response['objectIDs'])

    objects = self.client.multiple_get_objects([
        {"indexName": index_name1, "objectID": object_ids[0]},
        {"indexName": index_name1, "objectID": object_ids[1]},
        {"indexName": index_name2, "objectID": object_ids[2]},
        {"indexName": index_name2, "objectID": object_ids[3]},
    ])['results']
    for position in range(4):
        self.assertEqual(objects[position]['objectID'], object_ids[position])

    def query_for(index_name):
        # One query per index, capped at two hits per page.
        return {
            "indexName": index_name,
            "params": QueryParametersSerializer.serialize(
                {"query": "", "hitsPerPage": 2}),
        }

    results = self.client.multiple_queries(
        [query_for(index_name1), query_for(index_name2)],
        {'strategy': 'none'})['results']
    self.assertEqual(len(results), 2)
    self.assertEqual(len(results[0]['hits']), 2)
    self.assertEqual(results[0]['nbHits'], 4)
    self.assertEqual(len(results[1]['hits']), 2)
    self.assertEqual(results[1]['nbHits'], 4)

    # With stopIfEnoughMatches the second query is short-circuited.
    results = self.client.multiple_queries(
        [query_for(index_name1), query_for(index_name2)],
        {'strategy': 'stopIfEnoughMatches'})['results']
    self.assertEqual(len(results), 2)
    self.assertEqual(len(results[0]['hits']), 2)
    self.assertEqual(results[0]['nbHits'], 4)
    self.assertEqual(len(results[1]['hits']), 0)
    self.assertEqual(results[1]['nbHits'], 0)

    index_2.delete()
def setUp(self):
    """Create the search client and primary index, and pre-declare the
    auxiliary index attributes so later cleanup can test them safely."""
    self.client = F.search_client()
    self.index = F.index(self._testMethodName)
    # Fix: the original chained assignment listed ``self.index5`` twice;
    # the duplicate target was redundant and has been removed.
    self.index2 = self.index3 = self.index4 = self.index5 = self.index6 = None  # noqa: E501
def setUp(self):
    """Retry-wrapped analytics client plus a pair of test indices."""
    self.client = RetryableClient(F.analytics_client())
    base_name = self._testMethodName
    self.index = F.index(base_name)
    self.index2 = F.index('{}2'.format(base_name))
def setUp(self):
    """Create a fresh index named after the running test method."""
    self.index = F.index(self._testMethodName)
def setUp(self):
    """Two application clients, each owning an index of the same name."""
    self.client1, self.client2 = F.search_client(), F.search_client2()
    self.index = F.index(self.client1, self._testMethodName)
    self.index2 = F.index(self.client2, self._testMethodName)
def test_copy_move_index(self):
    """Scoped copies (settings/rules/synonyms), a full copy, and a move,
    verifying what each destination did and did not receive."""
    objects = [
        {"objectID": "one", "company": "apple"},
        {"objectID": "two", "company": "algolia"},
    ]
    synonym = {
        "objectID": "google_placeholder",
        "type": "placeholder",
        "placeholder": "<GOOG>",
        "replacements": ["Google", "GOOG"],
    }
    rule = {
        "objectID": "company_auto_faceting",
        "condition": {
            "anchoring": "contains",
            "pattern": "{facet:company}",
        },
        "consequence": {"params": {"automaticFacetFilters": ["company"]}},
    }
    responses = MultipleResponse(
        [
            self.index.save_objects(objects),
            self.index.set_settings({"attributesForFaceting": ["company"]}),
            self.index.save_synonym(synonym),
            self.index.save_rule(rule),
        ]
    ).wait()

    settings_copy = F.index(
        self.client, "{}_settings".format(self._testMethodName)
    )  # noqa: E501
    responses.push(
        self.client.copy_settings(self.index.name, settings_copy.name))
    rules_copy = F.index(
        self.client, "{}_rules".format(self._testMethodName)
    )  # noqa: E501
    responses.push(self.client.copy_rules(self.index.name, rules_copy.name))
    synonyms_copy = F.index(
        self.client, "{}_synonyms".format(self._testMethodName)
    )  # noqa: E501
    responses.push(
        self.client.copy_synonyms(self.index.name, synonyms_copy.name))
    full_copy = F.index(
        self.client, "{}_full_copy".format(self._testMethodName)
    )  # noqa: E501
    responses.push(self.client.copy_index(self.index.name, full_copy.name))
    responses.wait()

    self.assertEqual(
        settings_copy.get_settings()["attributesForFaceting"], ["company"])

    # Each scoped copy only carries its own resource type.
    rules_copy.get_rule("company_auto_faceting")
    with self.assertRaises(RequestException) as cm:
        rules_copy.get_synonym("google_placeholder")
    synonyms_copy.get_synonym("google_placeholder")
    with self.assertRaises(RequestException) as cm:
        synonyms_copy.get_rule("company_auto_faceting")

    # The full copy carries synonyms, rules, settings and records.
    full_copy.get_synonym("google_placeholder")
    full_copy.get_rule("company_auto_faceting")
    self.assertEqual(
        full_copy.get_settings()["attributesForFaceting"], ["company"])
    for obj in full_copy.browse_objects():
        self.assertIn(obj, objects)

    moved = F.index(
        self.client, "{}_after_move".format(self._testMethodName)
    )  # noqa: E501
    self.client.move_index(self.index.name, moved.name).wait()

    # The moved index keeps everything the source had.
    moved.get_synonym("google_placeholder")
    moved.get_rule("company_auto_faceting")
    self.assertEqual(
        moved.get_settings()["attributesForFaceting"], ["company"])
    for obj in moved.browse_objects():
        self.assertIn(obj, objects)

    # And the source index no longer exists after the move.
    with self.assertRaises(RequestException) as cm:
        self.client.init_index(self.index.name).search("")
    self.assertEqual(cm.exception.status_code, 404)
def setUp(self):
    """One search client (kept local), one index, one sample object."""
    search_client = F.search_client()
    self.index = F.index(search_client, self._testMethodName)
    self.obj = F.obj()
def test_copy_move_index(self):
    """Copy settings/rules/synonyms individually, copy fully, then move,
    checking each destination's contents afterwards."""
    objects = [
        {'objectID': 'one', 'company': 'apple'},
        {'objectID': 'two', 'company': 'algolia'},
    ]
    responses = MultipleResponse([
        self.index.save_objects(objects),
        self.index.set_settings({'attributesForFaceting': ['company']}),
        self.index.save_synonym({
            'objectID': 'google_placeholder',
            'type': 'placeholder',
            'placeholder': '<GOOG>',
            'replacements': ['Google', 'GOOG'],
        }),
        self.index.save_rule({
            "objectID": "company_auto_faceting",
            "condition": {
                "anchoring": "contains",
                "pattern": "{facet:company}",
            },
            "consequence": {
                "params": {"automaticFacetFilters": ["company"]},
            },
        }),
    ]).wait()

    source = self.index.name
    self.index2 = F.index('{}_settings'.format(self._testMethodName))
    responses.push(self.client.copy_settings(source, self.index2.name))
    self.index3 = F.index('{}_rules'.format(self._testMethodName))
    responses.push(self.client.copy_rules(source, self.index3.name))
    self.index4 = F.index('{}_synonyms'.format(self._testMethodName))
    responses.push(self.client.copy_synonyms(source, self.index4.name))
    self.index5 = F.index('{}_full_copy'.format(self._testMethodName))
    responses.push(self.client.copy_index(source, self.index5.name))
    responses.wait()

    self.assertEqual(
        self.index2.get_settings()['attributesForFaceting'], ['company'])

    # Scoped copies only carry their own resource type.
    self.index3.get_rule('company_auto_faceting')
    with self.assertRaises(RequestException) as cm:
        self.index3.get_synonym('google_placeholder')
    self.index4.get_synonym('google_placeholder')
    with self.assertRaises(RequestException) as cm:
        self.index4.get_rule('company_auto_faceting')

    # A full copy carries synonyms, rules, settings and records.
    self.index5.get_synonym('google_placeholder')
    self.index5.get_rule('company_auto_faceting')
    self.assertEqual(
        self.index5.get_settings()['attributesForFaceting'], ['company'])
    for obj in self.index5.browse_objects():
        self.assertIn(obj, objects)

    self.index6 = F.index('{}_after_move'.format(self._testMethodName))
    self.client.move_index(source, self.index6.name).wait()

    # The moved index keeps everything the source had.
    self.index6.get_synonym('google_placeholder')
    self.index6.get_rule('company_auto_faceting')
    self.assertEqual(
        self.index6.get_settings()['attributesForFaceting'], ['company'])
    for obj in self.index6.browse_objects():
        self.assertIn(obj, objects)

    # The moved-from index must no longer exist.
    with self.assertRaises(RequestException) as cm:
        self.client.init_index(self.index.name).search('')
    self.assertEqual(cm.exception.status_code, 404)
def setUp(self):
    """Per-test index plus an insights client."""
    self.index = F.index(self._testMethodName)
    self.client = F.insights_client()
def test_multiple_operations(self):
    """multiple_batch / multiple_get_objects / multiple_queries across
    two distinct indices holding two records each."""
    index2 = F.index(self.client, "{}2".format(self._testMethodName))
    index_name1 = self.index.name
    index_name2 = index2.name

    # Two addObject operations per index, identical payloads.
    operations = [
        {"indexName": name, "action": "addObject",
         "body": {"firstname": "Jimmie"}}  # noqa: E501
        for name in (index_name1, index_name1, index_name2, index_name2)
    ]
    raw_response = self.client.multiple_batch(operations).wait().raw_response
    object_ids = list(raw_response["objectIDs"])

    objects = self.client.multiple_get_objects(
        [
            {"indexName": index_name1, "objectID": object_ids[0]},
            {"indexName": index_name1, "objectID": object_ids[1]},
            {"indexName": index_name2, "objectID": object_ids[2]},
            {"indexName": index_name2, "objectID": object_ids[3]},
        ]
    )["results"]
    for position in range(4):
        self.assertEqual(objects[position]["objectID"], object_ids[position])

    def query_for(index_name):
        # Empty query capped at two hits per page.
        return {
            "indexName": index_name,
            "params": QueryParametersSerializer.serialize(
                {"query": "", "hitsPerPage": 2}
            ),  # noqa: E501
        }

    results = self.client.multiple_queries(
        [query_for(index_name1), query_for(index_name2)],
        {"strategy": "none"},
    )["results"]
    self.assertEqual(len(results), 2)
    self.assertEqual(len(results[0]["hits"]), 2)
    self.assertEqual(results[0]["nbHits"], 2)
    self.assertEqual(len(results[1]["hits"]), 2)
    self.assertEqual(results[1]["nbHits"], 2)

    # With stopIfEnoughMatches the second query is short-circuited.
    results = self.client.multiple_queries(
        [query_for(index_name1), query_for(index_name2)],
        {"strategy": "stopIfEnoughMatches"},
    )["results"]
    self.assertEqual(len(results), 2)
    self.assertEqual(len(results[0]["hits"]), 2)
    self.assertEqual(results[0]["nbHits"], 2)
    self.assertEqual(len(results[1]["hits"]), 0)
    self.assertEqual(results[1]["nbHits"], 0)
def test_copy_move_index(self):
    """Scoped copies, a full copy and a move of self.index, with checks
    on what each destination received."""
    objects = [
        {'objectID': 'one', 'company': 'apple'},
        {'objectID': 'two', 'company': 'algolia'},
    ]
    responses = MultipleResponse([
        self.index.save_objects(objects),
        self.index.set_settings({'attributesForFaceting': ['company']}),
        self.index.save_synonym({
            'objectID': 'google_placeholder',
            'type': 'placeholder',
            'placeholder': '<GOOG>',
            'replacements': ['Google', 'GOOG'],
        }),
        self.index.save_rule({
            "objectID": "company_auto_faceting",
            "condition": {
                "anchoring": "contains",
                "pattern": "{facet:company}",
            },
            "consequence": {
                "params": {"automaticFacetFilters": ["company"]},
            },
        }),
    ]).wait()

    self.index2 = F.index('{}_settings'.format(self._testMethodName))
    responses.push(
        self.client.copy_settings(self.index.name, self.index2.name))
    self.index3 = F.index('{}_rules'.format(self._testMethodName))
    responses.push(
        self.client.copy_rules(self.index.name, self.index3.name))
    self.index4 = F.index('{}_synonyms'.format(self._testMethodName))
    responses.push(
        self.client.copy_synonyms(self.index.name, self.index4.name))
    self.index5 = F.index('{}_full_copy'.format(self._testMethodName))
    responses.push(
        self.client.copy_index(self.index.name, self.index5.name))
    responses.wait()

    self.assertEqual(
        self.index2.get_settings()['attributesForFaceting'], ['company'])

    # Scoped copies must not carry the other resource types.
    self.index3.get_rule('company_auto_faceting')
    with self.assertRaises(RequestException) as cm:
        self.index3.get_synonym('google_placeholder')
    self.index4.get_synonym('google_placeholder')
    with self.assertRaises(RequestException) as cm:
        self.index4.get_rule('company_auto_faceting')

    # The full copy carries synonyms, rules, settings and records.
    self.index5.get_synonym('google_placeholder')
    self.index5.get_rule('company_auto_faceting')
    self.assertEqual(
        self.index5.get_settings()['attributesForFaceting'], ['company'])
    for obj in self.index5.browse_objects():
        self.assertIn(obj, objects)

    self.index6 = F.index('{}_after_move'.format(self._testMethodName))
    self.client.move_index(self.index.name, self.index6.name).wait()

    # The moved index keeps everything the source had.
    self.index6.get_synonym('google_placeholder')
    self.index6.get_rule('company_auto_faceting')
    self.assertEqual(
        self.index6.get_settings()['attributesForFaceting'], ['company'])
    for obj in self.index6.browse_objects():
        self.assertIn(obj, objects)

    # The moved-from index must no longer exist.
    with self.assertRaises(RequestException) as cm:
        self.client.init_index(self.index.name).search('')
    self.assertEqual(cm.exception.status_code, 404)
def setUp(self):
    """Search client and its per-test index."""
    search_client = F.search_client()
    self.client = search_client
    self.index = F.index(search_client, self._testMethodName)
def test_multiple_operations(self):
    """Batch writes, multi-get and multi-query over two index names."""
    index_name1 = self.index.name
    index_2 = F.index(self._testMethodName)
    index_name2 = index_2.name

    batch_targets = (index_name1, index_name1, index_name2, index_name2)

    # One addObject operation per target, identical payloads.
    batch = []
    for target in batch_targets:
        batch.append({"indexName": target, "action": "addObject",
                      "body": {"firstname": "Jimmie"}})
    raw_response = self.client.multiple_batch(batch).wait().raw_response
    object_ids = [object_id for object_id in raw_response['objectIDs']]

    lookups = []
    for position, target in enumerate(batch_targets):
        lookups.append({"indexName": target,
                        "objectID": object_ids[position]})
    objects = self.client.multiple_get_objects(lookups)['results']
    self.assertEqual(objects[0]['objectID'], object_ids[0])
    self.assertEqual(objects[1]['objectID'], object_ids[1])
    self.assertEqual(objects[2]['objectID'], object_ids[2])
    self.assertEqual(objects[3]['objectID'], object_ids[3])

    def build_queries():
        # One empty query per index name, capped at two hits per page.
        return [
            {"indexName": index_name1,
             "params": QueryParametersSerializer.serialize(
                 {"query": "", "hitsPerPage": 2})},
            {"indexName": index_name2,
             "params": QueryParametersSerializer.serialize(
                 {"query": "", "hitsPerPage": 2})},
        ]

    results = self.client.multiple_queries(
        build_queries(), {'strategy': 'none'})['results']
    self.assertEqual(len(results), 2)
    self.assertEqual(len(results[0]['hits']), 2)
    self.assertEqual(results[0]['nbHits'], 4)
    self.assertEqual(len(results[1]['hits']), 2)
    self.assertEqual(results[1]['nbHits'], 4)

    # With stopIfEnoughMatches the second query is short-circuited.
    results = self.client.multiple_queries(
        build_queries(), {'strategy': 'stopIfEnoughMatches'})['results']
    self.assertEqual(len(results), 2)
    self.assertEqual(len(results[0]['hits']), 2)
    self.assertEqual(results[0]['nbHits'], 4)
    self.assertEqual(len(results[1]['hits']), 0)
    self.assertEqual(results[1]['nbHits'], 0)

    index_2.delete()
def setUp(self):
    """Create a per-test index and a sample object via the Factory helper."""
    self.index = Factory.index(self._testMethodName)
    self.obj = Factory.obj()