def test_dict_access(self):
        response = {
            "foo": "bar",
        }

        response_object = IndexingResponse({}, [response])
        self.assertEqual(response_object[0]["foo"], "bar")

        response_object = MultipleResponse([response])
        self.assertEqual(response_object[0]["foo"], "bar")

        response_object = MultipleResponse([IndexingResponse({}, [response])])
        self.assertEqual(response_object[0][0]["foo"], "bar")

        response_object = AddApiKeyResponse({}, response)
        self.assertEqual(response_object["foo"], "bar")

        response_object = UpdateApiKeyResponse({}, response, {})
        self.assertEqual(response_object["foo"], "bar")

        response_object = DeleteApiKeyResponse({}, response, "")
        self.assertEqual(response_object["foo"], "bar")

        response_object = MultipleIndexBatchIndexingResponse({}, response)
        self.assertEqual(response_object["foo"], "bar")
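
The assertions above rely on the response wrappers supporting dict-style item access. A minimal illustrative sketch of that delegation pattern (not the library's actual implementation) could look like this:

class DictAccessResponse:
    """Illustrative wrapper that forwards item access to the raw payload."""

    def __init__(self, raw_response):
        self._raw_response = raw_response

    def __getitem__(self, key):
        # Works for both string keys and integer indices, depending on
        # whether the raw payload is a dict or a list.
        return self._raw_response[key]

wrapped = DictAccessResponse({'foo': 'bar'})
assert wrapped['foo'] == 'bar'
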
Example #2
    def test_dict_access(self):
        response = {
            'foo': 'bar',
        }

        response_object = IndexingResponse({}, [response])
        self.assertEqual(response_object[0]['foo'], 'bar')

        response_object = MultipleResponse([response])
        self.assertEqual(response_object[0]['foo'], 'bar')

        response_object = MultipleResponse([IndexingResponse({}, [response])])
        self.assertEqual(response_object[0][0]['foo'], 'bar')

        response_object = AddApiKeyResponse({}, response)
        self.assertEqual(response_object['foo'], 'bar')

        response_object = UpdateApiKeyResponse({}, response, {})
        self.assertEqual(response_object['foo'], 'bar')

        response_object = DeleteApiKeyResponse({}, response, '')
        self.assertEqual(response_object['foo'], 'bar')

        response_object = MultipleIndexBatchIndexingResponse({}, response)
        self.assertEqual(response_object['foo'], 'bar')
    def test_cross_app_copy_index(self):
        rule = F.rule(object_id='one')
        synonym = F.synonym(object_id='one')
        responses = [
            self.index.save_object({'objectID': 'one'}),
            self.index.save_rule(rule),
            self.index.save_synonym(synonym),
            self.index.set_settings({'searchableAttributes': ['objectID']})
        ]

        MultipleResponse(responses).wait()

        AccountClient.copy_index(self.index, self.index2).wait()

        # Assert objects got copied
        res = self.index2.search('')

        self.assertEqual(len(res['hits']), 1)
        self.assertEqual(res['hits'][0], {'objectID': 'one'})

        # Assert settings got copied
        settings = self.index2.get_settings()
        self.assertEqual(settings['searchableAttributes'], ['objectID'])

        # Assert rules got copied
        self.assertEqual(self.index2.get_rule('one'), rule)

        # Assert synonyms got copied
        self.assertEqual(self.index2.get_synonym('one'), synonym)

        # Assert that copying again fails because index already exists
        with self.assertRaises(AlgoliaException) as _:
            AccountClient.copy_index(self.index, self.index2)
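
Outside the test harness, a cross-application copy would look roughly like the sketch below. The app IDs, API keys and index names are placeholders, and the imports assume the algoliasearch v2 package layout:

from algoliasearch.account_client import AccountClient
from algoliasearch.search_client import SearchClient

source_client = SearchClient.create('SOURCE_APP_ID', 'SOURCE_API_KEY')
destination_client = SearchClient.create('DEST_APP_ID', 'DEST_API_KEY')

source_index = source_client.init_index('articles')
destination_index = destination_client.init_index('articles_copy')

# Copies settings, synonyms, rules and objects; raises AlgoliaException if
# both indices live on the same application or the destination already exists.
AccountClient.copy_index(source_index, destination_index).wait()
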
Example #4
    def test_batching(self):
        responses = MultipleResponse()

        responses.push(self.index.save_objects([
            {"objectID": "one", "key": "value"},
            {"objectID": "two", "key": "value"},
            {"objectID": "three", "key": "value"},
            {"objectID": "four", "key": "value"},
            {"objectID": "five", "key": "value"},
        ]))

        responses.push(self.index.batch([
            {
                "action": "addObject",
                "body": {"objectID": "zero", "key": "value"}
            },
            {
                "action": "updateObject",
                "body": {"objectID": "one", "k": "v"}
            },
            {
                "action": "partialUpdateObject",
                "body": {"objectID": "two", "k": "v"}
            },
            {
                "action": "partialUpdateObject",
                "body": {"objectID": "two_bis", "key": "value"}
            },
            {
                "action": "partialUpdateObjectNoCreate",
                "body": {"objectID": "three", "k": "v"}
            },
            {
                "action": "deleteObject",
                "body": {"objectID": "four"}
            }
        ]))

        responses.wait()

        objects = [
            {"objectID": "zero", "key": "value"},
            {"objectID": "one", "k": "v"},
            {"objectID": "two", "key": "value", "k": "v"},
            {"objectID": "two_bis", "key": "value"},
            {"objectID": "three", "key": "value", "k": "v"},
            {"objectID": "five", "key": "value"},
        ]

        results = [obj for obj in self.index.browse_objects()]

        for obj in objects:
            self.assertIn(obj, results)
    def test_batching(self):
        responses = MultipleResponse()

        responses.push(self.index.save_objects([
            {"objectID": "one", "key": "value"},
            {"objectID": "two", "key": "value"},
            {"objectID": "three", "key": "value"},
            {"objectID": "four", "key": "value"},
            {"objectID": "five", "key": "value"},
        ]))

        responses.push(self.index.batch([
            {
                "action": "addObject",
                "body": {"objectID": "zero", "key": "value"}
            },
            {
                "action": "updateObject",
                "body": {"objectID": "one", "k": "v"}
            },
            {
                "action": "partialUpdateObject",
                "body": {"objectID": "two", "k": "v"}
            },
            {
                "action": "partialUpdateObject",
                "body": {"objectID": "two_bis", "key": "value"}
            },
            {
                "action": "partialUpdateObjectNoCreate",
                "body": {"objectID": "three", "k": "v"}
            },
            {
                "action": "deleteObject",
                "body": {"objectID": "four"}
            }
        ]))

        responses.wait()

        objects = [
            {"objectID": "zero", "key": "value"},
            {"objectID": "one", "k": "v"},
            {"objectID": "two", "key": "value", "k": "v"},
            {"objectID": "two_bis", "key": "value"},
            {"objectID": "three", "key": "value", "k": "v"},
            {"objectID": "five", "key": "value"},
        ]

        results = [obj for obj in self.index.browse_objects()]

        for obj in objects:
            self.assertIn(obj, results)
    def copy_index(source_index, destination_index, request_options=None):
        # type: (SearchIndex, SearchIndex, Optional[Union[Dict[str, Any], RequestOptions]]) -> MultipleResponse  # noqa: E501

        if source_index.app_id == destination_index.app_id:
            raise AlgoliaException("The indices are on the same application. "
                                   "Use client.copy_index instead.")

        try:
            destination_index.get_settings()
        except RequestException:
            pass
        else:
            raise AlgoliaException(
                "Destination index already exists. Please "
                "delete it before copying index across applications.")

        responses = MultipleResponse()

        # Copy settings
        settings = source_index.get_settings()
        responses.push(
            destination_index.set_settings(settings, request_options))

        # Copy synonyms
        synonyms = source_index.browse_synonyms()
        responses.push(
            destination_index.save_synonyms(synonyms, request_options))

        # Copy rules
        rules = source_index.browse_rules()
        responses.push(destination_index.save_rules(rules, request_options))

        # Copy objects
        objects = source_index.browse_objects()
        responses.push(destination_index.save_objects(objects,
                                                      request_options))

        return responses
    def copy_index(source_index, destination_index, request_options=None):
        # type: (SearchIndex, SearchIndex, Optional[Union[Dict[str, Any], RequestOptions]]) -> MultipleResponse  # noqa: E501

        if source_index.app_id == destination_index.app_id:
            raise AlgoliaException(
                'The indices are on the same application. '
                'Use client.copy_index instead.')

        try:
            destination_index.get_settings()
        except RequestException:
            pass
        else:
            raise AlgoliaException(
                'Destination index already exists. Please '
                'delete it before copying index across applications.')

        responses = MultipleResponse()

        # Copy settings
        settings = source_index.get_settings()
        responses.push(
            destination_index.set_settings(settings, request_options)
        )

        # Copy synonyms
        synonyms = source_index.browse_synonyms()
        responses.push(
            destination_index.save_synonyms(synonyms, request_options)
        )

        # Copy rules
        rules = source_index.browse_rules()
        responses.push(destination_index.save_rules(rules, request_options))

        # Copy objects
        objects = source_index.browse_objects()
        responses.push(
            destination_index.save_objects(objects, request_options)
        )

        return responses
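
Because copy_index refuses to run when the destination already exists, callers may want to catch that case explicitly. A small sketch, reusing the source_index / destination_index naming from the signature above and assuming the algoliasearch v2 exceptions module:

from algoliasearch.exceptions import AlgoliaException

try:
    AccountClient.copy_index(source_index, destination_index).wait()
except AlgoliaException as exc:
    # The destination already exists, or both indices share an application;
    # delete the destination (or pick another name) and retry.
    print('Cross-app copy skipped: {}'.format(exc))
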
Example #8
async def main():
    async with SearchClient.create(app_id, api_key) as client:
        index = client.init_index('articles')

        try:
            (await index.clear_objects_async()).wait()
        except AlgoliaException:  # Index not found
            pass

        results = await asyncio.gather(
            index.save_object_async({
                'objectID': 1,
                'foo': 'bar'
            }), index.save_object_async({
                'objectID': 2,
                'foo': 'foo'
            }))

        MultipleResponse(results).wait()

        print(await index.search_async(''))
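
To actually run the coroutine above, a minimal entry point such as the following is enough, assuming app_id and api_key are defined in the same module:

import asyncio

if __name__ == '__main__':
    # Python 3.7+: runs the async example above on a fresh event loop.
    asyncio.run(main())
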
Example #9
    def test_cross_app_copy_index(self):
        rule = F.rule(object_id="one")
        synonym = F.synonym(object_id="one")
        responses = [
            self.index.save_object({"objectID": "one"}),
            self.index.save_rule(rule),
            self.index.save_synonym(synonym),
            self.index.set_settings({"searchableAttributes": ["objectID"]}),
        ]

        MultipleResponse(responses).wait()

        AccountClient.copy_index(self.index, self.index2).wait()

        # Assert objects got copied
        res = self.index2.search("")

        self.assertEqual(len(res["hits"]), 1)
        self.assertEqual(res["hits"][0], {"objectID": "one"})

        # Assert settings got copied
        settings = self.index2.get_settings()
        self.assertEqual(settings["searchableAttributes"], ["objectID"])

        # Assert rules got copied
        self.assertEqual(rule_without_metadata(self.index2.get_rule("one")),
                         rule)

        # Assert synonyms got copied
        list_synonyms1 = [synonym for synonym in self.index.browse_synonyms()]
        list_synonyms2 = [synonym for synonym in self.index2.browse_synonyms()]

        self.assertEqual(list_synonyms1, list_synonyms2)

        # Assert synonyms are the same
        self.assertEqual(self.index2.get_synonym("one"), synonym)

        # Assert that copying again fails because index already exists
        with self.assertRaises(AlgoliaException) as _:
            AccountClient.copy_index(self.index, self.index2)
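
The rule_without_metadata helper used above is not shown in these snippets. A hypothetical version, based purely on how it is called, would simply drop the server-added _metadata field before comparing:

def rule_without_metadata(rule):
    # type: (dict) -> dict
    """Hypothetical helper: strips the '_metadata' key that the engine adds
    to fetched rules so they can be compared with locally defined ones."""
    rule = dict(rule)
    rule.pop('_metadata', None)
    return rule
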
    def replace_all_objects_async(
            self,
            objects,  # type: ignore
            request_options=None):
        # type: (Union[List[dict], Iterator[dict]], Optional[Union[dict, RequestOptions]]) -> MultipleResponse # noqa: E501

        safe = False
        if isinstance(request_options, dict) \
                and 'safe' in request_options:
            safe = request_options.pop('safe')

        tmp_index_name = self._create_temporary_name()
        responses = MultipleResponse()
        response = yield from self.copy_to_async(  # type: ignore
            tmp_index_name, {'scope': ['settings', 'synonyms', 'rules']})
        responses.push(response)

        if safe:
            responses.wait()

        tmp_index = SearchIndexAsync(self._search_index,
                                     self._transporter_async, self._config,
                                     tmp_index_name)

        response = yield from tmp_index.save_objects_async(  # type: ignore
            objects, request_options)
        responses.push(response)

        if safe:
            responses.wait()

        response = yield from tmp_index.move_to_async(  # type: ignore
            self._name)
        responses.push(response)

        if safe:
            responses.wait()

        return responses
    def test_rules(self):
        responses = MultipleResponse()

        responses.push(self.index.save_objects([
            {"objectID": "iphone_7", "brand": "Apple", "model": "7"},
            {"objectID": "iphone_8", "brand": "Apple", "model": "8"},
            {"objectID": "iphone_x", "brand": "Apple", "model": "X"},
            {"objectID": "one_plus_one", "brand": "OnePlus",
             "model": "One"},
            {"objectID": "one_plus_two", "brand": "OnePlus",
             "model": "Two"},
        ]))

        responses.push(self.index.set_settings({
            'attributesForFaceting': ['brand']
        }))

        rule1 = {
            "objectID": "brand_automatic_faceting",
            "enabled": False,
            "condition": {"anchoring": "is", "pattern": "{facet:brand}"},
            "consequence": {
                "params": {
                    "automaticFacetFilters": [
                        {"facet": "brand", "disjunctive": True, "score": 42},
                    ]
                }
            },
            "validity": [
                {
                    "from": 1532439300,
                    "until": 1532525700
                },
                {
                    "from": 1532612100,
                    "until": 1532698500
                }
            ],
            "description": "Automatic apply the faceting on `brand` if a"
        }

        responses.push(self.index.save_rule(rule1))

        rule2 = {
            "objectID": "query_edits",
            "condition": {"anchoring": "is", "pattern": "mobile phone"},
            "consequence": {
                "params": {
                    "query": {
                        "edits": [
                            {"type": "remove", "delete": "mobile"},
                            {"type": "replace", "delete": "phone",
                             "insert": "iphone"},
                        ]
                    }
                }
            }
        }

        responses.push(self.index.save_rules([rule2]))

        responses.wait()

        self.assertEqual(self.index.get_rule(rule1['objectID']),
                         rule1)
        self.assertEqual(self.index.get_rule(rule2['objectID']),
                         rule2)

        self.assertEqual(self.index.search_rules('')['nbHits'], 2)

        # Browse all records with browse_rules
        results = []
        for obj in self.index.browse_rules():
            results.append(obj)

        rules = [
            rule1,
            rule2,
        ]

        for rule in rules:
            self.assertIn(rule, results)

        self.index.delete_rule(rule1['objectID']).wait()

        # Try to get the first rule with get_rule and check
        # that the rule does not exist anymore
        with self.assertRaises(RequestException) as _:
            self.index.get_rule(rule1['objectID'])

        # Clear all the rules using clear_rules
        self.index.clear_rules().wait()

        # Perform a rule search using search_rules with an empty query
        # and check that the number of returned nbHits is equal to 0
        self.assertEqual(self.index.search_rules('')['nbHits'], 0)
    def replace_all_objects(self, objects, request_options=None):
        # type: (Union[List[dict], Iterator[dict]], Optional[Union[dict, RequestOptions]]) -> MultipleResponse # noqa: E501

        safe = False
        if isinstance(request_options, dict) \
                and 'safe' in request_options:
            safe = request_options.pop('safe')

        tmp_index_name = self._create_temporary_name()
        responses = MultipleResponse()
        responses.push(self.copy_to(tmp_index_name, {
            'scope': ['settings', 'synonyms', 'rules']
        }))

        if safe:
            responses.wait()

        tmp_index = copy.copy(self)
        tmp_index._name = tmp_index_name

        responses.push(tmp_index.save_objects(objects, request_options))

        if safe:
            responses.wait()

        responses.push(tmp_index.move_to(self._name))

        if safe:
            responses.wait()

        return responses
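
The 'safe' flag popped from request_options above is what makes the call block after each step. A caller opting into that behavior would pass it like this (app ID, API key and index name are placeholders):

from algoliasearch.search_client import SearchClient

client = SearchClient.create('YOUR_APP_ID', 'YOUR_API_KEY')
index = client.init_index('articles')

# With safe=True the client waits for the copy, the batch save and the move,
# so the index is fully replaced by the time the call returns.
index.replace_all_objects(
    [{'objectID': 'one', 'name': 'Foo'}],
    {'safe': True},
)
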
Example #13
    def replace_all_objects(self, objects, request_options=None):
        # type: (Union[List[dict], Iterator[dict]], Optional[Union[dict, RequestOptions]]) -> MultipleResponse # noqa: E501

        safe = False
        if isinstance(request_options, dict) \
                and 'safe' in request_options:
            safe = request_options.pop('safe')

        tmp_index_name = self._create_temporary_name()
        responses = MultipleResponse()
        responses.push(self.copy_to(tmp_index_name, {
            'scope': ['settings', 'synonyms', 'rules']
        }))

        if safe:
            responses.wait()

        try:
            from algoliasearch.search_client import SearchClient
        except ImportError:  # Already imported.
            pass

        tmp_client = SearchClient(self._transporter, self._config)
        tmp_index = tmp_client.init_index(tmp_index_name)

        responses.push(tmp_index.save_objects(objects, request_options))

        if safe:
            responses.wait()

        responses.push(tmp_index.move_to(self._name))

        if safe:
            responses.wait()

        return responses
    def test_replacing(self):
        responses = MultipleResponse()
        responses.push(self.index.save_object({"objectID": "one"}))
        responses.push(self.index.save_rule(F.rule(object_id="one")))

        responses.push(self.index.save_synonym(
            {"objectID": "one", "type": "synonym", "synonyms": ["one", "two"]}
        ))

        responses.wait()

        responses.push(self.index.replace_all_objects([{"objectID": "two"}]))
        responses.push(self.index.replace_all_rules([{
            "objectID": "two",
            "condition": {"anchoring": "is", "pattern": "pattern"},
            "consequence": {
                "params": {
                    "query": {
                        "edits": [
                            {"type": "remove", "delete": "pattern"}
                        ]
                    }
                }
            }
        }
        ]))

        responses.push(self.index.replace_all_synonyms([
            {"objectID": "two", "type": "synonym", "synonyms": ["one", "two"]}
        ]))

        responses.wait()

        # Check that record with objectID=`one` does not exist
        with self.assertRaises(RequestException) as _:
            self.index.get_object('one')

        # Check that record with objectID=`two` does exist
        self.assertEqual(self.index.get_object('two')['objectID'], 'two')

        # Check that rule with objectID=`one` does not exist
        with self.assertRaises(RequestException) as _:
            self.index.get_rule('one')

        # Check that rule with objectID=`two` does exist
        self.assertEqual(self.index.get_rule('two')['objectID'], 'two')

        # Check that synonym with objectID=`one` does not exist
        with self.assertRaises(RequestException) as _:
            self.index.get_synonym('one')

        # Check that synonym with objectID="two" does exist using get_synonym
        self.assertEqual(self.index.get_synonym('two')['objectID'], 'two')
    def test_copy_move_index(self):
        objects = [
            {'objectID': 'one', 'company': 'apple'},
            {'objectID': 'two', 'company': 'algolia'}
        ]

        responses = MultipleResponse([
            self.index.save_objects(objects),
            self.index.set_settings({'attributesForFaceting': ['company']}),
            self.index.save_synonym({
                'objectID': 'google_placeholder',
                'type': 'placeholder',
                'placeholder': '<GOOG>',
                'replacements': ['Google', 'GOOG']
            }),

            self.index.save_rule({
                "objectID": "company_auto_faceting",
                "condition": {
                    "anchoring": "contains",
                    "pattern": "{facet:company}",
                },
                "consequence": {
                    "params": {"automaticFacetFilters": ["company"]}
                }
            })
        ]).wait()

        self.index2 = F.index('{}_settings'.format(self._testMethodName))
        responses.push(self.client.copy_settings(
            self.index.name, self.index2.name
        ))

        self.index3 = F.index('{}_rules'.format(self._testMethodName))
        responses.push(self.client.copy_rules(
            self.index.name, self.index3.name
        ))

        self.index4 = F.index('{}_synonyms'.format(self._testMethodName))
        responses.push(self.client.copy_synonyms(
            self.index.name, self.index4.name
        ))

        self.index5 = F.index('{}_full_copy'.format(self._testMethodName))
        responses.push(self.client.copy_index(
            self.index.name, self.index5.name
        ))

        responses.wait()

        self.assertEqual(
            self.index2.get_settings()['attributesForFaceting'], ['company']
        )

        self.index3.get_rule('company_auto_faceting')
        with self.assertRaises(RequestException) as cm:
            self.index3.get_synonym('google_placeholder')

        self.index4.get_synonym('google_placeholder')
        with self.assertRaises(RequestException) as cm:
            self.index4.get_rule('company_auto_faceting')

        self.index5.get_synonym('google_placeholder')
        self.index5.get_rule('company_auto_faceting')
        self.assertEqual(
            self.index5.get_settings()['attributesForFaceting'], ['company']
        )
        for obj in self.index5.browse_objects():
            self.assertIn(obj, objects)

        self.index6 = F.index('{}_after_move'.format(self._testMethodName))
        self.client.move_index(
            self.index.name,
            self.index6.name
        ).wait()

        self.index6.get_synonym('google_placeholder')
        self.index6.get_rule('company_auto_faceting')
        self.assertEqual(
            self.index6.get_settings()['attributesForFaceting'], ['company']
        )
        for obj in self.index6.browse_objects():
            self.assertIn(obj, objects)

        with self.assertRaises(RequestException) as cm:
            self.client.init_index(self.index.name).search('')

        self.assertEqual(cm.exception.status_code, 404)
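
copy_settings, copy_rules and copy_synonyms above each copy a single scope. If, as the naming suggests, they are thin wrappers over copy_index with a scope option (an assumption these snippets do not confirm), a scoped call might look like this, with client and the index names standing in for the objects used in the test:

# Assumption: copy_index accepts a request option restricting the copy scope.
client.copy_index(
    'source_index_name',
    'destination_index_name',
    {'scope': ['settings']}
).wait()
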
    def test_synonyms(self):
        responses = MultipleResponse()

        responses.push(self.index.save_objects([
            {"console": "Sony PlayStation <PLAYSTATIONVERSION>"},
            {"console": "Nintendo Switch"},
            {"console": "Nintendo Wii U"},
            {"console": "Nintendo Game Boy Advance"},
            {"console": "Microsoft Xbox"},
            {"console": "Microsoft Xbox 360"},
            {"console": "Microsoft Xbox One"}
        ], {'autoGenerateObjectIDIfNotExist': True}))

        responses.push(self.index.save_synonym(F.synonym({
            'synonyms': [
                "gba",
                "gameboy advance",
                "game boy advance"
            ]
        }, 'gba')))

        synonym1 = {
            'objectID': 'wii_to_wii_u',
            'type': 'onewaysynonym',
            'input': 'wii',
            'synonyms': ['wii U']
        }

        synonym2 = {
            'objectID': 'playstation_version_placeholder',
            'type': 'placeholder',
            'placeholder': '<PLAYSTATIONVERSION>',
            'replacements': [
                "1",
                "One",
                "2",
                "3",
                "4",
                "4 Pro",
            ]
        }

        synonym3 = {
            'objectID': 'ps4',
            'type': 'altcorrection1',
            'word': 'ps4',
            'corrections': ['playstation4']
        }

        synonym4 = {
            'objectID': 'psone',
            'type': 'altcorrection2',
            'word': 'psone',
            'corrections': ['playstationone']
        }

        responses.push(self.index.save_synonyms([
            synonym1,
            synonym2,
            synonym3,
            synonym4
        ]))

        responses.wait()

        self.assertEqual(self.index.get_synonym(synonym1['objectID']),
                         synonym1)
        self.assertEqual(self.index.get_synonym(synonym2['objectID']),
                         synonym2)
        self.assertEqual(self.index.get_synonym(synonym3['objectID']),
                         synonym3)
        self.assertEqual(self.index.get_synonym(synonym4['objectID']),
                         synonym4)

        self.assertEqual(self.index.search_synonyms('')['nbHits'], 5)

        # Browse all synonyms with browse_synonyms
        results = []
        for obj in self.index.browse_synonyms():
            results.append(obj)

        synonyms = [
            synonym1,
            synonym2,
            synonym3,
            synonym4
        ]

        for synonym in synonyms:
            self.assertIn(synonym, results)

        self.index.delete_synonym('gba').wait()

        # Try to get the synonym with get_synonym with objectID `gba` and check
        # that the synonym does not exist anymore
        with self.assertRaises(RequestException) as _:
            self.index.get_synonym('gba')

        # Clear all the synonyms using clear_synonyms
        self.index.clear_synonyms().wait()

        # Perform a synonym search using search_synonyms with an empty query
        # and check that the number of returned synonyms is equal to 0
        self.assertEqual(self.index.search_synonyms('')['nbHits'], 0)
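
F.synonym is a test fixture factory that is not defined in these snippets. Judging from its call sites, a hypothetical equivalent could look like this (the default synonyms list is arbitrary):

def synonym(attrs=None, object_id='one'):
    # type: (dict, str) -> dict
    """Hypothetical fixture: builds a regular two-way synonym record and
    merges in any extra attributes supplied by the test."""
    record = {
        'objectID': object_id,
        'type': 'synonym',
        'synonyms': ['one', 'two'],
    }
    record.update(attrs or {})
    return record
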
Example #17
    def replace_all_objects(self, objects, request_options=None):
        # type: (Union[List[dict], Iterator[dict]], Optional[Union[dict, RequestOptions]]) -> MultipleResponse # noqa: E501

        safe = False
        if isinstance(request_options, dict) \
                and 'safe' in request_options:
            safe = request_options.pop('safe')

        tmp_index_name = self._create_temporary_name()
        responses = MultipleResponse()
        responses.push(self.copy_to(tmp_index_name, {
            'scope': ['settings', 'synonyms', 'rules']
        }))

        if safe:
            responses.wait()

        tmp_index = copy.copy(self)
        tmp_index._name = tmp_index_name

        responses.push(tmp_index.save_objects(objects, request_options))

        if safe:
            responses.wait()

        responses.push(tmp_index.move_to(self._name))

        if safe:
            responses.wait()

        return responses
Example #18
    def test_search(self):
        responses = MultipleResponse()

        responses.push(
            self.index.save_objects(
                [
                    {
                        "company": "Algolia",
                        "name": "Julien Lemoine",
                        "objectID": "julien-lemoine",
                    },  # noqa: E501
                    {
                        "company": "Algolia",
                        "name": "Nicolas Dessaigne",
                        "objectID": "nicolas-dessaigne",
                    },  # noqa: E501
                    {
                        "company": "Amazon",
                        "name": "Jeff Bezos"
                    },
                    {
                        "company": "Apple",
                        "name": "Steve Jobs"
                    },
                    {
                        "company": "Apple",
                        "name": "Steve Wozniak"
                    },
                    {
                        "company": "Arista Networks",
                        "name": "Jayshree Ullal"
                    },
                    {
                        "company": "Google",
                        "name": "Larry Page"
                    },
                    {
                        "company": "Google",
                        "name": "Rob Pike"
                    },
                    {
                        "company": "Google",
                        "name": "Serguey Brin"
                    },
                    {
                        "company": "Microsoft",
                        "name": "Bill Gates"
                    },
                    {
                        "company": "SpaceX",
                        "name": "Elon Musk"
                    },
                    {
                        "company": "Tesla",
                        "name": "Elon Musk"
                    },
                    {
                        "company": "Yahoo",
                        "name": "Marissa Mayer"
                    },
                ],
                {"autoGenerateObjectIDIfNotExist": True},
            ))

        responses.push(
            self.index.set_settings(
                {"attributesForFaceting": ["searchable(company)"]}))

        responses.wait()

        # Perform a search query using search with the query `algolia` and no
        # parameter and check that the number of returned hits is equal to 2
        result = self.index.search("algolia")
        self.assertEqual(result["nbHits"], 2)
        self.assertEqual(
            SearchIndex.get_object_position(result, "nicolas-dessaigne"), 0)
        self.assertEqual(
            SearchIndex.get_object_position(result, "julien-lemoine"), 1)

        self.assertEqual(SearchIndex.get_object_position(result, ""), -1)

        # Call find_object with the following parameters and check that
        # no object is found
        with self.assertRaises(ObjectNotFoundException):
            self.index.find_object(lambda _: False)

        # Call find_object with the following parameters and check that
        # the first object is returned with a `position=0` and `page=0`
        found = self.index.find_object(lambda _: True)
        self.assertEqual(found["position"], 0)
        self.assertEqual(found["page"], 0)

        def callback(obj):
            # type: (dict) -> bool
            return obj.get("company") == "Apple"

        # Call find_object with the following parameters and check that
        # no object is found
        with self.assertRaises(ObjectNotFoundException):
            self.index.find_object(callback, {"query": "algolia"})

        # Call find_object with the following parameters and check that
        # no object is found
        with self.assertRaises(ObjectNotFoundException):
            self.index.find_object(callback, {
                "query": "",
                "paginate": False,
                "hitsPerPage": 5
            })

        # Call find_object with the following parameters and check that
        # the first object is returned with a `position=0` and `page=2`
        found = self.index.find_object(callback, {
            "query": "",
            "paginate": True,
            "hitsPerPage": 5
        })

        self.assertEqual(found["position"], 0)
        self.assertEqual(found["page"], 2)

        # Perform a search using search with the query `elon` and the
        # following parameter and check that the queryID field from
        # the response is not empty
        result = self.index.search("elon", {"clickAnalytics": True})
        self.assertIn("queryID", result)

        # Perform a faceted search using search with the query `elon` and the
        # following parameters and check that the number of returned hits is
        # equal to 1
        result = self.index.search("elon", {
            "facets": "*",
            "facetFilters": "company:tesla"
        })
        self.assertEqual(result["nbHits"], 1)

        # Perform a filtered search using search with the query `elon` and the
        # following parameters and check that the number of returned hits is
        # equal to 2
        result = self.index.search(
            "elon", {
                "facets": "*",
                "filters": "(company:tesla OR company:spacex)"
            })

        self.assertEqual(result["nbHits"], 2)

        result = self.index.search_for_facet_values("company",
                                                    "a")["facetHits"]

        values = list(map(lambda facet: facet["value"], result))

        self.assertIn("Algolia", values)
        self.assertIn("Amazon", values)
        self.assertIn("Apple", values)
        self.assertIn("Arista Networks", values)
Example #19
    def test_synonyms(self):
        responses = MultipleResponse()

        responses.push(self.index.save_objects([
            {"console": "Sony PlayStation <PLAYSTATIONVERSION>"},
            {"console": "Nintendo Switch"},
            {"console": "Nintendo Wii U"},
            {"console": "Nintendo Game Boy Advance"},
            {"console": "Microsoft Xbox"},
            {"console": "Microsoft Xbox 360"},
            {"console": "Microsoft Xbox One"}
        ], {'autoGenerateObjectIDIfNotExist': True}))

        responses.push(self.index.save_synonym(F.synonym({
            'synonyms': [
                "gba",
                "gameboy advance",
                "game boy advance"
            ]
        }, 'gba')))

        synonym1 = {
            'objectID': 'wii_to_wii_u',
            'type': 'onewaysynonym',
            'input': 'wii',
            'synonyms': ['wii U']
        }

        synonym2 = {
            'objectID': 'playstation_version_placeholder',
            'type': 'placeholder',
            'placeholder': '<PLAYSTATIONVERSION>',
            'replacements': [
                "1",
                "One",
                "2",
                "3",
                "4",
                "4 Pro",
            ]
        }

        synonym3 = {
            'objectID': 'ps4',
            'type': 'altcorrection1',
            'word': 'ps4',
            'corrections': ['playstation4']
        }

        synonym4 = {
            'objectID': 'psone',
            'type': 'altcorrection2',
            'word': 'psone',
            'corrections': ['playstationone']
        }

        responses.push(self.index.save_synonyms([
            synonym1,
            synonym2,
            synonym3,
            synonym4
        ]))

        responses.wait()

        self.assertEqual(self.index.get_synonym(synonym1['objectID']),
                         synonym1)
        self.assertEqual(self.index.get_synonym(synonym2['objectID']),
                         synonym2)
        self.assertEqual(self.index.get_synonym(synonym3['objectID']),
                         synonym3)
        self.assertEqual(self.index.get_synonym(synonym4['objectID']),
                         synonym4)

        self.assertEqual(self.index.search_synonyms('')['nbHits'], 5)

        # Browse all synonyms with browse_synonyms
        results = []
        for obj in self.index.browse_synonyms():
            results.append(obj)

        synonyms = [
            synonym1,
            synonym2,
            synonym3,
            synonym4
        ]

        for synonym in synonyms:
            self.assertIn(synonym, results)

        self.index.delete_synonym('gba').wait()

        # Try to get the synonym with get_synonym with objectID `gba` and check
        # that the synonym does not exist anymore
        with self.assertRaises(RequestException) as _:
            self.index.get_synonym('gba')

        # Clear all the synonyms using clear_synonyms
        self.index.clear_synonyms().wait()

        # Perform a synonym search using search_synonyms with an empty query
        # and check that the number of returned synonyms is equal to 0
        self.assertEqual(self.index.search_synonyms('')['nbHits'], 0)
Example #20
    def test_search(self):
        responses = MultipleResponse()

        responses.push(self.index.save_objects([
            {"company": "Algolia", "name": "Julien Lemoine",
             "objectID": "julien-lemoine"},  # noqa: E501
            {"company": "Algolia", "name": "Nicolas Dessaigne",
             "objectID": "nicolas-dessaigne"},  # noqa: E501
            {"company": "Amazon", "name": "Jeff Bezos"},
            {"company": "Apple", "name": "Steve Jobs"},
            {"company": "Apple", "name": "Steve Wozniak"},
            {"company": "Arista Networks", "name": "Jayshree Ullal"},
            {"company": "Google", "name": "Larry Page"},
            {"company": "Google", "name": "Rob Pike"},
            {"company": "Google", "name": "Serguey Brin"},
            {"company": "Microsoft", "name": "Bill Gates"},
            {"company": "SpaceX", "name": "Elon Musk"},
            {"company": "Tesla", "name": "Elon Musk"},
            {"company": "Yahoo", "name": "Marissa Mayer"}
        ], {'autoGenerateObjectIDIfNotExist': True}))

        responses.push(self.index.set_settings({
            'attributesForFaceting': ["searchable(company)"]
        }))

        responses.wait()

        # Perform a search query using search with the query `algolia` and no
        # parameter and check that the number of returned hits is equal to 2
        result = self.index.search('algolia')
        self.assertEqual(result['nbHits'], 2)
        self.assertEqual(SearchIndex.get_object_position(
            result, 'nicolas-dessaigne'), 0
        )
        self.assertEqual(SearchIndex.get_object_position(
            result, 'julien-lemoine'), 1
        )

        self.assertEqual(SearchIndex.get_object_position(result, ''), -1)

        # Call find_object with the following parameters and check that
        # no object is found
        with self.assertRaises(ObjectNotFoundException):
            self.index.find_object(lambda _: False)

        # Call find_object with the following parameters and check that
        # the first object is returned with a `position=0` and `page=0`
        found = self.index.find_object(lambda _: True)
        self.assertEqual(found['position'], 0)
        self.assertEqual(found['page'], 0)

        def callback(obj):
            # type: (dict) -> bool
            return obj.get('company') == 'Apple'

        # Call find_object with the following parameters and check that
        # no object is found
        with self.assertRaises(ObjectNotFoundException):
            self.index.find_object(callback, {
                'query': 'algolia'
            })

        # Call find_object with the following parameters and check that
        # no object is found
        with self.assertRaises(ObjectNotFoundException):
            self.index.find_object(callback, {
                'query': '',
                'paginate': False,
                'hitsPerPage': 5
            })

        # Call find_object with the following parameters and check that
        # the first object is returned with a `position=0` and `page=2`
        found = self.index.find_object(callback, {
            'query': '',
            'paginate': True,
            'hitsPerPage': 5
        })

        self.assertEqual(found['position'], 0)
        self.assertEqual(found['page'], 2)

        # Perform a search using search with the query `elon` and the
        # following parameter and check that the queryID field from
        # the response is not empty
        result = self.index.search('elon', {'clickAnalytics': True})
        self.assertIn('queryID', result)

        # Perform a faceted search using search with the query `elon` and the
        # following parameters and check that the number of returned hits is
        # equal to 1
        result = self.index.search('elon', {
            'facets': '*',
            'facetFilters': 'company:tesla'
        })
        self.assertEqual(result['nbHits'], 1)

        # Perform a filtered search using search with the query `elon` and the
        # following parameters and check that the number of returned hits is
        # equal to 2
        result = self.index.search('elon', {
            'facets': '*',
            'filters': '(company:tesla OR company:spacex)'
        })

        self.assertEqual(result['nbHits'], 2)

        result = self.index.search_for_facet_values('company', 'a')[
            'facetHits']

        values = list(
            map(lambda facet: facet['value'], result))

        self.assertIn('Algolia', values)
        self.assertIn('Amazon', values)
        self.assertIn('Apple', values)
        self.assertIn('Arista Networks', values)
Example #21
    def test_synonyms(self):
        responses = MultipleResponse()

        responses.push(
            self.index.save_objects(
                [
                    {
                        "console": "Sony PlayStation <PLAYSTATIONVERSION>"
                    },
                    {
                        "console": "Nintendo Switch"
                    },
                    {
                        "console": "Nintendo Wii U"
                    },
                    {
                        "console": "Nintendo Game Boy Advance"
                    },
                    {
                        "console": "Microsoft Xbox"
                    },
                    {
                        "console": "Microsoft Xbox 360"
                    },
                    {
                        "console": "Microsoft Xbox One"
                    },
                ],
                {"autoGenerateObjectIDIfNotExist": True},
            ))

        responses.push(
            self.index.save_synonym(
                F.synonym(
                    {
                        "synonyms":
                        ["gba", "gameboy advance", "game boy advance"]
                    }, "gba")))

        synonym1 = {
            "objectID": "wii_to_wii_u",
            "type": "onewaysynonym",
            "input": "wii",
            "synonyms": ["wii U"],
        }

        synonym2 = {
            "objectID": "playstation_version_placeholder",
            "type": "placeholder",
            "placeholder": "<PLAYSTATIONVERSION>",
            "replacements": ["1", "One", "2", "3", "4", "4 Pro"],
        }

        synonym3 = {
            "objectID": "ps4",
            "type": "altcorrection1",
            "word": "ps4",
            "corrections": ["playstation4"],
        }

        synonym4 = {
            "objectID": "psone",
            "type": "altcorrection2",
            "word": "psone",
            "corrections": ["playstationone"],
        }

        responses.push(
            self.index.save_synonyms([synonym1, synonym2, synonym3, synonym4]))

        responses.wait()

        self.assertEqual(self.index.get_synonym(synonym1["objectID"]),
                         synonym1)
        self.assertEqual(self.index.get_synonym(synonym2["objectID"]),
                         synonym2)
        self.assertEqual(self.index.get_synonym(synonym3["objectID"]),
                         synonym3)
        self.assertEqual(self.index.get_synonym(synonym4["objectID"]),
                         synonym4)

        self.assertEqual(self.index.search_synonyms("")["nbHits"], 5)

        # Browse all synonyms with browse_synonyms
        results = []
        for obj in self.index.browse_synonyms():
            results.append(obj)

        synonyms = [synonym1, synonym2, synonym3, synonym4]

        for synonym in synonyms:
            self.assertIn(synonym, results)

        self.index.delete_synonym("gba").wait()

        # Try to get the synonym with get_synonym with objectID `gba` and check
        # that the synonym does not exist anymore
        with self.assertRaises(RequestException) as _:
            self.index.get_synonym("gba")

        # Clear all the synonyms using clear_synonyms
        self.index.clear_synonyms().wait()

        # Perform a synonym search using search_synonyms with an empty query
        # and check that the number of returned synonyms is equal to 0
        self.assertEqual(self.index.search_synonyms("")["nbHits"], 0)
Example #22
    def replace_all_objects_async(  # type: ignore
            self,
            objects,
            request_options=None,
    ):
        # type: (Union[List[dict], Iterator[dict]], Optional[Union[dict, RequestOptions]]) -> MultipleResponse # noqa: E501

        safe = False
        if isinstance(request_options, dict) and "safe" in request_options:
            safe = request_options.pop("safe")

        tmp_index_name = self._create_temporary_name()
        responses = MultipleResponse()
        response = yield from self.copy_to_async(  # type: ignore
            tmp_index_name, {"scope": ["settings", "synonyms", "rules"]})
        responses.push(response)

        if safe:
            responses.wait()

        tmp_client = SearchClient(self._transporter, self._config)
        tmp_index = SearchIndexAsync(
            tmp_client.init_index(tmp_index_name),
            self._transporter_async,
            self._config,
            tmp_index_name,
        )

        response = yield from tmp_index.save_objects_async(  # type: ignore
            objects, request_options)
        responses.push(response)

        if safe:
            responses.wait()

        response = yield from tmp_index.move_to_async(  # type: ignore
            self._name)
        responses.push(response)

        if safe:
            responses.wait()

        return responses
    def replace_all_objects_async(self, objects,  # type: ignore
                                  request_options=None):
        # type: (Union[List[dict], Iterator[dict]], Optional[Union[dict, RequestOptions]]) -> MultipleResponse # noqa: E501

        safe = False
        if isinstance(request_options, dict) \
                and 'safe' in request_options:
            safe = request_options.pop('safe')

        tmp_index_name = self._create_temporary_name()
        responses = MultipleResponse()
        response = yield from self.copy_to_async(  # type: ignore
            tmp_index_name,
            {
                'scope': ['settings',
                          'synonyms',
                          'rules']
            })
        responses.push(response)

        if safe:
            responses.wait()

        tmp_index = SearchIndexAsync(
            self._search_index,
            self._transporter_async,
            self._config,
            tmp_index_name
        )

        response = yield from tmp_index.save_objects_async(  # type: ignore
            objects,
            request_options
        )
        responses.push(response)

        if safe:
            responses.wait()

        response = yield from tmp_index.move_to_async(  # type: ignore
            self._name)
        responses.push(response)

        if safe:
            responses.wait()

        return responses
Example #24
    def test_indexing(self):
        responses = []

        # adding an object with an object ID
        obj1 = F.obj()
        responses.append(self.index.save_object(obj1))

        # adding an object w/o an object ID
        obj2 = F.obj(object_id=False)
        opts = {'autoGenerateObjectIDIfNotExist': True}
        responses.append(self.index.save_object(obj2, opts))

        # adding two objects with object id
        obj3 = F.obj({'_tags': ['algolia']})
        obj4 = F.obj({'_tags': ['algolia']})
        responses.append(self.index.save_objects([obj3, obj4]))

        # adding two objects w/o object id
        obj5 = F.obj(object_id=False)
        obj6 = F.obj(object_id=False)
        opts = {'autoGenerateObjectIDIfNotExist': True}
        responses.append(self.index.save_objects([obj5, obj6], opts))

        object1_id = self.get_object_id(responses[0])
        object2_id = self.get_object_id(responses[1])
        object3_id = self.get_object_id(responses[2])
        object4_id = self.get_object_id(responses[2], 1)
        object5_id = self.get_object_id(responses[3])
        object6_id = self.get_object_id(responses[3], 1)

        # adding 1000 objects with object id
        objects = []
        for i in range(1000):
            object_id = i
            objects.append({
                'objectID': str(object_id),
                'name': object_id,
            })

        self.index._config.batch_size = 100
        responses.append(self.index.save_objects(objects))

        # waiting for all responses
        MultipleResponse(responses).wait()

        # Check the first 6 records with get_object
        self.assertEqual(obj1['name'],
                         self.index.get_object(object1_id)['name'])
        self.assertEqual(obj2['name'],
                         self.index.get_object(object2_id)['name'])
        self.assertEqual(obj3['name'],
                         self.index.get_object(object3_id)['name'])
        self.assertEqual(obj4['name'],
                         self.index.get_object(object4_id)['name'])
        self.assertEqual(obj5['name'],
                         self.index.get_object(object5_id)['name'])
        self.assertEqual(obj6['name'],
                         self.index.get_object(object6_id)['name'])

        # Check 1000 remaining records with get_objects
        results = self.index.get_objects(range(1000))['results']
        for obj in results:
            self.assertIn(obj, objects)

        self.assertEqual(len(results), len(objects))

        # Browse all records with browse_objects
        results = []
        for obj in self.index.browse_objects():
            results.append(obj)

        for obj in objects:
            self.assertIn(obj, results)

        for obj in [obj1, obj3, obj4]:
            self.assertIn(obj, results)

        self.assertEqual(len(results), 1006)

        responses = []

        # Alter 1 record with partial_update_object
        obj1['name'] = 'This is an altered name'
        responses.append(self.index.partial_update_object(obj1))

        # Alter 2 records with partial_update_objects
        obj3['bar'] = 40
        obj4['foo'] = 30
        responses.append(self.index.partial_update_objects([obj3, obj4]))

        MultipleResponse(responses).wait()

        self.assertEqual(self.index.get_object(object1_id), obj1)
        self.assertEqual(self.index.get_object(object3_id), obj3)
        self.assertEqual(self.index.get_object(object4_id), obj4)

        responses = []

        # Delete the first 6 records with delete_object, delete_by and delete_objects
        responses.append(self.index.delete_object(object1_id))
        responses.append(self.index.delete_object(object2_id))

        responses.append(self.index.delete_by({'tagFilters': ['algolia']}))
        responses.append(self.index.delete_objects([
            object5_id, object6_id
        ]))

        # Delete the 1000 remaining records with clear_objects
        responses.append(self.index.clear_objects())

        MultipleResponse(responses).wait()

        objects = [obj for obj in self.index.browse_objects()]

        self.assertEqual(len(objects), 0)
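
get_object_id is a helper on the test class that is not shown here. A hypothetical version, assuming the indexing response exposes its raw API payloads as raw_responses (an assumption), would read the generated IDs off the first payload:

def get_object_id(indexing_response, index=0):
    # Assumption: each raw response carries the 'objectIDs' list returned
    # by the Algolia batch endpoint.
    return indexing_response.raw_responses[0]['objectIDs'][index]
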
    def test_search(self):
        responses = MultipleResponse()

        responses.push(
            self.index.save_objects([{
                "company": "Algolia",
                "name": "Julien Lemoine"
            }, {
                "company": "Algolia",
                "name": "Nicolas Dessaigne"
            }, {
                "company": "Amazon",
                "name": "Jeff Bezos"
            }, {
                "company": "Apple",
                "name": "Steve Jobs"
            }, {
                "company": "Apple",
                "name": "Steve Wozniak"
            }, {
                "company": "Arista Networks",
                "name": "Jayshree Ullal"
            }, {
                "company": "Google",
                "name": "Larry Page"
            }, {
                "company": "Google",
                "name": "Rob Pike"
            }, {
                "company": "Google",
                "name": "Serguey Brin"
            }, {
                "company": "Microsoft",
                "name": "Bill Gates"
            }, {
                "company": "SpaceX",
                "name": "Elon Musk"
            }, {
                "company": "Tesla",
                "name": "Elon Musk"
            }, {
                "company": "Yahoo",
                "name": "Marissa Mayer"
            }], {'autoGenerateObjectIDIfNotExist': True}))

        responses.push(
            self.index.set_settings(
                {'attributesForFaceting': ["searchable(company)"]}))

        responses.wait()

        # Perform a search query using search with the query `algolia` and no
        # parameter and check that the number of returned hits is equal to 2
        result = self.index.search('algolia')
        self.assertEqual(result['nbHits'], 2)

        # Perform a search using search with the query `elon` and the
        # following parameter and check that the queryID field from
        # the response is not empty
        result = self.index.search('elon', {'clickAnalytics': True})
        self.assertIn('queryID', result)

        # Perform a faceted search using search with the query `elon` and the
        # following parameters and check that the number of returned hits is
        # equal to 1
        result = self.index.search('elon', {
            'facets': '*',
            'facetFilters': 'company:tesla'
        })
        self.assertEqual(result['nbHits'], 1)

        # Perform a filtered search using search with the query `elon` and the
        # following parameters and check that the number of returned hits is
        # equal to 2
        result = self.index.search(
            'elon', {
                'facets': '*',
                'filters': '(company:tesla OR company:spacex)'
            })

        self.assertEqual(result['nbHits'], 2)

        result = self.index.search_for_facet_values('company',
                                                    'a')['facetHits']
        values = list(map(lambda facet: facet['value'], result))

        self.assertIn('Algolia', values)
        self.assertIn('Amazon', values)
        self.assertIn('Apple', values)
        self.assertIn('Arista Networks', values)
Example #26
    def test_rules(self):
        responses = MultipleResponse()

        responses.push(self.index.save_objects([
            {"objectID": "iphone_7", "brand": "Apple", "model": "7"},
            {"objectID": "iphone_8", "brand": "Apple", "model": "8"},
            {"objectID": "iphone_x", "brand": "Apple", "model": "X"},
            {"objectID": "one_plus_one", "brand": "OnePlus",
             "model": "One"},
            {"objectID": "one_plus_two", "brand": "OnePlus",
             "model": "Two"},
        ]))

        responses.push(self.index.set_settings({
            'attributesForFaceting': ['brand', 'model']
        }))

        rule1 = {
            "objectID": "brand_automatic_faceting",
            "enabled": False,
            "condition": {"anchoring": "is", "pattern": "{facet:brand}"},
            "consequence": {
                "params": {
                    "automaticFacetFilters": [
                        {"facet": "brand", "disjunctive": True, "score": 42},
                    ]
                }
            },
            "validity": [
                {
                    "from": 1532439300,
                    "until": 1532525700
                },
                {
                    "from": 1532612100,
                    "until": 1532698500
                }
            ],
            "description": "Automatic apply the faceting on `brand` if a"
        }

        responses.push(self.index.save_rule(rule1))

        rule2 = {
            "objectID": "query_edits",
            "condition": {
                "anchoring": "is",
                "pattern": "mobile phone",
                "alternatives": True
            },
            "consequence": {
                "params": {
                    "query": {
                        "edits": [
                            {"type": "remove", "delete": "mobile"},
                            {"type": "replace", "delete": "phone",
                             "insert": "iphone"},
                        ]
                    }
                }
            }
        }

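        # Rule with no condition, so it adds the `brand:OnePlus` filter to
        # every search on the index.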
        rule3 = {
            "objectID": "query_promo",
            "consequence": {
              "params": {
                "filters": "brand:OnePlus"
              }
            }
        }

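        # Rule bound to the `summer` context: it only fires when the search
        # passes ruleContexts=['summer'].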
        rule4 = {
            "objectID": "query_promo_only_summer",
            "condition": {
                "context": "summer"
            },
            "consequence": {
              "params": {
                "filters": "model:One"
              }
            }
        }

        responses.push(self.index.save_rules([rule2, rule3, rule4]))

        responses.wait()

        # Only the OnePlus model One should match when searching with the
        # `summer` rule context
        self.assertEqual(self.index.search('', {
            'ruleContexts': ['summer']
        })['nbHits'], 1)

        self.assertEqual(self.index.get_rule(rule1['objectID']),
                         rule1)
        self.assertEqual(self.index.get_rule(rule2['objectID']),
                         rule2)

        self.assertEqual(self.index.get_rule(rule3['objectID']),
                         rule3)

        self.assertEqual(self.index.get_rule(rule4['objectID']),
                         rule4)
        
        self.assertEqual(self.index.search_rules('')['nbHits'], 4)

        # Browse all records with browse_rules
        results = []
        for obj in self.index.browse_rules():
            results.append(obj)

        rules = [
            rule1,
            rule2,
            rule3,
            rule4
        ]

        for rule in rules:
            self.assertIn(rule, results)

        self.index.delete_rule(rule1['objectID']).wait()

        # Try to get the first rule with get_rule and check
        # that the rule does not exist anymore
        with self.assertRaises(RequestException) as _:
            self.index.get_rule(rule1['objectID'])

        # Clear all the rules using clear_rules
        self.index.clear_rules().wait()

        # Perform a rule search using search_rules with an empty query
        # and check that the returned nbHits is equal to 0
        self.assertEqual(self.index.search_rules('')['nbHits'], 0)
Example #27
    def test_copy_move_index(self):
        objects = [
            {"objectID": "one", "company": "apple"},
            {"objectID": "two", "company": "algolia"},
        ]

        responses = MultipleResponse(
            [
                self.index.save_objects(objects),
                self.index.set_settings({"attributesForFaceting": ["company"]}),
                self.index.save_synonym(
                    {
                        "objectID": "google_placeholder",
                        "type": "placeholder",
                        "placeholder": "<GOOG>",
                        "replacements": ["Google", "GOOG"],
                    }
                ),
                self.index.save_rule(
                    {
                        "objectID": "company_auto_faceting",
                        "condition": {
                            "anchoring": "contains",
                            "pattern": "{facet:company}",
                        },
                        "consequence": {
                            "params": {"automaticFacetFilters": ["company"]}
                        },
                    }
                ),
            ]
        ).wait()

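        # Copy the settings, rules, and synonyms each into a dedicated index,
        # then perform a full copy of the source index into a fifth one.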
        index2 = F.index(
            self.client, "{}_settings".format(self._testMethodName)
        )  # noqa: E501
        responses.push(self.client.copy_settings(self.index.name, index2.name))

        index3 = F.index(
            self.client, "{}_rules".format(self._testMethodName)
        )  # noqa: E501
        responses.push(self.client.copy_rules(self.index.name, index3.name))

        index4 = F.index(
            self.client, "{}_synonyms".format(self._testMethodName)
        )  # noqa: E501
        responses.push(self.client.copy_synonyms(self.index.name, index4.name))

        index5 = F.index(
            self.client, "{}_full_copy".format(self._testMethodName)
        )  # noqa: E501
        responses.push(self.client.copy_index(self.index.name, index5.name))

        responses.wait()

        self.assertEqual(index2.get_settings()["attributesForFaceting"], ["company"])

        index3.get_rule("company_auto_faceting")
        with self.assertRaises(RequestException) as cm:
            index3.get_synonym("google_placeholder")

        index4.get_synonym("google_placeholder")
        with self.assertRaises(RequestException) as cm:
            index4.get_rule("company_auto_faceting")

        index5.get_synonym("google_placeholder")
        index5.get_rule("company_auto_faceting")
        self.assertEqual(index5.get_settings()["attributesForFaceting"], ["company"])
        for obj in index5.browse_objects():
            self.assertIn(obj, objects)

        index6 = F.index(
            self.client, "{}_after_move".format(self._testMethodName)
        )  # noqa: E501
        self.client.move_index(self.index.name, index6.name).wait()

        index6.get_synonym("google_placeholder")
        index6.get_rule("company_auto_faceting")
        self.assertEqual(index6.get_settings()["attributesForFaceting"], ["company"])
        for obj in index6.browse_objects():
            self.assertIn(obj, objects)

        with self.assertRaises(RequestException) as cm:
            self.client.init_index(self.index.name).search("")

        self.assertEqual(cm.exception.status_code, 404)
Example #28
    def test_replacing(self):
        responses = MultipleResponse()
        responses.push(self.index.save_object({"objectID": "one"}))
        responses.push(self.index.save_rule(F.rule(object_id="one")))

        responses.push(self.index.save_synonym(
            {"objectID": "one", "type": "synonym", "synonyms": ["one", "two"]}
        ))

        responses.wait()

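        # Replace every existing object, rule, and synonym with new entries
        # whose objectID is `two`.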
        responses.push(self.index.replace_all_objects([{"objectID": "two"}]))
        responses.push(self.index.replace_all_rules([{
            "objectID": "two",
            "condition": {"anchoring": "is", "pattern": "pattern"},
            "consequence": {
                "params": {
                    "query": {
                        "edits": [
                            {"type": "remove", "delete": "pattern"}
                        ]
                    }
                }
            }
        }
        ]))

        responses.push(self.index.replace_all_synonyms([
            {"objectID": "two", "type": "synonym", "synonyms": ["one", "two"]}
        ]))

        responses.wait()

        # Check that record with objectID=`one` does not exist
        with self.assertRaises(RequestException) as _:
            self.index.get_object('one')

        # Check that record with objectID=`two` does exist
        self.assertEqual(self.index.get_object('two')['objectID'], 'two')

        # Check that rule with objectID=`one` does not exist
        with self.assertRaises(RequestException) as _:
            self.index.get_rule('one')

        # Check that rule with objectID=`two` does exist
        self.assertEqual(self.index.get_rule('two')['objectID'], 'two')

        # Check that synonym with objectID=`one` does not exist
        with self.assertRaises(RequestException) as _:
            self.index.get_synonym('one')

        # Check that synonym with objectID="two" does exist using getSynonym
        self.assertEqual(self.index.get_synonym('two')['objectID'], 'two')
Example #29
async def ingest_ltd_lander_jsonld_document(
    *,
    app: web.Application,
    logger: BoundLogger,
    url_ingest_message: Dict[str, Any],
) -> None:
    """Run the Algolia ingest of a LTD_LANDER_JSONLD content type.

    Parameters
    ----------
    app : `aiohttp.web.Application`
        The aiohttp application, which provides the shared HTTP session,
        configuration, and Algolia search client.
    logger : `BoundLogger`
        A structlog logger that is bound with context about the Kafka
        message.
    url_ingest_message : `dict`
        The deserialized value of the Kafka message.
    """
    logger = logger.bind(
        content_url=url_ingest_message["url"],
        content_type=url_ingest_message["content_type"],
    )
    logger.info("Starting LTD_LANDER_JSONLD ingest")

    http_session = app["safir/http_session"]

    edition_data = await get_json_data(
        url=url_ingest_message["edition"]["url"],
        logger=logger,
        http_session=http_session,
    )

    published_url = edition_data["published_url"]
    jsonld_name = "metadata.jsonld"
    if published_url.endswith("/"):
        jsonld_url = f"{published_url}{jsonld_name}"
    else:
        jsonld_url = f"{published_url}/{jsonld_name}"

    try:
        metadata = await get_json_data(
            url=jsonld_url,
            logger=logger,
            http_session=http_session,
            # Bypass aiohttp's content type check and force UTF-8 decoding;
            # the jsonld files are not served with correct CONTENT-TYPE
            # headers.
            encoding="utf-8",
            content_type=None,
        )
    except Exception:
        logger.exception("Failure getting metadata.jsonld",
                         jsonld_url=jsonld_url)
        raise

    try:
        reduced_document = ReducedLtdLanderDocument(url=published_url,
                                                    metadata=metadata,
                                                    logger=logger)
    except Exception:
        logger.exception("Failed to build record")
        raise

    surrogate_key = generate_surrogate_key()

    logger.debug("Reduced LTD Lander Document",
                 chunks=len(reduced_document.chunks))

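    # Build one Algolia record per content chunk, plus one extra record for
    # the document-level description.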
    try:
        records = [
            create_record(
                chunk=s,
                document=reduced_document,
                surrogate_key=surrogate_key,
            ) for s in reduced_document.chunks
        ]

        description_chunk = ContentChunk(
            headers=[reduced_document.h1],
            content=reduced_document.description,
        )
        records.append(
            create_record(
                chunk=description_chunk,
                document=reduced_document,
                surrogate_key=surrogate_key,
            ))
    except Exception:
        logger.exception("Failed to build records")
        raise

    logger.info("Finished building records")

    if app["ook/algolia_search"] is not None:
        try:
            client = app["ook/algolia_search"]
            index = client.init_index(
                app["safir/config"].algolia_document_index_name)
        except Exception:
            logger.exception(
                "Error initializing Algolia index",
                index_name=app["safir/config"].algolia_document_index_name,
            )
            raise

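        # Save every record concurrently with the async client, then wait
        # until the resulting Algolia indexing tasks have completed.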
        tasks = [index.save_object_async(record) for record in records]
        try:
            results = await asyncio.gather(*tasks)
            MultipleResponse(results).wait()
        except Exception:
            logger.error("Got algoliasearch request error")
            for record in records:
                logger.debug(json.dumps(record, indent=2, sort_keys=True))

        logger.info("Finished uploading to Algolia")

        await delete_old_records(
            index=index,
            base_url=records[0]["baseUrl"],
            surrogate_key=surrogate_key,
            logger=logger,
        )
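
# A minimal invocation sketch (not part of the source above). It assumes a
# hypothetical aiohttp app already populated with the "safir/http_session",
# "safir/config", and "ook/algolia_search" keys that the coroutine reads; the
# URLs and logger name are made up, and only the message fields the coroutine
# actually accesses are shown.
import structlog
from aiohttp import web


async def run_example_ingest(app: web.Application) -> None:
    # Shape of the deserialized Kafka message the coroutine expects.
    url_ingest_message = {
        "url": "https://example.org/ingest/example-doc",  # hypothetical
        "content_type": "LTD_LANDER_JSONLD",
        "edition": {"url": "https://example.org/editions/1"},  # hypothetical
    }
    await ingest_ltd_lander_jsonld_document(
        app=app,
        logger=structlog.get_logger("example"),
        url_ingest_message=url_ingest_message,
    )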
Example #30
    def test_copy_move_index(self):
        objects = [{
            'objectID': 'one',
            'company': 'apple'
        }, {
            'objectID': 'two',
            'company': 'algolia'
        }]

        responses = MultipleResponse([
            self.index.save_objects(objects),
            self.index.set_settings({'attributesForFaceting': ['company']}),
            self.index.save_synonym({
                'objectID': 'google_placeholder',
                'type': 'placeholder',
                'placeholder': '<GOOG>',
                'replacements': ['Google', 'GOOG']
            }),
            self.index.save_rule({
                "objectID": "company_auto_faceting",
                "condition": {
                    "anchoring": "contains",
                    "pattern": "{facet:company}",
                },
                "consequence": {
                    "params": {
                        "automaticFacetFilters": ["company"]
                    }
                }
            })
        ]).wait()

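        # Copy only the settings, only the rules, and only the synonyms into
        # separate indices, then do a full copy into a fifth index.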
        self.index2 = F.index('{}_settings'.format(self._testMethodName))
        responses.push(
            self.client.copy_settings(self.index.name, self.index2.name))

        self.index3 = F.index('{}_rules'.format(self._testMethodName))
        responses.push(
            self.client.copy_rules(self.index.name, self.index3.name))

        self.index4 = F.index('{}_synonyms'.format(self._testMethodName))
        responses.push(
            self.client.copy_synonyms(self.index.name, self.index4.name))

        self.index5 = F.index('{}_full_copy'.format(self._testMethodName))
        responses.push(
            self.client.copy_index(self.index.name, self.index5.name))

        responses.wait()

        self.assertEqual(self.index2.get_settings()['attributesForFaceting'],
                         ['company'])

        self.index3.get_rule('company_auto_faceting')
        with self.assertRaises(RequestException) as cm:
            self.index3.get_synonym('google_placeholder')

        self.index4.get_synonym('google_placeholder')
        with self.assertRaises(RequestException) as cm:
            self.index4.get_rule('company_auto_faceting')

        self.index5.get_synonym('google_placeholder')
        self.index5.get_rule('company_auto_faceting')
        self.assertEqual(self.index5.get_settings()['attributesForFaceting'],
                         ['company'])
        for obj in self.index5.browse_objects():
            self.assertIn(obj, objects)

        self.index6 = F.index('{}_after_move'.format(self._testMethodName))
        self.client.move_index(self.index.name, self.index6.name).wait()

        self.index6.get_synonym('google_placeholder')
        self.index6.get_rule('company_auto_faceting')
        self.assertEqual(self.index6.get_settings()['attributesForFaceting'],
                         ['company'])
        for obj in self.index6.browse_objects():
            self.assertIn(obj, objects)

        with self.assertRaises(RequestException) as cm:
            self.client.init_index(self.index.name).search('')

        self.assertEqual(cm.exception.status_code, 404)
Example #31
    def test_search(self):
        responses = MultipleResponse()

        responses.push(self.index.save_objects([
            {"company": "Algolia", "name": "Julien Lemoine"},
            {"company": "Algolia", "name": "Nicolas Dessaigne"},
            {"company": "Amazon", "name": "Jeff Bezos"},
            {"company": "Apple", "name": "Steve Jobs"},
            {"company": "Apple", "name": "Steve Wozniak"},
            {"company": "Arista Networks", "name": "Jayshree Ullal"},
            {"company": "Google", "name": "Larry Page"},
            {"company": "Google", "name": "Rob Pike"},
            {"company": "Google", "name": "Serguey Brin"},
            {"company": "Microsoft", "name": "Bill Gates"},
            {"company": "SpaceX", "name": "Elon Musk"},
            {"company": "Tesla", "name": "Elon Musk"},
            {"company": "Yahoo", "name": "Marissa Mayer"}
        ], {'autoGenerateObjectIDIfNotExist': True}))

        responses.push(self.index.set_settings({
            'attributesForFaceting': ["searchable(company)"]
        }))

        responses.wait()

        # Perform a search query using search with the query `algolia` and no
        # parameter and check that the number of returned hits is equal to 2
        result = self.index.search('algolia')
        self.assertEqual(result['nbHits'], 2)

        # Perform a search using search with the query `elon` and the
        # following parameter and check that the queryID field from
        # the response is not empty
        result = self.index.search('elon', {'clickAnalytics': True})
        self.assertIn('queryID', result)

        # Perform a faceted search using search with the query `elon` and the
        # following parameters and check that the number of returned hits is
        # equal to 1
        result = self.index.search('elon', {
            'facets': '*',
            'facetFilters': 'company:tesla'
        })
        self.assertEqual(result['nbHits'], 1)

        # Perform a filtered search using search with the query `elon` and the
        # following parameters and check that the number of returned hits is
        # equal to 2
        result = self.index.search('elon', {
            'facets': '*',
            'filters': '(company:tesla OR company:spacex)'
        })

        self.assertEqual(result['nbHits'], 2)

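        # Look up `company` facet values matching `a` and verify the expected
        # companies are present.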
        result = self.index.search_for_facet_values(
            'company', 'a')['facetHits']
        values = list(map(lambda facet: facet['value'], result))

        self.assertIn('Algolia', values)
        self.assertIn('Amazon', values)
        self.assertIn('Apple', values)
        self.assertIn('Arista Networks', values)