def __init__(self, app_id=None, search_key=None, admin_key=None):
    """Initialise the Algolia search and admin clients.

    Both clients stay ``None`` unless all three credentials are supplied.
    """
    self.search_client = None
    self.admin_client = None
    have_all_credentials = all((app_id, search_key, admin_key))
    if have_all_credentials:
        self.search_client = SearchClient.create(app_id, search_key)
        self.admin_client = SearchClient.create(app_id, admin_key)
def dataframe_to_algolia(df):
    """Upload food-bank rows from *df* to the Algolia ``us_foodbank`` index.

    Rows whose site address cannot be geo-referenced are reported and skipped.
    """
    client = SearchClient.create(ALGOLIA_APP_ID, ALGOLIA_KEY)
    index = client.init_index('us_foodbank')
    # Replace NaN with empty strings so skipped/blank times are falsy.
    df = df.where(pd.notnull(df), '')
    records = []
    for _, row in df.iterrows():
        record = {col: row[col] for col in report_cols}
        location = georeference_address(record['siteAddress'])
        if location is None:
            print('Could not geo reference location: ({0}, {1})'.format(
                record['siteName'], record['siteAddress']))
            continue
        record['_geoloc'] = location
        # Label-based slice covers the breakfast..dinner/supper columns.
        open_times = row['breakfastTime':'dinnerSupperTime'].tolist()
        record['openTimes'] = ', '.join(t for t in open_times if t)
        records.append(record)
    index.save_objects(records, {'autoGenerateObjectIDIfNotExist': True})
def setUp(self):
    """Patch the Algolia search method so tests never hit the network."""
    super(AlgoliasearchTest, self).setUp()

    # dummy values
    def search(self, query, args=None, request_options=None):
        # Canned payload mirroring the shape of a real Algolia response.
        return {
            'hits': [{'dummy': 'dummy'}],
            'processingTimeMS': 23,
            'nbHits': 1,
            'hitsPerPage': 20,
            'exhaustiveNbHits': True,
            'params': 'query=xxx',
            'nbPages': 1,
            'query': 'xxx',
            'page': 0,
        }

    # Algolia search is a non free SaaS application, it isn't possible to add it to the
    # docker environment to enable a full-fledged integration test. The next best option
    # is to mock out the search method to prevent it from making server requests
    if (1, 0) <= algoliasearch_version < (2, 0):
        import algoliasearch
        import algoliasearch.index as index_module
        index_module.Index.search = search
        client = algoliasearch.algoliasearch.Client('X', 'X')
    else:
        import algoliasearch.search_index as index_module
        from algoliasearch.search_client import SearchClient
        index_module.SearchIndex.search = search
        client = SearchClient.create('X', 'X')

    # use this index only to properly test stuff
    self.index = client.init_index('test_index')
def __init__(self):
    """Wire up the keyword listener and the Algolia lessons index."""
    super(LaracastsExtension, self).__init__()
    self.subscribe(KeywordQueryEvent, KeywordQueryEventListener())
    client = SearchClient.create("1Z405N45FC", "6c44626a6a8c21778291dc05232905e6")
    self.index = client.init_index("lessons")
def initializeClipsIndex(db):
    """Populate the Algolia ``clips`` index from every clip in *db*.

    Boolean clip flags are rendered as human-readable facet strings
    (e.g. ``'Armor'`` / ``'No armor'``) before upload.
    """
    app_id = '10E1WBKVLO'
    # NOTE(review): credentials are hard-coded; consider env vars / secrets.
    api_key = 'bcf375829b1daec9b14b8b765ec31a58'
    client = SearchClient.create(app_id, api_key)
    index = client.init_index('clips')
    # NOTE(review): Clip appears unused; kept in case the import has
    # model-registration side effects.
    from models import Clip

    records = []
    for c in db.getAllClips():
        records.append({
            'objectID': c.id,
            'Code': c.code,
            'Event': c.event.name,
            'Map': c.map.name,
            'Player': c.player.alias,
            'Team': c.team.alias,
            'GrandFinal': 'Grand Final' if c.grandFinal else 'Not a Grand Final',
            'Armor': 'Armor' if c.armor else 'No armor',
            'Crowd': 'Crowd' if c.crowd else 'No crowd',
            # Fixed: the original joined "N" and " kills" with a space,
            # producing a double space ("5  kills").
            'Kills': '{0} kills'.format(c.kills),
            'ClutchKills': '1v{0}'.format(c.clutchKills),
            'Weapons': [w.name for w in c.weapons],
        })
    # One batched request instead of one round trip per clip.
    index.save_objects(records)
async def _na_update(self):
    """Refresh NA wish-list matches, paging through all result pages.

    The original fetched page 0 with one copy of the query code and the
    remaining pages with a duplicated copy; this version uses a single
    loop (the page count is learned from the first response).
    """
    wish_list_matches = []
    queries = copy(QUERIES)
    params = queries[0]["params"]
    queries[0]["indexName"] = NA_INDEX_NAMES[self.country]
    async with SearchClient.create(APP_ID, API_KEY) as client:
        page_num = 0
        num_pages = 1  # provisional until the first response arrives
        while page_num < num_pages:
            # The page param is a 0-based index.
            queries[0]["params"] = params + "&page=" + str(page_num)
            results = await client.multiple_queries_async(queries)
            wish_list_matches.extend(
                self._parse_matches(results["results"][0]["hits"]))
            if page_num == 0:
                # Learn the real page count from the first response only,
                # matching the original's behaviour.
                num_pages = results["results"][0]["nbPages"]
            page_num += 1
    self.attrs["on_sale"] = wish_list_matches
    self._state = len(wish_list_matches)
def get_index_context():
    """Return the Algolia index configured for the current Flask app."""
    config = current_app.config
    client = SearchClient.create(
        config['ALGOLIA_APP_ID'],
        config['ALGOLIA_ADMIN_API_KEY'],
    )
    return client.init_index(config['ALGOLIA_INDEX'])
def test_browse_rules(self):
    """browse_rules must transparently page through all 3800 rules."""
    def fake_send(req, **kwargs):
        # 1000 hits per page; the page numbered 3 is a partial page of 800.
        hits = [{"objectID": i, "_highlightResult": None}
                for i in range(0, 1000)]
        page = json.loads(req.body)["page"]
        if page == 3:
            hits = hits[0:800]
        response = Response()
        response.status_code = 200
        response._content = str.encode(json.dumps({
            "hits": hits,
            "nbHits": 3800,
            "page": page,
            "nbPages": 3,
        }))
        return response

    client = SearchClient.create("foo", "bar")
    session = requests.Session()
    session.send = mock.MagicMock(name="send", side_effect=fake_send)
    client._transporter._requester._session = session
    index = F.index(client, "test")
    self.assertEqual(len(list(index.browse_rules())), 3800)
def get_index(self, index_name, credentials) -> SearchIndex:
    """Return the Algolia index *index_name*, validating credentials first.

    Raises:
        PublishError: when credentials are missing, or the index does not
            exist / is not accessible with the supplied API key.
    """
    merged_creds = merge_credentials(self.env.algolia_credentials, credentials)
    # yield "Checking for Algolia credentials and index..."
    if "app_id" not in merged_creds or "api_key" not in merged_creds:
        raise PublishError(
            "Could not connect to Algolia. "
            + "Make sure api_key and app_id are present in your configs/algolia.ini file."
        )
    self.algolia = SearchClient.create(
        merged_creds["app_id"], merged_creds["api_key"])
    try:
        return self.algolia.init_index(index_name)
    except AlgoliaException as exc:
        raise PublishError(
            f'Algolia index "{index_name}" does not exist, '
            f"or the API key provided does not have access to it. "
            f"Please create the index / verify your credentials on their website."
        ) from exc
def __init__(self):
    """Create the Algolia client and the three indexes used by the app."""
    self.db = SearchClient.create(
        os.getenv('ALGOLIA_APP_ID'),
        os.getenv('ALGOLIA_API_KEY'),
    )
    self.shares_index = self.db.init_index('shares')
    self.shares_by_updated_index = self.db.init_index('shares_by_updated')
    self.plugins_index = self.db.init_index('plugins')
def pat_search(text):
    """Search the Algolia ``patients`` index for *text*.

    Returns a list of dicts with each matching patient's basic fields.
    """
    client = SearchClient.create(Env.ALGOLIA_API(), Env.ALGOLIA_ADMIN())
    index = client.init_index('patients')
    # One settings call instead of two sequential round trips; Algolia
    # merges partial settings, so the combined payload is equivalent.
    # NOTE(review): pushing settings on every search is wasteful — consider
    # configuring the index once at deploy time instead.
    index.set_settings({
        "customRanking": ["desc(followers)"],
        "searchableAttributes": [
            "firstName", "lastName", "phone", "email", "id"
        ],
    })
    # Res is all hits of Patients with matching
    hits = index.search(text)['hits']
    return [
        {
            "id": h['id'],
            "firstName": h['firstName'],
            "lastName": h['lastName'],
            "email": h['email'],
            "phone": h['phone'],
            "profilePicture": h['profilePicture'],
        }
        for h in hits
    ]
def __init__(self):
    # In AWS, Algolia variables are available in the env; outside AWS these must be loaded from the .env file.
    if not is_in_aws():
        from dotenv import load_dotenv
        load_dotenv()
    # NOTE(review): SearchClient.create() is called with no credentials —
    # the algoliasearch SDK normally requires app_id and api_key arguments;
    # confirm a wrapper/monkeypatch supplies them, or pass the env values.
    self.client = SearchClient.create()
    # Index name comes from the ALGOLIA_INDEX environment variable.
    self.index = self.client.init_index(os.environ.get("ALGOLIA_INDEX"))
def __init__(self, app_id, apikey, index_name, hits_per_page, sort_by):
    """Create and configure the Algolia index used for game search."""
    client = SearchClient.create(app_id=app_id, api_key=apikey)
    index = client.init_index(index_name)
    settings = {
        'searchableAttributes': ['name', 'description'],
        'attributesForFaceting': [
            'categories',
            'mechanics',
            'players',
            'weight',
            'playing_time',
        ],
        'customRanking': [sort_by],
        'highlightPreTag': '<strong class="highlight">',
        'highlightPostTag': '</strong>',
        'hitsPerPage': hits_per_page,
    }
    index.set_settings(settings)
    self.index = index
def add_hcp(hcp):
    """Index a single HCP record in the Algolia ``hcps`` index.

    Returns 0 when indexing fails (the error is printed); otherwise the
    Algolia response is discarded and None is returned, as before.
    """
    client = SearchClient.create(Env.ALGOLIA_API(), Env.ALGOLIA_ADMIN())
    index = client.init_index('hcps')
    record = {
        "id": hcp.id,
        "firstName": hcp.firstName,
        "lastName": hcp.lastName,
        "phone": hcp.phone,
        "email": hcp.email,
        "title": hcp.title,
        "specialty": hcp.specialty,
        "profilePicture": hcp.profilePicture
    }
    # The original serialized the record to js.json and immediately read it
    # back (leaking a file handle via json.load(open(...))); the dict can be
    # sent to Algolia directly.
    try:
        index.save_object(record, {'autoGenerateObjectIDIfNotExist': True})
    except Exception as e:
        print(f'Error: {e}')
        return 0
def search_client(app_id=None, api_key=None):
    # type: (Optional[str], Optional[str]) -> SearchClient
    """Build a SearchClient, falling back to Factory defaults for credentials."""
    if app_id is None:
        app_id = Factory.get_app_id()
    if api_key is None:
        api_key = Factory.get_api_key()
    return Factory.decide(SearchClient.create(app_id, api_key))
def setUp(self):
    """Stub the Algolia search method so tests stay offline."""
    super(AlgoliasearchTest, self).setUp()

    # dummy values
    def search(self, query, args=None, request_options=None):
        # Static payload shaped like a real Algolia search response.
        return {
            "hits": [{"dummy": "dummy"}],
            "processingTimeMS": 23,
            "nbHits": 1,
            "hitsPerPage": 20,
            "exhaustiveNbHits": True,
            "params": "query=xxx",
            "nbPages": 1,
            "query": "xxx",
            "page": 0,
        }

    # Algolia search is a non free SaaS application, it isn't possible to add it to the
    # docker environment to enable a full-fledged integration test. The next best option
    # is to mock out the search method to prevent it from making server requests
    is_v1 = (1, 0) <= algoliasearch_version < (2, 0)
    if is_v1:
        import algoliasearch
        import algoliasearch.index as index_module
        index_module.Index.search = search
        client = algoliasearch.algoliasearch.Client("X", "X")
    else:
        from algoliasearch.search_client import SearchClient
        import algoliasearch.search_index as index_module
        index_module.SearchIndex.search = search
        client = SearchClient.create("X", "X")

    # use this index only to properly test stuff
    self.index = client.init_index("test_index")
def init_index(self):
    """
    Initializes an index within Algolia. Initializing an index will create it
    if it doesn't exist.

    Logs and returns early when the index name or credentials are missing;
    re-raises AlgoliaException (after logging) when initialization fails.
    """
    if not self.ALGOLIA_INDEX_NAME:
        logger.error(
            'Could not initialize Algolia index due to missing index name.'
        )
        return

    if not self.ALGOLIA_APPLICATION_ID or not self.ALGOLIA_API_KEY:
        logger.error(
            'Could not initialize Algolia\'s %s index due to missing Algolia settings: %s',
            self.ALGOLIA_INDEX_NAME,
            ['APPLICATION_ID', 'API_KEY'],
        )
        return

    self._client = SearchClient.create(self.ALGOLIA_APPLICATION_ID, self.ALGOLIA_API_KEY)
    try:
        self.algolia_index = self._client.init_index(self.ALGOLIA_INDEX_NAME)
    except AlgoliaException:
        logger.exception(
            'Could not initialize %s index in Algolia due to an exception.',
            self.ALGOLIA_INDEX_NAME,
        )
        # Bare raise preserves the active exception and its traceback;
        # the previous `raise exc` was less idiomatic.
        raise
async def test(query):
    """Run *query* against the async ``clips`` index and return raw results."""
    async with SearchClient.create(app_id, api_key) as client:
        index = client.init_index('clips')
        return await index.search_async(query)
def search(self, docset, term):
    """
    Searches a term on a specific docset and return the results
    """
    docset = self.get_docset(docset)
    if not docset:
        raise ValueError("The specified docset is not known")

    client = SearchClient.create(
        docset['algolia_application_id'], docset['algolia_api_key'])
    index = client.init_index(docset['algolia_index'])
    hits = index.search(term)['hits']
    if not hits:
        return []

    items = []
    for hit in hits:
        title, description = self.parse_item_description(hit)
        items.append({
            'url': hit['url'],
            'title': title,
            'icon': docset['icon'],
            'category': description,
        })
    return items
def upload_to_algolia():
    """Replace the Algolia index contents with the pickled album data.

    Flattens each album's performer fields and extracts the first "large"
    image URL (None when details/images are missing) before uploading.
    """
    with Path("az_albums.pkl").open("rb") as file:
        albums = pickle.load(file)

    def _large_image_url(album):
        # First "large" image URL, or None when details/images are absent.
        images = (album.get("details") or {}).get("image", [])
        return next(
            (img["url"] for img in images
             if img is not None and img["size"] == "large"),
            None,
        )

    objects = [
        {
            **album,
            "performer": album["performer"]["name"],
            "performerId": album["performer"]["id"],
            "imageUrl": _large_image_url(album),
        }
        for album in albums
    ]
    client = SearchClient.create(
        app_id=os.getenv("ALGOLIA_APP_ID"),
        api_key=os.getenv("ALGOLIA_APP_KEY")
    )
    try:
        index_ = client.init_index(os.getenv("ALGOLIA_INDEX_NAME"))
        index_.replace_all_objects(
            objects=objects,
            request_options={'autoGenerateObjectIDIfNotExist': True}
        )
    finally:
        # Close even when indexing raises (the original leaked the session
        # on error).
        client.close()
def algolia_client() -> SearchIndex:
    """Return the configured Algolia search index."""
    client = SearchClient.create(
        settings.algolia_app_id,
        settings.algolia_api_key,
    )
    return client.init_index(settings.algolia_index)
def __init__(self):
    """Wire up the keyword listener and the Algolia docs index."""
    super(LaravelExtension, self).__init__()
    self.subscribe(KeywordQueryEvent, KeywordQueryEventListener())
    client = SearchClient.create("8BB87I11DE", "8e1d446d61fce359f69cd7c8b86a50de")
    self.index = client.init_index("docs")
def process(app_id, write_key, index_name, facets):
    """Declare *facets* as attributes for faceting on *index_name*."""
    LOG.info(f"Setting facets for searching on index {index_name}")
    index = SearchClient.create(app_id, write_key).init_index(index_name)
    # https://www.algolia.com/doc/guides/managing-results/refine-results/faceting/how-to/declaring-attributes-for-faceting/
    index.set_settings({'attributesForFaceting': facets})
    LOG.info(f"Setting facets for searching on index {index_name} finished")
def init_algolia():
    ''' init the algolia index '''
    client = SearchClient.create(
        tokens['algolia_appid'], tokens['algolia_adminAPI'])
    # Log the account's indices as a cheap connectivity check.
    logging.info(client.list_indices())
    index = client.init_index('CoronaFactChecks')
    index.set_settings({'removeStopWords': True})
    return index
def run(self):
    """Push the JSON objects at ``self.path`` into the configured Algolia index."""
    application_id = os.environ.get('ALGOLIA_APP_ID')
    api_key = os.environ.get('ALGOLIA_API_KEY')
    index_name = os.environ.get('ALGOLIA_INDEX', 'Letsautomate.it')
    client = SearchClient.create(application_id, api_key=api_key)
    index = client.init_index(index_name)
    with open(self.path) as f:
        index.save_objects(json.load(f))
async def docsearch(self, ctx, search):
    """Reply with the first docs hit (from the "latest" docs) for *search*."""
    client = SearchClient.create('BH4D9OD16A', '53b3a8362ea7b391f63145996cfe8d82')
    index = client.init_index('ficsit')
    results = index.search(search, {'attributesToRetrieve': '*'})
    for hit in results["hits"]:
        if not hit["hierarchy"]["lvl0"].endswith("latest"):
            continue
        await self.bot.reply_to_msg(
            ctx.message,
            f"This is the best result I got from the SMD :\n{hit['url']}")
        return
def test_uses_request_options_on_wait(self):
    """wait() must forward its request options to wait_task."""
    index = SearchClient.create('foo', 'bar').init_index('foo')
    index.wait_task = mock.Mock(name='wait_task')
    index._sync = mock.Mock(name='_sync', return_value=index)

    response = IndexingResponse(index, [{'taskID': 1}])
    response.wait({'bar': 2})

    index.wait_task.assert_called_once_with(1, {'bar': 2})
def test_uses_request_options_on_wait(self):
    """wait() must forward its request options to wait_task."""
    index = SearchClient.create("foo", "bar").init_index("foo")
    index.wait_task = mock.Mock(name="wait_task")
    index._sync = mock.Mock(name="_sync", return_value=index)

    response = IndexingResponse(index, [{"taskID": 1}])
    response.wait({"bar": 2})

    index.wait_task.assert_called_once_with(1, {"bar": 2})
def _search_index(query, **options):
    """Search the module-level cached Algolia index, creating it on first use."""
    global INDEX
    if not INDEX:
        INDEX = SearchClient.create(APP_ID, API_KEY).init_index(INDEX_NAME)
    response = INDEX.search(query, request_options=options)
    return response.get('hits', [])
def __init__(self):
    """Connect to the Algolia recipe index and the Firestore recipe collection."""
    algolia = SearchClient.create('RONI3GVMZF', 'd069d6b6b79085dc8fce9e619feed841')
    self.algolia_recipe_db = algolia.init_index('recipes')

    cred = credentials.Certificate(
        "./pakalo-abid786-firebase-adminsdk-xc8ts-ad52022ec7.json")
    app = firebase_admin.initialize_app(cred)
    self.firestore_recipe_db = firestore.client().collection('recipes')
def test_async_session(self):
    """close() must close the async transporter's underlying session."""
    client = SearchClient.create(Factory.get_app_id(), Factory.get_api_key())

    import asyncio
    loop = asyncio.get_event_loop()

    result = loop.run_until_complete(
        asyncio.gather(client.list_api_keys_async())
    )
    self.assertIsInstance(result, list)

    loop.run_until_complete(asyncio.gather(client.close()))
    self.assertTrue(client._transporter_async._requester._session.closed)
def test_app_id_getter(self):
    """app_id must expose the id passed to create()."""
    client = SearchClient.create('foo', 'bar')
    self.assertEqual('foo', client.app_id)
def setUp(self):
    """Build a client whose transporter writes are mocked out."""
    self.client = SearchClient.create('foo', 'bar')
    mocked_write = mock.Mock(name='write')
    mocked_write.return_value = {}
    self.client._transporter.write = mocked_write
def test_create(self):
    """create() returns a SearchClient; empty credentials are rejected."""
    self.assertIsInstance(self.client, SearchClient)
    with self.assertRaises(AssertionError):
        SearchClient.create('', '')