def upload(batch_size=5000):
    """Upload French city suggestions to Algolia.

    Builds the suggestions, fills a timestamped temporary index with the
    live index's settings, then atomically swaps it in with move_index.

    :param batch_size: number of suggestions sent per add_objects call.
    """
    suggestions = prepare_cities(
        'data',
        'data/geo/french_cities.csv',
        'data/geo/french_urban_entities.xls',
        'data/geo/ville-ideale-transports.html')
    algolia = algoliasearch.Client(
        os.getenv('ALGOLIA_APP_ID', 'K6ACI9BKKT'),
        os.getenv('ALGOLIA_API_KEY'))
    index_name = os.getenv('ALGOLIA_CITIES_INDEX', 'cities')
    cities_index = algolia.init_index(index_name)
    tmp_index_name = '{}_{}'.format(index_name, round(time.time()))
    tmp_cities_index = algolia.init_index(tmp_index_name)
    try:
        # Mirror the live index's settings on the staging index.
        tmp_cities_index.set_settings(cities_index.get_settings())
        # TODO(pascal): Add synonyms if we start having some.
        for start in range(0, len(suggestions), batch_size):
            tmp_cities_index.add_objects(
                suggestions[start:start + batch_size])
        # Everything uploaded: atomically replace the live index.
        client.move_index(tmp_index_name, index_name)
    except helpers.AlgoliaException:
        # Drop the half-built staging index and dump a sample for debugging.
        tmp_cities_index.clear_index()
        print(json.dumps(suggestions[:10], indent=2))
        raise
def __init__(self, api_key_admin):
    """Connect to Algolia with admin rights and configure the board index.

    :param api_key_admin: Algolia admin API key (settings changes require it).
    """
    algolia = algoliasearch.Client(
        app_id=SETTINGS["algolia"]["app_id"],
        api_key=api_key_admin,
    )
    board_index = algolia.init_index(SETTINGS["algolia"]["index_name"])
    # Full-text fields, facets for filtering, and highlight markup.
    board_index.set_settings({
        'searchableAttributes': [
            'name',
            'description',
        ],
        'attributesForFaceting': [
            'categories',
            'mechanics',
            'players',
            'weight',
            'playing_time',
        ],
        'customRanking': ['asc(name)'],
        'highlightPreTag': '<strong class="highlight">',
        'highlightPostTag': '</strong>'
    })
    self.index = board_index
def main():
    """Configure the Algolia index and run the Telegram bot until stopped."""
    # Algolia: search on keywords/file_id, freshest entries ranked first,
    # typo tolerance everywhere except the exact file_id field.
    algolia = algoliasearch.Client(ALGOLIA_APP_ID, ALGOLIA_API_KEY)
    index = algolia.init_index(ALGOLIA_INDEX_NAME)
    index.set_settings({
        "searchableAttributes": ["keywords", "file_id"],
        'customRanking': ['asc(created)'],  # fresh on top
        "typoTolerance": True,
        "disableTypoToleranceOnAttributes": ["file_id"],
        "ignorePlurals": True
    })

    # Telegram: expose the index on the bot and wire up all handlers.
    updater = Updater(token=BOT_TOKEN)
    updater.bot.index = index
    dispatcher = updater.dispatcher
    dispatcher.add_handler(remove_gif_conversation)
    dispatcher.add_handler(add_gif_conversation)
    dispatcher.add_handler(CommandHandler('help', help))
    dispatcher.add_handler(CommandHandler('start', start))
    dispatcher.add_handler(InlineQueryHandler(inline_search))
    dispatcher.add_handler(ChosenInlineResultHandler(inline_result))
    # Catch-all must stay last so specific handlers run first.
    dispatcher.add_handler(MessageHandler(Filters.all, unknown_message))
    dispatcher.add_error_handler(error)
    updater.start_polling(timeout=30)
    updater.idle()
def get_algolilas_meta(entity_id):
    """Build per-entity Algolia metadata: index names and a secured search key.

    NOTE(review): the function name looks like a typo for "algolias" but is
    kept unchanged because callers depend on it.

    :param entity_id: appended to every model's index name.
    :return: dict with 'index_dict', 'application_id' and 'public_key'.
    """
    params = getattr(settings, 'ALGOLIA', None)
    index_list = []
    index_dict = {}
    for algolia_key in AlgoliaConstant.MODEL_DICT:
        index_name = algolia_key + str(entity_id)
        # Optional prefix/suffix let multiple deployments share one app.
        if 'INDEX_PREFIX' in params:
            index_name = params['INDEX_PREFIX'] + '_' + index_name
        if 'INDEX_SUFFIX' in params:
            index_name += '_' + params['INDEX_SUFFIX']
        index_list.append(index_name)
        index_dict[algolia_key.lower()] = index_name
    client = algoliasearch.Client(params['APPLICATION_ID'], params['API_KEY'])
    # The generated key may only query the indexes listed above.
    public_key = client.generate_secured_api_key(
        params['SEARCH_API_KEY'], {'restrictIndices': index_list})
    return {
        'index_dict': index_dict,
        'application_id': params['APPLICATION_ID'],
        'public_key': public_key,
    }
def load_algolia(self):
    """Connect to Algolia and configure which attributes are searchable."""
    config = self.app.config
    algolia = algoliasearch.Client(
        config['ALGOLIA_APPLICATION_ID'],
        config['ALGOLIA_API_KEY'])
    self.index = algolia.init_index(config['ALGOLIA_INDEX_NAME'])
    # NOTE(review): 'eventName, sourceName' is a single comma-joined entry,
    # which Algolia treats as two attributes of equal priority — confirm
    # this is intentional and not a typo for two separate list items.
    self.index.set_settings(
        {'searchableAttributes': ['eventName, sourceName', 'sourceURL']})
def test_wrong_app_id(self):
    """A client built with a bogus application ID must fail to list indexes."""
    client = algoliasearch.Client("fakeappID", "blabla")
    # assertRaises states the intent directly and reports a clear failure
    # message; the old try/except + assertTrue(False) pattern reported a
    # bare "False is not true" and silently passed on ANY AlgoliaException
    # path regardless of where it was raised.
    with self.assertRaises(algoliasearch.AlgoliaException):
        client.listIndexes()
def main(event, context):
    """Fetch every year's playlist in parallel, merge the per-year caches,
    then replace the objects of each configured Algolia index.

    Fixes: the combined-cache file handle used to shadow the `cache` dict,
    and the section headers were bare string expressions (no-op statements)
    rather than comments.
    """
    # Create a separate thread for each year with playlists.
    threads = []
    for year in PLAYLISTS.keys():
        t = threading.Thread(target=work_on_playlist, args=(year, ))
        threads.append(t)
        t.start()
    for thread in threads:
        thread.join()

    # Aggregate all per-year cache files into a single file.
    cache = dict(data=[])
    print('Aggregate files into single cache')
    with open('/tmp/videos.json', 'w') as final:
        for year in PLAYLISTS.keys():
            with open('/tmp/videos-{}.json'.format(year), 'r') as f:
                local = json.load(f)
                cache['data'].extend(local)
        json.dump(cache['data'], final)

    # Read the combined cache and replace objects in each index.
    print('Updating Algolia indicies')
    client = algoliasearch.Client(ALGOLIA_APP, ALGOLIA_DEVELOPER_KEY)
    all_indicies = ALGOLIA_INDICIES.replace(' ', '').split(',')
    with open('/tmp/videos.json', 'r') as cache_file:  # don't shadow `cache`
        objects = json.load(cache_file)
    print('Loaded objects from cache')
    for index in all_indicies:
        client_index = client.init_index(index)
        # 'safe' makes replace_all_objects wait for the swap to finish.
        request_options = algoliasearch.RequestOptions({'safe': True})
        try:
            print('Updating index {}'.format(index))
            client_index.replace_all_objects(objects, request_options)
        except Exception as error:
            # Best-effort per index: keep updating the remaining indexes.
            print('Problem updating index: {}'.format(error))
def delete_objects():
    """Remove a fixed set of records from the 'art-assets' index."""
    doomed_ids = ['qc6kbklzr3ku7ylT9Df729RAvqZFaMsfhR']
    client = algoliasearch.Client(os.environ['ALGOLIA_APP'],
                                  os.environ['ALGOLIA_KEY'])
    client.init_index('art-assets').delete_objects(doomed_ids)
def add_objects(org, objects):
    """
    Add objects using the Algolia API, so that we can query them on the page
    where we visualise repositories.

    :param org: name of the organisation
    :param objects: list of dictionaries containing repository information
        such as name
    :return: boolean representing success of operation
    """
    # Bail out when any credential or the payload itself is missing.
    if not (ALGOLIA_APP_ID and ALGOLIA_SEARCH_KEY and ALGOLIA_ADMIN_KEY
            and objects):
        return False
    client = algoliasearch.Client(ALGOLIA_APP_ID, ALGOLIA_ADMIN_KEY)
    index = client.init_index(org)
    existing = [entry['name'] for entry in client.list_indexes()['items']]
    if org not in existing:
        # First upload for this organisation creates the index implicitly.
        index.add_objects(objects)
        return True
    if index.search('')['nbHits'] == len(objects):
        # Same record count already indexed: assume nothing changed.
        return True
    index.replace_all_objects(objects)
    return True
def lambda_handler(event, context):
    """Label a newly created asset and index it in Algolia.

    Parses the create event, polls for the asset's URL, downloads the
    binary, runs label recognition on it, then writes the result to the
    'art-assets' index. Any failure is logged and re-raised so the Lambda
    invocation is marked failed.
    """
    try:
        asset_event = AssetCreateEvent.from_json(event)
        response = requests.get(asset_event.url())
        print(response)
        print("lambda_handler")
        url = poll_asset_url(asset_event)
        print(url)
        response = requests.get(url)
        print(response)
        labels = recognize_binary(response.content)
        print(labels)
        al_client = algoliasearch.Client(os.environ['ALGOLIA_APP'],
                                         os.environ['ALGOLIA_KEY'])
        index = al_client.init_index('art-assets')
        print(index, asset_event.asset_id, asset_event.space_id, url, labels)
        index_asset(
            index,
            asset_event.asset_id,
            asset_event.space_id,
            url,
            labels,
        )
    except Exception as e:
        print(f"Failed to handle event {event}: {e}")
        # Bare `raise` keeps the original traceback; `raise e` re-raised
        # from this frame and obscured the real failure location.
        raise
def main():
    """Index every cocktail JSON file under the given directory into Algolia.

    Usage: ./index.py PATHNAME, with ALGOLIA_APPLICATION_ID and
    ALGOLIA_API_KEY set in the environment.
    """
    if len(sys.argv) != 2:
        print('usage: ./index.py [PATHNAME]')
        sys.exit(1)
    app_id = os.getenv('ALGOLIA_APPLICATION_ID')
    api_key = os.getenv('ALGOLIA_API_KEY')
    if not (app_id and api_key):
        print('Please define ALGOLIA_APPLICATION_ID and ALGOLIA_API_KEY env.')
        sys.exit(1)
    records = []
    for filename in os.listdir(sys.argv[1]):
        pathname = '{}/{}'.format(sys.argv[1], filename)
        print(pathname)
        with open(pathname) as handle:
            record = mapping(json.load(handle))
        if should_keep(record):
            records.append(record)
    # Stable, position-based object IDs.
    for object_id, record in enumerate(records):
        record['objectID'] = object_id
    client = algoliasearch.Client(app_id, api_key)
    index = client.init_index("Cocktail-Kit")
    index.clear_index()
    index.add_objects(records)
def __init__(self, app_id=None, api_key=None):
    """Create the Algolia client for the index registry.

    Fix: the `app_id`/`api_key` arguments were accepted but unconditionally
    overwritten from settings.ALGOLIA, so callers passing credentials were
    silently ignored. They now take precedence, falling back to settings
    when omitted (backward compatible for no-argument callers).

    :param app_id: Algolia application ID; defaults to settings.ALGOLIA.
    :param api_key: Algolia API key; defaults to settings.ALGOLIA.
    """
    params = getattr(settings, 'ALGOLIA', None)
    if app_id is None:
        app_id = params['APPLICATION_ID']
    if api_key is None:
        api_key = params['API_KEY']
    self.client = algoliasearch.Client(app_id, api_key)
    # Lazily-populated cache of index adapters, keyed by model name.
    self.__registered_indexs = {}
def __init__(self, app_id, apikey, index_name, hits_per_page, sort_by):
    """Connect to Algolia and configure the board-game index.

    :param app_id: Algolia application ID.
    :param apikey: Algolia API key (admin rights needed for set_settings).
    :param index_name: name of the index to initialize.
    :param hits_per_page: page size returned by searches.
    :param sort_by: custom-ranking expression, e.g. 'asc(name)'.
    """
    algolia = algoliasearch.Client(
        app_id=app_id,
        api_key=apikey,
    )
    board_index = algolia.init_index(index_name)
    board_index.set_settings({
        'searchableAttributes': [
            'name',
            'description',
        ],
        'attributesForFaceting': [
            'categories',
            'mechanics',
            'players',
            'weight',
            'playing_time',
        ],
        'customRanking': [sort_by],
        'highlightPreTag': '<strong class="highlight">',
        'highlightPostTag': '</strong>',
        'hitsPerPage': hits_per_page,
    })
    self.index = board_index
def get_client(self, force_refresh=False):
    """Return the cached Algolia client, rebuilding it on demand.

    :param force_refresh: when True, discard the cache and reconnect.
    """
    if self.client and not force_refresh:
        return self.client
    self.client = algoliasearch.Client(
        self.configs.get('API_KEY'),
        self.configs.get('API_SECRET'),
    )
    return self.client
def get_wh_index():
    """Build the webhelp search index when credentials are configured.

    :return: an Index bound to WH_INDEX_NAME (default
        "dev_KOTLINLANG_WEBHELP"), or None when WH_SEARCH_USER /
        WH_SEARCH_KEY are absent from the environment.
    """
    # Guard clause instead of one deeply nested branch.
    if 'WH_SEARCH_USER' not in os.environ or 'WH_SEARCH_KEY' not in os.environ:
        return None
    client = algoliasearch.Client(os.environ['WH_SEARCH_USER'],
                                  os.environ['WH_SEARCH_KEY'])
    # os.environ.get replaces the membership-test ternary.
    index_name = os.environ.get('WH_INDEX_NAME', "dev_KOTLINLANG_WEBHELP")
    return Index(client, index_name)
def remove_crawling_issue(config):
    """Clear the crawling_issue flag stored in the index's userData.

    :param config: name of the Algolia index to update.
    """
    client = algoliasearch.Client(
        os.environ.get('APPLICATION_ID_PROD', ''),
        os.environ.get('API_KEY_PROD', ''))
    client.init_index(config).set_settings(
        {'userData': {'crawling_issue': False}})
def __init__(self, app_id, api_key, index_name, settings):
    """Prepare a live index plus a fresh '<name>_tmp' staging index.

    Any leftover temporary index is deleted, then re-created with the
    given settings so records can be staged before an atomic swap.
    """
    self.algolia_client = algoliasearch.Client(app_id, api_key)
    self.index_name = index_name
    self.index_name_tmp = '{}_tmp'.format(index_name)
    self.algolia_index = self.algolia_client.init_index(self.index_name)
    self.algolia_index_tmp = self.algolia_client.init_index(
        self.index_name_tmp)
    # Start from a clean slate: drop stale staging data, then configure.
    self.algolia_client.delete_index(self.index_name_tmp)
    self.algolia_index_tmp.set_settings(settings)
def update_index():
    """Deletes index contents, then reloads them from ../index.json.

    Fix: the original `json.load(open(...))` leaked the file handle;
    a `with` block now closes it deterministically.
    """
    print("Starting Updating Index")
    client = algoliasearch.Client("YOUR_KEY", "YOUR_VALUE")
    index = client.init_index("your_INDEX")
    print("Clearing index")
    index.clear_index()
    print("Loading index")
    with open('../index.json') as index_file:
        batch = json.load(index_file)
    index.add_objects(batch)
def __init__(self, app_id=None, api_key=None):
    """Initializes Algolia client and caches the server-side index names.

    :param app_id: Algolia application ID; falls back to settings.ALGOLIA
        (along with the API key) when falsy.
    :param api_key: Algolia API key.
    """
    if not app_id:
        app_id = settings.ALGOLIA['APPLICATION_ID']
        api_key = settings.ALGOLIA['API_KEY']
    self._indices = {}
    self.client = algoliasearch.Client(app_id, api_key)
    self.client.set_extra_header('User-Agent', 'Cuely Backend')
    # Remember which indexes already exist so we can skip re-creation.
    items = self.client.list_indexes().get('items', [])
    self.existing_algolia_indexes = [entry.get('name') for entry in items]
def delete_data(items):
    """Delete the given object IDs from the blog index and report the result.

    :param items: iterable of Algolia objectIDs to remove.
    """
    client = algoliasearch.Client("NIACONWTKJ", args.managehKey)
    blog_index = client.init_index('blog.threeq.me')
    res = blog_index.delete_objects(items)
    print("delete count: %d. items:\n%s" % (
        len(res), json.dumps(res, ensure_ascii=False, indent=2)))
def main(currentbatch):
    """Merge the MySQL `reports` table into the Algolia index, then reset it.

    For each reported link: if it is already indexed, accumulate its report
    count, bump `biggestbatchsize` when this batch is larger, and stamp the
    current batch number; otherwise add it as a new record. Finally the
    MySQL table is truncated.

    Fix: the original issued up to five identical `index.search(currLink)`
    calls per row — the result is now fetched once. The unused `bigBatch`
    lookup and the commented-out code were removed.

    SECURITY NOTE: Algolia admin credentials are hard-coded here; they
    should be moved to environment variables or configuration.

    :param currentbatch: sequence number of this ingestion batch.
    :return: the next batch number (currentbatch + 1).
    """
    client = algoliasearch.Client("CBJLT1KBQH",
                                  "2ab19a08e4be64e5a5f55dd080c80f5d")
    indexName = "database2"
    index = client.init_index(indexName)
    cnx = mysql.connector.connect(user="******", password="******",
                                  host="myeusql.dur.ac.uk",
                                  database="Xlptx42_durhack")
    cursor = cnx.cursor(dictionary=True)
    cursor.execute("SELECT * FROM reports")
    actors = list(cursor.fetchall())
    for actor in actors:
        # Links are truncated to 500 chars to match the indexed form.
        currLink = actor["link"][:500]
        hits = index.search(currLink)["hits"]  # single search per row
        if hits:
            hit = hits[0]
            objectID = hit["objectID"]
            index.partial_update_object({
                "objectID": objectID,
                "reports": hit["reports"] + actor["reports"]
            })
            if hit["biggestbatchsize"] < actor["reports"]:
                index.partial_update_object({
                    "objectID": objectID,
                    "biggestbatchsize": actor["reports"]
                })
            index.partial_update_object({
                "objectID": objectID,
                "biggestbatch": currentbatch
            })
        else:
            index.add_objects([actor])
    # All rows ingested: clear the staging table.
    cursor.execute("TRUNCATE TABLE reports")
    cnx.commit()
    cursor.close()
    cnx.close()
    return currentbatch + 1
def test_algolia_connection(self):
    """Inform the user about the state of the connection with algolia"""
    client = algoliasearch.Client(self.algolia_app_id,
                                  self.algolia_key_admin)
    try:
        client.list_indexes()
    except AlgoliaException:
        raise UserError(_("Connection Test Failed!"))
    # TODO: the success message should be an informational dialog rather
    # than a UserError.
    raise UserError(
        _("Connection Test Succeeded! Everything seems properly set up!"))
def set_settings(self, p_index, p_conf):
    """Sets the index settings.

    :param p_index: name of the Algolia index.
    :param p_conf: settings dict to apply.
    """
    try:
        index = algoliasearch.Client(
            self.app_id, self.api_key).init_index(p_index)
        index.set_settings(p_conf)
        logger.info('Index %s set', p_index)
    except Exception as err:
        # Best-effort: log and continue rather than propagate.
        logger.error('Error setting the index %s', p_index)
        logger.error(err)
def search_all(query):
    """Search the 'nprorg' index and return up to 10 hits.

    :param query: free-text search string.
    :return: the list of hits, or the raw response dict when it has no
        'hits' key.

    Fix: the bare `except:` also swallowed KeyboardInterrupt/SystemExit;
    it is narrowed to the lookup errors that can actually occur here.
    """
    client = algoliasearch.Client("1SS7XPOA8X",
                                  '9375902912f6f82964c8ec269234b3c2')
    index = client.init_index('nprorg')
    res = index.search(query, {'hitsPerPage': 10})
    # for audio-only search, add 'hasAudio:true', to filters above.
    try:
        return res['hits']
    except (KeyError, TypeError):
        return res
def test_retry(self):
    """Requests must fall through the fake hosts and succeed on the real one.

    Fix: the original evaluated `client.listIndexes` as a bare attribute
    access without calling it, so no request was ever made and the retry
    logic was never exercised.
    """
    try:
        client = algoliasearch.Client(
            os.environ['ALGOLIA_APPLICATION_ID'],
            os.environ['ALGOLIA_API_KEY'], [
                "fakeapp-1.algolianet.com", "fakeapp-2.algolianet.com",
                os.environ['ALGOLIA_APPLICATION_ID'] + ".algolianet.com"
            ])
        client.listIndexes()
    except algoliasearch.AlgoliaException:
        self.assertTrue(False)
def start_exporting(self):
    """Initialize the Algolia API Client and reset the export state."""
    algolia = algoliasearch.Client(
        self.algolia_api_id,
        self.algolia_api_key
    )
    self.algolia_index = algolia.init_index(self.algolia_index_name)
    # Pending-item buffer and running count of exported records.
    self.next_items = []
    self.exported_items_nbr = 0
def __init__(self, handle, ring, tid=None, ip=None):
    """Bind this object to the Algolia index named '<handle>-<ring>'.

    :param handle: owner handle, first half of the index name.
    :param ring: ring name, second half of the index name.
    :param tid: optional trace ID attached to every log record.
    :param ip: optional client IP attached to every log record.
    """
    self.lggr = AvispaLoggerAdapter(
        logging.getLogger('Avispa'), {'tid': tid, 'ip': ip})
    self.handle = handle
    self.ring = ring
    algolia = algoliasearch.Client(ALGOLIA_APPLICATION_ID,
                                   ALGOLIA_ADMIN_API_KEY)
    self.index = algolia.init_index('%s-%s' % (handle, ring))
def __init__(self, app_id, api_key, index_name, settings, query_rules):
    """Prepare a live index plus a fresh '<name>_tmp' staging index.

    The temporary index is cleaned, configured with the given settings,
    and seeded with the query rules (when any) so records can be staged
    before an atomic swap.
    """
    self.algolia_client = algoliasearch.Client(app_id, api_key)
    self.index_name = index_name
    self.index_name_tmp = '{}_tmp'.format(index_name)
    self.algolia_index = self.algolia_client.init_index(self.index_name)
    self.algolia_index_tmp = self.algolia_client.init_index(
        self.index_name_tmp)
    # Start from a clean slate: drop stale staging data, then configure.
    self.algolia_client.delete_index(self.index_name_tmp)
    self.algolia_index_tmp.set_settings(settings)
    if len(query_rules) > 0:
        # forward_to_replicas=True, clear_existing_rules=True
        self.algolia_index_tmp.batch_rules(query_rules, True, True)
def get_settings(self, p_index):
    """Gets the index settings.

    :param p_index: name of the Algolia index.
    :return: the settings dict, or None when the lookup fails.
    """
    try:
        index = algoliasearch.Client(
            self.app_id, self.api_key).init_index(p_index)
        result = index.get_settings()
        logger.info('Index %s get', p_index)
        return result
    except Exception as err:
        # Best-effort: log the failure and return None implicitly.
        logger.error('Error getting settings of %s', p_index)
        logger.error(err)
def _init_index_algolia(self):
    """Return Index Object; at this point only the Index object is
    instantiated — no connection to Algolia is made yet.

    :return: the index object, or False when client creation fails.
    """
    self.ensure_one()
    try:
        client = algoliasearch.Client(self.algolia_app_id,
                                      self.algolia_key_admin)
        return client.init_index(self.algolia_index)
    except ValueError as err:
        _logger.info(err)
        return False