class TestSearchClient(unittest.TestCase):
    def setUp(self):
        self.config = SearchConfig('foo', 'bar')
        self.transporter = Transporter(Requester(), self.config)
        self.transporter.read = mock.Mock(name="read")
        self.transporter.read.return_value = {}
        self.transporter.write = mock.Mock(name="write")
        self.transporter.write.return_value = {}

        self.client = SearchClient(self.transporter, self.config)

    def test_create(self):
        self.assertIsInstance(self.client, SearchClient)
        with self.assertRaises(AssertionError) as _:
            SearchClient.create('', '')

    def test_create_with_config(self):
        config = SearchConfig('foo', 'bar')

        self.assertIsInstance(
            SearchClient.create_with_config(config),
            SearchClient
        )

    def test_init_index(self):
        index = self.client.init_index('foo')

        self.assertIsInstance(index, SearchIndex)

    def test_app_id_getter(self):
        client = SearchClient.create('foo', 'bar')

        self.assertEqual(client.app_id, 'foo')

    def test_set_personalization_strategy(self):
        strategy = {
            'eventsScoring': {
                'Add to cart': {'score': 50, 'type': 'conversion'},
                'Purchase': {'score': 100, 'type': 'conversion'}
            },
            'facetsScoring': {
                'brand': {'score': 100},
                'categories': {'score': 10}
            }
        }

        self.client.set_personalization_strategy(strategy)

        self.transporter.write.assert_called_once_with(
            'POST',
            '1/recommendation/personalization/strategy',
            strategy,
            None,
        )
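# Note: the constructor below comes from SearchClientAsync, which wraps a
# synchronous SearchClient and mirrors its public methods as *_async
# variants via _create_async_methods_in.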
    def __init__(self, search_client, transporter, search_config):
        # type: (SearchClient, TransporterAsync, SearchConfig) -> None

        self._search_client = search_client
        self._transporter_async = transporter

        super(SearchClientAsync, self).__init__(
            search_client._transporter,
            search_config
        )

        search_client = SearchClient(transporter, search_config)
        search_client.__setattr__('init_index', self.init_index)
        search_client.__setattr__('_sync', self._sync)
        _create_async_methods_in(self, search_client)
Example No. 4
    def search_client(app_id=None, api_key=None):
        # type: (Optional[str], Optional[str]) -> SearchClient

        app_id = app_id if app_id is not None else Factory.get_app_id()
        api_key = api_key if api_key is not None else Factory.get_api_key()

        return Factory.decide(SearchClient.create(app_id, api_key))
    def test_uses_request_options_on_wait(self):
        index = SearchClient.create('foo', 'bar').init_index('foo')
        index.wait_task = mock.Mock(name='wait_task')
        index._sync = mock.Mock(name='_sync')
        index._sync.return_value = index

        response = IndexingResponse(index, [{'taskID': 1}])
        response.wait({'bar': 2})
        index.wait_task.assert_called_once_with(1, {'bar': 2})
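# A minimal sketch (credentials and index name are placeholders) of the
# pattern under test: IndexingResponse.wait forwards its request options to
# wait_task for every taskID it holds.
index = SearchClient.create('YourAppID', 'YourAdminKey').init_index('products')
index.save_objects([{'objectID': '1', 'name': 'foo'}]).wait()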
    def test_dns_timeout(self):

        config = SearchConfig(F.get_app_id(), F.get_api_key())

        config.hosts = HostsCollection([
            Host('algolia.biz', 10),
            Host('{}-1.algolianet.com'.format(F.get_app_id())),
            Host('{}-2.algolianet.com'.format(F.get_app_id())),
            Host('{}-3.algolianet.com'.format(F.get_app_id()))
        ])

        client = SearchClient.create_with_config(config)

        client.list_indices()
        # `algolia.biz` does not resolve, so the client should mark the first
        # host as down and fall back to the reachable hosts.
        self.assertFalse(config.hosts.read()[0].up)
        self.assertTrue(config.hosts.read()[1].up)
        self.assertTrue(config.hosts.read()[2].up)
        self.assertTrue(config.hosts.read()[3].up)
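# A hedged sketch (credentials are placeholders): the same SearchConfig hook
# also lets you tune the client's timeouts before building it.
config = SearchConfig('YourAppID', 'YourAdminKey')
config.read_timeout = 2  # seconds
client = SearchClient.create_with_config(config)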
    def test_async_session(self):
        app_id = Factory.get_app_id()
        api_key = Factory.get_api_key()

        client = SearchClient.create(app_id, api_key)

        import asyncio

        result = asyncio.get_event_loop().run_until_complete(
            asyncio.gather(client.list_api_keys_async())
        )
        self.assertIsInstance(result, list)

        asyncio.get_event_loop().run_until_complete(
            asyncio.gather(client.close())
        )

        self.assertTrue(
            client._transporter_async._requester._session.closed
        )
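# Closing the client is what releases the underlying aiohttp session, which
# is why the final assertion inspects the session object directly.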
Example No. 9
    def setUp(self):
        super(AlgoliasearchTest, self).setUp()

        # dummy values
        def search(self, query, args=None, request_options=None):
            return {
                "hits": [{
                    "dummy": "dummy"
                }],
                "processingTimeMS": 23,
                "nbHits": 1,
                "hitsPerPage": 20,
                "exhaustiveNbHits": True,
                "params": "query=xxx",
                "nbPages": 1,
                "query": "xxx",
                "page": 0,
            }

        # Algolia Search is a paid SaaS application, so it cannot be added to the
        # docker environment for a full-fledged integration test. The next best
        # option is to mock out the search method so it makes no server requests.
        if algoliasearch_version < (2, 0) and algoliasearch_version >= (1, 0):
            import algoliasearch
            import algoliasearch.index as index_module

            index_module.Index.search = search
            client = algoliasearch.algoliasearch.Client("X", "X")
        else:
            import algoliasearch.search_index as index_module
            from algoliasearch.search_client import SearchClient

            index_module.SearchIndex.search = search
            client = SearchClient.create("X", "X")

        # use this index only to properly test stuff
        self.index = client.init_index("test_index")
Example No. 10
 async def _na_update(self):
     from algoliasearch.search_client import SearchClient
     wish_list_matches = []
     QUERIES[0]['indexName'] = NA_INDEX_NAMES[self.country]
     async with SearchClient.create(APP_ID, API_KEY) as client:
         results = await client.multiple_queries_async(QUERIES)
         for game in results['results'][0]['hits']:
             if not game['title'].lower().startswith(tuple(self.wishlist)):
                 continue
             match = {
                 'box_art_url':
                 ('https://www.nintendo.com{}'.format(game['boxArt'])),
                 'normal_price':
                 '${}'.format(game['msrp']),
                 'percent_off':
                 get_percent_off(game['msrp'], game['salePrice']),
                 'sale_price':
                 '${}'.format(game['salePrice']),
                 'title':
                 game['title'],
             }
             wish_list_matches.append(match)
     self.attrs['on_sale'] = wish_list_matches
     self._state = len(wish_list_matches)
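# Note: using the client as an async context manager closes the underlying
# session automatically, and multiple_queries_async sends all queries in a
# single round trip.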
Example No. 11
    def test_get_secured_api_key_remaining_validity(self):
        import time

        now = int(time.time())
        api_key = SearchClient.generate_secured_api_key(
            "foo", {"validUntil": now - (60 * 10)}
        )

        remaining = SearchClient.get_secured_api_key_remaining_validity(api_key)

        self.assertTrue(remaining < 0)

        api_key = SearchClient.generate_secured_api_key(
            "foo", {"validUntil": now + (60 * 10)}
        )

        remaining = SearchClient.get_secured_api_key_remaining_validity(api_key)
        self.assertTrue(remaining > 0)

        api_key = SearchClient.generate_secured_api_key("foo", {})

        with self.assertRaises(ValidUntilNotFoundException) as _:
            SearchClient.get_secured_api_key_remaining_validity(api_key)
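# A minimal sketch (the parent key is a placeholder): signing a search-only
# key that expires in one hour and is restricted to a single index.
import time

public_key = SearchClient.generate_secured_api_key(
    'YourSearchOnlyApiKey',
    {'validUntil': int(time.time()) + 3600, 'restrictIndices': 'products'}
)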
Example No. 12
from algoliasearch.search_client import SearchClient
from configs import Config
from flask import Flask
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
from app.healthcheck import add_health_check

db = SQLAlchemy()
migrate = Migrate()

# Connect to Algolia
search_client = SearchClient.create(Config.ALGOLIA_APP_ID,
                                    Config.ALGOLIA_API_KEY)
index = search_client.init_index(Config.INDEX_NAME)


def create_app(config_class=Config):
    app = Flask(__name__, static_folder=None)
    app.config.from_object(config_class)
    app.url_map.strict_slashes = False

    Limiter(app,
            key_func=get_remote_address,
            default_limits=["200 per day", "50 per hour"])

    db.init_app(app)
    migrate.init_app(app, db)

    from app.api import bp as api_bp
Example No. 13
import os
from datetime import datetime

import pytz
from algoliasearch.search_client import SearchClient
from feedgen.feed import FeedGenerator

if __name__ == '__main__':
    client = SearchClient.create(os.environ['APP_ID'], os.environ['API_KEY'])
    index = client.init_index('interviews_publishedAt_desc')
    articles = index.search('')['hits']

    fg = FeedGenerator()
    fg.title('IH Interviews')
    fg.id('ih-interviews-20201123-205642')
    pubs = []
    for article in articles:
        pub = datetime.fromtimestamp(article['publishedAt'] /
                                     1000).replace(tzinfo=pytz.timezone('UTC'))
        pubs.append(pub)
        fe = fg.add_entry()
        fe.id(article['interviewId'])
        fe.published(pub)
        fe.pubDate(pub)
        fe.updated(pub)
        fe.title(article['title'])
        fe.link(
            href=f"https://www.indiehackers.com/interview/{article['interviewId']}")
    fg.updated(max(pubs))
    print(fg.atom_str(pretty=True).decode())
Example No. 14
from os import path
import urllib.parse
import html
from algoliasearch.search_client import SearchClient

from albertv0 import *

__iid__ = "PythonInterface/v0.2"
__prettyname__ = "Vue.js Docs"
__version__ = "0.1.0"
__trigger__ = "vue "
__author__ = "Rick West"
__dependencies__ = ["algoliasearch"]

client = SearchClient.create("BH4D9OD16A", "85cc3221c9f23bfbaa4e3913dd7625ea")
index = client.init_index("vuejs")

icon = "{}/icon.png".format(path.dirname(__file__))
google_icon = "{}/google.png".format(path.dirname(__file__))


def getSubtitles(hit):
    hierarchy = hit["hierarchy"]

    subtitles = []
    for x in range(2, 6):
        if hierarchy["lvl" + str(x)] is not None:
            subtitles.append(hierarchy["lvl" + str(x)])

    return subtitles
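# A hypothetical docsearch hit, to illustrate what getSubtitles extracts:
# only the non-empty hierarchy levels lvl2..lvl5 are collected.
hit = {"hierarchy": {"lvl0": "Guide", "lvl1": "Components", "lvl2": "Props",
                     "lvl3": None, "lvl4": None, "lvl5": None}}
assert getSubtitles(hit) == ["Props"]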
                urn=f"gitlab.nektar.info:{p.id}",
            )
    repo.update(
        objectID=md5(repo["urn"].encode()).hexdigest()[:8],
        inactive=datetime.datetime.now() - repo["updated_at"] >
        datetime.timedelta(days=365),
        # Imperial College London: https://www.grid.ac/institutes/grid.7445.2
        organisations=["grid.7445.2"] + v["organisations"],
        # EPSRC: https://search.crossref.org/funding?q=501100000266
        # MRC: https://search.crossref.org/funding?q=501100000265
        # Wellcome Trust: https://search.crossref.org/funding?q=100010269
        funders=v["funders"],
        contact_name=v["contact"].split(" <")[0],
        rsotm=v["rsotm"],
    )
    repos.append(repo)

if "ALGOLIA_APPLICATION_ID" in os.environ:
    client = SearchClient.create(os.environ["ALGOLIA_APPLICATION_ID"],
                                 os.environ["ALGOLIA_ADMIN_KEY"])
    index = client.init_index("software_showcase")
    index.save_objects(repos)
else:
    json.dump(
        repos,
        sys.stdout,
        indent=4,
        default=lambda x: f"{x.isoformat()}Z"
        if isinstance(x, datetime.datetime) else None,
    )
Example No. 16
import json
import os

from algoliasearch.search_client import SearchClient
from colorama import Fore, Style, init


def colour_me(text: str) -> str:
    """
    Returns the text in bright green
    """
    init(autoreset=True)

    return Style.BRIGHT + Fore.GREEN + text


# ------- Algolia Start ---------

APP_ID = "UT1XVMZE1Q"
INDEX_NAME = "gollahalli-website"
FILE_PATH = os.path.join('public', 'searchindex.json')

CLIENT = SearchClient.create(APP_ID, os.environ.get('ALGOLIA_KEY'))
INDEX = CLIENT.init_index(INDEX_NAME)

print(colour_me("PY> Clearing Previous Search Entries..."), end='')
INDEX.clear_objects()  # Clear previous entries.
print(colour_me("Done"))

print(colour_me("PY> Uploading New Search Index..."), end='')
BATCH = json.load(open(FILE_PATH))
INDEX.save_objects(BATCH)
print(colour_me("Done"))

# ------- Algolia End ---------

# ------- Sitemap Ping Start ---------
GOOGLE_PING_URL = "https://www.google.com/webmasters/tools/ping"
Example No. 17
import os
import csv

from algoliasearch.search_client import SearchClient

# add conditional for github workflow
#  Application ID and API Key

#from jproperties import Properties
#configs = Properties()
#with open('keys.properties', 'rb') as config_file:
#    configs.load(config_file)
#client = SearchClient.create(configs.get("ALGOLIAapplicationid").data, configs.get("ALGOLIAapikeysecret").data) #local

client = SearchClient.create("T81G59BI39", str(os.environ.get("KEY")))

index = client.init_index("my-notes")


def add_records(filename: str):

    with open(filename, newline="") as f:
        csv_r = list(csv.DictReader(f, delimiter=";"))

        # Bug: This checks # of rows and not change in indices
        try:
            len_idx = index.search("")["nbHits"]
            if len(csv_r) > len_idx:
                index.save_objects(csv_r[len_idx:],
                                   {"autoGenerateObjectIDIfNotExist": "true"})
                print(f"{len(csv_r[len_idx:])} new records added.")
Example No. 19
def get_index():
    client = SearchClient.create(os.getenv('ALGOLIA_ACCOUNT'),
                                 os.getenv('ALGOLIA_TOKEN'))

    return client.init_index(os.getenv('ALGOLIA_INDEX'))
Example No. 20
        tmp.seek(0)
        try:
            loaded = yaml.load(tmp, Loader=yaml.FullLoader)
            openapi = cleanup_openapi(filename, loaded)

            #print("NEW LEN: %d" % len(str(openapi)))
            if len(str(openapi)) > 10000:
                continue

            all_objects.append(openapi)

            #with open('%s.json' % filename, 'w') as fp:
            #    json.dump(loaded, fp)
        except yaml.scanner.ScannerError as e:
            print("Yaml error in file %s: %s" % (filename, e))
    #break

print("Largest filesize: %d" % largest)

from algoliasearch.search_client import SearchClient
client = SearchClient.create(algolia_client_key, algolia_secret_key)
index = client.init_index(algolia_index)

res = index.save_objects(all_objects,
                         {'autoGenerateObjectIDIfNotExist': False})
print(res)
#    {'firstname': 'Jimmie', 'lastname': 'Barninger'},
#    {'firstname': 'Warren', 'lastname': 'Speach'}
#],
#print(res)
Example No. 21
# Explain the difference in the number of docs with different facet filters

from algoliasearch.search_client import SearchClient

myApp = ''
myAPIkey = ''
myIndex = ''

client = SearchClient.create(myApp, myAPIkey)
index = client.init_index(myIndex)

query = ''
res = index.browse_objects({
    'query': query,
    'attributesToRetrieve': ['url'],
    'facetFilters': ['version:4.0']
})
total = 0
myHits = []
for hit in res:
    total = total + 1
    #print(hit)
    myHits.append(hit['url'][25:])
print(total)
#print(myHits)

resnew = index.browse_objects({
    'query': query,
    'attributesToRetrieve': ['url'],
    'facetFilters': ['version:latest']
})
Example No. 22
import argparse
import json
from algoliasearch.search_client import SearchClient

parser = argparse.ArgumentParser()
parser.add_argument('-f', '--index_file', help="Index file to upload to Algolia")
parser.add_argument('-a', '--app_id', help="Algolia application ID")
parser.add_argument('-k', '--admin_api_key', help="Algolia admin API key")
parser.add_argument('-n', '--index_name', help="Algolia index name")
args = parser.parse_args()

with open(args.index_file, 'r') as file:
    indices_json = file.read().replace('\n', '')

indices = json.loads(indices_json)
client = SearchClient.create(args.app_id, args.admin_api_key)
index = client.init_index(args.index_name)
index.save_objects(indices)
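# Note: save_objects batches large uploads into chunked requests under the
# hood, so one call works even for sizeable index files.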
Example No. 23
from algoliasearch.search_client import SearchClient
import requests
from flask import Flask, request, jsonify, abort, Response, redirect, url_for

client = SearchClient.create('1BX7OOMK9J', '8304543235ddfbb93d342a7f32b5da57')
index = client.init_index('food_details')

# Init app
app = Flask(__name__)


@app.route('/ingredient', methods=['POST', 'PUT', 'DELETE'])
def ingredient():  # function to handle ingredients in algolia
    # get request data
    data = request.get_json()
    response = jsonify(data=[])

    if request.method == 'POST':  # add new ingredients to Algolia
        index.save_object(data, {'autoGenerateObjectIDIfNotExist': True})
        return response, 204
    elif request.method == 'PUT':  # update existing ingredients in Algolia (just needs objectID)
        # partial update only needs the objectID plus the attributes to change
        index.partial_update_object(data)
        return response, 204
    elif request.method == 'DELETE':  # delete an existing ingredient in Algolia delete just requires 'objectID'
        index.delete_object(data['objectID'])
        return response, 204


# Run Server
if __name__ == '__main__':
Example No. 24
        "siteAddress": streetAddress_str,
        "siteAddress_json": siteAddress_json,
        "siteState": siteState,
        "siteZip": siteZip,
        "contactPhone": contactPhone,
        "startDate": startDate,
        "endDate": endDate,
        "daysofOperation": daysofOperation,
        "breakfastTime": breakfastTime,
        "lunchTime": lunchTime,
        "snackTimeAM": snackTimeAM,
        "snackTimePM": snackTimePM,
        "dinnerSupperTime": dinnerSupperTime,
        "openTimes": openTimes,
        "_geoloc": _geoloc,
        "_createdOn": _createdOn,
        "_updatedOn": None,  # not sure what to put here
    }

    all_records.append(record)

# ========================================
# Now, need to commit the data!

# Upload this way
client = SearchClient.create(APP_ID, ALGOLIA_API_KEY)

# push to production
index = client.init_index('prod_schools')
index.save_objects(all_records, {'autoGenerateObjectIDIfNotExist': True})
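# Note: {'autoGenerateObjectIDIfNotExist': True} lets Algolia assign object
# IDs; without it, save_objects requires every record to carry an objectID.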
 def setUp(self):
     self.client = SearchClient.create('foo', 'bar')
     self.client._transporter.write = mock.Mock(name='write')
     self.client._transporter.write.return_value = {}
Example No. 26
import os
from dotenv import load_dotenv
from algoliasearch.search_client import SearchClient

load_dotenv()
search_key = os.getenv("search_key")
account_id = os.getenv("account_id")
algolia_index = os.getenv("algolia_index")

from spellchecker import SpellChecker

spell = SpellChecker()

client = SearchClient.create(account_id, search_key)
index = client.init_index(algolia_index)

MAX_HITS = 150


def levenshtein(s1, s2):
    """
        Levenshtein or edit distance to compare two strings' similarity.
        The metric calculated the number of physical edits needed to get from s1 to s2.
        Inputs:
            s1 - first string
            s2 - second string to compare to the first
        Returns:
            integer with number of edits to get from s1 to s2.
    """
    if len(s1) < len(s2):
        return levenshtein(s2, s1)
Example No. 28
 def _get_client(self):
     backend = self.backend_record
     account = backend._get_api_credentials()
     return SearchClient.create(backend.algolia_app_id, account["password"])
Example No. 30
from time import sleep
from urllib.parse import urlparse

# Fix lambda dep
sys.path.append(
    os.path.join(os.path.dirname(os.path.realpath(__file__)), "./lib"))
import telebot
from algoliasearch.search_client import SearchClient

# Init telebot
logger = telebot.logger
bot = telebot.TeleBot(tgram_token, threaded=False)
telebot.logger.setLevel(logging.INFO)

# Init algolia
algolia = SearchClient.create(algolia_id, algolia_secret)


def cleantext(msg):
    """ Clear text from malicius data """
    return re.sub(r"[^a-z|A-Z|0-9|?| |'|!|?|_|-|,|.]", "", msg)


def lambda_handler(event, context):
    """ Lambda Handler (webhook via api gateway) """
    update = telebot.types.Update.de_json(event["body"])
    bot.process_new_updates([update])
    return {
        "body": "ok",
        "statusCode": 200,
        "headers": {
            re.search(headers, word).group(1) for word in body_list
            if re.search(headers, word)
        ],
        "content":
        re.sub(code_block, "", body_no_headers)
    }


def export_data(file_paths):
    json_data = []

    for path in file_paths:
        with open(path, 'r') as f:
            df = f.read().split('\n')
        try:
            json_data.append(structure_markdown(df, path))
        except Exception:
            print(path)  # skipped files
    return json_data


# generate list of all markdown files
file_paths = list_files(".", ".md")
json_data = export_data(file_paths)

# push json data to Algolia
algolia_key = os.environ['ALGOLIA_KEY']
client = SearchClient.create('02IYLG4AP9', algolia_key)
index = client.init_index('Tilburg_Science_Hub')

index.save_objects(json_data)
Example No. 32
from algoliasearch.search_client import SearchClient

client = SearchClient.create('xxxx', 'xxxxx')
index = client.init_index('articles')

res = index.search('cognito', {
    'attributesToRetrieve': ['title'],
    'hitsPerPage': 1
})

print(res)
Example No. 34
import pytz
from django.db.models.signals import post_save, pre_delete
import os
from algoliasearch.search_client import SearchClient
from django.utils.html import strip_tags
from model_utils import FieldTracker
from django.http import Http404
from django.conf import settings
from django.core.exceptions import (
    PermissionDenied,
    ObjectDoesNotExist,
    SuspiciousOperation,
)
from .xredis import re_set, re_incr


client = SearchClient.create(settings.ALGOLIA_APPLICATION_ID, settings.ALGOLIA_ADMIN_KEY)
index = client.init_index(
    ("prod" if "IN_HEROKU" in os.environ else "dev") + "_post_index"
)

index.set_settings(
    {
        "searchableAttributes": ["title,content"],
        "attributesForFaceting": ["community", "channel_id", "type"],
        "attributesToSnippet": ["content:20",],
        "snippetEllipsisText": "...",
    }
)
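# Note: "title,content" puts both attributes at the same priority level;
# listing them as separate entries would rank title matches above content.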

person_index = client.init_index(
    ("prod" if "IN_HEROKU" in os.environ else "dev") + "_person_index"
Example No. 35
import os
import logging
from algoliasearch.search_client import SearchClient
from utils import extract_domain

client = SearchClient.create(os.environ["ALGOLIA_APP_ID"],
                             os.environ["ALGOLIA_API_KEY"])
index = client.init_index("prod_businesses")


def get_domains():
    for hit in index.browse_objects():
        urls = set()
        for key in (
                "gift-card-link",
                "online-store-link",
                "online-order-link",
                "order-groceries-link",
                "donations-link",
                "website",
        ):
            value = hit.get(key)
            if value:
                urls.add(value)

        yield {"urls": urls, "object_id": hit["objectID"], "name": hit["name"]}


def save_keywords(keywords_by_object_id):
    logging.warning(f"Starting save to index")
    partial_objects = [{
Example No. 36
import firebase_admin
from firebase_admin import credentials
from firebase_admin import firestore
from firebase_admin import db

from algoliasearch.search_client import SearchClient
import algoliaSecret

client = SearchClient.create(algoliaSecret.algolia_app_id,
                             algoliaSecret.algolia_api_key)
kougiIndex = client.init_index('SyllabusViewer_kougis')

cred = credentials.ApplicationDefault()
firebase_admin.initialize_app(cred, {
    'projectId': 'syllubusviewer',
})
db = firestore.client()

users_ref = db.collection('kougis')
docs = users_ref.get()

objectBatch = []
count = 0

for doc in docs:
    id = doc.id
    doc = doc.to_dict()
    doc["objectID"] = id

    objectBatch.append(doc)
    if len(objectBatch) > 200:
Example No. 37
import googlemaps
import spacy

from algoliasearch.search_client import SearchClient

client = SearchClient.create('RSBCBF0EG8', '362a46f9db7603d913d9f4eea9a47fde')
index = client.init_index('Simon_test_wine')
gmaps = googlemaps.Client(key='AIzaSyBHAFJQ8hdhQCEscOUXKfqD_GDo4HJ-6Xo')

nlp = spacy.load("en_core_web_sm")

# Defined before use and renamed so it no longer shadows the builtin `list`
# (the original snippet defined it after the loop, which would fail at
# runtime); the term list is truncated in the original.
wine_terms = [
    'acidity', 'tannins', 'wood', 'fruit', 'structure', 'Citrus', 'minerality',
]

query = ''
res = index.browse_objects({'query': query, 'filters': 'country:France'})

for hit in res:
    text = hit["description"]
    doc = nlp(text)
    if doc:
        index.partial_update_object(
            {
                'objectID': hit["objectID"],
                'tags': [
                    chunk.text
                    for chunk in doc.noun_chunks if chunk.text in wine_terms
                ]
            }, {'createIfNotExists': 'true'})
Example No. 38
def get_algolia_client() -> SearchIndex:
    algolia_client = SearchClient.create(os.environ['ALGOLIA_APP_ID'],
                                         os.environ['ALGOLIA_APP_KEY'])
    algolia_index = algolia_client.init_index(os.environ['ALGOLIA_INDEX_NAME'])

    return algolia_index
Example No. 39
def index_generator(generator):
    index_name = generator.settings.get('ALGOLIA_INDEX_NAME', None)
    app_id = generator.settings.get('ALGOLIA_APP_ID', None)
    admin_api_key = generator.settings.get('ALGOLIA_ADMIN_API_KEY', None)

    if None in [index_name, app_id, admin_api_key]:
        logger.error("Algolia Indexe - settings error")
        return

    logger.info("Generating Algolia index '%s' for %d articles..." %
                (index_name, len(generator.articles)))

    client = SearchClient.create(app_id, admin_api_key)
    index = client.init_index(index_name)

    #TODO: use a metadata flag to bypass search indexing
    #TODO: make the indexed fields configurable via settings

    # pprint(index.get_settings())

    # common_settings = {
    #     'maxFacetHits': 20,
    #     'attributesToRetrieve': [
    #         'title',
    #         'summary',
    #         'content',
    #         'url',
    #         'created',
    #         'modified',
    #         'tags'
    #     ],
    #     'attributesToHighlight': [
    #         'title',
    #         'summary',
    #     ],
    #     'searchableAttributes': [
    #         'title',
    #         'summary',
    #         'content',
    #     ],
    #     'attributesForFaceting': [
    #         'tags',
    #     ],
    # }

    # settings = common_settings.copy()
    # settings.update({
    #     "replicas": [
    #         "blog_created_asc",
    #         "blog_title_asc",
    #         #"blog_created_desc",
    #     ],
    #     "ranking": [
    #         "desc(created)",
    #     ]
    # })
    # index.set_settings(settings)

    # blog_created_asc = client.init_index("blog_created_asc")
    # settings = common_settings.copy()
    # settings.update({
    #     "ranking": [
    #         "asc(created)",
    #     ]
    # })
    # blog_created_asc.set_settings(settings)

    # blog_title_asc = client.init_index("blog_title_asc")
    # settings = common_settings.copy()
    # settings.update({
    #     "ranking": [
    #         "asc(title)",
    #     ]
    # })
    # blog_title_asc.set_settings(settings)

    exists = []

    for article in generator.articles:
        try:
            logger.info("Indexing article: '%s'" % article.title)
            data = convert_article(article)
            objectId = hashlib.sha256(str(
                article.slug).encode('utf-8')).hexdigest()
            exists.append(objectId)
            # must be 'objectID' (capital D); otherwise Algolia auto-generates
            # IDs and the purge below would delete every article
            data['objectID'] = objectId
            index.save_object(data, {'autoGenerateObjectIDIfNotExist': True})
        except Exception as err:
            logger.error(err)

    logger.info('Purge old Algolia objects')
    for_delete = []
    res = index.browse_objects()
    for hit in res:
        if hit['objectID'] not in exists:
            for_delete.append(hit['objectID'])
            logger.debug('Delete old article[%s]' % hit['title'])

    if for_delete:
        res = index.delete_objects(for_delete)
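# The purge works because each object ID is derived deterministically from
# the article slug, so anything left in the index that was not regenerated
# on this run must be stale.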
Example No. 40
 def __init__(self, app_id: Text, search_key: Text, index: Text):
     self.client = SearchClient.create(app_id, search_key)
     self.index = self.client.init_index(index)
Example No. 41
from algoliasearch.search_client import SearchClient
import json

client = SearchClient.create('ONQFXGBCJV', 'ef67df9945bdf28cacfdc182f8981bd3')
index = client.init_index('Chloe')
batch = json.load(open('InventoryData.json'))
index.save_objects(batch, {'autoGenerateObjectIDIfNotExist': True})

index.set_settings(
    {"searchableAttributes": ["productName", "blurb", "productCode"]})