Example #2
def get_category_suggestions(query):
    search_results = {"results": []}

    if not is_search_module_loaded():
        # Redisearch module not loaded, query db
        categories = frappe.db.get_all("Item Group",
                                       filters={
                                           "name":
                                           ["like", "%{0}%".format(query)],
                                           "show_in_website": 1
                                       },
                                       fields=["name", "route"])
        search_results['results'] = categories
        return search_results

    if not query:
        return search_results

    ac = AutoCompleter(make_key(WEBSITE_ITEM_CATEGORY_AUTOCOMPLETE),
                       conn=frappe.cache())
    suggestions = ac.get_suggestions(query, num=10)

    search_results['results'] = [s.string for s in suggestions]

    return search_results
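When RediSearch is available, the happy path above is a single read against a suggestion dictionary. A minimal round-trip sketch of that core API, assuming a local Redis with the RediSearch module loaded; the key 'category-ac' and the sample strings are illustrative:

from redisearch import AutoCompleter, Suggestion

ac = AutoCompleter('category-ac')  # connects to localhost:6379 by default
ac.add_suggestions(Suggestion('Electronics', 1.0), Suggestion('Electric Kettles', 1.0))

# get_suggestions returns Suggestion objects; .string holds the text
for s in ac.get_suggestions('Ele', num=10):
    print(s.string)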
Example #3
def post(self):
    mydata = request.json
    location = str(mydata['location'])
    ac = AutoCompleter(current_app.config["REDISSEARCH_INDEX"], current_app.config["REDISSEARCH_URI"])
    res = ac.add_suggestions(Suggestion(location, 1.0), increment=False)
    data = {'msg': res}
    return data, 200
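Assuming the resource above is registered under a route such as /locations (hypothetical; the actual path depends on the Api.add_resource call), it could be exercised with requests:

import requests

# Hypothetical URL; adjust to wherever the resource is mounted
resp = requests.post('http://localhost:5000/locations', json={'location': 'Amsterdam'})
print(resp.status_code, resp.json())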
Example #4
def get_category_suggestions(query):
    search_results = {"results": []}

    if not is_redisearch_enabled():
        # Redisearch module not enabled, query db
        categories = frappe.db.get_all(
            "Item Group",
            filters={
                "name": ["like", "%{0}%".format(query)],
                "show_in_website": 1
            },
            fields=["name", "route"],
        )
        search_results["results"] = categories
        return search_results

    if not query:
        return search_results

    ac = AutoCompleter(make_key(WEBSITE_ITEM_CATEGORY_AUTOCOMPLETE),
                       conn=frappe.cache())
    suggestions = ac.get_suggestions(query, num=10, with_payloads=True)

    results = [json.loads(s.payload) for s in suggestions]

    search_results["results"] = results

    return search_results
Example #6
def post(self):
    mydata = request.json
    location = str(mydata['location'])
    ac = AutoCompleter(current_app.config["REDISSEARCH_INDEX"], current_app.config["REDISSEARCH_URI"])
    res = ac.delete(location)
    if res == 1:
        data = {'msg': 'Location deleted'}
    else:
        data = {'msg': 'Location not found'}
    return data, 200
Example #8
def insert():
    # insertion of search/suggestion data
    suggestion_client = Client('movie')
    suggestion_client.create_index([TextField('title'), TagField('genres', separator = '|')])

    for i in range(0, len(movie_df)):
        suggestion_client.add_document(movie_df['tmdbId'][i], title = movie_df['title'][i], genres = movie_df['genres'][i])

    # insertion of auto-completion data
    completion_client = AutoCompleter('ac')

    for i in range(0, len(movie_df)):
        completion_client.add_suggestions(Suggestion(movie_df['title'][i]))
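A sketch of reading back the data that insert() loads, assuming the same 'movie' index and 'ac' autocompleter on a local Redis with RediSearch; the 'toy' and 'story' search terms are illustrative:

from redisearch import Client, AutoCompleter, Query

suggestion_client = Client('movie')
completion_client = AutoCompleter('ac')

# Prefix completion against the 'ac' dictionary
for s in completion_client.get_suggestions('toy', num=5):
    print('suggestion:', s.string)

# Full-text search against the 'movie' index
res = suggestion_client.search(Query('story').paging(0, 5))
for doc in res.docs:
    print(doc.id, doc.title)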
Example #9
def define_autocomplete_dictionary():
    """Creates an autocomplete search dictionary for `name`.
    Also creates an autocomplete dictionary for `categories` if
    checked in E Commerce Settings."""

    cache = frappe.cache()
    name_ac = AutoCompleter(make_key(WEBSITE_ITEM_NAME_AUTOCOMPLETE),
                            conn=cache)
    cat_ac = AutoCompleter(make_key(WEBSITE_ITEM_CATEGORY_AUTOCOMPLETE),
                           conn=cache)

    ac_categories = frappe.db.get_single_value(
        'E Commerce Settings', 'show_categories_in_search_autocomplete')

    # Delete both autocomplete dicts
    try:
        cache.delete(make_key(WEBSITE_ITEM_NAME_AUTOCOMPLETE))
        cache.delete(make_key(WEBSITE_ITEM_CATEGORY_AUTOCOMPLETE))
    except Exception:
        return False

    items = frappe.get_all('Website Item',
                           fields=['web_item_name', 'item_group'],
                           filters={"published": 1})

    for item in items:
        name_ac.add_suggestions(Suggestion(item.web_item_name))
        if ac_categories and item.item_group:
            cat_ac.add_suggestions(Suggestion(item.item_group))

    return True
Example #10
def product_autocomplete(request):
    autocomplete_key = request.POST.get('autocomplete_key', "").strip()
    if len(autocomplete_key) == 0:
        return JsonResponse({'autocomplete_values': []})
    auto_completer = AutoCompleter('productAutocompleter')
    autocomplete_values = []
    if auto_completer.len() == 0:
        create_product_autocompleter()
    res = auto_completer.get_suggestions(autocomplete_key, fuzzy=True)

    for acv in res:
        autocomplete_values.append(str(acv))
    context = {'autocomplete_values': autocomplete_values}
    return JsonResponse(context)
Example #11
def refresh_search_keys(request):
    if (request.user.is_authenticated() and request.user.is_staff):
        client = Client('productIndex')
        total_old_docts = client.info()['num_docs']
        delete_status = client.drop_index()
        new_index = False
        if delete_status == 'OK':
            new_index = create_product_search_index()
        auto_completer = AutoCompleter('productAutocompleter')
        auto_completer_old_count = auto_completer.len()
        create_product_autocompleter()
        auto_completer_new_count = auto_completer.len()
        return JsonResponse({'success': True})
    else:
        return JsonResponse({'success': False})
Example #12
class CacheEngine:
    def __init__(self, hostname: str, idx_name: str, port=6379) -> None:
        self._ready = False
        self._setup_client(hostname, idx_name, port)

    def _setup_client(self, hostname: str, idx_name: str, port=6379) -> None:
        try:
            self._client = Client(idx_name, host=hostname, port=port)
            self._auto_compl = AutoCompleter(idx_name, hostname, port=port)
            self._hostname = hostname
            self._port = port
            self._idx = idx_name
            self._ready = True
            LOGGER.info("Cache engine is ready")
        except Exception:
            self._client = None
            LOGGER.error("Cache engine is faulty!")

    def add_doc(self, doc_id: str, data: dict) -> Any:
        if data is None:
            return False
        results = self._client.redis.hset(doc_id, mapping=data)
        return results

    def search(self, text_to_search: str) -> Result:
        results: Result = self._client.search(text_to_search)
        return results

    def get_doc(self, doc_id) -> Document:
        try:
            data = self._client.load_document(doc_id)
            return data
        except Exception:
            return None

    def add_suggestion(self, suggestion) -> bool:
        try:
            self._auto_compl.add_suggestions(Suggestion(suggestion))
        except Exception:
            return False
        return True

    def get_suggestion(self, str_to_suggest: str) -> List:
        suggs = self._auto_compl.get_suggestions(str_to_suggest,
                                                 fuzzy=len(str_to_suggest) > 3)
        return suggs
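A minimal usage sketch for the class above, assuming a local Redis with RediSearch; the index name 'demo-idx' and the sample document are illustrative:

engine = CacheEngine('localhost', 'demo-idx')
engine.add_doc('doc:1', {'title': 'hello world'})
engine.add_suggestion('hello world')
print([s.string for s in engine.get_suggestion('hel')])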
Example #13
class TTAutoCompleter:
    def __init__(self, host=ip, port=port, db=db, autocomplete_name='Default'):
        self.ipAdd = host
        self.ipPort = port
        self.db = db
        self.autocomplete = AutoCompleter(autocomplete_name, host, port)

    def create_auto_complete(self, res):
        for word in res:
            self.autocomplete.add_suggestions(Suggestion(word, 1.0))

    def suggest(self, keyword):
        return self.autocomplete.get_suggestions(keyword)
        #return self.autocomplete.get_suggestions(keyword,fuzzy=True)

    def delete_auto_complete(self, key):
        self.autocomplete.delete(key)
Example #14
    def __init__(self, host=ip, port=port, db=db, autocomplete_name='Default'):
        self.client = Client(autocomplete_name, host, port)
        self.ipAdd = host
        self.ipPort = port
        self.db = db
        self.redisConn = redis.StrictRedis(host=self.ipAdd, port=self.ipPort, db=self.db)
        self.autocomplete = AutoCompleter(autocomplete_name, host, port)
        self.escape1 = re.compile(r'&#\d+;')
        self.escape2 = re.compile(r',|\.|<|>|{|}|\[|\]|"|\'|:|;|!|@|#|\$|%|\^|&|\*|\(|\)|-|\+|=|~')
        self.escape3 = re.compile(r'\s+')
Example #15
def define_autocomplete_dictionary():
	"""
	Defines/Redefines an autocomplete search dictionary for Website Item Name.
	Also creates an autocomplete dictionary for Published Item Groups.
	"""

	cache = frappe.cache()
	item_ac = AutoCompleter(make_key(WEBSITE_ITEM_NAME_AUTOCOMPLETE), conn=cache)
	item_group_ac = AutoCompleter(make_key(WEBSITE_ITEM_CATEGORY_AUTOCOMPLETE), conn=cache)

	# Delete both autocomplete dicts
	try:
		cache.delete(make_key(WEBSITE_ITEM_NAME_AUTOCOMPLETE))
		cache.delete(make_key(WEBSITE_ITEM_CATEGORY_AUTOCOMPLETE))
	except Exception:
		raise_redisearch_error()

	create_items_autocomplete_dict(autocompleter=item_ac)
	create_item_groups_autocomplete_dict(autocompleter=item_group_ac)
Example #16
def product_search(query, limit=10, fuzzy_search=True):
    search_results = {"from_redisearch": True, "results": []}

    if not is_redisearch_enabled():
        # Redisearch module not enabled
        search_results["from_redisearch"] = False
        search_results["results"] = get_product_data(query, 0, limit)
        return search_results

    if not query:
        return search_results

    red = frappe.cache()
    query = clean_up_query(query)

    # TODO: Check perf/correctness with Suggestions & Query vs only Query
    # TODO: Use Levenshtein Distance in Query (max=3)
    ac = AutoCompleter(make_key(WEBSITE_ITEM_NAME_AUTOCOMPLETE), conn=red)
    client = Client(make_key(WEBSITE_ITEM_INDEX), conn=red)
    suggestions = ac.get_suggestions(
        query,
        num=limit,
        # Fuzzy search on very short queries (3 characters or fewer) can be really slow
        fuzzy=fuzzy_search and len(query) > 3,
    )

    # Build a query
    query_string = query

    for s in suggestions:
        query_string += f"|('{clean_up_query(s.string)}')"

    q = Query(query_string)

    results = client.search(q)
    search_results["results"] = list(map(convert_to_dict, results.docs))
    search_results["results"] = sorted(
        search_results["results"],
        key=lambda k: frappe.utils.cint(k["ranking"]),
        reverse=True)

    return search_results
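To make the union query concrete, here is the string the loop above would build for an input of 'shoe' with two hypothetical suggestions:

query_string = 'shoe'
for s in ['shoes', 'shoe rack']:
    query_string += f"|('{s}')"
print(query_string)  # shoe|('shoes')|('shoe rack')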
Example #17
class SuggestionUpdater:
    def __init__(self, args):
        self.host = args.host
        self.port = args.port
        self.key = args.key
        self.file = args.file
        self.rows = args.rows
        self.client = AutoCompleter(self.key, self.host, self.port)

    def loadFile(self):
        suggs = []
        with open(self.file) as fp:
            for cnt, line in enumerate(fp):
                line = line.strip()
                print("Line {}: {}".format(cnt, line))
                suggs.append(Suggestion(line, 1.0))
                if cnt == self.rows - 1:
                    break
            self.client.add_suggestions(*suggs)
            print('Finished loading {} rows.'.format(cnt + 1))
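A sketch of the argparse wiring that could drive SuggestionUpdater; the argument names are inferred from the attributes read in __init__, and the defaults are illustrative:

import argparse

parser = argparse.ArgumentParser(description='Bulk-load suggestions into a RediSearch dictionary')
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', type=int, default=6379)
parser.add_argument('--key', default='ac')
parser.add_argument('--file', required=True)
parser.add_argument('--rows', type=int, default=1000)

if __name__ == '__main__':
    updater = SuggestionUpdater(parser.parse_args())
    updater.loadFile()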
Example #18
def create_product_autocompleter():
    auto_completer = AutoCompleter('productAutocompleter')
    products = Product.objects.filter(active=True)
    for product in products:
        title = product.name
        category = product.category.name
        auto_completer.add_suggestions(Suggestion(title, 5.0),
                                       Suggestion(category, 3.0))
    for tag in Tag.objects.all():
        auto_completer.add_suggestions(Suggestion(tag.tag, 5.0))

    for cv in CategoryVarient.objects.all():
        auto_completer.add_suggestions(Suggestion(cv.value, 2.0))
    return True
Example #19
class TAS_Autocompleter:

    # constructor
    def __init__(self, autocomplete_name, host='localhost', port=6379):
        try:
            self.host = host
            self.port = port
            self.table_name = autocomplete_name
            self.ac = AutoCompleter(autocomplete_name, host, port)
        except Exception as e:
            print("TAS_Autocompleter error inside constructor Index:'{}' HOST:'{}' PORT:'{}'".format(
                self.table_name, self.host, self.port), file=sys.stderr)
            print(e, file=sys.stderr)

    # adds the given list of words to the autocomplete entries under this name
    def add_to_autocomplete(self, new_words):
        try:
            for word in new_words:
                self.ac.add_suggestions(Suggestion(word, 1.0))
        except Exception as e:
            print("TAS_Autocompleter error inside add_to_autocomplete Index:'{}' HOST:'{}' PORT:'{}'".format(
                self.table_name, self.host, self.port), file=sys.stderr)
            print(e, file=sys.stderr)

    # not working as intended, researching more...
    # adds every trailing word combination of each phrase to the autocomplete dictionary
    def add_to_autocomplete_combination(self, new_words):
        try:
            for word in new_words:
                splitted = word.split(' ')
                splittedLength = len(splitted)
                for index in range(0, splittedLength):
                    toAdd = ' '.join(splitted[index:splittedLength])
                    self.ac.add_suggestions(Suggestion(toAdd, 1.0))

        except Exception as e:
            print("TAS_Autocompleter error inside add_to_autocomplete_combination Index:'{}' HOST:'{}' PORT:'{}'".format(
                self.table_name, self.host, self.port), file=sys.stderr)
            print(e, file=sys.stderr)

    # returns auto-suggestions for the given prefix
    def suggest(self, prefix):
        try:
            return self.ac.get_suggestions(prefix, fuzzy=True)
        except Exception as e:
            print("TAS_Autocompleter error inside suggest Index:'{}' HOST:'{}' PORT:'{}'".format(
                self.table_name, self.host, self.port), file=sys.stderr)
            print(e, file=sys.stderr)

    def delete(self, prefix):
        try:
            return self.ac.delete(prefix)
        except Exception as e:
            print("TAS_Autocompleter error inside delete Index:'{}' HOST:'{}' PORT:'{}'".format(
                self.table_name, self.host, self.port), file=sys.stderr)
            print(e, file=sys.stderr)
Example #20
def load_data(redis_server, redis_port, redis_password):
   load_client = Client(
      'fortune500-v1',
      host=redis_server,
      password=redis_password,
      port=redis_port
   )
   load_ac = AutoCompleter(
      'ac',
      conn=load_client.redis
   )
   
   definition = IndexDefinition(
           prefix=['fortune500:'],
           language='English',
           score_field='title',
           score=0.5
           )
   load_client.create_index(
           (
               TextField("title", weight=5.0),
               TextField('website'),
               TextField('company'),
               NumericField('employees', sortable=True),
               TextField('industry', sortable=True),
               TextField('sector', sortable=True),
               TextField('hqcity', sortable=True),
               TextField('hqstate', sortable=True),
               TextField('ceo'),
               TextField('ceoTitle'),
               NumericField('rank', sortable=True),
               NumericField('assets', sortable=True),
               NumericField('revenues', sortable=True),
               NumericField('profits', sortable=True),
               NumericField('equity', sortable=True),
               TagField('tags'),
               TextField('ticker')
               ),        
       definition=definition)

   with open('./fortune500.csv', encoding='utf-8') as csv_file:
      csv_reader = csv.reader(csv_file, delimiter=',')
      line_count = 0
      for row in csv_reader:
         if line_count > 0:
            load_ac.add_suggestions(Suggestion(row[1].replace('"', ''),  1.0))
            load_client.redis.hset(
                    "fortune500:%s" %(row[1].replace(" ", '')),
                    mapping = {
                        'title': row[1],
                        'company': row[1],
                        'rank': row[0],
                        'website': row[2],
                        'employees': row[3],
                        'sector': row[4],
                        'tags': ",".join(row[4].replace('&', '').replace(',', '').replace('  ', ' ').split()).lower(),
                        'industry': row[5],
                        'hqcity': row[8],
                        'hqstate': row[9],
                        'ceo': row[12],
                        'ceoTitle': row[13],
                        'ticker': row[15],
                        'revenues': row[17],
                        'profits': row[19],
                        'assets': row[21],
                        'equity': row[22]

               })
         line_count += 1
   # Finally Create the alias
   load_client.aliasadd("fortune500")

if environ.get('REDIS_PORT') is not None:
   redis_port = int(environ.get('REDIS_PORT'))
else:
   redis_port = 6379

if environ.get('REDIS_PASSWORD') is not None:
   redis_password = environ.get('REDIS_PASSWORD')
else:
   redis_password = ''

client = Client(
   'fortune500',
   host=redis_server,
   password=redis_password,
   port=redis_port
   )
ac = AutoCompleter(
   'ac',
   conn=client.redis
)



nav = Nav()
topbar = Navbar('',
    View('Home', 'index'),
    View('Aggregations', 'show_agg'),
    View('CEO Search', 'search_ceo'),
    View('Tag Search', 'search_tags'),
)
nav.register_element('top', topbar)

def agg_by(field):
   ar = aggregation.AggregateRequest().group_by(field, reducers.count().alias('my_count')).sort_by(aggregation.Desc('@my_count'))
Example #22
from redisearch import TextField, NumericField, Query, AutoCompleter, Suggestion

ac = AutoCompleter('ac', 'redis-search')

ac.add_suggestions(Suggestion('google', 5.0), Suggestion('goo', 1.0))
f = open("location-cnt.txt", "r")
for line in f:
    keywords = line.split(" ")
    keywords[1] = keywords[1].replace("_", " ")
    ac.add_suggestions(Suggestion(keywords[1].rstrip("\n"),
                                  float(keywords[0])))
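The weights loaded above ('google' at 5.0, and the counts read from the file) influence ranking; a sketch of reading scores back with with_scores=True (the 'goo' prefix is illustrative):

for s in ac.get_suggestions('goo', num=5, with_scores=True):
    print(s.string, s.score)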
Example #24
class Hub(object):
    dconn = None  # document store connection
    sconn = None  # search index connection
    qconn = None  # queue connection
    gh = None
    autocomplete = None
    repo = None
    _ts = None
    _hubkey = 'hub:catalog'
    _ixname = 'ix'
    _acname = 'ac'

    def __init__(self,
                 ghlogin_or_token=None,
                 docs_url=None,
                 search_url=None,
                 queue_url=None,
                 repo=None):
        timestamp = datetime.utcnow()
        logger.info('Initializing temporary hub {}'.format(timestamp))

        if ghlogin_or_token:
            self.gh = Github(ghlogin_or_token)
        elif 'GITHUB_TOKEN' in os.environ:
            self.gh = Github(os.environ['GITHUB_TOKEN'])
        else:
            logger.info("Env var 'GITHUB_TOKEN' not found")

        if docs_url:
            pass
        elif 'DOCS_REDIS_URL' in os.environ:
            docs_url = os.environ['DOCS_REDIS_URL']
        else:
            logger.critical('No Redis for document storage... bye bye.')
            raise RuntimeError('No Redis for document storage... bye bye.')
        self.dconn = ReJSONClient().from_url(docs_url)

        if search_url:
            pass
        elif 'SEARCH_REDIS_URL' in os.environ:
            search_url = os.environ['SEARCH_REDIS_URL']
        else:
            search_url = docs_url
        conn = Redis(connection_pool=ConnectionPool().from_url(search_url))
        self.sconn = RediSearchClient(self._ixname, conn=conn)
        self.autocomplete = AutoCompleter(self._acname, conn=conn)

        if queue_url:
            pass
        elif 'QUEUE_REDIS_URL' in os.environ:
            queue_url = os.environ['QUEUE_REDIS_URL']
        else:
            queue_url = docs_url
        self.qconn = StrictRedis.from_url(queue_url)

        if repo:
            pass
        elif 'REDISMODULES_REPO' in os.environ:
            repo = os.environ['REDISMODULES_REPO']
        else:
            logger.critical('No REDISMODULES_REPO... bye bye.')
            raise RuntimeError('No REDISMODULES_REPO... bye bye.')
        self.repo = repo

        # Check if hub exists
        if self.dconn.exists(self._hubkey):
            self._ts = datetime.fromtimestamp(
                float(self.dconn.jsonget(self._hubkey, Path('.created'))))
            logger.info('Latching to hub {}'.format(self._ts))
        else:
            self._ts = timestamp
            logger.info('Creating hub {}'.format(self._ts))
            self.createHub()
            self.addModulesRepo(self.repo)

    def get_repo_url(self):
        return 'https://github.com/{}'.format(self.repo)

    def createHub(self):
        logger.info('Creating the hub in the database {}'.format(self._ts))
        # Store the master modules catalog as an object
        self.dconn.jsonset(
            self._hubkey, Path.rootPath(), {
                'created': str(_toepoch(self._ts)),
                'modules': {},
                'submissions': [],
                'submit_enabled': False
            })

        # Create a RediSearch index for the modules
        # TODO: catch errors
        self.sconn.create_index(
            (TextField('name', sortable=True), TextField('description'),
             NumericField('stargazers_count', sortable=True),
             NumericField('forks_count', sortable=True),
             NumericField('last_modified', sortable=True)),
            stopwords=stopwords)

    def deleteHub(self):
        # TODO
        pass

    def addModule(self, mod):
        logger.info('Adding module to hub {}'.format(mod['name']))
        # Store the module object as a document
        m = RedisModule(self.dconn, self.sconn, self.autocomplete, mod['name'])
        m.save(mod)

        # Add a reference to it in the master catalog
        self.dconn.jsonset(
            self._hubkey, Path('.modules["{}"]'.format(m.get_id())), {
                'id': m.get_id(),
                'key': m.get_key(),
                'created': str(_toepoch(self._ts)),
            })

        # Schedule a job to refresh repository statistics, starting from now and every hour
        s = Scheduler(connection=self.qconn)
        job = s.schedule(
            scheduled_time=datetime(1970, 1, 1),
            func=callRedisModuleUpateStats,
            args=[m.get_id()],
            interval=60 * 60,  # every hour
            repeat=None,  # indefinitely
            ttl=0,
            result_ttl=0)
        return m

    """
    Adds modules to the hub from a local directory
    TODO: deprecate asap
    """

    def addModulesPath(self, path):
        logger.info('Loading modules from local path {}'.format(path))
        # Iterate module JSON files
        for filename in os.listdir(path):
            if filename.endswith(".json"):
                with open('{}/{}'.format(path, filename)) as fp:
                    mod = json.load(fp)

                m = self.addModule(mod)

    """
    Adds a modules to the hub from a github repository
    """

    def addModulesRepo(self, name, path='/modules/'):
        # TODO: check for success
        q = Queue(connection=self.qconn)
        q.enqueue(callLoadModulesFromRepo, name, path)

    def loadModulesFromRepo(self, name, path):
        logger.info('Loading modules from Github {} {}'.format(name, path))
        # TODO: error handling, sometimes not all contents are imported?
        repo = self.gh.get_repo(name)
        files = repo.get_dir_contents(path)
        for f in files:
            mod = json.loads(f.decoded_content)
            m = self.addModule(mod)

    """
    Submits a module to the hub
    """

    def submitModule(self, repo_id, **kwargs):
        logger.info('Module submitted to hub {}'.format(repo_id))
        repo_id = repo_id.lower()
        ts = datetime.utcnow()
        res = {'id': repo_id, 'status': 'failed'}

        if not self.dconn.jsonget(self._hubkey, Path('submit_enabled')):
            res['message'] = 'Module submission is currently disabled'
            return res

        # Check if the module is already listed
        m = RedisModule(self.dconn, self.sconn, self.autocomplete, repo_id)
        if m.exists:
            # TODO: return in search results
            res['message'] = 'Module already listed in the hub'
            return res

        # Check if there's an active submission, or if the failure was too recent
        submission = Submission(self.dconn, repo_id)
        if submission.exists:
            status = submission.status
            if status != 'failed':
                res['status'] = 'active'
                res['message'] = 'Active submission found for module'
                return res
            else:
                # TODO: handle failed submissions
                res['message'] = 'Module already submitted to the hub and had failed, please reset manually for now'
                return res

        # Store the new submission
        submission.save(**kwargs)

        # Record the submission in the catalog
        # TODO: find a good use for that, e.g. 5 last submissions
        self.dconn.jsonarrappend(self._hubkey, Path('.submissions'), {
            'id': submission.get_id(),
            'created': submission.created,
        })

        # Add a job to process the submission
        q = Queue(connection=self.qconn)
        job = q.enqueue(callProcessSubmission, submission.get_id())
        if job is None:
            res['message'] = 'Submission job could not be created'
            # TODO: design retry path
            logger.error(
                'Could not create submission processing job for {}'.format(
                    submission.get_id()))
        else:
            res['status'] = 'queued'
            submission.status = res['status']
            submission.job = job.id

        return res

    def viewSubmissionStatus(self, repo_id):
        submission = Submission(self.dconn, repo_id)
        if submission.exists:
            res = {
                'id': submission.get_id(),
                'status': submission.status,
                'message': submission.message,
            }
            if 'finished' == res['status']:
                res['pull_number'] = submission.pull_number
                res['pull_url'] = submission.pull_url
            return res

    def processSubmission(self, repo_id):
        logger.info('Processing submission for {}'.format(repo_id))
        submission = Submission(self.dconn, repo_id)
        if submission.exists:
            return submission.process(self.gh, self.repo)

    def viewModules(self, query=None, sort=None):
        if not query:
            # Use a purely negative query to get all modules
            query = '-etaoinshrdlu'
        q = Query(query).no_content().paging(0, 1000)
        if sort:
            if sort == 'relevance':
                pass
            elif sort == 'update':
                q.sort_by('last_modified')
            elif sort == 'stars':
                q.sort_by('stargazers_count', asc=False)
            elif sort == 'forks':
                q.sort_by('forks_count', asc=False)
            elif sort == 'name':
                q.sort_by('name')

        results = self.sconn.search(q)
        mods = []
        fetch_duration = 0
        # TODO: this should be pipelined
        for doc in results.docs:
            m = RedisModule(self.dconn, self.sconn, self.autocomplete, doc.id)
            res, duration = _durationms(m.to_dict)
            mods.append(res)
            fetch_duration += duration

        return {
            'results': results.total,
            'search_duration': '{:.3f}'.format(results.duration),
            'fetch_duration': '{:.3f}'.format(fetch_duration),
            'total_duration':
            '{:.3f}'.format(fetch_duration + results.duration),
            'modules': mods,
        }

    def viewSearchSuggestions(self, prefix):
        suggestions = self.autocomplete.get_suggestions(prefix)
        return [s.string for s in suggestions]
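A sketch of exercising the suggestion path on the class above, assuming DOCS_REDIS_URL, REDISMODULES_REPO and the other environment variables it reads are set (the 'redi' prefix is illustrative):

hub = Hub()
print(hub.viewSearchSuggestions('redi'))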
Example #25
def delete_from_ac_dict(website_item_doc):
    """Removes this items's name from autocomplete dictionary"""
    cache = frappe.cache()
    name_ac = AutoCompleter(make_key(WEBSITE_ITEM_NAME_AUTOCOMPLETE),
                            conn=cache)
    name_ac.delete(website_item_doc.web_item_name)
Example #26
def insert_to_name_ac(web_name, doc_name):
    ac = AutoCompleter(make_key(WEBSITE_ITEM_NAME_AUTOCOMPLETE),
                       conn=frappe.cache())
    ac.add_suggestions(Suggestion(web_name, payload=doc_name))
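Payloads stored this way come back when querying with with_payloads=True, as Example #4 shows. A minimal sketch against the same dictionary; the item name and document name are illustrative:

ac = AutoCompleter(make_key(WEBSITE_ITEM_NAME_AUTOCOMPLETE), conn=frappe.cache())
ac.add_suggestions(Suggestion('Blue Shirt', payload='WEB-ITM-0001'))
for s in ac.get_suggestions('Blu', with_payloads=True):
    print(s.string, s.payload)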
Example #27
def get(self):
    keyword = request.args['term']
    ac = AutoCompleter(current_app.config["REDISSEARCH_INDEX"], current_app.config["REDISSEARCH_URI"])
    res = ac.get_suggestions(keyword, fuzzy=True)
    return {"suggestion": [x.string for x in res]}, 200
Example #28
from redisearch import Client, AutoCompleter, TextField, IndexDefinition, Query
from flask import Flask, escape, request, jsonify
from flask_cors import CORS

app = Flask(__name__)
CORS(app)
client = Client('idx:addr', 'redis')
ac = AutoCompleter('ac', 'redis')


@app.route('/recherche')
def recherches_adresses():
    query = request.args.get("q")
    q = Query(query).language('french').paging(0, 10)
    res = client.search(q)
    adresses = {}
    for i, doc in enumerate(res.docs):
        adresses[i] = {
            "value": doc.id.replace("addr:", ""),
            "label": doc.adresse
        }
    return jsonify(adresses=adresses)


@app.route('/suggestions')
def suggestions_adresses():
    query = request.args.get("q")
    suggs = ac.get_suggestions(query, fuzzy=True, with_payloads=True)
    adresses = {}
    for i, sugg in enumerate(suggs):
        adresses[i] = {"value": sugg.payload, "label": sugg.string}
    return jsonify(adresses=adresses)