def define_autocomplete_dictionary():
    """Rebuild the autocomplete search dictionary for `name`.

    Also rebuilds the autocomplete dictionary for `categories` when that
    option is checked in E Commerce Settings.

    Returns True on success, False if clearing the old dictionaries fails.
    """
    cache = frappe.cache()
    name_completer = AutoCompleter(make_key(WEBSITE_ITEM_NAME_AUTOCOMPLETE), conn=cache)
    category_completer = AutoCompleter(make_key(WEBSITE_ITEM_CATEGORY_AUTOCOMPLETE), conn=cache)

    include_categories = frappe.db.get_single_value(
        'E Commerce Settings', 'show_categories_in_search_autocomplete'
    )

    # Drop both existing autocomplete dictionaries before repopulating.
    try:
        cache.delete(make_key(WEBSITE_ITEM_NAME_AUTOCOMPLETE))
        cache.delete(make_key(WEBSITE_ITEM_CATEGORY_AUTOCOMPLETE))
    except Exception:
        return False

    published_items = frappe.get_all(
        'Website Item',
        fields=['web_item_name', 'item_group'],
        filters={"published": 1},
    )
    for item in published_items:
        name_completer.add_suggestions(Suggestion(item.web_item_name))
        if include_categories and item.item_group:
            category_completer.add_suggestions(Suggestion(item.item_group))

    return True
def create_product_autocompleter():
    """Populate the 'productAutocompleter' suggestion dictionary.

    Adds each active product's name (weight 5.0) and its category name
    (weight 3.0), then every tag (weight 5.0) and every category-variant
    value (weight 2.0). Returns True.
    """
    completer = AutoCompleter('productAutocompleter')

    for product in Product.objects.filter(active=True):
        completer.add_suggestions(
            Suggestion(product.name, 5.0),
            Suggestion(product.category.name, 3.0),
        )

    for tag in Tag.objects.all():
        completer.add_suggestions(Suggestion(tag.tag, 5.0))

    for variant in CategoryVarient.objects.all():
        completer.add_suggestions(Suggestion(variant.value, 2.0))

    return True
def post(self):
    """Add the posted `location` as a suggestion in the RediSearch index.

    Reads `location` from the JSON request body and registers it with
    score 1.0 (no increment). Returns the client response and HTTP 200.
    """
    payload = request.json
    location = str(payload['location'])
    completer = AutoCompleter(
        current_app.config["REDISSEARCH_INDEX"],
        current_app.config["REDISSEARCH_URI"],
    )
    result = completer.add_suggestions(Suggestion(location, 1.0), increment=False)
    return {'msg': result}, 200
def add_suggestion(self, suggestion) -> bool:
    """Add a single suggestion string to the underlying autocompleter.

    Parameters:
        suggestion: string to register (wrapped in a `Suggestion`).

    Returns:
        True when the suggestion was added, False when the client raised.
    """
    try:
        self._auto_compl.add_suggestions(Suggestion(suggestion))
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; client errors still map to False.
        # Also dropped the unused `results` local.
        return False
    return True
def add_to_autocomplete(self, new_words):
    """Add every word in `new_words` to the autocompleter with score 1.0.

    On failure, logs a diagnostic line (index, host, port) and the
    exception to stderr instead of raising.
    """
    try:
        for word in new_words:
            self.ac.add_suggestions(Suggestion(word, 1.0))
    except Exception as e:
        # Fixed: the original used Python 2 `print >> sys.stderr, ...`
        # statements, which parse but raise TypeError at runtime on
        # Python 3. Converted to the print() function.
        print(
            "TAS_Autocompleter Error inside add_to_autocomplete Index:'",
            self.table_name, "' HOST:'", self.host, "' PORT:'", self.port, "'\n",
            file=sys.stderr,
        )
        print(e, file=sys.stderr)
def create_items_autocomplete_dict(autocompleter):
    """Add published website items as name suggestions in the Autocompleter."""
    published_items = frappe.get_all(
        "Website Item",
        fields=["web_item_name", "item_group"],
        filters={"published": 1},
    )
    for website_item in published_items:
        autocompleter.add_suggestions(Suggestion(website_item.web_item_name))
def loadFile(self):
    """Load up to `self.rows` lines from `self.file` as suggestions.

    Each stripped line becomes one Suggestion with score 1.0; all
    suggestions are sent to `self.client` in a single call.

    Fixes over the original:
    - an empty file no longer raises NameError on `cnt` after the loop
    - `add_suggestions()` is not called with zero suggestions
    - score is the float 1.0 (the library's expected numeric score)
      instead of the string '1'
    - the final count reports the number of rows actually loaded
      (the original printed the last 0-based index, one too few)
    """
    suggestions = []
    with open(self.file) as fp:
        for cnt, line in enumerate(fp):
            line = line.strip()
            print("Line {}: {}".format(cnt, line))
            suggestions.append(Suggestion(line, 1.0))
            if cnt == self.rows - 1:
                break
    if suggestions:
        self.client.add_suggestions(*suggestions)
    print('Finished loading ' + str(len(suggestions)) + ' rows.')
def add_to_autocomplete_combination(self, new_words):
    """Add every word-suffix of each phrase to the autocompleter.

    For a phrase "a b c" the suggestions "a b c", "b c" and "c" are each
    added with score 1.0. On failure, logs a diagnostic line and the
    exception to stderr instead of raising.
    """
    try:
        for phrase in new_words:
            words = phrase.split(' ')
            for start in range(len(words)):
                suffix = ' '.join(words[start:])
                self.ac.add_suggestions(Suggestion(suffix, 1.0))
    except Exception as e:
        # Fixed: the original used Python 2 `print >> sys.stderr, ...`
        # statements, which parse but raise TypeError at runtime on
        # Python 3. Converted to the print() function.
        print(
            "TAS_Autocompleter Error inside add_to_autocomplete_combination Index:'",
            self.table_name, "' HOST:'", self.host, "' PORT:'", self.port, "'\n",
            file=sys.stderr,
        )
        print(e, file=sys.stderr)
def insert():
    """Index movie documents for search and load titles into the autocompleter."""
    # Search/suggestion index over title plus pipe-separated genres.
    search_client = Client('movie')
    search_client.create_index(
        [TextField('title'), TagField('genres', separator='|')]
    )
    for idx in range(len(movie_df)):
        search_client.add_document(
            movie_df['tmdbId'][idx],
            title=movie_df['title'][idx],
            genres=movie_df['genres'][idx],
        )

    # Auto-completion dictionary of movie titles.
    completion_client = AutoCompleter('ac')
    for idx in range(len(movie_df)):
        completion_client.add_suggestions(Suggestion(movie_df['title'][idx]))
def create_item_groups_autocomplete_dict(autocompleter):
    """Add website item groups, weighted, as suggestions in the Autocompleter."""
    item_groups = frappe.get_all(
        "Item Group",
        fields=["name", "route", "weightage"],
        filters={"show_in_website": 1},
    )
    for group in item_groups:
        # Payload carries the route so it can be retrieved with the suggestion.
        extra = json.dumps({"name": group.name, "route": group.route})
        autocompleter.add_suggestions(
            Suggestion(
                string=group.name,
                score=frappe.utils.flt(group.weightage) or 1.0,
                payload=extra,
            )
        )
def save(self, mod):
    """Persist a module document and index it for search and autocomplete.

    Stores `mod` as JSON under this store's key, indexes its name and
    description, and feeds every non-stopword word from both fields to
    the suggestion engine.
    """
    # Store the module document as JSON.
    self._conn.jsonset(self._key, Path.rootPath(), mod)

    # Index it (nosave: the document body already lives under the JSON key).
    self._sconn.add_document(
        self._doc_id,
        nosave=True,
        name=mod['name'],
        description=mod['description'],
    )

    # Add the module's name and description words to the suggestions
    # engine, lower-cased and with stopwords removed. Raw string fixes
    # the invalid '\w' escape sequence in the original pattern
    # (a DeprecationWarning on modern Python).
    text = '{} {}'.format(mod['name'], mod['description'])
    words = {w.lower() for w in re.findall(r'\w+', text)}
    words = words.difference(stopwords)
    self._autocomplete.add_suggestions(*[Suggestion(w) for w in words])
from redisearch import TextField, NumericField, Query, AutoCompleter, Suggestion

# Seed the autocompleter with a couple of example suggestions.
ac = AutoCompleter('ac', 'redis-search')
ac.add_suggestions(Suggestion('google', 5.0), Suggestion('goo', 1.0))

# Each line of location-cnt.txt is "<count> <location>", where underscores
# in the location stand for spaces; the count becomes the suggestion score.
# Fixed: `with` closes the file — the original left the handle open.
with open("location-cnt.txt", "r") as f:
    for line in f:
        keywords = line.split(" ")
        keywords[1] = keywords[1].replace("_", " ")
        ac.add_suggestions(Suggestion(keywords[1].rstrip("\n"), float(keywords[0])))
def insert_to_name_ac(web_name, doc_name):
    """Register `web_name` in the website-item name autocompleter.

    The document name travels along as the suggestion payload.
    """
    completer = AutoCompleter(make_key(WEBSITE_ITEM_NAME_AUTOCOMPLETE), conn=frappe.cache())
    completer.add_suggestions(Suggestion(web_name, payload=doc_name))
def create_auto_complete(self, res):
    """Feed every word in `res` to the autocompleter with score 1.0."""
    for term in res:
        self.autocomplete.add_suggestions(Suggestion(term, 1.0))
def load_data(redis_server, redis_port, redis_password):
    """Create the fortune500-v1 search index, load ./fortune500.csv into
    Redis hashes, feed company titles to the 'ac' autocompleter, and
    alias the index as 'fortune500'.

    NOTE(review): the CSV column positions used below (row[0] rank,
    row[1] title, row[2] website, ...) are assumed from this loading
    code — confirm against the actual fortune500.csv header.
    """
    load_client = Client(
        'fortune500-v1',
        host=redis_server,
        password=redis_password,
        port=redis_port
    )
    # The autocompleter reuses the search client's Redis connection.
    load_ac = AutoCompleter(
        'ac',
        conn = load_client.redis
    )
    # Documents under the fortune500: prefix; title doubles as score field.
    definition = IndexDefinition(
        prefix=['fortune500:'],
        language='English',
        score_field='title',
        score=0.5
    )
    load_client.create_index(
        (
            TextField("title", weight=5.0),
            TextField('website'),
            TextField('company'),
            NumericField('employees', sortable=True),
            TextField('industry', sortable=True),
            TextField('sector', sortable=True),
            TextField('hqcity', sortable=True),
            TextField('hqstate', sortable=True),
            TextField('ceo'),
            TextField('ceoTitle'),
            NumericField('rank', sortable=True),
            NumericField('assets', sortable=True),
            NumericField('revenues', sortable=True),
            NumericField('profits', sortable=True),
            NumericField('equity', sortable=True),
            TagField('tags'),
            TextField('ticker')
        ),
        definition=definition)
    with open('./fortune500.csv', encoding='utf-8') as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        line_count = 0
        for row in csv_reader:
            if line_count > 0:  # skip the header row
                # Company title (double quotes stripped) as a suggestion.
                load_ac.add_suggestions(Suggestion(row[1].replace('"', ''), 1.0))
                # Hash key: "fortune500:<title with spaces removed>".
                load_client.redis.hset(
                    "fortune500:%s" %(row[1].replace(" ", '')),
                    mapping = {
                        'title': row[1],
                        'company': row[1],
                        'rank': row[0],
                        'website': row[2],
                        'employees': row[3],
                        'sector': row[4],
                        # tags: sector text with '&' and ',' removed,
                        # split into words, comma-joined, lower-cased.
                        'tags': ",".join(row[4].replace('&', '').replace(',', '').replace(' ', ' ').split()).lower(),
                        'industry': row[5],
                        'hqcity': row[8],
                        'hqstate': row[9],
                        'ceo': row[12],
                        'ceoTitle': row[13],
                        'ticker': row[15],
                        'revenues': row[17],
                        'profits': row[19],
                        'assets': row[21],
                        'equity': row[22]
                    })
            line_count += 1
    # Finally Create the alias
    load_client.aliasadd("fortune500")