def do_fuzzyindex(self, word):
    """Compute fuzzy extensions of word that exist in index.
    FUZZYINDEX lilas"""
    # Normalize the raw input and keep only its first processed token.
    word = list(preprocess_query(word))[0]
    token = Token(word)
    # Pair every fuzzy variant with how many index entries reference it.
    counted = [(variant, DB.zcard(dbkeys.token_key(variant)))
               for variant in make_fuzzy(token)]
    # Most frequent variants first.
    counted.sort(key=lambda pair: pair[1], reverse=True)
    for variant, freq in counted:
        if not freq:
            # Sorted descending, so every remaining variant is unused too.
            break
        print(white(variant), blue(freq))
def do_fuzzyindex(self, word):
    """Compute fuzzy extensions of word that exist in index.
    FUZZYINDEX lilas"""
    # Normalize the raw input and keep only its first processed token.
    word = list(preprocess_query(word))[0]
    token = Token(word)
    # Populates token.neighbors with fuzzy variants of the token.
    token.make_fuzzy()
    # Pair every variant with how many index entries reference it.
    counted = [(variant, DB.zcard(dbkeys.token_key(variant)))
               for variant in token.neighbors]
    # Most frequent variants first.
    counted.sort(key=lambda pair: pair[1], reverse=True)
    for variant, freq in counted:
        if not freq:
            # Sorted descending, so every remaining variant is unused too.
            break
        print(white(variant), blue(freq))
def try_fuzzy(helper, tokens, include_common=True):
    """Widen the search by trying fuzzy variants of the given tokens.

    For each candidate token (longest first), fuzzy variants are generated
    and those confirmed by the index are fed back into the helper's result
    bucket.  Stops early once the bucket is full.

    helper -- search helper holding the current query state (keys, bucket,
              debug logging); exact type not visible from here.
    tokens -- candidate tokens to fuzz; NOTE: sorted in place.
    include_common -- also narrow the intersection with common tokens not
                      already part of the query keys.
    """
    # Nothing to do if the bucket already has enough results or there is
    # nothing to fuzz.
    if not helper.bucket_dry or not tokens:
        return
    helper.debug('Fuzzy on. Trying with %s.', tokens)
    # Longest tokens first: presumably the most discriminating candidates.
    tokens.sort(key=lambda t: len(t), reverse=True)
    allkeys = helper.keys[:]
    if include_common:
        # As we are in fuzzy, try to narrow as much as possible by adding
        # unused common tokens.
        allkeys.extend(
            [t.db_key for t in helper.common if t.db_key not in helper.keys])
    for try_one in tokens:
        if helper.bucket_full:
            break
        # Intersect against every key except the token being fuzzed.
        keys = allkeys[:]
        if try_one.db_key in keys:
            keys.remove(try_one.db_key)
        # Purely numeric tokens (house numbers, etc.) are not fuzzed.
        if try_one.isdigit():
            continue
        helper.debug('Going fuzzy with %s and %s', try_one, keys)
        neighbors = make_fuzzy(try_one, max=helper.fuzzy)
        if len(keys):
            # Only retain tokens that have been seen in the index at least
            # once with the other tokens.
            # helper.pid is used as a temporary Redis set key; it is deleted
            # right after the intersection.
            DB.sadd(helper.pid, *neighbors)
            # k[2:] strips what looks like a 2-char key prefix — TODO confirm
            # against dbkeys' key format.
            interkeys = [pair_key(k[2:]) for k in keys]
            interkeys.append(helper.pid)
            fuzzy_words = DB.sinter(interkeys)
            DB.delete(helper.pid)
            # Keep the priority we gave in building fuzzy terms (inversion
            # first, then substitution, etc.).
            fuzzy_words = [w.decode() for w in fuzzy_words]
            fuzzy_words.sort(key=lambda x: neighbors.index(x))
        else:
            # The token we are considering is alone: keep any variant that
            # exists in the index at all.
            fuzzy_words = []
            for neighbor in neighbors:
                key = dbkeys.token_key(neighbor)
                count = DB.zcard(key)
                if count:
                    fuzzy_words.append(neighbor)
        if fuzzy_words:
            helper.debug('Found fuzzy candidates %s', fuzzy_words)
            fuzzy_keys = [dbkeys.token_key(w) for w in fuzzy_words]
            for key in fuzzy_keys:
                # Re-check on each iteration: add_to_bucket may have
                # filled the bucket.
                if helper.bucket_dry:
                    helper.add_to_bucket(keys + [key])
def try_fuzzy(helper, tokens, include_common=True):
    """Widen the search by trying fuzzy variants of the given tokens.

    For each candidate token (longest first), fuzzy variants are generated
    and those confirmed by the index are fed back into the helper's result
    bucket.  Stops early once the bucket is full.

    helper -- search helper holding the current query state (keys, bucket,
              debug logging); exact type not visible from here.
    tokens -- candidate tokens to fuzz; NOTE: sorted in place.
    include_common -- also narrow the intersection with common tokens not
                      already part of the query keys.
    """
    # Nothing to do if the bucket already has enough results or there is
    # nothing to fuzz.
    if not helper.bucket_dry or not tokens:
        return
    helper.debug('Fuzzy on. Trying with %s.', tokens)
    # Longest tokens first: presumably the most discriminating candidates.
    tokens.sort(key=lambda t: len(t), reverse=True)
    allkeys = helper.keys[:]
    if include_common:
        # As we are in fuzzy, try to narrow as much as possible by adding
        # unused common tokens.
        common = [t for t in helper.common if t.db_key not in helper.keys]
        allkeys.extend([t.db_key for t in common])
    for try_one in tokens:
        if helper.bucket_full:
            break
        # Intersect against every key except the token being fuzzed.
        keys = allkeys[:]
        if try_one.db_key in keys:
            keys.remove(try_one.db_key)
        # Purely numeric tokens (house numbers, etc.) are not fuzzed.
        if try_one.isdigit():
            continue
        helper.debug('Going fuzzy with %s', try_one)
        neighbors = make_fuzzy(try_one, max=helper.fuzzy)
        if len(keys):
            # Only retain tokens that have been seen in the index at least
            # once with the other tokens.
            # helper.query is used as a temporary Redis set key; it is
            # deleted right after the intersection.
            DB.sadd(helper.query, *neighbors)
            # k[2:] strips what looks like a 2-char key prefix — TODO confirm
            # against dbkeys' key format.
            interkeys = [pair_key(k[2:]) for k in keys]
            interkeys.append(helper.query)
            fuzzy_words = DB.sinter(interkeys)
            DB.delete(helper.query)
            # Keep the priority we gave in building fuzzy terms (inversion
            # first, then substitution, etc.).
            fuzzy_words = [w.decode() for w in fuzzy_words]
            fuzzy_words.sort(key=lambda x: neighbors.index(x))
        else:
            # The token we are considering is alone: keep any variant that
            # exists in the index at all.
            fuzzy_words = []
            for neighbor in neighbors:
                key = dbkeys.token_key(neighbor)
                count = DB.zcard(key)
                if count:
                    fuzzy_words.append(neighbor)
        # Note: logged (and iterated) even when empty; the loop below is
        # then a no-op.
        helper.debug('Found fuzzy candidates %s', fuzzy_words)
        fuzzy_keys = [dbkeys.token_key(w) for w in fuzzy_words]
        for key in fuzzy_keys:
            # Re-check on each iteration: add_to_bucket may have filled
            # the bucket.
            if helper.bucket_dry:
                helper.add_to_bucket(keys + [key])
def token_key_frequency(key):
    """Return how many index entries are stored under `key`.

    `key` is a Redis sorted-set key; the count is its cardinality.
    """
    return DB.zcard(key)
def token_key_frequency(key):
    """Return how many index entries are stored under `key`.

    `key` is a Redis sorted-set key; the count is its cardinality.
    """
    return DB.zcard(key)