Example #1
    def __init__(self,
                 field,
                 stemmer=True,
                 metaphone=False,
                 stopwords_file=None,
                 min_word_length=None):
        super(FullTextIndex, self).__init__(field)
        # Fall back to the stopwords.txt bundled with walrus when no
        # stopwords file is supplied.
        self.tokenizer = Tokenizer(
            stemmer=stemmer,
            metaphone=metaphone,
            stopwords_file=stopwords_file or 'stopwords.txt',
            min_word_length=min_word_length)
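FullTextIndex is the index walrus builds behind a model field. The sketch below shows one way the settings above are typically reached from the model layer; it assumes a Redis server on localhost and that TextField(fts=True) forwards stemmer and the other tokenizer keyword arguments to FullTextIndex.

from walrus import Database, Model, TextField

db = Database()  # assumes Redis is reachable at localhost:6379

class Note(Model):
    __database__ = db
    # fts=True attaches a full-text index to this field; walrus adds an
    # auto-incrementing primary key since none is declared here.
    content = TextField(fts=True, stemmer=True)

Note.create(content='The quick brown fox jumped over the lazy dog.')

# With stemming enabled, 'jumping' reduces to the same stem as 'jumped'.
for note in Note.query(Note.content.search('jumping')):
    print(note.content)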
Example #2
    def __init__(self, db, name, **tokenizer_settings):
        """
        :param Database db: a walrus database object.
        :param str name: name for the search index.
        :param bool stemmer: use porter stemmer (default True).
        :param bool metaphone: use double metaphone (default False).
        :param str stopwords_file: defaults to walrus stopwords.txt.
        :param int min_word_length: specify minimum word length.

        Create a search index for storing and searching documents.
        """
        self.db = db
        self.name = name
        self.tokenizer = Tokenizer(**tokenizer_settings)
        self.members = self.db.Set('fts.%s' % self.name)
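A short usage sketch follows. It assumes a Redis server on localhost, that Index is importable from walrus.fts, and that the index exposes add(key, content) and search(query) for storing and retrieving documents; the constructor call itself matches the signature shown above.

from walrus import Database
from walrus.fts import Index  # import path is an assumption

db = Database()  # assumes Redis is reachable at localhost:6379
index = Index(db, 'app-search', stemmer=True)

index.add('doc-1', 'The quick brown fox jumped over the lazy dog.')
index.add('doc-2', 'Walrus keeps its search data in Redis.')

# search() is assumed to yield document dicts keyed by 'content';
# stemming lets 'jumping' match the stored term 'jumped'.
for document in index.search('jumping fox'):
    print(document['content'])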