예제 #1
0
 def __init__(self, session, config, parent):
     """Initialize, resolving the configured Lucene tokenizer class.

     Reads the 'tokenizer' setting and looks the named class up on the
     ``lucene`` module.  Raises ConfigFileException when the name does
     not end in 'Tokenizer' or is not present on the module.
     """
     SimpleTokenizer.__init__(self, session, config, parent)
     tknr = self.get_setting(session, 'tokenizer')
     # endswith() is clearer and less error-prone than slicing the
     # last 9 characters (tknr[-9:]) and comparing.
     if tknr.endswith('Tokenizer') and hasattr(lucene, tknr):
         self.tokenizer = getattr(lucene, tknr)
     else:
         raise ConfigFileException("Unknown Lucene Tokenizer")
예제 #2
0
 def __init__(self, session, config, parent):
     """Initialize, resolving the configured Lucene tokenizer class.

     Reads the "tokenizer" setting and looks the named class up on the
     ``lucene`` module.  Raises ConfigFileException when the name does
     not end in "Tokenizer" or is not present on the module.
     """
     SimpleTokenizer.__init__(self, session, config, parent)
     tknr = self.get_setting(session, "tokenizer")
     # endswith() is clearer and less error-prone than slicing the
     # last 9 characters (tknr[-9:]) and comparing.
     if tknr.endswith("Tokenizer") and hasattr(lucene, tknr):
         self.tokenizer = getattr(lucene, tknr)
     else:
         raise ConfigFileException("Unknown Lucene Tokenizer")
예제 #3
0
 def __init__(self, session, config, parent):
     """Initialize, caching output-format, stemming and POS settings.

     Each setting defaults to 0 (disabled) when absent from config.
     """
     SimpleTokenizer.__init__(self, session, config, parent)
     # Map instance attribute name -> config setting name.
     for attr, setting in (('structure', 'structuredOutput'),
                           ('stem', 'useStem'),
                           ('pos', 'pos'),
                           ('justPos', 'justPos')):
         setattr(self, attr, self.get_setting(session, setting, 0))
예제 #4
0
 def __init__(self, session, config, parent):
     """Initialize, loading NLTK's English Punkt sentence tokenizer.

     Raises:
         MissingDependencyException: if the ``nltk`` package failed to
             import (module-level name is None).
     """
     SimpleTokenizer.__init__(self, session, config, parent)
     # Guard against a missing nltk, consistent with the sibling
     # tokenizers in this file; otherwise nltk.data.load would raise
     # an opaque AttributeError on None.
     if nltk is None:
         raise MissingDependencyException(self.objectType, 'nltk')
     self.punkt = nltk.data.load('tokenizers/punkt/english.pickle')
예제 #5
0
 def __init__(self, session, config, parent):
     """Initialize, constructing NLTK's Punkt word tokenizer.

     Raises:
         MissingDependencyException: if the ``nltk`` package failed to
             import (module-level name is None).
     """
     SimpleTokenizer.__init__(self, session, config, parent)
     # Guard against a missing nltk, consistent with the sibling
     # tokenizers in this file; otherwise the attribute access below
     # would raise an opaque AttributeError on None.
     if nltk is None:
         raise MissingDependencyException(self.objectType, 'nltk')
     self.punkt = nltk.tokenize.PunktWordTokenizer()
예제 #6
0
 def __init__(self, session, config, parent):
     """Initialize, caching output-format, stemming and POS settings.

     Each setting defaults to 0 (disabled) when absent from config.
     """
     SimpleTokenizer.__init__(self, session, config, parent)
     # Bind the lookup once; each flag defaults to 0 when unset.
     get = self.get_setting
     self.structure = get(session, 'structuredOutput', 0)
     self.stem = get(session, 'useStem', 0)
     self.pos = get(session, 'pos', 0)
     self.justPos = get(session, 'justPos', 0)
예제 #7
0
 def __init__(self, session, config, parent):
     # Unconditionally raises MissingDependencyException for "lucene"
     # after base initialization — presumably this class is the
     # fallback used when the lucene module could not be imported
     # (TODO confirm against the import guard at file top).
     SimpleTokenizer.__init__(self, session, config, parent)
     raise MissingDependencyException(self.objectType, "lucene")
예제 #8
0
 def __init__(self, session, config, parent):
     """Initialize, loading NLTK's English Punkt sentence tokenizer.

     Raises:
         MissingDependencyException: if ``nltk`` failed to import.
     """
     SimpleTokenizer.__init__(self, session, config, parent)
     # Fail fast with a clear dependency error when nltk never imported.
     if nltk is None:
         raise MissingDependencyException(self.objectType, 'nltk')
     punkt_model = nltk.data.load('tokenizers/punkt/english.pickle')
     self.punkt = punkt_model
예제 #9
0
 def __init__(self, session, config, parent):
     """Initialize, constructing NLTK's Punkt word tokenizer.

     Raises:
         MissingDependencyException: if ``nltk`` failed to import.
     """
     SimpleTokenizer.__init__(self, session, config, parent)
     # Fail fast with a clear dependency error when nltk never imported.
     if nltk is None:
         raise MissingDependencyException(self.objectType, 'nltk')
     word_tokenizer = nltk.tokenize.PunktWordTokenizer()
     self.punkt = word_tokenizer
예제 #10
0
 def __init__(self, session, config, parent):
     """Initialize, loading NLTK's English Punkt sentence tokenizer.

     Raises:
         MissingDependencyException: if the ``nltk`` package failed to
             import (module-level name is None).
     """
     SimpleTokenizer.__init__(self, session, config, parent)
     # Guard against a missing nltk, consistent with the sibling
     # tokenizers in this file; otherwise nltk.data.load would raise
     # an opaque AttributeError on None.
     if nltk is None:
         raise MissingDependencyException(self.objectType, 'nltk')
     self.punkt = nltk.data.load('tokenizers/punkt/english.pickle')
예제 #11
0
 def __init__(self, session, config, parent):
     """Initialize, constructing NLTK's Punkt word tokenizer.

     Raises:
         MissingDependencyException: if the ``nltk`` package failed to
             import (module-level name is None).
     """
     SimpleTokenizer.__init__(self, session, config, parent)
     # Guard against a missing nltk, consistent with the sibling
     # tokenizers in this file; otherwise the attribute access below
     # would raise an opaque AttributeError on None.
     if nltk is None:
         raise MissingDependencyException(self.objectType, 'nltk')
     self.punkt = nltk.tokenize.PunktWordTokenizer()
예제 #12
0
 def __init__(self, session, config, parent):
     # Unconditionally raises MissingDependencyException for "lucene"
     # after base initialization — presumably this class is the
     # fallback used when the lucene module could not be imported
     # (TODO confirm against the import guard at file top).
     SimpleTokenizer.__init__(self, session, config, parent)
     raise MissingDependencyException(self.objectType, "lucene")