def train(self, stream, noparagraphs=False):
    """Train a new markov chain, overwriting the existing one.

    stream       -- token source handed to the tokeniser.
    noparagraphs -- when True, paragraph breaks are not treated specially.
    """
    tokens = tokenise.Tokeniser(stream=stream, noparagraphs=noparagraphs)
    self.markov.train(tokens)
    # Drop any cached generator: it was built from the old chain.
    self.generator = None
def train_more(self, stream, noparagraphs=False):
    """Add some data to an existing chain.

    stream       -- token source handed to the tokeniser.
    noparagraphs -- when True, paragraph breaks are not treated specially.

    Raises MarkovStateError when no chain has been loaded yet.
    """
    # Unlike train(), this requires a chain to already exist.
    if self.markov is None:
        raise MarkovStateError("No markov chain loaded!")
    tokens = tokenise.Tokeniser(stream=stream, noparagraphs=noparagraphs)
    self.markov.train(tokens)
    # Invalidate the cached generator, which reflects the pre-update chain.
    self.generator = None