def test_generate_error(self, training_text):
    """Generation fails before fitting, and rejects an unknown ``state`` after."""
    model = generate_utils.MarkovModel(state_len=4)
    # An untrained model has nothing to sample from.
    with pytest.raises(RuntimeError):
        model.generate(10)
    model.fit(training_text)
    # A seed state that never appeared in the training text is invalid.
    with pytest.raises(ValueError):
        model.generate(10, state="foo")
def test_generate(self, training_text):
    """Generated output is a string of the requested length, honoring ``state``."""
    model = generate_utils.MarkovModel(state_len=4)
    model.fit(training_text)
    # Unseeded generation: correct type and length.
    unseeded = model.generate(10)
    assert isinstance(unseeded, str)
    assert len(unseeded) == 10
    # Seeded generation: same checks, plus the output begins with the seed.
    seeded = model.generate(10, state="Here")
    assert isinstance(seeded, str)
    assert len(seeded) == 10
    assert seeded.startswith("Here")
def markov_model(self):
    """
    :class:`generate_utils.MarkovModel`: Markov model used to generate text
    in :meth:`Provider.text_lines()` and :meth:`Provider.text_lines_trailing()`.

    Trained lazily on first access and cached on the instance — rather than
    at class level — so that importing this module doesn't pay the training
    cost every time. Training is fast, but not *that* fast.
    """
    if self._markov_model is None:
        # Train once; every later access returns the cached instance.
        model = generate_utils.MarkovModel(state_len=4)
        self._markov_model = model.fit(c.TEXT_SAMPLES)
    return self._markov_model
def test_fit(self, training_text):
    """Fitting populates ``model`` with a non-trivial transition mapping."""
    fitted = generate_utils.MarkovModel(state_len=4)
    fitted.fit(training_text)
    transitions = fitted.model
    assert isinstance(transitions, dict)
    assert len(transitions) > 1
def test_init(self):
    """A freshly constructed model records ``state_len`` and starts untrained."""
    n = 4
    model = generate_utils.MarkovModel(state_len=n)
    assert model.state_len == n
    # No training has happened yet, so there is no transition table.
    assert model.model is None