Exemplo n.º 1
0
 def __init__(self, inp_size, models, **kwargs):
     """Initialize bounded I/O buffers, the model set, and the markov chain.

     Args:
         inp_size: maximum length of the input and output deques.
         models: specification handed to ``init_models()``.
         **kwargs: optional ``name`` for this instance (defaults to "N/A").
     """
     self.inp_size = inp_size
     # Bounded buffers: once full, the oldest entries are discarded.
     self.inp = collections.deque(maxlen=inp_size)
     self.out = collections.deque(maxlen=inp_size)
     self.models = self.init_models(models)
     self.new_data = False
     self.name = kwargs.get("name", "N/A")
     # No model has been selected yet.
     self.current_model_name = "N/A"
     self.current_model = None
     self.current_model_object = None
     # Train/test split constants used elsewhere in the class.
     self.test_size_const = 0.6
     self.num_test_lists = 2
     # Chain is built from the already-initialized models above.
     self.mchain = markov.Chain(self.models)
Exemplo n.º 2
0
import markov
import util

# Build a 2nd-order Markov chain from the sample text and emit ~100 words.
elems = util.readWords('text/spongebob_texas.txt')
chain = markov.Chain(2)
for x in elems:
    chain.observe(x)

content = chain.chooseFirst()
for i in range(100):
    # Named 'word' rather than 'next' so the builtin next() is not shadowed.
    word = chain.chooseNext()
    if not word:
        break
    content.append(word)

content = ' '.join(content)
print(content)
input()  # pause so the console window stays open
Exemplo n.º 3
0
 def clear(self):
     """Discard all buffered data and rebuild the markov chain.

     Returns:
         self, so calls can be chained fluently.
     """
     size = self.inp_size
     # Replace (rather than empty) the deques so stale references are dropped.
     self.inp = collections.deque(maxlen=size)
     self.out = collections.deque(maxlen=size)
     self.mchain = markov.Chain(self.models)
     return self
Exemplo n.º 4
0
 def _create_new_chain(self):
     """Build and return a fresh, empty markov.Chain."""
     chain = markov.Chain()
     return chain
Exemplo n.º 5
0
#!/usr/bin/env python3
# -*- coding: utf8 -*-

import itertools
from nltk import sent_tokenize, word_tokenize
import markov

if __name__ == '__main__':

    # Read the corpus explicitly as UTF-8 so decoding does not depend on
    # the platform's default locale encoding.
    with open('text', encoding='utf-8') as f:
        data = f.read()

    # Feed each sentence, tokenized into words, to a 3rd-order chain.
    sentences = sent_tokenize(data)
    chain = markov.Chain(order=3)

    for i, s in enumerate(sentences):
        # Lightweight progress indicator every 1000 sentences.
        if not i % 1000:
            print(i)
        chain.feed(word_tokenize(s))

    # Print ten generated samples.
    for i in range(10):
        print(chain.generate())