def give_sentence():
    """Generate a single sentence from a Markov model trained on corpus.txt.

    Tokenizes the corpus, performs a random walk over the resulting Markov
    chain, and returns the longest newline-delimited segment of the walk as
    the sentence.

    Returns:
        str: the generated sentence (may be empty if the walk produced only
        newlines).
    """
    # NOTE(review): tokenize/MarkovModel are defined elsewhere in this project.
    text = tokenize("corpus.txt")
    model = MarkovModel(text)

    walk = model.random_walk()

    # Drop the trailing terminator token (presumably "END" — matches
    # butcher_word's convention) before joining.
    sentence = " ".join(walk[:-1])

    # The walk can span newlines from the corpus; keep the longest line as
    # the sentence. (str.split always returns a list, so no type check is
    # needed — the original `type(...) is list` branch was always True, and
    # the original no-op `re.sub(...)` whose result was discarded is removed.)
    lines = sentence.split('\n')
    return max(lines, key=len)
Example #2
0
def butcher_word(word):
    """Scramble *word* by walking a Markov chain built from its characters.

    The character sequence is wrapped in START/END sentinel tokens, fed to a
    MarkovModel, and a random walk over that model is joined back into a
    string (the trailing terminator token is dropped).

    Args:
        word: the input string to butcher.

    Returns:
        str: a Markov-generated rearrangement of the word's characters.
    """
    tokens = ["START", *word, "END"]
    chain = MarkovModel(tokens)
    generated = chain.random_walk()
    # Strip the final token (the END terminator) before re-joining.
    return "".join(generated[:-1])