from flask import render_template

import cleanup
import markov


def main():
    # process and import the file
    words = cleanup.text_list('text/dickens.txt')
    m_chain = markov.order_mchain(2, words)
    c_start = markov.start_token(m_chain)
    walk_the_dog = markov.walk(c_start, m_chain)
    almost = markov.finalize(walk_the_dog)
    # create the resulting string and render it into the page
    home = str(almost)
    return render_template("main.html", sentence=home)
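# A minimal sketch of how the main() view above could be served by Flask.
# The Flask app object, the '/' route, and the debug flag are assumptions;
# they do not appear in the original code.
from flask import Flask

app = Flask(__name__)

# register the existing view function at the site root
app.add_url_rule('/', 'main', main)

if __name__ == '__main__':
    app.run(debug=True)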
from sys import argv

import cleanup
import sample
import wordcount


def sentence(histogram, total, loop):
    looper = int(loop)
    sampled_words = []
    # sample `looper` weighted-random words from the histogram
    for i in range(0, looper):
        weight_word = sample.weighted_random(histogram, total)
        sampled_words.append(weight_word)
    # turn the list into a space-separated string
    return ' '.join(sampled_words)


if __name__ == '__main__':
    file1 = argv[1]        # file to analyze
    looper = int(argv[2])  # number of words to sample
    hist1 = wordcount.dict_words(cleanup.text_list(file1))
    total = wordcount.sum_value(hist1)
    # runs the sentence function and prints the result
    print(sentence(hist1, total, looper))
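# The script above relies on sample.weighted_random(histogram, total), whose
# implementation is not part of this excerpt. A minimal sketch of one way it
# could work, assuming `histogram` maps each word to its count and `total`
# is the sum of those counts:
import random


def weighted_random(histogram, total):
    # pick a point in [1, total], then walk the counts until it is reached
    target = random.randint(1, total)
    running = 0
    for word, count in histogram.items():
        running += count
        if running >= target:
            return word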
from sys import argv

import cleanup
import sample


def walk(start_token, dictionary):
    # walks the Markov chain, starting from the START window,
    # until a STOP token is drawn
    sentence = ['START', start_token[1]]
    # while the last entry in the list `sentence` is not "STOP"
    while sentence[-1] != 'STOP':
        # the sliding window is the last two words generated so far
        window = (sentence[-2], sentence[-1])
        hist = dictionary[window]
        next_word = sample.weighted_random(hist, sample.sum_value(hist))
        sentence.append(next_word)
    return sentence


def finalize(sentence):
    # remove the START and STOP tokens, then capitalize the first remaining word
    sentence.pop(0)
    sentence.pop()
    sentence[0] = sentence[0].capitalize()
    # join the words into a single string ending with a period
    return ' '.join(sentence) + '.'


if __name__ == '__main__':
    file1 = argv[1]  # file to analyze
    words = cleanup.text_list(file1)
    m_chain = order_mchain(2, words)
    # m_chain = m_chain_one(words)  # first-order alternative
    c_start = start_token(m_chain)
    walk_the_dog = walk(c_start, m_chain)
    print(finalize(walk_the_dog))
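# walk() above assumes a chain keyed by two-word tuples whose values are
# word-count histograms, plus a start_token() that returns a window beginning
# with 'START'. Neither function appears in this excerpt; the sketch below is
# one way they could look, not the project's actual implementation.
import random


def order_mchain(order, words):
    # bracket the corpus with START/STOP markers so walk() can begin and end
    tokens = ['START'] + words + ['STOP']
    chain = {}
    for i in range(len(tokens) - order):
        window = tuple(tokens[i:i + order])
        follower = tokens[i + order]
        chain.setdefault(window, {})
        chain[window][follower] = chain[window].get(follower, 0) + 1
    return chain


def start_token(chain):
    # pick a random window whose first entry is the START marker
    starts = [window for window in chain if window[0] == 'START']
    return random.choice(starts)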
# Creates a list of unique values
from sys import argv

import cleanup


def unique_list(words):
    unique_words = []
    for word in words:
        if word not in unique_words:
            unique_words.append(word)
    # return the list of unique words
    return unique_words


if __name__ == '__main__':
    file1 = argv[1]
    unique_words = unique_list(cleanup.text_list(file1))
    print(unique_words)
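# unique_list() above preserves first-seen order but does a list membership
# check for every word. A possible alternative (not in the original project)
# that keeps the same order with less work:
def unique_list_fast(words):
    # dict keys are unique and keep insertion order in Python 3.7+
    return list(dict.fromkeys(words))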
from sys import argv

import cleanup


# takes a list argument and returns a word count as a list of tuples
def tuple_words(words):  # (function name assumed; the original def line is not in this excerpt)
    tuple_list = []
    for word in words:
        if (word, words.count(word)) not in tuple_list:
            tuple_list.append((word, words.count(word)))
    return tuple_list


# sums the counts stored in a histogram dictionary
def sum_value(histogram):
    total = sum(histogram.values())
    return total


# opens the file named on the command line
if __name__ == '__main__':
    file1 = argv[1]
    print_list = cleanup.text_list(file1)
    # prompts the user for a histogram format
    print("==================================")
    print("Welcome to Hist-o-grama-rama!!!!!")
    print("=======>INSTRUCTIONS<============")
    print("Press 1 for a Dictionary")
    print("Press 2 for a List of Lists")
    print("Press 3 for a Tuple")
    input1 = input("Which method would you like returned? ")
    # dictionary
    if input1 == "1":
        dict_list = dict_words(print_list)
        tots = sum_value(dict_list)
        print(dict_list)
        print(tots)
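# dict_words() is called above (and from the sentence script) but is not part
# of this excerpt; it presumably lives elsewhere in wordcount.py. A sketch of
# what it likely does, assuming a plain dict of word -> count:
def dict_words(words):
    histogram = {}
    for word in words:
        histogram[word] = histogram.get(word, 0) + 1
    return histogram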
def main():
    file1 = argv[1]
    words = cleanup.text_list(file1)
    m_chain = order_mchain(2, words)
    print(m_chain)
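# Every module here calls cleanup.text_list(), but cleanup.py itself is not
# shown. One plausible sketch, assuming it reads the file, lowercases the
# text, strips punctuation, and returns a list of words:
import string


def text_list(file_name):
    with open(file_name) as source:
        text = source.read().lower()
    # drop punctuation, then split on whitespace
    text = text.translate(str.maketrans('', '', string.punctuation))
    return text.split()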