def load_word_gen(filename):
    """Load and return a pickled word hash from *filename*.

    Exits the process with a fatal error message when the file does not
    exist or cannot be opened; errors raised by unpickling itself are
    allowed to propagate, as in the original behavior.
    """
    if not Utilities.is_file(filename):
        sys.exit("Fatal Error: no word hash found for file '%s'. Please "
                 "re-train the model and try again" % filename)
    try:
        # Pickle data must be opened in binary mode.
        handle = open(filename, 'rb')
    except OSError:
        # Narrowed from a bare except: only I/O failures mean "cannot open".
        sys.exit("Fatal Error: cannot open word hash '%s' for "
                 "reading" % filename)
    # The original leaked the file handle; the context manager closes it.
    with handle:
        return pickle.load(handle)
def file_to_ngram(file_path, n):
    """Return n-grams over the normalized contents of a text file.

    Returns None when n < 1 or when *file_path* is not an existing file.

    NOTE(review): this definition is immediately shadowed by an
    almost-identical redefinition of ``file_to_ngram`` later in this
    file (the only token difference is a stray semicolon here), so this
    copy is dead code — consider deleting one of the two.
    """
    if n < 1:
        return None
    if Utilities.is_file(file_path) == False:
        return None
    fileContent = []
    # NOTE(review): the handle returned by Utilities.open_file is never
    # closed — presumably a file object; confirm and consider ``with``.
    file = Utilities.open_file(file_path);
    for line in file:
        fileContent += TextUtils.normalize_line(line)
        # Insert an explicit space token between lines.
        fileContent.append(' ')
    ng = ngrams(fileContent, n)
    return ng
def file_to_ngram(file_path, n):
    """Return n-grams over the normalized contents of a text file.

    Parameters:
        file_path: path of the text file to read.
        n: n-gram order; must be >= 1.

    Returns:
        The result of ngrams(...) over the file's normalized tokens
        (a space token is appended after each line), or None when
        n < 1 or *file_path* is not an existing file.
    """
    if n < 1:
        return None
    # Idiomatic truthiness check instead of "== False".
    if not Utilities.is_file(file_path):
        return None
    tokens = []
    # Renamed from "file", which shadowed a builtin name.
    # NOTE(review): the handle returned by Utilities.open_file is never
    # closed — presumably a file object; confirm and consider ``with``.
    source = Utilities.open_file(file_path)
    for line in source:
        tokens += TextUtils.normalize_line(line)
        # Insert an explicit space token between lines.
        tokens.append(' ')
    return ngrams(tokens, n)