class PhraseClassifier(object):
    """Classify a phrase using a pre-trained model loaded from disk.

    Expects the following files under *directory*:
      - <label>-meta.json  : JSON with keys "vocab", "max_vector_len",
                             "max_phrase_len", "sizes", "targets"
      - <label>.model      : serialized network weights
      - <label>.state      : serialized training state
    """

    def __init__(self, directory, label):
        """Load model metadata, weights, and state for *label*.

        Args:
            directory: Directory containing the model files.
            label: Base name shared by the meta/model/state files.
        """
        metafile = os.path.join(directory, label + "-meta.json")
        with open(metafile, 'r') as v:
            all_vars = json.load(v)
        self.vocab = generateVocabVectors(all_vars["vocab"])
        self.max_vector_len = all_vars["max_vector_len"]
        self.max_phrase_len = all_vars["max_phrase_len"]
        self.net_sizes = all_vars["sizes"]
        self.targets = all_vars["targets"]
        model_filename = os.path.join(directory, label + ".model")
        state_filename = os.path.join(directory, label + ".state")
        self.classifier = Classifier(self.net_sizes, model_filename,
                                     state_filename)

    def classify(self, phrase, cut_to_len=True):
        """Classify a phrase based on the loaded model.

        Args:
            phrase: Input string to classify.
            cut_to_len: If True (default), silently truncate phrases
                longer than max_phrase_len; if False, raise instead.

        Returns:
            A list of (target, score) pairs, one per model target.
            A list (not a lazy zip) so callers can iterate it more than
            once or index into it.

        Raises:
            ValueError: If the phrase exceeds max_phrase_len and
                cut_to_len is False.  (ValueError subclasses Exception,
                so existing broad handlers still catch it.)
        """
        if len(phrase) > self.max_phrase_len:
            if not cut_to_len:
                raise ValueError(
                    "Phrase too long: %d > %d"
                    % (len(phrase), self.max_phrase_len))
            phrase = phrase[:self.max_phrase_len]
        numbers = self.classifier.classify(
            stringToVector(phrase, self.vocab, self.max_vector_len))
        # Materialize so the result survives repeated iteration (Py3 zip
        # is a one-shot iterator).
        return list(zip(self.targets, numbers))
def __init__(self, directory, label):
    """Initialize the classifier from files named after *label* in *directory*.

    Reads the "-meta.json" sidecar for vocabulary and network
    configuration, then constructs the underlying Classifier from the
    ".model" and ".state" files.
    """
    meta_path = os.path.join(directory, label + "-meta.json")
    with open(meta_path, 'r') as handle:
        meta = json.load(handle)

    # Vocabulary and shape parameters from the metadata file.
    self.vocab = generateVocabVectors(meta["vocab"])
    self.max_vector_len = meta["max_vector_len"]
    self.max_phrase_len = meta["max_phrase_len"]
    self.net_sizes = meta["sizes"]
    self.targets = meta["targets"]

    # Build the network from its persisted weights and state.
    self.classifier = Classifier(
        self.net_sizes,
        os.path.join(directory, label + ".model"),
        os.path.join(directory, label + ".state"),
    )