Example #1
from typing import List

import numpy as np

# `Classifier`, `ClassificationReader`, and `TextClassifierPredictor` are
# assumed to be provided by the surrounding project (AllenNLP-style classes).
class ClassifierPredictor:
    """Thin wrapper that turns raw strings into an array of class probabilities."""

    def __init__(self, model: Classifier) -> None:
        self.model = model
        self.reader = ClassificationReader(skip_start_end=True)
        self.predictor = TextClassifierPredictor(self.model, self.reader)

    def predict(self, sequences: List[str]) -> np.ndarray:
        # Run each sequence through the predictor and stack the class probabilities.
        probs = [self.predictor.predict(seq)['probs'] for seq in sequences]
        return np.array(probs)
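
# Usage sketch for the wrapper above (assumes `model` is an already
# trained Classifier instance):
# clf = ClassifierPredictor(model)
# class_probs = clf.predict(['Good morning', 'What a terrible day'])
# class_probs has shape (num_sequences, num_classes)
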
trainer = Trainer(
    model=model,
    optimizer=optimizer,
    iterator=iterator,
    train_dataset=train_data,
    validation_dataset=validation_data,
    patience=2,  # early stopping if validation stops improving for 2 epochs
    num_epochs=10,  # kept small for a first run; increase later
    cuda_device=cuda_device)
trainer.train()

# predictor (TextClassifierPredictor needs the trained model and a dataset reader)
pre_example = 'Good morning'
predictor = TextClassifierPredictor(model, ClassificationReader(skip_start_end=True))
pre = predictor.predict(pre_example)

# save the model
with open('./tmp/classifier_biattention_model.th', 'wb') as f:
    torch.save(model.state_dict(), f)

# save the vocabulary
vocab.save_to_files('./tmp/vocabulary')

# reload the model
# vocab2 = Vocabulary.from_files('./tmp/vocabulary')
# model2 = BiattentiveClassificationNetwork(word_embeddings, encoder, vocab2)
# with open('./tmp/classifier_biattention_model.th', 'rb') as f:
#     model2.load_state_dict(torch.load(f))
# if cuda_device > -1:
#     model2.cuda(cuda_device)
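
# Once reloaded, the model could be served through the wrapper defined above
# (a sketch; assumes the reload block is uncommented so `model2` exists):
# predictor2 = ClassifierPredictor(model2)
# probs2 = predictor2.predict(['Good morning'])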