import os

import torch

# `TextClassifier`, `TextClassificationPreprocess`, `TextClassificationPostprocess`,
# and the `TEST_BACKBONE` checkpoint name come from the Lightning Flash test suite.
def test_serve():
    model = TextClassifier(2, TEST_BACKBONE)
    # TODO: Currently only servable once a preprocess and postprocess have been attached
    model._preprocess = TextClassificationPreprocess(backbone=TEST_BACKBONE)
    model._postprocess = TextClassificationPostprocess()
    model.eval()
    model.serve()


def test_jit(tmpdir):
    sample_input = {"input_ids": torch.randint(1000, size=(1, 100))}
    path = os.path.join(tmpdir, "test.pt")

    model = TextClassifier(2, TEST_BACKBONE)
    model.eval()

    # The Hugging Face BERT model returns a dict-like output, so it can only be
    # traced with `torch.jit.trace(..., strict=False)`.
    model = torch.jit.trace(model, sample_input, strict=False)

    torch.jit.save(model, path)
    model = torch.jit.load(path)

    out = model(sample_input)["logits"]
    assert isinstance(out, torch.Tensor)
    assert out.shape == torch.Size([1, 2])
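

# A minimal inference sketch for the traced model above. Assumptions (not part of
# the original test): TEST_BACKBONE is a Hugging Face checkpoint name that
# `AutoTokenizer` can load, and the input is padded/truncated to the same
# sequence length (100) used when tracing.
def _example_jit_inference(path, text="hello world"):
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained(TEST_BACKBONE)
    batch = tokenizer([text], return_tensors="pt", padding="max_length", truncation=True, max_length=100)

    loaded = torch.jit.load(path)
    # The traced model takes the same dict structure used at trace time and
    # returns a dict containing "logits".
    logits = loaded({"input_ids": batch["input_ids"]})["logits"]
    return logits.argmax(dim=-1)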
Example #3
def test_serve():
    model = TextClassifier(2, backbone=TEST_BACKBONE)
    model.eval()
    model.serve()