Example #1
import os

def do_inference(args):
    # prepare_output_path, setup_backend, process_inference_input, NeuralTagger
    # and write_column_tagged_file come from the surrounding nlp-architect module.
    prepare_output_path(args.output_dir, args.overwrite_output_dir)
    # Pick CPU or GPU and count the available GPUs
    device, n_gpus = setup_backend(args.no_cuda)
    # Scale the effective batch size by the number of GPUs
    args.batch_size = args.per_gpu_eval_batch_size * max(1, n_gpus)
    inference_examples = process_inference_input(args.data_file)
    # Load the trained tagger and move it to the selected device
    classifier = NeuralTagger.load_model(model_path=args.model_dir)
    classifier.to(device, n_gpus)
    # Run tagging with the batch size computed above
    output = classifier.inference(inference_examples, args.batch_size)
    write_column_tagged_file(args.output_dir + os.sep + "output.txt", output)
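For context, a minimal sketch of how this function could be invoked directly. The attribute names are the ones the function body reads; every value (paths, batch size) is a hypothetical placeholder, not part of the original example.

from argparse import Namespace

# Hypothetical invocation; all values below are placeholders.
args = Namespace(
    output_dir="out",               # directory where output.txt is written
    overwrite_output_dir=True,      # allow reuse of an existing directory
    no_cuda=False,                  # set True to force CPU inference
    per_gpu_eval_batch_size=8,      # per-device batch size
    data_file="input.txt",          # raw text file to tag
    model_dir="trained_tagger",     # directory holding a saved NeuralTagger
)
do_inference(args)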
Example #2
import os

def do_inference(args):
    # Same flow as Example #1, but with a TransformerTokenClassifier, which
    # exposes extra loading options (model type, casing, quantized weights).
    prepare_output_path(args.output_dir, args.overwrite_output_dir)
    device, n_gpus = setup_backend(args.no_cuda)
    # Scale the effective batch size by the number of GPUs
    args.batch_size = args.per_gpu_eval_batch_size * max(1, n_gpus)
    inference_examples = process_inference_input(args.data_file)
    # Load the trained transformer tagger, optionally from quantized weights
    classifier = TransformerTokenClassifier.load_model(model_path=args.model_path,
                                                       model_type=args.model_type,
                                                       do_lower_case=args.do_lower_case,
                                                       load_quantized=args.load_quantized_model)
    classifier.to(device, n_gpus)
    # Transformer inference additionally needs the maximum sequence length
    output = classifier.inference(inference_examples, args.max_seq_length, args.batch_size)
    write_column_tagged_file(args.output_dir + os.sep + "output.txt", output)
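As above, a hedged sketch of a direct invocation of this variant. The attribute names mirror what the function body reads; the values, including the "bert" model type and the sequence length, are assumptions chosen only to illustrate the shape of the arguments.

from argparse import Namespace

# Hypothetical invocation of the transformer variant; values are placeholders.
args = Namespace(
    output_dir="out",
    overwrite_output_dir=True,
    no_cuda=False,
    per_gpu_eval_batch_size=8,
    data_file="input.txt",
    model_path="trained_transformer",  # saved TransformerTokenClassifier
    model_type="bert",                 # architecture identifier (assumption)
    do_lower_case=True,                # should match how the model was trained
    load_quantized_model=False,        # set True to load quantized weights
    max_seq_length=128,                # truncation length for tokenized inputs
)
do_inference(args)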