def test_raw_dataset_prediction(self):
    args = PredictionAttrs()
    predictor = Predictor.from_checkpoint(PredictorParams(progress_bar=False, silent=True),
                                          checkpoint=args.checkpoint[0])
    params = PipelineParams(type=DataSetType.FILE, files=args.files)
    for sample in predictor.predict(params):
        pass
def test_raw_prediction(self):
    args = PredictionAttrs()
    predictor = Predictor.from_checkpoint(PredictorParams(progress_bar=False, silent=True),
                                          checkpoint=args.checkpoint[0])
    images = [load_image(file) for file in args.files]
    for result in predictor.predict_raw(images):
        self.assertGreater(result.outputs.avg_char_probability, 0)
def test_white_image_raw_prediction(self):
    args = PredictionAttrs()
    predictor = Predictor.from_checkpoint(PredictorParams(progress_bar=False, silent=True),
                                          checkpoint=args.checkpoint[0])
    # A single blank (all-zero) image; prediction should run without raising.
    images = [np.zeros(shape=(200, 50))]
    for result in predictor.predict_raw(images):
        print(result.outputs.sentence)
def test_raw_prediction_voted(self):
    args = PredictionAttrs()
    predictor = MultiPredictor.from_paths(checkpoints=args.checkpoint,
                                          predictor_params=PredictorParams(progress_bar=False, silent=True))
    images = [load_image(file) for file in args.files]
    for inputs, (r, voted), meta in predictor.predict_raw(images):
        print([rn.sentence for rn in r])
def test_raw_prediction(self):
    args = PredictionAttrs()
    predictor = Predictor.from_checkpoint(PredictorParams(progress_bar=False, silent=True),
                                          checkpoint=args.checkpoint[0])
    images = [load_image(file) for file in args.files]
    for file, image in zip(args.files, images):
        _, prediction, _ = list(predictor.predict_raw([image]))[0]
        print(file, prediction.sentence)
def benchmark_prediction(model, batch_size, processes, n_examples, runs=10):
    # Build a silent predictor; wiring num_processes into the pipeline mirrors the setup used
    # elsewhere in this file, so the processes argument is not silently ignored.
    params = PredictorParams(silent=True)
    params.pipeline.num_processes = processes
    predictor = Predictor.from_checkpoint(params, model)
    # Synthetic grayscale line image (400x48) used as raw input.
    data = (np.random.random((400, 48)) * 255).astype(np.uint8)
    print("Running with bs={}, proc={}, n={}".format(batch_size, processes, n_examples))
    start = time.time()
    for _ in range(runs):
        list(predictor.predict_raw([data] * n_examples, batch_size=batch_size))
    end = time.time()
    return (end - start) / runs
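# Hypothetical usage sketch (not part of the original file): sweep a few batch sizes with a
# placeholder checkpoint path and compare the average time per run returned by benchmark_prediction.
if __name__ == "__main__":
    model_path = "models/best.ckpt.json"  # assumed checkpoint path, adjust to a real model
    for bs in (1, 5, 10, 20):
        avg = benchmark_prediction(model_path, batch_size=bs, processes=1, n_examples=100)
        print("batch_size={}: {:.3f}s per run".format(bs, avg))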
def default_predictor_params():
    p = PredictorParams(progress_bar=False, silent=True)
    p.pipeline.batch_size = 2
    p.pipeline.num_processes = 1
    return p
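# Hypothetical usage sketch: default_predictor_params() is intended to be passed straight to
# Predictor.from_checkpoint, as in the tests above; the checkpoint path and image files
# here are placeholders.
def example_prediction_with_defaults(checkpoint_path, image_files):
    predictor = Predictor.from_checkpoint(default_predictor_params(), checkpoint=checkpoint_path)
    images = [load_image(f) for f in image_files]
    for result in predictor.predict_raw(images):
        print(result.outputs.sentence)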