def test(model_file, test_file, device=-1):
    """Evaluate a previously trained parser on a held-out file.

    Restores the training context and weights saved under *model_file*,
    runs the parser over *test_file* batch by batch, and prints the
    evaluation summary via ``Evaluator.report``.

    Args:
        model_file: Path to the saved model (npz) with an attached
            training context readable by ``utils.Saver.load_context``.
        test_file: Path to the evaluation data set.
        device: GPU device id; a negative value keeps the model on CPU.

    Side effects:
        Seeds the RNGs when the saved context carries a seed, switches
        chainer to inference mode, and renders a tqdm progress bar.
    """
    ctx = utils.Saver.load_context(model_file)
    if ctx.seed is not None:
        utils.set_random_seed(ctx.seed, device)

    # Bucketing groups similar-length sentences; shuffle stays off below
    # so evaluation order is deterministic.
    dataset = ctx.loader.load(test_file, train=False, bucketing=True)

    model = _build_parser(**dict(ctx))
    chainer.serializers.load_npz(model_file, model)
    if device >= 0:
        chainer.cuda.get_device_from_id(device).use()
        model.to_gpu(device)

    progress = training.listeners.ProgressBar(lambda total: tqdm(total=total))
    progress.init(len(dataset))
    evaluator = Evaluator(model, ctx.loader.rel_map, test_file, Log.getLogger())
    utils.chainer_train_off()

    for cols in dataset.batch(ctx.batch_size, colwise=True, shuffle=False):
        inputs, golds = cols[:-1], cols[-1]
        predictions = model.parse(*inputs)
        # Drop the first token of each sentence (presumably a ROOT
        # sentinel — confirm against the loader) before scoring.
        evaluator.append([sentence[1:] for sentence in inputs[-1]], predictions)
        progress.update(len(golds))

    evaluator.report(show_details=False)