Example #1
0
    report = classification_report(actual_val.values.astype(
        int), np.array(merged_preds_val_list))
    print(report)

# Score the held-out test set. When BERT is frozen the generator presumably
# yields precomputed embeddings (emb_gen=args.freeze_bert) — TODO confirm
# against evaluate_on_set's definition.
prediction_dict_test, merged_preds_test, embs_test = evaluate_on_set(
    test_generator,
    predictor,
    emb_gen=args.freeze_bert,
    c_val=opt_c,
)

# Scoring the training set is opt-in (extra compute), so default to empty
# containers. NOTE(review): when the flag is off, prediction_dict_train is
# never bound — verify nothing downstream reads it in that configuration.
if not args.output_train_stats:
    merged_preds_train, embs_train = {}, {}
else:
    prediction_dict_train, merged_preds_train, embs_train = evaluate_on_set(
        training_generator,
        predictor,
        emb_gen=args.freeze_bert,
        c_val=opt_c,
    )

# save predictor: hyper-parameters as JSON plus the weights via torch.save.
# Fix: use a context manager so the JSON file handle is flushed and closed
# deterministically — the original passed a bare open() straight to
# json.dump, leaking the handle.
with open(os.path.join(args.output_dir, 'predictor_params.json'), 'w') as f:
    json.dump(predictor_params, f)
torch.save(predictor.state_dict(), os.path.join(
    args.output_dir, 'predictor.pt'))

# save model — only when the encoder was actually fine-tuned; under
# --freeze_bert there is nothing new to persist.
if not args.freeze_bert:
    output_model_file = os.path.join(args.output_dir, WEIGHTS_NAME)
    output_config_file = os.path.join(args.output_dir, CONFIG_NAME)
    # Unwrap a DataParallel-style wrapper (`.module`) if present so we
    # save the bare model itself.
    model_to_save = getattr(model, 'module', model)
    torch.save(model_to_save.state_dict(), output_model_file)
    model_to_save.config.to_json_file(output_config_file)
    tokenizer.save_vocabulary(args.output_dir)

# save args: persist the parsed CLI namespace for reproducibility.
# Fix: context manager closes and flushes the JSON file — the original
# leaked the open() handle passed directly to json.dump.
with open(os.path.join(args.output_dir, 'argparse_args.json'), 'w') as f:
    json.dump(vars(args), f)
    # NOTE(review): this indented fragment appears to be a duplicated paste of
    # the validation-report tail at the top of this file; it is not inside any
    # visible function and will not parse at module level — confirm against the
    # original source and remove the duplicate.
    report = classification_report(actual_val.values.astype(int),
                                   np.array(merged_preds_val_list))
    print(report)

# Evaluate on the test split; emb_gen mirrors --freeze_bert (presumably the
# generator emits cached embeddings in that mode — verify in evaluate_on_set).
prediction_dict_test, merged_preds_test, embs_test = evaluate_on_set(
    test_generator,
    predictor,
    emb_gen=args.freeze_bert,
    c_val=opt_c,
)

# Train-set statistics are optional; fall back to empty dicts otherwise.
# NOTE(review): prediction_dict_train stays unbound on the fallback path —
# confirm no later code depends on it in that case.
if not args.output_train_stats:
    merged_preds_train, embs_train = {}, {}
else:
    prediction_dict_train, merged_preds_train, embs_train = evaluate_on_set(
        training_generator,
        predictor,
        emb_gen=args.freeze_bert,
        c_val=opt_c,
    )

# save predictor: JSON hyper-parameters plus torch state_dict.
# Fix: `with` guarantees the JSON handle is closed and flushed — the
# original handed a bare open() to json.dump and never closed it.
with open(os.path.join(args.output_dir, 'predictor_params.json'), 'w') as f:
    json.dump(predictor_params, f)
torch.save(predictor.state_dict(),
           os.path.join(args.output_dir, 'predictor.pt'))

# save model — skipped under --freeze_bert, since the encoder weights
# were never updated in that mode.
if not args.freeze_bert:
    output_model_file = os.path.join(args.output_dir, WEIGHTS_NAME)
    output_config_file = os.path.join(args.output_dir, CONFIG_NAME)
    # Strip a `.module` wrapper (e.g. DataParallel) to save the model itself.
    model_to_save = getattr(model, 'module', model)
    torch.save(model_to_save.state_dict(), output_model_file)
    model_to_save.config.to_json_file(output_config_file)
    tokenizer.save_vocabulary(args.output_dir)

# save args: dump the argparse namespace so the run can be reproduced.
# Fix: context-managed open() replaces the leaked inline file handle the
# original passed straight to json.dump.
with open(os.path.join(args.output_dir, 'argparse_args.json'), 'w') as f:
    json.dump(vars(args), f)