Ejemplo n.º 1
0
def main():
    """CLI entry point: assemble the argument parser, then dispatch to
    model deployment, hyper-parameter tuning, or a plain training run."""
    parser = ArgumentParser()

    # Task-level arguments (common, tuning, and task-specific flags).
    for register in (add_common_specific_args,
                     add_tune_specific_args,
                     add_task_specific_args):
        parser = register(parser)

    # Model, optimizer, and PyTorch-Lightning Trainer arguments.
    parser = Model.add_model_specific_args(parser)
    parser = optimization.add_optimizer_specific_args(parser)
    parser = Trainer.add_argparse_args(parser)

    # Project defaults; a single set_defaults call is equivalent to
    # several consecutive ones.
    parser.set_defaults(
        min_epochs=1,
        max_epochs=10,
        gradient_clip_val=1.0,
        lr_layers_getter='get_layer_lrs_with_crf',
    )
    args = parser.parse_args()

    # Export a deployable model only when both an output path and a
    # checkpoint to restore are supplied; otherwise tune or train.
    deploy_requested = (args.ltp_model is not None
                        and args.resume_from_checkpoint is not None)
    if deploy_requested:
        deploy_model(args, args.ltp_version)
    elif args.tune:
        tune_train(args, model_class=Model, task_info=task_info,
                   build_method=build_method)
    else:
        common_train(args, model_class=Model, task_info=task_info,
                     build_method=build_method)
Ejemplo n.º 2
0
def main():
    """CLI entry point: parse arguments and dispatch to deployment,
    NER distillation-dataset building, tuning, or plain training.

    To export a model usable by the LTP master branch, pass the
    ``ltp_adapter`` argument as an output directory path, e.g. ``ltp_model``.
    """
    parser = ArgumentParser()
    parser = add_task_specific_args(parser)
    parser = Model.add_model_specific_args(parser)
    parser = optimization.add_optimizer_specific_args(parser)
    parser = Trainer.add_argparse_args(parser)
    # Project defaults; one set_defaults call behaves the same as two.
    parser.set_defaults(
        min_epochs=1,
        max_epochs=10,
        gradient_clip_val=1.0,
        lr_layers_getter='get_layer_lrs_with_crf',
    )
    args = parser.parse_args()

    # Deployment requires both an export path and a checkpoint to restore.
    if args.ltp_model is not None and args.resume_from_checkpoint is not None:
        deploy_model(args, args.ltp_version)
        return
    if args.build_ner_dataset:
        build_ner_distill_dataset(args)
        return
    runner = tune_train if args.tune else common_train
    runner(args, model_class=Model, task_info=task_info,
           build_method=build_method)
Ejemplo n.º 3
0
def main():
    """CLI entry point: parse arguments and dispatch to deployment,
    NER distillation-dataset building, Ray-Tune search, or training.

    To export a model usable by the LTP master branch, pass the
    ``ltp_adapter`` argument as an output directory path, e.g. ``ltp_model``.
    """
    parser = ArgumentParser()

    # Task-level arguments (common, tuning, and task-specific flags).
    for register in (add_common_specific_args,
                     add_tune_specific_args,
                     add_task_specific_args):
        parser = register(parser)

    # Model, optimizer, and PyTorch-Lightning Trainer arguments.
    parser = Model.add_model_specific_args(parser)
    parser = optimization.add_optimizer_specific_args(parser)
    parser = Trainer.add_argparse_args(parser)

    parser.set_defaults(min_epochs=1, max_epochs=10)
    parser.set_defaults(gradient_clip_val=1.0,
                        lr_layers_getter='get_layer_lrs_with_crf')
    args = parser.parse_args()

    if args.ltp_model is not None and args.resume_from_checkpoint is not None:
        # Both an export path and a checkpoint were supplied: deploy.
        deploy_model(args, args.ltp_version)
    elif args.build_ner_dataset:
        build_ner_distill_dataset(args)
    elif args.tune:
        # Imported lazily so Ray Tune is only required for tuning runs.
        from ltp.utils.common_train import tune
        tune_config = {
            # 3e-4 for Small, 1e-4 for Base, 5e-5 for Large
            "lr": tune.loguniform(args.tune_min_lr, args.tune_max_lr),
            # dataset split
            "tau": tune.choice([0.8, 0.9, 1.0]),
            # weight decay
            "weight_decay": tune.choice([0.0, 0.01]),
            # gradient clipping
            "gradient_clip_val": tune.choice([1.0, 2.0, 3.0, 4.0, 5.0]),
            # lr scheduler
            "lr_scheduler": tune.choice([
                'linear_schedule_with_warmup',
                'polynomial_decay_schedule_with_warmup',
            ]),
        }
        tune_train(args,
                   model_class=Model,
                   task_info=task_info,
                   build_method=build_method,
                   tune_config=tune_config)
    else:
        common_train(args,
                     model_class=Model,
                     task_info=task_info,
                     build_method=build_method)
Ejemplo n.º 4
0
def main():
    """CLI entry point: parse arguments, then either deploy an exported
    model (when a checkpoint and export path are given) or train."""
    parser = ArgumentParser()
    for register in (add_task_specific_args,
                     Model.add_model_specific_args,
                     optimization.add_optimizer_specific_args,
                     Trainer.add_argparse_args):
        parser = register(parser)
    parser.set_defaults(gradient_clip_val=1.0)
    args = parser.parse_args()

    deploy_requested = (args.ltp_model is not None
                        and args.resume_from_checkpoint is not None)
    if deploy_requested:
        deploy_model(args, args.ltp_version)
    else:
        common_train(args,
                     metric=f'val_{task_info.metric_name}',
                     model_class=Model,
                     build_method=build_method,
                     task=task_info.task_name)