def main():
    """CLI entry point: assemble the argument parser, then dispatch to
    model export (deploy), hyper-parameter tuning, or plain training."""
    arg_parser = ArgumentParser()

    # Task-level argument groups.
    for add_args in (add_common_specific_args,
                     add_tune_specific_args,
                     add_task_specific_args):
        arg_parser = add_args(arg_parser)

    # Model-, optimizer-, and trainer-specific argument groups.
    arg_parser = Model.add_model_specific_args(arg_parser)
    arg_parser = optimization.add_optimizer_specific_args(arg_parser)
    arg_parser = Trainer.add_argparse_args(arg_parser)

    # Task-appropriate defaults (CLI flags still override these).
    arg_parser.set_defaults(
        min_epochs=1,
        max_epochs=10,
        gradient_clip_val=1.0,
        lr_layers_getter='get_layer_lrs_with_crf',
    )

    args = arg_parser.parse_args()

    if args.ltp_model is not None and args.resume_from_checkpoint is not None:
        # Both an output path and a checkpoint were given: export the model.
        deploy_model(args, args.ltp_version)
    elif args.tune:
        tune_train(args, model_class=Model, task_info=task_info,
                   build_method=build_method)
    else:
        common_train(args, model_class=Model, task_info=task_info,
                     build_method=build_method)
def main():
    """CLI entry point: assemble the argument parser, then dispatch to
    distillation-dataset building, hyper-parameter tuning, or training."""
    arg_parser = ArgumentParser()

    # Task-level argument groups.
    for add_args in (add_common_specific_args,
                     add_tune_specific_args,
                     add_task_specific_args):
        arg_parser = add_args(arg_parser)

    # Model-, optimizer-, and trainer-specific argument groups.
    arg_parser = Model.add_model_specific_args(arg_parser)
    arg_parser = optimization.add_optimizer_specific_args(arg_parser)
    arg_parser = Trainer.add_argparse_args(arg_parser)

    # Task-appropriate defaults (CLI flags still override these).
    arg_parser.set_defaults(
        gradient_clip_val=1.0,
        min_epochs=1,
        max_epochs=10,
        num_labels=27,
    )

    args = arg_parser.parse_args()

    if args.build_dataset:
        build_distill_dataset(args)
    elif args.tune:
        tune_train(args, model_class=Model, task_info=task_info)
    else:
        common_train(args, model_class=Model, task_info=task_info)
def main():
    """CLI entry point: assemble the argument parser, then dispatch to
    model export, NER distill-dataset building, tuning, or training.

    To export a model usable by the LTP master branch, pass the output
    directory via the ``ltp_adapter`` argument (e.g. ``ltp_model``).
    """
    arg_parser = ArgumentParser()

    # Task-level argument groups.
    for add_args in (add_common_specific_args,
                     add_tune_specific_args,
                     add_task_specific_args):
        arg_parser = add_args(arg_parser)

    # Model-, optimizer-, and trainer-specific argument groups.
    arg_parser = Model.add_model_specific_args(arg_parser)
    arg_parser = optimization.add_optimizer_specific_args(arg_parser)
    arg_parser = Trainer.add_argparse_args(arg_parser)

    # Task-appropriate defaults (CLI flags still override these).
    arg_parser.set_defaults(
        min_epochs=1,
        max_epochs=10,
        gradient_clip_val=1.0,
        lr_layers_getter='get_layer_lrs_with_crf',
    )

    args = arg_parser.parse_args()

    if args.ltp_model is not None and args.resume_from_checkpoint is not None:
        # Both an output path and a checkpoint were given: export the model.
        deploy_model(args, args.ltp_version)
    elif args.build_ner_dataset:
        build_ner_distill_dataset(args)
    elif args.tune:
        from ltp.utils.common_train import tune

        # Hyper-parameter search space for Ray Tune.
        tune_config = {
            # 3e-4 for Small, 1e-4 for Base, 5e-5 for Large
            "lr": tune.loguniform(args.tune_min_lr, args.tune_max_lr),
            # dataset split ratio
            "tau": tune.choice([0.8, 0.9, 1.0]),
            # weight decay
            "weight_decay": tune.choice([0.0, 0.01]),
            # gradient clipping
            "gradient_clip_val": tune.choice([1.0, 2.0, 3.0, 4.0, 5.0]),
            # lr scheduler
            "lr_scheduler": tune.choice([
                'linear_schedule_with_warmup',
                'polynomial_decay_schedule_with_warmup',
            ]),
        }
        tune_train(args, model_class=Model, task_info=task_info,
                   build_method=build_method, tune_config=tune_config)
    else:
        common_train(args, model_class=Model, task_info=task_info,
                     build_method=build_method)
def main():
    """CLI entry point for the SDP task: assemble the argument parser,
    pick the model variant, then dispatch to dataset building, tuning,
    or training."""
    arg_parser = ArgumentParser()

    # Task-level argument groups.
    for add_args in (add_common_specific_args,
                     add_tune_specific_args,
                     add_task_specific_args):
        arg_parser = add_args(arg_parser)

    # Both model variants contribute their arguments, plus optimizer/trainer.
    arg_parser = ViModel.add_model_specific_args(arg_parser)
    arg_parser = Model.add_model_specific_args(arg_parser)
    arg_parser = optimization.add_optimizer_specific_args(arg_parser)
    arg_parser = Trainer.add_argparse_args(arg_parser)

    # Task-appropriate defaults (CLI flags still override these).
    arg_parser.set_defaults(
        gradient_clip_val=1.0,
        min_epochs=1,
        max_epochs=10,
        num_labels=56,
        arc_hidden_size=600,
        rel_hidden_size=600,
    )

    args = arg_parser.parse_args()

    # Variational-inference variant vs. plain model with an explicit SDP loss.
    model_class = ViModel if args.use_vi else Model
    model_kwargs = {} if args.use_vi else {'loss_func': sdp_loss}

    if args.build_dataset:
        build_distill_dataset(model_class, args, model_kwargs=model_kwargs)
    elif args.tune:
        tune_train(args, model_class=model_class, task_info=task_info,
                   model_kwargs=model_kwargs)
    else:
        common_train(args, model_class=model_class, task_info=task_info,
                     model_kwargs=model_kwargs)