Example #1
    # Single-task training constructor: defaults use deferred construction
    # (bare) and experiment-config references (Ref).
    def __init__(
        self,
        model: models.ConditionedModel = Ref("model"),
        src_file: Optional[Union[str, Sequence[str]]] = None,
        trg_file: Optional[str] = None,
        dev_every: numbers.Integral = 0,
        dev_zero: bool = False,
        batcher: batchers.Batcher = bare(batchers.SrcBatcher, batch_size=32),
        loss_calculator: loss_calculators.LossCalculator = bare(
            loss_calculators.MLELoss),
        trainer: optimizers.XnmtOptimizer = bare(optimizers.SimpleSGDTrainer,
                                                 e0=0.1),
        run_for_epochs: Optional[numbers.Integral] = None,
        lr_decay: numbers.Real = 1.0,
        lr_decay_times: numbers.Integral = 3,
        patience: numbers.Integral = 1,
        initial_patience: Optional[numbers.Integral] = None,
        dev_tasks: Optional[Sequence[eval_tasks.EvalTask]] = None,
        dev_combinator: Optional[str] = None,
        restart_trainer: bool = False,
        reload_command: Optional[str] = None,
        name: str = "{EXP}",
        sample_train_sents: Optional[numbers.Integral] = None,
        max_num_train_sents: Optional[numbers.Integral] = None,
        max_src_len: Optional[numbers.Integral] = None,
        max_trg_len: Optional[numbers.Integral] = None,
        loss_comb_method: str = Ref("exp_global.loss_comb_method",
                                    default="sum"),
        update_every: numbers.Integral = 1,
        commandline_args: dict = Ref("exp_global.commandline_args", default={})
    ) -> None:

        super().__init__(model=model,
                         src_file=src_file,
                         trg_file=trg_file,
                         dev_every=dev_every,
                         batcher=batcher,
                         loss_calculator=loss_calculator,
                         run_for_epochs=run_for_epochs,
                         lr_decay=lr_decay,
                         lr_decay_times=lr_decay_times,
                         patience=patience,
                         initial_patience=initial_patience,
                         dev_tasks=dev_tasks,
                         dev_combinator=dev_combinator,
                         restart_trainer=restart_trainer,
                         reload_command=reload_command,
                         name=name,
                         sample_train_sents=sample_train_sents,
                         max_num_train_sents=max_num_train_sents,
                         max_src_len=max_src_len,
                         max_trg_len=max_trg_len)
        self.dev_zero = dev_zero
        # Fall back to plain SGD if no trainer was provided.
        self.trainer = trainer or optimizers.SimpleSGDTrainer(e0=0.1)
        self.dynet_profiling = commandline_args.get(
            "dynet_profiling", 0) if commandline_args else 0
        self.train_loss_tracker = loss_trackers.TrainLossTracker(self)
        self.loss_comb_method = loss_comb_method
        # Gradient accumulation: parameters are only updated every
        # `update_every` steps (see the sketch below).
        self.update_every = update_every
        self.num_updates_skipped = 0
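
The update_every / num_updates_skipped pair set up at the end of this constructor drives gradient accumulation: parameter updates are skipped until update_every backward passes have been counted. A minimal standalone sketch of that bookkeeping (illustrative names, not xnmt's actual implementation):

class AccumulationSketch:
    def __init__(self, update_every: int = 1) -> None:
        self.update_every = update_every
        self.num_updates_skipped = 0

    def maybe_update(self, trainer) -> bool:
        # Count this backward pass; step the trainer only once every
        # `update_every` calls, letting gradients accumulate in between.
        self.num_updates_skipped += 1
        if self.num_updates_skipped < self.update_every:
            return False
        self.num_updates_skipped = 0
        trainer.update()  # hypothetical optimizer API
        return True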
Example #2
    # Multi-task constructor: task_weights defaults to a uniform
    # distribution, and gradient accumulation can happen either within a
    # task (update_every_within) or across tasks (update_every_across),
    # but not both.
    def __init__(
        self,
        tasks: Sequence[train_tasks.TrainingTask],
        task_weights: Optional[Sequence[numbers.Real]] = None,
        trainer: optimizers.XnmtOptimizer = bare(optimizers.SimpleSGDTrainer,
                                                 e0=0.1),
        dev_zero: bool = False,
        loss_comb_method: str = Ref("exp_global.loss_comb_method",
                                    default="sum"),
        update_every_within: numbers.Integral = 1,
        update_every_across: numbers.Integral = 1,
        commandline_args: Optional[dict] = Ref("exp_global.commandline_args",
                                               default=None)
    ) -> None:
        super().__init__(tasks=tasks,
                         trainer=trainer,
                         dev_zero=dev_zero,
                         update_every=update_every_across,
                         commandline_args=commandline_args)
        if update_every_within != 1 and update_every_across != 1:
            raise ValueError(
                "update_every_within and update_every_across cannot be mixed.")
        self.update_every_within = update_every_within
        # Uniform weights (1/N per task) unless explicit weights are given.
        self.task_weights = task_weights or [1. / len(tasks)] * len(tasks)
        if len(self.task_weights) != len(self.tasks):
            raise ValueError(
                f"number of tasks must match number of task weights; "
                f"found: {len(self.task_weights)} != {len(self.tasks)}")
        # One loss tracker per task, keyed by the task object.
        self.train_loss_trackers = {
            task: loss_trackers.TrainLossTracker(task)
            for task in tasks
        }
        self.loss_comb_method = loss_comb_method
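
The task-weight handling above defaults to a uniform distribution and rejects length mismatches. The same logic as a standalone demo:

def resolve_task_weights(tasks, task_weights=None):
    # Uniform 1/N weights unless explicit weights are given, one per task.
    weights = task_weights or [1.0 / len(tasks)] * len(tasks)
    if len(weights) != len(tasks):
        raise ValueError(f"number of tasks must match number of task weights; "
                         f"found: {len(weights)} != {len(tasks)}")
    return weights

print(resolve_task_weights(["mt", "tagging"]))              # [0.5, 0.5]
print(resolve_task_weights(["mt", "tagging"], [0.7, 0.3]))  # [0.7, 0.3]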
Example #3
    # Multi-task constructor with per-task step counts (n_task_steps,
    # defaulting to one step per task) and a per_task_backward flag that
    # controls whether each task runs its own backward pass.
    def __init__(
        self,
        tasks: Sequence[train_tasks.TrainingTask],
        trainer: optimizers.XnmtOptimizer = bare(optimizers.SimpleSGDTrainer,
                                                 e0=0.1),
        dev_zero: bool = False,
        per_task_backward: bool = True,
        loss_comb_method: str = Ref("exp_global.loss_comb_method",
                                    default="sum"),
        update_every: numbers.Integral = 1,
        n_task_steps: Optional[Sequence[numbers.Integral]] = None,
        commandline_args: Optional[dict] = Ref("exp_global.commandline_args",
                                               default=None)
    ) -> None:
        super().__init__(tasks=tasks,
                         trainer=trainer,
                         dev_zero=dev_zero,
                         update_every=update_every,
                         commandline_args=commandline_args)
        self.train_loss_trackers = {
            task: loss_trackers.TrainLossTracker(task)
            for task in tasks
        }
        self.per_task_backward = per_task_backward
        self.loss_comb_method = loss_comb_method
        # One step per task by default; otherwise one count per task.
        self.n_task_steps = n_task_steps or [1] * len(tasks)
        if len(self.n_task_steps) != len(tasks):
            raise ValueError(
                f"number of tasks and steps per task do not match: "
                f"{len(tasks)} != {len(self.n_task_steps)}")
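
n_task_steps defaults to a single step per task. One plausible reading (an assumption for illustration, not taken from xnmt's source) is a round-robin schedule in which task i is visited n_task_steps[i] times per cycle:

from itertools import chain, repeat

def one_cycle(tasks, n_task_steps):
    # Hypothetical scheduler: expand each task according to its step count.
    return list(chain.from_iterable(
        repeat(task, steps) for task, steps in zip(tasks, n_task_steps)))

print(one_cycle(["mt", "parse"], [2, 1]))  # ['mt', 'mt', 'parse']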
Example #4
    # Simplest multi-task constructor: shared trainer, per-task loss
    # trackers, and no extra scheduling parameters.
    def __init__(
        self,
        tasks: Sequence[train_tasks.TrainingTask],
        trainer: optimizers.XnmtOptimizer = bare(optimizers.SimpleSGDTrainer,
                                                 e0=0.1),
        dev_zero: bool = False,
        loss_comb_method: str = Ref("exp_global.loss_comb_method",
                                    default="sum"),
        update_every: numbers.Integral = 1,
        commandline_args: Optional[dict] = Ref("exp_global.commandline_args",
                                               default=None)
    ) -> None:
        super().__init__(tasks=tasks,
                         trainer=trainer,
                         dev_zero=dev_zero,
                         commandline_args=commandline_args,
                         update_every=update_every)
        self.train_loss_trackers = {
            task: loss_trackers.TrainLossTracker(task)
            for task in tasks
        }
        self.loss_comb_method = loss_comb_method
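
All four constructors share the same two defaulting helpers: bare(cls, **kwargs) defers construction of a default component until the object is actually built, avoiding a mutable default shared across instances, and Ref(path, default=...) resolves a value from the experiment configuration at load time. A simplified sketch of the idea behind bare, with illustrative names only:

class Bare:
    # Record the class and kwargs now; build a fresh instance per owner
    # later, so no default object is shared at import time.
    def __init__(self, cls, **kwargs):
        self.cls = cls
        self.kwargs = kwargs

    def build(self):
        return self.cls(**self.kwargs)

sgd_default = Bare(dict, lr=0.1)   # placeholder stored in a signature
trainer_a = sgd_default.build()    # each build() yields a new object
trainer_b = sgd_default.build()
assert trainer_a is not trainer_b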