Example #1
    def backward(self, loss: dy.Expression,
                 dynet_profiling: numbers.Integral) -> None:
        """
    Perform backward pass to accumulate gradients.

    Args:
      loss: Result of self.training_step(...)
      dynet_profiling: if > 0, print the computation graph
    """
        if dynet_profiling and dynet_profiling > 0:
            dy.print_text_graphviz()
        loss.backward()
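Note that `backward` here only accumulates gradients; applying them is a separate step (see `update_weights` in the next examples). A minimal sketch of why that separation is useful, assuming a toy DyNet model that is not part of xnmt: gradients can be accumulated across several computation graphs before a single update, since `dy.renew_cg()` resets the graph but not the parameter gradients.

import dynet as dy

model = dy.ParameterCollection()
p_W = model.add_parameters((1, 2))
trainer = dy.SimpleSGDTrainer(model)

batches = [[1.0, 0.0], [0.0, 1.0]]     # toy inputs, assumed for illustration
for x in batches:
    dy.renew_cg()                      # new graph; parameter gradients persist
    W = dy.parameter(p_W)
    loss = dy.squared_norm(W * dy.inputVector(x))
    loss.backward()                    # accumulate gradients for this batch
trainer.update()                       # one update over the accumulated gradients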
Example #2
    def update_weights(self, loss, trainer, dynet_profiling):
        """
    Standardized way to perform backward pass and parameter updates.

    :param loss: Result of self.training_step(...)
    :param trainer: DyNet trainer / xnmt.optimizer object
    :param dynet_profiling: if > 0, print the computation graph
    """
        if dynet_profiling and dynet_profiling > 0:
            dy.print_text_graphviz()
        loss.backward()
        trainer.update()
Example #3
    def update_weights(self, loss, trainer, dynet_profiling):
        """
    Standardized way to perform backward pass and parameter updates.

    Args:
      loss: Result of self.training_step(...)
      trainer (XnmtOptimizer): DyNet trainer
      dynet_profiling (int): if > 0, print the computation graph
    """
        if dynet_profiling and dynet_profiling > 0:
            dy.print_text_graphviz()
        loss.backward()
        trainer.update()
Example #4
def print_cg_conditional() -> None:
    # assumes `import dynet as dy` and xnmt's `settings` object are in scope
    if settings.PRINT_CG_ON_ERROR:
        dy.print_text_graphviz()
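For context, a self-contained sketch tying the pattern together; the toy model and the hard-coded `dynet_profiling` value are assumptions for illustration. `dy.print_text_graphviz()` dumps the current computation graph in Graphviz text form, which is why the examples above call it before running the backward pass.

import dynet as dy

model = dy.ParameterCollection()
p_W = model.add_parameters((1, 3))
trainer = dy.SimpleSGDTrainer(model)

dy.renew_cg()                          # start a fresh computation graph
W = dy.parameter(p_W)
loss = dy.squared_norm(W * dy.inputVector([1.0, 2.0, 3.0]))  # toy loss

dynet_profiling = 1                    # assumed flag value for the demo
if dynet_profiling and dynet_profiling > 0:
    dy.print_text_graphviz()           # print the graph before backward
loss.backward()                        # accumulate gradients
trainer.update()                       # apply the parameter update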