def _track_gradient_norm(self):
    # Compute per-parameter gradient norms, but only on logging steps and
    # only when norm tracking is enabled (track_grad_norm > 0; the default
    # of -1 disables it).
    grad_norm_dict = {}
    if (self.global_step + 1) % self.trainer.log_every_n_steps == 0:
        if float(self.trainer.track_grad_norm) > 0:
            model = self.trainer.lightning_module
            grad_norm_dict = grad_norm(model, self.trainer.track_grad_norm)
    return grad_norm_dict
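The grad_norm helper called above returns a dictionary of per-parameter gradient norms plus a total norm. A minimal standalone sketch of that computation in plain PyTorch (the key names, the compute_grad_norms helper, and the toy nn.Linear model are illustrative assumptions, not Lightning's exact output):

import torch
from torch import nn

def compute_grad_norms(model: nn.Module, norm_type: float = 2.0) -> dict:
    """Return per-parameter gradient norms plus a total norm."""
    norms = {}
    grads = []
    for name, param in model.named_parameters():
        if param.grad is not None:
            grads.append(param.grad)
            norms[f"grad_{norm_type}_norm/{name}"] = param.grad.norm(norm_type).item()
    if grads:
        total = torch.norm(torch.stack([g.norm(norm_type) for g in grads]), norm_type)
        norms[f"grad_{norm_type}_norm_total"] = total.item()
    return norms

# Usage: run a backward pass, then inspect the norms before the optimizer step.
model = nn.Linear(4, 2)
loss = model(torch.randn(8, 4)).sum()
loss.backward()
print(compute_grad_norms(model))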
Example #2
    def track_and_norm_grad(self, optimizer) -> dict:
        # track gradient norms
        grad_norm_dict = {}
        if (self.global_step + 1) % self.trainer.log_every_n_steps == 0 and float(self.trainer.track_grad_norm) > 0:
            grad_norm_dict = grad_norm(self.trainer.lightning_module, self.trainer.track_grad_norm)

        # clip gradients
        self.trainer.accelerator.clip_gradients(
            optimizer, self.trainer.gradient_clip_val, gradient_clip_algorithm=self.trainer.gradient_clip_algorithm
        )
        return grad_norm_dict
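Outside Lightning, the same track-then-clip pattern can be sketched with torch.nn.utils.clip_grad_norm_, which both reports the total gradient norm and rescales gradients in place. A minimal sketch, assuming a toy model, a clipping threshold of 0.5, and a logging interval of 5 steps (all illustrative, not values taken from the snippet above):

import torch
from torch import nn

model = nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

for step in range(10):
    optimizer.zero_grad()
    loss = model(torch.randn(8, 4)).pow(2).mean()
    loss.backward()

    # Track: clip_grad_norm_ returns the total 2-norm of all gradients
    # (measured before clipping).
    # Clip: gradients are rescaled in place so the total norm is at most 0.5.
    total_norm = float(nn.utils.clip_grad_norm_(model.parameters(), max_norm=0.5))

    optimizer.step()
    if (step + 1) % 5 == 0:  # mimic log_every_n_steps
        print(f"step {step + 1}: grad_2.0_norm_total={total_norm:.4f}")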