Example #1
 def backward(self, loss):
     """Wrapper for backward pass. Some optimizer requires ownership of the
     backward pass."""
     if self._with_fp16_wrapper:
         kwargs = {}
         if "update_master_grads" in fn_args(self._optimizer.backward):
             kwargs["update_master_grads"] = True
         self._optimizer.backward(loss, **kwargs)
     else:
         loss.backward()
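The signature probe above passes update_master_grads=True only when the wrapped FP16 optimizer (for instance the older Apex FP16_Optimizer) actually accepts that keyword, so the same wrapper also works with plain optimizers. fn_args is assumed here to be a small introspection helper that returns a callable's parameter names; a minimal sketch under that assumption:

 import inspect

 def fn_args(fun):
     """Return the parameter names accepted by a callable.

     Sketch of the helper the examples rely on; the real project may
     implement it differently (e.g. via inspect.getfullargspec).
     """
     return inspect.signature(fun).parameters.keys()

 # e.g.: "update_master_grads" in fn_args(optimizer.backward)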
Example #2
 def backward(self, loss, retain_graph=False):
     """Wrapper for backward pass. Some optimizer requires ownership of the
     backward pass."""
     #with torch.autograd.detect_anomaly():
     if self._with_fp16_wrapper:
         kwargs = {"retain_graph": retain_graph}
         if "update_master_grads" in fn_args(self._optimizer.backward):
             kwargs["update_master_grads"] = True
         self._optimizer.backward(loss, **kwargs)
     else:
         loss.backward(retain_graph=retain_graph)
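This variant threads retain_graph through to backward, which matters when a second backward pass must run over a graph whose intermediate results are shared between losses; without the flag, PyTorch frees the saved tensors after the first pass. A minimal standalone illustration, independent of the wrapper:

 import torch

 x = torch.randn(3, requires_grad=True)
 h = x * x                          # shared intermediate; its backward node saves x
 loss1 = h.sum()
 loss2 = h.mean()

 loss1.backward(retain_graph=True)  # keep saved tensors alive for the next pass
 loss2.backward()                   # raises a RuntimeError without retain_graph=True above

The commented-out torch.autograd.detect_anomaly() context in the example is a debugging aid that reports which forward operation produced a NaN/Inf gradient, at a significant speed cost.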
Example #3
 def backward(self, loss):
     """Wrapper for backward pass. Some optimizer requires ownership of the
     backward pass."""
     if self.amp:
         self._scaler.scale(loss).backward()
     elif self._fp16 == "legacy":
         kwargs = {}
         if "update_master_grads" in fn_args(self._optimizer.backward):
             kwargs["update_master_grads"] = True
         self._optimizer.backward(loss, **kwargs)
     else:
         loss.backward()
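The self.amp branch assumes a gradient scaler in the torch.cuda.amp style: scale(loss).backward() produces scaled gradients, and the matching scaler.step/scaler.update calls belong in the optimizer-step wrapper. A minimal training-step sketch with a hypothetical model, optimizer, and loss, showing where this backward call sits:

 import torch

 model = torch.nn.Linear(10, 1).cuda()
 optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
 scaler = torch.cuda.amp.GradScaler()

 def train_step(inputs, targets):
     optimizer.zero_grad()
     with torch.cuda.amp.autocast():
         loss = torch.nn.functional.mse_loss(model(inputs), targets)
     scaler.scale(loss).backward()  # mirrors self._scaler.scale(loss).backward()
     scaler.step(optimizer)         # skips the step if inf/nan gradients were found
     scaler.update()
     return loss.item()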
Example #4
File: optimizers.py Project: menggehe/DRAW
 def backward(self, loss):
     """Wrapper for backward pass. Some optimizer requires ownership of the
     backward pass."""
     if self._fp16 == "amp":
         import apex
         with apex.amp.scale_loss(loss, self._optimizer) as scaled_loss:
             scaled_loss.backward()
     elif self._fp16 == "legacy":
         kwargs = {}
         if "update_master_grads" in fn_args(self._optimizer.backward):
             kwargs["update_master_grads"] = True
         self._optimizer.backward(loss, **kwargs)
     else:
         loss.backward()
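The "amp" branch delegates loss scaling to NVIDIA Apex, whose scale_loss context manager only works after the model and optimizer have been registered with apex.amp.initialize. A minimal setup sketch, assuming Apex is installed and using a hypothetical model and optimizer:

 import torch
 from apex import amp

 model = torch.nn.Linear(10, 1).cuda()
 optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

 # Register model/optimizer with Apex AMP before any scale_loss call.
 model, optimizer = amp.initialize(model, optimizer, opt_level="O1")

 loss = model(torch.randn(4, 10).cuda()).pow(2).mean()
 with amp.scale_loss(loss, optimizer) as scaled_loss:
     scaled_loss.backward()  # Apex scales the loss, then unscales the gradients
 optimizer.step()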