def forward(self, prediction: ndarray, target: ndarray) -> float:
    """Computes the actual loss value."""
    assert_same_shape(prediction, target)

    self.prediction = prediction
    self.target = target

    loss_value = self._output()

    return loss_value
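All of these methods lean on an `assert_same_shape` helper and on NumPy's `ndarray` type. The helper's exact wording isn't shown here, so the following is a minimal sketch of the assumed supporting definitions:

import numpy as np
from numpy import ndarray

def assert_same_shape(array: ndarray, array_grad: ndarray) -> None:
    # Raise if the two arrays disagree in shape; used as a sanity
    # check throughout the forward and backward passes.
    assert array.shape == array_grad.shape, (
        f"Two ndarrays should have the same shape; instead, "
        f"first ndarray's shape is {array.shape} "
        f"and second ndarray's shape is {array_grad.shape}."
    )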
def backward(self, output_grad: ndarray) -> ndarray:
    """
    Calls the self._input_grad() function.
    Checks that the appropriate shapes match.
    """
    assert_same_shape(self.output, output_grad)

    self.input_grad = self._input_grad(output_grad)

    assert_same_shape(self.input_, self.input_grad)

    return self.input_grad
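To make the contract this method enforces concrete, here is a minimal sketch of a hypothetical `Operation` subclass, a `Sigmoid`, that supplies the `_output` and `_input_grad` hooks. It assumes the base class's forward pass has already stored the input as `self.input_` and the result as `self.output`:

class Sigmoid(Operation):
    # Applies the sigmoid function elementwise.

    def _output(self) -> ndarray:
        # Forward pass: sigma(x) = 1 / (1 + exp(-x)).
        return 1.0 / (1.0 + np.exp(-self.input_))

    def _input_grad(self, output_grad: ndarray) -> ndarray:
        # Backward pass, via the chain rule:
        # sigma'(x) = sigma(x) * (1 - sigma(x)).
        return self.output * (1.0 - self.output) * output_grad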
def backward(self, output_grad: ndarray) -> ndarray:
    """Passes output_grad backward through a series of operations."""
    assert_same_shape(self.output, output_grad)

    # Apply the chain rule: walk the operations in reverse order,
    # feeding each operation's input gradient to the one before it.
    for operation in reversed(self.operations):
        output_grad = operation.backward(output_grad)

    input_grad = output_grad

    self._param_grads()

    return input_grad
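The `_param_grads` call at the end gathers up the parameter gradients that each parameter-bearing operation computed during the loop. Its body isn't shown here; a plausible sketch, assuming the layer exposes a `self.param_grads` list, would be:

def _param_grads(self) -> None:
    # Collect the param_grad computed by each ParamOperation
    # during the backward loop, in forward order.
    self.param_grads = [
        operation.param_grad
        for operation in self.operations
        if isinstance(operation, ParamOperation)
    ]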
def backward(self, output_grad: ndarray) -> ndarray:
    """
    Calls self._input_grad and self._param_grad.
    Checks appropriate shapes.
    """
    assert_same_shape(self.output, output_grad)

    self.input_grad = self._input_grad(output_grad)
    self.param_grad = self._param_grad(output_grad)

    assert_same_shape(self.input_, self.input_grad)
    # Also verify the parameter gradient matches the parameter's shape.
    assert_same_shape(self.param, self.param_grad)

    return self.input_grad
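For concreteness, here is a minimal sketch of a hypothetical `ParamOperation` subclass, a `WeightMultiply` as might be used in a dense layer. It shows how `_input_grad` and `_param_grad` each apply the chain rule to a matrix multiplication, and it assumes the base class stores its parameter as `self.param`:

class WeightMultiply(ParamOperation):
    # Matrix-multiplies the input by a weight matrix.

    def __init__(self, W: ndarray) -> None:
        # Assumes ParamOperation.__init__ stores W as self.param.
        super().__init__(W)

    def _output(self) -> ndarray:
        # Forward pass: X @ W.
        return np.dot(self.input_, self.param)

    def _input_grad(self, output_grad: ndarray) -> ndarray:
        # dL/dX = dL/dY @ W^T.
        return np.dot(output_grad, self.param.T)

    def _param_grad(self, output_grad: ndarray) -> ndarray:
        # dL/dW = X^T @ dL/dY.
        return np.dot(self.input_.T, output_grad)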
def backward(self) -> ndarray:
    """
    Computes gradient of the loss value with respect to
    the input to the loss function.
    """
    self.input_grad = self._input_grad()

    assert_same_shape(self.prediction, self.input_grad)

    return self.input_grad
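A minimal sketch of a concrete `Loss` subclass, mean squared error, that fills in the `_output` and `_input_grad` hooks these two methods call. Averaging over `self.prediction.shape[0]` (the batch dimension) is an assumption here; the forward value and the gradient are kept consistent with each other:

class MeanSquaredError(Loss):
    # Mean squared error loss, averaged over the batch.

    def _output(self) -> float:
        # Squared differences, summed and divided by the batch size.
        return float(
            np.sum(np.power(self.prediction - self.target, 2))
            / self.prediction.shape[0]
        )

    def _input_grad(self) -> ndarray:
        # Gradient of the loss with respect to the prediction:
        # d/dp [ (p - t)^2 / N ] = 2 * (p - t) / N.
        return 2.0 * (self.prediction - self.target) / self.prediction.shape[0]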