def _backward(self, state, root_gradients, variables):
    '''
    Backpropagates supplied ``root_gradients`` for one or more of the
    output variables of the Function, to calculate gradients with respect
    to ``variables``. Formally, multiplies the values of ``root_gradients``
    by the Jacobian of the Function and returns the subset of the output
    that corresponds to ``variables``.

    This function calls :func:`backward`, which is to be implemented by
    the user.

    Args:
        state (BackPropState): state obtained from a previous call to the
         func:`cntk.ops.Function.forward` method on this Function for the
         computation that this gradient backpropagation corresponds to.
        root_gradients (dict): the gradients that will be backpropagated
        variables (set): a list of input variables with respect to which
         the gradients have to be computed.

    Returns:
        dict: mapping of ``variables`` to NumPy arrays
    '''
    if self.as_numpy:
        # The device must be read off the C++ BackPropState *before*
        # `state` is replaced by the user's raw state object below.
        device = state.device()
        for v in root_gradients:
            root_gradients[v] = variable_value_to_seq(root_gradients[v], v)
        state = cntk_py.UserBackPropState.data(state)
    else:
        # BUGFIX: previously `state.device()` was called unconditionally
        # before this branch, so the documented case as_numpy=False with
        # state=None raised AttributeError before the _none_state fallback
        # could run. The device is only needed on the as_numpy path, so it
        # is now fetched there.
        if not isinstance(state, cntk_py.BackPropState):
            if state is None:
                state = self._none_state
            else:
                raise ValueError('if as_numpy=False, state must be of '
                                 'type BackPropState')

    map_if_possible(variables)

    # Convenience: a single root gradient is passed to the user bare,
    # not wrapped in a dict.
    if len(root_gradients) == 1:
        for rg in root_gradients.values():
            break
        root_gradients = rg

    possible_wrt = [input for input in self.inputs if input.needs_gradient]
    if len(possible_wrt) > 1:
        # Several inputs may need gradients: the user's backward fills the
        # `variables` dict in place.
        self.backward(state, root_gradients, variables)
    else:
        # Only one candidate input: the user's backward simply returns the
        # gradient, which we assign to every requested variable.
        result = self.backward(state, root_gradients)
        for k in variables:
            variables[k] = result

    if self.as_numpy:
        for k, v in variables.items():
            if v is None:
                raise ValueError('gradients were not provided for all variables')

            variables[k] = sanitize_batch(k, v, None, device)
def _forward(self, arguments, outputs, device=None, outputs_to_retain=None):
    '''
    Computes the values of specified variables in ``outputs``, using values
    provided in ``arguments`` that correspond to each input `Variable` of
    the function whose ``is_input`` is `True`.

    This function calls :func:`forward`, which is to be implemented by the
    user.

    Args:
        arguments (tuple): Value objects of the Function's input
        outputs (iterable): outputs to fetch values for.
        device (:class:`~cntk.device.DeviceDescriptor`, default `None`): the
         device descriptor that contains the type and id of the device on
         which the computation is. If `None`, the default device is used.

    Returns:
         A BackPropState instance, which is used by :func:`backward`.
    '''
    if self.as_numpy:
        # Convert each incoming Value into a NumPy sequence, paired with
        # the input variable at the same position.
        converted = []
        for idx, value in enumerate(arguments):
            converted.append(variable_value_to_seq(value, self.inputs[idx]))
        arguments = tuple(converted)

        map_if_possible(outputs)
        map_if_possible(outputs_to_retain)

    # A single argument is handed to the user bare, not wrapped in a tuple.
    if len(arguments) > 1:
        args = arguments
    else:
        args = arguments[0]

    if len(outputs) > 1:
        # The user's forward fills the `outputs` dict in place.
        state = self.forward(args, outputs, device, outputs_to_retain)
    else:
        # Single output: the user's forward returns the value directly.
        state, result = self.forward(args, device, outputs_to_retain)
        for out_var in outputs:
            outputs[out_var] = result

    if state is None:
        state = self._none_state
    elif not isinstance(state, cntk_py.BackPropState):
        # Wrap arbitrary user state so the C++ side can carry it along.
        state = cntk_py.UserBackPropState(self, device, state)

    if self.as_numpy:
        for out_var, value in outputs.items():
            if value is None:
                raise ValueError('not all outputs have been provided')

            # FIXME: seq_starts
            outputs[out_var] = sanitize_batch(out_var, value, None, device)

    return state, outputs
def _forward(self, arguments, outputs, device=None, outputs_to_retain=None):
    '''
    Computes the values of specified variables in ``outputs``, using values
    provided in ``arguments`` that correspond to each input `Variable` of
    the function whose ``is_input`` is `True`.

    This function calls :func:`forward`, which is to be implemented by the
    user.

    Args:
        arguments (tuple): Value objects of the Function's input
        outputs (iterable): outputs to fetch values for.
        device (:class:`~cntk.device.DeviceDescriptor`, default `None`): the
         device descriptor that contains the type and id of the device on
         which the computation is. If `None`, the default device is used.

    Returns:
         A BackPropState instance, which is used by :func:`backward`.
    '''
    if self.as_numpy:
        # Pair every incoming Value with the input variable at the same
        # position and convert it to a NumPy sequence.
        arguments = tuple(
            variable_value_to_seq(arg, self.inputs[pos])
            for pos, arg in enumerate(arguments))
        map_if_possible(outputs)
        map_if_possible(outputs_to_retain)

    # Hand a lone argument to the user bare, not wrapped in a tuple.
    args = arguments[0] if len(arguments) <= 1 else arguments

    if len(outputs) <= 1:
        # Single output: the user's forward returns the value directly,
        # which we fan out to the (one) requested output.
        state, value = self.forward(args, device, outputs_to_retain)
        for output_var in outputs:
            outputs[output_var] = value
    else:
        # Multiple outputs: the user's forward fills `outputs` in place.
        state = self.forward(args, outputs, device, outputs_to_retain)

    if not isinstance(state, cntk_py.BackPropState):
        # None means "no state"; anything else is user state that must be
        # wrapped so the C++ side can carry it along.
        state = (self._none_state if state is None
                 else cntk_py.UserBackPropState(self, device, state))

    if self.as_numpy:
        for output_var, value in outputs.items():
            if value is None:
                raise ValueError('not all outputs have been provided')

            # FIXME: seq_starts
            outputs[output_var] = sanitize_batch(output_var, value, None,
                                                 device)

    return state, outputs
def _backward(self, state, root_gradients, variables):
    '''
    Backpropagates supplied ``root_gradients`` for one or more of the
    output variables of the Function, to calculate gradients with respect
    to ``variables``. Formally, multiplies the values of ``root_gradients``
    by the Jacobian of the Function and returns the subset of the output
    that corresponds to ``variables``.

    This function calls :func:`backward`, which is to be implemented by
    the user.

    Args:
        state (BackPropState): state obtained from a previous call to the
         func:`cntk.ops.Function.forward` method on this Function for the
         computation that this gradient backpropagation corresponds to.
        root_gradients (dict): the gradients that will be backpropagated
        variables (set): a list of input variables with respect to which
         the gradients have to be computed.

    Returns:
        dict: mapping of ``variables`` to NumPy arrays
    '''
    # Convert the incoming Value gradients to NumPy sequences, in place.
    for var in root_gradients:
        root_gradients[var] = variable_value_to_seq(root_gradients[var], var)
    map_if_possible(variables)

    # Unwrap the raw user state that was stored at forward time.
    user_state = cntk_py.UserBackPropState.data(state)

    if len(variables) > 1:
        # The user's backward fills the `variables` dict in place.
        self.backward(user_state, root_gradients, variables)
    else:
        # Single wrt variable: pass the lone root gradient bare and fan
        # the returned gradient out to the requested variable(s).
        for single_root in root_gradients.values():
            break
        grad = self.backward(user_state, single_root)
        for wrt in variables:
            variables[wrt] = grad

    for wrt, grad in variables.items():
        if grad is None:
            raise ValueError('gradients were not provided for all variables')

        variables[wrt] = sanitize_batch(wrt, grad, None, state.device())
def _backward(self, state, root_gradients, variables):
    '''
    Backpropagates supplied ``root_gradients`` for one or more of the
    output variables of the Function, to calculate gradients with respect
    to ``variables``. Formally, multiplies the values of ``root_gradients``
    by the Jacobian of the Function and returns the subset of the output
    that corresponds to ``variables``.

    This function calls :func:`backward`, which is to be implemented by
    the user.

    Args:
        state (BackPropState): state obtained from a previous call to the
         func:`cntk.ops.Function.forward` method on this Function for the
         computation that this gradient backpropagation corresponds to.
        root_gradients (dict): the gradients that will be backpropagated
        variables (set): a list of input variables with respect to which
         the gradients have to be computed.

    Returns:
        dict: mapping of ``variables`` to NumPy arrays
    '''
    # Replace each incoming Value gradient with its NumPy sequence form.
    for grad_var in root_gradients:
        root_gradients[grad_var] = variable_value_to_seq(
            root_gradients[grad_var], grad_var)
    map_if_possible(variables)

    if len(variables) <= 1:
        # Single wrt variable: hand the lone root gradient to the user
        # bare and assign the returned gradient to every requested entry.
        for lone_gradient in root_gradients.values():
            break
        backprop_result = self.backward(
            cntk_py.UserBackPropState.data(state), lone_gradient)
        for wrt_var in variables:
            variables[wrt_var] = backprop_result
    else:
        # Multiple wrt variables: the user's backward fills `variables`
        # in place.
        self.backward(cntk_py.UserBackPropState.data(state),
                      root_gradients, variables)

    for wrt_var, gradient in variables.items():
        if gradient is None:
            raise ValueError('gradients were not provided for all variables')

        variables[wrt_var] = sanitize_batch(wrt_var, gradient, None,
                                            state.device())