Example #1
def backward(ctx, grad_spike, grad_v_next):
    # Delegate the gradient computation to the compiled _C_neuron extension,
    # using the tensors and hyper-parameters stashed on ctx by the forward pass.
    grad_x, grad_v = _C_neuron.LIF_backward(grad_spike, grad_v_next,
                                            ctx.saved_tensors[0],
                                            ctx.saved_tensors[1],
                                            ctx.reciprocal_tau,
                                            ctx.detach_input)
    # One gradient slot per forward input; only the input current and the
    # membrane potential are differentiable, the remaining arguments get None.
    return grad_x, grad_v, None, None, None, None, None, None, None
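The forward that pairs with this backward is not part of the example, so the sketch below is only a hypothetical pure-PyTorch stand-in that illustrates the ctx bookkeeping the backward relies on: two saved tensors (the surrogate gradient of the spike with respect to the hidden state, and the gradient of the next membrane potential with respect to the hidden state) plus the reciprocal_tau and detach_input attributes. Its nine forward arguments mirror the nine gradient slots returned above, but apart from reciprocal_tau and detach_input every name is a placeholder, and the sigmoid surrogate is an arbitrary choice.

import torch
from torch.autograd import Function

class LIFStepSketch(Function):
    """Hypothetical stand-in for the fused _C_neuron kernels; not the library's code."""

    @staticmethod
    def forward(ctx, x, v, v_threshold, v_reset, reciprocal_tau, detach_input,
                alpha, detach_reset, surrogate_index):
        # Hard-reset LIF charge: H_t = V_{t-1} + (X_t - (V_{t-1} - V_reset)) / tau
        h = v + (x - (v - v_reset)) * reciprocal_tau
        spike = (h >= v_threshold).to(x)
        v_next = spike * v_reset + (1.0 - spike) * h
        # Surrogate derivative dS_t/dH_t (sigmoid surrogate, chosen arbitrarily here)
        sg = torch.sigmoid(alpha * (h - v_threshold))
        grad_s_to_h = alpha * sg * (1.0 - sg)
        # dV_t/dH_t, with the spike dependence of the reset treated as detached
        grad_v_to_h = 1.0 - spike
        # Exactly the state the backward above reads back out of ctx
        ctx.save_for_backward(grad_s_to_h, grad_v_to_h)
        ctx.reciprocal_tau = reciprocal_tau
        ctx.detach_input = detach_input
        return spike, v_next

Paired with a backward like the one above (with the kernel call replaced by plain tensor ops), LIFStepSketch.apply(...) would train end to end; the compiled _C_neuron kernel presumably exists to fuse these elementwise steps for speed.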
Example #2
def LIF_backward(grad_spike: torch.Tensor, grad_v_next: torch.Tensor,
                 grad_s_to_h: torch.Tensor, grad_v_to_h: float,
                 reciprocal_tau: float, detach_input: bool):
    '''
    * :ref:`API in English <LIF_backward-en>`

    .. _LIF_backward-cn:

    :param reciprocal_tau: :math:`\\frac{1}{\\tau}`
    :type reciprocal_tau: float

    See :ref:`backward_template <backward_template-cn>` for details about the other args.

    The gradients are calculated by

    .. math::
        \\frac{\\partial H_{t}}{\\partial X_{t}} & = \\frac{1}{\\tau}

        \\frac{\\partial H_{t}}{\\partial V_{t-1}} & = 1 - \\frac{1}{\\tau}

    * :ref:`API in Chinese <LIF_backward-cn>`

    .. _LIF_backward-en:

    :param reciprocal_tau: :math:`\\frac{1}{\\tau}`
    :type reciprocal_tau: float

    See :ref:`backward_template <backward_template-en>` for details about the other args.

    The gradients are calculated by

    .. math::
        \\frac{\\partial H_{t}}{\\partial X_{t}} & = \\frac{1}{\\tau}

        \\frac{\\partial H_{t}}{\\partial V_{t-1}} & = 1 - \\frac{1}{\\tau}

    '''
    return _C_neuron.LIF_backward(grad_spike, grad_v_next, grad_s_to_h,
                                  grad_v_to_h, reciprocal_tau, detach_input)
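
For reference, the chain rule that these partial derivatives feed can be spelled out in plain PyTorch. The sketch below is a re-derivation under stated assumptions, not the library's implementation: it takes the argument names literally (grad_s_to_h as the surrogate :math:`\partial S_t / \partial H_t`, grad_v_to_h as :math:`\partial V_t / \partial H_t`) and applies the documented :math:`\partial H_t / \partial X_t = 1/\tau` and :math:`\partial H_t / \partial V_{t-1} = 1 - 1/\tau`; whatever _C_neuron.LIF_backward actually does with detach_input is not reproduced here.

import torch

def lif_backward_reference(grad_spike: torch.Tensor, grad_v_next: torch.Tensor,
                           grad_s_to_h: torch.Tensor, grad_v_to_h,
                           reciprocal_tau: float, detach_input: bool):
    # Chain rule into the hidden state:
    #   dL/dH_t = dL/dS_t * dS_t/dH_t + dL/dV_t * dV_t/dH_t
    # (works whether grad_v_to_h is a tensor or a plain float)
    grad_h = grad_spike * grad_s_to_h + grad_v_next * grad_v_to_h
    # Documented partials: dH_t/dX_t = 1/tau, dH_t/dV_{t-1} = 1 - 1/tau
    grad_x = grad_h * reciprocal_tau
    grad_v = grad_h * (1.0 - reciprocal_tau)
    # detach_input is accepted only for signature parity; its effect inside the
    # compiled kernel is not modelled in this sketch.
    return grad_x, grad_v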