import pytest

import chainerx


def test_force_backprop_mode():
    with chainerx.backprop_scope('bp1') as bp1, \
            chainerx.backprop_scope('bp2') as bp2:

        with chainerx.no_backprop_mode():
            assert not chainerx.is_backprop_required()
            assert not chainerx.is_backprop_required(bp1)
            assert not chainerx.is_backprop_required(bp2)

            # No arguments: re-enables backprop for all graphs in the
            # default context.
            with chainerx.force_backprop_mode():
                assert chainerx.is_backprop_required()
                assert chainerx.is_backprop_required(bp1)
                assert chainerx.is_backprop_required(bp2)

            # Leaving the inner context restores the disabled state.
            assert not chainerx.is_backprop_required()
            assert not chainerx.is_backprop_required(bp1)
            assert not chainerx.is_backprop_required(bp2)

            # Passing a context has the same effect as no arguments.
            with chainerx.force_backprop_mode(chainerx.get_default_context()):
                assert chainerx.is_backprop_required()
                assert chainerx.is_backprop_required(bp1)
                assert chainerx.is_backprop_required(bp2)

            assert not chainerx.is_backprop_required()
            assert not chainerx.is_backprop_required(bp1)
            assert not chainerx.is_backprop_required(bp2)

            # A single backprop ID re-enables only that graph.
            with chainerx.force_backprop_mode(bp1):
                assert not chainerx.is_backprop_required()
                assert chainerx.is_backprop_required(bp1)
                assert not chainerx.is_backprop_required(bp2)

            assert not chainerx.is_backprop_required()
            assert not chainerx.is_backprop_required(bp1)
            assert not chainerx.is_backprop_required(bp2)

            # A tuple of backprop IDs re-enables only the listed graphs.
            with chainerx.force_backprop_mode((bp1, bp2)):
                assert not chainerx.is_backprop_required()
                assert chainerx.is_backprop_required(bp1)
                assert chainerx.is_backprop_required(bp2)

            assert not chainerx.is_backprop_required()
            assert not chainerx.is_backprop_required(bp1)
            assert not chainerx.is_backprop_required(bp2)

        # Outside no_backprop_mode, force_backprop_mode changes nothing:
        # backprop is required by default.
        with chainerx.force_backprop_mode():
            assert chainerx.is_backprop_required()
            assert chainerx.is_backprop_required(bp1)
            assert chainerx.is_backprop_required(bp2)
def test_is_backprop_required():
    current_context = chainerx.get_default_context()
    another_context = chainerx.Context()

    with chainerx.backprop_scope('bp1') as bp1, \
            chainerx.backprop_scope('bp2') as bp2:

        with chainerx.no_backprop_mode():
            with chainerx.force_backprop_mode(bp1):
                assert not chainerx.is_backprop_required()
                assert chainerx.is_backprop_required(bp1)
                assert not chainerx.is_backprop_required(bp2)
                assert not chainerx.is_backprop_required(
                    context=current_context)
                # Backprop modes are per-context; another context is
                # unaffected by no_backprop_mode on the current one.
                assert chainerx.is_backprop_required(context=another_context)

        # The context argument must be a chainerx.Context.
        with pytest.raises(TypeError):
            chainerx.is_backprop_required(context='foo')
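# Illustrative sketch (not part of the test suite; the helper name is made
# up and the array API below is assumed): force_backprop_mode does more
# than flip the is_backprop_required() flag. Operations run inside it are
# recorded on the graph even under no_backprop_mode, so backward() can
# later fill in gradients.
def _sketch_force_backprop_builds_graph():
    x = chainerx.array([1.0], dtype=chainerx.float32).require_grad()
    with chainerx.no_backprop_mode():
        with chainerx.force_backprop_mode():
            y = x * 2  # recorded on the graph despite no_backprop_mode
    chainerx.backward(y)
    assert x.get_grad() is not None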
def force_backprop_mode():
    """Make a context manager which enables back-propagation.

    When you want to enable back-propagation in :func:`no_backprop_mode`,
    call this method. A :class:`~chainer.Variable` created in this context
    always has a computational graph unless overridden by deeper contexts.
    If you call this method outside of :func:`no_backprop_mode` context, it
    changes nothing.

    In the following example, ``y`` has a computational graph and calling
    :func:`~chainer.Variable.backward` on ``y`` will compute and accumulate
    the gradients of the variables in the graph, in this case only ``x``.

    >>> x = chainer.Variable(np.array([1,], np.float32))
    >>> with chainer.no_backprop_mode():
    ...     with chainer.force_backprop_mode():
    ...         y = x + 1
    >>> y.backward()
    >>> x.grad
    array([1.], dtype=float32)

    .. note::

        ``chainer.force_backprop_mode()`` implicitly applies ChainerX's
        counterpart :func:`chainerx.force_backprop_mode()`, but not vice
        versa. Also, setting ``enable_backprop`` :ref:`configuration
        <configuration>` does not affect ChainerX.

    .. seealso::

        See :func:`chainer.no_backprop_mode` for details on disabled
        back-propagation mode.

    """
    c = configuration.using_config('enable_backprop', True)
    if chainerx.is_available():
        return _BackpropModeContext((c, chainerx.force_backprop_mode()))
    return _BackpropModeContext((c,))
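# Illustrative sketch (not part of the module; the helper name is made up
# and it assumes chainerx.is_available() is True): entering
# chainer.force_backprop_mode() also applies chainerx.force_backprop_mode(),
# while using the ChainerX context manager alone leaves Chainer's
# enable_backprop configuration untouched.
def _sketch_chainer_applies_chainerx_counterpart():
    import chainer
    import chainerx

    with chainer.no_backprop_mode():
        assert not chainerx.is_backprop_required()
        with chainer.force_backprop_mode():
            assert chainer.config.enable_backprop
            assert chainerx.is_backprop_required()

    with chainer.no_backprop_mode():
        with chainerx.force_backprop_mode():
            # Only ChainerX is affected; Chainer's flag stays False.
            assert not chainer.config.enable_backprop
            assert chainerx.is_backprop_required()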