def test_check_device_fail(shape, device, compare_device_spec):
    """check_device raises AssertionError when the device does not match."""
    arr = chainerx.empty(shape, 'float32', device=device)
    # Enter a different current device so the comparison against
    # compare_device_spec is guaranteed to fail.
    with chainerx.using_device('native:1'):
        with pytest.raises(AssertionError):
            array_utils.check_device(arr, compare_device_spec)
def test_empty_like_with_device(device):
    """empty_like with an explicit device matches the default-device result."""
    src = chainerx.empty((2,), 'float32')
    on_device = chainerx.empty_like(src, device)
    default = chainerx.empty_like(src)
    array_utils.check_device(on_device, device)
    # Contents of empty arrays are unspecified; compare metadata only.
    assert on_device.dtype == default.dtype
    assert on_device.shape == default.shape
def test_check_device_device_spec(shape, device_spec):
    """check_device accepts both a device spec and a resolved Device."""
    arr = chainerx.empty(shape, 'float32', device=device_spec)
    resolved = chainerx.get_device(device_spec)
    # Both the raw spec and the Device object must be accepted.
    for target in (device_spec, resolved):
        array_utils.check_device(arr, target)
def generate_array(initializer, shape, xp, dtype=None, device=None):
    # type: (types.AbstractInitializer, types.ShapeSpec, types.Xp, types.DTypeSpec, types.DeviceSpec) -> types.NdArray # NOQA
    """Create an array and fill it using ``initializer``.

    If the initializer exposes a ``dtype`` attribute, that dtype is used to
    construct the array. Otherwise ``chainer.config.dtype`` is consulted via
    :func:`chainer.get_dtype`. See :ref:`configuration` for the dtype config.

    Args:
        initializer: A callable object that takes :ref:`ndarray` and edits
            its value.
        shape (int or tuple of int): Shape of the initialized array.
        xp (module): :mod:`cupy`, :mod:`numpy`, or :mod:`chainerx`.
        dtype: Dtype specifier. If omitted, ``initializer.dtype`` is used.
        device: Target device specifier. If omitted, the current device is
            used for :mod:`cupy`, and the default device is used for
            :mod:`chainerx`.

    Returns:
        :ref:`ndarray`: An initialized array.

    """
    attr_dtype = getattr(initializer, 'dtype', None)
    # An explicit dtype must agree with the initializer's own dtype, if any.
    if dtype is not None and attr_dtype is not None:
        if numpy.dtype(dtype) != numpy.dtype(attr_dtype):
            raise ValueError(
                'dtype mismatch: {} != {}'.format(dtype, attr_dtype))
    dtype = chainer.get_dtype(dtype if dtype is not None else attr_dtype)

    if device is None:
        backend_device = backend._guess_device_from_array_module(xp)
    else:
        backend_device = chainer.get_device(device)
        if xp != backend_device.xp:
            raise ValueError('xp and device arguments are inconsistent.')

    if xp is chainerx:
        # The initializer operates on NumPy/CuPy arrays, so run it on a
        # fallback view that shares memory with the ChainerX array.
        # TODO(sonots): Directly use initializer after ChainerX
        # supports random.
        chx_device = backend_device.device
        out = chainerx.empty(shape, dtype=dtype, device=chx_device)
        fallback = backend_device.fallback_device
        with chainer.using_device(fallback):
            initializer(fallback.send(out))
        return out

    with chainer.using_device(backend_device):
        out = xp.empty(shape, dtype=dtype)
        initializer(out)
        return out
def backward_call_callback(call_arg):
    # Record a freshly allocated array every time the backward call fires.
    # NOTE(review): `shape`, `dtype`, and `backward_call_new_array` are free
    # variables captured from an enclosing scope not visible in this chunk —
    # confirm they are defined before this callback is invoked.
    backward_call_new_array.append(chainerx.empty(shape, dtype))
def test_full_like_with_device(device):
    """full_like honors an explicit device and matches the default result."""
    src = chainerx.empty((2,), 'float32')
    on_device = chainerx.full_like(src, 1, device)
    default = chainerx.full_like(src, 1)
    array_utils.check_device(on_device, device)
    chainerx.testing.assert_array_equal_ex(on_device, default)
def test_ones_like_with_device(shape, device):
    """ones_like honors an explicit device and matches the default result."""
    # NOTE(review): the `shape` fixture is unused; the source array is fixed
    # at (2,) like the sibling *_like tests — confirm this is intentional.
    src = chainerx.empty((2,), 'float32')
    on_device = chainerx.ones_like(src, device)
    default = chainerx.ones_like(src)
    array_utils.check_device(on_device, device)
    chainerx.testing.assert_array_equal_ex(on_device, default)
def test_check_device(shape, device):
    """check_device accepts both a device name string and a Device object."""
    arr = chainerx.empty(shape, 'float32', device=device)
    for target in (device.name, device):
        array_utils.check_device(arr, target)
def generate_array(initializer, shape, xp, dtype=None, device=None):
    # type: (types.AbstractInitializer, types.ShapeSpec, types.Xp, types.DTypeSpec, types.DeviceSpec) -> types.NdArray # NOQA
    """Return initialized array.

    The algorithms used to make the new values depend on the
    concrete derived classes. If the initializer has the ``dtype``
    attribute, it is used to construct the array. Otherwise,
    ``chainer.config.dtype`` is used instead. See :ref:`configuration`
    for the dtype config.

    Args:
        initializer: A callable object that takes :ref:`ndarray` and edits
            its value.
        shape (tuple): Shape of a return array.
        xp (module): :mod:`cupy`, :mod:`numpy`, or :mod:`chainerx`.
        dtype: Dtype specifier. If omitted, ``initializer.dtype`` is used.
        device: Target device specifier. If omitted, the current device is
            used for :mod:`cupy`, and the default device is used for
            :mod:`chainerx`.

    Returns:
        :ref:`ndarray`: An initialized array.

    """
    # An explicitly requested dtype must agree with the initializer's own
    # dtype attribute, if it has one.
    dtype_attr = getattr(initializer, 'dtype', None)
    if dtype is not None and dtype_attr is not None \
            and numpy.dtype(dtype) != numpy.dtype(dtype_attr):
        raise ValueError(
            'dtype mismatch: {} != {}'.format(dtype, dtype_attr))
    if dtype is None:
        dtype = dtype_attr
    # Falls back to chainer.config.dtype when dtype is still None.
    dtype = chainer.get_dtype(dtype)

    if device is None:
        backend_device = backend._guess_device_from_array_module(xp)
    else:
        backend_device = chainer.get_device(device)
        if xp != backend_device.xp:
            raise ValueError('xp and device arguments are inconsistent.')

    if xp is chainerx:
        # Initialize with NumPy/CuPy array that shares memory with the
        # ChainerX array.
        # TODO(sonots): Directly use initializer after ChainerX
        # supports random.
        chx_device = backend_device.device  # type: ignore
        # TODO(okapies): remove 'type: ignore' when chainerx implements
        # sequence support for empty()
        array = chainerx.empty(shape, dtype=dtype, device=chx_device)  # type: ignore # NOQA
        if chx_device.backend.name == 'native':
            temp_array = _cpu._to_cpu(array)
            temp_device = cuda.DummyDevice  # type: cuda.Device
        elif chx_device.backend.name == 'cuda':
            temp_array = cuda.to_gpu(array, chx_device.index)
            temp_device = cuda.Device(chx_device.index)
        else:
            raise RuntimeError('ChainerX backend: {} is not supported.'.format(
                chx_device.backend.name))
        # Run the initializer on the temporary NumPy/CuPy view under the
        # matching CUDA (or dummy CPU) device context.
        with temp_device:
            initializer(temp_array)
        return array

    with chainer.using_device(backend_device):
        array = xp.empty(shape, dtype=dtype)
        initializer(array)
    return array
def generate_array(initializer, shape, xp, dtype=None, device=None):
    # type: (types.AbstractInitializer, types.ShapeSpec, types.Xp, types.DTypeSpec, types.DeviceSpec) -> types.NdArray # NOQA
    """Return initialized array.

    The algorithms used to make the new values depend on the
    concrete derived classes. If the initializer has the ``dtype``
    attribute, it is used to construct the array. Otherwise,
    ``chainer.config.dtype`` is used instead. See :ref:`configuration`
    for the dtype config.

    Args:
        initializer: A callable object that takes :class:`numpy.ndarray`
            or :class:`cupy.ndarray` and edits its value.
        shape (tuple): Shape of a return array.
        xp (module): :mod:`cupy`, :mod:`numpy`, or :mod:`chainerx`.
        dtype: Dtype specifier. If omitted, ``initializer.dtype`` is used.
        device: Target device specifier. If omitted, the current device is
            used for :mod:`cupy`, and the default device is used for
            :mod:`chainerx`.

    Returns:
        numpy.ndarray, cupy.ndarray, or chainerx.ndarray: An initialized
        array.

    """
    # An explicitly requested dtype must agree with the initializer's own
    # dtype attribute, if it has one.
    dtype_attr = getattr(initializer, 'dtype', None)
    if dtype is not None and dtype_attr is not None \
            and numpy.dtype(dtype) != numpy.dtype(dtype_attr):
        raise ValueError(
            'dtype mismatch: {} != {}'.format(dtype, dtype_attr))
    if dtype is None:
        dtype = dtype_attr
    # Falls back to chainer.config.dtype when dtype is still None.
    dtype = chainer.get_dtype(dtype)

    if device is None:
        # No device given: infer one from the array module — the current
        # CUDA device for CuPy, the default ChainerX device for chainerx,
        # and the CPU for NumPy.
        if xp is cuda.cupy:
            backend_device = chainer.get_device(cuda.Device())
        elif xp is chainerx:
            backend_device = chainer.get_device(chainerx.get_default_device())
        else:
            backend_device = chainer.get_device(numpy)
    else:
        backend_device = chainer.get_device(device)
        if xp != backend_device.xp:
            raise ValueError('xp and device arguments are inconsistent.')

    if xp is chainerx:
        # Initialize with NumPy/CuPy array that shares memory with the
        # ChainerX array.
        # TODO(sonots): Directly use initializer after ChainerX
        # supports random.
        chx_device = backend_device.device  # type: ignore
        # TODO(okapies): remove 'type: ignore' when chainerx implements
        # sequence support for empty()
        array = chainerx.empty(shape, dtype=dtype, device=chx_device)  # type: ignore # NOQA
        if chx_device.backend.name == 'native':
            temp_array = _cpu._to_cpu(array)
            temp_device = cuda.DummyDevice  # type: cuda.Device
        elif chx_device.backend.name == 'cuda':
            temp_array = cuda.to_gpu(array, chx_device.index)
            temp_device = cuda.Device(chx_device.index)
        else:
            raise RuntimeError('ChainerX backend: {} is not supported.'.format(
                chx_device.backend.name))
        # Run the initializer on the temporary NumPy/CuPy view under the
        # matching CUDA (or dummy CPU) device context.
        with temp_device:
            initializer(temp_array)
        return array

    with chainer.using_device(backend_device):
        array = xp.empty(shape, dtype=dtype)
        initializer(array)
    return array