def test_extra_ops():
    """JAX transpilation checks for `aesara.tensor.extra_ops` operations.

    Supported ops are compiled for both backends and their results compared;
    ops with no JAX implementation must raise ``NotImplementedError``.
    """
    a = matrix("a")
    a.tag.test_value = np.arange(6, dtype=config.floatX).reshape((3, 2))

    def _check(graph_out):
        # Build a graph over `a` and compare the JAX result with the Python one.
        fg = FunctionGraph([a], [graph_out])
        compare_jax_and_py(fg, [get_test_value(i) for i in fg.inputs])

    _check(aet_extra_ops.cumsum(a, axis=0))
    _check(aet_extra_ops.cumprod(a, axis=1))
    _check(aet_extra_ops.diff(a, n=2, axis=1))
    _check(aet_extra_ops.repeat(a, (3, 3), axis=1))

    # This function also cannot take symbolic input.
    c = aet.as_tensor(5)
    out = aet_extra_ops.bartlett(c)
    fgraph = FunctionGraph([], [out])
    compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs])

    # No JAX implementations exist for the following ops; the op construction
    # stays inside the `raises` block, matching where the original expected
    # the error to surface.
    with pytest.raises(NotImplementedError):
        out = aet_extra_ops.fill_diagonal(a, c)
        fgraph = FunctionGraph([a], [out])
        compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs])

    with pytest.raises(NotImplementedError):
        out = aet_extra_ops.fill_diagonal_offset(a, c, c)
        fgraph = FunctionGraph([a], [out])
        compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs])

    with pytest.raises(NotImplementedError):
        out = aet_extra_ops.Unique(axis=1)(a)
        fgraph = FunctionGraph([a], [out])
        compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs])

    # `unravel_index` returns multiple outputs, so `out` is already a list.
    indices = np.arange(np.product((3, 4)))
    out = aet_extra_ops.unravel_index(indices, (3, 4), order="C")
    fgraph = FunctionGraph([], out)
    compare_jax_and_py(
        fgraph, [get_test_value(i) for i in fgraph.inputs], must_be_device_array=False
    )

    multi_index = np.unravel_index(np.arange(np.product((3, 4))), (3, 4))
    out = aet_extra_ops.ravel_multi_index(multi_index, (3, 4))
    fgraph = FunctionGraph([], [out])
    compare_jax_and_py(
        fgraph, [get_test_value(i) for i in fgraph.inputs], must_be_device_array=False
    )

    # The inputs are "concrete", yet it still has problems?
    out = aet_extra_ops.Unique()(
        aet.as_tensor(np.arange(6, dtype=config.floatX).reshape((3, 2)))
    )
    fgraph = FunctionGraph([], [out])
    compare_jax_and_py(fgraph, [])
def check(shape, index_ndim, order): indices = np.arange(np.product(shape)) # test with scalars and higher-dimensional indices if index_ndim == 0: indices = indices[-1] elif index_ndim == 2: indices = indices[:, np.newaxis] indices_symb = aesara.shared(indices) # reference result ref = np.unravel_index(indices, shape, order=order) def fn(i, d): return function([], unravel_index(i, d, order=order)) # shape given as a tuple f_array_tuple = fn(indices, shape) f_symb_tuple = fn(indices_symb, shape) np.testing.assert_equal(ref, f_array_tuple()) np.testing.assert_equal(ref, f_symb_tuple()) # shape given as an array shape_array = np.array(shape) f_array_array = fn(indices, shape_array) np.testing.assert_equal(ref, f_array_array()) # shape given as an Aesara variable shape_symb = aesara.shared(shape_array) f_array_symb = fn(indices, shape_symb) np.testing.assert_equal(ref, f_array_symb()) # shape given as a Shape op (unravel_index will use get_vector_length # to infer the number of dimensions) indexed_array = aesara.shared(np.random.uniform(size=shape_array)) f_array_shape = fn(indices, indexed_array.shape) np.testing.assert_equal(ref, f_array_shape()) # shape testing self._compile_and_check( [], unravel_index(indices, shape_symb, order=order), [], UnravelIndex, )
def test_extra_ops():
    """Check JAX transpilation of `extra_ops`; unsupported ops must raise."""
    a = matrix("a")
    a.tag.test_value = np.arange(6, dtype=config.floatX).reshape((3, 2))

    def _transpile_and_compare(outputs, graph_inputs=(a,), **kwargs):
        # Build the graph, then compare JAX and Python backend results.
        fg = FunctionGraph(list(graph_inputs), outputs)
        compare_jax_and_py(fg, [get_test_value(i) for i in fg.inputs], **kwargs)

    _transpile_and_compare([at_extra_ops.cumsum(a, axis=0)])
    _transpile_and_compare([at_extra_ops.cumprod(a, axis=1)])
    _transpile_and_compare([at_extra_ops.diff(a, n=2, axis=1)])
    _transpile_and_compare([at_extra_ops.repeat(a, (3, 3), axis=1)])

    c = at.as_tensor(5)

    # No JAX implementation exists for these; the op construction stays inside
    # each `raises` block, matching where the original expected the error.
    with pytest.raises(NotImplementedError):
        _transpile_and_compare([at_extra_ops.fill_diagonal(a, c)])

    with pytest.raises(NotImplementedError):
        _transpile_and_compare([at_extra_ops.fill_diagonal_offset(a, c, c)])

    with pytest.raises(NotImplementedError):
        _transpile_and_compare([at_extra_ops.Unique(axis=1)(a)])

    # `unravel_index` returns multiple outputs, so the result is already a list.
    indices = np.arange(np.product((3, 4)))
    outs = at_extra_ops.unravel_index(indices, (3, 4), order="C")
    _transpile_and_compare(outs, graph_inputs=(), must_be_device_array=False)
def fn(i, d):
    # Wrap `unravel_index(i, d)` in a compiled, zero-argument Aesara function.
    # NOTE: `order` is a free variable taken from the enclosing scope.
    graph_outputs = unravel_index(i, d, order=order)
    return function([], graph_outputs)
def test_unravel_index(self):
    """Exercise `unravel_index` across shapes, index ranks, and memory
    orders, and verify that invalid inputs are rejected."""

    def check(shape, index_ndim, order):
        # Compare `unravel_index` with NumPy's reference implementation for
        # one (shape, index-dimensionality, memory-order) combination.
        indices = np.arange(np.product(shape))
        # test with scalars and higher-dimensional indices
        if index_ndim == 0:
            indices = indices[-1]
        elif index_ndim == 2:
            indices = indices[:, np.newaxis]
        indices_symb = aesara.shared(indices)

        # reference result
        ref = np.unravel_index(indices, shape, order=order)

        def fn(i, d):
            # Compile a zero-input function computing unravel_index(i, d).
            return function([], unravel_index(i, d, order=order))

        # shape given as a tuple
        f_array_tuple = fn(indices, shape)
        f_symb_tuple = fn(indices_symb, shape)
        np.testing.assert_equal(ref, f_array_tuple())
        np.testing.assert_equal(ref, f_symb_tuple())

        # shape given as an array
        shape_array = np.array(shape)
        f_array_array = fn(indices, shape_array)
        np.testing.assert_equal(ref, f_array_array())

        # shape given as an Aesara variable
        shape_symb = aesara.shared(shape_array)
        f_array_symb = fn(indices, shape_symb)
        np.testing.assert_equal(ref, f_array_symb())

        # shape given as a Shape op (unravel_index will use get_vector_length
        # to infer the number of dimensions)
        indexed_array = aesara.shared(np.random.uniform(size=shape_array))
        f_array_shape = fn(indices, indexed_array.shape)
        np.testing.assert_equal(ref, f_array_shape())

        # shape testing
        self._compile_and_check(
            [],
            unravel_index(indices, shape_symb, order=order),
            [],
            UnravelIndex,
        )

    # Cover every combination of memory order, index rank, and shape rank.
    for order in ("C", "F"):
        for index_ndim in (0, 1, 2):
            check((3, ), index_ndim, order)
            check((3, 4), index_ndim, order)
            check((3, 4, 5), index_ndim, order)

    # must specify ndim if length of dims is not fixed
    with pytest.raises(ValueError):
        unravel_index(ivector(), ivector())

    # must provide integers
    with pytest.raises(TypeError):
        unravel_index(fvector(), (3, 4))
    with pytest.raises(TypeError):
        unravel_index((3, 4), (3.4, 3.2))

    # dims must be a 1D sequence
    with pytest.raises(TypeError):
        unravel_index((3, 4), 3)
    with pytest.raises(TypeError):
        unravel_index((3, 4), ((3, 4), ))