def with_linker(self, linker):
    """Exercise DimShuffle through *linker* on a battery of cases.

    Each ``(xsh, shuffle, zsh)`` triple gives an input shape, a shuffle
    pattern (``'x'`` inserts a broadcastable axis, an integer picks an
    input axis), and the expected output shape.  Also checks
    ``DimShuffle.infer_shape`` and the error cases (dropping a
    non-broadcastable axis, dropping an axis of runtime length != 1,
    and repeating an axis in the pattern).
    """
    for xsh, shuffle, zsh in [((2, 3), (1, 'x', 0), (3, 1, 2)),
                              ((1, 2, 3), (1, 2), (2, 3)),
                              ((1, 2, 1, 3), (1, 3), (2, 3)),
                              ((2, 3, 4), (2, 1, 0), (4, 3, 2)),
                              ((2, 3, 4), ('x', 2, 1, 0, 'x'),
                               (1, 4, 3, 2, 1)),
                              ((1, 4, 3, 2, 1), (3, 2, 1), (2, 3, 4)),
                              ((1, 1, 4), (1, 2), (1, 4)),
                              ((1, 1, 1), (), ()),
                              ((1,), ('x', 'x'), (1, 1))]:
        ib = [(entry == 1) for entry in xsh]
        x = TensorType('float64', ib)('x')
        e = DimShuffle(ib, shuffle)(x)
        f = copy(linker).accept(FunctionGraph([x], [e])).make_function()
        assert f(numpy.ones(xsh)).shape == zsh

        # Test that DimShuffle.infer_shape works correctly.
        x = TensorType('float64', ib)('x')
        e = DimShuffle(ib, shuffle)(x)
        f = copy(linker).accept(
            FunctionGraph([x], [e.shape])).make_function()
        # BUG FIX: the original `all(f(...)) == all(zsh)` compared two
        # booleans and was vacuously true for almost any result.
        # Compare the computed shape element-wise against zsh instead.
        assert numpy.all(f(numpy.ones(xsh)) == zsh)

    # Test when we drop an axis that is not broadcastable.
    # NOTE(review): `shuffle` here is the value leaked from the last
    # loop iteration, ('x', 'x'), which drops every input axis.
    ib = [False, True, False]
    x = TensorType('float64', ib)('x')
    self.assertRaises(ValueError, DimShuffle, ib, shuffle)

    # Test when we drop an axis whose runtime length is not 1.
    ib = [True, True, False]
    x = TensorType('float64', ib)('x')
    e = DimShuffle(ib, (1, 2))(x)
    f = copy(linker).accept(FunctionGraph([x], [e.shape])).make_function()
    self.assertRaises(TypeError, f, numpy.ones((2, 1, 4)))

    # Test that we can't take a dimension multiple times.
    xsh, shuffle, zsh = ((1, 1, 4), (0, 1, 2, 0), (1, 4))
    ib = [False, True, False]
    x = TensorType('float64', ib)('x')
    self.assertRaises(ValueError, DimShuffle, ib, shuffle)
def test_infer_shape(self):
    """Check DimShuffle's shape inference on a battery of patterns.

    Each ``(xsh, shuffle)`` pair gives an input shape and a shuffle
    pattern; ``_compile_and_check`` verifies the inferred output shape
    against the actually computed one.
    """
    cases = [((2, 3), (1, 'x', 0)),
             ((1, 2, 3), (1, 2)),
             ((1, 2, 1, 3), (1, 3)),
             ((2, 3, 4), (2, 1, 0)),
             ((2, 3, 4), ('x', 2, 1, 0, 'x')),
             ((1, 4, 3, 2, 1), (3, 2, 1)),
             ((1, 1, 4), (1, 2)),
             ((1, 1, 1), ()),
             ((1,), ('x', 'x'))]
    for xsh, shuffle in cases:
        broadcastable = [dim == 1 for dim in xsh]
        adtens = TensorType('float64', broadcastable)('x')
        adtens_val = numpy.ones(xsh)
        self._compile_and_check(
            [adtens],
            [DimShuffle(broadcastable, shuffle)(adtens)],
            [adtens_val], DimShuffle)
def make_node(self, input):
    """Build the Apply node with a GPU-typed output.

    Delegates to ``DimShuffle.make_node`` to compute the output dtype
    and broadcastable pattern, then replaces the CPU output type with
    an equivalent ``GpuArrayType`` and coerces the input to a GPU
    variable.
    """
    base = DimShuffle.make_node(self, input)
    out_var = base.outputs[0]
    otype = GpuArrayType(dtype=out_var.type.dtype,
                         broadcastable=out_var.type.broadcastable)
    input = as_gpuarray_variable(input)
    return Apply(self, [input], [otype()])
def transpose_inplace(x, **kwargs):
    "Perform a transpose on a tensor without copying the underlying storage"
    # Full axis reversal, e.g. ndim == 3 -> [2, 1, 0].
    reversed_axes = list(reversed(range(x.ndim)))
    return DimShuffle(x.broadcastable, reversed_axes, inplace=True)(x)