def test_reshape(self):
    a = np.array([[[1.1, 1.1, 1.4], [1.1, 1.1, 1.1]],
                  [[1.1, 1.1, 1.3], [1.6, 1.1, 1.2]]])
    ta = tensor.from_numpy(a)
    tb = tensor.reshape(ta, [2, 6])
    self.assertAlmostEqual(tb.shape[0], 2., places=3)
    self.assertAlmostEqual(tb.shape[1], 6., places=3)
    np.testing.assert_array_almost_equal(tensor.to_numpy(tb),
                                         a.reshape((2, 6)))
def test_gpu_6d_transpose(self, dev=gpu_dev):
    s0 = (2, 3, 4, 5, 6, 7)
    axes1 = [5, 4, 3, 2, 1, 0]
    s1 = (2, 7, 6, 5, 4, 3)
    s2 = (2, 4, 3, 5, 7, 6)
    a = np.random.random(s1)

    ta = tensor.from_numpy(a)
    ta.to_device(dev)

    ta = tensor.reshape(ta, s1)
    ta = tensor.transpose(ta, axes1)
    ta = tensor.reshape(ta, s2)

    a = np.reshape(a, s1)
    a = np.transpose(a, axes1)
    a = np.reshape(a, s2)

    np.testing.assert_array_almost_equal(tensor.to_numpy(ta), a)
def forward(self, flag, input):
    '''Reshape the input tensor into output_shape.

    Args:
        flag: not used
        input: a single input tensor

    Returns:
        output tensor (the reshaped input)
    '''
    assert isinstance(input, tensor.Tensor), 'The input must be a Tensor'
    # remember the input shape so backward() can restore it
    self.in_shape = input.shape
    # the leading dimension is inferred from the total element count;
    # use integer division so the resulting shape contains ints
    outputs = tensor.reshape(
        input, (input.size() // self.output_size,) + self.output_shape)
    return outputs
def backward(self, flag, dy):
    '''Return the gradient w.r.t. the input (dy reshaped back to the input
    shape) and an empty list, since this layer has no parameters.
    '''
    dx = tensor.reshape(dy, self.in_shape)
    return dx, []
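# For reference, a minimal NumPy sketch of the shape arithmetic used by the
# forward()/backward() pair above. The names output_shape/output_size below are
# illustrative stand-ins for the layer's configured attributes, not part of any API.
import numpy as np

output_shape = (3, 4)                       # trailing dims requested by the layer
output_size = int(np.prod(output_shape))    # 12

x = np.arange(24.0).reshape(4, 6)           # stand-in input of shape (4, 6)
in_shape = x.shape                          # remembered for the backward pass

# forward: infer the leading dimension with integer division, then reshape
lead = x.size // output_size                # 24 // 12 == 2
y = x.reshape((lead,) + output_shape)       # shape (2, 3, 4)

# backward: reshape the gradient back to the original input shape
dy = np.ones_like(y)
dx = dy.reshape(in_shape)                   # shape (4, 6)
assert dx.shape == in_shape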
def forward(self, *x):
    # if you change to other models, please update the output name here
    y = super(MyModel, self).forward(*x, last_layers=-4)[0]
    y = self.conv(y)
    # flatten to 2D (batch, channel); assumes the remaining dims multiply to 1
    y = tensor.reshape(y, y.shape[:2])
    return y
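# The final reshape in MyModel.forward assumes the dimensions after
# (batch, channel) multiply to 1, e.g. a 1x1 feature map from the last
# convolution. A small NumPy sketch of that squeeze (values are hypothetical):
import numpy as np

y = np.random.random((8, 10, 1, 1))   # batch of 8, 10 channels, 1x1 spatial map
y2d = y.reshape(y.shape[:2])          # keep only (batch, channel)
assert y2d.shape == (8, 10)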