def test_grad(self):
    """Check gradients of diff() by finite differences for every order up to nb."""
    x = T.vector('x')
    a = np.random.random(50).astype(config.floatX)

    # Default-order diff: ensure the gradient graph compiles, then verify it
    # numerically against finite differences.
    theano.function([x], T.grad(T.sum(diff(x)), x))
    utt.verify_grad(self.op, [a])

    # Repeat compilation + numerical verification for each differencing order.
    for order in range(TestDiffOp.nb):
        theano.function([x], T.grad(T.sum(diff(x, n=order)), x))
        utt.verify_grad(DiffOp(n=order), [a], eps=7e-3)
def test_diffOp(self):
    """Compare diff() against numpy.diff across both axes and orders up to nb."""
    x = T.matrix('x')
    a = np.random.random((30, 50)).astype(config.floatX)

    # The default call must agree with numpy's default behaviour.
    f = theano.function([x], diff(x))
    assert np.allclose(np.diff(a), f(a))

    # Exhaustively cover every (axis, order) combination.
    for ax in range(a.ndim):
        for order in range(TestDiffOp.nb):
            g = theano.function([x], diff(x, n=order, axis=ax))
            assert np.allclose(np.diff(a, n=order, axis=ax), g(a))
def toChw(self, position):
    """Convert corner-pair box coordinates to (cx, cy, h, w) layout."""
    samples, targetDim = K.shape(position)
    # View each row as two (x, y) corner points.
    corners = K.reshape(position, (samples, 2, 2))
    # The midpoint of the two corners is the centroid.
    centroid = K.sum(corners, axis=1) / 2.0
    # Absolute corner-to-corner difference gives the box extents.
    hw = K.abs(THEO.diff(corners, axis=1)[:, 0, :])
    chw = K.concatenate((centroid, hw), axis=1)
    # Changing from cwh to chw
    return chw[:, [0, 1, 3, 2]]
def test_infer_shape(self):
    """Shape inference of diff() must match the shapes of the computed outputs."""
    x = T.matrix("x")
    a = np.random.random((30, 50)).astype(config.floatX)

    # Default-order op first.
    self._compile_and_check([x], [self.op(x)], [a], self.op_class)

    # Then every (axis, order) combination.
    for ax in range(a.ndim):
        for order in range(TestDiffOp.nb):
            self._compile_and_check([x],
                                    [diff(x, n=order, axis=ax)],
                                    [a],
                                    self.op_class)
def toChw(self, position):
    """Return boxes as (centerX, centerY, height, width) from corner pairs."""
    samples, targetDim = K.shape(position)
    pts = K.reshape(position, (samples, 2, 2))
    # Average of the two corner points.
    center = K.sum(pts, axis=1) / 2.0
    # Per-axis corner separation, taken as a positive extent.
    extents = K.abs(THEO.diff(pts, axis=1)[:, 0, :])
    combined = K.concatenate((center, extents), axis=1)
    # Changing from cwh to chw
    result = combined[:, [0, 1, 3, 2]]
    return result
def test_infer_shape(self):
    """Verify inferred output shapes of diff() for all axes and orders."""
    x = T.matrix('x')
    a = np.random.random((30, 50)).astype(config.floatX)
    self._compile_and_check([x], [self.op(x)], [a], self.op_class)
    # Cross-product of axes and differencing orders.
    cases = ((ax, order)
             for ax in range(a.ndim)
             for order in range(TestDiffOp.nb))
    for ax, order in cases:
        self._compile_and_check(
            [x], [diff(x, n=order, axis=ax)], [a], self.op_class)