def test_topk_sanity(self, dtype, axis, sorted):
    x = vector(name="x", dtype=dtype)
    fn = aesara.function([x], topk(x, 1, axis=axis, sorted=sorted), mode=self.mode)
    assert any(
        isinstance(n.op, self.op_class) for n in fn.maker.fgraph.apply_nodes
    )
    xval = np.asarray([1]).astype(dtype)
    yval = fn(xval)
    assert yval == xval
    assert yval.dtype == xval.dtype
def test_grad(self, shp, k_, sorted):
    ndim = len(shp)
    for axis in range(-ndim, ndim):
        if isinstance(k_, str):
            k = eval(k_.replace("n", str(shp[axis])))
        else:
            k = k_
        if k == 0:
            continue

        # keep the input away from points where the gradient is undefined
        # (i.e. where some input values are equal)
        xval = gen_unique_vector(
            reduce(int.__mul__, shp), dtype=aesara.config.floatX
        ).reshape(shp)
        utt.verify_grad(
            lambda x: topk(x, k, axis=axis, sorted=sorted), [xval], eps=1e-2
        )
def test_topk_1d(self, size, k, dtype, sorted):
    if isinstance(k, str):
        k = eval(k.replace("n", str(size)))
    x = vector(name="x", dtype=dtype)
    y = topk(x, k, sorted=sorted)
    fn = aesara.function([x], y, mode=self.mode)
    assert any(
        isinstance(n.op, self.op_class) for n in fn.maker.fgraph.apply_nodes
    )
    # assert the local_useless_topk rewrite was applied properly
    assert 1 == len(fn.maker.fgraph.outputs[0].owner.outputs)

    # generate an array with all-unique values
    xval = gen_unique_vector(size, dtype)
    yval = fn(xval)
    # k > 0 selects the k largest values; k < 0 selects the |k| smallest
    idx = slice(-k, None) if k > 0 else slice(-k)
    goal = np.sort(xval)[idx]
    assert yval.dtype == goal.dtype
    utt.assert_allclose(goal, np.sort(yval))
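
# NOTE: `gen_unique_vector` is a test helper that is not included in this
# excerpt. Below is a minimal sketch of what such a helper might look like,
# under the assumption that it only needs to return `size` pairwise-distinct
# values of the requested dtype in shuffled order (so the topk gradient and
# the expected top-k selection are well defined); the actual helper in the
# test suite may differ.

import numpy as np


def gen_unique_vector(size, dtype):
    # Evenly spaced values (step 3) stay unique even after casting to an
    # integer dtype; a random offset and a random permutation avoid testing
    # against a trivially ordered input.
    vals = np.arange(size) * 3.0 + np.random.uniform(-1.0, 1.0)
    return (vals[np.random.permutation(size)] - size * 1.5).astype(dtype)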