def test_make_node_shared(self):
    """Make sure we can provide `OpFromGraph.make_node` new shared inputs
    and get a valid `OpFromGraph`."""
    x = at.scalar("x")
    y = shared(1.0, name="y")

    test_ofg = OpFromGraph([x], [x + y], on_unused_input="ignore")
    assert test_ofg.shared_inputs == [y]

    out = test_ofg(x)

    y_clone = y.clone()
    assert y_clone != y
    y_clone.name = "y_clone"

    out_new = test_ofg.make_node(*(out.owner.inputs[:1] + [y_clone])).outputs[0]

    assert "on_unused_input" in out_new.owner.op.kwargs
    assert out_new.owner.op.shared_inputs == [y_clone]

    out_fn = function([x], out_new)
    assert np.array_equal(out_fn(1.0), 2.0)

    y_clone.set_value(2.0)
    assert np.array_equal(out_fn(1.0), 3.0)
def test_clone(self):
    x, y, z = matrices("xyz")

    ofg = OpFromGraph([x], [2 * x])

    ofg_clone = ofg.clone()

    assert ofg_clone.fgraph is not ofg.fgraph
    assert ofg_clone.fgraph.outputs != ofg.fgraph.outputs
    assert equal_computations(ofg_clone.fgraph.outputs, ofg.fgraph.outputs)
def __init__(self): x, y, z = scalars("xyz") e = x * y op = OpFromGraph([x, y], [e]) e2 = op(x, y) + z op2 = OpFromGraph([x, y, z], [e2]) e3 = op2(x, y, z) + z self.inputs = [x, y, z] self.outputs = [e3]
def test_debugprint():
    x, y, z = matrices("xyz")
    e = x + y * z
    op = OpFromGraph([x, y, z], [e])
    out = op(x, y, z)

    output_str = debugprint(out, file="str")
    lines = output_str.split("\n")

    exp_res = """OpFromGraph{inline=False} [id A]
 |x [id B]
 |y [id C]
 |z [id D]

Inner graphs:

OpFromGraph{inline=False} [id A]
 >Elemwise{add,no_inplace} [id E]
 > |*0-<TensorType(float64, (None, None))> [id F]
 > |Elemwise{mul,no_inplace} [id G]
 >   |*1-<TensorType(float64, (None, None))> [id H]
 >   |*2-<TensorType(float64, (None, None))> [id I]
"""

    for truth, out in zip(exp_res.split("\n"), lines):
        assert truth.strip() == out.strip()
def __init__(self): x, y, z = scalars("xyz") e = at.sigmoid((x + y + z)**2) op = OpFromGraph([x, y, z], [e]) e2 = op(x, y, z) self.inputs = [x, y, z] self.outputs = [e2]
def test_compute_test_value(self):
    x = scalar("x")
    x.tag.test_value = np.array(1.0, dtype=config.floatX)
    op = OpFromGraph([x], [x**3])

    y = scalar("y")
    y.tag.test_value = np.array(1.0, dtype=config.floatX)

    f = op(y)
    grad_f = grad(f, y)
    assert grad_f.tag.test_value is not None
def test_shared_to_nonshared_input(self):
    """Make sure that shared variables can be replaced with non-shared variables."""
    x = at.scalar("x")
    y = shared(1.0, name="y")

    test_ofg = OpFromGraph([], [y])
    assert test_ofg.shared_inputs == [y]

    out_1_fn = function([], test_ofg())
    res_1 = out_1_fn()
    assert np.array_equal(res_1, 1.0)

    test_ofg_new = test_ofg.make_node(x)
    assert test_ofg_new.op.shared_inputs == []

    out_2_fn = function([x], test_ofg_new.outputs[0])
    res_2 = out_2_fn(np.array(1.0, dtype=config.floatX))
    assert np.array_equal(res_2, 1.0)
def test_shared_with_constant_input(self):
    """Make sure that a constant input can be given to an `OpFromGraph` instance."""
    x = at.scalar("x")
    y = shared(1.0, name="y")

    test_ofg = OpFromGraph([x], [x + y])
    assert test_ofg.shared_inputs == [y]

    out = test_ofg(at.as_tensor(1.0, dtype=config.floatX))

    out_fn = function([], out)
    assert np.array_equal(out_fn(), 2.0)
def test_infer_shape(self): # test infer shape does not need to against inline case # since the Op is remove during optimization phase x = matrix("x") y = matrix("y") o1 = x + y o2 = x * y op_graph = OpFromGraph([x, y], [o1, o2]) q = matrix("q") p = matrix("p") self._compile_and_check( [q, p], op_graph(q, p), [ np.ones([3, 4], dtype=config.floatX), np.ones([3, 4], dtype=config.floatX), ], OpFromGraph, ) # Make sure `OpFromGraph.infer_shape` can handle objects without a # shape x = MyVariable("x") y = matrix("y") z = as_tensor([1, 2]) op_graph = OpFromGraph([x, y, z], [x, y]) op_var = op_graph(x, y, z) fg = FunctionGraph(outputs=[op_var[1]], clone=False) opt_res = optimize_graph(fg, custom_opt=ShapeOptimizer()) assert opt_res.shape_feature.shape_of[x] is None assert opt_res.shape_feature.shape_of[z][0].data == 2
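# For context, `MyVariable` above is a variable of a custom `Type` that
# carries no static shape information. A minimal sketch of such a definition
# (an assumption for illustration; the real helper lives in the shared test
# utilities) could look like:
#
#     from aesara.graph.basic import Variable
#     from aesara.graph.type import Type
#
#     class MyType(Type):
#         def filter(self, data, strict=False, allow_downcast=None):
#             return data
#
#     def MyVariable(name):
#         return Variable(MyType(), None, None, name=name)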
def local_transform(fgraph, node):
    if node in nodes_seen:
        return False

    # Importing `Scan` into module scope would be circular
    from aesara.compile.builders import OpFromGraph
    from aesara.scan.op import Scan

    if isinstance(node.op, (Scan, OpFromGraph)):
        # Recurse on the inner graph
        (
            new_inner_inputs,
            new_outer_inputs,
            new_inner_outputs,
        ) = _map_variables_inner(
            wrapped_replacer,
            inner_inputs=node.op.inputs,
            outer_inputs=node.inputs,
            inner_outputs=node.op.outputs,
            containing_op=node.op,
        )
        # Reinstantiate the op
        if isinstance(node.op, Scan):
            new_op = Scan(
                new_inner_inputs,
                new_inner_outputs,
                node.op.info,
                node.op.mode,
                # FIXME: infer this someday?
                typeConstructor=None,
            )
        elif isinstance(node.op, OpFromGraph):
            new_op = OpFromGraph(new_inner_inputs, new_inner_outputs, **node.op.kwargs)

        # Make a new node to replace the old one
        new_node = new_op.make_node(*new_outer_inputs)
        nodes_seen.add(new_node)
        return new_node.outputs
    else:
        nodes_seen.add(node)
        replacements = [wrapped_replacer(o) for o in node.outputs]

        # Add inputs to replacement graphs as inputs to this `fgraph`
        for i in graph_inputs(replacements):
            fgraph.add_input(i)

        return replacements
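# A sketch of the replacer contract assumed above (illustrative, not from the
# source): `wrapped_replacer` maps each output variable to its replacement,
# and the transform recurses through `Scan`/`OpFromGraph` inner graphs so the
# replacement also applies inside them. For example, a replacer that swaps
# one variable for another:
#
#     a, b = at.scalars("ab")
#
#     def replacer(var):
#         return b if var is a else var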
def test_nested_OpFromGraph_shared(self):
    y = aesara.shared(1.0, name="y")

    test_ofg = OpFromGraph([], [y])

    def inner_func(x):
        out, _ = aesara.scan(lambda: test_ofg(), n_steps=x)
        return out

    out, _ = aesara.scan(inner_func, sequences=[at.arange(1, 2)])

    _ = aesara.function([], test_ofg())

    out_fn = aesara.function([], out)
    assert np.array_equal(out_fn(), [[1.0]])
def test_outputs_consistency(self):
    """Make sure that `OpFromGraph.fn` doesn't change the value of
    `OpFromGraph.inner_outputs`."""
    x = scalar("x")
    op = OpFromGraph([x], [x**2 / x], mode="FAST_RUN")

    # Confirm that the inner-graph is as expected
    assert equal_computations(op.inner_outputs, [x**2 / x], op.inner_inputs, [x])

    # These outputs of the compiled `op.fgraph` should differ from the
    # original, uncompiled `op.fgraph` outputs
    fn = op.fn
    new_inputs = fn.maker.fgraph.inputs
    new_outputs = fn.maker.fgraph.outputs
    assert not equal_computations(new_outputs, [x**2 / x], new_inputs, [x])

    # The original `op.fgraph` outputs should stay the same, though
    assert equal_computations(op.inner_outputs, [x**2 / x], op.inner_inputs, [x])
def test_OpFromGraph_shared(self):
    """Make sure that a simple `OpFromGraph` with a shared variable can be
    pushed out of a `Scan`."""
    y = shared(1.0, name="y")
    test_ofg = OpFromGraph([], [1 + y])

    def inner_func():
        return test_ofg()

    out, out_updates = aesara.scan(inner_func, n_steps=10)

    out_fn = function([], out, updates=out_updates)

    res = out_fn()
    assert np.array_equal(res, np.repeat(2.0, 10))

    y.set_value(2.0)

    res = out_fn()
    assert np.array_equal(res, np.repeat(3.0, 10))
def test_valid_input(self):
    x, y, z = matrices("xyz")

    with pytest.raises(ValueError, match="Expected at least.*"):
        OpFromGraph([x], [x])()

    with pytest.raises(ValueError, match=r"Expected 1 input\(s\)"):
        OpFromGraph([x], [x]).make_node()

    with pytest.raises(TypeError):
        OpFromGraph((x,), (x,))

    with pytest.raises(TypeError):
        OpFromGraph([1], [1])

    with pytest.raises(TypeError):
        OpFromGraph([x, as_tensor(1)], [x])

    with pytest.raises(TypeError):
        OpFromGraph([shared(1)], [1])

    with pytest.raises(NotImplementedError):
        OpFromGraph([x], [x], updates={})
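# For contrast, a minimal valid construction (an illustrative sketch, not one
# of the original test cases): inputs and outputs are lists of non-constant,
# non-shared variables, and no `updates` are passed.
#
#     x, y = matrices("xy")
#     op = OpFromGraph([x, y], [x + y])
#     out = op(x, y)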
def MvNormalLogp():
    """Compute the log pdf of a multivariate normal distribution.

    This should be used in MvNormal.logp once Theano#5908 is released.

    Parameters
    ----------
    cov: aet.matrix
        The covariance matrix.
    delta: aet.matrix
        Array of deviations from the mean.
    """
    cov = aet.matrix("cov")
    cov.tag.test_value = floatX(np.eye(3))
    delta = aet.matrix("delta")
    delta.tag.test_value = floatX(np.zeros((2, 3)))

    solve_lower = Solve(A_structure="lower_triangular")
    solve_upper = Solve(A_structure="upper_triangular")
    cholesky = Cholesky(lower=True, on_error="nan")

    n, k = delta.shape
    n, k = f(n), f(k)

    chol_cov = cholesky(cov)
    diag = aet.nlinalg.diag(chol_cov)
    ok = aet.all(diag > 0)

    chol_cov = aet.switch(ok, chol_cov, aet.fill(chol_cov, 1))
    delta_trans = solve_lower(chol_cov, delta.T).T

    result = n * k * aet.log(f(2) * np.pi)
    result += f(2) * n * aet.sum(aet.log(diag))
    result += (delta_trans ** f(2)).sum()
    result = f(-0.5) * result
    logp = aet.switch(ok, result, -np.inf)

    def dlogp(inputs, gradients):
        (g_logp,) = gradients
        cov, delta = inputs

        g_logp.tag.test_value = floatX(1.0)
        n, k = delta.shape

        chol_cov = cholesky(cov)
        diag = aet.nlinalg.diag(chol_cov)
        ok = aet.all(diag > 0)

        chol_cov = aet.switch(ok, chol_cov, aet.fill(chol_cov, 1))
        delta_trans = solve_lower(chol_cov, delta.T).T

        inner = n * aet.eye(k) - aet.dot(delta_trans.T, delta_trans)
        g_cov = solve_upper(chol_cov.T, inner)
        g_cov = solve_upper(chol_cov.T, g_cov.T)

        tau_delta = solve_upper(chol_cov.T, delta_trans.T)
        g_delta = tau_delta.T

        g_cov = aet.switch(ok, g_cov, -np.nan)
        g_delta = aet.switch(ok, g_delta, -np.nan)

        return [-0.5 * g_cov * g_logp, -g_delta * g_logp]

    return OpFromGraph([cov, delta], [logp], grad_overrides=dlogp, inline=True)
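# A minimal usage sketch (the variable names here are illustrative
# assumptions, not part of this module): the returned `OpFromGraph` behaves
# like any other `Op`, and `aesara.grad` dispatches to the `dlogp` override.
#
#     import aesara
#
#     logp_op = MvNormalLogp()
#     cov = aet.matrix("cov")
#     delta = aet.matrix("delta")
#     logp = logp_op(cov, delta)
#     g_cov, g_delta = aesara.grad(logp, [cov, delta])
#     logp_fn = aesara.function([cov, delta], [logp, g_cov, g_delta])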
def test_missing_input(self):
    x = at.lscalar("x")

    with pytest.raises(MissingInputError):
        OpFromGraph([], [x])