def test_unification():
    """Verify unification/reification of meta graphs against base Theano graphs.

    NOTE(review): a later function in this file is also named
    ``test_unification`` and shadows this one at import time, so pytest never
    collects this copy — confirm whether it should be removed or renamed.
    """
    # Concrete Theano terms that logic variables will be bound to.
    x, y, a, b = tt.dvectors("xyab")
    x_s = tt.scalar("x_s")
    y_s = tt.scalar("y_s")
    c_tt = tt.constant(1, "c")
    d_tt = tt.constant(2, "d")

    # Logic variables.
    lv_x = var("x_l")
    lv_y = var("y_l")

    # Reifying a lone logic variable under a binding yields the bound term.
    assert a == reify(lv_x, {lv_x: a}).reify()

    # Reification also works inside a larger meta expression.
    meta_expr = mt.add(1, mt.mul(2, lv_x))
    assert graph_equal(reify(meta_expr, {lv_x: a}).reify(), 1 + 2 * a)

    # A lone logic variable unifies with an arbitrary base term.
    base_sum = tt.add(b, a)
    assert {lv_x: base_sum} == unify(lv_x, base_sum)

    # Unification within a single operation...
    assert b == unify(mt.add(lv_x, a), mt.add(b, a))[lv_x].reify()

    # ...and within nested operations, binding several variables at once.
    s = unify(mt.inv(mt.add(lv_x, a)), mt.inv(mt.add(b, lv_y)))
    assert s[lv_x].reify() == b
    assert s[lv_y].reify() == a

    # A fully meta pattern should unify with vector, scalar, and constant
    # parameters alike, and reifying under the resulting substitution should
    # recover the original base graph.
    add_pattern = mt.add(lv_x, lv_y)
    for base_expr in (tt.add(x, y), tt.add(x_s, y_s), tt.add(c_tt, d_tt)):
        s = unify(add_pattern, base_expr)
        assert graph_equal(base_expr, reify(add_pattern, s).reify())
def test_terms():
    """Check the `operator`/`arguments`/`term` protocol on Theano graphs.

    NOTE(review): the helpers (`mt`, `operator`, `arguments`, `term`,
    `graph_equal`) are imported outside this chunk; `operator`/`arguments`
    are deliberately re-evaluated per assertion rather than hoisted.
    """
    x, a, b = tt.dvectors("xab")
    test_expr = x + a * b

    # `operator` should agree with the metatized `Op` of the output node.
    assert mt(test_expr.owner.op) == operator(test_expr)
    # `arguments` should agree with the metatized inputs of the output node;
    # checked in both directions so both sides' `__eq__` are exercised.
    assert mt(tuple(test_expr.owner.inputs)) == tuple(arguments(test_expr))
    assert tuple(arguments(test_expr)) == mt(tuple(test_expr.owner.inputs))

    # Implicit `etuple` conversion should retain the original object
    # (within the implicitly introduced meta object, of course).
    assert test_expr == arguments(test_expr)._parent._eval_obj.obj

    # Rebuilding via `term(operator(...), arguments(...))` should reproduce
    # the original graph and its meta form.
    assert graph_equal(test_expr, term(operator(test_expr), arguments(test_expr)))
    assert mt(test_expr) == term(operator(test_expr), arguments(test_expr))

    # Same here: should retain the original object.
    assert test_expr == term(operator(test_expr), arguments(test_expr)).reify()
def test_meta_classes():
    """Exercise the Theano meta-object classes: metatize, reify, meta `Op`s,
    `Apply` nodes, and meta variables/constants.

    NOTE(review): assertions are order-dependent — several statements mutate
    meta-object state (e.g. setting `index` invalidates the base object).
    """
    vec_tt = tt.vector('vec')
    vec_m = metatize(vec_tt)
    assert vec_m.obj == vec_tt
    assert type(vec_m) == TheanoMetaTensorVariable

    # This should invalidate the underlying base object.
    vec_m.index = 0
    assert vec_m.obj is None
    # Reification after invalidation still reconstructs type and name.
    assert vec_m.reify().type == vec_tt.type
    assert vec_m.reify().name == vec_tt.name

    # The meta type mirrors the base variable's type attributes.
    vec_type_m = vec_m.type
    assert type(vec_type_m) == TheanoMetaTensorType
    assert vec_type_m.dtype == vec_tt.dtype
    assert vec_type_m.broadcastable == vec_tt.type.broadcastable
    assert vec_type_m.name == vec_tt.type.name

    assert graph_equal(tt.add(1, 2), mt.add(1, 2).reify())

    # Reifying an expression containing a logic variable keeps it meta.
    meta_var = mt.add(1, var()).reify()
    assert isinstance(meta_var, TheanoMetaTensorVariable)
    assert isinstance(meta_var.owner.op.obj, theano.Op)
    assert isinstance(meta_var.owner.inputs[0].obj, tt.TensorConstant)

    # `metatize` maps over lists element-wise.
    test_vals = [1, 2.4]
    meta_vars = metatize(test_vals)
    assert meta_vars == [metatize(x) for x in test_vals]
    # TODO: Do we really want meta variables to be equal to their
    # reified base objects?
    # assert meta_vars == [tt.as_tensor_variable(x) for x in test_vals]

    # A logic variable can be used as a meta variable's name.
    name_mt = var()
    add_tt = tt.add(0, 1)
    add_mt = mt.add(0, 1, name=name_mt)

    assert add_mt.name is name_mt
    assert add_tt.type == add_mt.type.reify()
    assert mt(add_tt.owner) == add_mt.owner
    # assert isvar(add_mt._obj)

    # Let's confirm that we can dynamically create a new meta `Op` type
    test_mat = np.c_[[2, 3], [4, 5]]
    svd_tt = tt.nlinalg.SVD()(test_mat)
    # First, can we create one from a new base `Op` instance?
    svd_op_mt = mt(tt.nlinalg.SVD())
    svd_mt = svd_op_mt(test_mat)

    assert svd_mt[0].owner.nin == 1
    assert svd_mt[0].owner.nout == 3

    # All three SVD outputs share the same owner and round-trip by index.
    svd_outputs = svd_mt[0].owner.outputs
    assert svd_outputs[0] == svd_mt[0]
    assert svd_outputs[1] == svd_mt[1]
    assert svd_outputs[2] == svd_mt[2]

    assert mt(svd_tt) == svd_mt

    # Next, can we create one from a base `Op` type/class?
    svd_op_type_mt = mt.nlinalg.SVD
    assert isinstance(svd_op_type_mt, type)
    assert issubclass(svd_op_type_mt, TheanoMetaOp)

    # svd_op_inst_mt = svd_op_type_mt(tt.nlinalg.SVD())
    # svd_op_inst_mt(test_mat) == svd_mt

    # Apply node with logic variable as outputs
    svd_apply_mt = TheanoMetaApply(svd_op_mt, [test_mat], outputs=var('out'))
    assert isinstance(svd_apply_mt.inputs, tuple)
    assert isinstance(svd_apply_mt.inputs[0], MetaSymbol)
    assert isvar(svd_apply_mt.outputs)
    assert svd_apply_mt.nin == 1
    # `nout` is unknown when the outputs are a logic variable.
    assert svd_apply_mt.nout is None

    # Apply node with logic variable as inputs
    svd_apply_mt = TheanoMetaApply(svd_op_mt, var('in'), outputs=var('out'))
    assert svd_apply_mt.nin is None

    # A meta variable with None index
    var_mt = TheanoMetaVariable(svd_mt[0].type, svd_mt[0].owner, None, None)
    assert var_mt.index is None
    # Reification fills in the index (and caches it on the meta variable).
    reified_var_mt = var_mt.reify()
    assert isinstance(reified_var_mt, TheanoMetaTensorVariable)
    assert reified_var_mt.index == 0
    assert var_mt.index == 0
    assert reified_var_mt == svd_mt[0]

    # A meta variable with logic variable index
    var_mt = TheanoMetaVariable(svd_mt[0].type, svd_mt[0].owner, var('index'), None)
    assert isvar(var_mt.index)
    reified_var_mt = var_mt.reify()
    # A logic-variable index is preserved on the meta object...
    assert isvar(var_mt.index)
    # ...while the reified result carries the concrete index.
    assert reified_var_mt.index == 0

    # Meta constants compare by value.
    const_mt = mt(1)
    assert isinstance(const_mt, TheanoMetaTensorConstant)
    assert const_mt != mt(2)
def test_unification():
    """Verify unification/reification of meta graphs, including the
    `variables` context manager for treating base terms as logic variables.

    NOTE(review): this redefines (and shadows) an earlier function of the
    same name in this file — confirm the earlier copy should be removed.
    The `with variables(...)` grouping below was reconstructed from
    whitespace-mangled source; verify against upstream history.
    """
    # Concrete Theano terms used as unification targets.
    x, y, a, b = tt.dvectors('xyab')
    x_s = tt.scalar('x_s')
    y_s = tt.scalar('y_s')
    c_tt = tt.constant(1, 'c')
    d_tt = tt.constant(2, 'd')

    # Logic variables.
    x_l = var('x_l')
    y_l = var('y_l')

    # Reifying a logic variable under a binding yields the bound term.
    assert a == reify(x_l, {x_l: a}).reify()
    test_expr = mt.add(1, mt.mul(2, x_l))
    test_reify_res = reify(test_expr, {x_l: a})
    assert graph_equal(test_reify_res.reify(), 1 + 2 * a)

    # A lone logic variable unifies with an arbitrary base term.
    z = tt.add(b, a)
    assert {x_l: z} == unify(x_l, z)
    assert b == unify(mt.add(x_l, a), mt.add(b, a))[x_l].reify()

    # Unification through nested operations binds multiple variables.
    res = unify(mt.inv(mt.add(x_l, a)), mt.inv(mt.add(b, y_l)))
    assert res[x_l].reify() == b
    assert res[y_l].reify() == a

    # TODO: This produces a `DimShuffle` so that the scalar constant `1`
    # will match the dimensions of the vector `b`. That `DimShuffle` isn't
    # handled by the logic variable form.
    # assert unify(mt.add(x_l, 1), mt.add(b_l, 1))[x] == b
    with variables(x):
        assert unify(x + 1, b + 1)[x].reify() == b

    assert unify(mt.add(x_l, a), mt.add(b, a))[x_l].reify() == b

    # `variables` temporarily treats the base term `x` as a logic variable.
    with variables(x):
        assert unify(x, b)[x] == b
        assert unify([x], [b])[x] == b
        assert unify((x, ), (b, ))[x] == b
        assert unify(x + 1, b + 1)[x].reify() == b
        assert unify(x + a, b + a)[x].reify() == b

    with variables(x):
        assert unify(a + b, a + x)[x].reify() == b

    mt_expr_add = mt.add(x_l, y_l)

    # The parameters are vectors
    tt_expr_add_1 = tt.add(x, y)
    assert graph_equal(
        tt_expr_add_1,
        reify(mt_expr_add, unify(mt_expr_add, tt_expr_add_1)).reify())

    # The parameters are scalars
    tt_expr_add_2 = tt.add(x_s, y_s)
    assert graph_equal(
        tt_expr_add_2,
        reify(mt_expr_add, unify(mt_expr_add, tt_expr_add_2)).reify())

    # The parameters are constants
    tt_expr_add_3 = tt.add(c_tt, d_tt)
    assert graph_equal(
        tt_expr_add_3,
        reify(mt_expr_add, unify(mt_expr_add, tt_expr_add_3)).reify())