def test_applyo():
    """Exercise `applyo` in every direction: solving for the term, the rator, or the rands."""
    q = var()

    # Ground rator and rands; solve for the applied term.
    assert run(0, q, applyo("add", (1, 2, 3), q)) == (("add", 1, 2, 3),)
    # Ground rands and term; solve for the rator.
    assert run(0, q, applyo(q, (1, 2, 3), ("add", 1, 2, 3))) == ("add",)
    # Ground rator and term; solve for the rands.
    assert run(0, q, applyo("add", q, ("add", 1, 2, 3))) == ((1, 2, 3),)

    rator_lv, rands_lv, res_lv = var(), var(), var()

    from operator import add

    # A callable rator with ground rands is actually evaluated.
    assert run(0, res_lv, applyo(add, (1, 2), res_lv)) == (3,)
    assert run(0, res_lv, applyo(add, etuple(1, 2), res_lv)) == (3,)
    # With unground rands, the result stays a cons pair.
    assert run(0, res_lv, applyo(add, rator_lv, res_lv)) == (cons(add, rator_lv),)

    for form in (
        (1, 2, 3),
        (add, 1, 2),
        [1, 2, 3],
        [add, 1, 2],
        etuple(1, 2, 3),
        etuple(add, 1, 2),
    ):
        rator, rands = operator(form), arguments(form)
        assert run(0, rator_lv, applyo(rator, rands, rator_lv)) == (term(rator, rands),)
        # Just acts like `conso` here
        assert run(0, rator_lv, applyo(rator, rator_lv, form)) == (arguments(form),)
        assert run(0, rator_lv, applyo(rator_lv, rands, form)) == (operator(form),)

    # Just acts like `conso` here, too
    assert run(0, res_lv, applyo(rator_lv, rands_lv, res_lv)) == (cons(rator_lv, rands_lv),)

    # with pytest.raises(ConsError):
    assert run(0, rator_lv, applyo(rator_lv, rands_lv, object())) == ()

    assert run(0, rator_lv, applyo(1, 2, rator_lv)) == ()
def test_terms():
    """Check that a Theano graph round-trips through `operator`/`arguments`/`term`."""
    xv, av, bv = tt.dvectors('xab')
    expr = xv + av * bv

    # The term's operator/arguments are the meta forms of the apply node's parts.
    assert mt(expr.owner.op) == operator(expr)
    assert mt(tuple(expr.owner.inputs)) == arguments(expr)

    # Rebuilding the term from its parts yields an equivalent graph.
    assert graph_equal(expr, term(operator(expr), arguments(expr)))
def test_unifiable_with_term():
    """`MyTerm` should decompose, rebuild, and unify through the term interface."""
    add = Op('add')
    node = MyTerm(add, (1, 2))

    assert arguments(node) == (1, 2)
    assert operator(node) == add
    # Rebuilding from operator/arguments yields an equal term.
    assert term(operator(node), arguments(node)) == node

    lv = var('x')
    assert unify(MyTerm(add, (1, lv)), MyTerm(add, (1, 2)), {}) == {lv: 2}
def test_etuple_term(): """Test `tuple_expression` and `etuple` interaction with `term` """ # Make sure that we don't lose underlying `eval_obj`s # when taking apart and re-creating expression tuples # using `kanren`'s `operator`, `arguments` and `term` # functions. e1 = etuple(add, (object(), ), (object(), )) e1_obj = e1.eval_obj e1_dup = (operator(e1), ) + arguments(e1) assert isinstance(e1_dup, ExpressionTuple) assert e1_dup.eval_obj == e1_obj e1_dup_2 = term(operator(e1), arguments(e1)) assert e1_dup_2 == e1_obj # Take apart an already constructed/evaluated meta # object. e2 = mt.add(mt.vector(), mt.vector()) e2_et = tuple_expression(e2) assert isinstance(e2_et, ExpressionTuple) e2_et_expect = etuple( mt.add, etuple(mt.TensorVariable, etuple(mt.TensorType, 'float64', (False, ), None), None, None, None), etuple(mt.TensorVariable, etuple(mt.TensorType, 'float64', (False, ), None), None, None, None), ) assert e2_et == e2_et_expect assert e2_et.eval_obj is e2 # Make sure expression expansion works from Theano objects, too. # First, do it manually. tt_expr = tt.vector() + tt.vector() mt_expr = mt(tt_expr) assert mt_expr.obj is tt_expr assert mt_expr.reify() is tt_expr e3 = tuple_expression(mt_expr) assert e3 == e2_et assert e3.eval_obj is mt_expr assert e3.eval_obj.reify() is tt_expr # Now, through `tuple_expression` e2_et_2 = tuple_expression(tt_expr) assert e2_et_2 == e3 == e2_et assert isinstance(e2_et_2, ExpressionTuple) assert e2_et_2.eval_obj.reify() == tt_expr
def test_unifiable_with_term():
    """`Node` objects should decompose, unify, and reify through the term API."""
    add = Operator("add")
    node = Node(add, (1, 2))

    assert arguments(node) == (1, 2)
    assert operator(node) == add
    assert term(operator(node), arguments(node)) == node

    lv = var()
    subst = unify(Node(add, (1, lv)), Node(add, (1, 2)), {})
    assert subst == {lv: 2}
    # Substituting back produces the fully ground node.
    assert reify(Node(add, (1, lv)), subst) == Node(add, (1, 2))
def test_etuple_term():
    """Check that `operator`/`arguments`/`term` preserve an `etuple`'s `eval_obj`."""
    # An expression tuple whose evaluated object we want to keep
    # through decomposition and re-assembly.
    et = etuple(add, (object(), ), (object(), ))
    evaled = et.eval_obj

    # Re-assemble via concatenation of the rator and rands.
    rebuilt = (operator(et), ) + arguments(et)
    assert isinstance(rebuilt, ExpressionTuple)
    assert rebuilt.eval_obj == evaled

    # Re-assemble via `term`.
    rebuilt_term = term(operator(et), arguments(et))
    assert rebuilt_term == evaled
def test_terms():
    """Theano graphs should round-trip through `operator`/`arguments`/`term`."""
    xv, av, bv = tt.dvectors("xab")
    expr = xv + av * bv

    assert mt(expr.owner.op) == operator(expr)
    assert mt(tuple(expr.owner.inputs)) == tuple(arguments(expr))
    assert tuple(arguments(expr)) == mt(tuple(expr.owner.inputs))

    # Implicit `etuple` conversion should retain the original object
    # (within the implicitly introduced meta object, of course).
    assert expr == arguments(expr)._parent._eval_obj.obj

    assert graph_equal(expr, term(operator(expr), arguments(expr)))
    assert mt(expr) == term(operator(expr), arguments(expr))

    # Same here: should retain the original object.
    assert expr == term(operator(expr), arguments(expr)).reify()
def buildo(op, args, obj):
    """Construct a goal relating an operator, its arguments, and the term they build."""
    if isvar(obj):
        # The term is unknown: either relate it structurally (when the
        # operator and/or arguments are themselves unground) or build it
        # outright from the ground parts.
        if isvar(args) or isvar(op):
            return conso(op, args, obj)
        return eq(obj, term(op, args))

    # The term is ground: unify `op`/`args` against its actual parts.
    if not isvar(args):
        args = etuplize(args, shallow=True)
    obj_op, obj_args = operator(obj), arguments(obj)
    return lallgreedy(eq(op, obj_op), eq(args, obj_args))
def tuple_expression(x):
    """Return a tuple of rand and rators that, when evaluated, would
    construct the object; otherwise, return the object itself.
    """
    try:
        # `operator`/`arguments` can throw an `IndexError` if `x` is an
        # empty `list`/`tuple`.
        rator = operator(x)
        rands = arguments(x)
    except (IndexError, NotImplementedError):
        # `x` has no term decomposition; hand it back untouched.
        return x

    assert isinstance(rands, (list, tuple))

    # Recurse into the rands and keep `x` as the cached evaluated object.
    sub_exprs = tuple(tuple_expression(r) for r in rands)
    return etuple(rator, *sub_exprs, eval_obj=x)
def etuplize(x, shallow=False, return_bad_args=False):
    """Return an expression-tuple for an object (i.e. a tuple of rand and rators).

    When evaluated, the rand and rators should [re-]construct the object.
    When the object cannot be given such a form, it is simply converted to
    an `ExpressionTuple` and returned.

    Parameters
    ----------
    x: object
        Object to convert to expression-tuple form.
    shallow: bool
        Whether or not to do a shallow conversion.
    return_bad_args: bool
        Return the passed argument when its type is not appropriate,
        instead of raising an exception.

    """
    # Already in expression-tuple form; nothing to do.
    if isinstance(x, ExpressionTuple):
        return x
    # Plain cons-like sequences are converted element-wise.
    elif x is not None and isinstance(x, (ConsNull, ConsPair)):
        return etuple(*x)

    try:
        # This can throw an `IndexError` if `x` is an empty
        # `list`/`tuple`.
        op = operator(x)
        args = arguments(x)
    except (IndexError, NotImplementedError):
        # Mark the decomposition as unavailable; checked just below.
        op = None
        args = None

    if not callable(op) or not isinstance(args, (ConsNull, ConsPair)):
        if return_bad_args:
            return x
        else:
            raise TypeError(f"x is neither a non-str Sequence nor term: {type(x)}")

    if shallow:
        # Keep the operator and arguments as-is.
        et_op = op
        et_args = args
    else:
        # Recursively etuplize the operator and each argument; bad
        # sub-terms are passed through unchanged.
        et_op = etuplize(op, return_bad_args=True)
        et_args = tuple(etuplize(a, return_bad_args=True) for a in args)

    # Cache `x` as the tuple's evaluated object.
    res = etuple(et_op, *et_args, eval_obj=x)
    return res
def reify_all_terms(obj, s=None):
    """Recursively reifies all terms tuples/lists with some awareness for meta objects.

    Parameters
    ----------
    obj: object
        Object to reify; term-decomposable objects are rebuilt from their
        recursively reified operator and arguments.
    s: Mapping, optional
        Substitution used for reification; defaults to an empty mapping.
    """
    try:
        if isinstance(obj, MetaSymbol):
            # Avoid using `operator`/`arguments` and unnecessarily
            # breaking apart meta objects and the base objects they
            # hold onto (i.e. their reified forms).
            res = obj.reify()
            if not MetaSymbol.is_meta(res):
                return res

        # Decompose, recurse into both parts, and rebuild the term.
        op, args = operator(obj), arguments(obj)
        op = reify_all_terms(op, s)
        args = reify_all_terms(args, s)
        return term(op, args)
    except (IndexError, NotImplementedError):
        # `obj` is not term-decomposable; fall back to plain reification.
        return reify(obj, s or {})
def test_operator():
    """The operator of a term tuple is its first element."""
    rator = operator(('add', 1, 2, 3))
    assert rator == 'add'
lambda u, v, s: unify_MetaSymbol(u, metatize(v), s),
)
# NOTE(review): the lambda above closes a `_unify.add(...)` call whose
# opening is outside this view — confirm against the full file.
# Unify a raw TF object on the left by first lifting it to its meta form.
_unify.add(
    (tf_class_abstractions, TFlowMetaSymbol, Mapping),
    lambda u, v, s: unify_MetaSymbol(metatize(u), v, s),
)
# Unify two raw TF objects by lifting both sides.
_unify.add(
    (tf_class_abstractions, tf_class_abstractions, Mapping),
    lambda u, v, s: unify_MetaSymbol(metatize(u), metatize(v), s),
)


def _reify_TFlowClasses(o, s):
    # Reify a raw TF object through its meta counterpart.
    meta_obj = metatize(o)
    return reify(meta_obj, s)


_reify.add((tf_class_abstractions, Mapping), _reify_TFlowClasses)

# Route the cons/term protocols for raw `tf.Tensor`s through their
# meta-object equivalents.
_car.add((tf.Tensor,), lambda x: operator(metatize(x)))
operator.add((tf.Tensor,), lambda x: operator(metatize(x)))

_cdr.add((tf.Tensor,), lambda x: arguments(metatize(x)))
arguments.add((tf.Tensor,), lambda x: arguments(metatize(x)))

etuplize.add(tf_class_abstractions, lambda x, shallow=False: etuplize(metatize(x), shallow))

# This module only registers dispatch implementations; it exports nothing.
__all__ = []
def test_etuple_term():
    # Objects without a term form are returned as-is when
    # `return_bad_args` is set.
    assert etuplize("blah", return_bad_args=True) == "blah"

    a = tf.compat.v1.placeholder(tf.float64, name='a')
    b = tf.compat.v1.placeholder(tf.float64, name='b')

    a_mt = mt(a)
    # Drop the cached base object so reification must rebuild it.
    a_mt._obj = None
    a_reified = a_mt.reify()
    assert isinstance(a_reified, tf.Tensor)
    assert a_reified.shape.dims is None

    # `OpDef`s alone are not etuplizable.
    with pytest.raises(TypeError):
        etuplize(a_mt.op.op_def)

    a_nd_e = etuplize(a_mt.op.node_def, shallow=False)
    assert a_nd_e[0] is TFlowMetaNodeDef
    assert a_nd_e[1] == a_mt.op.node_def.op
    assert a_nd_e[2] == a_mt.op.node_def.name
    assert a_nd_e[3] == a_mt.op.node_def.attr

    # A deep etuplization
    test_e = etuplize(a_mt, shallow=False)
    assert len(test_e) == 1
    assert len(test_e[0]) == 3
    assert test_e[0][0] is TFlowMetaOperator
    assert test_e[0][1] is a_mt.op.op_def
    assert test_e[0][2] == a_nd_e

    assert test_e.eval_obj is a_mt
    # Clear the cached evaluation and force re-evaluation in a fresh graph.
    test_e._eval_obj = ExpressionTuple.null
    with tf.Graph().as_default():
        a_evaled = test_e.eval_obj
    assert a_evaled == a_mt

    # A shallow etuplization
    test_e = etuplize(a_mt, shallow=True)
    assert len(test_e) == 1
    assert isinstance(test_e[0], TFlowMetaOperator)
    assert test_e[0].op_def is a_mt.op.op_def
    assert test_e[0].node_def is a_mt.op.node_def

    assert test_e.eval_obj is a_mt
    test_e._eval_obj = ExpressionTuple.null
    with tf.Graph().as_default():
        a_evaled = test_e.eval_obj
    assert a_evaled == a_mt

    a_reified = a_evaled.reify()
    assert isinstance(a_reified, tf.Tensor)
    assert a_reified.shape.dims is None

    # Now, consider a meta graph with operator arguments
    add_mt = mt.AddV2(a, b)
    add_et = etuplize(add_mt, shallow=True)
    assert isinstance(add_et, ExpressionTuple)
    assert add_et[0].op_def == mt.AddV2.op_def

    # Check `kanren`'s term framework
    assert isinstance(operator(add_mt), TFlowMetaOperator)
    assert arguments(add_mt) == add_mt.op.inputs

    assert operator(add_mt)(*arguments(add_mt)) == add_mt
    assert isinstance(add_et[0], TFlowMetaOperator)
    assert add_et[1:] == add_mt.op.inputs
    assert operator(add_mt)(*arguments(add_mt)) == add_mt
    assert term(operator(add_mt), arguments(add_mt)) == add_mt

    # Make sure things work with logic variables
    add_lvar_mt = TFlowMetaTensor(var(), var(), [1, 2])
    # TODO FIXME: This is bad
    assert operator(add_lvar_mt) is None
    # assert operator(add_lvar_mt) == add_lvar_mt.op
    # TODO FIXME: Same here
    assert arguments(add_lvar_mt) is None
This applies a special consideration for Theano `Variable`s: if it has a non-`None` `owner` with non-`None` `op` and `inputs`, then the `Variable` is more aptly given as the output of `op(inputs)`. Otherwise, considering the `Variable` in isolation, it can be constructed directly using its `type` constructor. """ x_owner = getattr(x, 'owner', None) if x_owner and hasattr(x_owner, 'op'): return x_owner.op return operator_MetaSymbol(x) operator.add((MetaSymbol, ), operator_MetaSymbol) operator.add((MetaVariable, ), operator_MetaVariable) operator.add((tt.Variable, ), lambda x: operator(MetaVariable.from_obj(x))) def arguments_MetaSymbol(x): return tuple(x.rands()) def arguments_MetaVariable(x): """Get a tuple of the arguments used to construct this meta object. This applies a special consideration for Theano `Variable`s: if it has a non-`None` `owner` with non-`None` `op` and `inputs`, then the `Variable` is more aptly given as the output of `op(inputs)`. Otherwise, considering the `Variable` in isolation, it can be constructed directly using its `type` constructor.
)
# NOTE(review): the `)` above closes a registration call whose opening is
# outside this view — confirm against the full file.
# Unify raw Theano objects against meta symbols by lifting them with `metatize`.
_unify.add(
    (tt_class_abstractions, TheanoMetaSymbol, Mapping),
    lambda u, v, s: unify_MetaSymbol(metatize(u), v, s),
)
_unify.add(
    (tt_class_abstractions, tt_class_abstractions, Mapping),
    lambda u, v, s: unify_MetaSymbol(metatize(u), metatize(v), s),
)


def _reify_TheanoClasses(o, s):
    # Reify a raw Theano object through its meta counterpart.
    meta_obj = metatize(o)
    return reify(meta_obj, s)


_reify.add((tt_class_abstractions, Mapping), _reify_TheanoClasses)

# Route the cons/term protocols for raw Theano variables through their
# meta-object equivalents.
operator.add((tt.Variable, ), lambda x: operator(metatize(x)))
_car.add((tt.Variable, ), lambda x: operator(metatize(x)))

arguments.add((tt.Variable, ), lambda x: arguments(metatize(x)))
_cdr.add((tt.Variable, ), lambda x: arguments(metatize(x)))

# Build a term from a raw `Op` by metatizing it first.
term.add((tt.Op, ExpressionTuple), lambda op, args: term(metatize(op), args))

etuplize.add(tt_class_abstractions, lambda x, shallow=False: etuplize(metatize(x), shallow))

# This module only registers dispatch implementations; it exports nothing.
__all__ = []
def test_etuple_term():
    # Objects without a term form are returned as-is when
    # `return_bad_args` is set.
    assert etuplize("blah", return_bad_args=True) == "blah"

    a = tf.compat.v1.placeholder(tf.float64, name="a")
    b = tf.compat.v1.placeholder(tf.float64, name="b")

    a_mt = mt(a)
    # Drop the cached base object so reification must rebuild it.
    a_mt._obj = None
    a_reified = a_mt.reify()
    assert isinstance(a_reified, tf.Tensor)
    assert a_reified.shape.dims is None

    # `OpDef`s, `NodeDef`s, and bare meta tensors are not etuplizable here.
    with pytest.raises(TypeError):
        etuplize(a_mt.op.op_def)

    with pytest.raises(TypeError):
        etuplize(a_mt.op.node_def, shallow=False)

    with pytest.raises(TypeError):
        etuplize(a_mt, shallow=False)

    # Now, consider a meta graph with operator arguments
    add_mt = mt.AddV2(a, b)
    add_et = etuplize(add_mt, shallow=True)
    assert isinstance(add_et, ExpressionTuple)
    assert add_et[0].op_def == mt.AddV2.op_def

    # Check `kanren`'s term framework
    assert isinstance(operator(add_mt), TFlowMetaOperator)
    assert arguments(add_mt) == add_mt.op.inputs

    assert operator(add_mt)(*arguments(add_mt)) == add_mt
    assert isinstance(add_et[0], TFlowMetaOperator)
    assert add_et[1:] == add_mt.op.inputs
    assert operator(add_mt)(*arguments(add_mt)) == add_mt
    assert term(operator(add_mt), arguments(add_mt)) == add_mt

    # A nested graph: deep etuplization recurses into operator arguments.
    add_mt = mt.AddV2(a, add_mt)
    add_et = etuplize(add_mt, shallow=False)
    assert isinstance(add_et, ExpressionTuple)
    assert len(add_et) == 3
    assert add_et[0].op_def == mt.AddV2.op_def
    assert len(add_et[2]) == 3
    assert add_et[2][0].op_def == mt.AddV2.op_def
    assert add_et.eval_obj is add_mt

    # Clear the cached evaluation and force re-evaluation in a fresh graph.
    add_et._eval_obj = ExpressionTuple.null
    with tf.Graph().as_default():
        assert add_et.eval_obj == add_mt

    # Make sure things work with logic variables
    add_lvar_mt = TFlowMetaTensor(var(), var(), [1, 2])
    with pytest.raises(ConsError):
        assert operator(add_lvar_mt) is None
    with pytest.raises(ConsError):
        assert arguments(add_lvar_mt) is None