def test_constant_unification(self):
    """Pattern rewrite should treat the two equal constants as unifiable."""
    const_x = Constant(MyType(), 2, name='x')
    var_y = MyVariable('y')
    const_z = Constant(MyType(), 2, name='z')
    expr = op1(op1(const_x, var_y), var_y)
    graph = Env([var_y], [expr])
    rewrite = PatternOptimizer((op1, const_z, '1'), (op2, '1', const_z))
    rewrite.optimize(graph)
    assert str(graph) == "[Op1(Op2(y, z), y)]"
def test_constant_unification(self):
    """Pattern rewrite should treat the two equal constants as unifiable."""
    const_x = Constant(MyType(), 2, name="x")
    var_y = MyVariable("y")
    const_z = Constant(MyType(), 2, name="z")
    expr = op1(op1(const_x, var_y), var_y)
    fgraph = FunctionGraph([var_y], [expr])
    rewrite = PatternOptimizer((op1, const_z, "1"), (op2, "1", const_z))
    rewrite.optimize(fgraph)
    assert str(fgraph) == "FunctionGraph(Op1(Op2(y, z), y))"
def test_identical_constant_args(self):
    """Merging should collapse two equal constant arguments into one."""
    var_x = MyVariable("x")
    const_y = Constant(MyType(), 2, name="y")
    const_z = Constant(MyType(), 2, name="z")
    # Test values are irrelevant here; build the graph without them.
    with config.change_flags(compute_test_value="off"):
        expr = op1(const_y, const_z)
    fgraph = FunctionGraph([var_x, const_y, const_z], [expr])
    MergeOptimizer().optimize(fgraph)
    # Either constant may survive the merge.
    assert str(fgraph) in (
        "FunctionGraph(Op1(y, y))",
        "FunctionGraph(Op1(z, z))",
    )
def test_constant_merging(self):
    """Equal constants feeding duplicated sub-graphs should be merged."""
    var_x = MyVariable("x")
    const_y = Constant(MyType(), 2, name="y")
    const_z = Constant(MyType(), 2, name="z")
    expr = op1(op2(var_x, const_y), op2(var_x, const_y), op2(var_x, const_z))
    fgraph = FunctionGraph([var_x, const_y, const_z], [expr])
    MergeOptimizer().optimize(fgraph)
    printed = str(fgraph)
    # Either constant may be the survivor of the merge.
    assert printed in (
        "FunctionGraph(Op1(*1 -> Op2(x, y), *1, *1))",
        "FunctionGraph(Op1(*1 -> Op2(x, z), *1, *1))",
    )
def test_constant_merging(self):
    """Equal constants feeding duplicated sub-graphs should be merged."""
    var_x = MyVariable('x')
    const_y = Constant(MyType(), 2, name='y')
    const_z = Constant(MyType(), 2, name='z')
    expr = op1(op2(var_x, const_y), op2(var_x, const_y), op2(var_x, const_z))
    fgraph = FunctionGraph([var_x, const_y, const_z], [expr])
    MergeOptimizer().optimize(fgraph)
    printed = str(fgraph)
    # Either constant may be the survivor of the merge.
    assert printed in (
        "[Op1(*1 -> Op2(x, y), *1, *1)]",
        "[Op1(*1 -> Op2(x, z), *1, *1)]",
    )
def test_identical_constant_args(self):
    """Merging should collapse two equal constant arguments into one."""
    var_x = MyVariable('x')
    const_y = Constant(MyType(), 2, name='y')
    const_z = Constant(MyType(), 2, name='z')
    # Temporarily disable test values while building the graph.
    saved_flag = config.compute_test_value
    config.compute_test_value = 'off'
    try:
        expr = op1(const_y, const_z)
    finally:
        config.compute_test_value = saved_flag
    fgraph = FunctionGraph([var_x, const_y, const_z], [expr])
    MergeOptimizer().optimize(fgraph)
    # Either constant may survive the merge.
    assert str(fgraph) in ('[Op1(y, y)]', '[Op1(z, z)]')
def test_identical_constant_args(self):
    """Merging should collapse two equal constant arguments into one."""
    var_x = MyVariable("x")
    const_y = Constant(MyType(), 2, name="y")
    const_z = Constant(MyType(), 2, name="z")
    # Temporarily disable test values while building the graph.
    saved_flag = config.compute_test_value
    config.compute_test_value = "off"
    try:
        expr = op1(const_y, const_z)
    finally:
        config.compute_test_value = saved_flag
    fgraph = FunctionGraph([var_x, const_y, const_z], [expr])
    MergeOptimizer().optimize(fgraph)
    # Either constant may survive the merge.
    assert str(fgraph) in (
        "FunctionGraph(Op1(y, y))",
        "FunctionGraph(Op1(z, z))",
    )
def __init__(self, type, data, name=None):
    """Build a constant holding a ``slice``.

    Parameters
    ----------
    type
        The Theano type of this constant (a slice type).
    data : slice
        The slice value.  Any 0-d integer ndarray bound is converted to a
        plain Python ``int`` because ndarrays are not hashable.
    name : str, optional
        Name for the constant.
    """
    assert isinstance(data, slice)
    # NumPy ndarrays aren't hashable, so convert any 0-d integer ndarray
    # bound to a plain int.  Each bound is checked independently: the
    # original `elif` chain converted at most one bound even when several
    # were ndarrays, and its step branch converted `data.stop` instead of
    # `data.step`, leaving an unhashable step.
    if isinstance(data.start, np.ndarray):
        assert data.start.ndim == 0
        assert str(data.start.dtype) in theano.tensor.integer_dtypes
        data = slice(int(data.start), data.stop, data.step)
    if isinstance(data.stop, np.ndarray):
        assert data.stop.ndim == 0
        assert str(data.stop.dtype) in theano.tensor.integer_dtypes
        data = slice(data.start, int(data.stop), data.step)
    if isinstance(data.step, np.ndarray):
        assert data.step.ndim == 0
        assert str(data.step.dtype) in theano.tensor.integer_dtypes
        data = slice(data.start, data.stop, int(data.step))
    Constant.__init__(self, type, data, name)
def test_opwiseclinker_constant():
    """A constant input should be folded into the compiled function."""
    x, y, z = inputs()
    x = Constant(tdouble, 7.2, name='x')
    expr = add(mul(x, y), mul(y, z))
    linker = OpWiseCLinker().accept(Env([y, z], [expr]))
    fn = linker.make_function()
    # 7.2 * 1.5 + 1.5 * 3.0 == 15.3
    assert fn(1.5, 3.0) == 15.3
def test_c_fail_error():
    """A failing C op should surface as a RuntimeError at call time."""
    x, y, z = inputs()
    x = Constant(tdouble, 7.2, name="x")
    expr = add_fail(mul(x, y), mul(y, z))
    linker = OpWiseCLinker().accept(Env([y, z], [expr]))
    fn = linker.make_function()
    with pytest.raises(RuntimeError):
        fn(1.5, 3.0)
def test_pre_greedy_local_optimizer():
    """Check `pre_greedy_local_optimizer` constant folding in/out of a fgraph.

    Folding should replace constant-only sub-graphs unless the node already
    belongs to the given `FunctionGraph`.
    """
    empty_fgraph = FunctionGraph([], [])

    x = MyVariable("x")
    y = MyVariable("y")
    c1 = Constant(MyType(), 1, "c1")
    c2 = Constant(MyType(), 2, "c2")
    o1 = op2(c1, c2)  # constant-only: a folding candidate
    o3 = op1(c1, y)   # mixed inputs: must not be folded
    o2 = op1(o1, c2, x, o3, o1)

    assert o2.owner.inputs[0].owner is not None
    assert o2.owner.inputs[4].owner is not None

    # This should fold `o1`, because it has only `Constant` arguments, and
    # replace it with the `Constant` result
    cst = pre_greedy_local_optimizer(empty_fgraph, [constant_folding], o2)

    # Input 0 became a constant (no owner); all other inputs are untouched,
    # and both occurrences of `o1` map to the same folded constant.
    assert cst.owner.inputs[0].owner is None
    assert cst.owner.inputs[1] is c2
    assert cst.owner.inputs[2] is x
    assert cst.owner.inputs[3] is o3
    assert cst.owner.inputs[4] is cst.owner.inputs[0]

    # We're going to do it again, except this time `o1` is
    # in the `fgraph`, so it shouldn't be folded
    fg = FunctionGraph([], [o1], clone=False)
    o2 = op1(o1, c2, x, o3, o1)

    cst = pre_greedy_local_optimizer(fg, [constant_folding], o2)

    # `o1` survives untouched because it lives in `fg`.
    assert cst.owner.inputs[0] is o1
    assert cst.owner.inputs[4] is cst.owner.inputs[0]

    # What exactly is this supposed to test?
    ms = MakeSlice()(1)
    cst = pre_greedy_local_optimizer(empty_fgraph, [constant_folding], ms)

    assert isinstance(cst, SliceConstant)

    # Make sure constant of slice signature is hashable.
    assert isinstance(hash(cst.signature()), int)
def test_c_fail_error():
    """A failing C op should surface as a RuntimeError at call time."""
    # The C backend is required for this test.
    if not theano.config.cxx:
        pytest.skip("G++ not available, so we need to skip this test.")
    x, y, z = inputs()
    x = Constant(tdouble, 7.2, name="x")
    expr = add_fail(mul(x, y), mul(y, z))
    linker = OpWiseCLinker().accept(Env([y, z], [expr]))
    fn = linker.make_function()
    with pytest.raises(RuntimeError):
        fn(1.5, 3.0)
def test_pre_constant_merge():
    """Check `pre_constant_merge` with constants in and out of a fgraph.

    Equal constants should be merged to one object, unless the preferred
    constant already belongs to the given `FunctionGraph`.
    """
    empty_fgraph = FunctionGraph([], [])

    x = MyVariable("x")
    y = MyVariable("y")
    # Two distinct-but-equal constants (same type, value, and name).
    c1 = Constant(MyType(), 1, "c1")
    c2 = Constant(MyType(), 1, "c1")
    o1 = op2(c1, x)
    o2 = op1(o1, y, c2)

    assert c1 is not c2

    res = pre_constant_merge(empty_fgraph, [o2])

    assert [o2] == res
    # `c2` was replaced by `c1`, the first-seen equal constant.
    assert o2.owner.inputs[2] is c1

    o2 = op1(o1, y, c2)
    fg = FunctionGraph([x, y], [o2], clone=False)

    assert o2.owner in fg.apply_nodes

    res = pre_constant_merge(fg, [o2])

    assert res == [o2]
    # `c2` is kept this time: it already lives in `fg`.
    assert o2.owner.inputs[2] is c2

    # What is this supposed to test?
    ms = MakeSlice()(1)
    res = pre_constant_merge(empty_fgraph, [ms])

    assert res == [ms]

    const_slice = SliceConstant(type=slicetype, data=slice(1, None, 2))

    assert isinstance(const_slice, Constant)

    adv = AdvancedSubtensor()(tt.matrix(), [2, 3], const_slice)

    # NOTE(review): `adv` is passed bare here rather than as `[adv]` as in
    # the calls above — presumably `pre_constant_merge` accepts a single
    # variable too; confirm against its signature.
    res = pre_constant_merge(empty_fgraph, adv)
    assert res == [adv]
def test_clinker_literal_inlining():
    """A literal constant should be inlined into the generated C code."""
    x, y, z = inputs()
    z = Constant(tdouble, 4.12345678)
    e = add(mul(add(x, y), div(x, y)), sub(sub(x, y), z))
    lnk = CLinker().accept(Env([x, y], [e]))
    fn = lnk.make_function()
    # (2+2)*(2/2) + ((2-2) - 4.12345678) == -0.12345678
    assert abs(fn(2.0, 2.0) + 0.12345678) < 1e-9

    code = lnk.code_gen()
    # Python 2 `print` statements were a syntax error under Python 3;
    # converted to `print()` calls.
    print("=== Code generated ===")
    print(code)
    assert "4.12345678" in code  # we expect the number to be inlined
def test_fail_error():
    """A failing op should surface as a RuntimeError at call time."""
    x, y, z = inputs()
    x = Constant(tdouble, 7.2, name='x')
    e = add_fail(mul(x, y), mul(y, z))
    lnk = OpWiseCLinker().accept(Env([y, z], [e]))
    fn = lnk.make_function()
    try:
        # Result is discarded: the call is expected to raise.
        fn(1.5, 3.0)
    except RuntimeError:
        # Python 2 `print` statement converted to a `print()` call.
        print('Yay, TEST PASSED')
        return  # test passed
    assert 0  # test failed
def test_clinker_literal_inlining():
    """A literal constant should be inlined into the generated C code."""
    # The C backend is required for this test.
    if not theano.config.cxx:
        raise SkipTest("G++ not available, so we need to skip this test.")
    x, y, z = inputs()
    z = Constant(tdouble, 4.12345678)
    expr = add(mul(add(x, y), div(x, y)), bad_sub(bad_sub(x, y), z))
    linker = CLinker().accept(Env([x, y], [expr]))
    fn = linker.make_function()
    assert abs(fn(2.0, 2.0) + 0.12345678) < 1e-9

    generated = linker.code_gen()
    # print "=== Code generated ==="
    # print code
    assert "4.12345678" in generated  # we expect the number to be inlined
def test_c_fail_error():
    """A failing C op should surface as a RuntimeError at call time."""
    # The C backend is required for this test.
    if not theano.config.cxx:
        raise SkipTest("G++ not available, so we need to skip this test.")
    x, y, z = inputs()
    x = Constant(tdouble, 7.2, name='x')
    expr = add_fail(mul(x, y), mul(y, z))
    linker = OpWiseCLinker().accept(Env([y, z], [expr]))
    fn = linker.make_function()
    try:
        fn(1.5, 3.0)
    except RuntimeError:
        print('Yay, TEST PASSED')
        return  # test passed
    assert 0  # test failed
def make_node(self, x, index):
    """Build an `Apply` node indexing typed list `x` with `index`.

    A slice index yields an output of the list's own type (`x.type()`),
    while a scalar ``int64`` index yields `x.ttype()` — presumably the
    element type of the list; confirm against `TypedListType`.

    Raises
    ------
    TypeError
        If `index` is neither a scalar nor a slice.
    """
    assert isinstance(x.type, TypedListType)
    if not isinstance(index, Variable):
        # Raw Python value: wrap it as a graph constant first.
        if isinstance(index, slice):
            index = Constant(SliceType(), index)
            return Apply(self, [x, index], [x.type()])
        else:
            index = tt.constant(index, ndim=0, dtype="int64")
            return Apply(self, [x, index], [x.ttype()])
    # Already a Variable: dispatch on its type.
    if isinstance(index.type, SliceType):
        return Apply(self, [x, index], [x.type()])
    elif isinstance(index, tt.TensorVariable) and index.ndim == 0:
        # Only 0-d int64 tensors are accepted as scalar indices.
        assert index.dtype == "int64"
        return Apply(self, [x, index], [x.ttype()])
    else:
        raise TypeError("Expected scalar or slice as index.")
SliceType.Constant = SliceConstant


class NoneTypeT(Generic):
    """Type whose only valid value is ``None``.

    Inherits from Generic to have c code working.
    """

    def filter(self, x, strict=False, allow_downcast=None):
        # Accept only the None singleton.
        if x is None:
            return x
        raise TypeError("Expected None!")

    @staticmethod
    def may_share_memory(a, b):
        # Python's None is a singleton and never shares memory with any
        # other object, in the sense used by DebugMode.
        return False


none_type_t = NoneTypeT()

# A ready-made ``None`` constant.  A Variable instance can be used only
# once per fgraph, so call ``NoneConst.clone()`` before inserting it into
# a Theano graph; use ``NoneConst.equals(x)`` to test whether a variable
# is this constant.
NoneConst = Constant(none_type_t, None, name="NoneConst")
def test_constant(self):
    """A constant operand should be baked into the compiled function."""
    x, y, z = inputs()
    y = Constant(tdouble, 2.0)
    expr = mul(add(x, y), div(x, y))
    fn = perform_linker(FunctionGraph([x], [expr])).make_function()
    # (1 + 2) * (1 / 2) == 1.5
    assert fn(1.0) == 1.5