def logpy_join(node): if isinstance(node.op, tensor.Join): axis = node.inputs[0] tsrs = node.inputs[1:] if len(tsrs) < 2: return for i, (t0, t1) in enumerate(zip(tsrs[:-1], tsrs[1:])): reb_op = tensor.Rebroadcast((0, 0)) x0 = reb_op(t0.type()) x1 = reb_op(t1.type()) op0 = var('op0') with variables(x0, x1): op(x[i], x[i+1]) match = run( 1, [x0, x1, op0], (eq, [t0, t1], [reb_op(x0), reb_op(x1)]), (getattrreco, (x0, 'owner', 'op'), op0), (getattrreco, (x1, 'owner', 'op'), op0), (isinstanceo, op0, tensor.Elemwise), ) if match: print 'MATCH', match else: return
def logpy_group_incsubtensor(node):
    # Collapse a chain of single-index set_subtensor operations that together
    # fill every row of x into one concatenate of the written values.
    # Walks backwards from `node` matching x[i] = y one link at a time.
    # TODO: how to design re-usable patterns? (dtype, ndim, etc.)
    shape_of = node.fgraph.shape_feature.shape_of
    shape_dimo = goalifyN(
        shape_dim(shape_of))
    ndimo = goalify(lambda x: getattr(x, 'ndim'))
    # Pattern variables: x (same type as the node's output) and y (one row
    # of x).  1001 is a sentinel integer standing in for the subtensor index;
    # inside `variables(...)` it is treated as a logic variable.
    # NOTE(review): assumes the literal 1001 cannot occur in the graph
    # being matched -- TODO confirm.
    x = node.outputs[0].type()
    if x.ndim == 0:
        return  # scalars have no rows to group
    y = x[0].type()
    z = tensor.set_subtensor(x[1001], y)
    incs = []  # collected (index, x, y) triples, one per matched link
    orig_out = node.outputs[0]
    # Follow the chain upward: each match rebinds `node` to the owner of
    # the inner x and tries again; stop at the first non-match.
    while node:
        with variables(x, y, 1001):
            match = run(1, (x, y, 1001), (eq, node.outputs[0], z))
            if match:
                xx, yy, ii = match[0]
                incs.append((ii, xx, yy))
                node = xx.owner
                continue
            break
    if not incs:
        return
    incs.sort()
    # Only rewrite when the matched indices are exactly 0..nrows-1,
    # i.e. the chain writes every row of xx once.
    # NOTE: zip(*incs)[0] is Python-2-only (zip returns a list there).
    if zip(*incs)[0] == tuple(range(shape_dim(shape_of)(xx, 0))):
        iin = tensor.concatenate([
            tensor.shape_padleft(yy) for ii, _, yy in incs])
        print 'INCS', incs
        return [iin]
def test_context_type_not_matter():
    """Unification across dot graphs should ignore dtype differences.

    The concrete graph is built from float32 matrices and the pattern graph
    from float64 matrices; once the pattern's inputs are wildcarded via
    ``variables``, the two Apply nodes must still unify.
    """
    lhs_a = theano.tensor.fmatrix()  # -- type doesn't really matter
    lhs_b = theano.tensor.fmatrix()  # -- type doesn't really matter
    pat_a = theano.tensor.dmatrix()  # -- type doesn't really matter
    pat_b = theano.tensor.dmatrix()  # -- type doesn't really matter
    with variables(pat_a, pat_b):
        found = run(1, (pat_a, pat_b),
                    (eq,
                     tensor.dot(lhs_a, lhs_b).owner,
                     tensor.dot(pat_a, pat_b).owner))
        assert found
def test_run_objects_with_context_manager():
    """Unify two Foo instances with the literal 1234 acting as a wildcard.

    Registers Foo in the global unify/reify dispatch tables for the duration
    of the test.

    BUG FIX: the original deleted the dispatch-table entries only on the
    success path, so a failing assert leaked Foo handlers into the global
    tables and could corrupt later tests -- the cleanup now runs in a
    ``finally`` block.
    """
    f = Foo(1, 1234)
    g = Foo(1, 2)
    unify_dispatch[(Foo, Foo)] = unify_object
    reify_dispatch[Foo] = reify_object
    try:
        with variables(1234):
            # 1234 behaves as a logic variable inside this context.
            assert unify_object(f, g, {})
            assert run(1, 1234, (eq, f, g)) == (2,)
            assert run(1, Foo(1234, 1234), (eq, f, g)) == (Foo(2, 2),)
    finally:
        # Always restore the global dispatch tables.
        del reify_dispatch[Foo]
        del unify_dispatch[(Foo, Foo)]
def logpy_cut_subtensor(node):
    # Build a (pattern, goals) pair matching the no-op slice x[:j] where j
    # equals x's length along axis 0, so the subtensor can be cut away.
    # TODO: how to design re-usable patterns? (dtype, ndim, etc.)
    shape_dimo = goalifyN(
        shape_dim(node.fgraph.shape_feature.shape_of))
    #jj = lpint(238908925034, 'j') # -- a number that cannot occur in the graph
    x = tensor.vector()
    # Sentinel integer that is treated as a logic variable inside
    # `variables(...)`.  NOTE(review): assumes 12345 cannot appear as a real
    # constant in the graph being matched -- TODO confirm.
    jj = 12345
    with variables(x, jj):
        rval = [x]
        goals = (
            # node's output must be x sliced as x[:jj] ...
            (eq, node.outputs[0], x[:jj]),
            # ... and jj must equal x's extent on axis 0 (a full slice).
            (shape_dimo, (x, 0), jj),
        )
        return rval, goals
def test_context_manager():
    """Match a concrete inc_subtensor graph against a wildcarded pattern.

    The pattern's inputs and its upper slice bound (the sentinel 1234) are
    turned into logic variables; unification must recover the concrete
    bound 3 and the concrete input vectors.
    """
    concrete_x = tensor.vector()
    concrete_y = tensor.vector()
    concrete = tensor.inc_subtensor(concrete_x[1:3], concrete_y)

    pattern_x = tensor.vector()
    pattern_y = tensor.vector()
    pattern = tensor.inc_subtensor(pattern_x[1:1234], pattern_y)

    wildcards = (1234, pattern_x, pattern_y)
    with variables(*wildcards):
        found, = run(0, wildcards, (eq, concrete, pattern))
        assert found == (3, concrete_x, concrete_y)
def logpy_remove_dot_scalar_matrix(node):
    # Rewrite dot(x, y) where x is a 1x1 matrix into an elementwise
    # broadcasted multiply, removing the needless Dot.
    # TODO: how to design re-usable patterns? (dtype, ndim, etc.)
    shape_of = node.fgraph.shape_feature.shape_of
    shape_dimo = goalifyN(
        shape_dim(shape_of))
    ndimo = goalify(lambda x: getattr(x, 'ndim'))
    x = theano.tensor.matrix()  # -- XXX type should not matter
    y = theano.tensor.matrix()  # -- XXX type should not matter
    if isinstance(node.op, theano.tensor.Dot):
        with variables(x, y):
            #theano.printing.debugprint(tensor.dot(x, y))
            result = run(1, (x, y),
                         # node must be dot(x, y) ...
                         (eq, node, tensor.dot(x, y).owner),
                         # ... with x a matrix of shape (1, 1)
                         (ndimo, x, 2),
                         (shape_dimo, (x, 0), 1),
                         (shape_dimo, (x, 1), 1),
                         )
            if result:
                xx, yy = result[0]
                #print 'MATCHED xx!', xx, shape_of[xx], xx.type
                #print 'MATCHED yy!', yy, shape_of[yy], yy.type
                #theano.printing.debugprint(xx)
                # Turn the (1, 1) matrix into a scalar (broadcast both
                # axes, drop them with dimshuffle()) and multiply.
                return [tensor.addbroadcast(xx, 0, 1).dimshuffle() * yy]
def simplify(expr):
    """Return all rewritings of ``expr`` reachable via the ``reduces`` relation.

    Unifies ``expr`` with a fresh source variable and runs the ``reduces``
    relation to enumerate every target it rewrites to.

    BUG FIX: the original wrapped the query in ``with variables(*vars):``,
    which unpacks the *builtin* ``vars`` function and raises TypeError at
    runtime.  ``source`` and ``target`` are already logic variables created
    by ``var()``, so no ``variables`` context is needed.
    """
    source, target = var(), var()
    result = run(0, target,
                 (reduces, source, target),
                 (eq, expr, source))
    return result
def test_context_manager():
    """``variables(1)`` makes the literal 1 a logic variable only in-scope."""
    with variables(1):
        # Inside the context the literal is wildcarded ...
        assert isvar(1)
    # ... and reverts to a plain value once the context exits.
    assert not isvar(1)