Example #1
def logpy_group_incsubtensor(node):
    # TODO: how to design re-usable patterns? (dtype, ndim, etc.)

    shape_of = node.fgraph.shape_feature.shape_of
    shape_dimo = goalifyN(
        shape_dim(shape_of))
    ndimo = goalify(lambda x: getattr(x, 'ndim'))
    # Template graph to match: set_subtensor(x[i], y), where x has the same
    # type as this node's output, y has the type of a slice x[0], and the
    # literal 1001 is a placeholder index declared a logic variable below.
    x = node.outputs[0].type()
    if x.ndim == 0:
        return
    y = x[0].type()
    z = tensor.set_subtensor(x[1001], y)
    incs = []
    orig_out = node.outputs[0]
    # Walk toward the graph inputs through consecutive set_subtensor nodes,
    # collecting an (index, base tensor, value) triple for each one that
    # matches the template.
    while node:
        with variables(x, y, 1001):
            match = run(1, (x, y, 1001), (eq, node.outputs[0], z))
            if match:
                xx, yy, ii = match[0]
                incs.append((ii, xx, yy))
                node = xx.owner
                continue
        break
    if not incs:
        return
    incs.sort()
    # If the collected indices are exactly 0 .. n-1, the chain of set_subtensor
    # ops builds the output one row at a time, so it can be replaced by a
    # single concatenation of the row values.
    if list(zip(*incs))[0] == tuple(range(shape_dim(shape_of)(xx, 0))):
        iin = tensor.concatenate([
            tensor.shape_padleft(yy)
            for ii, _, yy in incs])
        print('INCS', incs)
        return [iin]
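The matching idiom above, declaring concrete objects (including the placeholder index 1001) as logic variables with variables(...) and then running an eq goal against a template, can be tried in isolation on plain tuples. A minimal sketch, assuming run, eq and variables are importable from the top level of the logpy/kanren version used here:

from logpy import run, eq, variables  # import path assumed; it varies across logpy/kanren releases

# 1001 plays the same placeholder role as in the optimizer above.
template = ('set_subtensor', 'base', 1001)
concrete = ('set_subtensor', 'base', 7)

with variables(1001):
    # Unify the concrete term with the template and report what 1001 matched.
    match = run(1, 1001, (eq, concrete, template))

print(match)  # expected: (7,)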
Example #2
def goalifyN(func):
    funco = goalify(func)
    def goalo(args, result):
        tmp = var()
        return (lall,
            (eq, tmp, args),
            (funco, tmp, result))
    return goalo
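goalifyN turns an ordinary Python function into a relational goal whose argument tuple may still contain logic variables: the (eq, tmp, args) step lets the surrounding goals resolve those variables first, and goalify(func) then applies func to the resolved tuple and unifies the output with result. A minimal usage sketch, assuming the same logpy/kanren version and tuple-goal style as the examples above (addo is a hypothetical helper, not part of the original code):

from logpy import run, var  # import path assumed; it varies across logpy/kanren releases

# Hypothetical relational wrapper around two-argument addition, used the
# same way shape_dimo is used in the other examples.
addo = goalifyN(lambda a, b: a + b)

out = var()
print(run(1, out, (addo, (1, 2), out)))  # expected: (3,)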
Example #3
def logpy_remove_dot_scalar_matrix(node):
    # TODO: how to design re-usable patterns? (dtype, ndim, etc.)
    shape_of = node.fgraph.shape_feature.shape_of
    shape_dimo = goalifyN(
        shape_dim(shape_of))
    ndimo = goalify(lambda x: getattr(x, 'ndim'))
    x = theano.tensor.matrix() # -- XXX type should not matter
    y = theano.tensor.matrix() # -- XXX type should not matter
    if isinstance(node.op, theano.tensor.Dot):
        with variables(x, y):
            #theano.printing.debugprint(tensor.dot(x, y))
            # Match dot(x, y) where x is a 2-d tensor with shape (1, 1).
            result = run(1, (x, y),
                    (eq, node, tensor.dot(x, y).owner),
                    (ndimo, x, 2),
                    (shape_dimo, (x, 0), 1),
                    (shape_dimo, (x, 1), 1),
                    )
        if result:
            xx, yy = result[0]
            #print 'MATCHED xx!', xx, shape_of[xx], xx.type
            #print 'MATCHED yy!', yy, shape_of[yy], yy.type
            #theano.printing.debugprint(xx)
            # Collapse the (1, 1) matrix xx to a 0-d scalar and scale yy
            # elementwise instead of computing the dot product.
            return [tensor.addbroadcast(xx, 0, 1).dimshuffle() * yy]
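The replacement relies on the identity that a dot product with a (1, 1) matrix on the left is just a scaling by its single element, which is what the addbroadcast(...).dimshuffle() * yy expression computes. A quick NumPy check of that identity (NumPy assumed available; not part of the original optimizer):

import numpy as np

x = np.array([[3.0]])      # the (1, 1) operand matched above
y = np.random.rand(1, 4)   # dot(x, y) requires y to have a single row

assert np.allclose(np.dot(x, y), x[0, 0] * y)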