Code Example #1
File: logpy_opt.py  Project: jaberg/theano_workspace
def logpy_group_incsubtensor(node):
    # TODO: how to design re-usable patterns? (dtype, ndim, etc.)

    shape_of = node.fgraph.shape_feature.shape_of
    shape_dimo = goalifyN(
        shape_dim(shape_of))
    ndimo = goalify(lambda x: getattr(x, 'ndim'))
    x = node.outputs[0].type()  # fresh variable of the same type as the node's output
    if x.ndim == 0:
        return
    y = x[0].type()  # template for the value assigned at a single index of x's first dimension
    z = tensor.set_subtensor(x[1001], y)  # pattern graph; 1001 is an index placeholder
    incs = []
    orig_out = node.outputs[0]
    # Walk up the graph, collecting a chain of single-index set_subtensor ops.
    while node:
        with variables(x, y, 1001):  # treat x, y and the index placeholder as logic variables
            match = run(1, (x, y, 1001), (eq, node.outputs[0], z))
            if match:
                xx, yy, ii = match[0]
                incs.append((ii, xx, yy))
                node = xx.owner
                continue
        break
    if not incs:
        return
    incs.sort()
    # Only rewrite if the chain assigns every index 0..n-1 of the first dimension.
    if zip(*incs)[0] == tuple(range(shape_dim(shape_of)(xx, 0))):
        iin = tensor.concatenate([
            tensor.shape_padleft(yy)
            for ii, _, yy in incs])
        print 'INCS', incs
        return [iin]
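
For orientation, here is a minimal, illustrative sketch (not part of logpy_opt.py; names and shapes are assumptions) of the kind of graph logpy_group_incsubtensor matches: a chain of single-row set_subtensor assignments covering every row, which the rewrite collapses into one concatenate of the assigned rows.

# Illustrative sketch only -- not from the original file.
import theano.tensor as tensor

rows = [tensor.vector('r%d' % i) for i in range(3)]
x = tensor.zeros((3, rows[0].shape[0]))

# A chain of set_subtensor ops, one per row index 0, 1, 2.
out = x
for i, r in enumerate(rows):
    out = tensor.set_subtensor(out[i], r)

# Because the chain assigns every row, the optimizer would propose this
# single concatenate of the padded rows as a replacement for `out`:
grouped = tensor.concatenate([tensor.shape_padleft(r) for r in rows])
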
Code Example #2
File: logpy_opt.py  Project: jaberg/theano_workspace
def logpy_cut_subtensor(node):
    # TODO: how to design re-usable patterns? (dtype, ndim, etc.)
    shape_dimo = goalifyN(
        shape_dim(node.fgraph.shape_feature.shape_of))
    #jj = lpint(238908925034, 'j') # -- a number that cannot occur in the graph
    x = tensor.vector()
    jj = 12345  # placeholder index; made a logic variable below
    with variables(x, jj):
        rval = [x]  # proposed replacement: x itself
        goals = (
            (eq, node.outputs[0], x[:jj]),  # the node's output is the slice x[:jj] ...
            (shape_dimo, (x, 0), jj),       # ... where jj equals x's length, so the slice is a no-op
            )
    return rval, goals
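
For context, the pattern logpy_cut_subtensor builds goals for is the no-op slice x[:jj] where jj equals x's length along dimension 0, with x itself proposed as the replacement. A small, illustrative check of that identity (the concrete length 5 is an assumption for the demo, not from the original file):

# Illustrative sketch only -- not from the original file.
import numpy as np
import theano
import theano.tensor as tensor

v = tensor.vector('v')
sliced = v[:5]          # plays the role of x[:jj] with jj == x.shape[0]

f = theano.function([v], sliced)
data = np.arange(5).astype(theano.config.floatX)
assert np.allclose(f(data), data)   # the full-length slice returns v unchanged
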
Code Example #3
File: logpy_opt.py  Project: jaberg/theano_workspace
def logpy_remove_dot_scalar_matrix(node):
    # TODO: how to design re-usable patterns? (dtype, ndim, etc.)
    shape_of = node.fgraph.shape_feature.shape_of
    shape_dimo = goalifyN(
        shape_dim(shape_of))
    ndimo = goalify(lambda x: getattr(x, 'ndim'))
    x = theano.tensor.matrix() # -- XXX type should not matter
    y = theano.tensor.matrix() # -- XXX type should not matter
    if isinstance(node.op, theano.tensor.Dot):
        with variables(x, y):
            #theano.printing.debugprint(tensor.dot(x, y))
            result = run(1, (x, y),
                    (eq, node, tensor.dot(x, y).owner),  # node computes dot(x, y)
                    (ndimo, x, 2),                       # x is 2-dimensional ...
                    (shape_dimo, (x, 0), 1),             # ... with shape (1, 1),
                    (shape_dimo, (x, 1), 1),             # i.e. effectively a scalar
                    )
        if result:
            xx, yy = result[0]
            #print 'MATCHED xx!', xx, shape_of[xx], xx.type
            #print 'MATCHED yy!', yy, shape_of[yy], yy.type
            #theano.printing.debugprint(xx)
            # Replace dot(1x1 matrix, y) with a scalar multiplication.
            return [tensor.addbroadcast(xx, 0, 1).dimshuffle() * yy]
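
For context, the replacement relies on the identity that a dot product whose left operand is a 1x1 matrix is just a scalar multiplication. A small, illustrative check of the rewritten expression (the concrete inputs are assumptions, not from the original file):

# Illustrative sketch only -- not from the original file.
import numpy as np
import theano
import theano.tensor as tensor

xx = tensor.matrix('xx')    # stands in for the matched 1x1 operand
yy = tensor.matrix('yy')

dot_graph = tensor.dot(xx, yy)
# The optimizer's replacement: mark both dims of xx as broadcastable, drop
# them with dimshuffle(), and multiply element-wise.
mul_graph = tensor.addbroadcast(xx, 0, 1).dimshuffle() * yy

f = theano.function([xx, yy], [dot_graph, mul_graph])
a = np.array([[2.0]], dtype=theano.config.floatX)
b = np.arange(6, dtype=theano.config.floatX).reshape(1, 6)
d, m = f(a, b)
assert np.allclose(d, m)
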