Example 1
def f(local_opt):
    # `kwargs` and `tags` are free variables: this is the inner closure of a
    # decorator factory, which supplies them (see the sketch below).
    # Using pop('name', None) avoids a KeyError when kwargs is non-empty
    # but carries no 'name' key.
    name = (kwargs and kwargs.pop('name', None)) or local_opt.__name__
    optdb.register(
        name, TopoOptimizer(
            local_opt, failure_callback=TopoOptimizer.warn_inplace),
        60, 'fast_run', 'inplace', 'gpuarray', *tags)
    return local_opt
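
The snippet is the inner closure of a decorator factory: `kwargs` and `tags` come from the enclosing scope, which the excerpt omits. A minimal sketch of what that wrapper could look like, assuming the name `register_inplace` and the imports shown (both are illustrative, not confirmed by the excerpt):

from theano.compile import optdb
from theano.gof import TopoOptimizer

def register_inplace(*tags, **kwargs):
    def f(local_opt):
        name = (kwargs and kwargs.pop('name', None)) or local_opt.__name__
        optdb.register(
            name, TopoOptimizer(
                local_opt, failure_callback=TopoOptimizer.warn_inplace),
            60, 'fast_run', 'inplace', 'gpuarray', *tags)
        return local_opt
    return f

# Usage: decorate a local optimizer to register it for fast_run compilation.
# @register_inplace()
# def local_inplace_some_gpu_op(node):
#     ...
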
Example 2
                # (Excerpt: these lines sit inside the `local_to_gpu` closure
                # created by `make_gpu_optimizer(op, to_gpu)`; see the sketch
                # after this example.)
                for idx in to_gpu:
                    new_inp[idx] = cuda.gpu_from_host(new_inp[idx])
                return [op()(*new_inp)]
        return False

    local_to_gpu.__name__ = "local_to_gpu_" + op.__name__
    cuda.opt.register_opt()(local_to_gpu)


if cuda.cuda_available:
    make_gpu_optimizer(DiagonalSubtensor, [0])
    make_gpu_optimizer(IncDiagonalSubtensor, [0, 3])


@theano.gof.local_optimizer([DiagonalSubtensor, IncDiagonalSubtensor])
def local_inplace_DiagonalSubtensor(node):
    """Also work for IncDiagonalSubtensor."""
    if (isinstance(node.op, (DiagonalSubtensor, IncDiagonalSubtensor))
            and not node.op.inplace):
        new_op = node.op.__class__(inplace=True)
        new_node = new_op(*node.inputs)
        return [new_node]
    return False


theano.compile.optdb.register(
    'local_inplace_DiagonalSubtensor',
    TopoOptimizer(local_inplace_DiagonalSubtensor,
                  failure_callback=TopoOptimizer.warn_inplace), 60, 'fast_run',
    'inplace')
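
Example 2 opens mid-function: its first lines live inside the `local_to_gpu` closure that `make_gpu_optimizer(op, to_gpu)` builds and registers (the tail of that wrapper is visible above). A hedged reconstruction of how such a wrapper fits together; the graph-matching condition is an assumption for illustration, not the verbatim Theano source:

import theano
from theano.sandbox import cuda

def make_gpu_optimizer(op, to_gpu):
    """Register a local optimizer moving `op` to the GPU when the inputs
    listed in `to_gpu` already live there (sketch)."""
    @theano.gof.local_optimizer([op])
    def local_to_gpu(node):
        if isinstance(node.op, op):
            # If any input slated for the GPU is already a transfer from
            # the GPU, move the op there and transfer the result back.
            if any(node.inputs[idx].owner and
                   isinstance(node.inputs[idx].owner.op, cuda.HostFromGpu)
                   for idx in to_gpu):
                new_inp = list(node.inputs)
                for idx in to_gpu:
                    new_inp[idx] = cuda.gpu_from_host(new_inp[idx])
                return [cuda.host_from_gpu(op()(*new_inp))]
        return False

    local_to_gpu.__name__ = "local_to_gpu_" + op.__name__
    cuda.opt.register_opt()(local_to_gpu)
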
Example 3

    def c_code_cache_version(self):
        """
        .. todo::

            WRITEME
        """
        return (8, )


@local_optimizer([CrossMapNormUndo])
def local_crossmapnormundo_inplace(node):
    """
    .. todo::

        WRITEME
    """
    if isinstance(node.op, CrossMapNormUndo) and not node.op.inplace:
        new_op = node.op.as_inplace()
        new_node = new_op(*node.inputs)
        # Unlike the single-output examples elsewhere, the result is returned
        # without list-wrapping: if CrossMapNormUndo is a multi-output op,
        # calling it already yields the list a local optimizer must return.
        return new_node
    return False


theano.compile.optdb.register(
    'local_crossmapnormundo_inplace',
    TopoOptimizer(local_crossmapnormundo_inplace,
                  failure_callback=TopoOptimizer.warn_inplace), 80, 'fast_run',
    'inplace')
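
Examples 2 and 3 (and Example 4 below) repeat one pattern: match an op whose `inplace` flag is off, rebuild it with `inplace=True`, and register the rewrite in `optdb` under the 'fast_run' and 'inplace' tags so it only fires during optimized compilation. A hedged, generic sketch of that pattern for single-output ops; `register_inplace_substitution` is a hypothetical helper name, not part of Theano:

from theano.compile import optdb
from theano.gof import TopoOptimizer, local_optimizer

def register_inplace_substitution(op_class, position=60):
    """Swap `op_class(inplace=False)` for `op_class(inplace=True)` during
    'fast_run' compilation (assumes a single-output op)."""
    @local_optimizer([op_class])
    def local_inplace(node):
        if isinstance(node.op, op_class) and not node.op.inplace:
            return [op_class(inplace=True)(*node.inputs)]
        return False
    local_inplace.__name__ = 'local_inplace_' + op_class.__name__
    optdb.register(
        local_inplace.__name__,
        TopoOptimizer(local_inplace,
                      failure_callback=TopoOptimizer.warn_inplace),
        position, 'fast_run', 'inplace')
    return local_inplace
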
Example 4
from theano import compile, gof
from theano.gof import TopoOptimizer
from theano.typed_list.basic import Append, Extend, Insert, Remove, Reverse


@gof.local_optimizer([Append, Extend, Insert, Reverse, Remove], inplace=True)
def typed_list_inplace_opt(node):
    if (isinstance(node.op, (Append, Extend, Insert, Reverse, Remove))
            and not node.op.inplace):
        new_op = node.op.__class__(inplace=True)
        new_node = new_op(*node.inputs)
        return [new_node]
    return False


compile.optdb.register(
    "typed_list_inplace_opt",
    TopoOptimizer(typed_list_inplace_opt,
                  failure_callback=TopoOptimizer.warn_inplace),
    60,
    "fast_run",
    "inplace",
)
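
With the registration above in place, compiling a typed-list graph under fast_run should let the TopoOptimizer swap the non-inplace ops for their in-place variants. A minimal usage sketch; the concrete input values are illustrative only:

import theano
import theano.tensor as T
import theano.typed_list

lst = theano.typed_list.TypedListType(T.dvector)()
val = T.dvector()
out = theano.typed_list.append(lst, val)

# FAST_RUN runs the optdb pipeline, including the TopoOptimizer above.
f = theano.function([lst, val], out, mode='FAST_RUN')
print(f([[1.0, 2.0]], [3.0, 4.0]))  # [array([1., 2.]), array([3., 4.])]
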
Example 5
def local_large_sparse_targets_gpu(node):
    # (Excerpt: the original up-front guard on `node.op` is truncated; the
    # dispatch below keys on the op's `what_to_output` attribute.)
    if node.op.what_to_output == 0:
        return [GpuLargeSparseTargets(node.op.what_to_output)(*node.inputs)]
    elif node.op.what_to_output == 1:
        return [
            host_from_gpu(
                GpuLargeSparseTargets(node.op.what_to_output)(*node.inputs))
        ]
    else:
        out = GpuLargeSparseTargets(node.op.what_to_output)(*node.inputs)
        return [out[0], host_from_gpu(out[1])]


optdb.register(
    "local_large_sparse_targets_gpu",
    TopoOptimizer(
        local_optimizer([LargeSparseTargets])(local_large_sparse_targets_gpu)),
    49, "fast_run")


def optimize_large_sparse_target(inputs, H, outputs, updates):
    """
    TODO: WRITEME
    """

    # need to rewrite MergeLargeSparseTargetOps because there will be multiple
    # updates containing gradH!

    if not isinstance(updates, OrderedDict):
        raise ValueError("updates must be an OrderedDict; otherwise keys and"
                         " values may not match after optimization")