Example #1
    It is defined in __init__.py so that it exists even when `cuda_available`
    is False (this is necessary to avoid breaking the test suite).
    """
    def make_thunk(self, node, storage_map, compute_map, no_recycling):
        if theano.sandbox.cuda.use.device_number is None:
            theano.sandbox.cuda.use("gpu",
                                    force=True,
                                    default_to_move_computation_to_gpu=False,
                                    move_shared_float32_to_gpu=False,
                                    enable_cuda=False)
        return super(GpuOp, self).make_thunk(node, storage_map, compute_map,
                                             no_recycling)


theano.compile.debugmode.default_make_thunk.append(
    get_unbound_function(GpuOp.make_thunk))
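
# NOTE: a minimal, self-contained sketch of the lazy-initialization pattern
# above -- touch the device only the first time a thunk is built.
# `_SketchDevice` and `_SketchBaseOp` are illustrative stand-ins, not
# Theano APIs.


class _SketchDevice(object):
    number = None                          # mirrors use.device_number

    @classmethod
    def init(cls, number=0):
        if cls.number is None:             # touch the hardware only once
            cls.number = number


class _SketchBaseOp(object):
    def make_thunk(self, node):
        return lambda: node                # stands in for real compilation


class _SketchGpuOp(_SketchBaseOp):
    def make_thunk(self, node):
        if _SketchDevice.number is None:   # same guard as GpuOp.make_thunk
            _SketchDevice.init()
        return super(_SketchGpuOp, self).make_thunk(node)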

# We must do these imports to be able to build the full documentation
# even when nvcc is not available.
from theano.sandbox.cuda.var import (CudaNdarrayVariable, CudaNdarrayConstant,
                                     CudaNdarraySharedVariable,
                                     float32_shared_constructor)
from theano.sandbox.cuda.type import CudaNdarrayType

if cuda_available:
    # check whether an old cuda_ndarray was loaded instead of the one
    # we compiled!
    import cuda_ndarray.cuda_ndarray
    if cuda_ndarray_so != cuda_ndarray.cuda_ndarray.__file__:
        _logger.warning(
            "cuda_ndarray was loaded from %s, but Theano expected "
Example #2
    is False (this is necessary to avoid breaking the test suite).

    """

    def make_thunk(self, node, storage_map, compute_map, no_recycling):
        if use.device_number is None:
            use("gpu",
                force=True,
                default_to_move_computation_to_gpu=False,
                move_shared_float32_to_gpu=False,
                enable_cuda=False)
        return super(GpuOp, self).make_thunk(node, storage_map,
                                             compute_map, no_recycling)

theano.compile.debugmode.default_make_thunk.append(
    get_unbound_function(GpuOp.make_thunk))

# We must do these imports to be able to build the full documentation
# even when nvcc is not available.
from theano.sandbox.cuda.var import (CudaNdarrayVariable,
                                     CudaNdarrayConstant,
                                     CudaNdarraySharedVariable,
                                     float32_shared_constructor)
from theano.sandbox.cuda.type import CudaNdarrayType


def dnn_available():
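    # config flags are stored as strings, hence the comparison to "False"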
    if config.dnn.enabled == "False":
        dnn_available.avail = False
        dnn_available.msg = "Disabled by dnn.enabled flag"
    if dnn_available.avail is None and not cuda_available:
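
dnn_available caches its verdict in attributes on the function object itself
(dnn_available.avail and dnn_available.msg), so the expensive probe runs at
most once. A self-contained sketch of that memoization pattern; `_probe` is
a hypothetical stand-in for the real cuDNN compile-and-run test:

def _probe():
    # hypothetical stand-in for the real availability test
    return False, "no cuDNN in this sketch"

def feature_available():
    if feature_available.avail is None:      # run the probe at most once
        feature_available.avail, feature_available.msg = _probe()
    return feature_available.avail

feature_available.avail = None               # cache lives on the function
feature_available.msg = None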
Example #3
    def __hash__(self):
        msg = []
        msg.append(self.__class__.__name__)
        for val in (self.partial_sum, self.pad, self.dense_connectivity,
                    self.stride, self.copy_non_contiguous):
            msg.append(str(val))

        return hash(tuple(msg))

    # Make sure the cuda_convnet library is compiled and up-to-date
    def make_thunk(self, *args, **kwargs):
        if not convnet_available():
            raise RuntimeError('Could not compile cuda_convnet')

        return super(BaseActs, self).make_thunk(*args, **kwargs)


# This is needed because otherwise DebugMode would consider that
# BaseActs.make_thunk does something other than the default code and
# would duplicate the verification.
theano.compile.debugmode.default_make_thunk.append(
    get_unbound_function(BaseActs.make_thunk))
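
# NOTE: the __hash__ above hashes a tuple of the op's identifying fields.
# It is meant to be paired with an __eq__ over the same fields, so that
# equal ops hash equally (required for dict/set membership, e.g. when
# Theano merges equivalent nodes). A minimal sketch with illustrative
# field names:


class _SketchActs(object):
    def __init__(self, pad, stride):
        self.pad = pad
        self.stride = stride

    def _key(self):
        return (self.__class__.__name__, self.pad, self.stride)

    def __eq__(self, other):
        return type(self) == type(other) and self._key() == other._key()

    def __hash__(self):
        return hash(self._key())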


class UnimplementedError(Exception):
    """
    Like NotImplementedError, but designed not to be caught and suppressed
    by theano.
    """