Example 1
import sys

import numpy

from theano.gof.python25 import DefaultOrderedDict
from theano.misc.ordered_set import OrderedSet
from theano.compat.six import StringIO
from theano.gof import opt
from theano.configparser import AddConfigVar, FloatParam
from theano import config
AddConfigVar('optdb.position_cutoff',
             'Where to stop earlier during optimization. It represents the'
             ' position of the optimizer at which to stop.',
             FloatParam(numpy.inf),
             in_c_key=False)
AddConfigVar('optdb.max_use_ratio',
             'A ratio that prevents infinite loops in EquilibriumOptimizer.',
             FloatParam(5),
             in_c_key=False)


class DB(object):
    def __hash__(self):
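        # Lazily assign a process-unique index the first time this DB is
        # hashed, so every instance gets a stable, distinct hash value.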
        if not hasattr(self, '_optimizer_idx'):
            self._optimizer_idx = opt._optimizer_idx[0]
            opt._optimizer_idx[0] += 1
        return self._optimizer_idx

    def __init__(self):
        self.__db__ = DefaultOrderedDict(OrderedSet)
        self._names = set()
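
# Once registered, these options surface as attributes on theano.config and
# can be overridden through THEANO_FLAGS. A minimal usage sketch, run as a
# separate script (assumes a Theano installation where the vars above are
# registered):
import os
os.environ['THEANO_FLAGS'] = 'optdb.max_use_ratio=10'  # must be set before importing theano
import theano
print(theano.config.optdb.position_cutoff)  # inf unless overridden
print(theano.config.optdb.max_use_ratio)    # 10.0, taken from THEANO_FLAGS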
Example 2
             # if theano.sandbox.cuda is loaded or not.
             in_c_key=False)

AddConfigVar('gpuarray.sync',
             """If True, every op will make sure its work is done before
                returning.  Setting this to True will slow down execution,
                but give much more accurate results in profiling.""",
             BoolParam(False),
             in_c_key=True)

AddConfigVar('gpuarray.preallocate',
             """If 0 it doesn't do anything.  If between 0 and 1 it
             will preallocate that fraction of the total GPU memory.
             If 1 or greater it will preallocate that amount of memory
             (in megabytes).""",
             FloatParam(0, lambda i: i >= 0),
             in_c_key=False)
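
# The second argument to FloatParam above is a validator: assignments that
# fail it are rejected. A minimal sketch of that contract (hypothetical
# helper, not Theano's actual error path):
def _check_preallocate(value, is_valid=lambda i: i >= 0):
    value = float(value)
    if not is_valid(value):
        raise ValueError('gpuarray.preallocate: invalid value %r' % value)
    return value

_check_preallocate(0.5)   # ok: preallocate 50% of total GPU memory
_check_preallocate(512)   # ok: preallocate 512 MB
# _check_preallocate(-1)  # would raise ValueError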


def safe_no_dnn_workmem(workmem):
    """
    Make sure the user is not attempting to use `dnn.conv.workmem`.
    """
    if workmem:
        raise RuntimeError(
            'The option `dnn.conv.workmem` has been removed and should '
            'not be used anymore. Please use the option '
            '`dnn.conv.algo_fwd` instead.')
    return True
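
# Behaviour follows directly from the code above: the empty default passes,
# anything else raises and points the user at the replacement option.
safe_no_dnn_workmem('')           # returns True
try:
    safe_no_dnn_workmem('small')
except RuntimeError as e:
    print(e)                      # mentions `dnn.conv.algo_fwd`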

AddConfigVar('dnn.conv.workmem',
Example 3
        """Name of the cuda blas library for the linker.""",
        StrParam('cublas'))

AddConfigVar('lib.cnmem',
             """Do we enable CNMeM or not (a faster CUDA memory allocator).

             The parameter represents the start size (in MB or % of
             total GPU memory) of the memory pool.

             0: not enabled.
             0 < N <= 1: % of the total GPU memory (clipped to .985 for driver memory)
             > 1: use that number of MB of memory.

             """,
             # We should not mix both allocators, so overriding is disallowed.
             FloatParam(0, lambda i: i >= 0, allow_override=False),
             in_c_key=False)
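
# A hypothetical helper (not part of Theano) that spells out the value
# rules documented above for lib.cnmem:
def _cnmem_pool_mb(value, total_gpu_mb):
    if value == 0:
        return 0                                  # CNMeM disabled
    if 0 < value <= 1:
        return min(value, .985) * total_gpu_mb    # fraction, clipped for the driver
    return value                                  # absolute size in MB

print(_cnmem_pool_mb(0.5, 8000))   # 4000.0
print(_cnmem_pool_mb(2048, 8000))  # 2048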

# is_nvcc_available() is called here to initialize global vars in the
# nvcc_compiler module.
nvcc_compiler.is_nvcc_available()

# Compile cuda_ndarray.cu
# This needs nvcc (part of CUDA) to be installed. If it is not, a warning is
# printed and this module will not work properly (we set `cuda_available`
# to False).

# This variable is True by default, and is set to False if nvcc is not
# available, there is no CUDA card, or something goes wrong when
# trying to initialize CUDA.
cuda_available = True
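
# A minimal sketch (assumed shape, not the actual module code) of the
# pattern the comments above describe:
try:
    # ... compile cuda_ndarray.cu with nvcc and initialize the device ...
    pass
except Exception:
    # nvcc missing, no CUDA card, or CUDA failed to initialize.
    cuda_available = False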
Example 4
import StringIO
import sys

if sys.version_info[:2] >= (2, 5):
    from collections import defaultdict
else:
    from python25 import defaultdict

import numpy
import opt
from theano.configparser import AddConfigVar, FloatParam
from theano import config
AddConfigVar('optdb.position_cutoff',
             'Where to stop earlier during optimization. It represents the'
             ' position of the optimizer at which to stop.',
             FloatParam(numpy.inf),
             in_c_key=False)
# Upgraded to 20 to avoid the EquilibriumOptimizer max-use-ratio error
# being triggered by constant folding. (Can the max ratio be increased
# for constant folding only, somehow?)
AddConfigVar('optdb.max_use_ratio',
             'A ratio that prevents infinite loops in EquilibriumOptimizer.',
             FloatParam(20),
             in_c_key=False)


class DB(object):
    def __hash__(self):
        if not hasattr(self, '_optimizer_idx'):
            self._optimizer_idx = opt._optimizer_idx[0]