Example #1
0
 def function(self, name=None):
     """Compile and return a theano function computing this representation.

     Parameters
     ----------
     name : str, optional
         Name given to the compiled function.
     """
     x = tensor.matrix()
     out = self(x)
     if not self.cpu_only:
         return theano.function([x], out, name=name)
     # NOTE(review): presumably keeps the compiled graph off the GPU -- confirm
     return theano.function([x], out, name=name,
                            mode=get_default_mode().excluding("gpu"))
Example #2
0
def get_mode(gpu):
    """Return a copy of the default compilation mode, optionally GPU-enabled.

    Parameters
    ----------
    gpu : bool
        When true, include the GPU optimization passes in the mode.
    """
    result = copy.copy(get_default_mode())
    if gpu:
        result = result.including('gpu', 'gpu_local_optimizations',
                                  'local_cut_gpu_host_gpu',
                                  'local_gpu_multinomial')
    # Avoid the pure-Python linker; fall back to the mixed C/Python one.
    if isinstance(result.linker, theano.gof.PerformLinker):
        result.linker = predefined_linkers['c|py']
    return result
Example #3
0
 def function(self, name=None):
     """Build and return a compiled theano function for this representation.

     Parameters
     ----------
     name : str, optional
         Name attached to the compiled function.
     """
     mat = tensor.matrix()
     if self.cpu_only:
         cpu_mode = get_default_mode().excluding('gpu')
         return theano.function([mat], self(mat), name=name, mode=cpu_mode)
     return theano.function([mat], self(mat), name=name)
Example #4
0
def get_mode(gpu):
    """Return the compilation mode to use for these tests.

    Parameters
    ----------
    gpu : bool
        When true, enable the GPU-related optimization passes.
    """
    result = get_default_mode()
    # Upgrade FAST_COMPILE to FAST_RUN before adding the GPU passes.
    if theano.config.mode == 'FAST_COMPILE':
        result = theano.compile.get_mode('FAST_RUN')
    if gpu:
        result = result.including('gpu', 'gpu_local_optimizations',
                                  'local_cut_gpu_host_gpu',
                                  'local_gpu_multinomial')
    return result
Example #5
0
def get_mode(gpu):
    """Select the theano compilation mode, with GPU passes when requested."""
    base = get_default_mode()
    if theano.config.mode == 'FAST_COMPILE':
        # Swap FAST_COMPILE for FAST_RUN so graph optimizations run.
        base = theano.compile.get_mode('FAST_RUN')
    if not gpu:
        return base
    return base.including('gpu', 'gpu_local_optimizations',
                          'local_cut_gpu_host_gpu',
                          'local_gpu_multinomial')
Example #6
0
def get_mode(gpu):
    """Return a tweaked copy of the default mode, optionally GPU-enabled.

    Parameters
    ----------
    gpu : bool
        When true, include the GPU optimization passes.
    """
    result = copy.copy(get_default_mode())
    if gpu:
        result = result.including("gpu", "gpu_local_optimizations",
                                  "local_cut_gpu_host_gpu",
                                  "local_gpu_multinomial")
    # Replace the pure-Python linker with the mixed C/Python one.
    if isinstance(result.linker, theano.gof.PerformLinker):
        result.linker = predefined_linkers["c|py"]
    # Request C thunks whenever the linker supports them.
    if hasattr(result.linker, "c_thunks"):
        result.linker.c_thunks = True
    return result
Example #7
0
 def setUp(self):
     """Prepare fixtures: arrays containing NaN/Inf plus symbolic variables."""
     raw_values = [0, 1, numpy.nan, numpy.inf, -numpy.inf,
                   [numpy.nan, numpy.inf, -numpy.inf, 0, 1, -1]]
     self.test_vals = [numpy.array(v, dtype=config.floatX)
                       for v in raw_values]
     self.scalar = tensor.scalar()
     self.vector = tensor.vector()
     self.mode = get_default_mode()
     if isinstance(self.mode, theano.compile.debugmode.DebugMode):
         # DebugMode rejects NaN / Inf by default; allow them for these tests.
         self.mode = copy(self.mode)
         self.mode.check_isfinite = False
Example #8
0
    def function(self, name=None):
        """
        Compile a theano function mapping inputs to this representation.

        Parameters
        ----------
        name : string, optional
            name of the function
        """
        x = tensor.matrix()
        kwargs = {'name': name}
        if self.cpu_only:
            # Keep the graph off the GPU when the model is CPU-only.
            kwargs['mode'] = get_default_mode().excluding('gpu')
        return theano.function([x], self(x), **kwargs)
Example #9
0
 def setUp(self):
     """Create fixtures for the NaN / Inf handling tests."""
     self.test_vals = []
     for value in (0,
                   1,
                   numpy.nan,
                   numpy.inf,
                   -numpy.inf,
                   [numpy.nan, numpy.inf, -numpy.inf, 0, 1, -1]):
         self.test_vals.append(numpy.array(value, dtype=config.floatX))
     self.scalar = tensor.scalar()
     self.vector = tensor.vector()
     self.mode = get_default_mode()
     if isinstance(self.mode, theano.compile.debugmode.DebugMode):
         # Disable the check preventing usage of NaN / Inf values.
         self.mode = copy(self.mode)
         self.mode.check_isfinite = False
Example #10
0
def inplace_func(
    inputs,
    outputs,
    mode=None,
    allow_input_downcast=False,
    on_unused_input="raise",
    name=None,
):
    """Compile a theano function that accepts in-place output expressions.

    Thin wrapper around ``function`` that sets ``accept_inplace=True`` and
    substitutes the default mode when none is supplied.
    """
    effective_mode = get_default_mode() if mode is None else mode
    return function(inputs,
                    outputs,
                    mode=effective_mode,
                    allow_input_downcast=allow_input_downcast,
                    accept_inplace=True,
                    on_unused_input=on_unused_input,
                    name=name)
Example #11
0
def test_naacl_model(iters_per_unsup=3, iters_per_sup=3,
        optimizer=None, realistic=False):
    """Smoke-test the NAACL model: build it, run a few unsupervised
    pretraining updates and a few supervised finetuning updates, and
    check selected costs against recorded reference values.

    Parameters
    ----------
    iters_per_unsup : int
        Pretraining updates per outer iteration (reference values assume 3).
    iters_per_sup : int
        Finetuning updates per outer iteration (reference value assumes 10).
    optimizer : optional
        When given, compile with a c|py ``theano.Mode`` using this optimizer;
        otherwise use the default mode.
    realistic : bool
        Build the "realistic" model variant instead of the default one.
    """
    #print "BUILDING MODEL"
    import time
    t = time.time()

    if optimizer:
        mode = theano.Mode(linker='c|py', optimizer=optimizer)
    else:
        mode = get_default_mode()

    # DebugMode is far slower; shrink the workload to keep the test tractable.
    if mode.__class__.__name__ == 'DebugMode':
        iters_per_unsup = 1
        iters_per_sup = 1

    if realistic:
        m = create_realistic(compile_mode=mode)
    else:
        m = create(compile_mode=mode)

    #print 'BUILD took %.3fs'%(time.time() - t)
    # Walk the compiled pretraining graph; prog_str collects a textual trace
    # (the print below is a disabled debugging dump).
    prog_str = []
    idx_of_node = {}
    for i, node in enumerate(m.pretraining_update.maker.fgraph.toposort()):
        idx_of_node[node] = i
        if False and i > -1:
            print '   ', i, node, [(ii, idx_of_node.get(ii.
                owner, 'IN')) for ii in node.inputs]
        prog_str.append(str(node))
    #print input_pretraining_gradients[4].owner.inputs
    #print input_pretraining_gradients[4].owner.inputs[1].owner.inputs
    #sys.exit()

    #print "PROGRAM LEN %i HASH %i"% (len(m.pretraining_update.maker.fgraph.apply_nodes), reduce(lambda a, b: hash(a) ^ hash(b),prog_str))

    # Fixed seed so the reference cost values below stay reproducible.
    rng = N.random.RandomState(unittest_tools.fetch_seed(23904))

    # Three random minibatches of 10 examples each, plus integer targets.
    inputs = [rng.rand(10, m.input_size) for i in 1, 2, 3]
    targets = N.asarray([0, 3, 4, 2, 3, 4, 4, 2, 1, 0])
    #print inputs

    #print 'UNSUPERVISED PHASE'
    t = time.time()
    for i in xrange(3):
        for j in xrange(iters_per_unsup):
            # The deprecated compile.module interface is known to raise
            # ValueError/TypeError instead of a sensible warning here.
            try:
                known_fail = False
                m.pretraining_update(*inputs)
            except ValueError:
                known_fail = True
            except TypeError:
                known_fail = True
            if known_fail:
                raise KnownFailureTest("Deprecated compile.module fails to "
                    "give a sensible warning when updates to a variable "
                    "have the wrong type")
        s0, s1 = [str(j) for j in m.pretraining_update(*inputs)]
        #print 'huh?', i, iters_per_unsup, iters_per_unsup * (i+1), s0, s1
    # Reference costs recorded for the default iteration count.
    if iters_per_unsup == 3:
        assert s0.startswith('0.927793')  # '0.403044')
        assert s1.startswith('0.068035')  # '0.074898')
    #print 'UNSUPERVISED took %.3fs'%(time.time() - t)

    #print 'FINETUNING GRAPH'
    #print 'SUPERVISED PHASE COSTS (%s)'%optimizer
    t = time.time()
    for i in xrange(3):
        # NOTE(review): this inner loop uses iters_per_unsup, not
        # iters_per_sup -- looks like a typo; with the default arguments the
        # iters_per_sup == 10 check below never fires. Confirm before fixing.
        for j in xrange(iters_per_unsup):
            m.finetuning_update(*(inputs + [targets]))
        s0 = str(m.finetuning_update(*(inputs + [targets])))
        #print iters_per_sup * (i+1), s0
    if iters_per_sup == 10:
        s0f = float(s0)
        assert 19.7042 < s0f and s0f < 19.7043
Example #12
0
def test_naacl_model(iters_per_unsup=3,
                     iters_per_sup=3,
                     optimizer=None,
                     realistic=False):
    """Smoke-test the NAACL model: build it, run unsupervised pretraining
    updates and supervised finetuning updates, and check selected costs
    against recorded reference values.

    Parameters
    ----------
    iters_per_unsup : int
        Pretraining updates per outer iteration (reference values assume 3).
    iters_per_sup : int
        Finetuning updates per outer iteration (reference value assumes 10).
    optimizer : optional
        When given, compile with a c|py ``theano.Mode`` using this optimizer;
        otherwise use the default mode.
    realistic : bool
        Build the "realistic" model variant instead of the default one.
    """
    #print "BUILDING MODEL"
    import time
    t = time.time()

    if optimizer:
        mode = theano.Mode(linker='c|py', optimizer=optimizer)
    else:
        mode = get_default_mode()

    # DebugMode is far slower; shrink the workload to keep the test tractable.
    if mode.__class__.__name__ == 'DebugMode':
        iters_per_unsup = 1
        iters_per_sup = 1

    if realistic:
        m = create_realistic(compile_mode=mode)
    else:
        m = create(compile_mode=mode)

    #print 'BUILD took %.3fs'%(time.time() - t)
    # Walk the compiled pretraining graph; prog_str collects a textual trace
    # (the print below is a disabled debugging dump).
    prog_str = []
    idx_of_node = {}
    for i, node in enumerate(m.pretraining_update.maker.fgraph.toposort()):
        idx_of_node[node] = i
        if False and i > -1:
            print '   ', i, node, [(ii, idx_of_node.get(ii.owner, 'IN'))
                                   for ii in node.inputs]
        prog_str.append(str(node))
    #print input_pretraining_gradients[4].owner.inputs
    #print input_pretraining_gradients[4].owner.inputs[1].owner.inputs
    #sys.exit()

    #print "PROGRAM LEN %i HASH %i"% (len(m.pretraining_update.maker.fgraph.apply_nodes), reduce(lambda a, b: hash(a) ^ hash(b),prog_str))

    # Fixed seed so the reference cost values below stay reproducible.
    rng = N.random.RandomState(unittest_tools.fetch_seed(23904))

    # Three random minibatches of 10 examples each, plus integer targets.
    inputs = [rng.rand(10, m.input_size) for i in 1, 2, 3]
    targets = N.asarray([0, 3, 4, 2, 3, 4, 4, 2, 1, 0])
    #print inputs

    #print 'UNSUPERVISED PHASE'
    t = time.time()
    for i in xrange(3):
        for j in xrange(iters_per_unsup):
            # The deprecated compile.module interface is known to raise
            # ValueError/TypeError instead of a sensible warning here.
            try:
                known_fail = False
                m.pretraining_update(*inputs)
            except ValueError:
                known_fail = True
            except TypeError:
                known_fail = True
            if known_fail:
                raise KnownFailureTest(
                    "Deprecated compile.module fails to "
                    "give a sensible warning when updates to a variable "
                    "have the wrong type")
        s0, s1 = [str(j) for j in m.pretraining_update(*inputs)]
        #print 'huh?', i, iters_per_unsup, iters_per_unsup * (i+1), s0, s1
    # Reference costs recorded for the default iteration count.
    if iters_per_unsup == 3:
        assert s0.startswith('0.927793')  # '0.403044')
        assert s1.startswith('0.068035')  # '0.074898')
    #print 'UNSUPERVISED took %.3fs'%(time.time() - t)

    #print 'FINETUNING GRAPH'
    #print 'SUPERVISED PHASE COSTS (%s)'%optimizer
    t = time.time()
    for i in xrange(3):
        # NOTE(review): this inner loop uses iters_per_unsup, not
        # iters_per_sup -- looks like a typo; with the default arguments the
        # iters_per_sup == 10 check below never fires. Confirm before fixing.
        for j in xrange(iters_per_unsup):
            m.finetuning_update(*(inputs + [targets]))
        s0 = str(m.finetuning_update(*(inputs + [targets])))
        #print iters_per_sup * (i+1), s0
    if iters_per_sup == 10:
        s0f = float(s0)
        assert 19.7042 < s0f and s0f < 19.7043
Example #13
0
    _grad_broadcast_unary_0_2_no_complex,
    _grad_broadcast_unary_abs1_no_complex,
    _grad_broadcast_unary_normal,
    _grad_broadcast_unary_normal_small_neg_range,
    makeBroadcastTester,
    rand_ranged,
    randint_ranged,
)
from theano import tensor
from theano.compile.mode import get_default_mode
from theano.configdefaults import config
from theano.tensor import inplace


# Tracks whether scipy.special is importable; gates the scipy-dependent tests.
imported_scipy_special = False
# Compilation mode used by tests that must run without scipy.
mode_no_scipy = get_default_mode()
try:
    import scipy.special
    import scipy.stats

    imported_scipy_special = True
except ImportError:
    # NOTE(review): presumably the scipy-free fallback is too slow under
    # FAST_COMPILE, hence the switch to FAST_RUN -- confirm.
    if config.mode == "FAST_COMPILE":
        mode_no_scipy = "FAST_RUN"


# We can't test it if scipy is not installed!
# Precomputing the result is brittle (it has been broken before!):
# any modification to the random numbers drawn here changes the
# inputs, and therefore the outputs.
if imported_scipy_special: