# ProfileMode module header: imports, a deferred-import placeholder, and
# registration of the ProfileMode.* configuration variables.
import time  # required below by import_time = time.time(); missing in original

from theano.gof.link import WrapLinker
from theano.compile.mode import (Mode, register_mode, predefined_modes,
                                 predefined_linkers, predefined_optimizers)
from theano.gof.python25 import any
from theano import gof
from theano.configparser import config, AddConfigVar, IntParam, BoolParam
from theano.compile.function_module import FunctionMaker

run_cthunk = None  # Will be imported only when needed.

from profiling import ProfileStats

# Timestamp taken at module import; profiling reports measure elapsed
# time relative to this.
import_time = time.time()

AddConfigVar('ProfileMode.n_apply_to_print',
             "Number of apply instances to print by default",
             IntParam(15, lambda i: i > 0),
             in_c_key=False)

AddConfigVar('ProfileMode.n_ops_to_print',
             "Number of ops to print by default",
             IntParam(20, lambda i: i > 0),
             in_c_key=False)

AddConfigVar('ProfileMode.min_memory_size',
             """For the memory profile, do not print apply nodes if the size of their outputs (in bytes) is lower then this threshold""",
             IntParam(1024, lambda i: i >= 0),
             in_c_key=False)

# NOTE(review): the original source was truncated right after this call's
# docstring argument; the BoolParam(False) default and in_c_key=False below
# are reconstructed from upstream Theano — confirm against the repository.
AddConfigVar('ProfileMode.profile_memory',
             """Enable profiling of memory used by Theano functions""",
             BoolParam(False),
             in_c_key=False)
import logging from contextlib import contextmanager from theano import config from theano.configparser import AddConfigVar, IntParam _logger = logging.getLogger("theano.gof.compilelock") # If the user provided a logging level, we don't want to override it. if _logger.level == logging.NOTSET: # INFO will show the "Refreshing lock" messages _logger.setLevel(logging.INFO) AddConfigVar('compile.wait', """Time to wait before retrying to aquire the compile lock.""", IntParam(5, lambda i: i > 0, allow_override=False), in_c_key=False) def _timeout_default(): return config.compile.wait * 24 AddConfigVar('compile.timeout', """In seconds, time that a process will wait before deciding to override an existing lock. An override only happens when the existing lock is held by the same owner *and* has not been 'refreshed' by this owner for more than this period. Refreshes are done every half timeout period for running processes.""", IntParam(_timeout_default, lambda i: i >= 0, allow_override=False),
# Closes the AddConfigVar('compile.timeout', ...) registration opened above.
in_c_key=False)

# Policy when an entry of theano.function(inputs=...) is never used in the
# compiled graph: raise an error, warn, or silently ignore it.
# NOTE(review): EnumStr/BoolParam are presumably imported earlier in this
# file — not visible in this chunk; confirm.
AddConfigVar('on_unused_input',
             "What to do if a variable in the 'inputs' list of "
             " theano.function() is not used in the graph.",
             EnumStr('raise', 'warn', 'ignore'),
             in_c_key=False)

# This flag is used when we import Theano to initialize global variables.
# So changing it after import will not modify these global variables.
# This could be done differently... but for now we simply prevent it from
# being changed at runtime (allow_override=False).
AddConfigVar(
    'tensor.cmp_sloppy',
    "Relax tensor._allclose (0) not at all, (1) a bit, (2) more",
    IntParam(0, lambda i: i in (0, 1, 2), allow_override=False),
    in_c_key=False)

# Toggle the elemwise-fusion optimization applied in fast_run mode.
AddConfigVar(
    'tensor.local_elemwise_fusion',
    ("Enable or not in fast_run mode(fast_run optimization) the elemwise "
     "fusion optimization"),
    BoolParam(True),
    in_c_key=False)

# Same toggle, for the GPU variant of elemwise fusion.
AddConfigVar(
    'gpu.local_elemwise_fusion',
    ("Enable or not in fast_run mode(fast_run optimization) the gpu "
     "elemwise fusion optimization"),
    BoolParam(True),
    in_c_key=False)