Пример #1
0
 def __init__(self, sprintExecPath, minPythonControlVersion=2, sprintConfigStr="", sprintControlConfig=None,
              usePythonSegmentOrder=True):
   """
   Stores the setup for controlling a Sprint (RASR) subprocess and calls self.init() at the end.

   :param str sprintExecPath: this executable will be called for the sub proc.
   :param int minPythonControlVersion: will be checked in the subprocess. via Sprint PythonControl
   :param str sprintConfigStr: passed to Sprint as command line args.
     can have "config:" prefix - in that case, looked up in config.
     handled via eval_shell_str(), can thus have lazy content (if it is callable, will be called).
   :param dict[str]|None sprintControlConfig: passed to SprintControl.init().
   :param bool usePythonSegmentOrder: stored as-is; presumably lets the Python side control
     the segment order in the subprocess -- semantics not visible here, confirm in init().
   """
   assert os.path.exists(sprintExecPath)
   self.sprintExecPath = sprintExecPath
   self.minPythonControlVersion = minPythonControlVersion
   if sprintConfigStr.startswith("config:"):
     # "config:<key>": resolve the actual Sprint config string via the global RETURNN config.
     from returnn.config import get_global_config
     config = get_global_config()
     assert config
     sprintConfigStr = config.typed_dict[sprintConfigStr[len("config:"):]]
   self.sprintConfig = eval_shell_str(sprintConfigStr)
   self.sprintControlConfig = sprintControlConfig
   self.usePythonSegmentOrder = usePythonSegmentOrder
   # NOTE(review): child_pid stays None here; presumably set once the subprocess is started.
   self.child_pid = None  # type: typing.Optional[int]
   self.parent_pid = os.getpid()
   # There is no generic way to see whether Python is exiting.
   # This is our workaround. We check for it in self.run_inner().
   self.python_exit = False
   atexit.register(self.exit_handler)  # ensure self.exit_handler runs at interpreter exit
   self._cur_seg_name = None
   self._cur_posteriors_shape = None
   self.is_calculating = False
   self.init()
Пример #2
0
    def create_bias(self, n, prefix='b', name="", init_eval_str=None):
        """
        Create a bias parameter (theano shared variable) for this layer.

        :param int n: output dimension
        :param str prefix: prefix for the auto-generated param name
        :param str name: explicit param name; if empty, derived from prefix and self.name
        :param str|None init_eval_str: Python expression producing the initial values;
          falls back to self.bias_init if not given
        :rtype: theano.shared
        """
        if not name:
            name = "%s_%s" % (prefix, self.name)
            if name in self.params:
                # Disambiguate if a param with this name already exists.
                name += "_%i" % len(self.params)
        if not init_eval_str:
            init_eval_str = self.bias_init
        # With depth > 1, each of the `depth` sub-parts gets its own bias vector.
        if self.depth > 1:
            size = (self.depth, n)
        else:
            size = (n, )

        def random_normal(scale, loc=0.0):
            return self.rng.normal(loc=loc, scale=scale, size=size)

        def random_uniform(l, loc=0.0):
            return self.rng.uniform(low=-l + loc, high=l + loc, size=size)

        import returnn.config as config_mod
        import returnn.util.basic as util
        try:
            config = config_mod.get_global_config()
        except Exception:
            # No global config available (e.g. standalone usage).
            # Init expressions referencing `config` will then fail, which is intended.
            config = None
        else:
            config = util.DictAsObj(config.typed_dict)
        # Names available to the eval'd init expression.
        eval_locals = {
            "numpy": numpy,
            "rng": self.rng,
            "config": config,
            "self": self,
            "n": n,
            "name": name,
            "sqrt": numpy.sqrt,
            "log": numpy.log,
            "zeros": (lambda: numpy.zeros(size, dtype=theano.config.floatX)),
            "random_normal": random_normal,
            "random_uniform": random_uniform
        }
        # NOTE: init_eval_str comes from the user config, i.e. trusted input by design.
        values = eval(init_eval_str, eval_locals)
        values = numpy.asarray(values, dtype=theano.config.floatX)
        # Bugfix: with self.depth > 1, the initializers above produce shape (depth, n),
        # which the old check `values.shape == (n,)` always rejected. Accept both the
        # depth-aware target shape and a plain (n,) vector (backward-compatible).
        assert values.shape == size or values.shape == (n, )
        return self.shared(values, name)
Пример #3
0
def get_ctx(config=None):
  """
  Create (on first call) and return the global HorovodContext singleton.

  :param Config|None config: config to read "use_horovod" from;
    falls back to the global RETURNN config if not given
  :returns: the global context if Horovod is enabled, or None otherwise.
    If we did not setup the context yet, it will automatically create it.
  :rtype: HorovodContext|None
  """
  global _is_set_up, _ctx
  if _is_set_up:
    # Setup already happened; _ctx stays None when Horovod is disabled.
    return _ctx
  if not config:
    from returnn.config import get_global_config
    config = get_global_config()
  # Mark setup as done first, so we never retry it, even when disabled.
  _is_set_up = True
  if config.is_true("use_horovod"):
    _ctx = HorovodContext(config=config)
    return _ctx
  return None
Пример #4
0
 def _create_eval_weights(self, n, m, name, default_name_prefix, init_eval_str):
   """
   Create an (n, m) weight matrix (theano shared variable), initialized by
   evaluating the Python expression init_eval_str.

   :param int n: input dimension
   :param int m: output dimension
   :param str|None name: layer name; auto-generated from default_name_prefix if empty
   :param str default_name_prefix: prefix for the auto-generated param name
   :param str init_eval_str: Python expression, evaluated with numpy/theano/rng/config
     and the init helper lambdas below in scope
   :rtype: theano.shared
   """
   if not name: name = "%s_%s_%i" % (default_name_prefix, self.name, len(self.params))
   from returnn.config import get_global_config
   import returnn.util.basic as util
   try:
     config = get_global_config()
   except Exception:
     # No global config available; init expressions referencing `config` will fail then.
     config = None
   else:
     config = util.DictAsObj(config.typed_dict)
   # Names available to the eval'd init expression.
   eval_locals = {
     "numpy": numpy,
     "theano": theano,
     "rng": self.rng,
     "config": config,
     "self": self,
     "n": n,
     "m": m,
     "name": name,
     "sqrt": numpy.sqrt,
     "eye": (lambda N=n, M=m: numpy.eye(N, M, dtype=theano.config.floatX)),
     "random_normal": (
     lambda scale=None, **kwargs: self.create_random_normal_weights(n, m, scale=scale, name=name, **kwargs)),
     "random_uniform": (
     lambda l=None, p=None, **kwargs: self.create_random_uniform_weights(n, m, p=p, l=l, name=name, **kwargs)),
     "random_unitary": (lambda **kwargs: self.create_random_unitary_weights(n, m, name=name, **kwargs)),
     "random_unitary_tiled": (lambda **kwargs: self.create_random_unitary_tiled_weights(n, m, name=name, **kwargs))
   }
   # NOTE: init_eval_str comes from the user config, i.e. trusted input by design.
   v = eval(init_eval_str, eval_locals)
   if isinstance(v, numpy.ndarray):
     # Plain arrays are converted to floatX and wrapped into a shared variable;
     # the expression may also return a shared variable directly.
     v = numpy.asarray(v, dtype=theano.config.floatX)
     v = self.shared(v, name)
   assert isinstance(v, theano.compile.SharedVariable)
   assert v.ndim == 2
   vshape = v.get_value(borrow=True, return_internal_type=True).shape
   assert vshape == (n, m)
   return v
Пример #5
0
    def perform(self, node, inputs, output_storage, params=None):
      """
      Compute the Sprint-based loss and error signal for one batch.

      :param node: theano Apply node (unused here)
      :param inputs: (log_posteriors, seq_lengths)
      :param output_storage: outputs written in-place: [0][0] = loss, [1][0] = error signal
      :param params: unused
      """
      start_time = time.time()
      log_posteriors, seq_lengths = inputs

      # Sanity checks: NaN/Inf in the posteriors would corrupt the Sprint loss computation.
      if numpy.isnan(log_posteriors).any():
        print('SprintErrorSigOp: log_posteriors contain NaN!', file=log.v1)
      if numpy.isinf(log_posteriors).any():
        print('SprintErrorSigOp: log_posteriors contain Inf!', file=log.v1)
        print('SprintErrorSigOp: log_posteriors:', log_posteriors, file=log.v1)

      # Lazily start the Sprint instance pool on first use.
      if self.sprint_instance_pool is None:
        print("SprintErrorSigOp: Starting Sprint %r" % self.sprint_opts, file=log.v3)
        self.sprint_instance_pool = SprintInstancePool.get_global_instance(sprint_opts=self.sprint_opts)

      assert isinstance(self.sprint_instance_pool, SprintInstancePool)  # PyCharm confused otherwise
      loss, errsig = self.sprint_instance_pool.get_batch_loss_and_error_signal(log_posteriors, seq_lengths)
      output_storage[0][0] = loss
      output_storage[1][0] = errsig

      print('SprintErrorSigOp: avg frame loss for segments:', loss.sum() / seq_lengths.sum(), file=log.v5)
      end_time = time.time()
      # Read the debug flag from the global config once, then cache it on the op instance.
      if self.debug_perform_time is None:
        from returnn.config import get_global_config
        config = get_global_config()
        self.debug_perform_time = config.bool("debug_SprintErrorSigOp_perform_time", False)
      if self.debug_perform_time:
        print("SprintErrorSigOp perform time:", end_time - start_time, file=log.v1)
        from returnn.theano.device import deviceInstance
        assert deviceInstance.is_device_proc()
        # Time from device compute start until this op started = forward pass time.
        forward_time = start_time - deviceInstance.compute_start_time
        print("SprintErrorSigOp forward time:", forward_time, file=log.v1)
Пример #6
0
def get_layer_class(name, raise_exception=True):
    """
    Resolve a layer class by its name, either from the registry or from the config.

    :type name: str
    :rtype: type(NetworkHiddenLayer.HiddenLayer)
    """
    if name in LayerClasses:
        return LayerClasses[name]
    if not name.startswith("config."):
        # Unknown name and not a config reference.
        if raise_exception:
            raise Exception("get_layer_class: invalid layer type: %s" % name)
        return None
    # "config.<key>": look up a class defined directly in the user config.
    from returnn.config import get_global_config
    import inspect
    cls = get_global_config().typed_value(name[len("config."):])
    if not inspect.isclass(cls):
        if raise_exception:
            raise Exception("get_layer_class: %s not found" % name)
        return None
    if cls.layer_class is None:
        # Will make Layer.save() (to HDF) work correctly.
        cls.layer_class = name
    return cls
Пример #7
0
import returnn  # pip install returnn
from returnn.tf.engine import Engine
from returnn.datasets import init_dataset
from returnn.config import get_global_config
from returnn.util.basic import get_login_username
from returnn.util import better_exchook

# Debug output: report which TF and RETURNN versions are actually in use.
print("TF version:", tf.__version__)
print("RETURNN imported from:", returnn.__file__)
print("RETURNN version:", returnn.__version__)
print("RETURNN long version:", returnn.__long_version__)

# Install nicer stack traces for uncaught exceptions.
better_exchook.install()

# Get the global RETURNN config; auto_create makes a fresh one if none was set up yet.
config = get_global_config(auto_create=True)
config.update(
    dict(
        batching="random",
        batch_size=5000,
        max_seqs=10,
        chunking="0",
        network={
            "fw0": {
                "class": "rec",
                "unit": "NativeLstm2",
                "dropout": 0.1,
                "n_out": 10,
                "from": "data:data"
            },
            "output": {