Example #1
def make_task(func, a, kw):
    # build a descriptive task name from the function name and its arguments
    ft_name = funcname(func) + '-task'
    if a:
        ft_name = ft_name + ':' + repr(a)
    if kw:
        ft_name = ft_name + ':' + repr(kw)
    return task_class(name=ft_name, func=func, func_args=a, func_kwargs=kw)
Example #2
def orm_auto_session(method):
  ''' Decorator to run a method in a session derived from `self.orm`
      if a session is not presupplied.
      Intended to assist classes with a `.orm` attribute.

      See `with_session` for details.
  '''

  if isgeneratorfunction(method):

    def orm_auto_session_wrapper(self, *a, session=None, **kw):
      ''' Yield from the method with a session.
      '''
      with using_session(orm=self.orm, session=session) as active_session:
        yield from method(self, *a, session=active_session, **kw)
  else:

    def orm_auto_session_wrapper(self, *a, session=None, **kw):
      ''' Call the method with a session.
      '''
      with using_session(orm=self.orm, session=session) as active_session:
        return method(self, *a, session=active_session, **kw)

  orm_auto_session_wrapper.__name__ = "@orm_auto_session(%s)" % (
      funcname(method),
  )
  orm_auto_session_wrapper.__doc__ = method.__doc__
  orm_auto_session_wrapper.__module__ = getattr(method, '__module__', None)
  return orm_auto_session_wrapper
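
For context, a minimal usage sketch, assuming a hypothetical `Thing` class whose `.orm` attribute is whatever `using_session` accepts as its `orm` parameter; the `text('SELECT 1')` query is only illustrative:

from sqlalchemy import text

class Thing:
  ''' Hypothetical class carrying a `.orm` attribute. '''

  def __init__(self, orm):
    self.orm = orm

  @orm_auto_session
  def ping(self, *, session):
    ''' Run a trivial query via the supplied or derived session. '''
    return session.execute(text('SELECT 1')).scalar()

# callers may omit `session` (one is derived from `self.orm`)
# or pass `session=...` to reuse an existing one:
#   Thing(my_orm).ping()
#   Thing(my_orm).ping(session=existing_session)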
Example #3
def __str__(self):
    s = super().__str__() + ":func=%s" % (funcname(self.func), )
    if self.fargs:
        s += ":fargs=%r" % (self.fargs, )
    if self.fkwargs:
        s += ":fkwargs=%r" % (self.fkwargs, )
    return s
Example #4
    def __init__(self, name, L, actions, outQ):
        ''' Initialise the Pipeline from `name`, Later instance `L`,
            list of filter functions `actions` and output queue `outQ`.

            Each action is either a 2-tuple of `(sig, functor)` or an
            object with a `.sig` attribute and a `.functor` method
            returning a callable.
        '''
        MultiOpenMixin.__init__(self)
        self.name = name
        self.later = L
        self.queues = [outQ]
        # counter tracking items in play
        self._busy = TrackingCounter(name="Pipeline<%s>._items" % (name, ))
        RHQ = outQ
        for index, action in reversed(list(enumerate(actions))):
            try:
                func_sig, functor = action
            except TypeError:
                func_sig = action.sig
                functor = action.functor(self.later)
            pq_name = ":".join((
                name,
                str(index),
                str(func_sig),
                funcname(functor),
            ))
            if func_sig == FUNC_ONE_TO_MANY:
                PQ = _PipelineStageOneToMany(pq_name, self, functor, RHQ)
            elif func_sig == FUNC_ONE_TO_ONE:
                PQ = _PipelineStageOneToOne(pq_name, self, functor, RHQ)
            elif func_sig == FUNC_SELECTOR:

                # bind the current functor as a default argument so that
                # multiple selector stages do not each close over the last one
                def selector(item, select_by=functor):
                    if select_by(item):
                        yield item

                PQ = _PipelineStageOneToMany(pq_name, self, selector, RHQ)
            elif func_sig == FUNC_MANY_TO_MANY:
                PQ = _PipelineStageManyToMany(pq_name, self, functor, RHQ)
            elif func_sig == FUNC_PIPELINE:
                PQ = _PipelineStagePipeline(pq_name, self, functor, RHQ)
            else:
                raise RuntimeError("unimplemented func_sig=%r, functor=%s" %
                                   (func_sig, functor))
            PQ.open()
            self.queues.insert(0, PQ)
            RHQ = PQ
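
A brief sketch of the two accepted action shapes; the `UppercaseAction` class and the use of `str.lower`/`str.upper` as functors are illustrative assumptions. Such a list could then be passed as the `actions` argument above:

class UppercaseAction:
    ''' Hypothetical action object: exposes `.sig` and a `.functor(later)` factory. '''
    sig = FUNC_ONE_TO_ONE

    def functor(self, later):
        # return the callable applied to each item; `later` is available
        # should the action need to defer further work
        return str.upper

actions = [
    (FUNC_ONE_TO_ONE, str.lower),  # 2-tuple form: (sig, functor)
    UppercaseAction(),             # object form: .sig attribute plus .functor() method
]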
Example #5
    def _defer(self, func, *args, **kwargs):
        ''' Defer a function via the internal `Later` queue.
            Hold an open on `self` so that it cannot shut down
            while the deferred function is pending.
        '''
        self.open()

        def with_self():
            with self:
                return func(*args, **kwargs)

        with_self.__name__ = "with_self:" + funcname(func)
        LF = self.__funcQ.defer(with_self)
        LF.notify(lambda LF: self.close())
        return LF
Example #6
def exc_fold(func, exc_types=None, exc_return=False):
    ''' Decorator to catch specific exception types and return a defined default value.
    '''
    if exc_types is None:
        # assumed fallback so the default is usable: fold any Exception
        exc_types = (Exception,)

    def wrapped(*a, **kw):
        try:
            return func(*a, **kw)
        except exc_types as e:
            error("%s", e)
            return exc_return

    wrapped.__name__ = ("@exc_fold[%r=>%r]%s" %
                        (exc_types, exc_return, funcname(func)))
    doc = getattr(func, '__doc__', '')
    if doc:
        wrapped.__doc__ = wrapped.__name__ + '\n' + doc
    return wrapped
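
A brief usage sketch; the `to_int` helper is an illustrative assumption, folding `ValueError` into a `None` return:

def to_int(s):
    return int(s)

safe_to_int = exc_fold(to_int, exc_types=(ValueError,), exc_return=None)

safe_to_int("42")   # -> 42
safe_to_int("xyz")  # -> None, after the ValueError is logged via error()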
Example #7
# inner wrapper from a progress-reporting decorator:
# `func`, `label` and `report_print` come from the enclosing decorator scope
def wrapper(*a,
            progress=None,
            progress_name=None,
            progress_total=None,
            progress_report_print=None,
            **kw):
    if progress_name is None:
        progress_name = label or funcname(func)
    if progress_report_print is None:
        progress_report_print = report_print
    if progress is None:
        upd = Upd()
        if not upd.disabled:
            progress = Progress(name=progress_name, total=progress_total)
            with progress.bar(upd=upd, report_print=progress_report_print):
                return func(*a, progress=progress, **kw)
    return func(*a, progress=progress, **kw)
Example #8
  def __init__(self, func, name=None, retry_delay=None):
    ''' Initialise a `LateFunction`.

        Parameters:
        * `func` is the callable for later execution.
        * `name`, if supplied, specifies an identifying name for the `LateFunction`.
        * `retry_delay`: time delay before retry of this function on `RetryError`.
          Default from `later.retry_delay`.
    '''
    Result.__init__(self)
    self.func = func
    if name is None:
      name = "LF-%d[%s]" % (seq(), funcname(func))
    if retry_delay is None:
      retry_delay = DEFAULT_RETRY_DELAY
    self.name = name
    self.retry_delay = retry_delay
Example #9
  def _after(self, LFs, R, func, *a, **kw):
    if not isinstance(LFs, list):
      LFs = list(LFs)
    if R is None:
      R = Result("Later.after(%s)" % (",".join(str(_) for _ in LFs)))
    elif not isinstance(R, Result):
      raise TypeError(
          "Later.after(LFs, R, func, ...): expected Result for R, got %r" %
          (R,)
      )

    def put_func():
      ''' Function to defer: run `func` and pass its return value to R.put().
      '''
      R.call(func, *a, **kw)

    put_func.__name__ = "%s._after(%r)[func=%s]" % (self, LFs, funcname(func))
    return after(LFs, None, lambda: self._defer(put_func))
Example #10
def bg(
    func,
    daemon=None,
    name=None,
    no_start=False,
    no_logexc=False,
    args=None,
    kwargs=None
):
  ''' Dispatch the callable `func` in its own `Thread`;
      return the `Thread`.

      Parameters:
      * `func`: a callable for the `Thread` target.
      * `daemon`: optional argument specifying the `.daemon` attribute.
      * `name`: optional argument specifying the `Thread` name,
        default: the name of `func`.
      * `no_logexc`: if false (the default), wrap `func` in `@logexc`.
      * `no_start`: optional argument, default `False`.
        If true, do not start the `Thread`.
      * `args`, `kwargs`: positional and keyword arguments passed to `func`.
  '''
  if name is None:
    name = funcname(func)
  if args is None:
    args = ()
  if kwargs is None:
    kwargs = {}

  ##thread_prefix = prefix() + ': ' + name
  thread_prefix = name

  def thread_body():
    with Pfx(thread_prefix):
      return func(*args, **kwargs)

  T = Thread(name=thread_prefix, target=thread_body)
  if not no_logexc:
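    # rebinding func here still takes effect: thread_body looks up func at call time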
    func = logexc(func)
  if daemon is not None:
    T.daemon = daemon
  if not no_start:
    T.start()
  return T
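
A quick usage sketch; the `fetch` function and its arguments are illustrative assumptions:

import time

def fetch(url, timeout=30):
  # placeholder work; a real function would do I/O here
  time.sleep(0.1)
  return url, timeout

T = bg(
    fetch,
    daemon=True,
    name="fetch-example",
    args=("https://example.com",),
    kwargs={"timeout": 10},
)
T.join()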
Example #11
  def _defer_iterable(self, it, outQ, test_ready=None):
    iterate = partial(next, iter(it))
    R = Result()
    iterations = [0]  # mutable single-item counter of values produced by the iterable

    @logexc
    def iterate_once():
      ''' Call `iterate`. Place the result on outQ.

          Close the queue at end of iteration or other exception.
          Otherwise, requeue ourself to collect the next iteration value.
      '''
      if test_ready is not None and not test_ready():
        raise RetryError("iterate_once: not ready yet")
      try:
        item = iterate()
      except StopIteration:
        outQ.close()
        R.result = iterations[0]
      except Exception as e:  # pylint: disable=broad-except
        exception(
            "defer_iterable: iterate_once: exception during iteration: %s", e
        )
        outQ.close()
        R.exc_info = sys.exc_info()
      else:
        iterations[0] += 1
        # put the item onto the output queue
        # this may itself defer various tasks (eg in a pipeline)
        debug("L.defer_iterable: iterate_once: %s.put(%r)", outQ, item)
        outQ.put(item)
        # now queue another iteration to run after those deferred tasks
        self._defer(iterate_once)

    iterate_once.__name__ = "%s:next(iter(%s))" % (
        funcname(iterate_once), getattr(it, '__name__', repr(it))
    )
    self._defer(iterate_once)
    return R
Example #12
def locked(func, initial_timeout=10.0, lockattr='_lock'):
  ''' A decorator for instance methods that must run within a lock.

      Decorator keyword arguments:
      * `initial_timeout`:
        the initial lock attempt timeout;
        if this is `>0` and is exceeded, a warning is issued
        and then an indefinite attempt is made.
        Default: `10.0`s
      * `lockattr`:
        the name of the attribute of `self`
        which references the lock object.
        Default `'_lock'`
  '''
  citation = "@locked(%s)" % (funcname(func),)

  def lockfunc(self, *a, **kw):
    ''' Obtain the lock and then call `func`.
    '''
    lock = getattr(self, lockattr)
    if initial_timeout > 0 and lock.acquire(timeout=initial_timeout):
      try:
        return func(self, *a, **kw)
      finally:
        lock.release()
    else:
      if initial_timeout > 0:
        warning(
            "%s: timeout after %gs waiting for %s<%s>.%s, continuing to wait",
            citation, initial_timeout,
            type(self).__name__, self, lockattr
        )
      with lock:
        return func(self, *a, **kw)

  lockfunc.__name__ = citation
  lockfunc.__doc__ = getattr(func, '__doc__', '')
  return lockfunc
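
A minimal usage sketch; the `Counter` class is an illustrative assumption, with its lock stored under the default `_lock` attribute name:

from threading import RLock

class Counter:
  ''' Hypothetical class whose methods must run under `self._lock`. '''

  def __init__(self):
    self._lock = RLock()  # the attribute named by `lockattr`
    self.n = 0

  @locked
  def incr(self):
    self.n += 1
    return self.n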
Example #13
def pfx(func, message=None, message_args=()):
    ''' General purpose @pfx for generators, methods etc.

      Parameters:
      * `func`: the function or generator function to decorate
      * `message`: optional prefix to use instead of the function name
      * `message_args`: optional arguments to embed in the prefix using `%`

      Example usage:

          @pfx
          def f(....):
              ....
    '''
    fname = funcname(func)
    if message is None:
        if message_args:
            raise ValueError("no message, but message_args=%r" %
                             (message_args, ))

    if isgeneratorfunction(func):

        # persistent in-generator stack to be reused across calls to
        # the context manager
        saved_stack = []
        if message is None:
            message = fname

        @contextdecorator
        def cmgrdeco(func, a, kw):
            ''' Context manager to note the entry `Pfx` stack height, append saved
                `Pfx` stack from earlier run, then after the iteration step save the
                top of the `Pfx` stack for next time.
            '''
            pfx_stack = Pfx._state.stack
            height = len(pfx_stack)
            pfx_stack.extend(saved_stack)
            with Pfx(message, *message_args):
                yield
            saved_stack[:] = pfx_stack[height:]
            pfx_stack[height:] = []

        wrapper = cmgrdeco(func)

    else:

        if message is None:

            def wrapper(*a, **kw):
                ''' Run function inside `Pfx` context manager.
                '''
                return pfx_call(func, *a, **kw)

        else:

            def wrapper(*a, **kw):
                ''' Run function inside `Pfx` context manager.
                '''
                with Pfx(message, *message_args):
                    return func(*a, **kw)

    wrapper.__name__ = "@pfx(%s)" % (fname, )
    wrapper.__doc__ = func.__doc__
    return wrapper
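
A short usage sketch; `load_config` and its path are illustrative, and this assumes `Pfx` prefixes exception and log messages with the call context:

@pfx
def load_config(path):
    with open(path) as f:  # a failure here is reported with the call context
        return f.read()

# a missing file now surfaces with a prefix along the lines of
#   load_config('missing.ini'): [Errno 2] No such file or directory: 'missing.ini'
load_config('missing.ini')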