def client_bg(self, rq_line, is_multiline=False, notify=None):
    ''' Dispatch a request `rq_line` in the background.
        Return a `Result` to collect the request result.

        Parameters:
        * `rq_line`: POP3 request text, without any terminating CRLF
        * `is_multiline`: true if a multiline response is expected,
          default `False`
        * `notify`: an optional handler for `Result.notify`,
          applied if not `None`

        *Note*: DOES NOT flush the send stream.
        Call `self.flush()` when a batch of requests has been submitted,
        before trying to collect the `Result`s.

        The `Result` will receive `[etc, lines]` on success where:
        * `etc` is the trailing portion of an ok response line
        * `lines` is a list of unstuffed text lines from the response
          if `is_multiline` is true, `None` otherwise

        The `Result` gets a list instead of a tuple so that
        a handler may clear it in order to release memory.

        Example:

            R = self.client_bg(f'RETR {msg_n}', is_multiline=True, notify=notify)
    '''
    with self._lock:
        self.sendline(rq_line)
        R = Result(rq_line)
        self._result_queue.put((R, is_multiline))
    R.extra.update(rq_line=rq_line)
    if notify is not None:
        R.notify(notify)
    return R
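# A minimal batching sketch for client_bg above. `client` and `msg_numbers`
# are hypothetical; `client` is assumed to be an instance of the class
# defining client_bg and flush. Requests are queued first, the send stream
# is flushed once, and only then are the Results collected.
def fetch_messages(client, msg_numbers):
    results = []
    for msg_n in msg_numbers:
        # queue the request in the background; nothing is flushed yet
        results.append(client.client_bg(f'RETR {msg_n}', is_multiline=True))
    # push the whole batch to the server before collecting any Result
    client.flush()
    bodies = []
    for R in results:
        # .get() blocks until this request's response has been processed
        etc, lines = R.get()
        bodies.append(lines)
    return bodies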
def test01after(self):
    R = self.R
    self.assertFalse(R.ready)
    R2 = Result()
    self.assertFalse(R2.ready)

    def add_R_R2():
        value = R.result + R2.result
        return value

    A = after([R, R2], None, add_R_R2)
    self.assertFalse(A.ready)
    self.assertFalse(R.ready)
    self.assertFalse(R2.ready)

    def delayed_completion():
        time.sleep(2)
        R.result = 1
        time.sleep(2)
        R2.result = 2

    threading.Thread(target=delayed_completion).start()
    Aresult = A.get()
    self.assertEqual(Aresult, 3)
    self.assertTrue(A.ready)
    self.assertTrue(R.ready)
    self.assertTrue(R2.ready)
def __init__(self, func, name=None, retry_delay=None):
    ''' Initialise a `LateFunction`.

        Parameters:
        * `func` is the callable for later execution.
        * `name`, if supplied, specifies an identifying name
          for the `LateFunction`.
        * `retry_delay`: time delay before retry of this function
          on `RetryError`. Default from `later.retry_delay`.
    '''
    Result.__init__(self)
    self.func = func
    if name is None:
        name = "LF-%d[%s]" % (seq(), funcname(func))
    if retry_delay is None:
        retry_delay = DEFAULT_RETRY_DELAY
    self.name = name
    self.retry_delay = retry_delay
def _complete(self, result, exc_info):
    ''' Wrapper for `Result._complete` which handles `RetryError`s.

        Further, if the function raises one of `NameError`,
        `AttributeError` or `RuntimeError`
        (broadly: "programmer errors"),
        report the stack trace to aid debugging.
    '''
    if exc_info:
        e = exc_info[1]
        if isinstance(e, RetryError):
            # resubmit this function
            warning("resubmit after RetryError: %s", e)
            self._resubmit()
            return
        if isinstance(e, (NameError, AttributeError, RuntimeError)):
            error("%s", e, exc_info=exc_info)
    Result._complete(self, result, exc_info)
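# A hedged sketch of the RetryError path handled above: a callable submitted
# for later execution may raise RetryError to be resubmitted (after
# `retry_delay`) rather than completing with an exception. `resource_ready`
# and `fetch` are hypothetical callables supplied by the caller.
def poll_resource(resource_ready, fetch):
    ''' Return fetch()'s value, asking to be retried while the resource
        is not yet ready.
    '''
    if not resource_ready():
        # _complete() above catches this and calls self._resubmit()
        raise RetryError("resource not ready, retry later")
    return fetch()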
def act_later(self, target):
    ''' Request that this Action occur on behalf of the Target `target`.
        Return a Result which reports the success or failure of the action.
    '''
    R = Result(name="%s.action(%s)" % (target, self))
    target.maker.defer(
        "%s:act[%s]" % (self, target),
        self._act, R, target
    )
    return R
def after(self, LFs, func, *a, **kw):
    ''' Submit a function to be run after the supplied LateFunctions `LFs`,
        return a Result instance for collection.
    '''
    if not isinstance(LFs, list):
        LFs = list(LFs)
    self.debug_make("after %s call %s(*%r, **%r)" % (LFs, func, a, kw))
    R = Result(
        "Maker.after(%s):%s" % (",".join(str(LF) for LF in LFs), func)
    )
    self._makeQ.after(LFs, R, func, *a, **kw)
    return R
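# A minimal usage sketch for Maker.after above: run a hypothetical report()
# once two previously submitted LateFunctions (LF1 and LF2, also hypothetical)
# have completed, then block for the combined Result.
def report_when_done(maker, LF1, LF2):
    def report():
        return "both prerequisites completed"
    R = maker.after([LF1, LF2], report)
    # .get() blocks until report() has run, as in test01after above
    return R.get()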
def inner(*a, **kw):
    if not force and not ifdebug():
        return f(*a, **kw)
    filename, lineno = inspect.stack()[1][1:3]
    n = seq()
    R = Result()
    T = threading.Thread(
        target=_debug_watcher,
        args=(filename, lineno, n, f.__name__, R)
    )
    T.daemon = True
    T.start()
    debug(
        "%s:%d: [%d] call %s(*%r, **%r)",
        filename, lineno, n, f.__name__, a, kw
    )
    start = time.time()
    try:
        retval = f(*a, **kw)
    except Exception as e:
        error("EXCEPTION from %s(*%s, **%s): %s", f, a, kw, e)
        raise
    end = time.time()
    debug(
        "%s:%d: [%d] called %s, elapsed %gs, got %r",
        filename, lineno, n, f.__name__, end - start, retval
    )
    R.put(retval)
    return retval
def __init__(self, maker, name, context, prereqs, postprereqs, actions):
    ''' Initialise a new target.

        Parameters:
        * `maker`: the Maker with which this Target is associated.
        * `context`: the file context, for citations.
        * `name`: the name of the target.
        * `prereqs`: macro expression to produce prereqs.
        * `postprereqs`: macro expression to produce post-inference prereqs.
        * `actions`: a list of actions to build this Target.

        The same actions list is shared amongst all Targets defined by a
        common clause in the Mykefile, and is extended during the Mykefile
        parse _after_ defining those Targets. So we do not modify it in
        this class; instead we extend `.pending_actions` when `.require()`
        is called the first time, just as we do for a `:make` directive.
    '''
    Result.__init__(self, name=name, lock=RLock())
    self.maker = maker
    self.context = context
    self.shell = SHELL
    self._prereqs = prereqs
    self._postprereqs = postprereqs
    self.actions = actions
    self.failed = False
def _after(self, LFs, R, func, *a, **kw):
    if not isinstance(LFs, list):
        LFs = list(LFs)
    if R is None:
        R = Result("Later.after(%s)" % (",".join(str(_) for _ in LFs)))
    elif not isinstance(R, Result):
        raise TypeError(
            "Later.after(LFs, R, func, ...): expected Result for R, got %r"
            % (R,)
        )

    def put_func():
        ''' Function to defer: run `func` and pass its return value to `R.put()`.
        '''
        R.call(func, *a, **kw)

    put_func.__name__ = "%s._after(%r)[func=%s]" % (self, LFs, funcname(func))
    return after(LFs, None, lambda: self._defer(put_func))
def request(self, rq_type, flags=0, payload=b'', decode_response=None, channel=0):
    ''' Compose and dispatch a new request, returning a `Result`.

        Allocates a new tag, a `Result` to deliver the response,
        and records the response decode function for use
        when the response arrives.

        Parameters:
        * `rq_type`: request type code, an int
        * `flags`: optional flags to accompany the request, an int;
          default `0`
        * `payload`: optional bytes-like object to accompany the request;
          default `b''`
        * `decode_response`: optional callable accepting
          `(response_flags, response_payload_bytes)` and returning the
          decoded response payload value;
          if unspecified, the response payload bytes are used
        * `channel`: the channel on which to dispatch the request;
          default `0`

        The `Result` will yield an `(ok, flags, payload)` tuple, where:
        * `ok`: whether the request was successful
        * `flags`: the response flags
        * `payload`: the response payload,
          decoded by `decode_response` if specified
    '''
    if rq_type < 0:
        raise ValueError("rq_type may not be negative (%s)" % (rq_type,))
    # reserve type 0 for end-of-requests
    rq_type += 1
    tag = self._new_tag()
    R = Result()
    self._pending_add(channel, tag, Request_State(decode_response, R))
    self._queue_packet(
        Packet(
            is_request=True,
            channel=channel,
            tag=tag,
            flags=flags,
            rq_type=rq_type,
            payload=payload
        )
    )
    return R
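# A hedged usage sketch for request() above. `stream` is assumed to be an
# instance of the class defining request(); the request type code, payload
# and decoder are hypothetical. The Result yields the (ok, flags, payload)
# tuple described in the docstring.
def fetch_text(stream, key_bytes):
    R = stream.request(
        0,  # hypothetical request type code (type 0 is remapped internally)
        payload=key_bytes,
        decode_response=lambda rflags, rpayload: rpayload.decode('utf-8'),
    )
    ok, flags, payload = R.get()
    if not ok:
        raise ValueError("request failed, response flags=0x%x" % flags)
    return payload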
def _defer_iterable(self, it, outQ, test_ready=None):
    iterate = partial(next, iter(it))
    R = Result()
    iterations = [0]

    @logexc
    def iterate_once():
        ''' Call `iterate`. Place the result on `outQ`.

            Close the queue at end of iteration or other exception.
            Otherwise, requeue ourself to collect the next iteration value.
        '''
        if test_ready is not None and not test_ready():
            raise RetryError("iterate_once: not ready yet")
        try:
            item = iterate()
        except StopIteration:
            outQ.close()
            R.result = iterations[0]
        except Exception as e:  # pylint: disable=broad-except
            exception(
                "defer_iterable: iterate_once: exception during iteration: %s",
                e
            )
            outQ.close()
            R.exc_info = sys.exc_info()
        else:
            iterations[0] += 1
            # put the item onto the output queue
            # this may itself defer various tasks (eg in a pipeline)
            debug("L.defer_iterable: iterate_once: %s.put(%r)", outQ, item)
            outQ.put(item)
            # now queue another iteration to run after those deferred tasks
            self._defer(iterate_once)

    iterate_once.__name__ = "%s:next(iter(%s))" % (
        funcname(iterate_once), getattr(it, '__name__', repr(it))
    )
    self._defer(iterate_once)
    return R
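# The `outQ` passed to _defer_iterable above is only required to provide
# .put() and .close(). A minimal hypothetical stand-in illustrating that
# contract (not the real queue class used with this method):
class ListOutQ:
    ''' Collect items into a list and record when iteration has finished. '''

    def __init__(self):
        self.items = []
        self.closed = False

    def put(self, item):
        # receives each item produced by iterate_once()
        self.items.append(item)

    def close(self):
        # called at end of iteration or when iteration raises
        self.closed = True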
def add_bg(self, data):
    ''' Add `data` via `self.add` and return an already completed `Result`
        holding its return value.
    '''
    return Result(result=self.add(data))
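# add_bg above returns a Result that is already complete because self.add()
# runs synchronously. A tiny illustration of that pattern, assuming (as the
# constructor call above suggests) that passing result= completes the Result
# at construction time:
R = Result(result=42)
assert R.ready           # already completed, no background work pending
assert R.result == 42    # reading the result does not block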
def cancel(self):
    ''' Cancel this Target.
        Actions will cease as soon as decorum allows.
    '''
    self.maker.debug_make("%s: CANCEL", self)
    Result.cancel(self)
def setUp(self):
    self.R = Result()