def feed(self, query, feed_all):
    """
    Poll the collab wiki with IncGetMeta and yield page events.

    Emits an "id:open" event for every added/updated page and an
    "id:close" event for every removed page. If feed_all is true, the
    whole current page set is sent after each poll round instead of
    only the changed pages.
    """
    collab = wiki.GraphingWiki(
        self.collab_url,
        ssl_verify_cert=not self.collab_ignore_cert,
        ssl_ca_certs=self.collab_extra_ca_certs)
    yield idiokit.thread(collab.authenticate, self.collab_user, self.collab_password)

    yield idiokit.sleep(5)

    token = None
    current = dict()

    while True:
        try:
            result = yield idiokit.thread(collab.request, "IncGetMeta", query, token)
        except wiki.WikiFailure as fail:
            self.log.error("IncGetMeta failed: {0!r}".format(fail))
        else:
            incremental, token, (removed, updates) = result

            removed = set(removed)
            if not incremental:
                # A full (non-incremental) result replaces the whole page
                # set: everything previously seen counts as removed unless
                # it reappears in the updates below.
                removed.update(current)
                current.clear()

            for page, keys in updates.iteritems():
                event = current.setdefault(page, events.Event())
                event.add("id:open", self.page_id(page))
                event.add("gwikipagename", page)
                event.add(
                    "collab url",
                    self.collab_url + urllib.quote(page.encode("utf8")))
                removed.discard(page)

                for key, (discarded, added) in keys.iteritems():
                    for value in map(normalize, discarded):
                        event.discard(key, value)
                    for value in map(normalize, added):
                        event.add(key, value)

                if not feed_all:
                    yield idiokit.send(event)

            for page in removed:
                current.pop(page, None)

                event = events.Event()
                event.add("id:close", self.page_id(page))
                event.add("gwikipagename", page)
                # BUGFIX: percent-quote the page name exactly like the
                # "id:open" branch does, so open/close events carry the
                # same URL even for non-ASCII or reserved characters.
                event.add(
                    "collab url",
                    self.collab_url + urllib.quote(page.encode("utf8")))
                yield idiokit.send(event)

            if feed_all:
                for page in current:
                    yield idiokit.send(current[page])

        yield idiokit.sleep(self.poll_interval)
def feed(self, query, feed_all):
    """
    Poll the collab wiki with IncGetMeta and yield page events.

    Added/updated pages produce "id:open" events, removed pages produce
    "id:close" events. When feed_all is true, the complete current page
    set is resent after each poll round.
    """
    collab = wiki.GraphingWiki(
        self.collab_url,
        ssl_verify_cert=not self.collab_ignore_cert,
        ssl_ca_certs=self.collab_extra_ca_certs)
    yield idiokit.thread(collab.authenticate, self.collab_user, self.collab_password)

    yield idiokit.sleep(5)

    token = None
    current = dict()

    while True:
        try:
            result = yield idiokit.thread(collab.request, "IncGetMeta", query, token)
        except wiki.WikiFailure as fail:
            self.log.error("IncGetMeta failed: {0!r}".format(fail))
        else:
            incremental, token, (removed, updates) = result

            removed = set(removed)
            if not incremental:
                # Non-incremental results replace the whole page set, so
                # previously seen pages are removed unless re-added below.
                removed.update(current)
                current.clear()

            for page, keys in updates.iteritems():
                event = current.setdefault(page, events.Event())
                event.add("id:open", self.page_id(page))
                event.add("gwikipagename", page)
                event.add(
                    "collab url",
                    self.collab_url + urllib.quote(page.encode("utf8")))
                removed.discard(page)

                for key, (discarded, added) in keys.iteritems():
                    for value in map(normalize, discarded):
                        event.discard(key, value)
                    for value in map(normalize, added):
                        event.add(key, value)

                if not feed_all:
                    yield idiokit.send(event)

            for page in removed:
                current.pop(page, None)

                event = events.Event()
                event.add("id:close", self.page_id(page))
                event.add("gwikipagename", page)
                # BUGFIX: quote the page name the same way the "id:open"
                # branch does, so that open and close events refer to the
                # same URL for pages with non-ASCII/reserved characters.
                event.add(
                    "collab url",
                    self.collab_url + urllib.quote(page.encode("utf8")))
                yield idiokit.send(event)

            if feed_all:
                for page in current:
                    yield idiokit.send(current[page])

        yield idiokit.sleep(self.poll_interval)
def _read_pipe(self):
    """Forward lines read from the pipe to the IRC channel as PRIVMSGs."""
    while True:
        # Block until the pipe becomes readable.
        yield idiokit.select.select((self._pipe,), (), ())

        try:
            data = self._pipe.readline()
        except IOError:
            # Transient read failure - back off for a moment and retry.
            yield idiokit.sleep(1)
        else:
            if data:
                yield idiokit.send("PRIVMSG", self._channel, "{0}".format(data))
            else:
                # Select fired but nothing was available; poll again soon.
                yield idiokit.sleep(0.5)
def _output_rate_limiter(self):
    """Relay messages while enforcing at most xmpp_rate_limit messages/second."""
    # Rate limiting is effectively disabled while the configured limit is
    # non-positive: idle here, re-checking the setting once a minute.
    while self.xmpp_rate_limit <= 0.0:
        yield idiokit.sleep(60.0)

    while True:
        # Time already elapsed since the previous output counts towards
        # the mandatory gap between consecutive messages.
        elapsed = max(time.time() - self._last_output, 0)
        remaining = 1.0 / self.xmpp_rate_limit - elapsed
        if remaining > 0.0:
            yield idiokit.sleep(remaining)
        self._last_output = time.time()

        message = yield idiokit.next()
        yield idiokit.send(message)
def _poll_files(self):
    """Claim files from the maildir-style input dir and emit (input, output) paths."""
    in_progress = os.path.join(self.work_dir, "in-progress")
    done = os.path.join(self.work_dir, "done")
    makedirs(in_progress)
    makedirs(done)

    # Re-offer any files claimed by a previous run but never finished.
    for dirname, filename in iter_dir(in_progress):
        yield idiokit.send(
            os.path.join(dirname, filename),
            os.path.join(done, filename))

    while True:
        candidates = itertools.chain(
            iter_dir(os.path.join(self.input_dir, "new")),
            iter_dir(os.path.join(self.input_dir, "cur")))

        for dirname, filename in candidates:
            # Prefix the claimed name with a fresh UUID so concurrent
            # pollers can never collide on the same work-area name.
            claimed = uuid.uuid4().hex + "." + filename
            source = os.path.join(dirname, filename)
            input_name = os.path.join(in_progress, claimed)
            output_name = os.path.join(done, claimed)
            if try_rename(source, input_name):
                yield idiokit.send(input_name, output_name)

        yield idiokit.sleep(self.poll_interval)
def wait(self):
    # Wait until the earliest timestamped entry in the heap is due, then
    # pop it and return its object via idiokit.stop(). A concurrent
    # queue() call may schedule an earlier wakeup by throwing
    # self.WakeUp into self._waiter, which interrupts the sleep so the
    # timeout can be recomputed.
    while True:
        if not self._heap:
            # Nothing scheduled - wait indefinitely for a WakeUp.
            timeout = None
        else:
            timestamp, _ = self._heap.peek()
            timeout = max(0.0, timestamp - self._now())

        waiter = self._waiter
        try:
            if timeout is not None:
                yield waiter | idiokit.sleep(timeout)
            else:
                yield waiter
        except self.WakeUp:
            pass  # rescheduled - loop around and recompute the timeout
        finally:
            # Replace the fired waiter with a fresh event exactly once,
            # so later WakeUp throws target an unfired event.
            if waiter is self._waiter:
                self._waiter = idiokit.Event()

        while self._heap:
            timestamp, obj = self._heap.peek()
            if timestamp > self._now():
                break
            self._heap.pop()
            idiokit.stop(obj)
def handle_text_plain(self, headers, fileobj):
    # Parse CSV data from a text/plain part: either directly from a
    # named attachment, or by fetching each URL found in the body text.
    fileobj = self._decode(headers, fileobj)

    filename = headers[-1].get_filename(None)
    if filename is not None:
        # The part is an attached file - parse it directly and stop.
        self.log.info("Parsing CSV data from an attachment")
        result = yield self.parse_csv(headers, filename, fileobj)
        idiokit.stop(result)

    for match in re.findall(self.url_rex, fileobj.read()):
        # Retry each fetch up to self.retry_count times before giving up.
        for try_num in xrange(max(self.retry_count, 0) + 1):
            self.log.info("Fetching URL {0!r}".format(match))
            try:
                info, fileobj = yield utils.fetch_url(match)
            except utils.FetchUrlFailed as fail:
                if self.retry_count <= 0:
                    self.log.error("Fetching URL {0!r} failed ({1}), giving up".format(match, fail))
                    idiokit.stop(False)
                elif try_num == self.retry_count:
                    self.log.error("Fetching URL {0!r} failed ({1}) after {2} retries, giving up".format(match, fail, self.retry_count))
                    idiokit.stop(False)
                else:
                    self.log.error("Fetching URL {0!r} failed ({1}), retrying in {2:.02f} seconds".format(match, fail, self.retry_interval))
                    yield idiokit.sleep(self.retry_interval)
            else:
                break

        filename = info.get_filename(None)
        if filename is None:
            # Without a filename the payload type cannot be determined.
            self.log.error("No filename given for the data")
            continue

        self.log.info("Parsing CSV data from the URL")
        result = yield self.parse_csv(headers, filename, fileobj)
        idiokit.stop(result)
def _connect(self, host, port, retry_interval=60.0):
    """Connect to the SMTP server, retrying forever until a connection succeeds."""
    while True:
        self.log.info(u"Connecting to SMTP server {0!r} port {1}".format(
            host, port))
        try:
            server = yield idiokit.thread(
                smtplib.SMTP, host, port,
                timeout=self.smtp_connection_timeout)
        except (socket.error, smtplib.SMTPException) as exc:
            self.log.error(u"Failed connecting to SMTP server: {0}".format(
                utils.format_exception(exc)))
        else:
            self.log.info(u"Connected to the SMTP server")
            # idiokit.stop() returns the connected server to the caller.
            idiokit.stop(server)

        self.log.info(
            u"Retrying SMTP connection in {0:.2f} seconds".format(
                retry_interval))
        yield idiokit.sleep(retry_interval)
def main(self, state):
    # Drain the delayed-report queue, restoring any state saved by a
    # previous incarnation first.
    if isinstance(state, collections.deque):
        # Legacy state format: a deque of (item, keys) pairs.
        for item, keys in state:
            self.queue(0.0, item, **keys)
    elif state is not None:
        for delay, args, keys in state:
            self.queue(delay, *args, **keys)

    try:
        while True:
            now = time.time()
            if not self._queue or self._queue[0][0] > now:
                # Nothing due yet - poll again in a second.
                yield idiokit.sleep(1.0)
                continue

            _, args, keys = heapq.heappop(self._queue)
            self._current = args, keys
            try:
                yield self.report(*args, **keys)
            except:
                # Requeue the report so it is not lost, then let the
                # exception propagate.
                self.queue(0.0, *args, **keys)
                raise
            else:
                self._current = None
    except services.Stop:
        # Serialize the remaining queue with delays made relative to
        # now, so it can be restored after a restart.
        now = time.time()
        dumped = [(max(x - now, 0.0), y, z) for (x, y, z) in self._queue]
        idiokit.stop(_ReportBotState(dumped))
def _poll_files(self):
    """Move incoming files into a private work area and emit their path pairs."""
    in_progress = os.path.join(self.work_dir, "in-progress")
    done = os.path.join(self.work_dir, "done")
    makedirs(in_progress)
    makedirs(done)

    # Resume files claimed earlier but not yet completed.
    for dirname, filename in iter_dir(in_progress):
        input_name = os.path.join(dirname, filename)
        yield idiokit.send(input_name, os.path.join(done, filename))

    new_dir = os.path.join(self.input_dir, "new")
    cur_dir = os.path.join(self.input_dir, "cur")
    while True:
        for dirname, filename in itertools.chain(iter_dir(new_dir), iter_dir(cur_dir)):
            # A unique prefix keeps names from colliding in the work area.
            unique = uuid.uuid4().hex + "." + filename
            input_name = os.path.join(in_progress, unique)
            if try_rename(os.path.join(dirname, filename), input_name):
                yield idiokit.send(input_name, os.path.join(done, unique))

        yield idiokit.sleep(self.poll_interval)
def follow_config(path, poll_interval=1.0, force_interval=30.0):
    # Watch a config file and emit (ok, payload) tuples:
    # (True, configs) whenever the file's mtime changes or at least
    # every force_interval seconds, (False, error message) on failure.
    last_reload = -float("inf")
    last_mtime = None
    last_error_msg = None

    abspath = os.path.abspath(path)
    while True:
        try:
            now = time.time()
            if now < last_reload:
                # The clock jumped backwards; resync the reload timestamp.
                last_reload = now

            mtime = os.path.getmtime(abspath)
            if now > last_reload + force_interval or last_mtime != mtime:
                configs = load_configs(abspath)
                yield idiokit.send(True, tuple(flatten(configs)))
                last_error_msg = None
                last_mtime = mtime
                last_reload = now
        except Exception as exc:
            # Report each distinct failure once instead of repeating it
            # on every poll round.
            error_msg = "Could not load module {0!r}: {1!r}".format(abspath, exc)
            if error_msg != last_error_msg:
                yield idiokit.send(False, error_msg)
                last_error_msg = error_msg
            # Force a reload attempt on the next round.
            last_mtime = None

        yield idiokit.sleep(poll_interval)
def alert(*times):
    """Emit an empty signal whenever the next of the given time specs is due."""
    if not times:
        # No schedule given: block forever on an event that never fires.
        yield idiokit.Event()
    else:
        while True:
            wait = min(next_time(spec) for spec in times)
            yield idiokit.sleep(wait)
            yield idiokit.send()
def noop(self, noop_interval=60.0):
    """Keep the connection alive by issuing periodic NOOP commands."""
    while True:
        try:
            yield self.call("noop")
        except LostConnection:
            # Reconnection happens elsewhere; retry the NOOP right away.
            pass
        else:
            yield idiokit.sleep(noop_interval)
def poll(self):
    """Fetch mails repeatedly, retrying immediately after a lost connection."""
    while True:
        try:
            yield self.fetch_mails(self.filter)
        except LostConnection:
            # The connection is re-established elsewhere; retry at once.
            pass
        else:
            yield idiokit.sleep(self.poll_interval)
def cancel(self, node):
    """Remove a scheduled node from the heap; return True if it was still queued."""
    yield idiokit.sleep(0.0)
    try:
        self._heap.pop(node)
    except heap.HeapError:
        # Already released or never queued.
        idiokit.stop(False)
    else:
        idiokit.stop(True)
def get_payload(self, i=None, decode=False):
    """Coroutine wrapper for the message's get_payload, wrapping parts in Message."""
    yield idiokit.sleep(0.0)
    payload = self._message.get_payload(i, decode)
    if not self.is_multipart():
        # Non-multipart payloads are returned as-is.
        idiokit.stop(payload)
    if i is not None:
        idiokit.stop(Message(payload))
    idiokit.stop(map(Message, payload))
def queue(self, delay, obj):
    """Schedule obj for release after delay seconds; return its heap node."""
    yield idiokit.sleep(0.0)
    timestamp = self._now() + delay
    # Wake the waiter only when this entry becomes the earliest one, so
    # it can shorten its sleep accordingly.
    becomes_earliest = not self._heap or timestamp < self._heap.peek()[0]
    if becomes_earliest:
        self._waiter.throw(self.WakeUp())
    idiokit.stop(self._heap.push((timestamp, obj)))
def _cleanup(self, key):
    # Pass events through unchanged; when this stream ends, decrement
    # the key's refcount and - if it was the last reference - wait out a
    # grace period before tearing down the key's task.
    try:
        yield idiokit.consume()
    finally:
        if self.counter.dec(key):
            # Last reference went away. Wait before really cleaning up,
            # in case the key comes back during the grace period.
            yield idiokit.sleep(self.grace_period)
            if not self.counter.contains(key) and key in self.tasks:
                task = self.tasks.pop(key)
                task.throw(self.signal)
def _rate_limiter(rate_limit):
    """Forward messages, spacing them to rate_limit per second (None = unlimited)."""
    previous = time.time()
    while True:
        if rate_limit is not None:
            # Sleep off whatever remains of the per-message interval.
            elapsed = max(time.time() - previous, 0)
            remaining = 1.0 / rate_limit - elapsed
            if remaining > 0.0:
                yield idiokit.sleep(remaining)
            previous = time.time()

        message = yield idiokit.next()
        yield idiokit.send(message)
def logger():
    """Periodically log (and reset) the count of events sent to the room."""
    while True:
        try:
            yield idiokit.sleep(interval)
        finally:
            # Logging happens in the finally clause so a final partial
            # count is not lost when the stream shuts down.
            count = counter.count
            if count > 0:
                info = events.Event({
                    "type": "room",
                    "service": self.bot_name,
                    "sent events": unicode(count),
                    "room": name})
                self.log.info(
                    "Sent {0} events to room {1!r}".format(count, name),
                    event=info)
                counter.count = 0
def handle(self, conf, min_delay=15.0, max_delay=600.0):
    """Run a bot forever, backing off exponentially after failing exits."""
    delay = min_delay
    while True:
        self.log.info("Launching bot {0!r} from module {1!r}".format(conf.name, conf.module))
        returncode = yield self.launch(conf)

        self.log.info("Relaunching {0!r} in {1} seconds".format(conf.name, delay))
        yield idiokit.sleep(delay)

        # Double the delay (capped at max_delay) after error exits,
        # reset it after clean ones.
        delay = min(delay * 2, max_delay) if returncode > 0 else min_delay
def purge(self, ids, queue):
    """Expire queued ids once a second, emitting id:close for the last reference."""
    while True:
        yield idiokit.sleep(1.0)

        now = time.time()
        while queue and queue[0][0] <= now:
            _, eid = queue.popleft()

            count, items = ids.pop(eid)
            if count <= 1:
                # Last reference expired: announce the close.
                closing = events.Event(items).union({"id:close": eid})
                yield idiokit.send(closing)
            else:
                ids[eid] = count - 1, items
def feed(self):
    """Tail the log file and emit one event per successfully parsed line."""
    for entry in tail_file(self.path, self.offset):
        if entry is None:
            # No new data yet - pause briefly before polling again.
            yield idiokit.sleep(2.0)
            continue

        mtime, line = entry
        parsed = self.parse(line, mtime)
        if parsed is None:
            continue

        event = events.Event()
        for key, value in parsed.items():
            event.add(key, value)
        yield idiokit.send(event)
def _log_stats(self, interval=15.0):
    """Every interval seconds, log per-room seen/sent counters and reset them."""
    while True:
        yield idiokit.sleep(interval)

        for room, (seen, sent) in self._stats.iteritems():
            info = events.Event({
                "type": "room",
                "service": self.bot_name,
                "seen events": unicode(seen),
                "sent events": unicode(sent),
                "room": unicode(room)
            })
            self.log.info(
                u"Room {0}: seen {1}, sent {2} events".format(room, seen, sent),
                event=info)

        self._stats.clear()
def handle(self, conf):
    """Run conf with the default handler, or a deprecated custom strategy()."""
    if type(self).strategy != StartupBot.strategy:
        # The subclass overrides strategy(): honor it, but warn that
        # this extension point is deprecated.
        import warnings
        warnings.warn(
            "Implementing custom startup bot relaunch strategies by implementing " +
            "strategy() has been deprecated.", DeprecationWarning)

        # A strategy yields either a delay in seconds or a conf to launch.
        for step in self.strategy(conf):
            if isinstance(step, numbers.Real):
                yield idiokit.sleep(step)
            else:
                yield self.launch(step)
    else:
        yield _StartupBot.handle(self, conf)
def run_mailbox(self, min_delay=5.0, max_delay=60.0):
    # Serve queued mailbox operations over a lazily (re)established IMAP
    # connection. Losing the connection fails the pending operation with
    # LostConnection; the reconnect delay backs off exponentially from
    # min_delay up to max_delay.
    mailbox = None

    try:
        while True:
            item = yield idiokit.next()

            while True:
                delay = min(min_delay, max_delay)
                while mailbox is None:
                    try:
                        mailbox = yield idiokit.thread(self.connect)
                    except (imaplib.IMAP4.abort, socket.error) as error:
                        self.log.error(
                            "Failed IMAP connection ({0})".format(
                                utils.format_exception(error)))
                    else:
                        break

                    self.log.info(
                        "Retrying connection in {0:.2f} seconds".format(
                            delay))
                    yield idiokit.sleep(delay)
                    delay = min(2 * delay, max_delay)

                event, name, args, keys = item
                # Skip the work if the caller already gave up on the result.
                if event.result().unsafe_is_set():
                    break

                try:
                    method = getattr(mailbox, name)
                    result = yield idiokit.thread(method, *args, **keys)
                except (imaplib.IMAP4.abort, socket.error) as error:
                    # Connection dropped mid-operation: tear it down and
                    # fail the pending operation with LostConnection.
                    yield idiokit.thread(self.disconnect, mailbox)
                    self.log.error("Lost IMAP connection ({0})".format(
                        utils.format_exception(error)))
                    mailbox = None
                    event.fail(LostConnection, "", None)
                    break
                except imaplib.IMAP4.error as error:
                    event.fail(type(error), error, None)
                    break
                else:
                    event.succeed(result)
                    break
    finally:
        if mailbox is not None:
            yield idiokit.thread(self.disconnect, mailbox)
def handle(self, conf, min_delay=15.0, max_delay=600.0):
    """Keep relaunching the configured bot, doubling the delay on failures."""
    delay = min_delay
    while True:
        self.log.info(
            "Launching bot {0!r} from module {1!r}".format(conf.name, conf.module))
        returncode = yield self.launch(conf)

        self.log.info(
            "Relaunching {0!r} in {1} seconds".format(conf.name, delay))
        yield idiokit.sleep(delay)

        if returncode > 0:
            # Error exit: back off exponentially, capped at max_delay.
            delay = min(2 * delay, max_delay)
        else:
            delay = min_delay
def purge(self, ids, queue):
    """Once a second, retire expired ids and emit id:close for final references."""
    while True:
        yield idiokit.sleep(1.0)

        cutoff = time.time()
        while queue:
            expire_time, eid = queue[0]
            if expire_time > cutoff:
                break
            queue.popleft()

            count, items = ids.pop(eid)
            if count > 1:
                # Other references remain; just decrement the count.
                ids[eid] = count - 1, items
            else:
                yield idiokit.send(events.Event(items).union({
                    "id:close": eid
                }))
def _log_stats(self, interval=15.0):
    """Log room traffic counters every interval seconds, then reset them."""
    while True:
        yield idiokit.sleep(interval)

        for room, counts in self._stats.iteritems():
            seen, sent = counts
            self.log.info(
                u"Room {0}: seen {1}, sent {2} events".format(room, seen, sent),
                event=events.Event({
                    "type": "room",
                    "service": self.bot_name,
                    "seen events": unicode(seen),
                    "sent events": unicode(sent),
                    "room": unicode(room)
                })
            )

        self._stats.clear()
def _connect(self, host, port, retry_interval=60.0):
    """Open an SMTP connection, retrying every retry_interval seconds until done."""
    server = None
    while server is None:
        self.log.info(u"Connecting to SMTP server {0!r} port {1}".format(host, port))
        try:
            server = yield idiokit.thread(
                smtplib.SMTP, host, port, timeout=self.smtp_connection_timeout)
        except (socket.error, smtplib.SMTPException) as exc:
            self.log.error(u"Failed connecting to SMTP server: {0}".format(utils.format_exception(exc)))
        else:
            self.log.info(u"Connected to the SMTP server")
            break

        self.log.info(u"Retrying SMTP connection in {0:.2f} seconds".format(retry_interval))
        yield idiokit.sleep(retry_interval)

    # Hand the connected server back to the caller.
    idiokit.stop(server)
def handle(self, conf):
    """Delegate to the default handler unless strategy() has been overridden."""
    if type(self).strategy == StartupBot.strategy:
        # No custom strategy: use the standard relaunch handling.
        yield _StartupBot.handle(self, conf)
        return

    import warnings
    warnings.warn(
        "Implementing custom startup bot relaunch strategies by implementing " +
        "strategy() has been deprecated.",
        DeprecationWarning
    )

    # A strategy yields either a delay (seconds) or a conf to launch.
    for item in self.strategy(conf):
        if isinstance(item, numbers.Real):
            yield idiokit.sleep(item)
        else:
            yield self.launch(item)
def cleanup(self, ids, queue):
    """Flush expired entries once a second, emitting their pending events."""
    while True:
        yield idiokit.sleep(1.0)

        now = time.time()
        while queue and queue[0][0] <= now:
            _, eid, unique = queue.popleft()

            event_set, augment_set = ids[eid]
            augment_set.pop(unique, None)
            pending = event_set.pop(unique, None)
            if pending is not None:
                yield idiokit.send(pending)

            # Drop the id entirely once nothing refers to it anymore.
            if not event_set and not augment_set:
                del ids[eid]
def _manage_cache(self, query):
    # Keep self.cache in sync with the wiki using incremental IncGetMeta
    # polling. Only values for keys listed in self.keys are cached.
    token = None

    wikikeys = set()
    for valueset in self.keys.values():
        wikikeys.update(valueset)

    while True:
        try:
            result = yield idiokit.thread(self.collab.request, "IncGetMeta", query, token)
        except (socket.error, wiki.WikiFailure) as exc:
            self.log.error("IncGetMeta failed: {0}".format(exc))
        else:
            incremental, token, (removed, updates) = result

            removed = set(removed)
            if not incremental:
                # Full refresh: every cached page is gone unless it is
                # re-added by the updates below.
                removed.update(self.cache.keys())
                self.cache.clear()

            for page, keys in updates.iteritems():
                event = self.cache.setdefault(page, events.Event())
                event.add("gwikipagename", page)
                removed.discard(page)

                for key, (discarded, added) in keys.iteritems():
                    for value in discarded:
                        event.discard(key, value)
                    for value in added:
                        if key in wikikeys:
                            event.add(key, value)

            for page in removed:
                self.cache.pop(page, None)

            if removed or updates:
                self.log.info(
                    "Updated {0} pages and removed {1} pages ({2} pages in cache)"
                    .format(len(updates), len(removed), len(self.cache.keys())))

        yield idiokit.sleep(self.poll_interval)
def run_mailbox(self, min_delay=5.0, max_delay=60.0):
    # Serve queued mailbox operations over a lazily (re)established IMAP
    # connection. A lost connection does not fail the pending operation:
    # the connection is reopened and the same operation is retried.
    mailbox = None

    try:
        while True:
            item = yield idiokit.next()

            while True:
                delay = min(min_delay, max_delay)
                while mailbox is None:
                    try:
                        mailbox = yield idiokit.thread(self.connect)
                    except (imaplib.IMAP4.abort, socket.error) as error:
                        self.log.error("Failed IMAP connection ({0})".format(utils.format_exception(error)))
                    else:
                        break

                    self.log.info("Retrying connection in {0:.2f} seconds".format(delay))
                    yield idiokit.sleep(delay)
                    delay = min(2 * delay, max_delay)

                event, name, args, keys = item
                # The caller may have given up while we reconnected.
                if event.result().unsafe_is_set():
                    break

                try:
                    method = getattr(mailbox, name)
                    result = yield idiokit.thread(method, *args, **keys)
                except (imaplib.IMAP4.abort, socket.error) as error:
                    # Connection dropped mid-operation: disconnect, then
                    # loop around to reconnect and retry this same item.
                    yield idiokit.thread(self.disconnect, mailbox)
                    self.log.error("Lost IMAP connection ({0})".format(utils.format_exception(error)))
                    mailbox = None
                except imaplib.IMAP4.error as error:
                    event.fail(type(error), error, None)
                    break
                else:
                    event.succeed(result)
                    break
    finally:
        if mailbox is not None:
            yield idiokit.thread(self.disconnect, mailbox)
def _manage_cache(self, query):
    # Maintain self.cache as an incremental mirror of the wiki query
    # results. Only values for keys listed in self.keys are stored.
    token = None

    wikikeys = set()
    for valueset in self.keys.values():
        wikikeys.update(valueset)

    while True:
        try:
            result = yield idiokit.thread(self.collab.request, "IncGetMeta", query, token)
        except (socket.error, wiki.WikiFailure) as exc:
            self.log.error("IncGetMeta failed: {0}".format(exc))
        else:
            incremental, token, (removed, updates) = result

            removed = set(removed)
            if not incremental:
                # Full refresh: drop everything not re-added below.
                removed.update(self.cache.keys())
                self.cache.clear()

            for page, keys in updates.iteritems():
                event = self.cache.setdefault(page, events.Event())
                event.add("gwikipagename", page)
                removed.discard(page)

                for key, (discarded, added) in keys.iteritems():
                    for value in discarded:
                        event.discard(key, value)
                    for value in added:
                        if key in wikikeys:
                            event.add(key, value)

            for page in removed:
                self.cache.pop(page, None)

            if removed or updates:
                self.log.info("Updated {0} pages and removed {1} pages ({2} pages in cache)".format(
                    len(updates), len(removed), len(self.cache.keys())))

        yield idiokit.sleep(self.poll_interval)
def launch(self, conf):
    # Start the process for conf and wait for it to exit, forwarding any
    # _ConfSignal thrown into this coroutine to the child process.
    # Returns the child's exit code via idiokit.stop().
    if conf in self._processes:
        raise RuntimeError("can not launch two processes with same conf")

    process = self._launch(conf)
    self._processes[conf] = process

    sig = None
    while self._poll(conf, process) is None:
        try:
            yield idiokit.sleep(0.25)
        except _ConfSignal as sig:
            siginfo = "signal {0}".format(sig.signum)
            signame = _signal_number_to_name(sig.signum)
            if signame is not None:
                siginfo += " (" + signame + ")"
            self.log.info("Sending {0} to {1!r}".format(siginfo, conf.name))
            _kill(process, sig.signum)

    # Re-raise the last received signal so the caller sees it too.
    if sig is not None:
        raise sig
    idiokit.stop(process.returncode)
def follow_config(path, poll_interval=1.0, force_interval=30.0):
    # Watch a config file, sending (True, configs) on each (re)load and
    # (False, error message) when loading fails. The file is reloaded
    # when its mtime changes and at least every force_interval seconds.
    last_reload = -float("inf")
    last_mtime = None
    last_error_msg = None

    abspath = os.path.abspath(path)
    while True:
        now = time.time()
        if now < last_reload:
            # The clock jumped backwards; resync the reload timestamp.
            last_reload = now

        mtime = os.path.getmtime(abspath)
        if now > last_reload + force_interval or last_mtime != mtime:
            try:
                configs = load_configs(abspath)
            except Exception:
                _, exc_value, exc_tb = sys.exc_info()
                stack = traceback.extract_tb(exc_tb)
                stack = stack[1:]  # Make the traceback flatter by discarding the current stack frame

                error_msg = "Could not load {path!r} (most recent call last):\n{stack}\n{exception}".format(
                    path=abspath,
                    stack="".join(traceback.format_list(stack)).rstrip(),
                    exception=utils.format_exception(exc_value)
                )
                # Report each distinct failure only once.
                if error_msg != last_error_msg:
                    yield idiokit.send(False, error_msg)
                    last_error_msg = error_msg
                # Force a reload attempt on the next round.
                last_mtime = None
            else:
                yield idiokit.send(True, configs)
                last_error_msg = None
                last_mtime = mtime

            last_reload = now

        yield idiokit.sleep(poll_interval)
def launch(self, conf):
    # Launch the process for conf and block until it exits. Signals
    # thrown into this coroutine as _ConfSignal are forwarded to the
    # child; the last one is re-raised after the child exits.
    if conf in self._processes:
        raise RuntimeError("can not launch two processes with same conf")

    process = self._launch(conf)
    self._processes[conf] = process

    sig = None
    while self._poll(conf, process) is None:
        try:
            yield idiokit.sleep(0.25)
        except _ConfSignal as sig:
            siginfo = "signal {0}".format(sig.signum)
            signame = _signal_number_to_name(sig.signum)
            if signame is not None:
                siginfo += " (" + signame + ")"
            self.log.info("Sending {0} to {1!r}".format(
                siginfo, conf.name))
            _kill(process, sig.signum)

    if sig is not None:
        raise sig
    idiokit.stop(process.returncode)
def poll(self):
    # Fetch matching mails repeatedly, pausing poll_interval seconds
    # between rounds.
    while True:
        yield self.fetch_mails(self.filter)
        yield idiokit.sleep(self.poll_interval)
def configs(self):
    # Default implementation yields no configurations; presumably meant
    # to be overridden by subclasses producing actual config objects.
    yield idiokit.sleep(0.0)
def _alert(self, flush_interval=2.0):
    # Emit an empty flush signal every flush_interval seconds.
    while True:
        yield idiokit.sleep(flush_interval)
        yield idiokit.send()
def noop(self, noop_interval=60.0):
    # Keep the connection alive with a NOOP call every noop_interval
    # seconds.
    while True:
        yield self.call("noop")
        yield idiokit.sleep(noop_interval)
def poll(self, *key):
    # No-op default poller; presumably overridden by subclasses that
    # actually fetch data for the given key.
    yield idiokit.sleep(0.0)
def report(self, collected):
    # No-op default reporter; presumably overridden by subclasses that
    # actually deliver the collected data.
    yield idiokit.sleep(0.0)