def create_https_server(host):
    sock = idiokit.socket.Socket()
    try:
        yield sock.bind((host, 0))
        _, port = yield sock.getsockname()
        yield sock.listen(1)
    except:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        yield sock.close()
        raise exc_type, exc_value, exc_traceback

    @idiokit.stream
    def _server():
        try:
            conn, addr = yield sock.accept()
        finally:
            yield sock.close()

        try:
            with tmpfile(cert_data) as certfile:
                ssl_conn = yield idiokit.ssl.wrap_socket(
                    conn, server_side=True, certfile=certfile)
                try:
                    yield ssl_conn.sendall("HTTP/1.0 200 OK\r\nContent-Length: 2\r\n\r\nok")
                finally:
                    yield ssl_conn.close()
        finally:
            yield conn.close()

    idiokit.stop(_server(), "https://{0}:{1}/".format(host, port))

def build_mail(self, events, to=[], cc=[], bcc=[], template="", template_values={}, **keys):
    """
    Return a mail object built from the collected events and the session
    parameters. The "events" parameter is None when we only want to test
    building a mail.
    """
    if events is None:
        events = []

    csv = templates.CSVFormatter()
    template_keys = {
        "csv": csv,
        "attach_csv": templates.AttachUnicode(csv),
        "attach_and_embed_csv": templates.AttachAndEmbedUnicode(csv),
        "attach_zip": templates.AttachZip(csv),
        "to": templates.Const(format_addresses(to)),
        "cc": templates.Const(format_addresses(cc)),
        "bcc": templates.Const(format_addresses(bcc))
    }
    for key, value in dict(template_values).iteritems():
        template_keys[key] = templates.Event(value)

    mail_template = MailTemplate(template, **template_keys)
    msg = yield idiokit.thread(mail_template.format, events)
    idiokit.stop(msg)

def main(self, state):
    if state is None:
        state = dict()
    self._poll_dedup = state

    if self.ignore_initial_poll:
        self.log.info("Ignoring initial polls")

    try:
        for key in self._poll_dedup:
            node = yield self._poll_queue.queue(self.poll_interval, (True, key))
            self._poll_cleanup[key] = node

        while True:
            cleanup, arg = yield self._poll_queue.wait()
            if cleanup:
                self._poll_dedup.pop(arg, None)
                self._poll_cleanup.pop(arg, None)
            else:
                waiter, result = arg
                waiter.succeed()
                yield result
    except services.Stop:
        idiokit.stop(self._poll_dedup)

def _poll(self, url):
    self.log.info("Downloading %s" % url)
    try:
        info, fileobj = yield utils.fetch_url(url)
    except utils.FetchUrlFailed as fuf:
        self.log.error("Download failed: %r", fuf)
        idiokit.stop()

def _connect(self, host, port, retry_interval=60.0):
    server = None

    while server is None:
        self.log.info(u"Connecting to SMTP server {0!r} port {1}".format(host, port))
        try:
            server = yield idiokit.thread(
                smtplib.SMTP, host, port,
                timeout=self.smtp_connection_timeout)
        except (socket.error, smtplib.SMTPException) as exc:
            self.log.error(u"Failed connecting to SMTP server: {0}".format(
                utils.format_exception(exc)))
        else:
            self.log.info(u"Connected to the SMTP server")
            break

        self.log.info(u"Retrying SMTP connection in {0:.2f} seconds".format(retry_interval))
        yield idiokit.sleep(retry_interval)

    idiokit.stop(server)

def main(self, state):
    if isinstance(state, collections.deque):
        for item, keys in state:
            self.queue(0.0, item, **keys)
    elif state is not None:
        for delay, args, keys in state:
            self.queue(delay, *args, **keys)

    try:
        while True:
            now = time.time()
            if not self._queue or self._queue[0][0] > now:
                yield idiokit.sleep(1.0)
                continue

            _, args, keys = heapq.heappop(self._queue)
            self._current = args, keys
            try:
                yield self.report(*args, **keys)
            except:
                self.queue(0.0, *args, **keys)
                raise
            else:
                self._current = None
    except services.Stop:
        now = time.time()
        dumped = [(max(x - now, 0.0), y, z) for (x, y, z) in self._queue]
        idiokit.stop(_ReportBotState(dumped))

def _poll(self, url):
    request = urllib2.Request(url)
    for key, value in self.http_headers:
        request.add_header(key, value)

    try:
        self.log.info('Downloading feed from: "%s"', url)
        _, fileobj = yield utils.fetch_url(request)
    except utils.FetchUrlFailed as e:
        self.log.error('Failed to download feed "%s": %r', url, e)
        idiokit.stop(False)

    self.log.info("Finished downloading the feed.")

    # Skip any leading bytes before the first "<", then rewind one byte so
    # the parser sees the opening tag.
    byte = fileobj.read(1)
    while byte and byte != "<":
        byte = fileobj.read(1)
    if byte == "<":
        fileobj.seek(-1, 1)

    try:
        for _, elem in etree.iterparse(fileobj):
            for event in self._parse(elem, url):
                if event:
                    yield idiokit.send(event)
    except ParseError as e:
        self.log.error('Invalid format on feed: "%s", "%r"', url, e)

def wait(self):
    while True:
        if not self._heap:
            timeout = None
        else:
            timestamp, _ = self._heap.peek()
            timeout = max(0.0, timestamp - self._now())

        waiter = self._waiter
        try:
            if timeout is not None:
                yield waiter | idiokit.sleep(timeout)
            else:
                yield waiter
        except self.WakeUp:
            pass
        finally:
            if waiter is self._waiter:
                self._waiter = idiokit.Event()

        while self._heap:
            timestamp, obj = self._heap.peek()
            if timestamp > self._now():
                break
            self._heap.pop()
            idiokit.stop(obj)

def fetch_url(url, opener=None, timeout=60.0, chunk_size=16384):
    if opener is None:
        opener = urllib2.build_opener()

    try:
        output = StringIO()

        fileobj = yield idiokit.thread(opener.open, url, timeout=timeout)
        try:
            while True:
                data = yield idiokit.thread(fileobj.read, chunk_size)
                if not data:
                    break
                output.write(data)
        finally:
            fileobj.close()

        info = fileobj.info()
        info = email.parser.Parser().parsestr(str(info), headersonly=True)

        output.seek(0)
        idiokit.stop(info, output)
    except urllib2.HTTPError as he:
        raise HTTPError(he.code, he.msg, he.hdrs, he.fp)
    except urllib2.URLError as error:
        if _is_timeout(error.reason):
            raise FetchUrlTimeout("fetching URL timed out")
        raise FetchUrlFailed(str(error))
    except socket.error as error:
        if _is_timeout(error):
            raise FetchUrlTimeout("fetching URL timed out")
        raise FetchUrlFailed(str(error))
    except httplib.HTTPException as error:
        raise FetchUrlFailed(str(error))

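# A minimal usage sketch for fetch_url above, not part of the original source.
# It assumes fetch_url is exposed as an idiokit stream (its yield/idiokit.stop
# usage suggests it is) and that idiokit.main_loop is available to drive a
# stream from plain code; the URL is only a placeholder.
@idiokit.stream
def _show_response(url):
    info, fileobj = yield fetch_url(url)
    # info is an email.message.Message holding the response headers,
    # fileobj a StringIO holding the body.
    print info.get("Content-Type", "unknown"), len(fileobj.read())


if __name__ == "__main__":
    idiokit.main_loop(_show_response("http://example.com/"))
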
def open_session(self, path, conf):
    @idiokit.stream
    def _guarded(key, path, session):
        try:
            state = yield session
        except Stop:
            state = None
        except:
            self.errors.throw()
            raise
        else:
            if path is not None:
                self._put(path, state)
        finally:
            del self.sessions[key]

    if path is None:
        key = object()
        session = _guarded(key, None, self.session(None, **conf))
    else:
        key = path
        while key in self.sessions:
            old_session = self.sessions[key]
            yield old_session.throw(Stop())
            yield old_session

        state = self._get(path)
        session = _guarded(key, path, self.session(state, **conf))
        self._put(path, None)

    self.sessions[key] = session
    idiokit.stop(self.errors.fork() | session)

def _poll(self, url="http://danger.rulez.sk/projects/bruteforceblocker/blist.php"): self.log.info("Downloading %s" % url) try: info, fileobj = yield utils.fetch_url(url) except utils.FetchUrlFailed, fuf: self.log.error("Download failed: %r", fuf) idiokit.stop(False)
def session(self, service_id, *path, **conf):
    while True:
        matches = list()
        for jid, service_ids in self.catalogue.items():
            if service_id in service_ids:
                matches.append(jid)

        if not matches:
            event = idiokit.Event()
            self.waiters.setdefault(service_id, set()).add(event)
            try:
                yield event
            finally:
                self.waiters.get(service_id, set()).discard(event)
                if not self.waiters.get(service_id, None):
                    self.waiters.pop(service_id, None)
            continue

        jid = random.choice(matches)
        task = self._establish_session(jid, service_id, path, conf)
        self.guarded.setdefault((jid, service_id), set()).add(task)
        try:
            session = yield task
        finally:
            self.guarded.get((jid, service_id), set()).discard(task)

        if session is not None:
            idiokit.stop(session)

def cancel(self, node):
    yield idiokit.sleep(0.0)

    try:
        timestamp, _ = self._heap.pop(node)
    except heap.HeapError:
        idiokit.stop(False)
    idiokit.stop(True)

def fetch():
    fetch = "(BODY.PEEK[{0}])".format(path)
    result, data = yield self.call("uid", "FETCH", uid, fetch)

    for parts in data:
        if not isinstance(parts, tuple) or len(parts) != 2:
            continue
        idiokit.stop(StringIO(parts[1]))

def _collect_events():
    results = []

    while True:
        try:
            event = yield idiokit.next()
        except StopIteration:
            idiokit.stop(results)
        else:
            results.append(_event_to_dict(event))

def join_lobby(xmpp, name, nick=None):
    random_string = unicode(random.randint(0, 10 ** 6))
    if nick is None:
        nick = random_string
    else:
        nick = nick + "-" + random_string

    room = yield xmpp.muc.join(name, nick)
    idiokit.stop(Lobby(xmpp, room))

def parse_csv(self, filename, fileobj):
    match = re.match(self.filename_rex, filename)
    if match is None:
        self.log.error("Filename {0!r} did not match".format(filename))
        idiokit.stop(False)

    yield idiokit.pipe(
        utils.csv_to_events(fileobj),
        _add_filename_info(match.groupdict()))
    idiokit.stop(True)

def queue(self, delay, obj):
    yield idiokit.sleep(0.0)

    timestamp = self._now() + delay
    if not self._heap or timestamp < self._heap.peek()[0]:
        self._waiter.throw(self.WakeUp())

    node = self._heap.push((timestamp, obj))
    idiokit.stop(node)

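# A minimal sketch of how queue(), wait() and cancel() above fit together, not
# part of the original source. It assumes the three methods belong to one
# timed-queue class, here given the hypothetical name TimedQueue, and that
# idiokit.main_loop is available as elsewhere in this code.
@idiokit.stream
def _timed_queue_demo():
    timed = TimedQueue()                     # hypothetical class name

    later = yield timed.queue(5.0, "later")  # due in 5 seconds, returns a heap node
    yield timed.queue(0.0, "now")            # due immediately

    obj = yield timed.wait()                 # blocks until the earliest item is due
    print obj                                # "now"

    cancelled = yield timed.cancel(later)    # True, since "later" was still queued
    print cancelled


if __name__ == "__main__":
    idiokit.main_loop(_timed_queue_demo())
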
def handle_text_csv(self, msg):
    filename = msg.get_filename(None)
    if filename is None:
        self.log.error("No filename given for the data")
        idiokit.stop(False)

    self.log.info("Parsing CSV data from an attachment")
    data = yield msg.get_payload(decode=True)
    result = yield self.parse_csv(filename, StringIO(data))
    idiokit.stop(result)

def handle_text_csv(self, headers, fileobj):
    filename = headers[-1].get_filename(None)
    if filename is None:
        self.log.error("No filename given for the data")
        idiokit.stop(False)

    self.log.info("Parsing CSV data from an attachment")
    fileobj = self._decode(headers, fileobj)
    result = yield self.parse_csv(headers, filename, fileobj)
    idiokit.stop(result)

def get(client, url):
    request = yield client.request("GET", url)
    response = yield request.finish()

    result = ""
    while True:
        data = yield response.read(1024)
        if not data:
            break
        result += data
    idiokit.stop(result)

def _recvall_stream(sock, amount, timeout=None):
    data = []

    while amount > 0:
        with wrapped_socket_errnos(errno.ECONNRESET):
            piece = yield sock.recv(amount, timeout=timeout)
        if not piece:
            raise _ConnectionLost("could not recv() all bytes")
        data.append(piece)
        amount -= len(piece)

    idiokit.stop("".join(data))

def _collect_set():
    result_set = set()

    while True:
        try:
            value = yield idiokit.next()
        except StopIteration:
            break
        result_set.add(value)

    idiokit.stop(result_set)

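# A minimal usage sketch for the collector above, not part of the original
# source. The pattern mirrors the serve/get pipeline further down: when the
# left side of a pipe finishes, idiokit.next() on the right side raises
# StopIteration and _collect_set() stops with the gathered values. Assumes
# idiokit.pipe and idiokit.main_loop behave as used elsewhere in this code.
@idiokit.stream
def _produce(values):
    for value in values:
        yield idiokit.send(value)


if __name__ == "__main__":
    result = idiokit.main_loop(idiokit.pipe(_produce([1, 2, 2, 3]), _collect_set()))
    print result    # set([1, 2, 3])
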
def main(test_string, client):
    s = socket.Socket(socket.AF_INET)
    try:
        yield s.bind(("127.0.0.1", 0))
        yield s.listen(1)
        _, port = yield s.getsockname()

        result = yield serve(test_string, s) | get(client, "http://127.0.0.1:{0}/".format(port))
    finally:
        yield s.close()
    idiokit.stop(result)

def parse_csv(self, headers, filename, fileobj):
    match = re.match(self.filename_rex, filename)
    if match is None:
        self.log.error("Filename {0!r} did not match".format(filename))
        idiokit.stop(False)

    subject = imapbot.get_header(headers[0], "Subject", None)
    yield idiokit.pipe(
        utils.csv_to_events(fileobj),
        self.normalize(subject, match.groupdict()))
    idiokit.stop(True)

def _delayed_log(self, log, name, attrs, delay=1.0):
    try:
        result = yield timer.timeout(delay, idiokit.consume())
    except timer.Timeout:
        log.open("Waiting for {0!r}".format(name), attrs, status="waiting")
        try:
            result = yield idiokit.consume()
        except Cancel:
            log.close("Stopped waiting for {0!r}".format(name), attrs, status="removed")
            raise
    idiokit.stop(result)

def xmpp_connect(self):
    verify_cert = not self.xmpp_ignore_cert

    self.log.info("Connecting to XMPP service with JID " + repr(self.xmpp_jid))
    xmpp = yield connect(
        self.xmpp_jid, self.xmpp_password,
        host=self.xmpp_host,
        port=self.xmpp_port,
        ssl_verify_cert=verify_cert,
        ssl_ca_certs=self.xmpp_extra_ca_certs)
    self.log.info("Connected to XMPP service with JID " + repr(self.xmpp_jid))
    idiokit.stop(xmpp)

def lookup(self, ip):
    results = yield self._origin_lookup.lookup(ip)

    for result in results:
        result = dict(result)

        asn = result.get("asn", None)
        if asn is not None:
            infos = yield self._asname_lookup.lookup(asn)
            for info in infos:
                result.update(info)
                break

        idiokit.stop(tuple(result.items()))
    idiokit.stop(())

def session(self, state, **keys):
    # Try to build a mail for quick feedback that the templates etc. are
    # at least somewhat valid.
    try:
        yield self.build_mail(None, **keys)
    except templates.TemplateError as te:
        self.log.error(u"Mail template was not valid ({0}), pausing session".format(te))
        try:
            yield idiokit.consume()
        except services.Stop:
            idiokit.stop(state)

    result = yield ReportBot.session(self, state, **keys)
    idiokit.stop(result)

def handle_text_plain(self, headers, fileobj):
    fileobj = self._decode(headers, fileobj)

    filename = headers[-1].get_filename(None)
    if filename is not None:
        self.log.info("Parsing CSV data from an attachment")
        result = yield self.parse_csv(headers, filename, fileobj)
        idiokit.stop(result)

    for match in re.findall(self.url_rex, fileobj.read()):
        for try_num in xrange(max(self.retry_count, 0) + 1):
            self.log.info("Fetching URL {0!r}".format(match))
            try:
                info, fileobj = yield utils.fetch_url(match)
            except utils.FetchUrlFailed as fail:
                if self.retry_count <= 0:
                    self.log.error("Fetching URL {0!r} failed ({1}), giving up".format(match, fail))
                    idiokit.stop(False)
                elif try_num == self.retry_count:
                    self.log.error("Fetching URL {0!r} failed ({1}) after {2} retries, giving up".format(match, fail, self.retry_count))
                    idiokit.stop(False)
                else:
                    self.log.error("Fetching URL {0!r} failed ({1}), retrying in {2:.02f} seconds".format(match, fail, self.retry_interval))
                    yield idiokit.sleep(self.retry_interval)
            else:
                break

        filename = info.get_filename(None)
        if filename is None:
            self.log.error("No filename given for the data")
            continue

        self.log.info("Parsing CSV data from the URL")
        result = yield self.parse_csv(headers, filename, fileobj)
        idiokit.stop(result)

def handle_application_zip(self, msg):
    self.log.info("Opening a ZIP attachment")
    data = yield msg.get_payload(decode=True)

    try:
        zip = zipfile.ZipFile(StringIO(data))
    except zipfile.BadZipfile as error:
        self.log.error("ZIP handling failed ({0})".format(error))
        idiokit.stop(False)

    for filename in zip.namelist():
        csv_data = zip.open(filename)

        self.log.info("Parsing CSV data from the ZIP attachment")
        result = yield self.parse_csv(filename, csv_data)
        idiokit.stop(result)

def collect(self, state, **keys):
    if state is None:
        state = utils.CompressedCollection()

    try:
        while True:
            event = yield idiokit.next()
            if event is self.REPORT_NOW:
                yield idiokit.send(state)
                state = utils.CompressedCollection()
            else:
                state.append(event)
    except services.Stop:
        idiokit.stop(state)

def handle_application_zip(self, headers, fileobj):
    self.log.info("Opening a ZIP attachment")
    fileobj = self._decode(headers, fileobj)

    try:
        zip = zipfile.ZipFile(fileobj)
    except zipfile.BadZipfile as error:
        self.log.error("ZIP handling failed ({0})".format(error))
        idiokit.stop(False)

    for filename in zip.namelist():
        csv_data = StringIO(zip.read(filename))

        self.log.info("Parsing CSV data from the ZIP attachment")
        result = yield self.parse_csv(headers, filename, csv_data)
        idiokit.stop(result)