def send_dmail_text(self, subject, message_text):
    if message_text.startswith("from: "):
        p0 = message_text.find('\n')
        m_from_asymkey = rsakey.RsaKey(\
            privdata=base58.decode(message_text[6:p0]))
        p0 += 1
    else:
        p0 = 0
        m_from_asymkey = None

    m_dest_ids = []

    while message_text.startswith("to: ", p0):
        # Search from p0 so each successive "to: " header advances the scan.
        p1 = message_text.find('\n', p0)
        m_dest_enc = message_text[p0+4:p1]
        m_dest_id, sig_bits = mutil.decode_key(m_dest_enc)
        m_dest_ids.append((m_dest_enc, m_dest_id, sig_bits))
        p0 = p1 + 1

    date = mutil.utc_datetime()

    if message_text[p0] == '\n':
        p0 += 1

    message_text = message_text[p0:]

    storing_nodes = yield from self.send_dmail(\
        m_from_asymkey, m_dest_ids, subject, date, message_text)

    return storing_nodes
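For reference, send_dmail_text() parses optional email-style headers followed by a blank line before the body. A minimal sketch of a valid input; the key and address strings are placeholders, not real values:

# Hypothetical input for send_dmail_text(); the base58 private key and the
# mbase32 destination address below are placeholders.
message_text = (
    "from: <base58-encoded RSA private key>\n"
    "to: <mbase32-encoded dmail address or prefix>\n"
    "\n"                                # Blank line ends the headers.
    "Hello from dmail.")

# storing_nodes = yield from dmail_engine.send_dmail_text("greetings", message_text)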
def fetch_recipient_dmail_site(self, addr, significant_bits=None):
    if type(addr) is str:
        addr, significant_bits = mutil.decode_key(addr)
    elif type(addr) in (list, tuple):
        addr, significant_bits = addr
    else:
        assert type(addr) in (bytes, bytearray)

    if significant_bits:
        data_rw = yield from self.task_engine.send_find_key(\
            addr, significant_bits=significant_bits)

        addr_enc = mbase32.encode(addr)  # Keep the prefix for the log below.
        addr = bytes(data_rw.data_key)

        if not addr:
            log.info("Failed to find key for prefix [{}]."\
                .format(addr_enc))
            return None, None

    data_rw =\
        yield from self.task_engine.send_get_data(addr, retry_factor=100)

    if not data_rw.data:
        if log.isEnabledFor(logging.INFO):
            log.info("Failed to fetch dmail site [{}]."\
                .format(mbase32.encode(addr)))
        return None, None

    site_data = data_rw.data.decode("UTF-8")

    if log.isEnabledFor(logging.INFO):
        log.info("site_data=[{}].".format(site_data))

    return addr, DmailSite(site_data)
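The addr argument is accepted in any of the three shapes the type checks above handle; a hypothetical illustration, assuming the method is called on a DmailEngine-style instance named de:

# Hypothetical calls; "yggdrasil..." stands in for a real mbase32 address.
# 1. Encoded string (full key or prefix):
#        addr, site = yield from de.fetch_recipient_dmail_site("yggdrasil...")
# 2. (key_bytes, significant_bits) pair, e.g. from mutil.decode_key():
#        addr, site = yield from de.fetch_recipient_dmail_site((key_bytes, bits))
# 3. Raw full-key bytes:
#        addr, site = yield from de.fetch_recipient_dmail_site(key_bytes)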
def do_getdata(self, arg):
    "<DATA_KEY> [PATH] retrieve data for DATA_KEY from the network."
    args = arg.split(' ')

    data_key, significant_bits = decode_key(args[0])
    path = args[1].encode() if len(args) == 2 else None

    if significant_bits:
        self.writeln("Incomplete key, use findkey.")
        return

    start = datetime.today()

    data_rw =\
        yield from multipart.get_data_buffered(\
            self.peer.engine, data_key, path=path)

    diff = datetime.today() - start

    self.writeln("send_get_data(..) took: {}.".format(diff))
    self.writeln("version=[{}].".format(data_rw.version))
    self.writeln("data:")

    if data_rw.data is not None:
        self.write_raw(data_rw.data)
        self.writeln("")
    else:
        self.writeln("Not found.")
def send_dmail_text(self, subject, message_text):
    if message_text.startswith("from: "):
        p0 = message_text.find('\n')
        m_from_asymkey = rsakey.RsaKey(\
            privdata=base58.decode(message_text[6:p0]))
        p0 += 1
    else:
        p0 = 0
        m_from_asymkey = None

    m_dest_ids = []

    while message_text.startswith("to: ", p0):
        # Search from p0 so each successive "to: " header advances the scan.
        p1 = message_text.find('\n', p0)
        m_dest_enc = message_text[p0 + 4:p1]
        m_dest_id, sig_bits = mutil.decode_key(m_dest_enc)
        m_dest_ids.append((m_dest_enc, m_dest_id, sig_bits))
        p0 = p1 + 1

    date = mutil.utc_datetime()

    if message_text[p0] == '\n':
        p0 += 1

    message_text = message_text[p0:]

    storing_nodes = 0

    for dest_id in m_dest_ids:
        storing_nodes += yield from self.send_dmail(\
            m_from_asymkey, dest_id, subject, date, message_text)

    return storing_nodes
def do_findkey(self, arg):
    "<DATA_KEY_PREFIX> [TARGET_ID] [SIGNIFICANT_BITS] search the network"\
    " for the given key."
    args = arg.split(" ")

    data_key, significant_bits = decode_key(args[0])
    target_key = mbase32.decode(args[1]) if len(args) >= 2 else None
    if len(args) == 3:
        significant_bits = int(args[2])

    start = datetime.today()

    data_rw = yield from self.peer.engine.tasks.send_find_key(
        data_key, significant_bits=significant_bits, target_key=target_key)

    diff = datetime.today() - start

    data_key_enc =\
        mbase32.encode(data_rw.data_key) if data_rw.data_key else None

    self.writeln("data_key=[{}].".format(data_key_enc))
    self.writeln("send_find_key(..) took: {}.".format(diff))
def fetch_recipient_dmail_sites(self, recipients):
    robjs = []

    for entry in recipients:
        if type(entry) is str:
            recipient, significant_bits = mutil.decode_key(entry)
            # Normalize the string form into the tuple form handled below.
            entry = (entry, bytes(recipient), significant_bits)

        if type(entry) in (tuple, list):
            recipient_enc, recipient, significant_bits = entry

            if significant_bits:
                data_rw = yield from self.task_engine.send_find_key(\
                    recipient, significant_bits=significant_bits)

                recipient = bytes(data_rw.data_key)

                if not recipient:
                    log.info("Failed to find key for prefix [{}]."\
                        .format(recipient_enc))
                    # Skip recipients whose prefix could not be resolved.
                    continue
        else:
            recipient = entry

        data_rw = yield from self.task_engine.send_get_data(recipient,\
            retry_factor=100)

        if not data_rw.data:
            if log.isEnabledFor(logging.INFO):
                log.info("Failed to fetch dmail site [{}]."\
                    .format(mbase32.encode(recipient)))
            continue

        site_data = data_rw.data.decode("UTF-8")

        if log.isEnabledFor(logging.INFO):
            log.info("site_data=[{}].".format(site_data))

        robjs.append(DmailSite(site_data))

    return robjs
def do_findnode(self, arg):
    "[ID] find the node with id."
    node_id, significant_bits = decode_key(arg)

    if significant_bits:
        self.writeln("Won't FindNode for incomplete key.")
        return

    start = datetime.today()

    result = yield from self.peer.engine.tasks.send_find_node(node_id)

    diff = datetime.today() - start

    self.writeln("send_find_node(..) took: {}.".format(diff))

    for r in result:
        self.writeln("nid[{}] FOUND: {:22} id=[{}] diff=[{}]"\
            .format(r.id, r.address, hex_string(r.node_id),\
                hex_string(\
                    calc_raw_distance(\
                        r.node_id, node_id))))
def do_gettargeteddata(self, arg):
    "<DATA_KEY> retrieve targeted data for DATA_KEY from the network."
    data_key, significant_bits = decode_key(arg)

    if significant_bits:
        self.writeln("Incomplete key, use findkey.")
        return

    start = datetime.today()

    data_rw =\
        yield from self.peer.engine.tasks.send_get_targeted_data(data_key)

    diff = datetime.today() - start

    self.writeln("send_get_targeted_data(..) took: {}.".format(diff))

    if data_rw.data is not None:
        self.writeln("data:")
        self.write_raw(data_rw.data)
        self.writeln("")
    else:
        self.writeln("Not found.")
def dispatch_get_data(self, rpath):
    orig_etag = etag = self.handler.headers["If-None-Match"]

    if etag:
        updateable_key = etag.startswith("updateablekey-")
        if updateable_key:
            p0 = etag.index('-') + 1
            p1 = etag.find('-', p0)
            if p1 != -1:
                version_from_etag = etag[p0:p1]
                etag = etag[p1+1:]
            else:
                version_from_etag = None
                etag = etag[p0:]
    else:
        updateable_key = False

    if etag == rpath:
        # If browser has it cached.
        cache_control = self.handler.headers["Cache-Control"]

        if not (updateable_key and cache_control == "max-age=0")\
                and cache_control != "no-cache":
            self.send_response(304)

            if updateable_key:
                if version_from_etag:
                    self.send_header(\
                        "X-Maalstroom-UpdateableKey-Version",\
                        version_from_etag)
                self.send_header("Cache-Control", "public,max-age=15")
                self.send_header("ETag", orig_etag)
            else:
                self.send_header(\
                    "Cache-Control", "public,max-age=315360000")
                self.send_header("ETag", rpath)

            self.send_header("Content-Length", 0)
            self.end_headers()
            self.finish_response()
            return

    if not self.connection_count:
        self.send_error("No connected nodes; cannot fetch from the"\
            " network.")
        return

    path_sep_idx = rpath.find('/')
    if path_sep_idx != -1:
        path = rpath[path_sep_idx+1:].encode()
        rpath = rpath[:path_sep_idx]
    else:
        path = None

    if not rpath:
        msg = "Empty key was specified."
        log.warning(msg)
        self.send_error(msg, 400)
        return

    try:
        data_key, significant_bits = mutil.decode_key(rpath)
    except (ValueError, IndexError) as e:
        log.exception("mutil.decode_key(..), rpath=[{}].".format(rpath))
        self.send_error("Invalid encoded key: [{}].".format(rpath), 400)
        return

    if significant_bits:
        # Resolve key via send_find_key.
        if significant_bits < 32:
            log.warning("Request supplied key with too few bits [{}]."\
                .format(significant_bits))
            self.send_error(\
                "Key must have at least 32 bits or 7 characters,"\
                " len(key)=[{}].".format(len(rpath)), 400)
            return

        try:
            data_rw =\
                yield from asyncio.wait_for(\
                    self.node.chord_engine.tasks.send_find_key(\
                        data_key, significant_bits),\
                    15.0,\
                    loop=self.loop)

            data_key = data_rw.data_key
        except asyncio.TimeoutError:
            data_key = None

        if not data_key:
            self.send_error(b"Key Not Found", errcode=404)
            return

        if log.isEnabledFor(logging.INFO):
            log.info("Found key=[{}].".format(mbase32.encode(data_key)))

        key_enc = mbase32.encode(data_rw.data_key)

        if path:
            url = "{}{}/{}"\
                .format(\
                    self.handler.maalstroom_url_prefix_str,\
                    key_enc,\
                    path.decode("UTF-8"))
        else:
            url = "{}{}"\
                .format(\
                    self.handler.maalstroom_url_prefix_str,\
                    key_enc)

        message = "<html><head><title>Redirecting to Full Key</title>"\
            "</head><body><a href=\"{}\">{}</a>\n{}</body></html>"\
            .format(url, url, key_enc).encode()

        self.send_301(url, message)
        return

    if log.isEnabledFor(logging.DEBUG):
        log.debug("Sending GetData: key=[{}], path=[{}]."\
            .format(mbase32.encode(data_key), path))

    queue = asyncio.Queue(loop=self.loop)

    # Start the download.
    try:
        data_callback = Downloader(self, queue)

        @asyncio.coroutine
        def call_wrapper():
            try:
                yield from multipart.get_data(\
                    self.node.chord_engine, data_key, data_callback,\
                    path=path, ordered=True)
            except Exception as e:
                log.exception("multipart.get_data(..)")
                data_callback.exception = e

                data_callback.notify_finished(False)

        asyncio.async(call_wrapper(), loop=self.loop)
    except Exception as e:
        log.exception("send_get_data(..)")
        self.send_exception(e)
        return

    log.debug("Waiting for first data.")

    #TODO: This can be improved. Right now it causes the response to wait
    # for the first block of data to be fetched (which could be after a
    # few hash blocks are fetched) before it allows us to send the headers.
    # Fixing this would let the browser report the size right away instead
    # of seeming to take longer. It would require the response to always be
    # chunked, as we don't know until we get that first data whether we are
    # going to rewrite or not. Such an improvement wouldn't increase the
    # speed, so it can wait; it is likely only cosmetic.
    data = yield from queue.get()

    if data:
        if data is Error:
            self.send_exception(data_callback.exception)

        self.send_response(200)

        rewrite_urls = False

        if data_callback.mime_type:
            self.send_header("Content-Type", data_callback.mime_type)
            if data_callback.mime_type\
                    in ("text/html", "text/css", "application/javascript"):
                rewrite_urls = True
        else:
            dh = data[:160]

            if dh[0] == 0xFF and dh[1] == 0xD8:
                self.send_header("Content-Type", "image/jpg")
            elif dh[0] == 0x89 and dh[1:4] == b"PNG":
                self.send_header("Content-Type", "image/png")
            elif dh[:5] == b"GIF89":
                self.send_header("Content-Type", "image/gif")
            elif dh[:5] == b"/*CSS":
                self.send_header("Content-Type", "text/css")
                rewrite_urls = True
            elif dh[:12] == b"/*JAVASCRIPT":
                self.send_header("Content-Type", "application/javascript")
                rewrite_urls = True
            elif dh[:8] == bytes(\
                    [0x00, 0x00, 0x00, 0x18, 0x66, 0x74, 0x79, 0x70])\
                    or dh[:8] == bytes(\
                    [0x00, 0x00, 0x00, 0x1c, 0x66, 0x74, 0x79, 0x70]):
                self.send_header("Content-Type", "video/mp4")
            elif dh[:8] == bytes(\
                    [0x50, 0x4b, 0x03, 0x04, 0x0a, 0x00, 0x00, 0x00]):
                self.send_header("Content-Type", "application/zip")
            elif dh[:5] == bytes(\
                    [0x25, 0x50, 0x44, 0x46, 0x2d]):
                self.send_header("Content-Type", "application/pdf")
            elif dh[:4] == b"RIFF" and dh[8:11] == b"AVI":
                self.send_header("Content-Type", "video/avi")
            else:
                dhl = dh.lower()

                if (dhl.find(b"<html") > -1 or dhl.find(b"<HTML>") > -1)\
                        and (dhl.find(b"<head>") > -1\
                        or dhl.find(b"<HEAD") > -1):
                    self.send_header("Content-Type", "text/html")
                    rewrite_urls = True
                else:
                    self.send_header(\
                        "Content-Type", "application/octet-stream")

        rewrite_urls = rewrite_urls\
            and not self.handler.maalstroom_plugin_used

        if rewrite_urls:
            self.send_header("Transfer-Encoding", "chunked")
        else:
            self.send_header("Content-Length", data_callback.size)

        if data_callback.version is not None:
            self.send_header(\
                "X-Maalstroom-UpdateableKey-Version",\
                data_callback.version)
            self.send_header("Cache-Control", "public,max-age=15")
            self.send_header(\
                "ETag",\
                "updateablekey-" + str(data_callback.version) + '-'\
                    + rpath)
        else:
            self.send_header("Cache-Control", "public,max-age=315360000")
            self.send_header("ETag", rpath)

        self.end_headers()

        while True:
            if rewrite_urls:
                self.send_partial_content(data)
            else:
                self.write(data)

            data = yield from queue.get()

            if data is None:
                if rewrite_urls:
                    self.end_partial_content()
                else:
                    self.finish_response()
                break
            elif data is Error:
                if rewrite_urls:
                    self._fail_partial_content()
                else:
                    self.close()
                break

            if self._abort_event.is_set():
                if log.isEnabledFor(logging.INFO):
                    log.info(\
                        "Maalstroom request got broken pipe from HTTP"\
                        " side; cancelling.")
                data_callback.abort = True
                break
    else:
        self.send_error(b"Data not found on network.", 404)
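The magic-byte Content-Type detection above can also be read in isolation; a minimal standalone sketch of the same idea (the helper name sniff_mime_type is illustrative and not part of the codebase):

# Illustrative helper mirroring a few of the signature checks above.
def sniff_mime_type(dh):
    "Guess a Content-Type from the first bytes of a block (dh = data[:160])."
    if dh[0] == 0xFF and dh[1] == 0xD8:
        return "image/jpg"          # JPEG start-of-image marker.
    if dh[0] == 0x89 and dh[1:4] == b"PNG":
        return "image/png"          # PNG signature.
    if dh[:5] == b"GIF89":
        return "image/gif"
    if dh[:5] == b"%PDF-":
        return "application/pdf"    # Same bytes as 0x25 0x50 0x44 0x46 0x2d.
    if dh[:4] == b"RIFF" and dh[8:11] == b"AVI":
        return "video/avi"
    return "application/octet-stream"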
def _read_dmail_post(dispatcher, data):
    charset = dispatcher.handler.headers["Content-Type"]
    if charset:
        p0 = charset.find("charset=")
        if p0 > -1:
            p0 += 8
            p1 = charset.find(" ", p0 + 8)
            if p1 == -1:
                p1 = charset.find(";", p0 + 8)
            if p1 > -1:
                charset = charset[p0:p1].strip()
            else:
                charset = charset[p0:].strip()

            if log.isEnabledFor(logging.DEBUG):
                log.debug("Form charset=[{}].".format(charset))
        else:
            charset = "UTF-8"

    qs = data.decode(charset)

    dd = parse_qs(qs, keep_blank_values=True)

    if log.isEnabledFor(logging.DEBUG):
        log.debug("dd=[{}].".format(dd))

    if not dispatcher.check_csrf_token(dd["csrf_token"][0]):
        return None, None

    dm = DmailMessage()

    subject = dd.get("subject")
    if subject:
        dm.subject = subject[0]
    else:
        dm.subject = ""

    sender_dmail_id = dd.get("sender")

    if sender_dmail_id:
        sender_dmail_id = sender_dmail_id[0]

        if log.isEnabledFor(logging.DEBUG):
            log.debug("sender_dmail_id=[{}].".format(sender_dmail_id))

        if sender_dmail_id and sender_dmail_id != "":
            sender_dmail_id = int(sender_dmail_id)
            dmail_address =\
                yield from _load_dmail_address(dispatcher, sender_dmail_id)

            dm.address = dmail_address
            dm.sender_valid = True
            dm.sender_dmail_key = dm.address.site_key

    if not dm.address:
        owner_if_anon = dd.get("owner_if_anon")
        if owner_if_anon and owner_if_anon[0]:
            dmail_address =\
                yield from _load_dmail_address(dispatcher, owner_if_anon[0])

            dm.address = dmail_address

        if dm.address:
            dm.sender_valid = True
        else:
            dm.sender_valid = False

#        sender_asymkey = rsakey.RsaKey(\
#            privdata=dmail_address.site_privatekey)\
#                if dmail_address else None
#    else:
#        sender_asymkey = None

    dest_addr_enc = dd.get("destination")
    if dest_addr_enc:
        dm.destination_dmail_key, dm.destination_significant_bits =\
            mutil.decode_key(dest_addr_enc[0])
#            mbase32.decode(dest_addr_enc[0])
#        dispatcher.send_error("You must specify a destination.", 400)

    content = dd.get("content")
    if content:
        dp = DmailPart()
        dp.mime_type = "text/plain"
        dp.data = content[0].encode()
        dm.parts.append(dp)

    dm.date = mutil.utc_datetime()

    dm.hidden = False
    dm.read = True
    dm.deleted = False

    submit = dd.get("submit")
    if submit:
        return dm, submit[0]
    else:
        return dm, None
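For reference, the URL-encoded POST body this helper parses carries the field names read above; a hypothetical example where every value is a placeholder:

# Hypothetical POST body for _read_dmail_post(); all values are placeholders.
example_body = (
    b"csrf_token=abc123"
    b"&subject=Hello"
    b"&sender=1"                # Database id of the sending dmail address.
    b"&destination=yggdrasil"   # mbase32 dmail address (or prefix).
    b"&content=Message+text"
    b"&submit=send")

# dm, submit = yield from _read_dmail_post(dispatcher, example_body)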
def do_GET(self):
    self.__prepare_for_request()

    rpath = self.path[1:]

    if rpath and rpath[-1] == '/':
        rpath = rpath[:-1]

    connection_cnt = None

    if not rpath:
        connection_cnt = self._get_connection_count()

        content = pages.home_page_content[0].replace(\
            b"${CONNECTIONS}", str(connection_cnt).encode())
        content = content.replace(\
            b"${MORPHIS_VERSION}", self.node.morphis_version.encode())

        self._send_content([content, None])
        return

    s_upload = ".upload"
    s_dmail = ".dmail"
    s_aiwj = ".aiwj"

    if log.isEnabledFor(logging.DEBUG):
        log.debug("rpath=[{}].".format(rpath))

    if rpath.startswith(s_aiwj):
        self._send_content(\
            b"AIWJ - Asynchronous IFrames Without Javascript!")
        return
    elif rpath.startswith(s_upload):
        if rpath.startswith(".upload/generate"):
            priv_key =\
                base58.encode(\
                    rsakey.RsaKey.generate(bits=4096)._encode_key())

            self.send_response(307)
            self.send_header("Location", "{}".format(priv_key))
            self.send_header("Content-Length", 0)
            self.end_headers()
            return

        if len(rpath) == len(s_upload):
            self._send_content(static_upload_page_content)
        else:
            content =\
                upload_page_content.replace(\
                    b"${PRIVATE_KEY}",\
                    rpath[len(s_upload)+1:].encode())
            content =\
                content.replace(\
                    b"${VERSION}",\
                    str(int(time.time()*1000)).encode())
            content =\
                content.replace(\
                    b"${UPDATEABLE_KEY_MODE_DISPLAY}",\
                    b"")
            content =\
                content.replace(\
                    b"${STATIC_MODE_DISPLAY}",\
                    b"display: none")

            self._send_content(content)
        return
    elif rpath.startswith(s_dmail):
        if self.node.web_devel:
            importlib.reload(pages)
            importlib.reload(pages.dmail)

        pages.dmail.serve_get(self, rpath)
        return

    if self.headers["If-None-Match"] == rpath:
        cache_control = self.headers["Cache-Control"]

        if cache_control != "max-age=0":
            self.send_response(304)
            if cache_control:
                # This should only have been sent for an updateable key.
                self.send_header("Cache-Control", "max-age=15, public")
            else:
                self.send_header("ETag", rpath)
            self.send_header("Content-Length", 0)
            self.end_headers()
            return

    significant_bits = None

    # At this point we assume it is a key URL.

    if connection_cnt is None:
        connection_cnt = self._get_connection_count()

    if not connection_cnt:
        self._send_error("No connected nodes; cannot fetch from the"\
            " network.")
        return

    path_sep_idx = rpath.find('/')
    if path_sep_idx != -1:
        path = rpath[path_sep_idx+1:].encode()
        rpath = rpath[:path_sep_idx]
    else:
        path = None

    try:
        data_key, significant_bits = mutil.decode_key(rpath)
    except:
        self._send_error(\
            "Invalid encoded key: [{}].".format(rpath),\
            errcode=400)
        return

    data_rw = DataResponseWrapper()

    self.node.loop.call_soon_threadsafe(\
        asyncio.async,\
        _send_get_data(data_key, significant_bits, path, data_rw))

    data = data_rw.data_queue.get()

    if significant_bits:
        if data_rw.data_key:
            key = mbase32.encode(data_rw.data_key)

            if path:
                url = "{}{}/{}"\
                    .format(\
                        self.maalstroom_url_prefix.decode(),\
                        key,\
                        path.decode("UTF-8"))
            else:
                url = "{}{}"\
                    .format(\
                        self.maalstroom_url_prefix.decode(),\
                        key)

            message = ("<html><head><title>permalink</title></head><body>"\
                "<a href=\"{}\">{}</a>\n{}</body></html>"\
                .format(url, url, key))\
                .encode()

            self.send_response(301)
            self.send_header("Location", url)
            self.send_header("Content-Type", "text/html")
            self.send_header("Content-Length", len(message))
            self.end_headers()

            self.wfile.write(message)
            return

    if data:
        self.send_response(200)

        rewrite_url = False

        if data_rw.mime_type:
            self.send_header("Content-Type", data_rw.mime_type)
            if data_rw.mime_type\
                    in ("text/html", "text/css", "application/javascript"):
                rewrite_url = True
        else:
            dh = data[:160]

            if dh[0] == 0xFF and dh[1] == 0xD8:
                self.send_header("Content-Type", "image/jpg")
            elif dh[0] == 0x89 and dh[1:4] == b"PNG":
                self.send_header("Content-Type", "image/png")
            elif dh[:5] == b"GIF89":
                self.send_header("Content-Type", "image/gif")
            elif dh[:5] == b"/*CSS":
                self.send_header("Content-Type", "text/css")
                rewrite_url = True
            elif dh[:12] == b"/*JAVASCRIPT":
                self.send_header("Content-Type", "application/javascript")
                rewrite_url = True
            elif dh[:8] == bytes(\
                    [0x00, 0x00, 0x00, 0x18, 0x66, 0x74, 0x79, 0x70])\
                    or dh[:8] == bytes(\
                    [0x00, 0x00, 0x00, 0x1c, 0x66, 0x74, 0x79, 0x70]):
                self.send_header("Content-Type", "video/mp4")
            elif dh[:8] == bytes(\
                    [0x50, 0x4b, 0x03, 0x04, 0x0a, 0x00, 0x00, 0x00]):
                self.send_header("Content-Type", "application/zip")
            elif dh[:5] == bytes(\
                    [0x25, 0x50, 0x44, 0x46, 0x2d]):
                self.send_header("Content-Type", "application/pdf")
            elif dh[:4] == b"RIFF" and dh[8:11] == b"AVI":
                self.send_header("Content-Type", "video/avi")
            else:
                dhl = dh.lower()

                if (dhl.find(b"<html") > -1 or dhl.find(b"<HTML>") > -1)\
                        and (dhl.find(b"<head>") > -1\
                        or dhl.find(b"<HEAD") > -1):
                    self.send_header("Content-Type", "text/html")
                    rewrite_url = True
                else:
                    self.send_header(\
                        "Content-Type", "application/octet-stream")

        rewrite_url = rewrite_url and not self.maalstroom_plugin_used

        if rewrite_url:
            self.send_header("Transfer-Encoding", "chunked")
        else:
            self.send_header("Content-Length", data_rw.size)

        if data_rw.version is not None:
            self.send_header("Cache-Control", "max-age=15, public")
#            self.send_header("ETag", rpath)
        else:
            self.send_header("Cache-Control", "public")
            self.send_header("ETag", rpath)

        self.end_headers()

        try:
            while True:
                if rewrite_url:
                    self._send_partial_content(data)
                else:
                    self.wfile.write(data)

                data = data_rw.data_queue.get()

                if data is None:
                    if data_rw.timed_out:
                        log.warning(\
                            "Request timed out; closing connection.")
                        self.close_connection = True

                    if rewrite_url:
                        self._end_partial_content()
                    break
        except ConnectionError:
            if log.isEnabledFor(logging.INFO):
                log.info("Maalstroom request got broken pipe from HTTP"\
                    " side; cancelling.")
            data_rw.cancelled.set()
    else:
        self._handle_error(data_rw)
def __serve_post(handler, rpath, done_event):
    assert rpath.startswith(s_dmail)

    req = rpath[len(s_dmail):]

    if req == "/compose/make_it_so":
        data = handler.rfile.read(int(handler.headers["Content-Length"]))

        if log.isEnabledFor(logging.DEBUG):
            log.debug("data=[{}].".format(data))

        dd = urllib.parse.parse_qs(data, keep_blank_values=True)

        if log.isEnabledFor(logging.DEBUG):
            log.debug("dd=[{}].".format(dd))

        subject = dd.get(b"subject")
        if subject:
            subject = subject[0].decode()
        else:
            subject = ""

        sender_dmail_id = dd.get(b"sender")

        if sender_dmail_id[0]:
            sender_dmail_id = int(sender_dmail_id[0])

            if log.isEnabledFor(logging.DEBUG):
                log.debug("sender_dmail_id=[{}].".format(sender_dmail_id))

            dmail_address =\
                yield from _fetch_dmail_address(handler, sender_dmail_id)

            sender_asymkey = rsakey.RsaKey(\
                privdata=dmail_address.site_privatekey)\
                    if dmail_address else None
        else:
            sender_asymkey = None

        dest_addr_enc = dd.get(b"destination")
        if not dest_addr_enc:
            handler._send_error("You must specify a destination.", 400)
            return

        recipient, significant_bits =\
            mutil.decode_key(dest_addr_enc[0].decode())
        recipients = [(dest_addr_enc, bytes(recipient), significant_bits)]

        content = dd.get(b"content")
        if content:
            content = content[0]

        de =\
            dmail.DmailEngine(handler.node.chord_engine.tasks, handler.node.db)

        storing_nodes =\
            yield from de.send_dmail(\
                sender_asymkey,\
                recipients,\
                subject,\
                None,\
                content)

        if storing_nodes:
            handler._send_content(\
                "SUCCESS.<br/><p>Dmail successfully sent to: {}</p>"\
                .format(dest_addr_enc[0].decode()).encode())
        else:
            handler._send_content(\
                "FAIL.<br/><p>Dmail timed out being stored on the network;"\
                " please try again.</p>"\
                .format(dest_addr_enc[0].decode()).encode())
    else:
        handler._handle_error()
def __serve_get(handler, rpath, done_event):
    if len(rpath) == len(s_dmail):
        handler._send_content(pages.dmail_page_content)
    else:
        req = rpath[len(s_dmail):]

        log.info("req=[{}].".format(req))

        if req == "/css":
            handler._send_content(\
                pages.dmail_css_content, content_type="text/css")
        elif req == "/address_list":
            handler._send_partial_content(
                pages.dmail_page_content__f1_start, True)

            site_keys = yield from _list_dmail_addresses(handler)

            for dbid, site_key in site_keys:
                site_key_enc = mbase32.encode(site_key)

                resp = """<span class="nowrap">[<a href="addr/{}">view</a>]"""\
                    """ {}</span><br/>"""\
                    .format(site_key_enc, site_key_enc)

                handler._send_partial_content(resp)

            handler._send_partial_content(pages.dmail_page_content__f1_end)
            handler._end_partial_content()
        elif req.startswith("/compose/form"):
            dest_addr_enc = req[14:] if len(req) > 14 else ""

            handler._send_partial_content(\
                pages.dmail_compose_dmail_form_start, True)

            site_keys = yield from _list_dmail_addresses(handler)

            for dbid, site_key in site_keys:
                site_key_enc = mbase32.encode(site_key)

                sender_element = """<option value="{}">{}</option>"""\
                    .format(dbid, site_key_enc)

                handler._send_partial_content(sender_element)

            handler._send_partial_content(\
                """<option value="">[Anonymous]</option>""")

            handler._send_partial_content(\
                pages.dmail_compose_dmail_form_end.replace(\
                    b"${DEST_ADDR}", dest_addr_enc.encode()))

            handler._end_partial_content()
        elif req.startswith("/compose"):
            from_addr = req[9:] if len(req) > 9 else ""

            if from_addr:
                iframe_src = "../compose/form/{}".format(from_addr).encode()
            else:
                iframe_src = "compose/form".encode()

            content = pages.dmail_compose_dmail_content[0].replace(\
                b"${IFRAME_SRC}", iframe_src)

            handler._send_content([content, None])
        elif req.startswith("/addr/view/"):
            addr_enc = req[11:]

            start = pages.dmail_addr_view_start.replace(\
                b"${DMAIL_ADDRESS}", addr_enc.encode())
            start = start.replace(\
                b"${DMAIL_ADDRESS_SHORT}", addr_enc[:32].encode())

            handler._send_partial_content(start, True)
            handler._send_partial_content(pages.dmail_addr_view_end)
            handler._end_partial_content()
        elif req.startswith("/addr/settings/edit/publish?"):
            query = req[28:]

            qdict = urllib.parse.parse_qs(query, keep_blank_values=True)

            addr_enc = qdict["dmail_address"][0]
            difficulty = qdict["difficulty"][0]

            def processor(dmail_address):
                if difficulty != dmail_address.keys[0].difficulty:
                    dmail_address.keys[0].difficulty = difficulty
                    return True
                else:
                    return False

            dmail_address = yield from\
                _process_dmail_address(\
                    handler, mbase32.decode(addr_enc), processor)

            dh = dhgroup14.DhGroup14()
            dh.x = sshtype.parseMpint(dmail_address.keys[0].x)[1]
            dh.generate_e()

            dms = dmail.DmailSite()
            root = dms.root
            root["target"] =\
                mbase32.encode(dmail_address.keys[0].target_key)
            root["difficulty"] = int(difficulty)
            root["ssm"] = "mdh-v1"
            root["sse"] = base58.encode(sshtype.encodeMpint(dh.e))

            private_key = rsakey.RsaKey(privdata=dmail_address.site_privatekey)

            r = yield from\
                handler.node.chord_engine.tasks.send_store_updateable_key(\
                    dms.export(), private_key,\
                    version=int(time.time()*1000), store_key=True)

            handler._send_content(\
                pages.dmail_addr_settings_edit_success_content[0]\
                    .format(addr_enc, addr_enc[:32]).encode())
        elif req.startswith("/addr/settings/edit/"):
            addr_enc = req[20:]

            dmail_address = yield from\
                _load_dmail_address(handler, mbase32.decode(addr_enc))

            content = pages.dmail_addr_settings_edit_content[0].replace(\
                b"${DIFFICULTY}",\
                str(dmail_address.keys[0].difficulty).encode())
            content = content.replace(\
                b"${DMAIL_ADDRESS_SHORT}", addr_enc[:32].encode())
            content = content.replace(\
                b"${DMAIL_ADDRESS}", addr_enc.encode())
            content = content.replace(\
                b"${PRIVATE_KEY}",\
                base58.encode(dmail_address.site_privatekey).encode())
            content = content.replace(\
                b"${X}", base58.encode(dmail_address.keys[0].x).encode())
            content = content.replace(\
                b"${TARGET_KEY}",\
                base58.encode(dmail_address.keys[0].target_key).encode())

            handler._send_content([content, None])
        elif req.startswith("/addr/settings/"):
            addr_enc = req[15:]

            content = pages.dmail_addr_settings_content[0].replace(\
                b"${IFRAME_SRC}",\
                "edit/{}".format(addr_enc).encode())

            handler._send_content([content, None])
        elif req.startswith("/addr/"):
            addr_enc = req[6:]

            if log.isEnabledFor(logging.INFO):
                log.info("Viewing dmail address [{}].".format(addr_enc))

            content = pages.dmail_address_page_content[0].replace(\
                b"${IFRAME_SRC}", "view/{}".format(addr_enc).encode())

            handler._send_content([content, None])
        elif req.startswith("/tag/view/list/"):
            params = req[15:]

            p0 = params.index('/')
            tag = params[:p0]
            addr_enc = params[p0+1:]

            if log.isEnabledFor(logging.INFO):
                log.info("Viewing dmails with tag [{}] for address [{}]."\
                    .format(tag, addr_enc))

            start = pages.dmail_tag_view_list_start.replace(\
                b"${TAG_NAME}", tag.encode())
            #FIXME: This is getting inefficient now, maybe time for Flask or
            # something like it. Maybe we can use just its template renderer.
            start = start.replace(b"${DMAIL_ADDRESS}", addr_enc.encode())
            start = start.replace(\
                b"${DMAIL_ADDRESS2}",\
                "{}...".format(addr_enc[:32]).encode())

            handler._send_partial_content(start, True)

            yield from\
                _list_dmails_for_tag(handler, mbase32.decode(addr_enc), tag)

            handler._send_partial_content(pages.dmail_tag_view_list_end)
            handler._end_partial_content()
        elif req.startswith("/tag/view/"):
            params = req[10:]

            content = pages.dmail_tag_view_content[0].replace(\
                b"${IFRAME_SRC}", "../list/{}".format(params).encode())

            handler._send_content(content)
        elif req.startswith("/scan/list/"):
            addr_enc = req[11:]

            if log.isEnabledFor(logging.INFO):
                log.info("Viewing inbox for dmail address [{}]."\
                    .format(addr_enc))

            start = pages.dmail_inbox_start.replace(\
                b"${DMAIL_ADDRESS}", addr_enc.encode())
            start = start.replace(\
                b"${DMAIL_ADDRESS2}", "{}...".format(addr_enc[:32]).encode())

            handler._send_partial_content(start, True)

            addr, significant_bits = mutil.decode_key(addr_enc)

            yield from _scan_new_dmails(handler, addr, significant_bits)

            handler._send_partial_content(pages.dmail_inbox_end)
            handler._end_partial_content()
        elif req.startswith("/scan/"):
            addr_enc = req[6:]

            content = pages.dmail_address_page_content[0].replace(\
                b"${IFRAME_SRC}", "list/{}".format(addr_enc).encode())

            handler._send_content([content, None])
        elif req.startswith("/fetch/view/"):
            keys = req[12:]
            p0 = keys.index('/')
            dmail_addr_enc = keys[:p0]
            dmail_key_enc = keys[p0+1:]

            dmail_addr = mbase32.decode(dmail_addr_enc)
            dmail_key = mbase32.decode(dmail_key_enc)

            dm = yield from _load_dmail(handler, dmail_key)

            if dm:
                valid_sig = dm.sender_valid
            else:
                dm, valid_sig =\
                    yield from _fetch_dmail(handler, dmail_addr, dmail_key)

            dmail_text = _format_dmail(dm, valid_sig)

            handler._send_content(\
                dmail_text.encode(), content_type="text/plain")
        elif req.startswith("/fetch/panel/mark_as_read/"):
            req_data = req[26:]

            p0 = req_data.index('/')
            dmail_key_enc = req_data[p0+1:]
            dmail_key = mbase32.decode(dmail_key_enc)

            def processor(dmail):
                dmail.read = not dmail.read
                return True

            yield from _process_dmail_message(handler, dmail_key, processor)

            handler._send_204()
        elif req.startswith("/fetch/panel/trash/"):
            req_data = req[20:]

            p0 = req_data.index('/')
            dmail_key_enc = req_data[p0+1:]
            dmail_key = mbase32.decode(dmail_key_enc)

            def processor(dmail):
                dmail.hidden = not dmail.hidden
                return True

            yield from _process_dmail_message(handler, dmail_key, processor)

            handler._send_204()
        elif req.startswith("/fetch/panel/"):
            req_data = req[13:]

            content = pages.dmail_fetch_panel_content[0].replace(\
                b"${DMAIL_IDS}", req_data.encode())

            handler._send_content([content, None])
        elif req.startswith("/fetch/wrapper/"):
            req_data = req[15:]

            content = pages.dmail_fetch_wrapper[0].replace(\
                b"${IFRAME_SRC}",\
                "../../view/{}"\
                    .format(req_data).encode())
            #FIXME: This is getting inefficient now, maybe time for Flask or
            # something like it. Maybe we can use just its template renderer.
            content = content.replace(\
                b"${IFRAME2_SRC}",\
                "../../panel/{}"\
                    .format(req_data).encode())

            handler._send_content([content, None])
        elif req.startswith("/fetch/"):
            req_data = req[7:]

            content = pages.dmail_address_page_content[0].replace(\
                b"${IFRAME_SRC}", "../wrapper/{}".format(req_data).encode())

            handler._send_content([content, None])
        elif req == "/create_address":
            handler._send_content(pages.dmail_create_address_content)
        elif req == "/create_address/form":
            handler._send_content(pages.dmail_create_address_form_content)
        elif req.startswith("/create_address/make_it_so?"):
            query = req[27:]

            qdict = urllib.parse.parse_qs(query, keep_blank_values=True)

            prefix = qdict["prefix"][0]
            difficulty = int(qdict["difficulty"][0])

            log.info("prefix=[{}].".format(prefix))

            privkey, dmail_key, dms =\
                yield from _create_dmail_address(handler, prefix, difficulty)

            dmail_key_enc = mbase32.encode(dmail_key)

            handler._send_partial_content(pages.dmail_frame_start, True)
            handler._send_partial_content(b"SUCCESS<br/>")
            handler._send_partial_content(\
                """<p>New dmail address: <a href="../addr/{}">{}</a></p>"""\
                    .format(dmail_key_enc, dmail_key_enc).encode())
            handler._send_partial_content(pages.dmail_frame_end)
            handler._end_partial_content()
        else:
            handler._handle_error()
def __main():
    global loop

    log.info("mcc running.")

    parser = argparse.ArgumentParser()
    parser.add_argument(\
        "--address",\
        help="The address of the Morphis node to connect to.",\
        default="127.0.0.1:4250")
    parser.add_argument(\
        "--create-dmail",\
        help="Generate and upload a new dmail site.",\
        action="store_true")
    parser.add_argument(\
        "--dburl",\
        help="Specify the database url to use.")
    parser.add_argument(\
        "--fetch-dmail",\
        help="Fetch dmail for specified key_id.")
    parser.add_argument(\
        "-i",\
        help="Read file as stdin.")
    parser.add_argument("--nn", type=int,\
        help="Node instance number.")
    parser.add_argument(\
        "--prefix",\
        help="Specify the prefix for various things (currently --create-dmail"\
            ").")
    parser.add_argument(\
        "--scan-dmail",\
        help="Scan the network for available dmails.")
    parser.add_argument(\
        "--send-dmail",\
        help="Send stdin as a dmail with the specified subject. The"\
            " sender and recipients may be specified at the beginning of the"\
            " data as with email headers: 'from: ' and 'to: '.")
    parser.add_argument(\
        "--stat",\
        help="Report node status.",\
        action="store_true")
    parser.add_argument("-l", dest="logconf",\
        help="Specify alternate logging.ini [IF SPECIFIED, THIS MUST BE THE"\
            " FIRST PARAMETER!].")
    parser.add_argument(\
        "--dmail-target",\
        help="Specify the dmail target to validate dmail against.")
    parser.add_argument(\
        "-x",\
        help="Specify the x (Diffie-Hellman private secret) to use.")

    args = parser.parse_args()

    # Load or generate client mcc key.
    key_filename = "data/mcc_key-rsa.mnk"
    if os.path.exists(key_filename):
        log.info("mcc private key file found, loading.")
        client_key = rsakey.RsaKey(filename=key_filename)
    else:
        log.info("mcc private key file missing, generating.")
        client_key = rsakey.RsaKey.generate(bits=4096)
        client_key.write_private_key_file(key_filename)

    # Connect a Morphis Client (lightweight Node) instance.
    mc = client.Client(loop, client_key=client_key, address=args.address)

    r = yield from mc.connect()

    if not r:
        log.warning("Connection failed; exiting.")
        loop.stop()
        return

    dbase = init_db(args)
    de = dmail.DmailEngine(mc, dbase)

    log.info("Processing command requests...")

    if args.stat:
        r = yield from mc.send_command("stat")
        print(r.decode("UTF-8"), end='')

    if args.create_dmail:
        log.info("Creating and uploading dmail site.")

        privkey, data_key, dms =\
            yield from de.generate_dmail_address(args.prefix)

        print("privkey: {}".format(base58.encode(privkey._encode_key())))
        print("x: {}".format(base58.encode(sshtype.encodeMpint(dms.dh.x))))
        print("dmail address: {}".format(mbase32.encode(data_key)))

    if args.send_dmail:
        log.info("Sending dmail.")

        if args.i:
            with open(args.i, "rb") as fh:
                dmail_data = fh.read().decode()
        else:
            dmail_data = stdin.read()

        if log.isEnabledFor(logging.DEBUG):
            log.debug("dmail_data=[{}].".format(dmail_data))

        yield from de.send_dmail_text(args.send_dmail, dmail_data)

    if args.scan_dmail:
        log.info("Scanning dmail address.")

        addr, sig_bits = mutil.decode_key(args.scan_dmail)

        def key_callback(key):
            print("dmail key: [{}].".format(mbase32.encode(key)))

        yield from de.scan_dmail_address(\
            addr, sig_bits, key_callback=key_callback)

    if args.fetch_dmail:
        log.info("Fetching dmail for key=[{}].".format(args.fetch_dmail))

        key = mbase32.decode(args.fetch_dmail)

        if args.x:
            l, x_int = sshtype.parseMpint(base58.decode(args.x))
        else:
            x_int = None

        dmail_target = args.dmail_target

        dm, valid_sig = yield from de.fetch_dmail(key, x_int, dmail_target)

        if not dm:
            raise Exception("No dmail found.")

        if not x_int:
            print("Encrypted dmail data=[\n{}].".format(mutil.hex_dump(dm)))
        else:
            print("Subject: {}\n".format(dm.subject))

            if dm.sender_pubkey:
                print("From: {}"\
                    .format(mbase32.encode(enc.generate_ID(dm.sender_pubkey))))

            i = 0
            for part in dm.parts:
                print("DmailPart[{}]:\n mime-type=[{}]\n data=[{}]\n"\
                    .format(i, part.mime_type, part.data))
                i += 1

    log.info("Disconnecting.")

    yield from mc.disconnect()

    loop.stop()
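Example invocations of this client, assuming the usual entry-point module name (mcc.py is an assumption; the flags themselves are the ones defined by the argparse setup above):

# Hypothetical command lines; mcc.py and the key/address values are placeholders.
#
#   python3 mcc.py --stat
#   python3 mcc.py --create-dmail --prefix xyz
#   python3 mcc.py --send-dmail "Subject line" -i message.txt
#   python3 mcc.py --scan-dmail <dmail-address>
#   python3 mcc.py --fetch-dmail <dmail-key> -x <x-secret> --dmail-target <target>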
def __main():
    global loop

    log.info("mcc running.")

    parser = argparse.ArgumentParser()
    parser.add_argument(\
        "--address",\
        help="The address of the Morphis node to connect to.",\
        default="127.0.0.1:4250")
    parser.add_argument(\
        "--create-dmail",\
        help="Generate and upload a new dmail site.",\
        action="store_true")
    parser.add_argument(\
        "--dburl",\
        help="Specify the database url to use.")
    parser.add_argument(\
        "--fetch-dmail",\
        help="Fetch dmail for specified key_id.")
    parser.add_argument(\
        "-i",\
        help="Read file as stdin.")
    parser.add_argument("--nn", type=int,\
        help="Node instance number.")
    parser.add_argument(\
        "--prefix",\
        help="Specify the prefix for various things (currently --create-dmail"\
            ").")
    parser.add_argument(\
        "--scan-dmail",\
        help="Scan the network for available dmails.")
    parser.add_argument(\
        "--send-dmail",\
        help="Send stdin as a dmail with the specified subject. The"\
            " sender and recipients may be specified at the beginning of the"\
            " data as with email headers: 'from: ' and 'to: '.")
    parser.add_argument(\
        "--stat",\
        help="Report node status.",\
        action="store_true")
    parser.add_argument("-l", dest="logconf",\
        help="Specify alternate logging.ini [IF SPECIFIED, THIS MUST BE THE"\
            " FIRST PARAMETER!].")
    parser.add_argument(\
        "--dmail-target",\
        help="Specify the dmail target to validate dmail against.")
    parser.add_argument(\
        "-x",\
        help="Specify the x (Diffie-Hellman private secret) to use.")

    args = parser.parse_args()

    # Load or generate client mcc key.
    key_filename = "data/mcc_key-rsa.mnk"
    if os.path.exists(key_filename):
        log.info("mcc private key file found, loading.")
        client_key = rsakey.RsaKey(filename=key_filename)
    else:
        log.info("mcc private key file missing, generating.")
        client_key = rsakey.RsaKey.generate(bits=4096)
        client_key.write_private_key_file(key_filename)

    # Connect a Morphis Client (lightweight Node) instance.
    mc = client.Client(loop, client_key=client_key, address=args.address)

    r = yield from mc.connect()

    if not r:
        log.warning("Connection failed; exiting.")
        loop.stop()
        return

    dbase = init_db(args)
    de = dmail.DmailEngine(mc, dbase)

    log.info("Processing command requests...")

    if args.stat:
        r = yield from mc.send_command("stat")
        print(r.decode("UTF-8"), end='')

    if args.create_dmail:
        log.info("Creating and uploading dmail site.")

        privkey, data_key, dms, storing_nodes =\
            yield from de.generate_dmail_address(args.prefix)

        print("privkey: {}".format(base58.encode(privkey._encode_key())))
        print("x: {}".format(base58.encode(sshtype.encodeMpint(dms.dh.x))))
        print("dmail address: {}".format(mbase32.encode(data_key)))
        # Report the store count itself, not the private key again.
        print("storing_nodes=[{}].".format(storing_nodes))

    if args.send_dmail:
        log.info("Sending dmail.")

        if args.i:
            with open(args.i, "rb") as fh:
                dmail_data = fh.read().decode()
        else:
            dmail_data = stdin.read()

        if log.isEnabledFor(logging.DEBUG):
            log.debug("dmail_data=[{}].".format(dmail_data))

        yield from de.send_dmail_text(args.send_dmail, dmail_data)

    if args.scan_dmail:
        log.info("Scanning dmail address.")

        addr, sig_bits = mutil.decode_key(args.scan_dmail)

        def key_callback(key):
            print("dmail key: [{}].".format(mbase32.encode(key)))

        yield from de.scan_dmail_address(\
            addr, sig_bits, key_callback=key_callback)

    if args.fetch_dmail:
        log.info("Fetching dmail for key=[{}].".format(args.fetch_dmail))

        key = mbase32.decode(args.fetch_dmail)

        if args.x:
            l, x_int = sshtype.parseMpint(base58.decode(args.x))
        else:
            x_int = None

        dmail_target = args.dmail_target

        dm, valid_sig = yield from de.fetch_dmail(key, x_int, dmail_target)

        if not dm:
            raise Exception("No dmail found.")

        if not x_int:
            print("Encrypted dmail data=[\n{}].".format(mutil.hex_dump(dm)))
        else:
            print("Subject: {}\n".format(dm.subject))

            if dm.sender_pubkey:
                print("From: {}"\
                    .format(mbase32.encode(enc.generate_ID(dm.sender_pubkey))))

            i = 0
            for part in dm.parts:
                print("DmailPart[{}]:\n mime-type=[{}]\n data=[{}]\n"\
                    .format(i, part.mime_type, part.data))
                i += 1

    log.info("Disconnecting.")

    yield from mc.disconnect()

    loop.stop()