def _dmail_auto_publish(self, dmail_address):
    """Republish the Dmail site for *dmail_address* if it can no longer be
    fetched from the network.

    Coroutine (old-style ``yield from``). Returns early when the site is
    still retrievable; otherwise it rebuilds the site root from the stored
    address keys and publishes it again.
    """
    data_rw = yield from self.engine.tasks.send_get_data(\
        dmail_address.site_key, retry_factor=100)

    if data_rw.data:
        # Site is still present on the network; nothing to do.
        if log.isEnabledFor(logging.DEBUG):
            log.debug("Succeeded in fetching dmail site [{}]; won't"\
                " auto-publish."\
                    .format(mbase32.encode(dmail_address.site_key)))
        return

    if log.isEnabledFor(logging.INFO):
        log.info("Failed to fetch dmail site [{}]; republishing."\
            .format(mbase32.encode(dmail_address.site_key)))

    # Rebuild the site document from the locally stored private key and
    # the first address key's DH parameters.
    private_key = rsakey.RsaKey(privdata=dmail_address.site_privatekey)

    dh = dhgroup14.DhGroup14()
    dh.x = sshtype.parseMpint(dmail_address.keys[0].x)[1]
    dh.generate_e()

    dms = dmail.DmailSite()
    root = dms.root
    root["ssm"] = "mdh-v1"
    root["sse"] = base58.encode(sshtype.encodeMpint(dh.e))
    root["target"] =\
        mbase32.encode(dmail_address.keys[0].target_key)
    root["difficulty"] = int(dmail_address.keys[0].difficulty)

    storing_nodes =\
        yield from self._dmail_engine.publish_dmail_site(private_key, dms)

    if log.isEnabledFor(logging.INFO):
        log.info("Republished Dmail site with [{}] storing nodes."\
            .format(storing_nodes))
def generate_target(self):
    """Generate a fresh random Dmail target id and store it, mbase32
    encoded, under "target" in this site's root."""
    new_target = os.urandom(chord.NODE_ID_BYTES)
    encoded = mbase32.encode(new_target)

    if log.isEnabledFor(logging.INFO):
        log.info("dmail target=[{}].".format(encoded))

    self.root["target"] = encoded
def _fetch_dmail(handler, dmail_addr, dmail_key):
    # Fetch the Dmail identified by dmail_key for the local address
    # dmail_addr. Returns (Dmail, valid_sig) or (None, None) if not found.
    de =\
        dmail.DmailEngine(handler.node.chord_engine.tasks, handler.node.db)

    if log.isEnabledFor(logging.INFO):
        dmail_key_enc = mbase32.encode(dmail_key)
        dmail_addr_enc = mbase32.encode(dmail_addr)
        log.info("Fetching dmail (key=[{}]) for address=[{}]."\
            .format(dmail_key_enc, dmail_addr_enc))

    dmail_address = yield from _load_dmail_address(handler, dmail_addr)

    # Decryption parameters come from the first key of the stored address.
    dmail_key_obj = dmail_address.keys[0]
    target_key = dmail_key_obj.target_key
    x_bin = dmail_key_obj.x

    l, x = sshtype.parseMpint(x_bin)

    dm, valid_sig =\
        yield from de.fetch_dmail(bytes(dmail_key), x, target_key)

    if not dm:
        # NOTE(review): dmail_key_enc is only bound when INFO logging was
        # enabled above; this line raises NameError otherwise — confirm.
        handler._send_partial_content(\
            "Dmail for key [{}] was not found."\
                .format(dmail_key_enc))
        return None, None

    return dm, valid_sig
def fetch_dmail(self, key, x=None, target_key=None):
    # Fetch and (when the DH secret x is given) decrypt a Dmail from the
    # network. Returns (Dmail, valid_sig) when decrypted, (data, None)
    # when x is absent, or (None, None) when the key is not found.
    # Raises DmailException on target mismatch or decryption failure.
    data_rw = yield from self.task_engine.send_get_targeted_data(key)

    data = data_rw.data

    if not data:
        return None, None

    if not x:
        # No key material; return the raw block data undecrypted.
        return data, None

    tb = mp.TargetedBlock(data)

    if target_key:
        if tb.target_key != target_key:
            tb_tid_enc = mbase32.encode(tb.target_key)
            tid_enc = mbase32.encode(target_key)
            raise DmailException(\
                "TargetedBlock->target_key [{}] does not match request"\
                " [{}]."\
                    .format(tb_tid_enc, tid_enc))

    dw = DmailWrapper(tb.buf, mp.TargetedBlock.BLOCK_OFFSET)

    if dw.ssm != "mdh-v1":
        raise DmailException(\
            "Unrecognized key exchange method in dmail [{}]."\
                .format(dw.ssm))

    # Diffie-Hellman (group 14) key agreement with the sender; our e must
    # match the one recorded in the wrapper or x is the wrong secret.
    kex = dhgroup14.DhGroup14()
    kex.x = x
    kex.generate_e()
    kex.f = dw.ssf

    if dw.sse != kex.e:
        raise DmailException(\
            "Dmail [{}] is encrypted with a different e [{}] than"\
            " the specified x resulted in [{}]."\
                .format(mbase32.encode(data_rw.data_key), dw.sse, kex.e))

    kex.calculate_k()

    # Derive the symmetric encryption key from target key + shared secret.
    key = self._generate_encryption_key(tb.target_key, kex.k)

    data = enc.decrypt_data_block(dw.data_enc, key)

    if not data:
        raise DmailException("Dmail data was empty.")

    dmail = Dmail(data, 0, dw.data_len)

    if dw.signature:
        # Note: the signature here covers the encrypted payload.
        signature = dw.signature

        pubkey = rsakey.RsaKey(dmail.sender_pubkey)
        valid_sig = pubkey.verify_rsassa_pss_sig(dw.data_enc, signature)

        return dmail, valid_sig
    else:
        return dmail, False
def __fetch_hash_tree_ref(self, data_key, depth, position, retry=None):
    # Fetch one node of the hash tree. retry, when set, is the mutable
    # list [depth, position, data_key, tries] of a re-attempted fetch.
    if not retry:
        data_rw = yield from self.engine.tasks.send_get_data(data_key)
    else:
        # Scale the retry factor up with each attempt.
        data_rw = yield from self.engine.tasks.send_get_data(data_key, retry_factor=retry[3] * 10)

    self._task_semaphore.release()

    if self._abort:
        return

    if not data_rw.data:
        # Fetch failed.
        if retry:
            retry[3] += 1 # Tries.

            # Give up on this block after 32 attempts.
            if retry[3] >= 32:
                if log.isEnabledFor(logging.INFO):
                    log.info("Block id [{}] failed too much; aborting.".format(mbase32.encode(data_key)))
                self._do_abort()
                return
        else:
            retry = [depth, position, data_key, 1]

        self._failed.append(retry)

        if log.isEnabledFor(logging.INFO):
            log.info("Block id [{}] failed, retrying (tries=[{}]).".format(mbase32.encode(data_key), retry[3]))

        if self.ordered:
            # This very fetch is probably blocking future ones so retry
            # immediately!
            self._schedule_retry()
    else:
        if retry:
            if log.isEnabledFor(logging.INFO):
                log.info("Succeeded with retry [{}] on try [{}].".format(mbase32.encode(data_key), retry[3]))

        if self.ordered:
            if position != self._next_position:
                # In-order delivery: block until it is our turn.
                waiter = asyncio.futures.Future(loop=self.engine.loop)
                yield from self.__wait(position, waiter)

        if not depth:
            # Leaf node: hand the data to the consumer callback; a falsy
            # return value is a cancel signal.
            r = self.data_callback.notify_data(position, data_rw.data)
            if not r:
                if log.isEnabledFor(logging.DEBUG):
                    log.debug("Received cancel signal; aborting download.")
                self._do_abort()
                return
            self.__notify_position_complete(position + len(data_rw.data))
        else:
            # Interior node: recurse into the referenced child blocks.
            yield from self._fetch_hash_tree_refs(data_rw.data, 0, depth, position)

    # Bookkeeping: when the last outstanding task finishes, wake waiters.
    self._task_cnt -= 1
    if self._task_cnt <= 0:
        assert self._task_cnt == 0
        self._tasks_done.set()
def scan_dmail_address(self, addr, significant_bits, key_callback=None):
    # Scan the network for Dmail keys targeted at addr, invoking
    # key_callback for each key found, walking forward from the target.
    if log.isEnabledFor(logging.INFO):
        log.info("Scanning dmail [{}].".format(mbase32.encode(addr)))

    def dbcall():
        # Blocking local lookup of the address; run in an executor.
        with self.db.open_session() as sess:
            q = sess.query(db.DmailAddress)\
                .filter(db.DmailAddress.site_key == addr)

            dmail_address = q.first()
            if dmail_address:
                # Touch the relationship so it loads before expunge.
                dmail_address.keys
            sess.expunge_all()
            return dmail_address

    dmail_address = yield from self.loop.run_in_executor(None, dbcall)

    if dmail_address:
        log.info("Found DmailAddress locally, using local settings.")
        target = dmail_address.keys[0].target_key
        significant_bits = dmail_address.keys[0].difficulty
    else:
        log.info("DmailAddress not found locally, fetching settings from"\
            " the network.")
        addr, dsite = yield from\
            self.fetch_recipient_dmail_site(addr, significant_bits)

        if not dsite:
            raise DmailException("Dmail site not found.")

        target = dsite.root["target"]
        significant_bits = dsite.root["difficulty"]
        target = mbase32.decode(target)

    start = target

    while True:
        # Each find returns the next key at/after start matching target.
        data_rw = yield from self.task_engine.send_find_key(\
            start, target_key=target, significant_bits=significant_bits,\
            retry_factor=100)

        key = data_rw.data_key

        if not key:
            break

        if log.isEnabledFor(logging.INFO):
            log.info("Found dmail key: [{}].".format(mbase32.encode(key)))

        if key_callback:
            key_callback(key)

        start = key
def _send_get_data(data_key, significant_bits, path, data_rw):
    # Resolve (when only a prefix was given) and download data_key,
    # streaming results into data_rw; putting None on data_rw.data_queue
    # signals completion to the consumer.
    if log.isEnabledFor(logging.DEBUG):
        log.debug(\
            "Sending GetData: key=[{}], significant_bits=[{}], path=[{}]."\
                .format(mbase32.encode(data_key), significant_bits, path))

    try:
        if significant_bits:
            # Only a key prefix was supplied; resolve the full key first
            # (bounded by a 15 second timeout).
            future = asyncio.async(\
                node.chord_engine.tasks.send_find_key(\
                    data_key, significant_bits),\
                loop=node.loop)

            yield from asyncio.wait_for(future, 15.0, loop=node.loop)

            ct_data_rw = future.result()

            data_key = ct_data_rw.data_key

            if not data_key:
                data_rw.data = b"Key Not Found"
                data_rw.version = -1
                data_rw.data_queue.put(None)
                return

            if log.isEnabledFor(logging.INFO):
                log.info("Found key=[{}].".format(mbase32.encode(data_key)))

            # Report only the resolved key and stop; the caller issues a
            # fresh request with the full key.
            data_rw.data_key = bytes(data_key)
            data_rw.data_queue.put(None)
            return

#        future = asyncio.async(\
#            node.chord_engine.tasks.send_get_data(data_key),\
#            loop=node.loop)
#
#        yield from asyncio.wait_for(future, 15.0, loop=node.loop)
#
#        ct_data_rw = future.result()

        data_callback = Downloader(data_rw)

        r = yield from multipart.get_data(\
            node.chord_engine, data_key, data_callback, path=path,
            ordered=True)

        if r is False:
            raise asyncio.TimeoutError()
    except asyncio.TimeoutError:
        data_rw.timed_out = True
    except:
        log.exception("send_get_data(..)")
        data_rw.exception = True

    data_rw.data_queue.put(None)
def _list_dmails_for_tag(handler, addr, tag):
    # Render the non-hidden Dmail messages of address addr carrying tag
    # as partial HTML content: unread first, then newest first.
    def dbcall():
        # Blocking DB query; run in an executor.
        with handler.node.db.open_session() as sess:
            q = sess.query(DmailMessage)\
                .filter(\
                    DmailMessage.address.has(DmailAddress.site_key == addr))\
                .filter(DmailMessage.tags.any(DmailTag.name == tag))\
                .filter(DmailMessage.hidden == False)\
                .order_by(DmailMessage.read, DmailMessage.date.desc())

            msgs = q.all()

            sess.expunge_all()

            return msgs

    msgs = yield from handler.node.loop.run_in_executor(None, dbcall)

    addr_enc = mbase32.encode(addr)

    for msg in msgs:
        key_enc = mbase32.encode(msg.data_key)

        is_read = "" if msg.read else "(unread)"

        # Truncate long subjects for display.
        subject = msg.subject
        if len(subject) > 80:
            subject = subject[:80] + "..."

        sender_key = msg.sender_dmail_key
        if sender_key:
            if msg.sender_valid:
                sender_key = mbase32.encode(sender_key[:32]) + "..."
            else:
                # Invalid signature: render the sender struck through.
                sender_key = """<span class="strikethrough">"""\
                    + mbase32.encode(sender_key[:32]) + "</span>..."
        else:
            sender_key = "Anonymous"

        handler._send_partial_content(\
            """<span class="nowrap">{}: <a href="../../../../fetch/{}/{}" title="{}">{}</a> - {}</span><span class="right_text tag">{}</span><br/>"""\
                .format(\
                    mutil.format_human_no_ms_datetime(msg.date),\
                    addr_enc,\
                    key_enc,\
#                    key_enc[:32],\
                    key_enc,
                    subject,\
                    sender_key,\
                    is_read))

    if not msgs:
        handler._send_partial_content("Mailbox is empty.")
def run(self): self._running = True if log.isEnabledFor(logging.INFO): addr_enc = mbase32.encode(self.dmail_address.site_key) log.info("DmailAutoscanProcess (addr=[{}]) running."\ .format(addr_enc)) while self._running: new_cnt, old_cnt, err_cnt = yield from\ self.client_engine._dmail_engine.scan_and_save_new_dmails(\ self.dmail_address) if log.isEnabledFor(logging.INFO): log.info("Finished scanning Dmails for address [{}];"\ " new_cnt=[{}], old_cnt=[{}], err_cnt=[{}]."\ .format(addr_enc, new_cnt, old_cnt, err_cnt)) if not self.scan_interval: self._running = False if not self._running: break time_left = self.scan_interval start = time.time() while time_left > 0: if log.isEnabledFor(logging.INFO): log.info("Sleeping for [{}] seconds.".format(time_left)) self._task =\ asyncio.async(\ asyncio.sleep(time_left, loop=self.loop),\ loop=self.loop) try: yield from self._task self._task = None break except asyncio.CancelledError: self._task = None if log.isEnabledFor(logging.INFO): log.info("Woken from sleep for address [{}]."\ .format(\ mbase32.encode(self.dmail_address.site_key))) if self._scan_now: self._scan_now = False break time_left = self.scan_interval - (time.time() - start)
def run(self): self._running = True if log.isEnabledFor(logging.INFO): addr_enc = mbase32.encode(self.dmail_address.site_key) log.info("DmailAutoscanProcess (addr=[{}]) running."\ .format(addr_enc)) while self._running: new_cnt, old_cnt, err_cnt = yield from\ self.client_engine._dmail_engine.scan_and_save_new_dmails(\ self.dmail_address) if log.isEnabledFor(logging.INFO): log.info("Finished scanning Dmails for address [{}];"\ " new_cnt=[{}], old_cnt=[{}], err_cnt=[{}]."\ .format(addr_enc, new_cnt, old_cnt, err_cnt)) if not self.scan_interval: self._running = False if not self._running: break time_left = self.scan_interval start = time.time() while time_left > 0: if log.isEnabledFor(logging.INFO): log.info("Sleeping for [{}] seconds.".format(time_left)) self._task =\ asyncio.async(asyncio.sleep(time_left), loop=self.loop) try: yield from self._task self._task = None break except asyncio.CancelledError: self._task = None if log.isEnabledFor(logging.INFO): log.info("Woken from sleep for address [{}]."\ .format(\ mbase32.encode(self.dmail_address.site_key))) if self._scan_now: self._scan_now = False break time_left = self.scan_interval - (time.time() - start)
def fetch_recipient_dmail_sites(self, recipients):
    # Fetch the DmailSite for each entry in recipients; entries are
    # either (enc, key_bytes, significant_bits) tuples/lists or raw key
    # bytes. Unfetchable sites are skipped. Returns a list of DmailSite.
    robjs = []

    for entry in recipients:
        if type(entry) in (tuple, list):
            recipient_enc, recipient, significant_bits = entry

            if significant_bits:
                # Entry is only a prefix; resolve the full key first.
                data_rw = yield from self.task_engine.send_find_key(\
                    recipient, significant_bits=significant_bits)

                # NOTE(review): bytes(None) raises TypeError when the
                # find fails (data_key is None) — the falsy check below
                # comes too late, and there is no `continue` on failure.
                recipient = bytes(data_rw.data_key)

                if not recipient:
                    log.info("Failed to find key for prefix [{}]."\
                        .format(recipient_enc))
        else:
            recipient = entry

        data_rw = yield from self.task_engine.send_get_data(recipient)

        if not data_rw.data:
            if log.isEnabledFor(logging.INFO):
                log.info("Failed to fetch dmail site [{}]."\
                    .format(mbase32.encode(recipient)))
            continue

        site_data = data_rw.data.decode("UTF-8")

        if log.isEnabledFor(logging.INFO):
            log.info("site_data=[{}].".format(site_data))

        robjs.append(DmailSite(site_data))

    return robjs
def send_get_data(self, data_key, path=None):
    """Issue a `getdata` shell command for data_key (optionally with a
    path) and parse the textual reply into a DataResponseWrapper."""
    key_enc = mbase32.encode(data_key)

    cmd = "getdata " + key_enc
    if path:
        cmd += " " + path

    r = yield from self.send_command(cmd)

    data_rw = chord_tasks.DataResponseWrapper(data_key)

    # Parse "version=[...]".
    start = r.find(b"version=[") + 9
    end = r.find(b']', start)
    ver_str = r[start:end]
    if ver_str == b"None":
        data_rw.version = None
    else:
        data_rw.version = int(ver_str)

    # Payload follows "data:\r\n"; drop the trailing "\r\n".
    start = r.find(b"data:\r\n", end + 1) + 7
    data = r[start:-2]

    #FIXME: This is ambiguous with data that == "Not found." :)
    if data == b"Not found.":
        data_rw.data = None
    else:
        data_rw.data = data

    return data_rw
def fetch_recipient_dmail_site(self, addr, significant_bits=None):
    """Resolve *addr* (a str key/prefix, an (addr, bits) pair, or raw key
    bytes) and fetch its Dmail site from the network.

    Returns (addr_bytes, DmailSite) on success, (None, None) on failure.
    """
    if type(addr) is str:
        addr, significant_bits = mutil.decode_key(addr)
    elif type(addr) in (list, tuple):
        addr, significant_bits = addr
    else:
        assert type(addr) in (bytes, bytearray)

    if significant_bits:
        # Only a prefix was supplied; resolve it to a full key first.
        prefix = addr

        data_rw = yield from self.task_engine.send_find_key(\
            addr, significant_bits=significant_bits)

        # BUGFIX: check data_key before converting; the original called
        # bytes(None) on failure and then logged the undefined name
        # `recipient_enc`.
        if not data_rw.data_key:
            log.info("Failed to find key for prefix [{}]."\
                .format(mbase32.encode(prefix)))
            return None, None

        addr = bytes(data_rw.data_key)

    data_rw =\
        yield from self.task_engine.send_get_data(addr, retry_factor=100)

    if not data_rw.data:
        if log.isEnabledFor(logging.INFO):
            # BUGFIX: the original referenced the undefined name
            # `recipient` here.
            log.info("Failed to fetch dmail site [{}]."\
                .format(mbase32.encode(addr)))
        return None, None

    site_data = data_rw.data.decode("UTF-8")

    if log.isEnabledFor(logging.INFO):
        log.info("site_data=[{}].".format(site_data))

    return addr, DmailSite(site_data)
def stop(self):
    """Stop the autoscan loop and cancel any pending sleep task."""
    if not self._running:
        return

    if log.isEnabledFor(logging.INFO):
        log.info("Stopping DmailAutoscanProcess (addr=[{}])."\
            .format(mbase32.encode(self.dmail_address.site_key)))

    self._running = False

    task = self._task
    if task:
        task.cancel()
def do_stat(self, arg):
    "Report the node status."
    engine = self.peer.engine
    node = engine.node

    status =\
        "Node:\n\tversion=[{}]\n\tid=[{}]\n\tbind_port=[{}]\n"\
        "\tconnections={}"\
            .format(node.morphis_version,\
                mbase32.encode(engine.node_id), engine._bind_port,\
                len(engine.peers))

    self.writeln(status)
def do_stat(self, arg):
    "Report the node status."
    engine = self.peer.engine

    node_id_enc = mbase32.encode(engine.node_id)
    status = "Node:\n\tid=[{}]\n\tbind_port=[{}]\n\tconnections={}"\
        .format(node_id_enc, engine._bind_port, len(engine.peers))

    self.writeln(status)
def update_scan_interval(self, interval): if not interval: self._running = False if self._task: self._task.cancel() return self.scan_interval = interval if self._running: if log.isEnabledFor(logging.INFO): log.info("Notifying DmailAutoscanProcess (addr=[{}]) of"\ " interval change."\ .format(mbase32.encode(self.dmail_address.site_key))) if self._task: self._task.cancel() else: if log.isEnabledFor(logging.INFO): log.info("Starting DmailAutoscanProcess (addr=[{}])."\ .format(mbase32.encode(self.dmail_address.site_key))) asyncio.async(self.run(), loop=self.loop)
def update_scan_interval(self, interval): if not interval: self._running = False if self._task: self._task.cancel() return self.scan_interval = interval if self._running: if log.isEnabledFor(logging.INFO): log.info("Notifying DmailAutoscanProcess (addr=[{}]) of"\ " interval change."\ .format(mbase32.encode(self.dmail_address.site_key))) if self._task: self._task.cancel() else: if log.isEnabledFor(logging.INFO): log.info("Starting DmailAutoscanProcess (addr=[{}])."\ .format(mbase32.encode(self.dmail_address.site_key))) asyncio. async (self.run(), loop=self.loop)
def send_get_targeted_data(self, data_key):
    """Issue a `gettargeteddata` shell command for data_key and wrap the
    returned raw payload in a DataResponseWrapper."""
    cmd = "gettargeteddata " + mbase32.encode(data_key)

    r = yield from self.send_command(cmd)

    wrapper = chord_tasks.DataResponseWrapper(data_key)

    # Payload follows "data:\r\n"; strip the trailing "\r\n".
    start = r.find(b"data:\r\n") + 7
    wrapper.data = r[start:-2]

    return wrapper
def send_find_key(self, prefix, target_key=None, significant_bits=None):
    """Issue a `findkey` shell command and return a DataResponseWrapper
    whose data_key is the found key, or None when nothing matched."""
    parts = ["findkey ", mbase32.encode(prefix)]
    if target_key:
        parts.append(" ")
        parts.append(mbase32.encode(target_key))
    if significant_bits:
        parts.append(" ")
        parts.append(str(significant_bits))

    r = yield from self.send_command("".join(parts))

    # Parse "data_key=[...]" out of the textual response.
    start = r.find(b"data_key=[") + 10
    end = r.find(b']', start)
    key_str = r[start:end].decode()

    if key_str == "None":
        data_key = None
    else:
        data_key = mbase32.decode(key_str)

    return chord_tasks.DataResponseWrapper(data_key)
def _set_upload_page(content):
    """Remember the raw upload page and derive the static-mode variant
    (plus its content id) from it."""
    global static_upload_page_content, upload_page_content

    upload_page_content = content

    # Static mode: hide the updateable-key controls, show static ones.
    static_content = content.replace(\
        b"${UPDATEABLE_KEY_MODE_DISPLAY}", b"display: none")
    static_content = static_content.replace(b"${STATIC_MODE_DISPLAY}", b"")

    static_upload_page_content[0] = static_content
    static_upload_page_content[1] =\
        mbase32.encode(enc.generate_ID(static_content))
def fetch_dmail(self, key, x=None, target_key=None):
    "Fetch the Dmail referred to by key from the network."\
    " Returns a Dmail object, not a db.DmailMessage object."
    data_rw = yield from self.task_engine.send_get_targeted_data(key)

    data = data_rw.data

    if not data:
        return None, None

    if not x:
        # No DH secret supplied; return the raw block data undecrypted.
        return data, None

    tb = mp.TargetedBlock(data)

    if target_key:
        if tb.target_key != target_key:
            tb_tid_enc = mbase32.encode(tb.target_key)
            tid_enc = mbase32.encode(target_key)
            raise DmailException(\
                "TargetedBlock->target_key [{}] does not match request"\
                " [{}]."\
                    .format(tb_tid_enc, tid_enc))

    # The wrapper version selects the parsing/decryption scheme.
    version =\
        struct.unpack_from(">L", tb.buf, mp.TargetedBlock.BLOCK_OFFSET)[0]

    if version == 1:
        dmail, valid_sig =\
            yield from self._process_dmail_v1(key, x, tb, data_rw)
    else:
        assert version == 2
        dmail, valid_sig =\
            yield from self._process_dmail_v2(key, x, tb, data_rw)

    return dmail, valid_sig
def process_key(key):
    # Closure: handle one discovered Dmail key — skip keys we already
    # have, otherwise fetch/save the Dmail and emit an HTML link for it.
    # `handler`, `addr` and `new_dmail_cnt` come from the enclosing scope.
    nonlocal new_dmail_cnt

    exists = yield from _check_have_dmail(handler, key)

    key_enc = mbase32.encode(key)

    if log.isEnabledFor(logging.DEBUG):
        log.debug("Processing Dmail (key=[{}]).".format(key_enc))

    if exists:
        if log.isEnabledFor(logging.DEBUG):
            log.debug("Ignoring dmail (key=[{}]) we already have."\
                .format(key_enc))
        return

    yield from _fetch_and_save_dmail(handler, addr, key)

    addr_enc = mbase32.encode(addr)
    handler._send_partial_content(\
        """<a href="../../fetch/{}/{}">{}</a><br/>"""\
            .format(addr_enc, key_enc, key_enc))

    new_dmail_cnt += 1
def _send_content(self, content_entry, cacheable=True, content_type=None):
    # Send an HTTP response for content_entry, which is either raw
    # content (bytes or a zero-arg callable producing bytes) or a
    # mutable [content, content_id] pair used for ETag caching.
    if type(content_entry) in (list, tuple):
        content = content_entry[0]
        content_id = content_entry[1]
    else:
        content = content_entry
        # Bare content has no id to cache against.
        cacheable = False

    if not self.maalstroom_plugin_used:
        # Rewrite scheme links for plain-HTTP (non-plugin) access.
        content =\
            content.replace(b"morphis://", self.maalstroom_url_prefix)

    if cacheable and not content_id:
        if callable(content):
            content = content()
        # Derive a stable id and memoize it back into the entry.
        content_id = mbase32.encode(enc.generate_ID(content))
        content_entry[1] = content_id

    if cacheable and self.headers["If-None-Match"] == content_id:
        # Client already has this exact content.
        cache_control = self.headers["Cache-Control"]
        if cache_control != "max-age=0":
            self.send_response(304)
            if cache_control:
                # This should only have been sent for an updateable key.
                self.send_header("Cache-Control", "max-age=15, public")
            else:
                self.send_header("ETag", content_id)
            self.send_header("Content-Length", 0)
            self.end_headers()
            return

    if callable(content):
        content = content()

    self.send_response(200)
    self.send_header("Content-Length", len(content))
    self.send_header("Content-Type",\
        "text/html" if content_type is None else content_type)
    if cacheable:
        self.send_header("Cache-Control", "public")
        self.send_header("ETag", content_id)
    else:
        self._send_no_cache()
    self.end_headers()
    self.wfile.write(content)
    return
def __find_key(rp):
    """Worker loop: generate 4096-bit RSA keys until one whose mbase32
    encoded public-key hash starts with the requested prefix is found,
    then send the encoded private key back over the pipe *rp*."""
#    log.debug("Worker running.")
    wid, prefix = rp.recv()

    while True:
        candidate = rsakey.RsaKey.generate(bits=4096)
        pub_hash = enc.generate_ID(candidate.asbytes())

        if mbase32.encode(pub_hash).startswith(prefix):
#            if log.isEnabledFor(logging.INFO):
#                log.info("Worker #{} found key.".format(wid))
            rp.send(candidate._encode_key())
            return
def do_findkey(self, arg):
    """<DATA_KEY_PREFIX> [TARGET_ID] [SIGNIFICANT_BITS] search the network
    for the given key."""
    # BUGFIX: the second line of the usage text used to be a separate,
    # discarded string expression instead of part of the docstring, so
    # help output was truncated.
    args = arg.split(" ")

    data_key, significant_bits = decode_key(args[0])
    target_key = mbase32.decode(args[1]) if len(args) >= 2 else None
    if len(args) == 3:
        # An explicit bit count overrides the one derived from the prefix.
        significant_bits = int(args[2])

    start = datetime.today()

    data_rw = yield from self.peer.engine.tasks.send_find_key(
        data_key, significant_bits=significant_bits, target_key=target_key
    )

    diff = datetime.today() - start

    data_key_enc =\
        mbase32.encode(data_rw.data_key) if data_rw.data_key else None

    self.writeln("data_key=[{}].".format(data_key_enc))
    self.writeln("send_find_key(..) took: {}.".format(diff))
def _process_dmail_v2(self, key, x, tb, data_rw):
    # Parse and decrypt a version-2 Dmail wrapper out of TargetedBlock
    # tb, using DH secret x. Returns (Dmail, valid_sig); raises
    # DmailException on key-exchange mismatch or empty plaintext.
    dw = DmailWrapper(tb.buf, mp.TargetedBlock.BLOCK_OFFSET)

    if dw.ssm != "mdh-v1":
        raise DmailException(\
            "Unrecognized key exchange method in dmail [{}]."\
                .format(dw.ssm))

    # Calculate the shared secret.
    kex = dhgroup14.DhGroup14()
    kex.x = x
    kex.generate_e()
    kex.f = dw.ssf

    if dw.sse != kex.e:
        raise DmailException(\
            "Dmail [{}] is encrypted with a different e [{}] than"\
            " the specified x resulted in [{}]."\
                .format(mbase32.encode(data_rw.data_key), dw.sse, kex.e))

    kex.calculate_k()

    # Generate the AES-256 encryption key.
    key = self._generate_encryption_key(tb.target_key, kex.k)

    # Decrypt the data.
    data = enc.decrypt_data_block(dw.data_enc, key)

    if not data:
        raise DmailException("Dmail data was empty.")

    dmail = Dmail(data)

    if dmail.signature:
        # v2 signs the plaintext up to the signature offset.
        pubkey = rsakey.RsaKey(dmail.sender_pubkey)
        valid_sig =\
            pubkey.verify_rsassa_pss_sig(\
                data[:dmail.signature_offset], dmail.signature)

        return dmail, valid_sig
    else:
        return dmail, False
def fetch_recipient_dmail_sites(self, recipients):
    """Fetch the DmailSite for each recipient.

    Each entry may be a str key/prefix, an
    (encoded, key_bytes, significant_bits) tuple/list, or raw key bytes.
    Unresolvable or unfetchable entries are skipped. Returns a list of
    DmailSite objects.
    """
    robjs = []

    for entry in recipients:
        if type(entry) is str:
            recipient, significant_bits = mutil.decode_key(entry)
            # BUGFIX: normalize into the tuple form so the branch below
            # actually processes str entries; previously the next check
            # tested the original str `entry` and the decoded prefix was
            # silently discarded.
            entry = (entry, bytes(recipient), significant_bits)

        if type(entry) in (tuple, list):
            recipient_enc, recipient, significant_bits = entry

            if significant_bits:
                # Entry is only a prefix; resolve the full key first.
                data_rw = yield from self.task_engine.send_find_key(\
                    recipient, significant_bits=significant_bits)

                # BUGFIX: check data_key before converting; bytes(None)
                # raised TypeError, and a failed find did not skip the
                # entry.
                if not data_rw.data_key:
                    log.info("Failed to find key for prefix [{}]."\
                        .format(recipient_enc))
                    continue

                recipient = bytes(data_rw.data_key)
        else:
            recipient = entry

        data_rw = yield from self.task_engine.send_get_data(recipient,\
            retry_factor=100)

        if not data_rw.data:
            if log.isEnabledFor(logging.INFO):
                log.info("Failed to fetch dmail site [{}]."\
                    .format(mbase32.encode(recipient)))
            continue

        site_data = data_rw.data.decode("UTF-8")

        if log.isEnabledFor(logging.INFO):
            log.info("site_data=[{}].".format(site_data))

        robjs.append(DmailSite(site_data))

    return robjs
def _format_dmail(dm, valid_sig):
    """Render a Dmail (a db.DmailMessage or a network Dmail object) as a
    plain-text string: verification banner, headers, then body parts."""
    from_db = type(dm) is DmailMessage

    # IDIOM FIX: the original did `list += str`, which extends the list
    # one character at a time; append whole strings instead (same joined
    # output).
    parts = []

    if (from_db and dm.sender_dmail_key)\
            or (not from_db and dm.sender_pubkey):
        if from_db:
            sender_dmail_key = dm.sender_dmail_key
        else:
            sender_dmail_key = enc.generate_ID(dm.sender_pubkey)

        if valid_sig:
            parts.append("Sender Address Verified.\n\n")
        else:
            parts.append("WARNING: Sender Address Forged!\n\n")

        parts.append("From: {}\n".format(mbase32.encode(sender_dmail_key)))

    parts.append("Subject: {}\n".format(dm.subject))

    if from_db:
        date_fmtted = dm.date
    else:
        date_fmtted = mutil.parse_iso_datetime(dm.date)

    parts.append("Date: {}\n".format(date_fmtted))

    parts.append('\n')

    for i, part in enumerate(dm.parts):
        parts.append(part.data.decode())
        parts.append('\n')

        if len(dm.parts) > 1:
            parts.append("----- ^ dmail part #{} ^ -----\n\n".format(i))

    return ''.join(parts)
def do_findkey(self, arg):
    """<DATA_KEY_PREFIX> [TARGET_ID] [SIGNIFICANT_BITS] search the network
    for the given key."""
    # BUGFIX: the second line of the usage text used to be a separate,
    # discarded string expression instead of part of the docstring, so
    # help output was truncated.
    args = arg.split(' ')

    data_key, significant_bits = decode_key(args[0])
    target_key = mbase32.decode(args[1]) if len(args) >= 2 else None
    if len(args) == 3:
        # An explicit bit count overrides the one derived from the prefix.
        significant_bits = int(args[2])

    start = datetime.today()

    data_rw = yield from\
        self.peer.engine.tasks.send_find_key(\
            data_key, significant_bits=significant_bits,\
            target_key=target_key)

    diff = datetime.today() - start

    data_key_enc =\
        mbase32.encode(data_rw.data_key) if data_rw.data_key else None

    self.writeln("data_key=[{}].".format(data_key_enc))
    self.writeln("send_find_key(..) took: {}.".format(diff))
def scan_dmail_address(self, addr, significant_bits, key_callback=None):
    # Scan the network for Dmail keys targeted at addr, invoking
    # key_callback for each key found, walking forward from the target.
    addr_enc = mbase32.encode(addr)

    if log.isEnabledFor(logging.INFO):
        log.info("Scanning dmail [{}].".format(addr_enc))

    def dbcall():
        # Blocking local lookup of the address; run in an executor.
        with self.db.open_session() as sess:
            q = sess.query(db.DmailAddress)\
                .filter(db.DmailAddress.site_key == addr)

            dmail_address = q.first()
            if dmail_address:
                # Touch the relationship so it loads before expunge.
                dmail_address.keys
            sess.expunge_all()
            return dmail_address

    dmail_address = yield from self.loop.run_in_executor(None, dbcall)

    if dmail_address:
        log.info("Found DmailAddress locally, using local settings.")
        target = dmail_address.keys[0].target_key
        significant_bits = dmail_address.keys[0].difficulty
    else:
        log.info("DmailAddress not found locally, fetching settings from"\
            " the network.")
        dsites = yield from\
            self.fetch_recipient_dmail_sites(\
                [(addr_enc, addr, significant_bits)])

        if not dsites:
            raise DmailException("Dmail site not found.")

        dsite = dsites[0]

        target = dsite.root["target"]
        significant_bits = dsite.root["difficulty"]
        target = mbase32.decode(target)

    start = target

    while True:
        # Each find returns the next key at/after start matching target.
        data_rw = yield from self.task_engine.send_find_key(\
            start, target_key=target, significant_bits=significant_bits,\
            retry_factor=100)

        key = data_rw.data_key

        if not key:
            break

        if log.isEnabledFor(logging.INFO):
            log.info("Found dmail key: [{}].".format(mbase32.encode(key)))

        if key_callback:
            key_callback(key)

        start = key
def get_data(engine, data_key, data_callback, path=None, ordered=False,\
        positions=None, retry_seconds=30, concurrency=64, max_link_depth=1):
    # Download the (possibly multipart) data stored under data_key,
    # following link blocks and hash trees, delivering results through
    # data_callback. Returns True on success, False on link-depth
    # exceeded or aborted fetch, None when the key was not found.
    assert not path or type(path) is bytes, type(path)
    assert isinstance(data_callback, DataCallback), type(data_callback)

    data_rw = yield from engine.tasks.send_get_data(data_key, path)

    data = data_rw.data

    if data is None:
        # One retry with a higher retry factor before giving up.
        data_rw = yield from engine.tasks.send_get_data(\
            data_key, path, retry_factor=10)

        data = data_rw.data

        if data is None:
            return None

    if data_rw.version:
        data_callback.notify_version(data_rw.version)
    else:
        #FIXME: Remove this from here after it is integrated into the coming
        # chord_task rewrite.
        # Reupload the key to keep prefix searches in the network.
        r = random.randint(1, 5)
        if r == 1:
            asyncio.async(\
                engine.tasks.send_store_key(\
                    data_rw.data, data_key, retry_factor=50),\
                loop=engine.loop)

    link_depth = 0

    while True:
        if not data.startswith(MorphisBlock.UUID):
            # Plain data (not a MorphisBlock); deliver it directly.
            data_callback.notify_size(len(data))
            data_callback.notify_data(0, data)
            return True

        block_type = MorphisBlock.parse_block_type(data)

        if block_type == BlockType.link.value:
            link_depth += 1

            if link_depth > max_link_depth:
                if log.isEnabledFor(logging.WARNING):
                    log.warning(\
                        "Exceeded maximum link depth [{}] for key [{}]."\
                            .format(\
                                max_link_depth, mbase32.encode(data_key)))
                return False

            block = LinkBlock(data)

            if block.mime_type:
                data_callback.notify_mime_type(block.mime_type)

            # Follow the link to its destination block (with one retry).
            data_rw = yield from engine.tasks.send_get_data(block.destination)

            data = data_rw.data

            if not data:
                data_rw = yield from engine.tasks.send_get_data(\
                    block.destination, retry_factor=10)

                data = data_rw.data

                if not data:
                    return None

            continue

        if block_type != BlockType.hash_tree.value:
            # Some other MorphisBlock type; deliver it as plain data.
            data_callback.notify_size(len(data))
            data_callback.notify_data(0, data)
            return True

        # Hash tree root: fetch the whole tree concurrently.
        fetch = HashTreeFetch(\
            engine, data_callback, ordered, positions, retry_seconds,\
            concurrency)

        r = yield from fetch.fetch(HashTreeBlock(data))

        return r
def _fetch_and_save_dmail(self, dmail_message_key, dmail_address,\
        address_key):
    """Fetch the Dmail identified by *dmail_message_key* from the network,
    verify it, and store it in the local database tagged "Inbox".

    Returns False when the Dmail was not found on the network, else True
    (already-stored messages are detected inside the db call and simply
    not re-inserted).
    """
    key_type = type(dmail_message_key)
    if key_type is not bytes:
        assert key_type is bytearray
        dmail_message_key = bytes(dmail_message_key)

    # Fetch the Dmail data from the network.
    l, x_mpint = sshtype.parseMpint(address_key.x)

    dmobj, valid_sig =\
        yield from self.fetch_dmail(\
            dmail_message_key, x_mpint, address_key.target_key)

    if not dmobj:
        if log.isEnabledFor(logging.INFO):
            log.info("Dmail was not found on the network.")
        return False

    if dmobj.version > 1:
        if dmobj.destination_addr != dmail_address.site_key:
            log.warning(\
                "Dmail was addressed to [{}], yet passed address was"\
                " [{}]."\
                    .format(mbase32.encode(dmobj.destination_addr),\
                        mbase32.encode(dmail_address.site_key)))
            # BUGFIX: the original assigned the unused name `sig_valid`
            # here, so a misaddressed Dmail was still saved with
            # sender_valid=True.
            valid_sig = False

    # Save the Dmail to our local database.
    def dbcall():
        with self.db.open_session() as sess:
            # Lock to avoid racing another scanner inserting this key.
            self.db.lock_table(sess, db.DmailMessage)

            q = sess.query(func.count("*")).select_from(db.DmailMessage)\
                .filter(db.DmailMessage.data_key == dmail_message_key)

            if q.scalar():
                # Already have this Dmail.
                return False

            msg = db.DmailMessage()
            msg.dmail_address_id = dmail_address.id
            msg.dmail_key_id = address_key.id
            msg.data_key = dmail_message_key
            msg.sender_dmail_key =\
                enc.generate_ID(dmobj.sender_pubkey)\
                    if dmobj.sender_pubkey else None
            msg.sender_valid = valid_sig
            msg.subject = dmobj.subject
            msg.date = mutil.parse_iso_datetime(dmobj.date)
            msg.hidden = False
            msg.read = False
            msg.deleted = False

            attach_dmail_tag(sess, msg, "Inbox")

            msg.parts = []

            for part in dmobj.parts:
                dbpart = db.DmailPart()
                dbpart.mime_type = part.mime_type
                dbpart.data = part.data
                msg.parts.append(dbpart)

            sess.add(msg)

            sess.commit()

    yield from self.loop.run_in_executor(None, dbcall)

    if log.isEnabledFor(logging.INFO):
        log.info("Dmail saved!")

    return True
def __serve_get(handler, rpath, done_event):
    """Route a Maalstroom GET request under the dmail UI path.

    Dispatches on the suffix after the dmail prefix: CSS, address list,
    compose form, address view/settings, tag views, inbox scan, fetch
    (view/panel/wrapper) and address creation.  Unmatched paths fall
    through to handler._handle_error().
    """
    if len(rpath) == len(s_dmail):
        handler._send_content(pages.dmail_page_content)
    else:
        req = rpath[len(s_dmail):]
        log.info("req=[{}].".format(req))

        if req == "/css":
            handler._send_content(\
                pages.dmail_css_content, content_type="text/css")
        elif req == "/address_list":
            handler._send_partial_content(
                pages.dmail_page_content__f1_start, True)

            site_keys = yield from _list_dmail_addresses(handler)

            for dbid, site_key in site_keys:
                site_key_enc = mbase32.encode(site_key)

                resp = """<span class="nowrap">[<a href="addr/{}">view</a>]"""\
                    """ {}</span><br/>"""\
                        .format(site_key_enc, site_key_enc)

                handler._send_partial_content(resp)

            handler._send_partial_content(pages.dmail_page_content__f1_end)
            handler._end_partial_content()
        elif req.startswith("/compose/form"):
            dest_addr_enc = req[14:] if len(req) > 14 else ""

            handler._send_partial_content(\
                pages.dmail_compose_dmail_form_start, True)

            site_keys = yield from _list_dmail_addresses(handler)

            for dbid, site_key in site_keys:
                site_key_enc = mbase32.encode(site_key)

                sender_element = """<option value="{}">{}</option>"""\
                    .format(dbid, site_key_enc)

                handler._send_partial_content(sender_element)

            # BUG FIX: was "<option value="">[Anonymous]</option>" which is
            # Python adjacent-string concatenation and emitted the invalid
            # HTML `<option value>[Anonymous]</option>` (quotes lost).
            handler._send_partial_content(\
                "<option value=\"\">[Anonymous]</option>")

            handler._send_partial_content(\
                pages.dmail_compose_dmail_form_end.replace(\
                    b"${DEST_ADDR}", dest_addr_enc.encode()))

            handler._end_partial_content()
        elif req.startswith("/compose"):
            from_addr = req[9:] if len(req) > 9 else ""

            if from_addr:
                iframe_src = "../compose/form/{}".format(from_addr).encode()
            else:
                iframe_src = "compose/form".encode()

            content = pages.dmail_compose_dmail_content[0].replace(\
                b"${IFRAME_SRC}", iframe_src)

            handler._send_content([content, None])
        elif req.startswith("/addr/view/"):
            addr_enc = req[11:]

            start = pages.dmail_addr_view_start.replace(\
                b"${DMAIL_ADDRESS}", addr_enc.encode())
            start = start.replace(\
                b"${DMAIL_ADDRESS_SHORT}", addr_enc[:32].encode())

            handler._send_partial_content(start, True)
            handler._send_partial_content(pages.dmail_addr_view_end)
            handler._end_partial_content()
        elif req.startswith("/addr/settings/edit/publish?"):
            query = req[28:]

            qdict = urllib.parse.parse_qs(query, keep_blank_values=True)

            addr_enc = qdict["dmail_address"][0]
            difficulty = qdict["difficulty"][0]

            def processor(dmail_address):
                # Only persist when the difficulty actually changed.
                if difficulty != dmail_address.keys[0].difficulty:
                    dmail_address.keys[0].difficulty = difficulty
                    return True
                else:
                    return False

            dmail_address = yield from\
                _process_dmail_address(\
                    handler, mbase32.decode(addr_enc), processor)

            # Rebuild and republish the DmailSite with the new difficulty.
            dh = dhgroup14.DhGroup14()
            dh.x = sshtype.parseMpint(dmail_address.keys[0].x)[1]
            dh.generate_e()

            dms = dmail.DmailSite()
            root = dms.root
            root["target"] =\
                mbase32.encode(dmail_address.keys[0].target_key)
            root["difficulty"] = int(difficulty)
            root["ssm"] = "mdh-v1"
            root["sse"] = base58.encode(sshtype.encodeMpint(dh.e))

            private_key = rsakey.RsaKey(privdata=dmail_address.site_privatekey)

            r = yield from\
                handler.node.chord_engine.tasks.send_store_updateable_key(\
                    dms.export(), private_key,\
                    version=int(time.time()*1000), store_key=True)

            handler._send_content(\
                pages.dmail_addr_settings_edit_success_content[0]\
                    .format(addr_enc, addr_enc[:32]).encode())
        elif req.startswith("/addr/settings/edit/"):
            addr_enc = req[20:]

            dmail_address = yield from\
                _load_dmail_address(handler, mbase32.decode(addr_enc))

            content = pages.dmail_addr_settings_edit_content[0].replace(\
                b"${DIFFICULTY}",\
                str(dmail_address.keys[0].difficulty).encode())
            content = content.replace(\
                b"${DMAIL_ADDRESS_SHORT}", addr_enc[:32].encode())
            content = content.replace(\
                b"${DMAIL_ADDRESS}", addr_enc.encode())
            content = content.replace(\
                b"${PRIVATE_KEY}",\
                base58.encode(dmail_address.site_privatekey).encode())
            content = content.replace(\
                b"${X}", base58.encode(dmail_address.keys[0].x).encode())
            content = content.replace(\
                b"${TARGET_KEY}",\
                base58.encode(dmail_address.keys[0].target_key).encode())

            handler._send_content([content, None])
        elif req.startswith("/addr/settings/"):
            addr_enc = req[15:]

            content = pages.dmail_addr_settings_content[0].replace(\
                b"${IFRAME_SRC}",\
                "edit/{}".format(addr_enc).encode())

            handler._send_content([content, None])
        elif req.startswith("/addr/"):
            addr_enc = req[6:]

            if log.isEnabledFor(logging.INFO):
                log.info("Viewing dmail address [{}].".format(addr_enc))

            content = pages.dmail_address_page_content[0].replace(\
                b"${IFRAME_SRC}", "view/{}".format(addr_enc).encode())

            handler._send_content([content, None])
        elif req.startswith("/tag/view/list/"):
            params = req[15:]

            p0 = params.index('/')
            tag = params[:p0]
            addr_enc = params[p0+1:]

            if log.isEnabledFor(logging.INFO):
                log.info("Viewing dmails with tag [{}] for address [{}]."\
                    .format(tag, addr_enc))

            start = pages.dmail_tag_view_list_start.replace(\
                b"${TAG_NAME}", tag.encode())
            #FIXME: This is getting inefficient now, maybe time for Flask or
            # something like it.  Maybe we can use just it's template renderer.
            start = start.replace(b"${DMAIL_ADDRESS}", addr_enc.encode())
            start = start.replace(\
                b"${DMAIL_ADDRESS2}",\
                "{}...".format(addr_enc[:32]).encode())

            handler._send_partial_content(start, True)

            yield from\
                _list_dmails_for_tag(handler, mbase32.decode(addr_enc), tag)

            handler._send_partial_content(pages.dmail_tag_view_list_end)
            handler._end_partial_content()
        elif req.startswith("/tag/view/"):
            params = req[10:]

            content = pages.dmail_tag_view_content[0].replace(\
                b"${IFRAME_SRC}", "../list/{}".format(params).encode())

            handler._send_content(content)
        elif req.startswith("/scan/list/"):
            addr_enc = req[11:]

            if log.isEnabledFor(logging.INFO):
                log.info("Viewing inbox for dmail address [{}]."\
                    .format(addr_enc))

            start = pages.dmail_inbox_start.replace(\
                b"${DMAIL_ADDRESS}", addr_enc.encode())
            start = start.replace(\
                b"${DMAIL_ADDRESS2}", "{}...".format(addr_enc[:32]).encode())
            handler._send_partial_content(start, True)

            addr, significant_bits = mutil.decode_key(addr_enc)

            yield from _scan_new_dmails(handler, addr, significant_bits)

            handler._send_partial_content(pages.dmail_inbox_end)
            handler._end_partial_content()
        elif req.startswith("/scan/"):
            addr_enc = req[6:]

            content = pages.dmail_address_page_content[0].replace(\
                b"${IFRAME_SRC}", "list/{}".format(addr_enc).encode())

            handler._send_content([content, None])
        elif req.startswith("/fetch/view/"):
            keys = req[12:]
            p0 = keys.index('/')
            dmail_addr_enc = keys[:p0]
            dmail_key_enc = keys[p0+1:]

            dmail_addr = mbase32.decode(dmail_addr_enc)
            dmail_key = mbase32.decode(dmail_key_enc)

            # Prefer the locally stored copy; fall back to the network.
            dm = yield from _load_dmail(handler, dmail_key)

            if dm:
                valid_sig = dm.sender_valid
            else:
                dm, valid_sig =\
                    yield from _fetch_dmail(handler, dmail_addr, dmail_key)

            dmail_text = _format_dmail(dm, valid_sig)

            handler._send_content(\
                dmail_text.encode(), content_type="text/plain")
        elif req.startswith("/fetch/panel/mark_as_read/"):
            req_data = req[26:]

            p0 = req_data.index('/')
            dmail_key_enc = req_data[p0+1:]
            dmail_key = mbase32.decode(dmail_key_enc)

            def processor(dmail):
                dmail.read = not dmail.read
                return True

            yield from _process_dmail_message(handler, dmail_key, processor)

            handler._send_204()
        elif req.startswith("/fetch/panel/trash/"):
            req_data = req[20:]

            p0 = req_data.index('/')
            dmail_key_enc = req_data[p0+1:]
            dmail_key = mbase32.decode(dmail_key_enc)

            def processor(dmail):
                dmail.hidden = not dmail.hidden
                return True

            yield from _process_dmail_message(handler, dmail_key, processor)

            handler._send_204()
        elif req.startswith("/fetch/panel/"):
            req_data = req[13:]

            content = pages.dmail_fetch_panel_content[0].replace(\
                b"${DMAIL_IDS}", req_data.encode())

            handler._send_content([content, None])
        elif req.startswith("/fetch/wrapper/"):
            req_data = req[15:]

            content = pages.dmail_fetch_wrapper[0].replace(\
                b"${IFRAME_SRC}",\
                "../../view/{}"\
                    .format(req_data).encode())
            #FIXME: This is getting inefficient now, maybe time for Flask or
            # something like it.  Maybe we can use just it's template renderer.
            content = content.replace(\
                b"${IFRAME2_SRC}",\
                "../../panel/{}"\
                    .format(req_data).encode())

            handler._send_content([content, None])
        elif req.startswith("/fetch/"):
            req_data = req[7:]

            content = pages.dmail_address_page_content[0].replace(\
                b"${IFRAME_SRC}", "../wrapper/{}".format(req_data).encode())

            handler._send_content([content, None])
        elif req == "/create_address":
            handler._send_content(pages.dmail_create_address_content)
        elif req == "/create_address/form":
            handler._send_content(pages.dmail_create_address_form_content)
        elif req.startswith("/create_address/make_it_so?"):
            query = req[27:]

            qdict = urllib.parse.parse_qs(query, keep_blank_values=True)

            prefix = qdict["prefix"][0]
            difficulty = int(qdict["difficulty"][0])

            log.info("prefix=[{}].".format(prefix))

            privkey, dmail_key, dms =\
                yield from _create_dmail_address(handler, prefix, difficulty)

            dmail_key_enc = mbase32.encode(dmail_key)

            handler._send_partial_content(pages.dmail_frame_start, True)
            handler._send_partial_content(b"SUCCESS<br/>")
            handler._send_partial_content(\
                """<p>New dmail address: <a href="../addr/{}">{}</a></p>"""\
                    .format(dmail_key_enc, dmail_key_enc).encode())
            handler._send_partial_content(pages.dmail_frame_end)
            handler._end_partial_content()
        else:
            handler._handle_error()
def send_content(self, content_entry, cacheable=True, content_type=None,\
        charset=None):
    """Send a complete HTTP response for the given content.

    content_entry may be a [content, content_id(, content_type)] list/tuple
    or bare content (str/bytes/callable).  Bare content is never cacheable.
    Handles ETag-based 304 responses and morphis:// URL prefix rewriting
    when the browser plugin is not in use.

    NOTE(review): when content_entry is a tuple (not a list) and cacheable
    with a falsy content_id, the `content_entry[1] = content_id` memoization
    below would raise TypeError — presumably only lists are passed in that
    case; confirm against callers.
    """
    if type(content_entry) in (list, tuple):
        content = content_entry[0]
        content_id = content_entry[1]
        if len(content_entry) == 3 and not content_type:
            content_type = content_entry[2]
    else:
        content = content_entry
        cacheable = False

    if not content_type:
        if not charset:
            charset = self.get_accept_charset()
        content_type = "text/html; charset={}".format(charset)

    if type(content) is str:
        if charset:
            content = content.encode(charset)
        else:
            content = content.encode()

    if not self.handler.maalstroom_plugin_used:
        # No browser plugin: rewrite morphis:// links to the HTTP proxy
        # prefix so they remain clickable.
        content =\
            content.replace(\
                b"morphis://", self.handler.maalstroom_url_prefix)

    if cacheable and not content_id:
        # Lazily generate and memoize the content hash used as the ETag.
        if callable(content):
            content = content()
        log.info("Generating content_id.")
        content_id = mbase32.encode(enc.generate_ID(content))
        content_entry[1] = content_id

    etag = self.handler.headers["If-None-Match"]
    if cacheable and etag == content_id:
#        #TODO: Consider getting rid of this updateablekey support here
#        # because we don't send updateable keys this way ever.
#        updateable_key = etag.startswith("updateablekey-")
        cache_control = self.handler.headers["Cache-Control"]
#        if not (updateable_key and cache_control == "max-age=0")\
#                and cache_control != "no-cache":
        if cache_control != "no-cache":
            # Browser already has this exact content; answer 304 Not
            # Modified with an empty body.
            self.send_response(304)
#            if updateable_key:
#                p0 = etag.index('-')
#                p1 = etag.index('-', p0 + 1)
#                version = etag[p0:p1]
#                self.send_header(\
#                    "X-Maalstroom-UpdateableKey-Version",\
#                    version)
#                self.send_header("Cache-Control", "public,max-age=15")
#            else:
            self.send_header("Cache-Control", "public,max-age=300")
            self.send_header("ETag", content_id)
            self.send_header("Content-Length", 0)
            self.end_headers()
            self.finish_response()
            return

    if callable(content):
        content = content()

    # Full 200 response with the content body.
    self.send_response(200)
    self.send_header("Content-Length", len(content))
    self.send_header("Content-Type", content_type)
    if cacheable:
        self.send_header("Cache-Control", "public,max-age=300")
        self.send_header("ETag", content_id)
    else:
        self._send_no_cache()
    self.end_headers()
    self.write(content)
    self.finish_response()
    return
def dispatch_get_data(self, rpath): orig_etag = etag = self.handler.headers["If-None-Match"] if etag: updateable_key = etag.startswith("updateablekey-") if updateable_key: p0 = etag.index('-') + 1 p1 = etag.find('-', p0) if p1 != -1: version_from_etag = etag[p0:p1] etag = etag[p1 + 1:] else: version_from_etag = None etag = etag[p0:] else: updateable_key = False if etag == rpath: # If browser has it cached. cache_control = self.handler.headers["Cache-Control"] if not (updateable_key and cache_control == "max-age=0")\ and cache_control != "no-cache": self.send_response(304) if updateable_key: if version_from_etag: self.send_header(\ "X-Maalstroom-UpdateableKey-Version",\ version_from_etag) self.send_header("Cache-Control", "public,max-age=15") self.send_header("ETag", orig_etag) else: self.send_header(\ "Cache-Control", "public,max-age=315360000") self.send_header("ETag", rpath) self.send_header("Content-Length", 0) self.end_headers() self.finish_response() return if not self.connection_count: self.send_error("No connected nodes; cannot fetch from the"\ " network.") return path_sep_idx = rpath.find('/') if path_sep_idx != -1: path = rpath[path_sep_idx + 1:].encode() rpath = rpath[:path_sep_idx] else: path = None if not rpath: msg = "Empty key was specified." log.warning(msg) self.send_error(msg, 400) return try: data_key, significant_bits = mutil.decode_key(rpath) except (ValueError, IndexError) as e: log.exception("mutil.decode_key(..), rpath=[{}].".format(rpath)) self.send_error("Invalid encoded key: [{}].".format(rpath), 400) return if significant_bits: # Resolve key via send_find_key. 
if significant_bits < 32: log.warning("Request supplied key with too few bits [{}]."\ .format(significant_bits)) self.send_error(\ "Key must have at least 32 bits or 7 characters,"\ " len(key)=[{}].".format(len(rpath)), 400) return try: data_rw =\ yield from asyncio.wait_for(\ self.node.chord_engine.tasks.send_find_key(\ data_key, significant_bits),\ 15.0,\ loop=self.loop) data_key = data_rw.data_key except asyncio.TimeoutError: data_key = None if not data_key: self.send_error(b"Key Not Found", errcode=404) return if log.isEnabledFor(logging.INFO): log.info("Found key=[{}].".format(mbase32.encode(data_key))) key_enc = mbase32.encode(data_rw.data_key) if path: url = "{}{}/{}"\ .format(\ self.handler.maalstroom_url_prefix_str,\ key_enc,\ path.decode("UTF-8")) else: url = "{}{}"\ .format(\ self.handler.maalstroom_url_prefix_str,\ key_enc) message = "<html><head><title>Redirecting to Full Key</title>"\ "</head><body><a href=\"{}\">{}</a>\n{}</body></html>"\ .format(url, url, key_enc).encode() self.send_301(url, message) return if log.isEnabledFor(logging.DEBUG): log.debug("Sending GetData: key=[{}], path=[{}]."\ .format(mbase32.encode(data_key), significant_bits, path)) queue = asyncio.Queue(loop=self.loop) # Start the download. try: data_callback = Downloader(self, queue) @asyncio.coroutine def call_wrapper(): try: yield from multipart.get_data(\ self.node.chord_engine, data_key, data_callback,\ path=path, ordered=True) except Exception as e: log.exception("multipart.get_data(..)") data_callback.exception = e data_callback.notify_finished(False) asyncio. async (call_wrapper(), loop=self.loop) except Exception as e: log.exception("send_get_data(..)") self.send_exception(e) return log.debug("Waiting for first data.") #TODO: This can be improved. Right now it causes the response to wait # for the first block of data to be fetched (which could be after a # few hash blocks are fetched) before it allows us to send the headers. 
# This would cause the browser to report the size rigth away instead of # seeming to take longer. It would require the response to be always be # chunked as we don't know until we get that first data if we are going # to rewrite or not. Such improvement wouldn't increase the speed or # anything so it can wait as it is only cosmetic likely. data = yield from queue.get() if data: if data is Error: self.send_exception(data_callback.exception) self.send_response(200) rewrite_urls = False if data_callback.mime_type: self.send_header("Content-Type", data_callback.mime_type) if data_callback.mime_type\ in ("text/html", "text/css", "application/javascript"): rewrite_urls = True else: dh = data[:160] if dh[0] == 0xFF and dh[1] == 0xD8: self.send_header("Content-Type", "image/jpg") elif dh[0] == 0x89 and dh[1:4] == b"PNG": self.send_header("Content-Type", "image/png") elif dh[:5] == b"GIF89": self.send_header("Content-Type", "image/gif") elif dh[:5] == b"/*CSS": self.send_header("Content-Type", "text/css") rewrite_urls = True elif dh[:12] == b"/*JAVASCRIPT": self.send_header("Content-Type", "application/javascript") rewrite_urls = True elif dh[:8] == bytes(\ [0x00, 0x00, 0x00, 0x18, 0x66, 0x74, 0x79, 0x70])\ or dh[:8] == bytes(\ [0x00, 0x00, 0x00, 0x1c, 0x66, 0x74, 0x79, 0x70]): self.send_header("Content-Type", "video/mp4") elif dh[:8] == bytes(\ [0x50, 0x4b, 0x03, 0x04, 0x0a, 0x00, 0x00, 0x00]): self.send_header("Content-Type", "application/zip") elif dh[:5] == bytes(\ [0x25, 0x50, 0x44, 0x46, 0x2d]): self.send_header("Content-Type", "application/pdf") elif dh[:4] == b"RIFF" and dh[8:11] == b"AVI": self.send_header("Content-Type", "video/avi") else: dhl = dh.lower() if (dhl.find(b"<html") > -1 or dhl.find(b"<HTML>") > -1)\ and (dhl.find(b"<head>") > -1\ or dhl.find(b"<HEAD") > -1): self.send_header("Content-Type", "text/html") rewrite_urls = True else: self.send_header(\ "Content-Type", "application/octet-stream") rewrite_urls = rewrite_urls\ and not 
self.handler.maalstroom_plugin_used if rewrite_urls: self.send_header("Transfer-Encoding", "chunked") else: self.send_header("Content-Length", data_callback.size) if data_callback.version is not None: self.send_header(\ "X-Maalstroom-UpdateableKey-Version",\ data_callback.version) self.send_header("Cache-Control", "public,max-age=15") self.send_header(\ "ETag",\ "updateablekey-" + str(data_callback.version) + '-'\ + rpath) else: self.send_header("Cache-Control", "public,max-age=315360000") self.send_header("ETag", rpath) self.end_headers() while True: if rewrite_urls: self.send_partial_content(data) else: self.write(data) data = yield from queue.get() if data is None: if rewrite_urls: self.end_partial_content() else: self.finish_response() break elif data is Error: if rewrite_urls: self._fail_partial_content() else: self.close() break if self._abort_event.is_set(): if log.isEnabledFor(logging.INFO): log.info(\ "Maalstroom request got broken pipe from HTTP"\ " side; cancelling.") data_callback.abort = True break else: self.send_error(b"Data not found on network.", 404)
def __fetch_hash_tree_ref(self, data_key, depth, position, retry=None):
    """Fetch one block referenced by a hash tree and deliver or recurse.

    depth 0 means a leaf data block (delivered via data_callback); deeper
    blocks contain further references and are expanded recursively.
    retry, when set, is the mutable list [depth, position, data_key, tries]
    tracking a previously failed fetch.
    """
    if not retry:
        data_rw = yield from self.engine.tasks.send_get_data(data_key)
    else:
        # Retry fetch: scale retry_factor with the number of tries so far.
        data_rw = yield from self.engine.tasks.send_get_data(\
            data_key, retry_factor=retry[3] * 10)

    # Free the concurrency slot before any further waiting below.
    self._task_semaphore.release()

    if self._abort:
        return

    if not data_rw.data:
        # Fetch failed.
        if retry:
            retry[3] += 1 # Tries.

            if retry[3] >= 32:
                # Give up on the whole download after too many tries.
                if log.isEnabledFor(logging.INFO):
                    log.info("Block id [{}] failed too much; aborting."\
                        .format(mbase32.encode(data_key)))
                self._do_abort()
                return
        else:
            # First failure: queue it for a retry pass.
            retry = [depth, position, data_key, 1]
            self._failed.append(retry)

        if log.isEnabledFor(logging.INFO):
            log.info("Block id [{}] failed, retrying (tries=[{}])."\
                .format(mbase32.encode(data_key), retry[3]))

        if self.ordered:
            # This very fetch is probably blocking future ones so retry
            # immediately!
            self._schedule_retry()
    else:
        if retry:
            if log.isEnabledFor(logging.INFO):
                log.info("Succeeded with retry [{}] on try [{}]."\
                    .format(mbase32.encode(data_key), retry[3]))

        if self.ordered:
            if position != self._next_position:
                # In-order delivery: wait until earlier positions complete.
                waiter = asyncio.futures.Future(loop=self.engine.loop)
                yield from self.__wait(position, waiter)

        if not depth:
            # Leaf block: hand the bytes to the consumer.
            r = self.data_callback.notify_data(position, data_rw.data)
            if not r:
                if log.isEnabledFor(logging.DEBUG):
                    log.debug("Received cancel signal; aborting download.")
                self._do_abort()
                return
            self.__notify_position_complete(position + len(data_rw.data))
        else:
            # Interior block: expand its child references.
            yield from\
                self._fetch_hash_tree_refs(\
                    data_rw.data, 0, depth, position)

    # Task bookkeeping; signal completion when the last task finishes.
    self._task_cnt -= 1
    if self._task_cnt <= 0:
        assert self._task_cnt == 0
        self._tasks_done.set()
def __main():
    """mcc entry coroutine: parse CLI arguments, connect a lightweight
    client node and execute the requested dmail/status commands, then
    disconnect and stop the loop."""
    global loop

    log.info("mcc running.")

    parser = argparse.ArgumentParser()
    parser.add_argument(\
        "--address",\
        help="The address of the Morphis node to connect to.",\
        default="127.0.0.1:4250")
    parser.add_argument(\
        "--create-dmail",\
        help="Generate and upload a new dmail site.",\
        action="store_true")
    parser.add_argument(\
        "--dburl",\
        help="Specify the database url to use.")
    parser.add_argument(\
        "--fetch-dmail",
        help="Fetch dmail for specified key_id.")
    parser.add_argument(\
        "-i",\
        help="Read file as stdin.")
    parser.add_argument("--nn", type=int,\
        help="Node instance number.")
    parser.add_argument(\
        "--prefix",\
        help="Specify the prefix for various things (currently --create-dmail"\
            ").")
    parser.add_argument(\
        "--scan-dmail",\
        help="Scan the network for available dmails.")
    parser.add_argument(\
        "--send-dmail",\
        help="Send stdin as a dmail with the specified subject. The"\
            " sender and recipients may be specified at the beginning of the"\
            " data as with email headers: 'from: ' and 'to: '.")
    parser.add_argument(\
        "--stat",\
        help="Report node status.",\
        action="store_true")
    parser.add_argument("-l", dest="logconf",\
        help="Specify alternate logging.ini [IF SPECIFIED, THIS MUST BE THE"\
            " FIRST PARAMETER!].")
    parser.add_argument(\
        "--dmail-target",\
        help="Specify the dmail target to validate dmail against.")
    parser.add_argument(\
        "-x",\
        help="Specify the x (Diffie-Hellman private secret) to use.")

    args = parser.parse_args()

    # Load or generate client mcc key.
    key_filename = "data/mcc_key-rsa.mnk"
    if os.path.exists(key_filename):
        log.info("mcc private key file found, loading.")
        client_key = rsakey.RsaKey(filename=key_filename)
    else:
        log.info("mcc private key file missing, generating.")
        client_key = rsakey.RsaKey.generate(bits=4096)
        client_key.write_private_key_file(key_filename)

    # Connect a Morphis Client (lightweight Node) instance.
    mc = client.Client(loop, client_key=client_key, address=args.address)
    r = yield from mc.connect()

    if not r:
        log.warning("Connection failed; exiting.")
        loop.stop()
        return

    dbase = init_db(args)
    de = dmail.DmailEngine(mc, dbase)

    log.info("Processing command requests...")

    if args.stat:
        r = yield from mc.send_command("stat")
        print(r.decode("UTF-8"), end='')

    if args.create_dmail:
        log.info("Creating and uploading dmail site.")

        privkey, data_key, dms =\
            yield from de.generate_dmail_address(args.prefix)

        print("privkey: {}".format(base58.encode(privkey._encode_key())))
        print("x: {}".format(base58.encode(sshtype.encodeMpint(dms.dh.x))))
        print("dmail address: {}".format(mbase32.encode(data_key)))

    if args.send_dmail:
        log.info("Sending dmail.")

        if args.i:
            with open(args.i, "rb") as fh:
                dmail_data = fh.read().decode()
        else:
            dmail_data = stdin.read()

        if log.isEnabledFor(logging.DEBUG):
            log.debug("dmail_data=[{}].".format(dmail_data))

        yield from de.send_dmail_text(args.send_dmail, dmail_data)

    if args.scan_dmail:
        log.info("Scanning dmail address.")

        addr, sig_bits = mutil.decode_key(args.scan_dmail)

        def key_callback(key):
            print("dmail key: [{}].".format(mbase32.encode(key)))

        yield from de.scan_dmail_address(\
            addr, sig_bits, key_callback=key_callback)

    if args.fetch_dmail:
        log.info("Fetching dmail for key=[{}].".format(args.fetch_dmail))

        key = mbase32.decode(args.fetch_dmail)

        if args.x:
            l, x_int = sshtype.parseMpint(base58.decode(args.x))
        else:
            x_int = None

        dmail_target = args.dmail_target

        dm, valid_sig = yield from de.fetch_dmail(key, x_int, dmail_target)

        if not dm:
            raise Exception("No dmail found.")

        if not x_int:
            # Without the DH secret we can only show the encrypted bytes.
            print("Encrypted dmail data=[\n{}].".format(mutil.hex_dump(dm)))
        else:
            print("Subject: {}\n".format(dm.subject))

            if dm.sender_pubkey:
                print("From: {}"\
                    .format(mbase32.encode(enc.generate_ID(dm.sender_pubkey))))

            i = 0
            for part in dm.parts:
                print("DmailPart[{}]:\n mime-type=[{}]\n data=[{}]\n"\
                    .format(i, part.mime_type, part.data))
                i += 1

    log.info("Disconnecting.")

    yield from mc.disconnect()

    loop.stop()
def key_callback(data_key):
    # Echo each discovered data key to the interactive session.
    encoded_key = mbase32.encode(data_key)
    self.writeln("data_key=[{}].".format(encoded_key))
def get_data(engine, data_key, data_callback, path=None, ordered=False,\
        positions=None, retry_seconds=30, concurrency=64, max_link_depth=1):
    """Fetch the data stored under data_key and stream it to data_callback,
    transparently resolving link blocks and hash-tree blocks.

    Unlike the older variant, this one signals completion via
    data_callback.notify_finished(..) on every successful exit path.

    Returns True when plain data was delivered, False when max_link_depth
    was exceeded, None when the data could not be fetched at all, and the
    result of HashTreeFetch.fetch(..) for hash-tree data.
    """
    assert not path or type(path) is bytes, type(path)
    assert isinstance(data_callback, DataCallback), type(data_callback)

    data_rw = yield from engine.tasks.send_get_data(data_key, path)
    data = data_rw.data

    if data is None:
        # First attempt failed; retry once with a higher retry_factor.
        data_rw = yield from engine.tasks.send_get_data(\
            data_key, path, retry_factor=10)
        data = data_rw.data

        if data is None:
            return None

    if data_rw.version:
        data_callback.notify_version(data_rw.version)
    else:
        #FIXME: Remove this from here after it is integrated into the coming
        # chord_task rewrite.
        # Reupload the key to keep prefix searches in the network.
        # Only ~1 in 5 fetches triggers the background reupload.
        r = random.randint(1, 5)
        if r == 1:
            asyncio.async(\
                engine.tasks.send_store_key(\
                    data_rw.data, data_key, retry_factor=50),\
                loop=engine.loop)

    link_depth = 0

    while True:
        if not data.startswith(MorphisBlock.UUID):
            # Plain (non-MorphisBlock) data; deliver it whole.
            data_callback.notify_size(len(data))
            data_callback.notify_data(0, data)
            data_callback.notify_finished(True)
            return True

        block_type = MorphisBlock.parse_block_type(data)

        if block_type == BlockType.link.value:
            link_depth += 1

            if link_depth > max_link_depth:
                if log.isEnabledFor(logging.WARNING):
                    log.warning(\
                        "Exceeded maximum link depth [{}] for key [{}]."\
                            .format(max_link_depth, mbase32.encode(data_key)))
                return False

            block = LinkBlock(data)

            if block.mime_type:
                data_callback.notify_mime_type(block.mime_type)

            # Follow the link; retry once on failure, as above.
            data_rw = yield from engine.tasks.send_get_data(block.destination)
            data = data_rw.data

            if data is None:
                data_rw = yield from engine.tasks.send_get_data(\
                    block.destination, retry_factor=10)
                data = data_rw.data

                if data is None:
                    return None

            continue

        if block_type != BlockType.hash_tree.value:
            # Unrecognized MorphisBlock type; deliver it as raw data.
            data_callback.notify_size(len(data))
            data_callback.notify_data(0, data)
            data_callback.notify_finished(True)
            return True

        # Hash-tree block: delegate the multi-block download.
        fetch = HashTreeFetch(\
            engine, data_callback, ordered, positions, retry_seconds,\
            concurrency)

        r = yield from fetch.fetch(HashTreeBlock(data))

        data_callback.notify_finished(r)

        return r
def __main():
    """mcc entry coroutine: parse CLI arguments, connect a lightweight
    client node and execute the requested dmail/status commands, then
    disconnect and stop the loop."""
    global loop

    log.info("mcc running.")

    parser = argparse.ArgumentParser()
    parser.add_argument(\
        "--address",\
        help="The address of the Morphis node to connect to.",\
        default="127.0.0.1:4250")
    parser.add_argument(\
        "--create-dmail",\
        help="Generate and upload a new dmail site.",\
        action="store_true")
    parser.add_argument(\
        "--dburl",\
        help="Specify the database url to use.")
    parser.add_argument(\
        "--fetch-dmail",
        help="Fetch dmail for specified key_id.")
    parser.add_argument(\
        "-i",\
        help="Read file as stdin.")
    parser.add_argument("--nn", type=int,\
        help="Node instance number.")
    parser.add_argument(\
        "--prefix",\
        help="Specify the prefix for various things (currently --create-dmail"\
            ").")
    parser.add_argument(\
        "--scan-dmail",\
        help="Scan the network for available dmails.")
    parser.add_argument(\
        "--send-dmail",\
        help="Send stdin as a dmail with the specified subject. The"\
            " sender and recipients may be specified at the beginning of the"\
            " data as with email headers: 'from: ' and 'to: '.")
    parser.add_argument(\
        "--stat",\
        help="Report node status.",\
        action="store_true")
    parser.add_argument("-l", dest="logconf",\
        help="Specify alternate logging.ini [IF SPECIFIED, THIS MUST BE THE"\
            " FIRST PARAMETER!].")
    parser.add_argument(\
        "--dmail-target",\
        help="Specify the dmail target to validate dmail against.")
    parser.add_argument(\
        "-x",\
        help="Specify the x (Diffie-Hellman private secret) to use.")

    args = parser.parse_args()

    # Load or generate client mcc key.
    key_filename = "data/mcc_key-rsa.mnk"
    if os.path.exists(key_filename):
        log.info("mcc private key file found, loading.")
        client_key = rsakey.RsaKey(filename=key_filename)
    else:
        log.info("mcc private key file missing, generating.")
        client_key = rsakey.RsaKey.generate(bits=4096)
        client_key.write_private_key_file(key_filename)

    # Connect a Morphis Client (lightweight Node) instance.
    mc = client.Client(loop, client_key=client_key, address=args.address)
    r = yield from mc.connect()

    if not r:
        log.warning("Connection failed; exiting.")
        loop.stop()
        return

    dbase = init_db(args)
    de = dmail.DmailEngine(mc, dbase)

    log.info("Processing command requests...")

    if args.stat:
        r = yield from mc.send_command("stat")
        print(r.decode("UTF-8"), end='')

    if args.create_dmail:
        log.info("Creating and uploading dmail site.")

        privkey, data_key, dms, storing_nodes =\
            yield from de.generate_dmail_address(args.prefix)

        print("privkey: {}".format(base58.encode(privkey._encode_key())))
        print("x: {}".format(base58.encode(sshtype.encodeMpint(dms.dh.x))))
        print("dmail address: {}".format(mbase32.encode(data_key)))
        # BUG FIX: previously formatted base58.encode(privkey._encode_key())
        # into this message, printing the private key encoding instead of
        # the storing node count.
        print("storing_nodes=[{}]."\
            .format(storing_nodes))

    if args.send_dmail:
        log.info("Sending dmail.")

        if args.i:
            with open(args.i, "rb") as fh:
                dmail_data = fh.read().decode()
        else:
            dmail_data = stdin.read()

        if log.isEnabledFor(logging.DEBUG):
            log.debug("dmail_data=[{}].".format(dmail_data))

        yield from de.send_dmail_text(args.send_dmail, dmail_data)

    if args.scan_dmail:
        log.info("Scanning dmail address.")

        addr, sig_bits = mutil.decode_key(args.scan_dmail)

        def key_callback(key):
            print("dmail key: [{}].".format(mbase32.encode(key)))

        yield from de.scan_dmail_address(\
            addr, sig_bits, key_callback=key_callback)

    if args.fetch_dmail:
        log.info("Fetching dmail for key=[{}].".format(args.fetch_dmail))

        key = mbase32.decode(args.fetch_dmail)

        if args.x:
            l, x_int = sshtype.parseMpint(base58.decode(args.x))
        else:
            x_int = None

        dmail_target = args.dmail_target

        dm, valid_sig = yield from de.fetch_dmail(key, x_int, dmail_target)

        if not dm:
            raise Exception("No dmail found.")

        if not x_int:
            # Without the DH secret we can only show the encrypted bytes.
            print("Encrypted dmail data=[\n{}].".format(mutil.hex_dump(dm)))
        else:
            print("Subject: {}\n".format(dm.subject))

            if dm.sender_pubkey:
                print("From: {}"\
                    .format(mbase32.encode(enc.generate_ID(dm.sender_pubkey))))

            i = 0
            for part in dm.parts:
                print("DmailPart[{}]:\n mime-type=[{}]\n data=[{}]\n"\
                    .format(i, part.mime_type, part.data))
                i += 1

    log.info("Disconnecting.")

    yield from mc.disconnect()

    loop.stop()
def key_callback(key):
    # Print each dmail key discovered during the scan.
    encoded = mbase32.encode(key)
    print("dmail key: [{}].".format(encoded))
def __send_dmail(self, from_asymkey, recipient, dmail):
    """Encrypt, sign (when from_asymkey is given), proof-of-work and upload
    a Dmail to the recipient's DmailSite; returns the number of storing
    nodes achieved."""
    assert type(recipient) is DmailSite

    # Read recipient parameters from their published DmailSite root.
    root = recipient.root
    sse = sshtype.parseMpint(base58.decode(root["sse"]))[1]
    target = root["target"]
    difficulty = root["difficulty"]

    # Derive the shared secret via Diffie-Hellman (group 14).
    dh = dhgroup14.DhGroup14()
    dh.generate_x()
    dh.generate_e()
    dh.f = sse

    k = dh.calculate_k()

    target_key = mbase32.decode(target)

    key = self._generate_encryption_key(target_key, k)

    # Encrypt the encoded Dmail.
    dmail_bytes = dmail.encode()

    m, r = enc.encrypt_data_block(dmail_bytes, key)
    if m:
        if r:
            m = m + r
    else:
        m = r

    dw = DmailWrapper()
    dw.ssm = _dh_method_name
    # NOTE(review): dw.sse is set to the recipient's published e while
    # dw.ssf carries our own e — confirm against the decrypt side.
    dw.sse = sse
    dw.ssf = dh.e

    if from_asymkey:
        dw.signature = from_asymkey.calc_rsassa_pss_sig(m)
    else:
        # Anonymous send: empty signature.
        dw.signature = b''

    dw.data_len = len(dmail_bytes)
    dw.data_enc = m

    tb = mp.TargetedBlock()
    tb.target_key = target_key
    # "noonce" (sic) is this codebase's spelling; zeroed until POW fills it.
    tb.noonce = int(0).to_bytes(64, "big")
    tb.block = dw

    tb_data = tb.encode()
    tb_header = tb_data[:mp.TargetedBlock.BLOCK_OFFSET]

    if log.isEnabledFor(logging.INFO):
        log.info(\
            "Attempting work on dmail (target=[{}], difficulty=[{}])."\
                .format(target, difficulty))

    # Proof-of-work runs in a thread so the event loop stays responsive.
    def threadcall():
        return brute.generate_targeted_block(\
            target_key, difficulty, tb_header,\
            mp.TargetedBlock.NOONCE_OFFSET,\
            mp.TargetedBlock.NOONCE_SIZE)

    noonce_bytes = yield from self.loop.run_in_executor(None, threadcall)

    if log.isEnabledFor(logging.INFO):
        log.info("Work found noonce [{}].".format(noonce_bytes))

    mp.TargetedBlock.set_noonce(tb_data, noonce_bytes)

    if log.isEnabledFor(logging.INFO):
        mp.TargetedBlock.set_noonce(tb_header, noonce_bytes)
        log.info("hash=[{}]."\
            .format(mbase32.encode(enc.generate_ID(tb_header))))

    key = None

    def key_callback(val):
        nonlocal key
        key = val

    log.info("Sending dmail to the network.")

    if log.isEnabledFor(logging.DEBUG):
        log.debug("dmail block data=[\n{}]."\
            .format(mutil.hex_dump(tb_data)))

    # Upload until at least 3 nodes store it, with capped retries.
    total_storing = 0
    retry = 0
    while True:
        storing_nodes = yield from\
            self.task_engine.send_store_targeted_data(\
                tb_data, store_key=True, key_callback=key_callback,\
                retry_factor=retry * 10)

        total_storing += storing_nodes

        if total_storing >= 3:
            break

        if retry > 32:
            break
        elif retry > 3:
            yield from asyncio.sleep(1)

        retry += 1

    key_enc = mbase32.encode(key)
    id_enc = mbase32.encode(enc.generate_ID(key))

    if log.isEnabledFor(logging.INFO):
        log.info("Dmail sent; key=[{}], id=[{}], storing_nodes=[{}]."\
            .format(key_enc, id_enc, total_storing))

    return total_storing
def scan_and_save_new_dmails(self, dmail_address):
    """Walk the network keyspace for Dmails addressed to dmail_address,
    fetching and saving any we do not already have.

    Returns (new_dmail_cnt, old_dmail_cnt, err_dmail_cnt).
    """
    new_dmail_cnt = 0
    old_dmail_cnt = 0
    err_dmail_cnt = 0

    address_key = dmail_address.keys[0]
    target = address_key.target_key
    significant_bits = address_key.difficulty

    start = target

    # Closes over dmail_key, which is rebound each loop iteration below.
    def check_have_dmail_dbcall():
        with self.db.open_session() as sess:
            q = sess.query(func.count("*")).select_from(db.DmailMessage)\
                .filter(db.DmailMessage.data_key == dmail_key)

            if q.scalar():
                return True
            return False

    while True:
        # Each find continues from the last key found (start).
        data_rw = yield from self.task_engine.send_find_key(\
            start, target_key=target, significant_bits=significant_bits,\
            retry_factor=100)

        start = dmail_key = data_rw.data_key

        if not dmail_key:
            if log.isEnabledFor(logging.INFO):
                log.info("No more Dmails found for address (id=[{}])."\
                    .format(dmail_address.id))
            break

        if log.isEnabledFor(logging.INFO):
            key_enc = mbase32.encode(dmail_key)
            log.info("Found dmail key: [{}].".format(key_enc))

        exists =\
            yield from self.loop.run_in_executor(\
                None, check_have_dmail_dbcall)

        if exists:
            # key_enc is defined above whenever DEBUG is enabled, since
            # DEBUG level implies INFO is enabled too.
            if log.isEnabledFor(logging.DEBUG):
                log.debug("Ignoring dmail (key=[{}]) we already have."\
                    .format(key_enc))
            old_dmail_cnt += 1
            continue

        try:
            yield from self._fetch_and_save_dmail(\
                dmail_key, dmail_address, address_key)
            new_dmail_cnt += 1
        except Exception as e:
            # Best-effort scan: log and keep going on per-Dmail failures.
            log.exception("Trying to fetch and save Dmail for key [{}]"\
                " caused exception: {}"\
                    .format(mbase32.encode(dmail_key), e))
            err_dmail_cnt += 1

    if log.isEnabledFor(logging.INFO):
        if new_dmail_cnt:
            log.info("Moved [{}] Dmails to Inbox.".format(new_dmail_cnt))
        else:
            log.info("No new Dmails.")

    return new_dmail_cnt, old_dmail_cnt, err_dmail_cnt
def _send_dmail(self, from_asymkey, recipient, dmail_bytes, signature):
    # Encrypt pre-encoded Dmail bytes, do the POW, and store the result
    # on the network. Returns the number of nodes that stored the block.
    #
    # NOTE(review): the `signature` parameter is never used here and
    # `dw.signature` is never assigned, unlike the sibling __send_dmail
    # which always sets it (possibly to b''). Confirm DmailWrapper.encode
    # tolerates a missing signature, or that this path is intentional.
    assert type(recipient) is DmailSite

    # Read in recipient DmailSite.
    root = recipient.root
    sse = sshtype.parseMpint(base58.decode(root["sse"]))[1]
    target_enc = root["target"]
    difficulty = root["difficulty"]

    # Calculate a shared secret.
    dh = dhgroup14.DhGroup14()
    dh.generate_x()
    dh.generate_e()
    dh.f = sse

    k = dh.calculate_k()

    target_key = mbase32.decode(target_enc)
    key = self._generate_encryption_key(target_key, k)

    # Encrypt the Dmail bytes.
    # encrypt_data_block may return (main, remainder); concatenate.
    m, r = enc.encrypt_data_block(dmail_bytes, key)
    if m:
        if r:
            m = m + r
    else:
        m = r

    # Store it in a DmailWrapper.
    dw = DmailWrapper()
    dw.ssm = _dh_method_name
    dw.sse = sse
    dw.ssf = dh.e
    dw.data_len = len(dmail_bytes)
    dw.data_enc = m

    # Store the DmailWrapper in a TargetedBlock.
    tb = mp.TargetedBlock()
    tb.target_key = target_key
    tb.nonce = int(0).to_bytes(64, "big")
    tb.block = dw

    # NOTE: the header slice is taken before the nonce is found; the POW
    # result is written back into this header region below.
    tb_data = tb.encode()
    tb_header = tb_data[:mp.TargetedBlock.BLOCK_OFFSET]

    # Do the POW on the TargetedBlock.
    if log.isEnabledFor(logging.INFO):
        log.info(\
            "Attempting work on dmail (target=[{}], difficulty=[{}])."\
                .format(target_enc, difficulty))

    # CPU-bound work runs in an executor thread off the event loop.
    def threadcall():
        return brute.generate_targeted_block(\
            target_key, difficulty, tb_header,\
            mp.TargetedBlock.NOONCE_OFFSET,\
            mp.TargetedBlock.NOONCE_SIZE)

    nonce_bytes = yield from self.loop.run_in_executor(None, threadcall)

    if log.isEnabledFor(logging.INFO):
        log.info("Work found nonce [{}].".format(nonce_bytes))

    mp.TargetedBlock.set_nonce(tb_data, nonce_bytes)

    if log.isEnabledFor(logging.INFO):
        # Header updated only so the logged key matches what is stored.
        mp.TargetedBlock.set_nonce(tb_header, nonce_bytes)
        log.info("Message key=[{}]."\
            .format(mbase32.encode(enc.generate_ID(tb_header))))

    # Rebound as the network data key, reported via key_callback below.
    key = None

    def key_callback(val):
        nonlocal key
        key = val

    if log.isEnabledFor(logging.DEBUG):
        log.debug("TargetedBlock dump=[\n{}]."\
            .format(mutil.hex_dump(tb_data)))

    # Upload the TargetedBlock to the network.
    log.info("Sending dmail to the network.")

    # Retry until at least 3 nodes store it; back off after a few attempts
    # and give up after 32 retries.
    total_storing = 0
    retry = 0
    while True:
        storing_nodes = yield from\
            self.task_engine.send_store_targeted_data(\
                tb_data, store_key=True, key_callback=key_callback,\
                retry_factor=retry * 10)

        total_storing += storing_nodes

        if total_storing >= 3:
            break

        if retry > 32:
            break
        elif retry > 3:
            yield from asyncio.sleep(1)

        retry += 1

    key_enc = mbase32.encode(key)
    id_enc = mbase32.encode(enc.generate_ID(key))

    if log.isEnabledFor(logging.INFO):
        log.info("Dmail sent; key=[{}], id=[{}], storing_nodes=[{}]."\
            .format(key_enc, id_enc, total_storing))

    return total_storing
def _do_POST(self, rpath):
    # Handle a POST request: delegate non-upload paths to the dmail UI;
    # otherwise parse the upload (raw body or multipart form), store the
    # data on the network, and respond with link(s) to it.
    log.info("POST; rpath=[{}].".format(rpath))

    if rpath != ".upload/upload":
        yield from maalstroom.dmail.serve_post(self, rpath)
        return

    if not self.connection_count:
        self.send_error("No connected nodes; cannot upload to the"\
            " network.")
        return

    if log.isEnabledFor(logging.DEBUG):
        log.debug("headers=[{}].".format(self.handler.headers))

    version = None
    path = None
    mime_type = None

    if self.handler.headers["Content-Type"]\
            == "application/x-www-form-urlencoded":
        # Raw body upload: no form fields, so no key/path/version.
        log.debug("Content-Type=[application/x-www-form-urlencoded].")

        data = yield from self.read_request()
        privatekey = None
    else:
        if log.isEnabledFor(logging.DEBUG):
            log.debug("Content-Type=[{}]."\
                .format(self.handler.headers["Content-Type"]))

        data = yield from self.read_request()

        form = cgi.FieldStorage(\
            fp=io.BytesIO(data),\
            headers=self.handler.headers,\
            environ={\
                "REQUEST_METHOD": "POST",\
                "CONTENT_TYPE": self.handler.headers["Content-Type"]})

        if log.isEnabledFor(logging.DEBUG):
            log.debug("form=[{}].".format(form))

        formelement = form["fileToUpload"]
        filename = formelement.filename
        data = formelement.file.read()

        if log.isEnabledFor(logging.INFO):
            log.info("filename=[{}].".format(filename))

        privatekey = form["privateKey"].value

        # "${PRIVATE_KEY}" is the untouched form placeholder value.
        if privatekey and privatekey != "${PRIVATE_KEY}":
            if log.isEnabledFor(logging.INFO):
                log.info("privatekey=[{}].".format(privatekey))

            privatekey = base58.decode(privatekey)
            privatekey = rsakey.RsaKey(privdata=privatekey)

            path = form["path"].value.encode()
            version = form["version"].value
            if not version:
                version = 0
            else:
                version = int(version)
            mime_type = form["mime_type"].value
        else:
            privatekey = None

    if log.isEnabledFor(logging.DEBUG):
        log.debug("data=[{}].".format(data))

    if not privatekey:
        assert not version and not path and not mime_type

    # Created outside the try so it is always bound, even if store_data
    # raises before assigning anything.
    key_callback = KeyCallback()

    try:
        yield from multipart.store_data(\
            self.node.chord_engine, data, privatekey=privatekey,\
            path=path, version=version, key_callback=key_callback,\
            mime_type=mime_type)
    except asyncio.TimeoutError:
        self.send_error(errcode=408)
        # An error response was already sent; without this return the code
        # below would reference the unset `url` and raise NameError.
        return
    except Exception as e:
        log.exception("send_store_data(..)")
        self.send_exception(e)
        return

    if key_callback.data_key:
        enckey = mbase32.encode(key_callback.data_key)
        if privatekey and path:
            url = "{}{}/{}"\
                .format(\
                    self.handler.maalstroom_url_prefix_str,\
                    enckey,\
                    path.decode("UTF-8"))
        else:
            url = "{}{}"\
                .format(\
                    self.handler.maalstroom_url_prefix_str,\
                    enckey)

    if privatekey:
        # Updateable-key upload: link the mutable key, and the immutable
        # referred key if one was produced.
        message = '<a id="key" href="{}">updateable key link</a>'\
            .format(url)

        if key_callback.referred_key:
            message +=\
                '<br/><a id="referred_key" href="{}{}">perma link</a>'\
                    .format(\
                        self.handler.maalstroom_url_prefix_str,\
                        mbase32.encode(key_callback.referred_key))
    else:
        message = '<a id="key" href="{}">perma link</a>'.format(url)

    self.send_response(200)
    self.send_header("Content-Type", "text/html")
    self.send_header("Content-Length", len(message))
    self.end_headers()
    self.write(bytes(message, "UTF-8"))
    self.finish_response()
def send_content(self, content_entry, cacheable=True, content_type=None,\
        charset=None):
    # Send an HTTP response for the given content, with ETag-based caching.
    # content_entry: either raw content (str/bytes, never cached), or a
    # [content, content_id(, content_type)] list/tuple; a missing id is
    # generated and, for lists, written back for reuse.
    content_id = None

    if type(content_entry) in (list, tuple):
        content = content_entry[0]
        content_id = content_entry[1]
        if len(content_entry) == 3 and not content_type:
            content_type = content_entry[2]
    else:
        content = content_entry
        cacheable = False

    if not content_type:
        if not charset:
            charset = self.get_accept_charset()
        content_type = "text/html; charset={}".format(charset)

    if type(content) is str:
        if charset:
            content = content.encode(charset)
        else:
            content = content.encode()

    # Without the browser plugin, rewrite scheme links to the proxy prefix.
    if not self.handler.maalstroom_plugin_used:
        content =\
            content.replace(\
                b"morphis://", self.handler.maalstroom_url_prefix)

    if cacheable and not content_id:
        if callable(content):
            content = content()
        log.info("Generating content_id.")
        content_id = mbase32.encode(enc.generate_ID(content))
        # Only lists support write-back caching of the generated id;
        # the original assigned unconditionally, raising TypeError for
        # tuple entries.
        if type(content_entry) is list:
            content_entry[1] = content_id

    # Honor conditional requests: 304 when the client's ETag matches.
    etag = self.handler.headers["If-None-Match"]
    if cacheable and etag == content_id:
        cache_control = self.handler.headers["Cache-Control"]
        if cache_control != "no-cache":
            self.send_response(304)
            self.send_header("Cache-Control", "public,max-age=300")
            self.send_header("ETag", content_id)
            self.send_header("Content-Length", 0)
            self.end_headers()
            self.finish_response()
            return

    # Lazily-produced content is materialized just before sending.
    if callable(content):
        content = content()

    self.send_response(200)
    self.send_default_headers()
    self.send_header("Content-Length", len(content))
    self.send_header("Content-Type", content_type)
    if cacheable:
        self.send_header("Cache-Control", "public,max-age=300")
        self.send_header("ETag", content_id)
    else:
        self._send_no_cache()

    self.send_frame_options_header()

    self.end_headers()
    self.write(content)
    self.finish_response()
    return
def scan_and_save_new_dmails(self, dmail_address):
    # Sweep the network keyspace around this address' target key and pull
    # down any Dmails we do not already hold locally.
    # Returns a (new_count, old_count, error_count) tuple.
    assert type(dmail_address) is db.DmailAddress, type(dmail_address)

    new_cnt = 0
    old_cnt = 0
    err_cnt = 0

    addr_key = dmail_address.keys[0]
    scan_target = addr_key.target_key
    sig_bits = addr_key.difficulty

    # Scan cursor; moved forward to each found key as the sweep progresses.
    cursor = scan_target

    def _already_have_dbcall():
        # Executor-thread check: is this Dmail key already in the database?
        with self.db.open_session() as sess:
            count_q = sess.query(func.count("*"))\
                .select_from(db.DmailMessage)\
                .filter(db.DmailMessage.data_key == dmail_key)
            return bool(count_q.scalar())

    while True:
        data_rw = yield from self.task_engine.send_find_key(\
            cursor, target_key=scan_target, significant_bits=sig_bits,\
            retry_factor=100)

        dmail_key = data_rw.data_key
        cursor = dmail_key

        if not dmail_key:
            # Sweep exhausted.
            if log.isEnabledFor(logging.INFO):
                log.info("No more Dmails found for address (id=[{}])."\
                    .format(dmail_address.id))
            break

        # key_enc is bound under INFO only; the DEBUG use below is safe
        # because DEBUG enabled implies INFO enabled.
        if log.isEnabledFor(logging.INFO):
            key_enc = mbase32.encode(dmail_key)
            log.info("Found dmail key: [{}].".format(key_enc))

        already_have = yield from self.loop.run_in_executor(\
            None, _already_have_dbcall)

        if already_have:
            if log.isEnabledFor(logging.DEBUG):
                log.debug("Ignoring dmail (key=[{}]) we already have."\
                    .format(key_enc))
            old_cnt += 1
            continue

        # One bad Dmail must not abort the whole sweep.
        try:
            yield from self._fetch_and_save_dmail(\
                dmail_key, dmail_address, addr_key)
            new_cnt += 1
        except Exception as e:
            log.exception("Trying to fetch and save Dmail for key [{}]"\
                " caused exception: {}"\
                    .format(mbase32.encode(dmail_key), e))
            err_cnt += 1

    if log.isEnabledFor(logging.INFO):
        if new_cnt:
            log.info("Moved [{}] Dmails to Inbox.".format(new_cnt))
        else:
            log.info("No new Dmails.")

    return new_cnt, old_cnt, err_cnt
def fetch_dmail(self, key, x=None, target_key=None):
    """Fetch the Dmail referred to by key from the network.

    Returns a (Dmail, valid_sig) tuple -- a Dmail object, not a
    db.DmailMessage object. Without x, the raw block data is returned
    instead; (None, None) means nothing was found.
    """
    data_rw = yield from self.task_engine.send_get_targeted_data(key)

    raw = data_rw.data
    if not raw:
        return None, None
    if not x:
        # No DH private component supplied; hand back the raw data.
        return raw, None

    tb = mp.TargetedBlock(raw)

    if target_key and tb.target_key != target_key:
        raise DmailException(\
            "TargetedBlock->target_key [{}] does not match request"\
            " [{}]."\
                .format(mbase32.encode(tb.target_key),\
                    mbase32.encode(target_key)))

    dw = DmailWrapper(tb.buf, mp.TargetedBlock.BLOCK_OFFSET)

    if dw.ssm != "mdh-v1":
        raise DmailException(\
            "Unrecognized key exchange method in dmail [{}]."\
                .format(dw.ssm))

    # Rebuild the shared secret from our private x and the sender's f.
    dh = dhgroup14.DhGroup14()
    dh.x = x
    dh.generate_e()
    dh.f = dw.ssf

    # Our regenerated e must equal the e the sender encrypted against.
    if dw.sse != dh.e:
        raise DmailException(\
            "Dmail [{}] is encrypted with a different e [{}] than"\
            " the specified x resulted in [{}]."\
                .format(mbase32.encode(data_rw.data_key), dw.sse, dh.e))

    dh.calculate_k()

    enc_key = self._generate_encryption_key(tb.target_key, dh.k)
    plain = enc.decrypt_data_block(dw.data_enc, enc_key)

    if not plain:
        raise DmailException("Dmail data was empty.")

    result = Dmail(plain, 0, dw.data_len)

    if not dw.signature:
        return result, False

    # Verify the sender's signature over the ciphertext.
    sender_key = rsakey.RsaKey(result.sender_pubkey)
    return result,\
        sender_key.verify_rsassa_pss_sig(dw.data_enc, dw.signature)