def _peer_authenticated(self, key):
    """Record the peer's verified public key and lazily derive identity fields.

    Called once the remote side has proven ownership of *key*.
    """
    self.node_key = key

    # Derive the node id from the key bytes only if it is not already known.
    if not self.node_id:
        self.node_id = enc.generate_ID(key.asbytes())

    # Distance (in the DHT keyspace) is likewise computed only when missing.
    if not self.distance:
        self.update_distance()
def main():
    """Parse CLI options, generate node keys, start and run the net engine.

    Exits the process with status 1 when the engine loop finishes.
    """
    global in_pipe, out_pipe, public_key, private_key

    try:
        docopt_config = "Usage: my_program.py [--port=PORT] [--connect=PORT]"
        arguments = docopt.docopt(docopt_config)
        port = arguments["--port"]
        if port is None:
            port = 5555
        connect_dest = arguments["--connect"]
    except docopt.DocoptExit as e:
        # BUG FIX: DocoptExit has no `.message` attribute on Python 3;
        # printing the exception itself yields the usage text.
        print(e)
        return

    context = zmq.Context()
    in_pipe = zpipe(context)
    out_pipe = zpipe(context)

    loop = asyncio.get_event_loop()

    net_config = {"port": port}

    # Generate Node Keys & Id.
    private_key = enc.generate_RSA(4096)
    public_key = private_key.publickey()

    node_id = enc.generate_ID(public_key.exportKey("DER"))
    debug("node_id=[%s]." % node_id.hexdigest())

    # Start Net Engine in a worker thread via the default executor.
    zmq_future = loop.run_in_executor(
        None, engageNet, loop, context, out_pipe[0], in_pipe[1], net_config)

    # Connect for testing.
    if connect_dest is not None:
        out_pipe[1].send_multipart(
            [b"conn", "tcp://{}".format(connect_dest).encode()])

    try:
        loop.run_until_complete(zmq_future)
    except BaseException as e:
        handleException("loop.run_until_complete()", e)

    out_pipe[1].send_multipart([b"shutdown"])

    # BUG FIX: cancel the future and stop the loop BEFORE closing it; the
    # original closed the loop and then called cancel(), which operates on
    # an already-closed loop.
    zmq_future.cancel()
    loop.stop()
    loop.close()

    sys.exit(1)
def dbcall():
    # Synchronous DB work; intended to be run off the event loop (executor
    # thread). Inserts previously-unknown peers, batching commits.
    with self.node.db.open_session() as sess:
        tlocked = False  # Whether we currently hold the Peer table lock.
        batch = []       # Peers added since the last commit.
        added = []       # All peers actually persisted, returned to caller.

        for peer in peers:
            assert type(peer) is Peer

            if not check_address(peer.address):
                continue

            # Lock the table lazily, only once there is real work to do.
            if not tlocked:
                self.node.db.lock_table(sess, Peer)
                tlocked = True

            q = sess.query(func.count("*"))

            if peer.pubkey:
                assert peer.node_id is None
                # Derive the node id (hash of pubkey) and keyspace position.
                peer.node_id = enc.generate_ID(peer.pubkey)
                peer.distance, peer.direction = calc_log_distance(self.node_id, peer.node_id)
                q = q.filter(Peer.node_id == peer.node_id)
            elif peer.address:
                assert peer.node_id is None
                q = q.filter(Peer.address == peer.address)

            # Skip duplicates already present in the database.
            if q.scalar() > 0:
                if log.isEnabledFor(logging.DEBUG):
                    log.debug("Peer [{}] already in list.".format(peer.address))
                continue

            peer.connected = False

            if log.isEnabledFor(logging.INFO):
                log.info("Adding Peer [{}].".format(peer.address))

            sess.add(peer)
            batch.append(peer)

            # Commit in batches of 10; touching dbpeer.id forces the lazy
            # attribute to load while still inside this thread/session.
            if len(batch) == 10:
                sess.commit()
                for dbpeer in batch:
                    fetch_id_in_thread = dbpeer.id
                added.extend(batch)
                batch.clear()
                sess.expunge_all()
                tlocked = False  # Commit released the lock; re-lock on demand.

        # Flush any final partial batch.
        if batch and tlocked:
            sess.commit()
            for dbpeer in batch:
                fetch_id_in_thread = dbpeer.id
            added.extend(batch)
            sess.expunge_all()

        return added
def main():
    """Parse CLI options, generate node keys, start and run the net engine.

    Duplicate variant of the other `main`; same fixes applied. Exits the
    process with status 1 when the engine loop finishes.
    """
    global in_pipe, out_pipe, public_key, private_key

    try:
        docopt_config = "Usage: my_program.py [--port=PORT] [--connect=PORT]"
        arguments = docopt.docopt(docopt_config)
        port = arguments["--port"]
        if port is None:
            port = 5555
        connect_dest = arguments["--connect"]
    except docopt.DocoptExit as e:
        # BUG FIX: DocoptExit has no `.message` attribute on Python 3;
        # printing the exception itself yields the usage text.
        print(e)
        return

    context = zmq.Context()
    in_pipe = zpipe(context)
    out_pipe = zpipe(context)

    loop = asyncio.get_event_loop()

    net_config = {"port": port}

    # Generate Node Keys & Id.
    private_key = enc.generate_RSA(4096)
    public_key = private_key.publickey()

    node_id = enc.generate_ID(public_key.exportKey("DER"))
    debug("node_id=[%s]." % node_id.hexdigest())

    # Start Net Engine in a worker thread via the default executor.
    zmq_future = loop.run_in_executor(
        None, engageNet, loop, context, out_pipe[0], in_pipe[1], net_config)

    # Connect for testing.
    if connect_dest is not None:
        out_pipe[1].send_multipart(
            [b"conn", "tcp://{}".format(connect_dest).encode()])

    try:
        loop.run_until_complete(zmq_future)
    except BaseException as e:
        handleException("loop.run_until_complete()", e)

    out_pipe[1].send_multipart([b"shutdown"])

    # BUG FIX: cancel the future and stop the loop BEFORE closing it; the
    # original closed the loop and then called cancel().
    zmq_future.cancel()
    loop.stop()
    loop.close()

    sys.exit(1)
def main():
    """Generate a 4096-bit RSA key pair and log the derived node id."""
    debug("Entered main().")

    private_key = enc.generate_RSA(4096)
    public_key = private_key.publickey()

    debug("Private Key=[%s], Public Key=[%s]."
        % (str(private_key.exportKey("PEM")), str(public_key.exportKey("PEM"))))

    # The node id is the hash of the DER-encoded public key.
    node_digest = enc.generate_ID(public_key.exportKey("DER"))
    debug("id=[%s]." % node_digest.hexdigest())
def _set_upload_page(content):
    """Install *content* as the upload page and precompute its static variant.

    The static variant has the updateable-key controls hidden; its content
    id (hash) is cached alongside it for ETag-style caching.
    """
    global static_upload_page_content, upload_page_content

    upload_page_content = content

    # Build the static-mode page: hide updateable-key UI, show static UI.
    static = content\
        .replace(b"${UPDATEABLE_KEY_MODE_DISPLAY}", b"display: none")\
        .replace(b"${STATIC_MODE_DISPLAY}", b"")

    static_upload_page_content[0] = static
    static_upload_page_content[1] =\
        mbase32.encode(enc.generate_ID(static))
def _send_content(self, content_entry, cacheable=True, content_type=None):
    # Send an HTTP response for *content_entry*, with ETag-based caching.
    # content_entry may be a (content, content_id) list/tuple, or bare
    # content (in which case caching is disabled).
    if type(content_entry) in (list, tuple):
        content = content_entry[0]
        content_id = content_entry[1]
    else:
        content = content_entry
        cacheable = False

    # Rewrite internal scheme links when no browser plugin handles them.
    # NOTE(review): this runs before the callable(content) check below, so a
    # callable content without the plugin would fail here — confirm callers
    # never combine those two cases.
    if not self.maalstroom_plugin_used:
        content =\
            content.replace(b"morphis://", self.maalstroom_url_prefix)

    # Lazily compute and memoize the content id (hash) into the entry.
    if cacheable and not content_id:
        if callable(content):
            content = content()
        content_id = mbase32.encode(enc.generate_ID(content))
        content_entry[1] = content_id

    # Conditional-request handling: matching ETag -> 304 Not Modified,
    # unless the client forced a refresh with Cache-Control: max-age=0.
    if cacheable and self.headers["If-None-Match"] == content_id:
        cache_control = self.headers["Cache-Control"]
        if cache_control != "max-age=0":
            self.send_response(304)
            if cache_control:
                # This should only have been sent for an updateable key.
                self.send_header("Cache-Control", "max-age=15, public")
            else:
                self.send_header("ETag", content_id)
            self.send_header("Content-Length", 0)
            self.end_headers()
            return

    if callable(content):
        content = content()

    # Full 200 response.
    self.send_response(200)
    self.send_header("Content-Length", len(content))
    self.send_header("Content-Type",\
        "text/html" if content_type is None else content_type)
    if cacheable:
        self.send_header("Cache-Control", "public")
        self.send_header("ETag", content_id)
    else:
        self._send_no_cache()
    self.end_headers()
    self.wfile.write(content)
    return
def __init__(self, node, bind_address=None):
    """Initialize the peer-management engine for *node*.

    NOTE(review): the bind_address parameter is not stored here
    (self._bind_address is set to None) — confirm it is applied elsewhere.
    """
    self.node = node
    # Node id is the hash of the node's public key bytes.
    self.node_id = enc.generate_ID(node.node_key.asbytes())
    self.loop = node.loop
    self.running = False
    self.server = None #Task.
    self.server_protocol = None

    self.shells = {}

    self.forced_connects = {} # {id, Peer}
    self.pending_connections = {} # {Task, Peer->dbid}
    self.peers = {} # {protocol.address: Peer}.
    # One bucket per keyspace bit, Kademlia-style. [{addr: Peer}]
    self.peer_buckets = [{} for i in range(NODE_ID_BITS)]
    self.peer_trie = bittrie.BitTrie() # {node_id, Peer}

    # Set once the network protocol is up and usable.
    self.protocol_ready = asyncio.Event(loop=self.loop)

    self.last_db_peer_count = 0

    # Connection-count targets.
    self.minimum_connections = 32
    self.maximum_connections = 256
    self.hard_maximum_connections = self.maximum_connections * 2

    self.connect_peers = None # ["host:port"]

    self._bind_address = None
    self._bind_port = None

    self._next_request_id = 0

    # Reentrancy guards / handles for periodic maintenance work.
    self._doing_process_connection_count = False
    self._process_connection_count_handle = None
    # datetime(1, 1, 1) acts as a "never ran" sentinel timestamp.
    self._last_process_connection_count = datetime(1, 1, 1)

    self._doing_stabilize = False
    self._do_stabilize_handle = None
    self._last_stabilize = None

    self.tasks = ct.ChordTasks(self)

    self.furthest_data_block = b""
def dbcall():
    # Persist a received dmail message; returns False if it is already
    # stored (deduplicated by data_key).
    # NOTE(review): returns None (not True) on success — callers appear to
    # only test for False; confirm.
    with handler.node.db.open_session() as sess:
        handler.node.db.lock_table(sess, DmailMessage)

        # Deduplicate on the message's data key.
        q = sess.query(func.count("*"))\
            .filter(DmailMessage.data_key == dmail_key)
        if q.scalar():
            return False

        # Resolve the local address row this dmail was delivered to.
        q = sess.query(DmailAddress.id)\
            .filter(DmailAddress.site_key == dmail_addr)
        dmail_address = q.first()

        msg = DmailMessage()
        msg.dmail_address_id = dmail_address.id
        msg.data_key = dmail_key
        # Sender key is the hash of the sender's public key, when signed.
        msg.sender_dmail_key =\
            enc.generate_ID(dmailobj.sender_pubkey)\
                if dmailobj.sender_pubkey else None
        msg.sender_valid = valid_sig
        msg.subject = dmailobj.subject
        msg.date = mutil.parse_iso_datetime(dmailobj.date)
        msg.hidden = False
        msg.read = False

        # New messages land in the Inbox tag.
        tag = DmailTag()
        tag.name = "Inbox"
        msg.tags = [tag]

        msg.parts = []
        for part in dmailobj.parts:
            dbpart = DmailPart()
            dbpart.mime_type = part.mime_type
            dbpart.data = part.data
            msg.parts.append(dbpart)

        sess.add(msg)
        sess.commit()
def encode(self):
    # Serialize this TargetedBlock. Layout after the superclass header:
    #   noonce | target_key | block_hash | block
    # where block_hash is written over a placeholder once the block bytes
    # are known.
    nbuf = super().encode()

    assert len(self.noonce) == TargetedBlock.NOONCE_SIZE
    nbuf += self.noonce

    assert self.target_key is not None and len(self.target_key) == consts.NODE_ID_BYTES
    nbuf += self.target_key

    nbuf += b" " * consts.NODE_ID_BYTES # block_hash placeholder.

    assert len(nbuf) == TargetedBlock.BLOCK_OFFSET
    # The inner block appends itself to the buffer in place.
    self.block.encode(nbuf)

    # Hash covers only the inner block bytes; patch it over the placeholder.
    self.block_hash = enc.generate_ID(nbuf[TargetedBlock.BLOCK_OFFSET :])
    block_hash_offset = TargetedBlock.BLOCK_OFFSET - consts.NODE_ID_BYTES
    nbuf[block_hash_offset : TargetedBlock.BLOCK_OFFSET] = self.block_hash

    return nbuf
def __find_key(rp):
    """Worker loop: brute-force RSA keys until one's public-key hash,
    mbase32-encoded, starts with the requested prefix, then send it back.
    """
    wid, prefix = rp.recv()

    while True:
        candidate = rsakey.RsaKey.generate(bits=4096)
        digest = enc.generate_ID(candidate.asbytes())

        if mbase32.encode(digest).startswith(prefix):
            # Found a vanity match; ship the encoded private key to parent.
            rp.send(candidate._encode_key())
            return
def encode(self):
    # Serialize this TargetedBlock. Layout after the superclass header:
    #   nonce | target_key | block_hash | block
    # where block_hash is written over a placeholder once the block bytes
    # are known.
    nbuf = super().encode()

    assert len(self.nonce) == TargetedBlock.NOONCE_SIZE
    nbuf += self.nonce

    assert self.target_key is not None\
        and len(self.target_key) == consts.NODE_ID_BYTES
    nbuf += self.target_key

    nbuf += b' ' * consts.NODE_ID_BYTES # block_hash placeholder.

    assert len(nbuf) == TargetedBlock.BLOCK_OFFSET
    # The inner block appends itself to the buffer in place.
    self.block.encode(nbuf)

    # Hash covers only the inner block bytes; patch it over the placeholder.
    self.block_hash = enc.generate_ID(nbuf[TargetedBlock.BLOCK_OFFSET:])
    block_hash_offset = TargetedBlock.BLOCK_OFFSET - consts.NODE_ID_BYTES
    nbuf[block_hash_offset:TargetedBlock.BLOCK_OFFSET] = self.block_hash

    return nbuf
def _format_dmail(dm, valid_sig):
    """Render a dmail (DB row or freshly-decoded object) as display text.

    :param dm: either a DmailMessage DB row or a decoded dmail object.
    :param valid_sig: whether the sender's signature verified.
    :return: the formatted message as a single string.
    """
    from_db = type(dm) is DmailMessage

    # BUG FIX (idiom): the original did `dmail_text = []` then
    # `dmail_text += "some string"`, which extends the list one CHARACTER at
    # a time; it only worked because ''.join reassembled the chars. Append
    # whole strings instead — output is identical.
    dmail_text = []

    if (from_db and dm.sender_dmail_key) or (not from_db and dm.sender_pubkey):
        if from_db:
            sender_dmail_key = dm.sender_dmail_key
        else:
            # Sender address is the hash of the sender's public key.
            sender_dmail_key = enc.generate_ID(dm.sender_pubkey)

        if valid_sig:
            dmail_text.append("Sender Address Verified.\n\n")
        else:
            dmail_text.append("WARNING: Sender Address Forged!\n\n")

        dmail_text.append("From: {}\n".format(mbase32.encode(sender_dmail_key)))

    dmail_text.append("Subject: {}\n".format(dm.subject))

    if from_db:
        date_fmtted = dm.date
    else:
        date_fmtted = mutil.parse_iso_datetime(dm.date)
    dmail_text.append("Date: {}\n".format(date_fmtted))

    dmail_text.append('\n')

    # Body parts, separated by a marker when there is more than one.
    i = 0
    for part in dm.parts:
        dmail_text.append(part.data.decode())
        dmail_text.append('\n')

        if len(dm.parts) > 1:
            dmail_text.append("----- ^ dmail part #{} ^ -----\n\n".format(i))
        i += 1

    return ''.join(dmail_text)
def __find_noonce(rp):
    """Proof-of-work worker: find nonce bytes that bring the hash of *data*
    within nbits of *prefix* in log-distance, then send them to the parent.

    Each worker starts at its worker id and strides by WORKERS so the
    search space is partitioned without coordination.
    """
    wid, prefix, nbits, data, noonce_offset, noonce_size = rp.recv()

    # Any hash within (HASH_BITS - nbits) log-distance of prefix matches.
    max_dist = HASH_BITS - nbits
    nbytes = int(nbits / 8)
    nbytes += 4 # Extra bytes to increase probability of enough possibilities.
    nbytes = min(nbytes, noonce_size)

    # Only the trailing nbytes of the nonce field are varied; the rest of
    # the field keeps its existing contents.
    ne = noonce_offset + noonce_size
    noonce_offset = ne - nbytes

    noonce = wid

    while True:
        noonce_bytes = noonce.to_bytes(nbytes, "big")
        data[noonce_offset:ne] = noonce_bytes

        h = enc.generate_ID(data)

        try:
            dist, direction = mutil.calc_log_distance(h, prefix)
            match = dist <= max_dist and direction == -1
        except IndexError:
            # calc_log_distance raises when the hash equals the prefix
            # exactly — that is trivially a match.
            match = True

        if match:
            rp.send(noonce_bytes)
            return

        # Stride by worker count to avoid overlapping other workers.
        noonce += WORKERS
def dbcall():
    # Persist a received dmail message; returns False if it is already
    # stored (deduplicated by data_key).
    # NOTE(review): returns None (not True) on success — confirm callers
    # only test for False.
    with self.db.open_session() as sess:
        self.db.lock_table(sess, db.DmailMessage)

        # Deduplicate on the message's data key.
        q = sess.query(func.count("*")).select_from(db.DmailMessage)\
            .filter(db.DmailMessage.data_key == dmail_message_key)
        if q.scalar():
            return False

        msg = db.DmailMessage()
        msg.dmail_address_id = dmail_address.id
        msg.dmail_key_id = address_key.id
        msg.data_key = dmail_message_key
        # Sender key is the hash of the sender's public key, when signed.
        msg.sender_dmail_key =\
            enc.generate_ID(dmobj.sender_pubkey)\
                if dmobj.sender_pubkey else None
        msg.sender_valid = valid_sig
        msg.subject = dmobj.subject
        msg.date = mutil.parse_iso_datetime(dmobj.date)
        msg.hidden = False
        msg.read = False
        msg.deleted = False

        # New messages land in the Inbox tag.
        attach_dmail_tag(sess, msg, "Inbox")

        msg.parts = []
        for part in dmobj.parts:
            dbpart = db.DmailPart()
            dbpart.mime_type = part.mime_type
            dbpart.data = part.data
            msg.parts.append(dbpart)

        sess.add(msg)

        sess.commit()
def _generate_encryption_key(self, target_key, k):
    """Derive the symmetric dmail encryption key from the recipient's
    target key and the Diffie-Hellman shared secret *k*.

    The surrounding salt strings are fixed protocol constants; changing
    them would make existing dmails undecryptable.
    """
    salt_head =\
        b"The life forms running github are more retarded than any retard!"
    salt_tail =\
        b"https://github.com/nixxquality/WebMConverter/commit/"\
        b"c1ac0baac06fa7175677a4a1bf65860a84708d67"

    return enc.generate_ID(
        salt_head + target_key + sshtype.encodeMpint(k) + salt_tail)
def _send_dmail(self, from_asymkey, recipient, dmail_bytes, signature):
    """Encrypt *dmail_bytes* for *recipient*, do the required proof-of-work,
    and store the resulting TargetedBlock on the network.

    Generator coroutine (`yield from`). Returns the number of nodes that
    stored the block.
    """
    assert type(recipient) is DmailSite

    # Read in recipient DmailSite.
    root = recipient.root
    sse = sshtype.parseMpint(base58.decode(root["sse"]))[1]
    target_enc = root["target"]
    difficulty = root["difficulty"]

    # Calculate a shared secret (DH group 14, recipient's public part sse).
    dh = dhgroup14.DhGroup14()
    dh.generate_x()
    dh.generate_e()
    dh.f = sse
    k = dh.calculate_k()

    target_key = mbase32.decode(target_enc)
    key = self._generate_encryption_key(target_key, k)

    # Encrypt the Dmail bytes. m is the ciphertext, r a remainder block.
    m, r = enc.encrypt_data_block(dmail_bytes, key)
    if m:
        if r:
            m = m + r
    else:
        # NOTE(review): this else pairs with the outer `if m` — when the
        # ciphertext body is empty the remainder alone is used; confirm.
        m = r

    # Store it in a DmailWrapper.
    dw = DmailWrapper()
    dw.ssm = _dh_method_name
    dw.sse = sse
    dw.ssf = dh.e
    dw.data_len = len(dmail_bytes)
    dw.data_enc = m

    # Store the DmailWrapper in a TargetedBlock.
    tb = mp.TargetedBlock()
    tb.target_key = target_key
    tb.nonce = int(0).to_bytes(64, "big")  # Placeholder until POW is done.
    tb.block = dw

    tb_data = tb.encode()
    tb_header = tb_data[:mp.TargetedBlock.BLOCK_OFFSET]

    # Do the POW on the TargetedBlock (runs in a thread executor).
    if log.isEnabledFor(logging.INFO):
        log.info(\
            "Attempting work on dmail (target=[{}], difficulty=[{}])."\
                .format(target_enc, difficulty))

    def threadcall():
        # One-line purpose: brute-force a nonce meeting the difficulty.
        return brute.generate_targeted_block(\
            target_key, difficulty, tb_header,\
            mp.TargetedBlock.NOONCE_OFFSET,\
            mp.TargetedBlock.NOONCE_SIZE)

    nonce_bytes = yield from self.loop.run_in_executor(None, threadcall)

    if log.isEnabledFor(logging.INFO):
        log.info("Work found nonce [{}].".format(nonce_bytes))

    # Patch the found nonce into both the full data and the header copy.
    mp.TargetedBlock.set_nonce(tb_data, nonce_bytes)

    if log.isEnabledFor(logging.INFO):
        mp.TargetedBlock.set_nonce(tb_header, nonce_bytes)
        log.info("Message key=[{}]."\
            .format(mbase32.encode(enc.generate_ID(tb_header))))

    # key_callback captures the network key the store operation chooses.
    key = None

    def key_callback(val):
        nonlocal key
        key = val

    if log.isEnabledFor(logging.DEBUG):
        log.debug("TargetedBlock dump=[\n{}]."\
            .format(mutil.hex_dump(tb_data)))

    # Upload the TargetedBlock to the network.
    log.info("Sending dmail to the network.")

    # Retry storing until at least 3 nodes hold it or retries are exhausted.
    total_storing = 0
    retry = 0
    while True:
        storing_nodes = yield from\
            self.task_engine.send_store_targeted_data(\
                tb_data, store_key=True, key_callback=key_callback,\
                retry_factor=retry * 10)

        total_storing += storing_nodes

        if total_storing >= 3:
            break
        if retry > 32:
            break
        elif retry > 3:
            yield from asyncio.sleep(1)
        retry += 1

    key_enc = mbase32.encode(key)
    id_enc = mbase32.encode(enc.generate_ID(key))

    if log.isEnabledFor(logging.INFO):
        log.info("Dmail sent; key=[{}], id=[{}], storing_nodes=[{}]."\
            .format(key_enc, id_enc, total_storing))

    return total_storing
def _generate_encryption_key(self, target_key, k):
    """Derive the symmetric dmail encryption key from the recipient's
    target key and the Diffie-Hellman shared secret *k*.

    The surrounding salt strings are fixed protocol constants; changing
    them would make existing dmails undecryptable.
    """
    head =\
        b"The life forms running github are more retarded than any"\
        b" retard!"
    tail =\
        b"https://github.com/nixxquality/WebMConverter/commit/"\
        b"c1ac0baac06fa7175677a4a1bf65860a84708d67"

    return enc.generate_ID(head + target_key + sshtype.encodeMpint(k) + tail)
def __main():
    """mcc entry point (generator coroutine): parse arguments, connect a
    lightweight client node, and execute the requested dmail commands.
    """
    global loop

    log.info("mcc running.")

    parser = argparse.ArgumentParser()
    parser.add_argument(\
        "--address",\
        help="The address of the Morphis node to connect to.",\
        default="127.0.0.1:4250")
    parser.add_argument(\
        "--create-dmail",\
        help="Generate and upload a new dmail site.",\
        action="store_true")
    parser.add_argument(\
        "--dburl",\
        help="Specify the database url to use.")
    parser.add_argument(\
        "--fetch-dmail",
        help="Fetch dmail for specified key_id.")
    parser.add_argument(\
        "-i",\
        help="Read file as stdin.")
    parser.add_argument("--nn", type=int,\
        help="Node instance number.")
    parser.add_argument(\
        "--prefix",\
        help="Specify the prefix for various things (currently --create-dmail"\
            ").")
    parser.add_argument(\
        "--scan-dmail",\
        help="Scan the network for available dmails.")
    parser.add_argument(\
        "--send-dmail",\
        help="Send stdin as a dmail with the specified subject. The"\
            " sender and recipients may be specified at the beginning of the"\
            " data as with email headers: 'from: ' and 'to: '.")
    parser.add_argument(\
        "--stat",\
        help="Report node status.",\
        action="store_true")
    parser.add_argument("-l", dest="logconf",\
        help="Specify alternate logging.ini [IF SPECIFIED, THIS MUST BE THE"\
            " FIRST PARAMETER!].")
    parser.add_argument(\
        "--dmail-target",\
        help="Specify the dmail target to validate dmail against.")
    parser.add_argument(\
        "-x",\
        help="Specify the x (Diffie-Hellman private secret) to use.")

    args = parser.parse_args()

    # Load or generate client mcc key.
    key_filename = "data/mcc_key-rsa.mnk"
    if os.path.exists(key_filename):
        log.info("mcc private key file found, loading.")
        client_key = rsakey.RsaKey(filename=key_filename)
    else:
        log.info("mcc private key file missing, generating.")
        client_key = rsakey.RsaKey.generate(bits=4096)
        client_key.write_private_key_file(key_filename)

    # Connect a Morphis Client (lightweight Node) instance.
    mc = client.Client(loop, client_key=client_key, address=args.address)
    r = yield from mc.connect()

    if not r:
        log.warning("Connection failed; exiting.")
        loop.stop()
        return

    dbase = init_db(args)
    de = dmail.DmailEngine(mc, dbase)

    log.info("Processing command requests...")

    if args.stat:
        r = yield from mc.send_command("stat")
        print(r.decode("UTF-8"), end='')

    if args.create_dmail:
        log.info("Creating and uploading dmail site.")

        privkey, data_key, dms, storing_nodes =\
            yield from de.generate_dmail_address(args.prefix)

        print("privkey: {}".format(base58.encode(privkey._encode_key())))
        print("x: {}".format(base58.encode(sshtype.encodeMpint(dms.dh.x))))
        print("dmail address: {}".format(mbase32.encode(data_key)))
        # BUG FIX: this line used to re-print the encoded privkey instead of
        # the storing_nodes count the message claims to report.
        print("storing_nodes=[{}].".format(storing_nodes))

    if args.send_dmail:
        log.info("Sending dmail.")

        if args.i:
            with open(args.i, "rb") as fh:
                dmail_data = fh.read().decode()
        else:
            dmail_data = stdin.read()

        if log.isEnabledFor(logging.DEBUG):
            log.debug("dmail_data=[{}].".format(dmail_data))

        yield from de.send_dmail_text(args.send_dmail, dmail_data)

    if args.scan_dmail:
        log.info("Scanning dmail address.")

        addr, sig_bits = mutil.decode_key(args.scan_dmail)

        def key_callback(key):
            # One-line purpose: print each discovered dmail key.
            print("dmail key: [{}].".format(mbase32.encode(key)))

        yield from de.scan_dmail_address(\
            addr, sig_bits, key_callback=key_callback)

    if args.fetch_dmail:
        log.info("Fetching dmail for key=[{}].".format(args.fetch_dmail))

        key = mbase32.decode(args.fetch_dmail)

        if args.x:
            l, x_int = sshtype.parseMpint(base58.decode(args.x))
        else:
            x_int = None

        dmail_target = args.dmail_target

        dm, valid_sig = yield from de.fetch_dmail(key, x_int, dmail_target)

        if not dm:
            raise Exception("No dmail found.")

        if not x_int:
            # Without the DH secret we can only show the encrypted bytes.
            print("Encrypted dmail data=[\n{}].".format(mutil.hex_dump(dm)))
        else:
            print("Subject: {}\n".format(dm.subject))

            if dm.sender_pubkey:
                print("From: {}"\
                    .format(mbase32.encode(enc.generate_ID(dm.sender_pubkey))))

            i = 0
            for part in dm.parts:
                print("DmailPart[{}]:\n mime-type=[{}]\n data=[{}]\n"\
                    .format(i, part.mime_type, part.data))
                i += 1

    log.info("Disconnecting.")
    yield from mc.disconnect()

    loop.stop()
def dbcall():
    # Synchronous DB work; intended to be run off the event loop (executor
    # thread). Inserts previously-unknown peers, batching commits.
    with self.node.db.open_session() as sess:
        tlocked = False  # Whether we currently hold the Peer table lock.

        batch = []   # Peers added since the last commit.
        added = []   # All peers actually persisted, returned to caller.

        for peer in peers:
            assert type(peer) is Peer

            if not check_address(peer.address):
                continue

            # Lock the table lazily, only once there is real work to do.
            if not tlocked:
                self.node.db.lock_table(sess, Peer)
                tlocked = True

            q = sess.query(func.count("*")).select_from(Peer)

            if peer.pubkey:
                assert peer.node_id is None
                # Derive the node id (hash of pubkey) and keyspace position.
                peer.node_id = enc.generate_ID(peer.pubkey)
                peer.distance, peer.direction =\
                    calc_log_distance(\
                        self.node_id,\
                        peer.node_id)

                q = q.filter(Peer.node_id == peer.node_id)
            elif peer.address:
                assert peer.node_id is None

                q = q.filter(Peer.address == peer.address)

            # Skip duplicates already present in the database.
            if q.scalar() > 0:
                if log.isEnabledFor(logging.DEBUG):
                    log.debug("Peer [{}] already in list."\
                        .format(peer.address))
                continue

            peer.connected = False

            if log.isEnabledFor(logging.INFO):
                log.info("Adding Peer [{}].".format(peer.address))

            sess.add(peer)
            batch.append(peer)

            # Commit in batches of 10; touching dbpeer.id forces the lazy
            # attribute to load while still inside this thread/session.
            if len(batch) == 10:
                sess.commit()
                for dbpeer in batch:
                    fetch_id_in_thread = dbpeer.id
                added.extend(batch)
                batch.clear()
                sess.expunge_all()
                tlocked = False  # Commit released the lock.

        # Flush any final partial batch.
        if batch and tlocked:
            sess.commit()
            for dbpeer in batch:
                fetch_id_in_thread = dbpeer.id
            added.extend(batch)
            sess.expunge_all()

        return added
def __send_dmail(self, from_asymkey, recipient, dmail):
    """Encrypt and sign *dmail* for *recipient*, do the proof-of-work, and
    store the resulting TargetedBlock on the network.

    Generator coroutine (`yield from`). Returns the number of nodes that
    stored the block.
    """
    assert type(recipient) is DmailSite

    # Read the recipient's published DH public part and POW parameters.
    root = recipient.root
    sse = sshtype.parseMpint(base58.decode(root["sse"]))[1]
    target = root["target"]
    difficulty = root["difficulty"]

    # Calculate the shared secret (DH group 14).
    dh = dhgroup14.DhGroup14()
    dh.generate_x()
    dh.generate_e()
    dh.f = sse
    k = dh.calculate_k()

    target_key = mbase32.decode(target)
    key = self._generate_encryption_key(target_key, k)

    dmail_bytes = dmail.encode()

    # Encrypt; m is the ciphertext, r a remainder block.
    m, r = enc.encrypt_data_block(dmail_bytes, key)
    if m:
        if r:
            m = m + r
    else:
        # NOTE(review): this else pairs with the outer `if m` — when the
        # ciphertext body is empty the remainder alone is used; confirm.
        m = r

    # Wrap the ciphertext together with key-exchange data and signature.
    dw = DmailWrapper()
    dw.ssm = _dh_method_name
    dw.sse = sse
    dw.ssf = dh.e

    if from_asymkey:
        dw.signature = from_asymkey.calc_rsassa_pss_sig(m)
    else:
        dw.signature = b''  # Anonymous dmail: no signature.
    dw.data_len = len(dmail_bytes)
    dw.data_enc = m

    # Store the DmailWrapper in a TargetedBlock.
    tb = mp.TargetedBlock()
    tb.target_key = target_key
    tb.noonce = int(0).to_bytes(64, "big")  # Placeholder until POW is done.
    tb.block = dw

    tb_data = tb.encode()
    tb_header = tb_data[:mp.TargetedBlock.BLOCK_OFFSET]

    if log.isEnabledFor(logging.INFO):
        log.info(\
            "Attempting work on dmail (target=[{}], difficulty=[{}])."\
                .format(target, difficulty))

    def threadcall():
        # One-line purpose: brute-force a noonce meeting the difficulty.
        return brute.generate_targeted_block(\
            target_key, difficulty, tb_header,\
            mp.TargetedBlock.NOONCE_OFFSET,\
            mp.TargetedBlock.NOONCE_SIZE)

    noonce_bytes = yield from self.loop.run_in_executor(None, threadcall)

    if log.isEnabledFor(logging.INFO):
        log.info("Work found noonce [{}].".format(noonce_bytes))

    # Patch the found noonce into both the full data and the header copy.
    mp.TargetedBlock.set_noonce(tb_data, noonce_bytes)

    if log.isEnabledFor(logging.INFO):
        mp.TargetedBlock.set_noonce(tb_header, noonce_bytes)
        log.info("hash=[{}]."\
            .format(mbase32.encode(enc.generate_ID(tb_header))))

    # key_callback captures the network key the store operation chooses.
    key = None

    def key_callback(val):
        nonlocal key
        key = val

    log.info("Sending dmail to the network.")

    if log.isEnabledFor(logging.DEBUG):
        log.debug("dmail block data=[\n{}]."\
            .format(mutil.hex_dump(tb_data)))

    # Retry storing until at least 3 nodes hold it or retries are exhausted.
    total_storing = 0
    retry = 0
    while True:
        storing_nodes = yield from\
            self.task_engine.send_store_targeted_data(\
                tb_data, store_key=True, key_callback=key_callback,\
                retry_factor=retry * 10)

        total_storing += storing_nodes

        if total_storing >= 3:
            break
        if retry > 32:
            break
        elif retry > 3:
            yield from asyncio.sleep(1)
        retry += 1

    key_enc = mbase32.encode(key)
    id_enc = mbase32.encode(enc.generate_ID(key))

    if log.isEnabledFor(logging.INFO):
        log.info("Dmail sent; key=[{}], id=[{}], storing_nodes=[{}]."\
            .format(key_enc, id_enc, total_storing))

    return total_storing
def __main():
    """mcc entry point (generator coroutine): parse arguments, connect a
    lightweight client node, and execute the requested dmail commands.
    """
    global loop

    log.info("mcc running.")

    parser = argparse.ArgumentParser()
    parser.add_argument(\
        "--address",\
        help="The address of the Morphis node to connect to.",\
        default="127.0.0.1:4250")
    parser.add_argument(\
        "--create-dmail",\
        help="Generate and upload a new dmail site.",\
        action="store_true")
    parser.add_argument(\
        "--dburl",\
        help="Specify the database url to use.")
    parser.add_argument(\
        "--fetch-dmail",
        help="Fetch dmail for specified key_id.")
    parser.add_argument(\
        "-i",\
        help="Read file as stdin.")
    parser.add_argument("--nn", type=int,\
        help="Node instance number.")
    parser.add_argument(\
        "--prefix",\
        help="Specify the prefix for various things (currently --create-dmail"\
            ").")
    parser.add_argument(\
        "--scan-dmail",\
        help="Scan the network for available dmails.")
    parser.add_argument(\
        "--send-dmail",\
        help="Send stdin as a dmail with the specified subject. The"\
            " sender and recipients may be specified at the beginning of the"\
            " data as with email headers: 'from: ' and 'to: '.")
    parser.add_argument(\
        "--stat",\
        help="Report node status.",\
        action="store_true")
    parser.add_argument("-l", dest="logconf",\
        help="Specify alternate logging.ini [IF SPECIFIED, THIS MUST BE THE"\
            " FIRST PARAMETER!].")
    parser.add_argument(\
        "--dmail-target",\
        help="Specify the dmail target to validate dmail against.")
    parser.add_argument(\
        "-x",\
        help="Specify the x (Diffie-Hellman private secret) to use.")

    args = parser.parse_args()

    # Load or generate client mcc key.
    key_filename = "data/mcc_key-rsa.mnk"
    if os.path.exists(key_filename):
        log.info("mcc private key file found, loading.")
        client_key = rsakey.RsaKey(filename=key_filename)
    else:
        log.info("mcc private key file missing, generating.")
        client_key = rsakey.RsaKey.generate(bits=4096)
        client_key.write_private_key_file(key_filename)

    # Connect a Morphis Client (lightweight Node) instance.
    mc = client.Client(loop, client_key=client_key, address=args.address)
    r = yield from mc.connect()

    if not r:
        log.warning("Connection failed; exiting.")
        loop.stop()
        return

    dbase = init_db(args)
    de = dmail.DmailEngine(mc, dbase)

    log.info("Processing command requests...")

    if args.stat:
        r = yield from mc.send_command("stat")
        print(r.decode("UTF-8"), end='')

    if args.create_dmail:
        log.info("Creating and uploading dmail site.")

        privkey, data_key, dms =\
            yield from de.generate_dmail_address(args.prefix)

        print("privkey: {}".format(base58.encode(privkey._encode_key())))
        print("x: {}".format(base58.encode(sshtype.encodeMpint(dms.dh.x))))
        print("dmail address: {}".format(mbase32.encode(data_key)))

    if args.send_dmail:
        log.info("Sending dmail.")

        # Dmail body comes from a file (-i) or from stdin.
        if args.i:
            with open(args.i, "rb") as fh:
                dmail_data = fh.read().decode()
        else:
            dmail_data = stdin.read()

        if log.isEnabledFor(logging.DEBUG):
            log.debug("dmail_data=[{}].".format(dmail_data))

        yield from de.send_dmail_text(args.send_dmail, dmail_data)

    if args.scan_dmail:
        log.info("Scanning dmail address.")

        addr, sig_bits = mutil.decode_key(args.scan_dmail)

        def key_callback(key):
            # One-line purpose: print each discovered dmail key.
            print("dmail key: [{}].".format(mbase32.encode(key)))

        yield from de.scan_dmail_address(\
            addr, sig_bits, key_callback=key_callback)

    if args.fetch_dmail:
        log.info("Fetching dmail for key=[{}].".format(args.fetch_dmail))

        key = mbase32.decode(args.fetch_dmail)

        if args.x:
            l, x_int = sshtype.parseMpint(base58.decode(args.x))
        else:
            x_int = None

        dmail_target = args.dmail_target

        dm, valid_sig = yield from de.fetch_dmail(key, x_int, dmail_target)

        if not dm:
            raise Exception("No dmail found.")

        if not x_int:
            # Without the DH secret we can only show the encrypted bytes.
            print("Encrypted dmail data=[\n{}].".format(mutil.hex_dump(dm)))
        else:
            print("Subject: {}\n".format(dm.subject))

            if dm.sender_pubkey:
                print("From: {}"\
                    .format(mbase32.encode(enc.generate_ID(dm.sender_pubkey))))

            i = 0
            for part in dm.parts:
                print("DmailPart[{}]:\n mime-type=[{}]\n data=[{}]\n"\
                    .format(i, part.mime_type, part.data))
                i += 1

    log.info("Disconnecting.")
    yield from mc.disconnect()

    loop.stop()
def send_content(self, content_entry, cacheable=True, content_type=None,\
        charset=None):
    # Send an HTTP response for *content_entry* with ETag-based caching.
    # content_entry may be a (content, content_id[, content_type]) list or
    # bare content (in which case caching is disabled).
    if type(content_entry) in (list, tuple):
        content = content_entry[0]
        content_id = content_entry[1]

        # Optional third element overrides the content type.
        if len(content_entry) == 3 and not content_type:
            content_type = content_entry[2]
    else:
        content = content_entry
        cacheable = False

    # Default content type: HTML in the client's accepted charset.
    if not content_type:
        if not charset:
            charset = self.get_accept_charset()
        content_type = "text/html; charset={}".format(charset)

    if type(content) is str:
        if charset:
            content = content.encode(charset)
        else:
            content = content.encode()

    # Rewrite internal scheme links when no browser plugin handles them.
    # NOTE(review): this runs before the callable(content) check below, so a
    # callable content without the plugin would fail here — confirm callers
    # never combine those two cases.
    if not self.handler.maalstroom_plugin_used:
        content =\
            content.replace(\
                b"morphis://", self.handler.maalstroom_url_prefix)

    # Lazily compute and memoize the content id (hash) into the entry.
    if cacheable and not content_id:
        if callable(content):
            content = content()
        log.info("Generating content_id.")
        content_id = mbase32.encode(enc.generate_ID(content))
        content_entry[1] = content_id

    # Conditional-request handling: matching ETag -> 304 Not Modified,
    # unless the client forced a refresh with Cache-Control: no-cache.
    etag = self.handler.headers["If-None-Match"]
    if cacheable and etag == content_id:
        cache_control = self.handler.headers["Cache-Control"]
        if cache_control != "no-cache":
            self.send_response(304)
            self.send_header("Cache-Control", "public,max-age=300")
            self.send_header("ETag", content_id)
            self.send_header("Content-Length", 0)
            self.end_headers()
            self.finish_response()
            return

    if callable(content):
        content = content()

    # Full 200 response.
    self.send_response(200)
    self.send_default_headers()
    self.send_header("Content-Length", len(content))
    self.send_header("Content-Type", content_type)
    if cacheable:
        self.send_header("Cache-Control", "public,max-age=300")
        self.send_header("ETag", content_id)
    else:
        self._send_no_cache()
    self.send_frame_options_header()
    self.end_headers()
    self.write(content)
    self.finish_response()
    return
def send_content(self, content_entry, cacheable=True, content_type=None,\
        charset=None):
    # Send an HTTP response for *content_entry* with ETag-based caching.
    # content_entry may be a (content, content_id[, content_type]) list or
    # bare content (in which case caching is disabled).
    if type(content_entry) in (list, tuple):
        content = content_entry[0]
        content_id = content_entry[1]

        # Optional third element overrides the content type.
        if len(content_entry) == 3 and not content_type:
            content_type = content_entry[2]
    else:
        content = content_entry
        cacheable = False

    # Default content type: HTML in the client's accepted charset.
    if not content_type:
        if not charset:
            charset = self.get_accept_charset()
        content_type = "text/html; charset={}".format(charset)

    if type(content) is str:
        if charset:
            content = content.encode(charset)
        else:
            content = content.encode()

    # Rewrite internal scheme links when no browser plugin handles them.
    # NOTE(review): this runs before the callable(content) check below, so a
    # callable content without the plugin would fail here — confirm callers
    # never combine those two cases.
    if not self.handler.maalstroom_plugin_used:
        content =\
            content.replace(\
                b"morphis://", self.handler.maalstroom_url_prefix)

    # Lazily compute and memoize the content id (hash) into the entry.
    if cacheable and not content_id:
        if callable(content):
            content = content()
        log.info("Generating content_id.")
        content_id = mbase32.encode(enc.generate_ID(content))
        content_entry[1] = content_id

    # Conditional-request handling: matching ETag -> 304 Not Modified,
    # unless the client forced a refresh with Cache-Control: no-cache.
    etag = self.handler.headers["If-None-Match"]
    if cacheable and etag == content_id:
        # #TODO: Consider getting rid of this updateablekey support here
        # # because we don't send updateable keys this way ever.
        # updateable_key = etag.startswith("updateablekey-")
        cache_control = self.handler.headers["Cache-Control"]
        # if not (updateable_key and cache_control == "max-age=0")\
        #         and cache_control != "no-cache":
        if cache_control != "no-cache":
            self.send_response(304)
            # if updateable_key:
            #     p0 = etag.index('-')
            #     p1 = etag.index('-', p0 + 1)
            #     version = etag[p0:p1]
            #     self.send_header(\
            #         "X-Maalstroom-UpdateableKey-Version",\
            #         version)
            #     self.send_header("Cache-Control", "public,max-age=15")
            # else:
            self.send_header("Cache-Control", "public,max-age=300")
            self.send_header("ETag", content_id)
            self.send_header("Content-Length", 0)
            self.end_headers()
            self.finish_response()
            return

    if callable(content):
        content = content()

    # Full 200 response.
    self.send_response(200)
    self.send_header("Content-Length", len(content))
    self.send_header("Content-Type", content_type)
    if cacheable:
        self.send_header("Cache-Control", "public,max-age=300")
        self.send_header("ETag", content_id)
    else:
        self._send_no_cache()
    self.end_headers()
    self.write(content)
    self.finish_response()
    return