def _process_encrypted_buffer(self):
    blksize = 16

    if self.inCipher != None:
#        if len(self.buf) > 20: # max(blksize, 20): bs, hmacSize
        if len(self.buf) < blksize:
            return False

        if len(self.cbuf) == 0:
            out = self.inCipher.decrypt(self.buf[:blksize])
            if log.isEnabledFor(logging.DEBUG):
                log.debug("Decrypted [\n{}] to [\n{}]."\
                    .format(hex_dump(self.buf[:blksize]), hex_dump(out)))
            self.cbuf += out

            packet_length = struct.unpack(">L", out[:4])[0]
            log.debug("packet_length=[{}].".format(packet_length))

            if packet_length > MAX_PACKET_LENGTH:
                errmsg = "Illegal packet_length [{}] received."\
                    .format(packet_length)
                log.warning(errmsg)
                raise SshException(errmsg)

            # Add size of packet_length as we leave it in buf.
            self.bpLength = packet_length + 4

            self.buf = self.buf[blksize:]

            if self.bpLength == blksize:
                return True

        if len(self.buf) < min(\
                1024, self.bpLength - len(self.cbuf) + self.inHmacSize):
            return True

        l = min(len(self.buf), self.bpLength - len(self.cbuf))
        if not l:
            return True

        dsize = l - (l % blksize)

        blks = self.buf[:dsize]
        self.buf = self.buf[dsize:]

        assert len(blks) % blksize == 0,\
            "len(blks)=[{}], dsize=[{}], l=[{}],"\
            " len(self.buf)=[{}], len(self.cbuf)=[{}], blksize=[{}],"\
            " self.bpLength=[{}], type(blks)=[{}]."\
            .format(len(blks), dsize, l, len(self.buf),\
                len(self.cbuf), blksize, self.bpLength, type(blks))

        out = self.inCipher.decrypt(blks)
        self.cbuf += out

        if log.isEnabledFor(logging.DEBUG):
            log.debug("Decrypted [\n{}] to [\n{}].".format(hex_dump(blks), hex_dump(out)))
            log.debug("len(cbuf)={}, cbuf=[\n{}]".format(len(self.cbuf), hex_dump(self.cbuf)))
    else:
        self.cbuf = self.buf

    return True
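# The buffering rule above is easy to lose in the logging: only whole cipher
# blocks are ever handed to inCipher.decrypt(), and any trailing partial block
# stays in self.buf until more data arrives. A minimal standalone sketch of
# just that block-alignment step (hypothetical helper name, not part of this
# module; the real code also tracks bpLength and the HMAC tail):

def split_block_aligned(buf, blksize=16):
    "Return (decryptable, remainder) where len(decryptable) % blksize == 0."
    dsize = len(buf) - (len(buf) % blksize)
    return buf[:dsize], buf[dsize:]

_ready, _pending = split_block_aligned(bytearray(b"A" * 35))
assert len(_ready) == 32 and len(_pending) == 3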
def _data_received(self, data):
    if log.isEnabledFor(logging.DEBUG):
        log.debug("data_received(..): start.")
        log.debug("X: Received: [\n{}].".format(hex_dump(data)))

    if self.binaryMode:
        self.buf += data
        if not self.packet and self.inboundEnabled:
            self.process_buffer()
        log.debug("data_received(..): end (binaryMode).")
        return

    # Handle handshake packet, detect end.
    end = data.find(b"\r\n")
    if end != -1:
        self.buf += data[0:end]
        self.packet = self.buf
        self.buf = data[end+2:]
        self.binaryMode = True

        if self.waiter != None:
            self.waiter.set_result(False)
            self.waiter = None

        # The following would overwrite packet if it were a complete
        # packet in the buf.
#        if len(self.buf) > 0:
#            self.process_buffer()
    else:
        self.buf += data

    log.debug("data_received(..): end.")
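# Before binaryMode is enabled, the peer's handshake line is terminated by
# b"\r\n" and anything after the terminator already belongs to the binary
# packet stream. A minimal sketch of that split (hypothetical helper, for
# illustration only):

def split_handshake(buf):
    """Return (handshake_line, remaining_binary_data), or (None, buf) if the
    CRLF terminator has not arrived yet."""
    end = buf.find(b"\r\n")
    if end == -1:
        return None, buf
    return buf[:end], buf[end + 2:]

_line, _rest = split_handshake(b"SSH-2.0-example\r\n\x00\x00\x00\x1c")
assert _line == b"SSH-2.0-example" and _rest.startswith(b"\x00")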
def write_packet(self, packet):
    if log.isEnabledFor(logging.INFO):
        log.info("Writing packet_type=[{}] ({} bytes) to address=[{}]."\
            .format(packet.packet_type, len(packet.buf), self.address))
    if log.isEnabledFor(logging.DEBUG):
        log.debug("data=[\n{}].".format(hex_dump(packet.buf)))

    self.write_data([packet.buf])
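# write_packet() assumes packet.encode() already produced an RFC 4253 binary
# packet in packet.buf. For reference, a minimal sketch of that framing
# (simplified, unencrypted, 16-byte block size; an illustration, not the
# project's encoder):

import os
import struct

def frame_ssh_packet(payload, blksize=16):
    # packet_length does not count itself; padding must be at least 4 bytes
    # and pad (4 + 1 + len(payload) + padding) to a multiple of blksize.
    padding_length = blksize - ((len(payload) + 5) % blksize)
    if padding_length < 4:
        padding_length += blksize
    packet_length = 1 + len(payload) + padding_length
    return struct.pack(">LB", packet_length, padding_length)\
        + payload + os.urandom(padding_length)

assert len(frame_ssh_packet(b"\x14" + b"\x00" * 20)) % 16 == 0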
def channel_data(self, protocol, local_cid, data):
    if log.isEnabledFor(logging.DEBUG):
        log.debug("Received data: local_cid=[{}], value=[\n{}]."\
            .format(local_cid, mutil.hex_dump(data)))

    # Return value controls if the data gets added to the channel queue.
    r = yield from self.peer.engine.channel_data(\
        self.peer, local_cid, data)

    return r
def readline(self):
    buf = bytearray()
    savedcmd = None

    while True:
        packet = yield from self.queue.get()
        if not packet:
            log.info("Shell shutting down.")
            return None

        msg = BinaryMessage(packet)

        if log.isEnabledFor(logging.DEBUG + 1):
            if log.isEnabledFor(logging.DEBUG):
                log.debug("Received text:\n[{}]."\
                    .format(hex_dump(msg.value)))
            else:
                log.log(logging.DEBUG+1, "Received text [{}]."\
                    .format(msg.value))

        lenval = len(msg.value)

        if lenval == 1:
            char = msg.value[0]
            if char == 0x7f: # Backspace (DEL).
                if not buf:
                    continue
                self.write(LEFT_ARROW)
                self.write(b' ')
                self.write(LEFT_ARROW)
                self.flush()
                buf = buf[:-1]
                continue
            elif char == 0x04: # CTRL-D (EOT).
                self.writeln("quit")
                self.flush()
                return "quit"
        elif lenval == 3:
            if msg.value == UP_ARROW:
                if savedcmd == None:
                    savedcmd = buf.copy()
                self._replace_line(buf, self.lastcmd.encode("UTF-8"))
                continue
            elif msg.value == DOWN_ARROW:
                if savedcmd != None:
                    self._replace_line(buf, savedcmd)
                    savedcmd = None
                continue

        buf += msg.value

        # Echo back their input.
        rmsg = BinaryMessage()
        rmsg.value = msg.value.replace(b'\n', b"\r\n")
        self.peer.protocol.write_channel_data(self.local_cid, rmsg.encode())

        #TODO: Replace this hacky code that handles multibyte characters
        # with something better. This lets you type one and hit enter
        # without it breaking, but if you type one and hit backspace then
        # you are still doomed until you press CTRL-D to quit.
        outer_continue = False
        i = 0
        while True:
            i = buf.find(b'\r', i)
            if i == -1:
                outer_continue = True
                break
            try:
                line = buf[:i].decode()
            except Exception:
                i += 1
                continue
            buf = buf[i + 1:]
            return line

        if outer_continue:
            outer_continue = False
            continue
def __send_dmail(self, from_asymkey, recipient, dmail):
    assert type(recipient) is DmailSite

    root = recipient.root
    sse = sshtype.parseMpint(base58.decode(root["sse"]))[1]
    target = root["target"]
    difficulty = root["difficulty"]

    dh = dhgroup14.DhGroup14()
    dh.generate_x()
    dh.generate_e()
    dh.f = sse

    k = dh.calculate_k()

    target_key = mbase32.decode(target)

    key = self._generate_encryption_key(target_key, k)

    dmail_bytes = dmail.encode()

    m, r = enc.encrypt_data_block(dmail_bytes, key)
    if m:
        if r:
            m = m + r
    else:
        m = r

    dw = DmailWrapper()
    dw.ssm = _dh_method_name
    dw.sse = sse
    dw.ssf = dh.e

    if from_asymkey:
        dw.signature = from_asymkey.calc_rsassa_pss_sig(m)
    else:
        dw.signature = b''

    dw.data_len = len(dmail_bytes)
    dw.data_enc = m

    tb = mp.TargetedBlock()
    tb.target_key = target_key
    tb.noonce = int(0).to_bytes(64, "big")
    tb.block = dw

    tb_data = tb.encode()
    tb_header = tb_data[:mp.TargetedBlock.BLOCK_OFFSET]

    if log.isEnabledFor(logging.INFO):
        log.info(\
            "Attempting work on dmail (target=[{}], difficulty=[{}])."\
                .format(target, difficulty))

    def threadcall():
        return brute.generate_targeted_block(\
            target_key, difficulty, tb_header,\
            mp.TargetedBlock.NOONCE_OFFSET,\
            mp.TargetedBlock.NOONCE_SIZE)

    noonce_bytes = yield from self.loop.run_in_executor(None, threadcall)

    if log.isEnabledFor(logging.INFO):
        log.info("Work found noonce [{}].".format(noonce_bytes))

    mp.TargetedBlock.set_noonce(tb_data, noonce_bytes)

    if log.isEnabledFor(logging.INFO):
        mp.TargetedBlock.set_noonce(tb_header, noonce_bytes)
        log.info("hash=[{}]."\
            .format(mbase32.encode(enc.generate_ID(tb_header))))

    key = None

    def key_callback(val):
        nonlocal key
        key = val

    log.info("Sending dmail to the network.")

    if log.isEnabledFor(logging.DEBUG):
        log.debug("dmail block data=[\n{}]."\
            .format(mutil.hex_dump(tb_data)))

    total_storing = 0
    retry = 0
    while True:
        storing_nodes = yield from\
            self.task_engine.send_store_targeted_data(\
                tb_data, store_key=True, key_callback=key_callback,\
                retry_factor=retry * 10)

        total_storing += storing_nodes

        if total_storing >= 3:
            break

        if retry > 32:
            break
        elif retry > 3:
            yield from asyncio.sleep(1)

        retry += 1

    key_enc = mbase32.encode(key)
    id_enc = mbase32.encode(enc.generate_ID(key))

    if log.isEnabledFor(logging.INFO):
        log.info("Dmail sent; key=[{}], id=[{}], storing_nodes=[{}]."\
            .format(key_enc, id_enc, total_storing))

    return total_storing
def _send_dmail(self, from_asymkey, recipient, dmail_bytes, signature):
    assert type(recipient) is DmailSite

    # Read in recipient DmailSite.
    root = recipient.root
    sse = sshtype.parseMpint(base58.decode(root["sse"]))[1]
    target_enc = root["target"]
    difficulty = root["difficulty"]

    # Calculate a shared secret.
    dh = dhgroup14.DhGroup14()
    dh.generate_x()
    dh.generate_e()
    dh.f = sse

    k = dh.calculate_k()

    target_key = mbase32.decode(target_enc)

    key = self._generate_encryption_key(target_key, k)

    # Encrypt the Dmail bytes.
    m, r = enc.encrypt_data_block(dmail_bytes, key)
    if m:
        if r:
            m = m + r
    else:
        m = r

    # Store it in a DmailWrapper.
    dw = DmailWrapper()
    dw.ssm = _dh_method_name
    dw.sse = sse
    dw.ssf = dh.e
    dw.data_len = len(dmail_bytes)
    dw.data_enc = m

    # Store the DmailWrapper in a TargetedBlock.
    tb = mp.TargetedBlock()
    tb.target_key = target_key
    tb.nonce = int(0).to_bytes(64, "big")
    tb.block = dw

    tb_data = tb.encode()
    tb_header = tb_data[:mp.TargetedBlock.BLOCK_OFFSET]

    # Do the POW on the TargetedBlock.
    if log.isEnabledFor(logging.INFO):
        log.info(\
            "Attempting work on dmail (target=[{}], difficulty=[{}])."\
                .format(target_enc, difficulty))

    def threadcall():
        return brute.generate_targeted_block(\
            target_key, difficulty, tb_header,\
            mp.TargetedBlock.NOONCE_OFFSET,\
            mp.TargetedBlock.NOONCE_SIZE)

    nonce_bytes = yield from self.loop.run_in_executor(None, threadcall)

    if log.isEnabledFor(logging.INFO):
        log.info("Work found nonce [{}].".format(nonce_bytes))

    mp.TargetedBlock.set_nonce(tb_data, nonce_bytes)

    if log.isEnabledFor(logging.INFO):
        mp.TargetedBlock.set_nonce(tb_header, nonce_bytes)
        log.info("Message key=[{}]."\
            .format(mbase32.encode(enc.generate_ID(tb_header))))

    key = None

    def key_callback(val):
        nonlocal key
        key = val

    if log.isEnabledFor(logging.DEBUG):
        log.debug("TargetedBlock dump=[\n{}]."\
            .format(mutil.hex_dump(tb_data)))

    # Upload the TargetedBlock to the network.
    log.info("Sending dmail to the network.")

    total_storing = 0
    retry = 0
    while True:
        storing_nodes = yield from\
            self.task_engine.send_store_targeted_data(\
                tb_data, store_key=True, key_callback=key_callback,\
                retry_factor=retry * 10)

        total_storing += storing_nodes

        if total_storing >= 3:
            break

        if retry > 32:
            break
        elif retry > 3:
            yield from asyncio.sleep(1)

        retry += 1

    key_enc = mbase32.encode(key)
    id_enc = mbase32.encode(enc.generate_ID(key))

    if log.isEnabledFor(logging.INFO):
        log.info("Dmail sent; key=[{}], id=[{}], storing_nodes=[{}]."\
            .format(key_enc, id_enc, total_storing))

    return total_storing
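# Both senders above offload the proof-of-work to a thread via
# brute.generate_targeted_block(): the nonce field in the block header is
# rewritten until the header's hash shares at least `difficulty` leading bits
# with target_key. A simplified, standalone sketch of that idea (SHA-512 and
# the helper names here are illustrative assumptions, not the brute module's
# API):

import hashlib

def leading_matching_bits(a, b):
    count = 0
    for x, y in zip(a, b):
        d = x ^ y
        if d == 0:
            count += 8
            continue
        count += 8 - d.bit_length()
        break
    return count

def find_nonce(header_prefix, target_key, difficulty, nonce_size=8):
    nonce = 0
    while True:
        candidate = header_prefix + nonce.to_bytes(nonce_size, "big")
        digest = hashlib.sha512(candidate).digest()
        if leading_matching_bits(digest, target_key) >= difficulty:
            return nonce.to_bytes(nonce_size, "big")
        nonce += 1

# Example with a tiny difficulty so the loop finishes quickly.
_target = hashlib.sha512(b"target").digest()
assert len(find_nonce(b"header", _target, difficulty=8)) == 8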
def __main():
    global loop

    log.info("mcc running.")

    parser = argparse.ArgumentParser()
    parser.add_argument(\
        "--address",\
        help="The address of the Morphis node to connect to.",\
        default="127.0.0.1:4250")
    parser.add_argument(\
        "--create-dmail",\
        help="Generate and upload a new dmail site.",\
        action="store_true")
    parser.add_argument(\
        "--dburl",\
        help="Specify the database url to use.")
    parser.add_argument(\
        "--fetch-dmail", help="Fetch dmail for specified key_id.")
    parser.add_argument(\
        "-i",\
        help="Read file as stdin.")
    parser.add_argument("--nn", type=int,\
        help="Node instance number.")
    parser.add_argument(\
        "--prefix",\
        help="Specify the prefix for various things (currently --create-dmail"\
            ").")
    parser.add_argument(\
        "--scan-dmail",\
        help="Scan the network for available dmails.")
    parser.add_argument(\
        "--send-dmail",\
        help="Send stdin as a dmail with the specified subject. The"\
            " sender and recipients may be specified at the beginning of the"\
            " data as with email headers: 'from: ' and 'to: '.")
    parser.add_argument(\
        "--stat",\
        help="Report node status.",\
        action="store_true")
    parser.add_argument("-l", dest="logconf",\
        help="Specify alternate logging.ini [IF SPECIFIED, THIS MUST BE THE"\
            " FIRST PARAMETER!].")
    parser.add_argument(\
        "--dmail-target",\
        help="Specify the dmail target to validate dmail against.")
    parser.add_argument(\
        "-x",\
        help="Specify the x (Diffie-Hellman private secret) to use.")

    args = parser.parse_args()

    # Load or generate client mcc key.
    key_filename = "data/mcc_key-rsa.mnk"
    if os.path.exists(key_filename):
        log.info("mcc private key file found, loading.")
        client_key = rsakey.RsaKey(filename=key_filename)
    else:
        log.info("mcc private key file missing, generating.")
        client_key = rsakey.RsaKey.generate(bits=4096)
        client_key.write_private_key_file(key_filename)

    # Connect a Morphis Client (lightweight Node) instance.
    mc = client.Client(loop, client_key=client_key, address=args.address)
    r = yield from mc.connect()

    if not r:
        log.warning("Connection failed; exiting.")
        loop.stop()
        return

    dbase = init_db(args)
    de = dmail.DmailEngine(mc, dbase)

    log.info("Processing command requests...")

    if args.stat:
        r = yield from mc.send_command("stat")
        print(r.decode("UTF-8"), end='')

    if args.create_dmail:
        log.info("Creating and uploading dmail site.")

        privkey, data_key, dms, storing_nodes =\
            yield from de.generate_dmail_address(args.prefix)

        print("privkey: {}".format(base58.encode(privkey._encode_key())))
        print("x: {}".format(base58.encode(sshtype.encodeMpint(dms.dh.x))))
        print("dmail address: {}".format(mbase32.encode(data_key)))
        print("storing_nodes=[{}].".format(storing_nodes))

    if args.send_dmail:
        log.info("Sending dmail.")

        if args.i:
            with open(args.i, "rb") as fh:
                dmail_data = fh.read().decode()
        else:
            dmail_data = stdin.read()

        if log.isEnabledFor(logging.DEBUG):
            log.debug("dmail_data=[{}].".format(dmail_data))

        yield from de.send_dmail_text(args.send_dmail, dmail_data)

    if args.scan_dmail:
        log.info("Scanning dmail address.")

        addr, sig_bits = mutil.decode_key(args.scan_dmail)

        def key_callback(key):
            print("dmail key: [{}].".format(mbase32.encode(key)))

        yield from de.scan_dmail_address(\
            addr, sig_bits, key_callback=key_callback)

    if args.fetch_dmail:
        log.info("Fetching dmail for key=[{}].".format(args.fetch_dmail))

        key = mbase32.decode(args.fetch_dmail)

        if args.x:
            l, x_int = sshtype.parseMpint(base58.decode(args.x))
        else:
            x_int = None

        dmail_target = args.dmail_target

        dm, valid_sig = yield from de.fetch_dmail(key, x_int, dmail_target)

        if not dm:
            raise Exception("No dmail found.")

        if not x_int:
            print("Encrypted dmail data=[\n{}].".format(mutil.hex_dump(dm)))
        else:
            print("Subject: {}\n".format(dm.subject))

            if dm.sender_pubkey:
                print("From: {}"\
                    .format(mbase32.encode(enc.generate_ID(dm.sender_pubkey))))

            i = 0
            for part in dm.parts:
                print("DmailPart[{}]:\n mime-type=[{}]\n data=[{}]\n"\
                    .format(i, part.mime_type, part.data))
                i += 1

    log.info("Disconnecting.")

    yield from mc.disconnect()

    loop.stop()
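# __main() round-trips the Diffie-Hellman secret x through base58-encoded SSH
# mpint bytes (sshtype.encodeMpint / parseMpint). For reference, a minimal
# sketch of the standard SSH mpint wire format (RFC 4251): a 4-byte big-endian
# length followed by the big-endian two's-complement value. Simplified to
# non-negative values; not the project's sshtype implementation.

import struct

def encode_mpint(value):
    data = value.to_bytes((value.bit_length() + 8) // 8, "big") if value else b""
    return struct.pack(">L", len(data)) + data

def parse_mpint(buf):
    "Return (bytes_consumed, value)."
    length = struct.unpack(">L", buf[:4])[0]
    return 4 + length, int.from_bytes(buf[4:4 + length], "big")

assert parse_mpint(encode_mpint(0xDEADBEEF))[1] == 0xDEADBEEF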
def __process_chord_packet(self, peer, queue, local_cid):
    "Returns True to stop processing the queue."
    data = yield from queue.get()
    if not data:
        return True

    log.info("Processing chord packet.")

    if log.isEnabledFor(logging.DEBUG):
        log.debug("data=\n[{}].".format(hex_dump(data)))

    packet_type = cp.ChordMessage.parse_type(data)

    if packet_type == cp.CHORD_MSG_GET_PEERS:
        log.info("Received CHORD_MSG_GET_PEERS message.")
        msg = cp.ChordGetPeers(data)

        raise Exception()

#        if peer.protocol.server_mode:
#            omsg = cp.ChordGetPeers()
#            omsg.sender_port = self._bind_port
#
#            peer.protocol.write_channel_data(local_cid, omsg.encode())
#
#        self._check_update_remote_address(msg, peer)
#
#        pl = list(self.peers.values())
#        while True:
#            cnt = len(pl)
#
#            msg = cp.ChordPeerList()
#            msg.peers = pl[:min(25, cnt)]
#
#            peer.protocol.write_channel_data(local_cid, msg.encode())
#
#            if cnt <= 25:
#                break
#
#            pl = pl[25:]
    elif packet_type == cp.CHORD_MSG_PEER_LIST:
        log.info("Received CHORD_MSG_PEER_LIST message.")

        msg = cp.ChordPeerList(data)
        if not msg.peers:
            log.debug("Ignoring empty PeerList.")
            return

        yield from self.add_peers(msg.peers)
    elif packet_type == cp.CHORD_MSG_NODE_INFO:
        log.info("Received CHORD_MSG_NODE_INFO message.")

        msg = cp.ChordNodeInfo(data)

        peer.version = msg.version
        peer.full_node = True

        # Respond to them. Even though it doesn't make much sense for now,
        # as they (a client) obviously know our bind port, in the future
        # this message will contain the version and options of the protocol
        # that don't belong at the lower SSH level.
        rmsg = cp.ChordNodeInfo()
        rmsg.sender_address = self.bind_address
        rmsg.version = self.node.morphis_version

        peer.protocol.write_channel_data(local_cid, rmsg.encode())

        yield from self._check_update_remote_address(msg, peer)

        if log.isEnabledFor(logging.INFO):
            log.info("Inbound Node (addr=[{}]) reports as version=[{}]."\
                .format(peer.address, peer.version))

        self._notify_protocol_ready()
    elif packet_type == cp.CHORD_MSG_FIND_NODE:
        log.info("Received CHORD_MSG_FIND_NODE message.")

        msg = cp.ChordFindNode(data)

        task = self.tasks.process_find_node_request(\
            msg, data, peer, queue, local_cid)

        done, pending =\
            yield from asyncio.wait([task], loop=self.loop, timeout=60)

        if pending:
            if log.isEnabledFor(logging.INFO):
                log.info("Peer requested tunnel operation took too long;"\
                    " aborting.")
            yield from peer.protocol.close_channel(local_cid)
            return True

#NOTE: The problem is that SQLite only allows bitwise operations on integer
# columns, or at least it does NOT work on blob/bytea columns. The other
# problem is then that integer can only hold upto 64 bits, so the node_id will
# have to be broken up into 8 columns (as it is currently 512 bits).
#
# Example of non-working db code. Sqlite seems to break when order by contains
# any bitwise operations. (It just returns the rows in order of id.)
#        def dbcall():
#            with self.node.db.open_session() as sess:
#                t = text(\
#                    "SELECT id, address FROM peer ORDER BY"\
#                    " (~(node_id&:r_id))&(node_id|:r2_id) DESC"\
#                    " LIMIT :lim")
#
#                st = t.bindparams(r_id = msg.node_id, r2_id = msg.node_id,\
#                    lim = BUCKET_SIZE)\
#                    .columns(id=Integer, address=String)
#
#                rs = sess.connection().execute(st)
#
#                for r in rs:
#                    log.info("nn: {} FOUND: {} {}".format(self.node.instance, r.id, r.address))
#
#        yield from self.loop.run_in_executor(None, dbcall)
    else:
        log.warning("Ignoring unrecognized packet (packet_type=[{}])."\
            .format(packet_type))
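# The NOTE at the end of the FIND_NODE branch describes why the closest-peer
# ordering cannot be pushed into SQLite (no bitwise ops on blob columns). A
# minimal sketch of doing that ordering in Python instead, using plain XOR
# distance over the raw id bytes (an illustration only, not the project's
# chord task code; the bucket_size default is an assumption):

def xor_distance(a, b):
    return int.from_bytes(bytes(x ^ y for x, y in zip(a, b)), "big")

def closest_peers(peers, node_id, bucket_size=16):
    "peers: iterable of (peer_id_bytes, address) tuples."
    return sorted(peers, key=lambda p: xor_distance(p[0], node_id))[:bucket_size]

_peers = [(bytes([i]) * 64, "10.0.0.{}".format(i)) for i in range(8)]
assert closest_peers(_peers, bytes([3]) * 64)[0][1] == "10.0.0.3"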
def _process_buffer(self):
    if log.isEnabledFor(logging.DEBUG):
        log.debug("P: process_buffer(): called (binaryMode={}), buf=[\n{}].".format(self.binaryMode, hex_dump(self.buf)))

    assert self.binaryMode

    r = self._process_encrypted_buffer()
    if not r:
        return

    # cbuf is the clear text buf.
    while True:
        if self.bpLength is None:
            assert not self.inCipher

            if len(self.cbuf) < 4:
                return

            if log.isEnabledFor(logging.DEBUG):
                log.debug("t=[{}].".format(self.cbuf[:4]))

            packet_length = struct.unpack(">L", self.cbuf[:4])[0]
            if log.isEnabledFor(logging.DEBUG):
                log.debug("packet_length=[{}].".format(packet_length))

            if packet_length > MAX_PACKET_LENGTH:
                errmsg = "Illegal packet_length [{}] received."\
                    .format(packet_length)
                log.warning(errmsg)
                raise SshException(errmsg)

            # Add size of packet_length as we leave it in buf.
            self.bpLength = packet_length + 4
        else:
            if len(self.cbuf) < self.bpLength or len(self.buf) < self.inHmacSize:
                return

            if log.isEnabledFor(logging.DEBUG):
                log.debug("PACKET READ (bpLength={}, inHmacSize={}, len(self.cbuf)={}, len(self.buf)={})".format(self.bpLength, self.inHmacSize, len(self.cbuf), len(self.buf)))

            padding_length = struct.unpack("B", self.cbuf[4:5])[0]
            log.debug("padding_length=[{}].".format(padding_length))

            padding_offset = self.bpLength - padding_length

            payload = self.cbuf[5:padding_offset]
            padding = self.cbuf[padding_offset:self.bpLength]
#            mac = self.cbuf[self.bpLength:self.bpLength + self.inHmacSize]
            mac = self.buf[:self.inHmacSize]

            if log.isEnabledFor(logging.DEBUG):
                log.debug("payload=[\n{}], padding=[\n{}], mac=[\n{}] len(mac)={}.".format(hex_dump(payload), hex_dump(padding), hex_dump(mac), len(mac)))

            if self.inHmacSize != 0:
                self.buf = self.buf[self.inHmacSize:]

                mbuf = struct.pack(">L", self.inPacketId)
                tmac = hmac.new(self.inHmacKey, digestmod=sha1)
                tmac.update(mbuf)
                tmac.update(self.cbuf)
                cmac = tmac.digest()

                if log.isEnabledFor(logging.DEBUG):
                    log.debug("inPacketId={} len(cmac)={}, cmac=[\n{}].".format(self.inPacketId, len(cmac), hex_dump(cmac)))

                r = hmac.compare_digest(cmac, mac)
                log.info("HMAC check result: [{}].".format(r))
                if not r:
                    raise SshException("HMAC check failure, packetId={}.".format(self.inPacketId))

            newbuf = self.cbuf[self.bpLength + self.inHmacSize:]
            if self.cbuf == self.buf:
                self.cbuf = bytearray()
                self.buf = newbuf
            else:
                self.cbuf = newbuf

            if self.waitingForNewKeys:
                packet_type = mnetpacket.SshPacket.parse_type(payload)
                if packet_type == mnetpacket.SSH_MSG_NEWKEYS:
                    if self.server_mode:
                        self.init_inbound_encryption()
                    else:
                        # Disable further processing until inbound
                        # encryption is setup. It may not have yet as
                        # parameters and newkeys may have come in same tcp
                        # packet.
                        self.set_inbound_enabled(False)
                    self.waitingForNewKeys = False

            self.packet = payload
            self.inPacketId = (self.inPacketId + 1) & 0xFFFFFFFF

            self.bpLength = None

            if self.waiter != None:
                self.waiter.set_result(False)
                self.waiter = None

            break
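# The MAC check in _process_buffer() follows RFC 4253: the HMAC is computed
# over the 4-byte packet sequence number concatenated with the unencrypted
# packet bytes, then compared in constant time. A standalone sketch of just
# that step (hypothetical key/packet values; the real code pulls them from
# self.inHmacKey and self.cbuf):

import hmac
import struct
from hashlib import sha1

def verify_packet_mac(hmac_key, packet_id, packet_bytes, received_mac):
    tmac = hmac.new(hmac_key, digestmod=sha1)
    tmac.update(struct.pack(">L", packet_id))
    tmac.update(packet_bytes)
    return hmac.compare_digest(tmac.digest(), received_mac)

_key, _pkt = b"k" * 20, b"\x00\x00\x00\x0c\x0a" + b"p" * 11
_mac = hmac.new(_key, struct.pack(">L", 0) + _pkt, sha1).digest()
assert verify_packet_mac(_key, 0, _pkt, _mac)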
def connectTaskSecure(protocol, server_mode):
    # Send KexInit packet.
    opobj = mnetpacket.SshKexInitMessage()
    opobj.cookie = os.urandom(16)
#    opobj.kex_algorithms = "diffie-hellman-group-exchange-sha256"
    opobj.kex_algorithms = "diffie-hellman-group14-sha1"
    opobj.server_host_key_algorithms = "ssh-rsa"
    opobj.encryption_algorithms_client_to_server = "aes256-cbc"
    opobj.encryption_algorithms_server_to_client = "aes256-cbc"
#    opobj.mac_algorithms_client_to_server = "hmac-sha2-512"
#    opobj.mac_algorithms_server_to_client = "hmac-sha2-512"
    opobj.mac_algorithms_client_to_server = "hmac-sha1"
    opobj.mac_algorithms_server_to_client = "hmac-sha1"
    opobj.compression_algorithms_client_to_server = "none"
    opobj.compression_algorithms_server_to_client = "none"
    opobj.encode()

    protocol.local_kex_init_message = opobj.buf

    protocol.write_packet(opobj)

    # Read KexInit packet.
    packet = yield from protocol.read_packet()
    if not packet:
        return False

    if log.isEnabledFor(logging.DEBUG):
        log.debug("X: Received packet [{}].".format(hex_dump(packet)))

    packet_type = mnetpacket.SshPacket.parse_type(packet)

    if log.isEnabledFor(logging.INFO):
        log.info("packet_type=[{}].".format(packet_type))

    if packet_type != 20: # SSH_MSG_KEXINIT.
        log.warning("Peer sent unexpected packet_type[{}], disconnecting.".format(packet_type))
        protocol.close()
        return False

    protocol.remote_kex_init_message = packet

    pobj = mnetpacket.SshKexInitMessage(packet)
    if log.isEnabledFor(logging.DEBUG):
        log.debug("cookie=[{}].".format(pobj.cookie))
    if log.isEnabledFor(logging.INFO):
        log.info("keyExchangeAlgorithms=[{}].".format(pobj.kex_algorithms))

    protocol.waitingForNewKeys = True

#    ke = kex.KexGroup14(protocol)
#    log.info("Calling start_kex()...")
#    r = yield from ke.do_kex()
    ke = kexdhgroup14sha1.KexDhGroup14Sha1(protocol)
    log.info("Calling kex->run()...")
    r = yield from ke.run()

    if not r:
        # Client is rejected for some reason by higher level.
        protocol.close()
        return False

    # Setup encryption now that keys are exchanged.
    protocol.init_outbound_encryption()

    if not protocol.server_mode:
        """ The server side gets done automatically since the parameters are
        always there before NEWKEYS is received, but for the client the
        parameters and the NEWKEYS message may come in the same tcp packet,
        so the auto part just turns off inbound processing and waits for us
        to call init_inbound_encryption when we have the parameters ready. """
        protocol.init_inbound_encryption()
        protocol.set_inbound_enabled(True)

    packet = yield from protocol.read_packet()
    if not packet:
        return False

    m = mnetpacket.SshNewKeysMessage(packet)
    log.debug("Received SSH_MSG_NEWKEYS.")

    if protocol.server_mode:
        packet = yield from protocol.read_packet()
        if not packet:
            return False

#        m = mnetpacket.SshPacket(None, packet)
#        log.info("X: Received packet (type={}) [{}].".format(m.packet_type, packet))

        m = mnetpacket.SshServiceRequestMessage(packet)
        log.info("Service requested [{}].".format(m.service_name))

        if m.service_name != "ssh-userauth":
            raise SshException("Remote end requested unexpected service (name=[{}]).".format(m.service_name))

        mr = mnetpacket.SshServiceAcceptMessage()
        mr.service_name = "ssh-userauth"
        mr.encode()

        protocol.write_packet(mr)

        packet = yield from protocol.read_packet()
        if not packet:
            return False

        m = mnetpacket.SshUserauthRequestMessage(packet)
        log.info("Userauth requested with method=[{}].".format(m.method_name))

        if m.method_name == "none":
            mr = mnetpacket.SshUserauthFailureMessage()
            mr.auths = "publickey"
            mr.partial_success = False
            mr.encode()

            protocol.write_packet(mr)

            packet = yield from protocol.read_packet()
            if not packet:
                return False

            m = mnetpacket.SshUserauthRequestMessage(packet)
            log.info("Userauth requested with method=[{}].".format(m.method_name))

        if m.method_name != "publickey":
            raise SshException("Unhandled client auth method [{}].".format(m.method_name))
        if m.algorithm_name != "ssh-rsa":
            raise SshException("Unhandled client auth algorithm [{}].".format(m.algorithm_name))

        log.debug("m.signature_present()={}.".format(m.signature_present))

        if not m.signature_present:
            mr = mnetpacket.SshUserauthPkOkMessage()
            mr.algorithm_name = m.algorithm_name
            mr.public_key = m.public_key
            mr.encode()

            protocol.write_packet(mr)

            packet = yield from protocol.read_packet()
            if not packet:
                return False

            m = mnetpacket.SshUserauthRequestMessage(packet)
            log.info("Userauth requested with method=[{}].".format(m.method_name))

            if m.method_name != "publickey":
                raise SshException("Unhandled client auth method [{}].".format(m.method_name))
            if m.algorithm_name != "ssh-rsa":
                raise SshException("Unhandled client auth algorithm [{}].".format(m.algorithm_name))

        if log.isEnabledFor(logging.DEBUG):
            log.debug("signature=[{}].".format(hex_dump(m.signature)))

        if protocol.client_key:
            if protocol.client_key.asbytes() != m.public_key:
                raise SshException("Key provided by client differs from that which we were expecting.")
        else:
            protocol.client_key = rsakey.RsaKey(m.public_key)

        buf = bytearray()
        buf += sshtype.encodeBinary(protocol.session_id)
        buf += packet[:-m.signature_length]

        r = protocol.client_key.verify_ssh_sig(buf, m.signature)

        log.info("Userauth signature check result: [{}].".format(r))
        if not r:
            raise SshException("Signature and key provided by client did not match.")

        r = yield from protocol.connection_handler.peer_authenticated(protocol)
        if not r:
            # Client is rejected for some reason by higher level.
            protocol.close()
            return False

        mr = mnetpacket.SshUserauthSuccessMessage()
        mr.encode()

        protocol.write_packet(mr)
    else:
        # client mode.
        m = mnetpacket.SshServiceRequestMessage()
        m.service_name = "ssh-userauth"
        m.encode()

        protocol.write_packet(m)

        packet = yield from protocol.read_packet()
        if not packet:
            return False

        m = mnetpacket.SshServiceAcceptMessage(packet)
        log.info("Service request accepted [{}].".format(m.service_name))

        mr = mnetpacket.SshUserauthRequestMessage()
        mr.user_name = "dev"
        mr.service_name = "ssh-connection"
        mr.method_name = "publickey"
        mr.signature_present = True
        mr.algorithm_name = "ssh-rsa"

        ckey = protocol.client_key
        mr.public_key = ckey.asbytes()

        mr.encode()

        # Sign the session id plus the encoded request, then append the
        # signature to the request buffer in place (mrb aliases mr.buf).
        mrb = bytearray()
        mrb += sshtype.encodeBinary(protocol.session_id)
        mrb += mr.buf

        sig = sshtype.encodeBinary(ckey.sign_ssh_data(mrb))

        mrb = mr.buf
        assert mr.buf == mrb
        mrb += sig

        protocol.write_packet(mr)

        packet = yield from protocol.read_packet()
        if not packet:
            return False

        m = mnetpacket.SshUserauthSuccessMessage(packet)
        log.info("Userauth accepted.")

    log.info("Connect task done (server={}).".format(server_mode))

#    if not server_mode:
#        protocol.close()

    return True
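# In both the server verification path and the client signing path above, the
# signature covers the session identifier (encoded as an SSH string) followed
# by the SSH_MSG_USERAUTH_REQUEST bytes up to, but not including, the
# signature field (RFC 4252). A sketch of composing that signed buffer with
# placeholder bytes (the real code uses sshtype.encodeBinary and the actual
# packet buffers):

import struct

def encode_ssh_string(data):
    return struct.pack(">L", len(data)) + data

def userauth_signed_buffer(session_id, request_without_signature):
    buf = bytearray()
    buf += encode_ssh_string(session_id)
    buf += request_without_signature
    return bytes(buf)

_buf = userauth_signed_buffer(b"\x11" * 32, b"\x32example-request")
assert _buf.startswith(struct.pack(">L", 32))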