def send_packet(key_list:        'KeyList',               # Key list object
                gateway:         'Gateway',                # Gateway object
                log_queue:       'Queue[log_queue_data]',  # Multiprocessing queue for logged messages
                assembly_packet: bytes,                    # Padded plaintext assembly packet
                onion_pub_key:   Optional[bytes] = None,   # Recipient v3 Onion Service address
                log_messages:    Optional[bool]  = None,   # When True, log the message assembly packet
                log_as_ph:       Optional[bool]  = None    # When True, log assembly packet as placeholder data
                ) -> None:
    """Encrypt and send assembly packet.

    The assembly packets are encrypted using a symmetric message key.
    TFC provides forward secrecy via a hash ratchet, meaning the
    previous message key is replaced by its BLAKE2b hash. The preimage
    resistance of the hash function prevents retrospective decryption
    of ciphertexts in cases of physical compromise.

    The hash ratchet state (the number of times the initial message key
    has been passed through BLAKE2b) is delivered to the recipient
    inside the hash ratchet counter. This counter is encrypted with a
    static symmetric key called the header key.

    The encrypted assembly packet and the encrypted harac are prepended
    with datagram headers that tell whether the encrypted assembly
    packet is a command or a message. Packets with
    MESSAGE_DATAGRAM_HEADER also contain a second header, which is the
    public key of the recipient's Onion Service. This allows the
    ciphertext to be requested from the Relay Program's server by the
    correct contact.

    Once the encrypted_packet has been output, the hash ratchet
    advances to the next state, and the assembly packet is pushed to
    log_queue, which is read by the `log_writer_loop` process (found in
    src.common.db_logs). This approach prevents IO delays caused by
    `input_loop` reading the log file from affecting the `sender_loop`
    process, which could reveal schedule information under traffic
    masking mode.
    """
    if len(assembly_packet) != ASSEMBLY_PACKET_LENGTH:
        raise CriticalError("Invalid assembly packet PT length.")

    if onion_pub_key is None:
        keyset = key_list.get_keyset(LOCAL_PUBKEY)
        header = COMMAND_DATAGRAM_HEADER
    else:
        keyset = key_list.get_keyset(onion_pub_key)
        header = MESSAGE_DATAGRAM_HEADER + onion_pub_key

    harac_in_bytes    = int_to_bytes(keyset.tx_harac)
    encrypted_harac   = encrypt_and_sign(harac_in_bytes, keyset.tx_hk)
    encrypted_message = encrypt_and_sign(assembly_packet, keyset.tx_mk)
    encrypted_packet  = header + encrypted_harac + encrypted_message

    gateway.write(encrypted_packet)

    keyset.rotate_tx_mk()

    log_queue.put((onion_pub_key, assembly_packet, log_messages, log_as_ph, key_list.master_key))

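# The receiving side uses the delivered hash ratchet counter to catch its own
# ratchet up before decryption. A minimal sketch of that catch-up step, assuming
# hashlib's BLAKE2b, 32-byte keys, and the 8-byte big-endian harac encoding used
# above (the function and argument names here are illustrative, not TFC's own):
import hashlib


def catch_up_message_key(rx_mk: bytes, stored_harac: int, offered_harac: int) -> bytes:
    """Advance the rx-message key until it matches the purported hash ratchet counter."""
    for harac in range(stored_harac, offered_harac):
        rx_mk = hashlib.blake2b(rx_mk + harac.to_bytes(8, byteorder='big'), digest_size=32).digest()
    return rx_mk
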
def split_to_assembly_packets(payload: bytes, p_type: str) -> List[bytes]:
    """Split payload to assembly packets.

    Messages and commands are compressed to reduce transmission time.
    Files have been compressed at an earlier phase, before B85 encoding.

    If the compressed message cannot be sent in one packet, it is split
    into multiple assembly packets with headers. Long messages are
    encrypted with an inner layer of XSalsa20-Poly1305 to provide
    sender-based control over partially transmitted data. Regardless of
    packet size, files always have an inner layer of encryption, and it
    is added in an earlier phase. Commands do not need sender-based
    control, so they are only delivered with a hash that makes the
    integrity check easy.

    The first assembly packet in a file transmission is prepended with
    an 8-byte packet counter that tells the sender and receiver how
    many packets the file transmission requires.
    """
    s_header = {MESSAGE: M_S_HEADER, FILE: F_S_HEADER, COMMAND: C_S_HEADER}[p_type]
    l_header = {MESSAGE: M_L_HEADER, FILE: F_L_HEADER, COMMAND: C_L_HEADER}[p_type]
    a_header = {MESSAGE: M_A_HEADER, FILE: F_A_HEADER, COMMAND: C_A_HEADER}[p_type]
    e_header = {MESSAGE: M_E_HEADER, FILE: F_E_HEADER, COMMAND: C_E_HEADER}[p_type]

    if p_type in [MESSAGE, COMMAND]:
        payload = zlib.compress(payload, level=COMPRESSION_LEVEL)

    if len(payload) < PADDING_LEN:
        padded      = byte_padding(payload)
        packet_list = [s_header + padded]

    else:
        if p_type == MESSAGE:
            msg_key = csprng()
            payload = encrypt_and_sign(payload, msg_key)
            payload += msg_key

        elif p_type == FILE:
            payload = bytes(FILE_PACKET_CTR_LEN) + payload

        elif p_type == COMMAND:
            payload += hash_chain(payload)

        padded = byte_padding(payload)
        p_list = split_byte_string(padded, item_len=PADDING_LEN)

        if p_type == FILE:
            p_list[0] = int_to_bytes(len(p_list)) + p_list[0][FILE_PACKET_CTR_LEN:]

        packet_list = ([l_header + p_list[0]] +
                       [a_header + p for p in p_list[1:-1]] +
                       [e_header + p_list[-1]])

    return packet_list

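# Rough usage illustration of the splitter above (a sketch that assumes the
# MESSAGE constants and the 255-byte PADDING_LEN referenced in the function):
# a short message fits one short-message packet, while a long payload yields a
# long-message header packet, append packets, and an end packet.
import os

short_packets = split_to_assembly_packets(b'Hi Bob', MESSAGE)
assert len(short_packets) == 1 and short_packets[0].startswith(M_S_HEADER)

long_packets = split_to_assembly_packets(os.urandom(5000), MESSAGE)
assert long_packets[0].startswith(M_L_HEADER)
assert all(p.startswith(M_A_HEADER) for p in long_packets[1:-1])
assert long_packets[-1].startswith(M_E_HEADER)
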
def test_long_file(self):
    # Setup
    packet             = Packet('*****@*****.**', self.contact, ORIGIN_CONTACT_HEADER, FILE, self.settings)
    packet.long_active = True

    compressed = zlib.compress(os.urandom(10000), level=COMPRESSION_LEVEL)
    file_key   = os.urandom(KEY_LENGTH)
    encrypted  = encrypt_and_sign(compressed, key=file_key)
    encrypted += file_key
    encoded    = base64.b85encode(encrypted)
    file_data  = int_to_bytes(1000) + int_to_bytes(10000) + b'testfile.txt' + US_BYTE + encoded
    packets    = split_to_assembly_packets(file_data, FILE)

    for p in packets:
        packet.add_packet(p)

    # Test
    self.assertIsNone(packet.assemble_and_store_file())
    self.assertTrue(os.path.isfile(f'{DIR_RX_FILES}Alice/testfile.txt'))
    self.assertEqual(os.path.getsize(f'{DIR_RX_FILES}Alice/testfile.txt'), 10000)

def test_group_remove_datagram(self) -> None:
    # Setup
    packet = self.create_packet(GROUP_MSG_MEMBER_REM_HEADER
                                + bytes(GROUP_ID_LENGTH)
                                + int_to_bytes(2)
                                + nick_to_pub_key('Alice')
                                + nick_to_pub_key('Bob'))
    self.queues[GATEWAY_QUEUE].put((self.ts, packet))

    # Test
    self.assertIsNone(src_incoming(*self.args, unit_test=True))
    self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0)

def test_rotate_tx_mk(self):
    self.assertIsNone(self.keyset.rotate_tx_mk())

    self.assertEqual(self.keyset.tx_mk,
                     blake2b(bytes(SYMMETRIC_KEY_LENGTH) + int_to_bytes(INITIAL_HARAC),
                             digest_size=SYMMETRIC_KEY_LENGTH))
    self.assertEqual(self.keyset.rx_mk, bytes(SYMMETRIC_KEY_LENGTH))
    self.assertEqual(self.keyset.tx_hk, bytes(SYMMETRIC_KEY_LENGTH))
    self.assertEqual(self.keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH))
    self.assertEqual(self.keyset.tx_harac, 1)
    self.assertEqual(self.keyset.rx_harac, INITIAL_HARAC)

def test_file_datagram(self):
    # Setup
    packet = self.create_packet(FILE_DATAGRAM_HEADER
                                + int_to_bytes(2)
                                + nick_to_pub_key('Alice')
                                + nick_to_pub_key('Bob')
                                + 200 * b'a')
    self.queues[GATEWAY_QUEUE].put((self.ts, packet))

    # Test
    self.assertIsNone(src_incoming(*self.args, unittest=True))
    self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0)
    self.assertEqual(self.queues[F_TO_FLASK_QUEUE].qsize(), 2)

def test_noise_packet_interrupts_file(self):
    # Setup
    packet = Packet('*****@*****.**', self.contact, ORIGIN_CONTACT_HEADER, FILE, self.settings)

    compressed = zlib.compress(os.urandom(10000), level=COMPRESSION_LEVEL)
    file_key   = os.urandom(KEY_LENGTH)
    encrypted  = encrypt_and_sign(compressed, key=file_key)
    encrypted += file_key
    encoded    = base64.b85encode(encrypted)
    file_data  = int_to_bytes(1000) + int_to_bytes(10000) + b'testfile.txt' + US_BYTE + encoded
    packets    = split_to_assembly_packets(file_data, FILE)
    packets    = packets[:20]
    packets.append(byte_padding(P_N_HEADER))  # Add cancel packet

    for p in packets:
        packet.add_packet(p)

    # Test
    self.assertEqual(len(packet.assembly_pt_list), 0)  # Cancel packet empties packet list
    self.assertFalse(packet.long_active)
    self.assertFalse(packet.is_complete)
    self.assertEqual(packet.log_masking_ctr, len(packets))

def setUp(self):
    self.msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
                " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
                "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
                "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
                "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
                "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
                "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
                "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
                "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
                "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.")

    self.contact  = create_contact()
    self.settings = Settings(logfile_masking=True)

    compressed = zlib.compress(b'abcdefghijk', level=COMPRESSION_LEVEL)
    file_key   = os.urandom(KEY_LENGTH)
    encrypted  = encrypt_and_sign(compressed, key=file_key)
    encrypted += file_key
    encoded    = base64.b85encode(encrypted)

    self.short_f_data = (int_to_bytes(1) + int_to_bytes(2) + b'testfile.txt' + US_BYTE + encoded)

def mock_file_preprocessor(payload):
    if len(payload) < 255:
        padded      = byte_padding(payload)
        packet_list = [F_S_HEADER + padded]
    else:
        payload = bytes(8) + payload
        padded  = byte_padding(payload)
        p_list  = split_byte_string(padded, item_len=255)

        packet_list = ([F_L_HEADER + int_to_bytes(len(p_list)) + p_list[0][8:]] +
                       [F_A_HEADER + p for p in p_list[1:-1]] +
                       [F_E_HEADER + p_list[-1]])

    return packet_list

def test_function(self):
    # Setup
    no_msg       = int_to_bytes(1)
    cmd_data     = b'*****@*****.**' + US_BYTE + no_msg
    window_list  = WindowList()
    contact_list = ContactList()
    settings     = Settings()
    master_key   = MasterKey()

    # Test
    self.assertFR("Error: Could not find 'user_data/ut_logs'.",
                  display_logs, cmd_data, window_list, contact_list, settings, master_key)

def test_disabled_file_reception_raises_fr_with_end_packet(self):
    # Setup
    packet             = Packet('*****@*****.**', self.contact, ORIGIN_CONTACT_HEADER, FILE, self.settings)
    packet.long_active = True

    compressed = zlib.compress(os.urandom(10000), level=COMPRESSION_LEVEL)
    file_key   = os.urandom(KEY_LENGTH)
    encrypted  = encrypt_and_sign(compressed, key=file_key)
    encrypted += file_key
    encoded    = base64.b85encode(encrypted)
    file_data  = int_to_bytes(1000) + int_to_bytes(10000) + b'testfile.txt' + US_BYTE + encoded
    packets    = split_to_assembly_packets(file_data, FILE)

    for p in packets[:-1]:
        self.assertIsNone(packet.add_packet(p))

    packet.contact.file_reception = False

    # Test
    for p in packets[-1:]:
        self.assertFR("Alert! File reception disabled mid-transfer.", packet.add_packet, p)

    self.assertEqual(packet.log_masking_ctr, len(packets))

def group_add_member(group_name:   str,
                     purp_members: List['bytes'],
                     contact_list: 'ContactList',
                     group_list:   'GroupList',
                     settings:     'Settings',
                     queues:       'QueueDict',
                     master_key:   'MasterKey',
                     _:            Optional[bytes] = None
                     ) -> None:
    """Add new member(s) to a specified group."""
    if group_name not in group_list.get_list_of_group_names():
        if yes(f"Group {group_name} was not found. Create new group?", abort=False, head=1):
            group_create(group_name, purp_members, contact_list, group_list, settings, queues, master_key)
            return None
        else:
            raise FunctionReturn("Group creation aborted.", head=0, delay=1, tail_clear=True)

    purp_pub_keys    = set(purp_members)
    pub_keys         = set(contact_list.get_list_of_pub_keys())
    before_adding    = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
    ok_pub_keys_set  = set(pub_keys & purp_pub_keys)
    new_in_group_set = set(ok_pub_keys_set - before_adding)

    end_assembly = list(before_adding | new_in_group_set)
    rejected     = list(purp_pub_keys - pub_keys)
    already_in_g = list(before_adding & purp_pub_keys)
    new_in_group = list(new_in_group_set)
    ok_pub_keys  = list(ok_pub_keys_set)

    if len(end_assembly) > settings.max_number_of_group_members:
        raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_group_members} "
                             f"members per group.", head_clear=True)

    group = group_list.get_group(group_name)
    group.add_members([contact_list.get_contact_by_pub_key(k) for k in new_in_group])

    command = GROUP_ADD + group.group_id + b''.join(ok_pub_keys)
    queue_command(command, settings, queues)

    group_management_print(ADDED_MEMBERS,    new_in_group, contact_list, group_name)
    group_management_print(ALREADY_MEMBER,   already_in_g, contact_list, group_name)
    group_management_print(UNKNOWN_ACCOUNTS, rejected,     contact_list, group_name)

    if new_in_group:
        if yes("Publish the list of new members to involved?", abort=False):
            add_packet = (GROUP_MSG_MEMBER_ADD_HEADER
                          + group.group_id
                          + int_to_bytes(len(before_adding))
                          + b''.join(before_adding)
                          + b''.join(new_in_group))
            queue_to_nc(add_packet, queues[RELAY_PACKET_QUEUE])

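# The set algebra above partitions the purported members into disjoint reports.
# A tiny worked example with illustrative one-byte "keys" (real keys are 32-byte
# Onion Service public keys):
purp_pub_keys = {b'A', b'B', b'X'}              # keys the user asked to add
pub_keys      = {b'A', b'B', b'C'}              # keys of known contacts
before_adding = {b'A'}                          # current members of the group

ok_pub_keys  = pub_keys & purp_pub_keys         # {b'A', b'B'}: known accounts
new_in_group = ok_pub_keys - before_adding      # {b'B'}:       accounts actually added
already_in_g = before_adding & purp_pub_keys    # {b'A'}:       reported as already members
rejected     = purp_pub_keys - pub_keys         # {b'X'}:       reported as unknown accounts
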
def new_master_key(self):
    password = MasterKey.new_password()
    salt     = os.urandom(32)
    rounds   = 1

    assert isinstance(salt, bytes)

    while True:
        time_start = time.monotonic()
        master_key, memory = argon2_kdf(password, salt, rounds, local_testing=False)
        time_final = time.monotonic() - time_start

        if time_final > 3.0:
            self.master_key = master_key
            master_key_hash = hash_chain(master_key)
            ensure_dir(f'{DIR_USER_DATA}/')
            with open(self.file_name, 'wb+') as f:
                f.write(salt + master_key_hash + int_to_bytes(rounds) + int_to_bytes(memory))
            break
        else:
            rounds *= 2

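# The rounds-doubling loop above is a time-based cost calibration: keep doubling
# the work factor until a single key derivation exceeds the target duration.
# A self-contained sketch of the same pattern, with PBKDF2 standing in for the
# Argon2 call (the 3.0-second target and the doubling mirror the method above;
# everything else is illustrative):
import hashlib
import os
import time


def calibrate_kdf_iterations(password: str, target_seconds: float = 3.0) -> int:
    """Double the PBKDF2 iteration count until one derivation takes longer than the target."""
    salt       = os.urandom(32)
    iterations = 1
    while True:
        start = time.monotonic()
        hashlib.pbkdf2_hmac('sha256', password.encode(), salt, iterations)
        if time.monotonic() - start > target_seconds:
            return iterations
        iterations *= 2
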
def test_successful_command_decryption(self):
    # Setup
    command           = byte_padding(b'test')
    encrypted_message = encrypt_and_sign(command, KEY_LENGTH * b'\x01')
    encrypted_harac   = encrypt_and_sign(int_to_bytes(1), KEY_LENGTH * b'\x01')
    packet            = COMMAND_PACKET_HEADER + encrypted_harac + encrypted_message
    keyset            = self.key_list.get_keyset(LOCAL_ID)
    keyset.tx_harac   = 1

    # Test
    assembly_pt, account, origin = decrypt_assembly_packet(packet, self.window_list,
                                                           self.contact_list, self.key_list)
    self.assertEqual(assembly_pt, command)
    self.assertEqual(account, LOCAL_ID)
    self.assertEqual(origin, ORIGIN_USER_HEADER)

def group_rm_member(group_name:   str,
                    purp_members: List[bytes],
                    contact_list: 'ContactList',
                    group_list:   'GroupList',
                    settings:     'Settings',
                    queues:       'QueueDict',
                    master_key:   'MasterKey',
                    _:            Optional[bytes] = None
                    ) -> None:
    """Remove member(s) from the specified group or remove the group itself."""
    if not purp_members:
        group_rm_group(group_name, contact_list, group_list, settings, queues, master_key)

    if group_name not in group_list.get_list_of_group_names():
        raise FunctionReturn(f"Group '{group_name}' does not exist.", head_clear=True)

    purp_pub_keys   = set(purp_members)
    pub_keys        = set(contact_list.get_list_of_pub_keys())
    before_removal  = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
    ok_pub_keys_set = set(purp_pub_keys & pub_keys)
    removable_set   = set(before_removal & ok_pub_keys_set)

    remaining    = list(before_removal - removable_set)
    not_in_group = list(ok_pub_keys_set - before_removal)
    rejected     = list(purp_pub_keys - pub_keys)
    removable    = list(removable_set)
    ok_pub_keys  = list(ok_pub_keys_set)

    group = group_list.get_group(group_name)
    group.remove_members(removable)

    command = GROUP_REMOVE + group.group_id + b''.join(ok_pub_keys)
    queue_command(command, settings, queues)

    group_management_print(REMOVED_MEMBERS,  removable,    contact_list, group_name)
    group_management_print(NOT_IN_GROUP,     not_in_group, contact_list, group_name)
    group_management_print(UNKNOWN_ACCOUNTS, rejected,     contact_list, group_name)

    if removable and remaining and yes("Publish the list of removed members to remaining members?", abort=False):
        rem_packet = (GROUP_MSG_MEMBER_REM_HEADER
                      + group.group_id
                      + int_to_bytes(len(remaining))
                      + b''.join(remaining)
                      + b''.join(removable))
        queue_to_nc(rem_packet, queues[RELAY_PACKET_QUEUE])

def test_expired_harac_raises_fr(self):
    # Setup
    encrypted_message = encrypt_and_sign(PRIVATE_MESSAGE_HEADER + byte_padding(b'test'), 32 * b'\x01')
    harac_in_bytes    = int_to_bytes(1)
    encrypted_harac   = encrypt_and_sign(harac_in_bytes, 32 * b'\x01')
    packet            = (MESSAGE_PACKET_HEADER + encrypted_harac + encrypted_message
                         + ORIGIN_CONTACT_HEADER + b'*****@*****.**')
    window_list       = WindowList(nicks=['Alice', 'local'])
    contact_list      = ContactList(nicks=['Alice', 'local'])
    key_list          = KeyList(nicks=['Alice', 'local'])
    keyset            = key_list.get_keyset('*****@*****.**')
    keyset.rx_harac   = 3

    # Test
    self.assertFR("Warning! Received packet from Alice had an expired hash ratchet counter.",
                  decrypt_assembly_packet, packet, window_list, contact_list, key_list)

def process_message_datagram(ts:      'datetime',
                             packet:  bytes,
                             header:  bytes,
                             buf_key: bytes,
                             queues:  'QueueDict'
                             ) -> None:
    """Process message or public key datagram."""
    packets_to_dst = queues[DST_MESSAGE_QUEUE]

    onion_pub_key, payload = separate_header(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)

    packet_str = header.decode() + b85encode(payload).decode()  # b85encode returns bytes; decode before concatenating
    ts_bytes   = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4]))

    buffer_to_flask(packet_str, onion_pub_key, ts, header, buf_key)

    if header == MESSAGE_DATAGRAM_HEADER:
        packets_to_dst.put(header + ts_bytes + onion_pub_key + ORIGIN_USER_HEADER + payload)

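# The timestamp above is packed by truncating microseconds to centiseconds and
# encoding the remaining digit string as an integer. A round-trip sketch of that
# encoding (assuming the 8-byte big-endian int_to_bytes and a matching
# bytes_to_int helper; both names are used here for illustration only):
from datetime import datetime

ts       = datetime.now()
ts_bytes = int_to_bytes(int(ts.strftime('%Y%m%d%H%M%S%f')[:-4]))

# The receiving side parses the integer back into a datetime (centisecond precision).
recovered = datetime.strptime(str(bytes_to_int(ts_bytes)), '%Y%m%d%H%M%S%f')
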
def queue_packet(mk, hk, tx_harac, packet, onion_pub_key=None):
    """Create encrypted datagram."""
    if onion_pub_key is None:
        header = b''
        queue  = queues[COMMAND_DATAGRAM_HEADER]
        packet = split_to_assembly_packets(packet, COMMAND)[0]
    else:
        header = onion_pub_key + ORIGIN_CONTACT_HEADER
        queue  = queues[MESSAGE_DATAGRAM_HEADER]
        packet = split_to_assembly_packets(packet, MESSAGE)[0]

    encrypted_harac   = encrypt_and_sign(int_to_bytes(tx_harac), hk)
    encrypted_message = encrypt_and_sign(packet, mk)
    encrypted_packet  = header + encrypted_harac + encrypted_message

    queue.put((datetime.datetime.now(), encrypted_packet))

def check_for_files(url_token:     str,
                    onion_pub_key: bytes,
                    onion_addr:    str,
                    short_addr:    str,
                    session:       'Session',
                    queues:        'QueueDict'
                    ) -> None:
    """See if a file is available from the contact."""
    try:
        file_data = session.get(f"http://{onion_addr}.onion/{url_token}/files", stream=True).content
        if file_data:
            ts       = datetime.now()
            ts_bytes = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4]))
            packet   = FILE_DATAGRAM_HEADER + ts_bytes + onion_pub_key + ORIGIN_CONTACT_HEADER + file_data
            queues[DST_MESSAGE_QUEUE].put(packet)
            rp_print(f"File from contact {short_addr}", ts)

    except requests.exceptions.RequestException:
        pass

def load_file_data(self) -> None:
    """Load file name, size and data from specified path."""
    if not os.path.isfile(self.path):
        raise FunctionReturn("Error: File not found.")

    self.name = (self.path.split('/')[-1]).encode()
    self.name_length_check()

    byte_size = os.path.getsize(self.path)
    if byte_size == 0:
        raise FunctionReturn("Error: Target file is empty.")

    self.size       = int_to_bytes(byte_size)
    self.size_print = readable_size(byte_size)

    with open(self.path, 'rb') as f:
        self.data = f.read()

def setUp(self):
    self.cmd_data     = b'*****@*****.**' + US_BYTE + int_to_bytes(1)
    self.ts           = datetime.now()
    self.window_list  = WindowList(nicks=['Alice', 'Bob'])
    self.window       = self.window_list.get_window('*****@*****.**')
    self.window.type_print = 'contact'

    self.contact_list = ContactList(nicks=['Alice', 'Bob'])
    self.group_list   = GroupList()
    self.settings     = Settings()
    self.master_key   = MasterKey()

    self.time = datetime.fromtimestamp(
        struct.unpack('<L', binascii.unhexlify('08ceae02'))[0]).strftime('%H:%M')

    self.o_struct_pack = struct.pack
    struct.pack        = lambda *_: binascii.unhexlify('08ceae02')

def send_packet(packet:     bytes,
                key_list:   'KeyList',
                settings:   'Settings',
                gateway:    'Gateway',
                l_queue:    'Queue',
                rx_account: str = None,
                tx_account: str = None,
                logging:    bool = None) -> None:
    """Encrypt and send assembly packet.

    Load keys from key database, encrypt assembly packet, add headers,
    send and optionally log the assembly packet.

    :param packet:     Padded plaintext assembly packet
    :param key_list:   Key list object
    :param settings:   Settings object
    :param gateway:    Gateway object
    :param l_queue:    Multiprocessing queue for logged messages
    :param rx_account: Recipient account
    :param tx_account: Sender's account associated with recipient's account
    :param logging:    When True, log the assembly packet
    :return:           None
    """
    if len(packet) != 256:
        raise CriticalError("Invalid assembly packet PT length.")

    if rx_account is None:
        keyset  = key_list.get_keyset('local')
        header  = COMMAND_PACKET_HEADER
        trailer = b''
    else:
        keyset  = key_list.get_keyset(rx_account)
        header  = MESSAGE_PACKET_HEADER
        trailer = tx_account.encode() + US_BYTE + rx_account.encode()

    harac_in_bytes    = int_to_bytes(keyset.tx_harac)
    encrypted_harac   = encrypt_and_sign(harac_in_bytes, keyset.tx_hek)
    encrypted_message = encrypt_and_sign(packet, keyset.tx_key)
    encrypted_packet  = header + encrypted_harac + encrypted_message + trailer

    transmit(encrypted_packet, settings, gateway)
    keyset.rotate_tx_key()

    if logging and rx_account is not None:
        l_queue.put((packet, rx_account, settings, key_list.master_key))

def store_settings(self) -> None:
    """Store settings to encrypted database."""
    attribute_list = [self.__getattribute__(k) for k in self.key_list]

    # Convert attributes into constant length byte string
    pt_bytes = b''
    for a in attribute_list:
        if isinstance(a, bool):
            pt_bytes += bool_to_bytes(a)
        elif isinstance(a, int):
            pt_bytes += int_to_bytes(a)
        elif isinstance(a, float):
            pt_bytes += double_to_bytes(a)
        elif isinstance(a, str):
            pt_bytes += str_to_bytes(a)
        else:
            raise CriticalError("Invalid attribute type in settings.")

    ct_bytes = encrypt_and_sign(pt_bytes, self.master_key.master_key)

    ensure_dir(f'{DIR_USER_DATA}/')
    with open(self.file_name, 'wb+') as f:
        f.write(ct_bytes)

def test_function(self):
    # Setup
    ts           = datetime.datetime.now()
    no_msg       = int_to_bytes(1)
    cmd_data     = b'*****@*****.**' + US_BYTE + no_msg
    window_list  = WindowList()
    contact_list = ContactList()
    settings     = Settings()
    master_key   = MasterKey()

    write_log_entry(F_S_HEADER + bytes(255), '*****@*****.**', settings, master_key)

    # Test
    self.assertIsNone(
        export_logs(cmd_data, ts, window_list, contact_list, settings, master_key))

    os.remove('Unittest - Plaintext log (None)')
    cleanup()

def setUp(self):
    self.unittest_dir = cd_unittest()
    self.cmd_data     = int_to_bytes(1) + nick_to_pub_key("Bob")
    self.ts           = datetime.now()
    self.window_list  = WindowList(nicks=['Alice', 'Bob'])
    self.window       = self.window_list.get_window(nick_to_pub_key("Bob"))

    self.window.type_print = 'contact'
    self.window.name       = 'Bob'
    self.window.type       = WIN_TYPE_CONTACT

    self.contact_list = ContactList(nicks=['Alice', 'Bob'])
    self.group_list   = GroupList()
    self.settings     = Settings()
    self.master_key   = MasterKey()
    self.args         = (self.ts, self.window_list, self.contact_list,
                         self.group_list, self.settings, self.master_key)

    time_float = struct.unpack('<L', bytes.fromhex('08ceae02'))[0]
    self.time  = datetime.fromtimestamp(time_float).strftime("%H:%M:%S.%f")[:-4]

def queue_packet(key, hek, tx_harac, packet, rx_account=None):
    if rx_account is None:
        header  = COMMAND_PACKET_HEADER
        trailer = b''
        queue   = queues[COMMAND_PACKET_HEADER]
        packet  = split_to_assembly_packets(packet, COMMAND)[0]
    else:
        header  = MESSAGE_PACKET_HEADER
        trailer = ORIGIN_CONTACT_HEADER + rx_account
        queue   = queues[MESSAGE_PACKET_HEADER]
        packet  = split_to_assembly_packets(packet, MESSAGE)[0]

    encrypted_harac   = encrypt_and_sign(int_to_bytes(tx_harac), hek)
    encrypted_message = encrypt_and_sign(packet, key)
    encrypted_packet  = header + encrypted_harac + encrypted_message + trailer

    queue.put((datetime.datetime.now(), encrypted_packet))
    time.sleep(0.1)

def print_logs(user_input:   'UserInput',
               window:       'Window',
               contact_list: 'ContactList',
               settings:     'Settings',
               c_queue:      'Queue',
               master_key:   'MasterKey') -> None:
    """Print log files on screen."""
    try:
        no_messages_str = user_input.plaintext.split()[1]
        if not no_messages_str.isdigit():
            raise FunctionReturn("Specified invalid number of messages to print.")
        no_messages = int(no_messages_str)
    except IndexError:
        no_messages = 0

    packet = LOG_DISPLAY_HEADER + window.uid.encode() + US_BYTE + int_to_bytes(no_messages)
    queue_command(packet, settings, c_queue)

    access_history(window, contact_list, settings, master_key, no_messages)

def update_delivery_time(self) -> None:
    """Calculate transmission time.

    Transmission time is based on average delays and settings.
    """
    no_packets = self.count_number_of_packets()

    if self.settings.session_traffic_masking:
        avg_delay = self.settings.traffic_masking_static_delay + (self.settings.traffic_masking_random_delay / 2)
        if self.settings.multi_packet_random_delay:
            avg_delay += (self.settings.max_duration_of_random_delay / 2)

        total_time  = len(self.window) * no_packets * avg_delay
        total_time *= 2  # Accommodate command packets between file packets
        total_time += no_packets * TRAFFIC_MASKING_QUEUE_CHECK_DELAY

    else:
        # Determine total data to be transmitted over serial
        rs         = RSCodec(2 * self.settings.session_serial_error_correction)
        total_data = 0
        for c in self.window:
            data        = os.urandom(PACKET_LENGTH) + c.rx_account.encode() + c.tx_account.encode()
            enc_data    = rs.encode(data)
            total_data += no_packets * len(enc_data)

        # Determine time required to send all data
        total_time = 0.0
        if self.settings.local_testing_mode:
            total_time += no_packets * LOCAL_TESTING_PACKET_DELAY
        else:
            total_bauds = total_data * BAUDS_PER_BYTE
            total_time += total_bauds / self.settings.session_serial_baudrate
            total_time += no_packets * self.settings.txm_inter_packet_delay

        if self.settings.multi_packet_random_delay:
            total_time += no_packets * (self.settings.max_duration_of_random_delay / 2)

    # Update delivery time
    self.time_bytes = int_to_bytes(int(total_time))
    self.time_print = str(datetime.timedelta(seconds=int(total_time)))

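# A back-of-the-envelope version of the serial branch above, with illustrative
# numbers (assume a 256-byte assembly packet, an error-correction setting of 5
# doubled to 10 Reed-Solomon parity bytes, roughly 10 bauds per byte of serial
# framing, and a 19200 baud link):
packet_len = 256 + 2 * 5      # data + parity bytes per transmitted packet
bauds      = packet_len * 10  # serial framing overhead: ~10 bauds per byte
seconds    = bauds / 19200    # ~0.14 s per packet before inter-packet delays
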
def rotate_tx_mk(self) -> None:
    """\
    Update Transmitter Program's tx-message key and tx-harac.

    Replacing the key with its hash provides per-message forward
    secrecy for sent messages. The hash ratchet used is also known as
    the SCIMP Ratchet [1], and it is widely used, e.g., as part of
    Signal's Double Ratchet [2].

    To ensure the hash ratchet does not fall into a short cycle of
    keys, the harac (a non-repeating value) is used as an additional
    input when deriving the next key.

     [1] (pp. 17-18) https://netzpolitik.org/wp-upload/SCIMP-paper.pdf
     [2] https://signal.org/blog/advanced-ratcheting/
    """
    self.tx_mk     = blake2b(self.tx_mk + int_to_bytes(self.tx_harac), digest_size=SYMMETRIC_KEY_LENGTH)
    self.tx_harac += 1
    self.store_keys()

def queue_file(window:   'Window',
               settings: 'Settings',
               f_queue:  'Queue',
               gateway:  'Gateway') -> None:
    """Ask for file path and load file data."""
    path = ask_path_gui("Select file to send...", settings, get_file=True)
    file = File(path, window, settings, gateway)

    name    = file.name.decode()
    size    = file.size.decode()
    payload = file.plaintext

    if len(payload) < 255:
        padded      = byte_padding(payload)
        packet_list = [F_S_HEADER + padded]
    else:
        payload = bytes(8) + payload
        padded  = byte_padding(payload)
        p_list  = split_byte_string(padded, item_len=255)

        #                            < number of packets >
        packet_list = ([F_L_HEADER + int_to_bytes(len(p_list)) + p_list[0][8:]] +
                       [F_A_HEADER + p for p in p_list[1:-1]] +
                       [F_E_HEADER + p_list[-1]])

    for p in packet_list:
        assert len(p) == 256

    if settings.confirm_sent_files:
        if not yes(f"Send {name} ({size}) to {window.type} {window.name} "
                   f"({len(packet_list)} packets, time: {file.time_s})?", tail=1):
            raise FunctionReturn("File selection aborted.")

    if settings.session_trickle:
        log_m_dictionary = dict((c.rx_account, c.log_messages) for c in window)
        for p in packet_list:
            f_queue.put((p, log_m_dictionary))
    else:
        for c in window:
            for p in packet_list:
                f_queue.put((p, settings, c.rx_account, c.tx_account, c.log_messages, window.uid))