def setUp(self) -> None:
    """Pre-test actions."""
    self.unit_test_dir = cd_unit_test()
    self.file_name = f'{DIR_USER_DATA}ut_logs'
    self.temp_name = self.file_name + '_temp'
    self.settings = Settings()
    self.database_key = os.urandom(SYMMETRIC_KEY_LENGTH)
    self.message_log = MessageLog(self.file_name, self.database_key)
def access_logs(window: Union['TxWindow', 'RxWindow'],
                contact_list: 'ContactList',
                group_list: 'GroupList',
                settings: 'Settings',
                master_key: 'MasterKey',
                msg_to_load: int = 0,
                export: bool = False
                ) -> None:
    """\
    Load 'msg_to_load' last messages from log database and display or
    export them.

    The default value of zero for `msg_to_load` means all messages for
    the window will be retrieved from the log database.
    """
    file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
    packet_list = PacketList(settings, contact_list)
    message_list = []  # type: List[MsgTuple]
    group_msg_id = b''

    check_log_file_exists(file_name)
    message_log = MessageLog(file_name, master_key.master_key)

    for log_entry in message_log:
        onion_pub_key, timestamp, origin, assembly_packet = separate_headers(
            log_entry, [ONION_SERVICE_PUBLIC_KEY_LENGTH, TIMESTAMP_LENGTH, ORIGIN_HEADER_LENGTH])

        if window.type == WIN_TYPE_CONTACT and onion_pub_key != window.uid:
            continue

        packet = packet_list.get_packet(onion_pub_key, origin, MESSAGE, log_access=True)
        try:
            packet.add_packet(assembly_packet)
        except SoftError:
            continue
        if not packet.is_complete:
            continue

        group_msg_id = add_complete_message_to_message_list(
            timestamp, onion_pub_key, group_msg_id, packet, message_list, window)

    message_log.close_database()

    print_logs(message_list[-msg_to_load:], export, msg_to_load, window, contact_list, group_list, settings)
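# Hedged usage sketch (not part of the module): how access_logs() above might be
# invoked from a command handler. The window, contact_list, group_list, settings
# and master_key objects are assumed to come from the caller's runtime state;
# msg_to_load=10 is an illustrative value.
def show_last_ten_messages_example(window, contact_list, group_list, settings, master_key) -> None:
    """Display the ten most recent messages of the active window (sketch)."""
    # msg_to_load limits the printout to the last N complete messages; the
    # default of 0 would load the window's entire history instead.
    access_logs(window, contact_list, group_list, settings, master_key, msg_to_load=10)
    # Passing export=True would instead write a plaintext log file named after
    # the window, as exercised by the export test below.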
def setUp(self) -> None:
    """Pre-test actions."""
    self.unit_test_dir = cd_unit_test()
    self.master_key = MasterKey()
    self.settings = Settings()
    self.time = STATIC_TIMESTAMP
    self.contact_list = ContactList(self.master_key, self.settings)
    self.group_list = GroupList(groups=['test_group'])
    self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
    self.tmp_file_name = self.file_name + "_temp"
    self.tfc_log_database = MessageLog(self.file_name, self.master_key.master_key)
    self.args = self.contact_list, self.group_list, self.settings, self.master_key

    self.msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
                " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
                "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
                "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
                "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
                "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
                "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
                "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
                "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
                "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.")
def test_export_short_private_message(self, _: Any) -> None:
    # Setup
    # Test title displayed by the Receiver program.
    self.settings.software_operation = RX
    self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
    self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key)

    # Add a message from contact Alice to user (Bob).
    for p in assembly_packet_creator(MESSAGE, 'Hi Bob'):
        write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database, origin=ORIGIN_CONTACT_HEADER)

    # Add a message from user (Bob) to Alice.
    for p in assembly_packet_creator(MESSAGE, 'Hi Alice'):
        write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database)

    # Test
    self.assertIsNone(access_logs(*self.args, export=True))

    with open("Receiver - Plaintext log (Alice)") as f:
        self.assertEqual(f.read(), f"""\
Log file of message(s) to/from contact Alice
════════════════════════════════════════════════════════════════════════════════
{self.time} Alice: Hi Bob
{self.time} Me: Hi Alice
<End of log file>
""")
def setUp(self) -> None:
    """Pre-test actions."""
    self.unit_test_dir = cd_unit_test()
    self.master_key = MasterKey()
    self.settings = Settings()
    self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
    self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key)
def setUp(self) -> None:
    """Pre-test actions."""
    self.unit_test_dir = cd_unit_test()
    self.old_master_key = MasterKey()
    self.new_master_key = MasterKey(master_key=os.urandom(SYMMETRIC_KEY_LENGTH))
    self.settings = Settings()
    self.tmp_file_name = f"{DIR_USER_DATA}{self.settings.software_operation}_logs_temp"
    self.time = STATIC_TIMESTAMP
    self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
    self.message_log = MessageLog(self.log_file, self.old_master_key.master_key)
def change_log_db_key(old_key: bytes, new_key: bytes, settings: 'Settings') -> None:
    """Re-encrypt the log database with a new master key."""
    ensure_dir(DIR_USER_DATA)

    file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
    temp_name = file_name + TEMP_SUFFIX

    if not os.path.isfile(file_name):
        raise SoftError("No log database available.")

    if os.path.isfile(temp_name):
        os.remove(temp_name)

    message_log_old = MessageLog(file_name, old_key)
    message_log_tmp = MessageLog(temp_name, new_key)

    for log_entry in message_log_old:
        message_log_tmp.insert_log_entry(log_entry)

    message_log_old.close_database()
    message_log_tmp.close_database()
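# Hedged usage sketch (illustrative, not from the module): rotating the log
# database key after a master key change. The key arguments and `settings` are
# assumed to come from the caller; whether the caller or some other routine
# swaps the resulting '<logfile>_temp' file over the original is an assumption
# here, modeled on the os.replace() step seen in remove_logs() below.
def rekey_log_database_example(old_master_key: bytes, new_master_key: bytes, settings: 'Settings') -> None:
    """Copy every log entry into a database encrypted with the new key (sketch)."""
    change_log_db_key(old_master_key, new_master_key, settings)

    # Assumption: finalize the rotation by atomically replacing the old database.
    file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
    os.replace(file_name + TEMP_SUFFIX, file_name)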
def setUp(self) -> None:
    """Pre-test actions."""
    self.unit_test_dir = cd_unit_test()
    self.ts = datetime.now()
    self.master_key = MasterKey()
    self.settings = Settings()
    self.contact_list = ContactList(nicks=[LOCAL_ID])
    self.window_list = WindowList(nicks=[LOCAL_ID])
    self.group_list = GroupList()
    self.key_list = KeyList()
    self.args = (self.ts, self.window_list, self.contact_list, self.group_list,
                 self.key_list, self.settings, self.master_key)

    self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
    self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key)
def write_log_entry(assembly_packet: bytes,                       # Assembly packet to log
                    onion_pub_key: bytes,                         # Onion Service public key of the associated contact
                    message_log: MessageLog,                      # MessageLog object
                    origin: bytes = ORIGIN_USER_HEADER,           # The direction of logged packet
                    ) -> None:
    """Add an assembly packet to the encrypted log database.

    Logging assembly packets allows reconstruction of conversation
    while protecting metadata about the length of messages alternative
    log file formats could reveal to a physical attacker.

    Transmitter Program can only log sent messages. This is not useful
    for recalling conversations but it makes it possible to audit
    recipient's Destination Computer-side logs, where malware could
    have substituted content of the sent messages.

    Files are not produced or accessed by TFC. Thus, keeping a copy of
    file data in the log database is pointless and potentially
    dangerous, because the user should be right to assume deleting the
    file from `received_files` directory is enough. However, from the
    perspective of metadata, a difference between the number of logged
    packets and the number of output packets could reveal additional
    metadata about communication. Thus, during traffic masking, if
    `settings.log_file_masking` is enabled, instead of file data, TFC
    writes placeholder data to the log database.
    """
    timestamp = struct.pack('<L', int(time.time()))
    log_entry = onion_pub_key + timestamp + origin + assembly_packet

    if len(log_entry) != LOG_ENTRY_LENGTH:
        raise CriticalError("Invalid log entry length.")

    ensure_dir(DIR_USER_DATA)
    message_log.insert_log_entry(log_entry)
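# Hedged usage sketch: logging every assembly packet of one outgoing message.
# `split_to_assembly_packets` is the same splitter used elsewhere in this
# section; `onion_pub_key` and `message_log` are assumed to come from the
# caller's context. Incoming packets would pass origin=ORIGIN_CONTACT_HEADER
# instead, as the tests above do.
def log_outgoing_message_example(plaintext: bytes, onion_pub_key: bytes, message_log: MessageLog) -> None:
    """Write each assembly packet of a sent message to the encrypted log (sketch)."""
    for assembly_packet in split_to_assembly_packets(plaintext, MESSAGE):
        # origin defaults to ORIGIN_USER_HEADER, marking the packet as sent by the user.
        write_log_entry(assembly_packet, onion_pub_key, message_log)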
def setUp(self) -> None:
    """Pre-test actions."""
    self.unit_test_dir = cd_unit_test()
    self.cmd_data = int_to_bytes(1) + nick_to_pub_key("Bob")
    self.ts = datetime.now()
    self.window_list = WindowList(nicks=['Alice', 'Bob'])
    self.window = self.window_list.get_window(nick_to_pub_key("Bob"))
    self.window.type_print = WIN_TYPE_CONTACT
    self.window.name = 'Bob'
    self.window.type = WIN_TYPE_CONTACT
    self.contact_list = ContactList(nicks=['Alice', 'Bob'])
    self.group_list = GroupList()
    self.settings = Settings(software_operation=RX)
    self.master_key = MasterKey(operation=RX, local_test=True)
    self.args = (self.ts, self.window_list, self.contact_list, self.group_list,
                 self.settings, self.master_key)

    self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
    self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key)

    time_float = struct.unpack('<L', bytes.fromhex('08ceae02'))[0]
    self.time = datetime.fromtimestamp(time_float).strftime("%H:%M:%S.%f")[:-4]
def test_valid_temp_database_is_loaded(self) -> None:
    log_file = MessageLog(self.file_name, database_key=self.database_key)
    tmp_file = MessageLog(self.temp_name, database_key=self.database_key)

    log_file.insert_log_entry(b'a')
    log_file.insert_log_entry(b'b')
    log_file.insert_log_entry(b'c')
    log_file.insert_log_entry(b'd')
    log_file.insert_log_entry(b'e')

    tmp_file.insert_log_entry(b'f')
    tmp_file.insert_log_entry(b'g')
    tmp_file.insert_log_entry(b'h')
    tmp_file.insert_log_entry(b'i')
    tmp_file.insert_log_entry(b'j')

    self.assertTrue(os.path.isfile(self.temp_name))

    log_file = MessageLog(self.file_name, database_key=self.database_key)
    self.assertEqual(list(log_file), [b'f', b'g', b'h', b'i', b'j'])

    self.assertFalse(os.path.isfile(self.temp_name))
def test_invalid_temp_database_is_not_loaded(self) -> None:
    log_file = MessageLog(self.file_name, database_key=self.database_key)
    tmp_file = MessageLog(self.temp_name, database_key=self.database_key)

    log_file.insert_log_entry(b'a')
    log_file.insert_log_entry(b'b')
    log_file.insert_log_entry(b'c')
    log_file.insert_log_entry(b'd')
    log_file.insert_log_entry(b'e')

    tmp_file.insert_log_entry(b'a')
    tmp_file.insert_log_entry(b'b')
    tmp_file.c.execute(f"""INSERT INTO log_entries (log_entry) VALUES (?)""", (b'c',))
    tmp_file.conn.commit()
    tmp_file.insert_log_entry(b'd')
    tmp_file.insert_log_entry(b'e')

    self.assertTrue(os.path.isfile(self.temp_name))

    log_file = MessageLog(self.file_name, database_key=self.database_key)
    self.assertEqual(list(log_file), [b'a', b'b', b'c', b'd', b'e'])

    self.assertFalse(os.path.isfile(self.temp_name))
def test_loop(self, *_) -> None:
    # Setup
    queues = gen_queue_dict()
    kek = SYMMETRIC_KEY_LENGTH * b"a"
    conf_code = bytes(1)
    tx_pub_key = nick_to_pub_key("Bob")
    o_sleep = self.o_sleep
    test_delay = 0.3

    def queue_packet(mk, hk, tx_harac, packet, onion_pub_key=None) -> None:
        """Create encrypted datagram."""
        if onion_pub_key is None:
            header = b""
            queue = queues[COMMAND_DATAGRAM_HEADER]
            packet = split_to_assembly_packets(packet, COMMAND)[0]
        else:
            header = onion_pub_key + ORIGIN_CONTACT_HEADER
            queue = queues[MESSAGE_DATAGRAM_HEADER]
            packet = split_to_assembly_packets(packet, MESSAGE)[0]

        encrypted_harac = encrypt_and_sign(int_to_bytes(tx_harac), hk)
        encrypted_message = encrypt_and_sign(packet, mk)
        encrypted_packet = header + encrypted_harac + encrypted_message
        queue.put((datetime.datetime.now(), encrypted_packet))

    def queue_delayer() -> None:
        """Place datagrams into queue after delay."""
        o_sleep(test_delay)
        local_harac = INITIAL_HARAC
        tx_harac = INITIAL_HARAC
        local_hek = SYMMETRIC_KEY_LENGTH * b"a"
        file_key = SYMMETRIC_KEY_LENGTH * b"b"
        local_key = SYMMETRIC_KEY_LENGTH * b"a"
        tx_mk = SYMMETRIC_KEY_LENGTH * b"a"
        tx_hk = SYMMETRIC_KEY_LENGTH * b"a"

        # Queue local key packet
        local_key_packet = encrypt_and_sign(local_key + local_hek + conf_code, key=kek)
        queues[LOCAL_KEY_DATAGRAM_HEADER].put((datetime.datetime.now(), local_key_packet))
        o_sleep(test_delay)

        # Select file window
        command = WIN_SELECT + WIN_UID_FILE
        queue_packet(local_key, tx_hk, local_harac, command)
        local_key, local_harac = rotate_key(local_key, local_harac)
        o_sleep(test_delay)

        # Select local window
        command = WIN_SELECT + WIN_UID_COMMAND
        queue_packet(local_key, tx_hk, local_harac, command)
        local_key, local_harac = rotate_key(local_key, local_harac)
        o_sleep(test_delay)

        # A message that goes to buffer
        queue_packet(tx_mk, tx_hk, tx_harac,
                     bool_to_bytes(False) + PRIVATE_MESSAGE_HEADER + b"Hi Bob", tx_pub_key)
        tx_mk, tx_harac = rotate_key(tx_mk, tx_harac)

        # ECDHE keyset for Bob
        command = (KEY_EX_ECDHE
                   + nick_to_pub_key("Bob")
                   + (4 * SYMMETRIC_KEY_LENGTH * b"a")
                   + str_to_bytes("Bob"))
        queue_packet(local_key, tx_hk, local_harac, command)
        local_key, local_harac = rotate_key(local_key, local_harac)
        o_sleep(test_delay)

        # Message for Bob
        queue_packet(tx_mk, tx_hk, tx_harac,
                     bool_to_bytes(False) + PRIVATE_MESSAGE_HEADER + b"Hi Bob", tx_pub_key)
        tx_mk, tx_harac = rotate_key(tx_mk, tx_harac)
        o_sleep(test_delay)

        # Enable file reception for Bob
        command = CH_FILE_RECV + ENABLE.upper() + US_BYTE
        queue_packet(local_key, tx_hk, local_harac, command)
        o_sleep(test_delay)

        # File packet from Bob
        ct = encrypt_and_sign(b"test", file_key)
        f_hash = blake2b(ct)
        packet = nick_to_pub_key("Bob") + ORIGIN_CONTACT_HEADER + ct
        queues[FILE_DATAGRAM_HEADER].put((datetime.datetime.now(), packet))
        o_sleep(test_delay)

        # File key packet from Bob
        queue_packet(tx_mk, tx_hk, tx_harac,
                     bool_to_bytes(False) + FILE_KEY_HEADER + base64.b85encode(f_hash + file_key),
                     tx_pub_key)
        o_sleep(test_delay)

        # Queue exit message to break the loop
        o_sleep(0.5)
        queues[UNIT_TEST_QUEUE].put(EXIT)
        o_sleep(test_delay)

    threading.Thread(target=queue_delayer).start()

    # Test
    master_key = MasterKey()
    settings = Settings()
    message_log = MessageLog(f"{DIR_USER_DATA}{settings.software_operation}_logs", master_key.master_key)

    self.assertIsNone(output_loop(queues, Gateway(), settings, ContactList(), KeyList(),
                                  GroupList(), master_key, message_log, stdin_fd=1, unit_test=True))

    # Teardown
    tear_queues(queues)
class TestMessageLog(unittest.TestCase):

    def setUp(self) -> None:
        """Pre-test actions."""
        self.unit_test_dir = cd_unit_test()
        self.file_name = f'{DIR_USER_DATA}ut_logs'
        self.temp_name = self.file_name + '_temp'
        self.settings = Settings()
        self.database_key = os.urandom(SYMMETRIC_KEY_LENGTH)
        self.message_log = MessageLog(self.file_name, self.database_key)

    def tearDown(self) -> None:
        """Post-test actions."""
        cleanup(self.unit_test_dir)

    def test_empty_log_database_is_verified(self) -> None:
        self.assertTrue(self.message_log.verify_file(self.file_name))

    def test_database_with_one_entry_is_verified(self) -> None:
        # Setup
        test_entry = b'test_log_entry'
        self.message_log.insert_log_entry(test_entry)

        # Test
        self.assertTrue(self.message_log.verify_file(self.file_name))

    def test_invalid_database_returns_false(self) -> None:
        # Setup
        self.message_log.c.execute("DROP TABLE log_entries")
        self.message_log.conn.commit()

        # Test
        self.assertFalse(self.message_log.verify_file(self.file_name))

    def test_invalid_entry_returns_false(self) -> None:
        # Setup
        params = (os.urandom(LOG_ENTRY_LENGTH),)
        self.message_log.c.execute(f"""INSERT INTO log_entries (log_entry) VALUES (?)""", params)
        self.message_log.conn.commit()

        # Test
        self.assertFalse(self.message_log.verify_file(self.file_name))

    def test_table_creation(self) -> None:
        self.assertIsInstance(self.message_log, MessageLog)
        self.assertTrue(os.path.isfile(self.file_name))

    def test_writing_to_log_database(self) -> None:
        data = os.urandom(LOG_ENTRY_LENGTH)
        self.assertIsNone(self.message_log.insert_log_entry(data))

    def test_iterating_over_log_database(self) -> None:
        data = [os.urandom(LOG_ENTRY_LENGTH), os.urandom(LOG_ENTRY_LENGTH)]
        for entry in data:
            self.assertIsNone(self.message_log.insert_log_entry(entry))

        for index, stored_entry in enumerate(self.message_log):
            self.assertEqual(stored_entry, data[index])

    def test_invalid_temp_database_is_not_loaded(self) -> None:
        log_file = MessageLog(self.file_name, database_key=self.database_key)
        tmp_file = MessageLog(self.temp_name, database_key=self.database_key)

        log_file.insert_log_entry(b'a')
        log_file.insert_log_entry(b'b')
        log_file.insert_log_entry(b'c')
        log_file.insert_log_entry(b'd')
        log_file.insert_log_entry(b'e')

        tmp_file.insert_log_entry(b'a')
        tmp_file.insert_log_entry(b'b')
        tmp_file.c.execute(f"""INSERT INTO log_entries (log_entry) VALUES (?)""", (b'c',))
        tmp_file.conn.commit()
        tmp_file.insert_log_entry(b'd')
        tmp_file.insert_log_entry(b'e')

        self.assertTrue(os.path.isfile(self.temp_name))

        log_file = MessageLog(self.file_name, database_key=self.database_key)
        self.assertEqual(list(log_file), [b'a', b'b', b'c', b'd', b'e'])

        self.assertFalse(os.path.isfile(self.temp_name))

    def test_valid_temp_database_is_loaded(self) -> None:
        log_file = MessageLog(self.file_name, database_key=self.database_key)
        tmp_file = MessageLog(self.temp_name, database_key=self.database_key)

        log_file.insert_log_entry(b'a')
        log_file.insert_log_entry(b'b')
        log_file.insert_log_entry(b'c')
        log_file.insert_log_entry(b'd')
        log_file.insert_log_entry(b'e')

        tmp_file.insert_log_entry(b'f')
        tmp_file.insert_log_entry(b'g')
        tmp_file.insert_log_entry(b'h')
        tmp_file.insert_log_entry(b'i')
        tmp_file.insert_log_entry(b'j')

        self.assertTrue(os.path.isfile(self.temp_name))

        log_file = MessageLog(self.file_name, database_key=self.database_key)
        self.assertEqual(list(log_file), [b'f', b'g', b'h', b'i', b'j'])

        self.assertFalse(os.path.isfile(self.temp_name))

    def test_database_closing(self) -> None:
        self.message_log.close_database()

        # Test insertion would fail at this point
        with self.assertRaises(sqlite3.ProgrammingError):
            self.message_log.c.execute(f"""INSERT INTO log_entries (log_entry) VALUES (?)""",
                                       (os.urandom(LOG_ENTRY_LENGTH),))

        # Test closed database is re-opened during write
        data = os.urandom(LOG_ENTRY_LENGTH)
        self.assertIsNone(self.message_log.insert_log_entry(data))
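# Hedged API sketch distilled from the tests above: a MessageLog object is
# created with a file name and a symmetric database key, entries are appended
# with insert_log_entry(), read back by iterating over the object, and the
# SQLite connection is released with close_database(). The entry contents and
# the helper's name are illustrative only.
def message_log_roundtrip_example(file_name: str, database_key: bytes) -> list:
    """Write two entries and read them back in insertion order (sketch)."""
    message_log = MessageLog(file_name, database_key)
    message_log.insert_log_entry(b'first entry')
    message_log.insert_log_entry(b'second entry')
    entries = list(message_log)    # iteration yields the decrypted entries in order
    message_log.close_database()
    return entries                 # expected: [b'first entry', b'second entry']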
def remove_logs(contact_list: 'ContactList',
                group_list: 'GroupList',
                settings: 'Settings',
                master_key: 'MasterKey',
                selector: bytes
                ) -> None:
    """\
    Remove log entries for selector (public key of an account/group ID).

    If the selector is a public key, all messages (both the private
    conversation and any associated group messages) sent to and
    received from the associated contact are removed. If the selector
    is a group ID, only messages for the group matching that group ID
    are removed.
    """
    ensure_dir(DIR_USER_DATA)
    file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
    temp_name = file_name + TEMP_SUFFIX
    packet_list = PacketList(settings, contact_list)
    entries_to_keep = []  # type: List[bytes]
    removed = False
    contact = len(selector) == ONION_SERVICE_PUBLIC_KEY_LENGTH

    check_log_file_exists(file_name)
    message_log = MessageLog(file_name, master_key.master_key)

    for log_entry in message_log:
        onion_pub_key, _, origin, assembly_packet = separate_headers(
            log_entry, [ONION_SERVICE_PUBLIC_KEY_LENGTH, TIMESTAMP_LENGTH, ORIGIN_HEADER_LENGTH])

        if contact:
            if onion_pub_key == selector:
                removed = True
            else:
                entries_to_keep.append(log_entry)

        else:  # Group
            packet = packet_list.get_packet(onion_pub_key, origin, MESSAGE, log_access=True)
            try:
                packet.add_packet(assembly_packet, log_entry)
            except SoftError:
                continue
            if not packet.is_complete:
                continue

            removed = check_packet_fate(entries_to_keep, packet, removed, selector)

    message_log.close_database()

    message_log_temp = MessageLog(temp_name, master_key.master_key)

    for log_entry in entries_to_keep:
        message_log_temp.insert_log_entry(log_entry)
    message_log_temp.close_database()

    os.replace(temp_name, file_name)

    try:
        name = contact_list.get_nick_by_pub_key(selector) if contact else group_list.get_group_by_id(selector).name
    except StopIteration:
        name = pub_key_to_short_address(selector) if contact else b58encode(selector)

    action = "Removed" if removed else "Found no"
    win_type = "contact" if contact else "group"

    raise SoftError(f"{action} log entries for {win_type} '{name}'.")
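# Hedged usage sketch: purging all log entries for one contact. The selector is
# the contact's Onion Service public key; remove_logs() always raises SoftError
# with a status string ("Removed ..." / "Found no ..."), which the caller is
# assumed to surface to the user. The helper name and print() call are
# illustrative, not part of the module.
def remove_contact_logs_example(onion_pub_key: bytes, contact_list, group_list, settings, master_key) -> None:
    """Remove a single contact's messages from the log database (sketch)."""
    try:
        remove_logs(contact_list, group_list, settings, master_key, selector=onion_pub_key)
    except SoftError as status:
        print(status)  # assumption: report the outcome to the user here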
def test_long_group_message(self, _: Any) -> None:
    # Setup
    # Test title displayed by the Receiver program.
    self.settings.software_operation = RX
    self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
    self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key)

    self.window = RxWindow(type=WIN_TYPE_GROUP,
                           uid=group_name_to_group_id('test_group'),
                           name='test_group',
                           group=self.group,
                           type_print='group')

    # Add an assembly packet sequence sent to contact Alice in group containing cancel packet.
    # Access_logs should skip this.
    packets = assembly_packet_creator(MESSAGE, self.msg, group_id=group_name_to_group_id('test_group'))
    packets = packets[2:] + [M_C_HEADER + bytes(PADDING_LENGTH)]
    for p in packets:
        write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database)

    # Add an orphaned 'append' assembly packet. access_logs should skip this.
    write_log_entry(M_A_HEADER + bytes(PADDING_LENGTH), nick_to_pub_key('Alice'), self.tfc_log_database)

    # Add a private message. access_logs should skip this.
    for p in assembly_packet_creator(MESSAGE, 'This is a short private message'):
        write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database)

    # Add a group message for a different group. access_logs should skip this.
    for p in assembly_packet_creator(MESSAGE, 'This is a short group message', group_id=GROUP_ID_LENGTH * b'1'):
        write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database)

    # Add messages to Alice and Charlie in group.
    # Add duplicate of outgoing message that should be skipped by access_logs.
    for p in assembly_packet_creator(MESSAGE, self.msg, group_id=group_name_to_group_id('test_group')):
        write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database)
        write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database, origin=ORIGIN_CONTACT_HEADER)
        write_log_entry(p, nick_to_pub_key('Charlie'), self.tfc_log_database)
        write_log_entry(p, nick_to_pub_key('Charlie'), self.tfc_log_database, origin=ORIGIN_CONTACT_HEADER)

    # Test
    self.assert_prints((CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\
Log file of message(s) to/from group test_group
════════════════════════════════════════════════════════════════════════════════
{self.time} Me: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendum velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id massa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut elit iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dictumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio rutrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec aliquam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis scelerisque. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum lobortis neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.
{self.time} Alice: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendum velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id massa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut elit iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dictumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio rutrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec aliquam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis scelerisque. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum lobortis neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.
{self.time} Charlie: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendum velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id massa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut elit iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dictumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio rutrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec aliquam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis scelerisque. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum lobortis neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.
<End of log file>
"""), access_logs, self.window, self.contact_list, self.group_list, self.settings, self.master_key)
def setUp(self) -> None:
    """Pre-test actions."""
    self.unit_test_dir = cd_unit_test()
    self.master_key = MasterKey()
    self.message_log = MessageLog(f'{DIR_USER_DATA}{TX}_logs', self.master_key.master_key)
def main() -> None:
    """Load persistent data and launch the Transmitter/Receiver Program.

    This function decrypts user data from databases and launches
    processes for Transmitter or Receiver Program. It then monitors the
    EXIT_QUEUE for EXIT/WIPE signals and each process in case one of
    them dies.

    If you're reading this code to get the big picture on how TFC
    works, start by looking at the loop functions below, defined as the
    target for each process, from top to bottom:

    From `input_loop` process, you can see how the Transmitter Program
    processes a message or command from the user, creates assembly
    packets for a message/file/command, and how those are eventually
    pushed into a multiprocessing queue, from where they are loaded by
    the `sender_loop`.

    The `sender_loop` process encrypts outgoing assembly packets, and
    outputs the encrypted datagrams to the Networked Computer. The
    process also sends assembly packets to the `log_writer_loop`.

    The `log_writer_loop` process filters out non-message assembly
    packets and if logging for contact is enabled, stores the message
    assembly packet into an encrypted log database.

    The `noise_loop` processes are used to provide the `sender_loop` an
    interface identical to that of the `input_loop`. The `sender_loop`
    uses the interface to load noise packets/commands when traffic
    masking is enabled.

    Refer to the file `relay.py` to see how the Relay Program on
    Networked Computer manages datagrams between the network and
    Source/Destination Computer.

    In Receiver Program (also launched by this file), the
    `gateway_loop` process acts as a buffer for incoming datagrams.
    This buffer is consumed by the `receiver_loop` process that
    organizes datagrams loaded from the buffer into a set of queues
    depending on datagram type. Finally, the `output_loop` process
    loads and processes datagrams from the queues in the order of
    priority.
    """
    working_dir = f'{os.getenv("HOME")}/{DIR_TFC}'
    ensure_dir(working_dir)
    os.chdir(working_dir)

    operation, local_test, data_diode_sockets, qubes = process_arguments()

    check_kernel_version()

    print_title(operation)

    master_key = MasterKey(operation, local_test)
    gateway = Gateway(operation, local_test, data_diode_sockets, qubes)
    settings = Settings(master_key, operation, local_test, qubes)
    contact_list = ContactList(master_key, settings)
    key_list = KeyList(master_key, settings)
    group_list = GroupList(master_key, settings, contact_list)
    message_log = MessageLog(f'{DIR_USER_DATA}{settings.software_operation}_logs', master_key.master_key)

    if settings.software_operation == TX:

        onion_service = OnionService(master_key)

        queues = {MESSAGE_PACKET_QUEUE:    Queue(),  # Standard messages
                  COMMAND_PACKET_QUEUE:    Queue(),  # Standard commands
                  TM_MESSAGE_PACKET_QUEUE: Queue(),  # Traffic masking messages
                  TM_FILE_PACKET_QUEUE:    Queue(),  # Traffic masking files
                  TM_COMMAND_PACKET_QUEUE: Queue(),  # Traffic masking commands
                  TM_NOISE_PACKET_QUEUE:   Queue(),  # Traffic masking noise packets
                  TM_NOISE_COMMAND_QUEUE:  Queue(),  # Traffic masking noise commands
                  RELAY_PACKET_QUEUE:      Queue(),  # Unencrypted datagrams to Networked Computer
                  LOG_PACKET_QUEUE:        Queue(),  # `log_writer_loop` assembly packets to be logged
                  LOG_SETTING_QUEUE:       Queue(),  # `log_writer_loop` logging state management between noise packets
                  TRAFFIC_MASKING_QUEUE:   Queue(),  # `log_writer_loop` traffic masking setting management commands
                  LOGFILE_MASKING_QUEUE:   Queue(),  # `log_writer_loop` logfile masking setting management commands
                  KEY_MANAGEMENT_QUEUE:    Queue(),  # `sender_loop` key database management commands
                  KEY_MGMT_ACK_QUEUE:      Queue(),  # `sender_loop` key management ACK messages to `input_loop`
                  SENDER_MODE_QUEUE:       Queue(),  # `sender_loop` default/traffic masking mode switch commands
                  WINDOW_SELECT_QUEUE:     Queue(),  # `sender_loop` window selection commands during traffic masking
                  EXIT_QUEUE:              Queue()   # EXIT/WIPE signal from `input_loop` to `main`
                  }  # type: Dict[bytes, Queue[Any]]

        process_list = [Process(target=input_loop, args=(queues, settings, gateway, contact_list, group_list,
                                                         master_key, onion_service, sys.stdin.fileno())),
                        Process(target=sender_loop, args=(queues, settings, gateway, key_list)),
                        Process(target=log_writer_loop, args=(queues, settings, message_log)),
                        Process(target=noise_loop, args=(queues, contact_list)),
                        Process(target=noise_loop, args=(queues,))]

    else:
        queues = {GATEWAY_QUEUE:             Queue(),  # Buffer for incoming datagrams
                  LOCAL_KEY_DATAGRAM_HEADER: Queue(),  # Local key datagrams
                  MESSAGE_DATAGRAM_HEADER:   Queue(),  # Message datagrams
                  FILE_DATAGRAM_HEADER:      Queue(),  # File datagrams
                  COMMAND_DATAGRAM_HEADER:   Queue(),  # Command datagrams
                  EXIT_QUEUE:                Queue()   # EXIT/WIPE signal from `output_loop` to `main`
                  }

        process_list = [Process(target=gateway_loop, args=(queues, gateway)),
                        Process(target=receiver_loop, args=(queues, gateway)),
                        Process(target=output_loop, args=(queues, gateway, settings, contact_list, key_list,
                                                          group_list, master_key, message_log,
                                                          sys.stdin.fileno()))]

    for p in process_list:
        p.start()

    monitor_processes(process_list, settings.software_operation, queues)