def key_ex_ecdhe(packet:       bytes,
                 ts:           'datetime',
                 window_list:  'WindowList',
                 contact_list: 'ContactList',
                 key_list:     'KeyList',
                 settings:     'Settings') -> None:
    """Add contact and symmetric keys derived from X448 shared key."""
    fields = separate_headers(packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH])
    onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk, nick_bytes = fields

    try:
        nick = bytes_to_str(nick_bytes)
    except (struct.error, UnicodeError):
        raise FunctionReturn("Error: Received invalid contact data")

    # Fingerprint fields are filled with placeholder null-bytes.
    contact_list.add_contact(onion_pub_key, nick,
                             bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH),
                             KEX_STATUS_NONE,
                             settings.log_messages_by_default,
                             settings.accept_files_by_default,
                             settings.show_notifications_by_default)

    key_list.add_keyset(onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk)

    message = f"Successfully added {nick}."
    window_list.get_local_window().add_new(ts, message)

    # Confirmation code is a short BLAKE2b digest of the contact's public key.
    c_code = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH)
    m_print([message, f"Confirmation code (to Transmitter): {c_code.hex()}"], box=True)
def key_ex_psk_tx(packet:       bytes,
                  ts:           'datetime',
                  window_list:  'WindowList',
                  contact_list: 'ContactList',
                  key_list:     'KeyList',
                  settings:     'Settings') -> None:
    """Add contact and Tx-PSKs."""
    onion_pub_key, tx_mk, _, tx_hk, _, nick_bytes = separate_headers(
        packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH])

    try:
        nick = bytes_to_str(nick_bytes)
    except (struct.error, UnicodeError):
        raise FunctionReturn("Error: Received invalid contact data")

    contact_list.add_contact(onion_pub_key, nick,
                             bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH),
                             KEX_STATUS_NO_RX_PSK,
                             settings.log_messages_by_default,
                             settings.accept_files_by_default,
                             settings.show_notifications_by_default)

    # Rx-side keys are stored as all-zero placeholders to mark that the
    # Rx-PSK has not been imported yet. The zero-keys do not allow
    # existential forgeries because `decrypt_assembly_packet` refuses to
    # decrypt with zero-keys.
    key_list.add_keyset(onion_pub_key=onion_pub_key,
                        tx_mk=tx_mk,
                        rx_mk=bytes(SYMMETRIC_KEY_LENGTH),
                        tx_hk=tx_hk,
                        rx_hk=bytes(SYMMETRIC_KEY_LENGTH))

    message = f"Added Tx-side PSK for {nick} ({pub_key_to_short_address(onion_pub_key)})."
    window_list.get_local_window().add_new(ts, message)

    m_print(message, bold=True, tail_clear=True, delay=1)
def key_ex_psk_tx(packet:       bytes,
                  ts:           'datetime',
                  window_list:  'WindowList',
                  contact_list: 'ContactList',
                  key_list:     'KeyList',
                  settings:     'Settings') -> None:
    """Add contact and Tx-PSKs."""
    onion_pub_key, tx_mk, _, tx_hk, _, nick_bytes = separate_headers(
        packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH])

    try:
        nick = bytes_to_str(nick_bytes)
    except (struct.error, UnicodeError):
        raise SoftError("Error: Received invalid contact data")

    contact_list.add_contact(onion_pub_key, nick,
                             bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH),
                             KEX_STATUS_NO_RX_PSK,
                             settings.log_messages_by_default,
                             settings.accept_files_by_default,
                             settings.show_notifications_by_default)

    # Rx-side keys are stored as all-zero placeholders to mark that the
    # Rx-PSK has not been imported yet. The zero-keys do not allow
    # existential forgeries because `decrypt_assembly_packet` refuses to
    # decrypt with zero-keys.
    key_list.add_keyset(onion_pub_key=onion_pub_key,
                        tx_mk=tx_mk,
                        rx_mk=bytes(SYMMETRIC_KEY_LENGTH),
                        tx_hk=tx_hk,
                        rx_hk=bytes(SYMMETRIC_KEY_LENGTH))

    # Confirmation code is a short BLAKE2b digest of the contact's public key.
    c_code  = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH)
    message = f"Added Tx-side PSK for {nick} ({pub_key_to_short_address(onion_pub_key)})."

    window_list.get_command_window().add_new(ts, message)

    m_print([message, f"Confirmation code (to Transmitter): {c_code.hex()}"], box=True)
def load_contacts(self) -> None:
    """Load contacts from encrypted database.

    Reads and decrypts the database file, splits the plaintext into
    fixed-size records, drops dummy (padding) records, and de-serializes
    each remaining record into a Contact appended to `self.contacts`.
    """
    with open(self.file_name, 'rb') as f:
        ct_bytes = f.read()

    pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)
    entries  = split_byte_string(pt_bytes, item_len=CONTACT_LENGTH)
    # Dummy padding records start with the dummy ID and carry no real data.
    contacts = [e for e in entries if not e.startswith(self.dummy_id)]

    for c in contacts:
        # NOTE(review): `assert` is stripped under `python -O`; an explicit
        # exception would be safer for database-integrity validation.
        assert len(c) == CONTACT_LENGTH

        # Record layout (per the slice offsets): three 1024-byte padded
        # string fields, two 32-byte fingerprints, three 1-byte booleans.
        self.contacts.append(Contact(rx_account    =bytes_to_str( c[   0:1024]),
                                     tx_account    =bytes_to_str( c[1024:2048]),
                                     nick          =bytes_to_str( c[2048:3072]),
                                     tx_fingerprint=              c[3072:3104],
                                     rx_fingerprint=              c[3104:3136],
                                     log_messages  =bytes_to_bool(c[3136:3137]),
                                     file_reception=bytes_to_bool(c[3137:3138]),
                                     notifications =bytes_to_bool(c[3138:3139])))
def load_contacts(self) -> None:
    """Load contacts from encrypted database."""
    ensure_dir(f'{DIR_USER_DATA}/')

    with open(self.file_name, 'rb') as f:
        ct_bytes = f.read()

    pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)

    # Each serialized contact is 3 * 1024 + 2 * 32 + 3 * 1 = 3139 bytes.
    entries  = split_byte_string(pt_bytes, item_len=3139)
    dummy_id = 'dummy_contact'.encode('utf-32')

    for entry in entries:
        # Dummy padding records start with the dummy ID and carry no data.
        if entry.startswith(dummy_id):
            continue

        self.contacts.append(Contact(bytes_to_str(entry[   0:1024]),
                                     bytes_to_str(entry[1024:2048]),
                                     bytes_to_str(entry[2048:3072]),
                                     entry[3072:3104],
                                     entry[3104:3136],
                                     bytes_to_bool(entry[3136:3137]),
                                     bytes_to_bool(entry[3137:3138]),
                                     bytes_to_bool(entry[3138:3139])))
def process_imported_file(ts: 'datetime', packet: bytes, window_list: 'WindowList', settings: 'Settings'):
    """Decrypt and store imported file.

    Repeatedly prompts the user for the Base58-encoded file key until the
    ciphertext authenticates, then decompresses the plaintext, validates the
    embedded file name and stores the file under DIR_IMPORTED.

    Raises FunctionReturn on user abort and on every validation error.
    """
    while True:
        try:
            print('')
            key = get_b58_key(B58_FILE_KEY, settings)
        except KeyboardInterrupt:
            # Ctrl+C during key entry aborts the whole import.
            raise FunctionReturn("File import aborted.", head=2)

        try:
            phase("Decrypting file", head=1)
            # First byte of `packet` is a header; the rest is the ciphertext.
            file_pt = auth_and_decrypt(packet[1:], key, soft_e=True)
            phase(DONE)
            break
        except (nacl.exceptions.CryptoError, nacl.exceptions.ValueError):
            phase('ERROR', done=True)
            c_print("Invalid decryption key. Try again.")
            # reps=7 rewinds over the prompt/status lines printed above —
            # presumably tied to the exact UI layout; verify if that changes.
            print_on_previous_line(reps=7, delay=1.5)
        except KeyboardInterrupt:
            phase('ABORT', done=True)
            raise FunctionReturn("File import aborted.")

    try:
        phase("Decompressing file")
        file_dc = zlib.decompress(file_pt)
        phase(DONE)
    except zlib.error:
        phase('ERROR', done=True)
        raise FunctionReturn("Error: Decompression of file data failed.")

    try:
        # Plaintext starts with a fixed-length padded file name field.
        f_name = bytes_to_str(file_dc[:PADDED_UTF32_STR_LEN])
    except UnicodeError:
        raise FunctionReturn("Error: Received file name had invalid encoding.")

    # Reject empty or non-printable names before touching the filesystem.
    if not f_name.isprintable() or not f_name:
        raise FunctionReturn("Error: Received file had an invalid name.")

    f_data     = file_dc[PADDED_UTF32_STR_LEN:]
    final_name = store_unique(f_data, DIR_IMPORTED, f_name)
    message    = f"Stored imported file as '{final_name}'"
    box_print(message, head=1)

    local_win = window_list.get_local_window()
    local_win.add_new(ts, message)
def process_file(
        ts:            'datetime',     # Timestamp of received_packet
        onion_pub_key: bytes,          # Onion Service pubkey of sender
        file_ct:       bytes,          # File ciphertext
        file_key:      bytes,          # File decryption key
        contact_list:  'ContactList',  # ContactList object
        window_list:   'WindowList',   # WindowList object
        settings:      'Settings'      # Settings object
) -> None:
    """Store file received from a contact.

    Decrypts and decompresses the file ciphertext, validates the embedded
    file name, stores the file data under the per-contact directory and
    posts a notification message to the appropriate window.

    Raises FunctionReturn on decryption, decompression and file-name
    validation errors.
    """
    nick = contact_list.get_contact_by_pub_key(onion_pub_key).nick

    phase("Processing received file", head=1)

    try:
        file_pt = auth_and_decrypt(file_ct, file_key)
    except nacl.exceptions.CryptoError:
        raise FunctionReturn(
            f"Error: Decryption key for file from {nick} was invalid.")

    try:
        # Decompression is capped at settings.max_decompress_size —
        # presumably to limit decompression bombs.
        file_dc = decompress(file_pt, settings.max_decompress_size)
    except zlib.error:
        raise FunctionReturn(f"Error: Failed to decompress file from {nick}.")
    phase(DONE)
    print_on_previous_line(reps=2)

    try:
        # Plaintext starts with a fixed-length padded UTF-32 file name field.
        file_name = bytes_to_str(file_dc[:PADDED_UTF32_STR_LENGTH])
    except UnicodeError:
        raise FunctionReturn(
            f"Error: Name of file from {nick} had invalid encoding.")

    # Reject empty, non-printable, or path-separator-containing names.
    # NOTE(review): a bare '..' name passes this check — confirm store_unique
    # cannot be abused to write outside file_dir.
    if not file_name.isprintable() or not file_name or '/' in file_name:
        raise FunctionReturn(f"Error: Name of file from {nick} was invalid.")

    f_data     = file_dc[PADDED_UTF32_STR_LENGTH:]
    file_dir   = f'{DIR_RECV_FILES}{nick}/'
    final_name = store_unique(f_data, file_dir, file_name)
    message    = f"Stored file from {nick} as '{final_name}'."

    # Under traffic masking, notify the currently active window (if any);
    # otherwise notify the sender's own window.
    if settings.traffic_masking and window_list.active_win is not None:
        window = window_list.active_win
    else:
        window = window_list.get_window(onion_pub_key)
    window.add_new(ts, message, onion_pub_key, output=True, event_msg=True)
def _load_contacts(self) -> None:
    """Load contacts from the encrypted database.

    The database content is read and decrypted, then split into
    CONTACT_LENGTH-byte blocks, one serialized contact per block. Blocks
    that begin with the dummy contact's public key are padding and are
    skipped; every remaining block is validated, sliced into its fields and
    decoded into a Contact object appended to `self.contacts`.
    """
    with open(self.file_name, 'rb') as f:
        ct_bytes = f.read()

    pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key, database=self.file_name)

    dummy_prefix = self.dummy_contact.onion_pub_key

    for block in split_byte_string(pt_bytes, item_len=CONTACT_LENGTH):
        if block.startswith(dummy_prefix):
            continue

        if len(block) != CONTACT_LENGTH:
            raise CriticalError("Invalid data in contact database.")

        (onion_pub_key, tx_fingerprint, rx_fingerprint, kex_status_byte,
         log_messages_byte, file_reception_byte, notifications_byte,
         nick_bytes) = separate_headers(block,
                                        [ONION_SERVICE_PUBLIC_KEY_LENGTH]
                                        + 2 * [FINGERPRINT_LENGTH]
                                        + [KEX_STATUS_LENGTH]
                                        + 3 * [ENCODED_BOOLEAN_LENGTH])

        self.contacts.append(Contact(onion_pub_key=onion_pub_key,
                                     tx_fingerprint=tx_fingerprint,
                                     rx_fingerprint=rx_fingerprint,
                                     kex_status=kex_status_byte,
                                     log_messages=bytes_to_bool(log_messages_byte),
                                     file_reception=bytes_to_bool(file_reception_byte),
                                     notifications=bytes_to_bool(notifications_byte),
                                     nick=bytes_to_str(nick_bytes)))
def process_imported_file(ts: 'datetime', packet: bytes, window_list: 'WindowList'):
    """Decrypt and store imported file.

    Repeatedly prompts the user for the Base58-encoded decryption key until
    the ciphertext authenticates, then decompresses the plaintext, validates
    the embedded file name and stores the file under DIR_IMPORTED.

    Raises FunctionReturn on user abort and on every validation error.
    """
    while True:
        try:
            print('')
            key = get_b58_key('imported_file')
            phase("Decrypting file", head=1)
            # First byte of `packet` is a header; the rest is the ciphertext.
            file_pt = auth_and_decrypt(packet[1:], key, soft_e=True)
            phase("Done")
            break
        except nacl.exceptions.CryptoError:
            c_print("Invalid decryption key. Try again.", head=2)
            print_on_previous_line(reps=6, delay=1.5)
        except KeyboardInterrupt:
            raise FunctionReturn("File import aborted.")

    try:
        phase("Decompressing file")
        file_dc = zlib.decompress(file_pt)
        phase("Done")
    except zlib.error:
        raise FunctionReturn("Decompression of file data failed.")

    try:
        # Plaintext starts with a 1024-byte padded file name field.
        f_name = bytes_to_str(file_dc[:1024])
    except UnicodeError:
        raise FunctionReturn("Received file had an invalid name.")

    # FIX: ''.isprintable() returns True, so a file whose name field decoded
    # to an empty string previously passed validation. Reject empty names
    # explicitly as well.
    if not f_name.isprintable() or not f_name:
        raise FunctionReturn("Received file had an invalid name.")

    f_data     = file_dc[1024:]
    final_name = store_unique(f_data, DIR_IMPORTED, f_name)
    message    = "Stored imported file to {}/{}".format(DIR_IMPORTED, final_name)
    box_print(message, head=1)

    local_win = window_list.get_local_window()
    local_win.print_new(ts, message, print_=False)
def load_keys(self) -> None:
    """Load keys from encrypted database.

    Reads and decrypts the key database, splits the plaintext into
    fixed-size records, drops dummy (padding) records, and de-serializes
    each remaining record into a KeySet appended to `self.keysets`.
    """
    with open(self.file_name, 'rb') as f:
        ct_bytes = f.read()

    pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)
    entries  = split_byte_string(pt_bytes, item_len=KEYSET_LENGTH)
    # Dummy padding records start with the dummy ID and carry no real data.
    keysets  = [e for e in entries if not e.startswith(self.dummy_id)]

    for k in keysets:
        # NOTE(review): `assert` is stripped under `python -O`; an explicit
        # exception would be safer for database-integrity validation.
        assert len(k) == KEYSET_LENGTH

        # Record layout (per the slice offsets): 1024-byte padded account
        # string, four 32-byte keys, two 8-byte encoded integers.
        self.keysets.append(
            KeySet(rx_account=bytes_to_str(k[0:1024]),
                   tx_key=k[1024:1056],
                   rx_key=k[1056:1088],
                   tx_hek=k[1088:1120],
                   rx_hek=k[1120:1152],
                   tx_harac=bytes_to_int(k[1152:1160]),
                   rx_harac=bytes_to_int(k[1160:1168]),
                   store_keys=self.store_keys))
def load_keys(self) -> None:
    """Load keys from encrypted database."""
    ensure_dir(f'{DIR_USER_DATA}/')

    with open(self.file_name, 'rb') as f:
        ct_bytes = f.read()

    pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)

    # Each serialized keyset is 1024 + 4 * 32 + 2 * 8 = 1168 bytes.
    dummy_id = 'dummy_contact'.encode('utf-32')

    for entry in split_byte_string(pt_bytes, item_len=1168):
        # Dummy padding records start with the dummy ID and carry no data.
        if entry.startswith(dummy_id):
            continue

        self.keysets.append(KeySet(bytes_to_str(entry[0:1024]),
                                   entry[1024:1056],
                                   entry[1056:1088],
                                   entry[1088:1120],
                                   entry[1120:1152],
                                   bytes_to_int(entry[1152:1160]),
                                   bytes_to_int(entry[1160:1168]),
                                   self.store_keys))
def load_settings(self) -> None:
    """Load settings from encrypted database.

    Reads and decrypts the settings database, then consumes the plaintext
    byte string front-to-back, one attribute at a time, in the fixed order
    given by `self.key_list`. The serialized width of each field is chosen
    from the Python type of the attribute's current (default) value.

    Raises CriticalError if a default value has an unsupported type.
    """
    ensure_dir(f'{DIR_USER_DATA}/')
    with open(self.file_name, 'rb') as f:
        ct_bytes = f.read()

    pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)

    # Update settings based on plaintext byte string content.
    # FIX: the loop index from enumerate() was never used; iterate the keys
    # directly.
    for key in self.key_list:
        attribute = self.__getattribute__(key)

        # NOTE: the bool check must precede the int check because bool is a
        # subclass of int in Python.
        if isinstance(attribute, bool):
            value = bytes_to_bool(
                pt_bytes[0])  # type: Union[bool, int, float, str]
            pt_bytes = pt_bytes[1:]

        elif isinstance(attribute, int):
            value    = bytes_to_int(pt_bytes[:8])
            pt_bytes = pt_bytes[8:]

        elif isinstance(attribute, float):
            value    = bytes_to_double(pt_bytes[:8])
            pt_bytes = pt_bytes[8:]

        elif isinstance(attribute, str):
            value    = bytes_to_str(pt_bytes[:1024])
            # 255 * 4 = 1020; the four additional bytes are the UTF-32 BOM.
            pt_bytes = pt_bytes[1024:]

        else:
            raise CriticalError(
                "Invalid data type in settings default values.")

        setattr(self, key, value)
def test_bytes_to_str(self):
    """Encoding a string and decoding it back yields the original string."""
    self.assertEqual(bytes_to_str(str_to_bytes('test')), 'test')
def _load_groups(self) -> None:
    """Load groups from the encrypted database.

    The function first reads, authenticates and decrypts the group database
    data. Next, it slices and decodes the header values that help the
    function to properly de-serialize the database content. The function
    then removes dummy groups based on header data. Next, the function
    updates the group database settings if necessary. It then splits group
    data based on header data into blocks, which are further sliced, and
    processed if necessary, to obtain data required to create Group objects.

    Finally, if needed, the function will update the group database content.
    """
    pt_bytes = self.database.load_database()

    # Slice and decode headers
    group_db_headers, pt_bytes = separate_header(pt_bytes, GROUP_DB_HEADER_LENGTH)

    padding_for_group_db, padding_for_members, number_of_groups, members_in_largest_group \
        = list(map(bytes_to_int, split_byte_string(group_db_headers, ENCODED_INTEGER_LENGTH)))

    # Slice dummy groups.
    # FIX: `pt_bytes[:-dummy_data_len]` evaluates to `pt_bytes[:0]` (empty)
    # when the database holds no dummy groups (dummy_data_len == 0), which
    # silently discarded every stored group. Compute the end index
    # explicitly instead.
    bytes_per_group = GROUP_STATIC_LENGTH + padding_for_members * ONION_SERVICE_PUBLIC_KEY_LENGTH
    dummy_data_len  = (padding_for_group_db - number_of_groups) * bytes_per_group
    group_data      = pt_bytes[:len(pt_bytes) - dummy_data_len]

    update_db = self._check_db_settings(number_of_groups, members_in_largest_group)
    blocks    = split_byte_string(group_data, item_len=bytes_per_group)

    all_pub_keys  = self.contact_list.get_list_of_pub_keys()
    dummy_pub_key = onion_address_to_pub_key(DUMMY_MEMBER)

    # Deserialize group objects
    for block in blocks:
        if len(block) != bytes_per_group:
            raise CriticalError("Invalid data in group database.")

        name_bytes, group_id, log_messages_byte, notification_byte, ser_pub_keys \
            = separate_headers(block, [PADDED_UTF32_STR_LENGTH, GROUP_ID_LENGTH]
                                      + 2*[ENCODED_BOOLEAN_LENGTH])

        pub_key_list   = split_byte_string(ser_pub_keys,
                                           item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH)
        group_pub_keys = [k for k in pub_key_list if k != dummy_pub_key]
        group_members  = [self.contact_list.get_contact_by_pub_key(k)
                          for k in group_pub_keys if k in all_pub_keys]

        self.groups.append(Group(name=bytes_to_str(name_bytes),
                                 group_id=group_id,
                                 log_messages=bytes_to_bool(log_messages_byte),
                                 notifications=bytes_to_bool(notification_byte),
                                 members=group_members,
                                 settings=self.settings,
                                 store_groups=self.store_groups))

        # FIX: the original `set(all_pub_keys) > set(group_pub_keys)` forced
        # a database rewrite whenever the user had any contact outside the
        # group (i.e. almost always). The intent — per the sibling loader's
        # "member removed from contact database" check — is to rewrite when
        # the group references a member no longer in the contact database.
        update_db |= bool(set(group_pub_keys) - set(all_pub_keys))

    if update_db:
        self.store_groups()
def load_groups(self) -> None:
    """Load groups from encrypted database.

    Decrypts the group database, reads the header fields that describe its
    geometry, grows the relevant settings when the stored data exceeds them,
    strips dummy (padding) groups and members, and de-serializes the real
    groups into Group objects appended to `self.groups`.
    """
    ensure_dir(f'{DIR_USER_DATA}/')
    with open(self.file_name, 'rb') as f:
        ct_bytes = f.read()

    pt_bytes  = auth_and_decrypt(ct_bytes, self.master_key.master_key)
    update_db = False

    # Slice and decode headers
    padding_for_g = bytes_to_int(pt_bytes[0:8])
    padding_for_m = bytes_to_int(pt_bytes[8:16])
    n_of_actual_g = bytes_to_int(pt_bytes[16:24])
    largest_group = bytes_to_int(pt_bytes[24:32])

    if n_of_actual_g > self.settings.m_number_of_groups:
        self.settings.m_number_of_groups = round_up(n_of_actual_g)
        self.settings.store_settings()
        update_db = True
        print(
            "Group database had {} groups. Increased max number of groups to {}."
            .format(n_of_actual_g, self.settings.m_number_of_groups))

    if largest_group > self.settings.m_members_in_group:
        self.settings.m_members_in_group = round_up(largest_group)
        self.settings.store_settings()
        update_db = True
        print(
            "A group in group database had {} members. Increased max size of groups to {}."
            .format(largest_group, self.settings.m_members_in_group))

    # Strip header bytes
    pt_bytes = pt_bytes[32:]

    # ( no_fields * (padding + BOM) * bytes/char) + booleans
    bytes_per_group = ((1 + padding_for_m) * (255 + 1) * 4) + 2

    # Remove dummy groups.
    # FIX: when the database holds no dummy groups (no_dummy_groups == 0),
    # the previous `pt_bytes[:-(0)]` evaluated to `pt_bytes[:0]` — an empty
    # byte string — silently discarding every stored group. Compute the end
    # index explicitly instead.
    no_dummy_groups = padding_for_g - n_of_actual_g
    pt_bytes        = pt_bytes[:len(pt_bytes) - no_dummy_groups * bytes_per_group]

    groups = split_byte_string(pt_bytes, item_len=bytes_per_group)

    for g in groups:
        # Remove padding
        name          = bytes_to_str(g[0:1024])
        log_messages  = bytes_to_bool(g[1024:1025])
        notifications = bytes_to_bool(g[1025:1026])
        members_b     = split_byte_string(g[1026:], item_len=1024)
        members       = [bytes_to_str(m) for m in members_b]

        # Remove dummy members
        members_df = [m for m in members if not m == 'dummy_member']

        # Load contacts based on stored rx_account
        group_members = [self.contact_list.get_contact(m) for m in members_df
                         if self.contact_list.has_contact(m)]

        self.groups.append(Group(name, log_messages, notifications,
                                 group_members, self.settings,
                                 self.store_groups))

    if update_db:
        self.store_groups()
def load_groups(self) -> None:
    """Load groups from encrypted database.

    Decrypts the group database, reads the header fields that describe its
    geometry, grows the relevant settings when the stored data exceeds them,
    strips dummy (padding) groups and members, and de-serializes the real
    groups into Group objects appended to `self.groups`. Schedules a
    database rewrite when settings changed or when a group member has been
    removed from the contact database.
    """
    with open(self.file_name, 'rb') as f:
        ct_bytes = f.read()

    pt_bytes  = auth_and_decrypt(ct_bytes, self.master_key.master_key)
    update_db = False

    # Slice and decode headers
    padding_for_group_db    = bytes_to_int(pt_bytes[0:8])
    padding_for_members     = bytes_to_int(pt_bytes[8:16])
    number_of_actual_groups = bytes_to_int(pt_bytes[16:24])
    largest_group           = bytes_to_int(pt_bytes[24:32])

    if number_of_actual_groups > self.settings.max_number_of_groups:
        self.settings.max_number_of_groups = round_up(
            number_of_actual_groups)
        self.settings.store_settings()
        update_db = True
        print(
            "Group database had {} groups. Increased max number of groups to {}."
            .format(number_of_actual_groups,
                    self.settings.max_number_of_groups))

    if largest_group > self.settings.max_number_of_group_members:
        self.settings.max_number_of_group_members = round_up(largest_group)
        self.settings.store_settings()
        update_db = True
        print(
            "A group in group database had {} members. Increased max size of groups to {}."
            .format(largest_group,
                    self.settings.max_number_of_group_members))

    group_name_field       = 1
    string_fields_in_group = padding_for_members + group_name_field
    bytes_per_group        = string_fields_in_group * PADDED_UTF32_STR_LEN + 2 * BOOLEAN_SETTING_LEN

    # Remove group header and dummy groups.
    # FIX: when the database holds no dummy groups (dummy_group_data == 0),
    # the previous `pt_bytes[GROUP_DB_HEADER_LEN:-0]` evaluated to
    # `pt_bytes[GROUP_DB_HEADER_LEN:0]` — an empty byte string — silently
    # discarding every stored group. Compute the end index explicitly.
    dummy_group_data = (padding_for_group_db - number_of_actual_groups) * bytes_per_group
    group_data       = pt_bytes[GROUP_DB_HEADER_LEN:len(pt_bytes) - dummy_group_data]

    groups = split_byte_string(group_data, item_len=bytes_per_group)

    for g in groups:
        # NOTE(review): `assert` is stripped under `python -O`; an explicit
        # exception would be safer for database-integrity validation.
        assert len(g) == bytes_per_group

        name          = bytes_to_str(g[0:1024])
        log_messages  = bytes_to_bool(g[1024:1025])
        notifications = bytes_to_bool(g[1025:1026])

        members_bytes     = split_byte_string(g[1026:], item_len=PADDED_UTF32_STR_LEN)
        members_w_dummies = [bytes_to_str(m) for m in members_bytes]
        members           = [m for m in members_w_dummies if m != DUMMY_MEMBER]

        # Load contacts based on stored rx_account
        group_members = [self.contact_list.get_contact(m) for m in members
                         if self.contact_list.has_contact(m)]

        # Update group database if any member has been removed from contact database
        if not all(m in self.contact_list.get_list_of_accounts()
                   for m in members):
            update_db = True

        self.groups.append(Group(name, log_messages, notifications,
                                 group_members, self.settings,
                                 self.store_groups))

    if update_db:
        self.store_groups()
def access_history(window:       Union['Window', 'Window_'],
                   contact_list: 'ContactList',
                   settings:     'Settings',
                   master_key:   'MasterKey',
                   msg_to_load:  int = 0,
                   export:       bool = False) -> None:
    """Decrypt 'msg_to_load' last messages from log database and display/export it.

    :param window:       Window object
    :param contact_list: ContactList object
    :param settings:     Settings object
    :param master_key:   Master key object
    :param msg_to_load:  Number of messages to load (0 loads all of them)
    :param export:       When True, write logged messages into plaintext file
                         instead of printing them.
    :return:             None
    """

    def read_entry():
        """Read encrypted log entry.

        Length | Data type
        --------|--------------------------------
        24      | XSalsa20 nonce
        4       | Timestamp
        4       | UTF-32 BOM
        4*255   | Padded account (UTF-32)
        1       | Origin header
        1       | Assembly packet header
        255     | Padded assembly packet (UTF-8)
        16      | Poly1305 tag
        """
        return log_file.read(1325)

    ensure_dir(f'{DIR_USER_DATA}/')
    file_name = f'{DIR_USER_DATA}/{settings.software_operation}_logs'
    if not os.path.isfile(file_name):
        raise FunctionReturn(f"Error: Could not find '{file_name}'.")

    log_file          = open(file_name, 'rb')
    ts_message_list   = []      # type: List[Tuple[str, str, bytes, str]]
    # Accumulates long-message assembly packets, keyed by origin + account.
    assembly_p_buffer = dict()
    group_timestamp   = b''

    for ct in iter(read_entry, b''):
        pt      = auth_and_decrypt(ct, key=master_key.master_key)
        account = bytes_to_str(pt[5:1029])

        # For contact windows, only entries of that contact are shown.
        if window.type == 'contact' and window.uid != account:
            continue

        t_stamp         = parse_ts_bytes(pt[0:4], settings)
        origin_byte     = pt[4:5]
        origin          = origin_byte.decode()
        assembly_header = pt[1029:1030]
        assembly_pt     = pt[1030:]

        # Short (single-packet) message: depad, decompress and decode inline.
        if assembly_header == M_S_HEADER:
            depadded     = rm_padding_bytes(assembly_pt)
            decompressed = zlib.decompress(depadded)
            if decompressed[:1] == PRIVATE_MESSAGE_HEADER:
                if window.type == 'group':
                    continue
                decoded = decompressed[1:].decode()
            elif decompressed[:1] == GROUP_MESSAGE_HEADER:
                # Bytes [1:9] carry a per-message group timestamp used below
                # to skip duplicate copies; payload after byte 9 is
                # "<group name> US_BYTE <message>".
                group_name, decoded = [f.decode() for f in decompressed[9:].split(US_BYTE)]
                if group_name != window.name:
                    continue
                if group_timestamp == decompressed[1:9]:
                    continue
                else:
                    group_timestamp = decompressed[1:9]
            ts_message_list.append((t_stamp, account, origin_byte, decoded))

        # First packet of a long message starts a fresh buffer.
        elif assembly_header == M_L_HEADER:
            assembly_p_buffer[origin + account] = assembly_pt

        # Append packet: only collected when a buffer was already started.
        elif assembly_header == M_A_HEADER:
            if (origin + account) in assembly_p_buffer:
                assembly_p_buffer[origin + account] += assembly_pt

        # End packet: finalize the buffered long message.
        elif assembly_header == M_E_HEADER:
            if (origin + account) in assembly_p_buffer:
                assembly_p_buffer[origin + account] += assembly_pt

                pt_buf  = assembly_p_buffer.pop(origin + account)
                inner_l = rm_padding_bytes(pt_buf)
                # Last 32 bytes of the depadded buffer hold the message key
                # used to decrypt the remaining ciphertext.
                msg_key = inner_l[-32:]
                enc_msg = inner_l[:-32]
                decrypted    = auth_and_decrypt(enc_msg, key=msg_key)
                decompressed = zlib.decompress(decrypted)

                if decompressed[:1] == PRIVATE_MESSAGE_HEADER:
                    if window.type == 'group':
                        continue
                    decoded = decompressed[1:].decode()
                elif decompressed[:1] == GROUP_MESSAGE_HEADER:
                    group_name, decoded = [f.decode() for f in decompressed[9:].split(US_BYTE)]
                    if group_name != window.name:
                        continue
                    if group_timestamp == decompressed[1:9]:  # Skip duplicates of outgoing messages
                        continue
                    else:
                        group_timestamp = decompressed[1:9]
                ts_message_list.append((t_stamp, account, origin_byte, decoded))

        # Cancel packet: discard any partially collected long message.
        elif assembly_header == M_C_HEADER:
            assembly_p_buffer.pop(origin + account, None)

    log_file.close()

    if not export:
        clear_screen()
        print('')

    tty_w  = get_tty_w()
    system = dict(tx="TxM", rx="RxM", ut="Unittest")[settings.software_operation]
    m_dir  = dict(tx="sent to", rx="to/from", ut="to/from")[settings.software_operation]

    # When exporting, f_name is a real file handle; otherwise it is stdout
    # so the same print() calls serve both paths.
    f_name = open(f"{system} - Plaintext log ({window.name})", 'w+') if export else sys.stdout
    subset = '' if msg_to_load == 0 else f"{msg_to_load} most recent "
    title  = textwrap.fill(f"Log file of {subset}message(s) {m_dir} {window.name}", tty_w)

    print(title, file=f_name)
    print(tty_w * '═', file=f_name)

    for timestamp, account, origin_, message in ts_message_list[-msg_to_load:]:
        nick = "Me" if origin_ == ORIGIN_USER_HEADER else contact_list.get_contact(account).nick
        print(textwrap.fill(f"{timestamp} {nick}:", tty_w), file=f_name)
        print('', file=f_name)
        print(textwrap.fill(message, tty_w), file=f_name)
        print('', file=f_name)
        print(tty_w * '─', file=f_name)

    if export:
        f_name.close()
    else:
        print('')