def load_settings(self) -> None:
    """Load persistent settings from file.

    The settings file is a fixed-layout byte string: two 8-byte
    integers followed by two 1-byte booleans.
    """
    with open(self.file_name, 'rb') as f:
        raw = f.read()

    int_fields  = ('serial_baudrate', 'serial_error_correction')
    bool_fields = ('serial_usb_adapter', 'disable_gui_dialog')

    offset = 0
    for field in int_fields:
        setattr(self, field, bytes_to_int(raw[offset:offset + 8]))
        offset += 8
    for field in bool_fields:
        setattr(self, field, bytes_to_bool(raw[offset:offset + 1]))
        offset += 1
def load_settings(self) -> None:
    """Load settings from the encrypted database."""
    with open(self.file_name, 'rb') as f:
        ct_bytes = f.read()

    pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key, database=self.file_name)

    # Deserialize each attribute in key_list order, consuming fields
    # from the front of the plaintext. The field width is chosen from
    # the type of the attribute's current (default) value.
    for key in self.key_list:
        current = getattr(self, key)

        if isinstance(current, bool):
            value = bytes_to_bool(pt_bytes[0])  # type: Union[bool, int, float]
            field_len = ENCODED_BOOLEAN_LENGTH
        elif isinstance(current, int):
            value = bytes_to_int(pt_bytes[:ENCODED_INTEGER_LENGTH])
            field_len = ENCODED_INTEGER_LENGTH
        elif isinstance(current, float):
            value = bytes_to_double(pt_bytes[:ENCODED_FLOAT_LENGTH])
            field_len = ENCODED_FLOAT_LENGTH
        else:
            raise CriticalError("Invalid data type in settings default values.")

        pt_bytes = pt_bytes[field_len:]
        setattr(self, key, value)
def load_settings(self) -> None:
    """Load settings from the encrypted database."""
    serialized = self.database.load_database()

    # Fields were stored in key_list order; peel each one off the
    # front of the serialized byte string based on the type of the
    # attribute's current (default) value.
    for key in self.key_list:
        attribute = getattr(self, key)

        if isinstance(attribute, bool):
            value = bytes_to_bool(serialized[0])  # type: Union[bool, int, float]
            serialized = serialized[ENCODED_BOOLEAN_LENGTH:]
        elif isinstance(attribute, int):
            value, serialized = (bytes_to_int(serialized[:ENCODED_INTEGER_LENGTH]),
                                 serialized[ENCODED_INTEGER_LENGTH:])
        elif isinstance(attribute, float):
            value, serialized = (bytes_to_double(serialized[:ENCODED_FLOAT_LENGTH]),
                                 serialized[ENCODED_FLOAT_LENGTH:])
        else:
            raise CriticalError("Invalid data type in settings default values.")

        setattr(self, key, value)
def add_complete_message_to_message_list(timestamp:     bytes,
                                         onion_pub_key: bytes,
                                         group_msg_id:  bytes,
                                         packet:        'Packet',
                                         message_list:  List[MsgTuple],
                                         window:        Union['TxWindow', 'RxWindow']) -> bytes:
    """Add complete log file message to `message_list`.

    Returns the (possibly updated) group message id so the caller can
    pass it back in to de-duplicate repeated copies of the user's own
    group messages.
    """
    whisper_byte, header, message = separate_headers(packet.assemble_message_packet(),
                                                     [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH])
    whisper = bytes_to_bool(whisper_byte)

    if header == PRIVATE_MESSAGE_HEADER and window.type == WIN_TYPE_CONTACT:
        message_list.append(
            (bytes_to_timestamp(timestamp), message.decode(), onion_pub_key, packet.origin, whisper, False))

    elif header == GROUP_MESSAGE_HEADER and window.type == WIN_TYPE_GROUP:
        purp_group_id, message = separate_header(message, GROUP_ID_LENGTH)
        # Skip messages that belong to a different group than the window's.
        if window.group is not None and purp_group_id != window.group.group_id:
            return group_msg_id

        purp_msg_id, message = separate_header(message, GROUP_MSG_ID_LENGTH)
        if packet.origin == ORIGIN_USER_HEADER:
            # Skip a copy of the user's own group message whose id matches
            # the previously seen one — presumably one copy is logged per
            # recipient; TODO confirm against the logging side.
            if purp_msg_id == group_msg_id:
                return group_msg_id
            group_msg_id = purp_msg_id

        message_list.append(
            (bytes_to_timestamp(timestamp), message.decode(), onion_pub_key, packet.origin, whisper, False))

    return group_msg_id
def manage_contact_req(command: bytes,
                       queues:  'QueueDict',
                       notify:  bool = True) -> None:
    """Control whether contact requests are accepted.

    The one-byte `command` encodes the desired state. When `notify` is
    True, a confirmation message is shown before the new state is put
    on the contact request manager queue.
    """
    enabled = bytes_to_bool(command)
    if notify:
        # Fixed grammar: the original message read "are have been".
        state = 'enabled' if enabled else 'disabled'
        m_print(f"Contact requests have been {state}.", head=1, tail=1)
    queues[C_REQ_MGR_QUEUE].put(enabled)
def _load_contacts(self) -> None:
    """Load contacts from the encrypted database.

    Read and decrypt the database file, split the plaintext into
    fixed-size per-contact records, discard dummy records (those that
    begin with the dummy contact's public key), and deserialize each
    remaining record into a Contact object appended to
    `self.contacts`.
    """
    with open(self.file_name, 'rb') as f:
        ciphertext = f.read()

    plaintext = auth_and_decrypt(ciphertext, self.master_key.master_key, database=self.file_name)

    dummy_prefix = self.dummy_contact.onion_pub_key
    records = [r for r in split_byte_string(plaintext, item_len=CONTACT_LENGTH)
               if not r.startswith(dummy_prefix)]

    # Field layout of a single serialized contact record.
    field_lengths = ([ONION_SERVICE_PUBLIC_KEY_LENGTH]
                     + 2 * [FINGERPRINT_LENGTH]
                     + [KEX_STATUS_LENGTH]
                     + 3 * [ENCODED_BOOLEAN_LENGTH])

    for record in records:
        if len(record) != CONTACT_LENGTH:
            raise CriticalError("Invalid data in contact database.")

        (onion_pub_key,
         tx_fingerprint,
         rx_fingerprint,
         kex_status_byte,
         log_messages_byte,
         file_reception_byte,
         notifications_byte,
         nick_bytes) = separate_headers(record, field_lengths)

        self.contacts.append(Contact(onion_pub_key=onion_pub_key,
                                     tx_fingerprint=tx_fingerprint,
                                     rx_fingerprint=rx_fingerprint,
                                     kex_status=kex_status_byte,
                                     log_messages=bytes_to_bool(log_messages_byte),
                                     file_reception=bytes_to_bool(file_reception_byte),
                                     notifications=bytes_to_bool(notifications_byte),
                                     nick=bytes_to_str(nick_bytes)))
def load_contacts(self) -> None:
    """Load contacts from encrypted database.

    Decrypts the contact database, splits the plaintext into
    fixed-length contact records, discards dummy (padding) records,
    and deserializes the rest into Contact objects.

    Raises CriticalError if a record has an invalid length.
    """
    with open(self.file_name, 'rb') as f:
        ct_bytes = f.read()

    pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)
    entries  = split_byte_string(pt_bytes, item_len=CONTACT_LENGTH)
    contacts = [e for e in entries if not e.startswith(self.dummy_id)]

    for c in contacts:
        # BUGFIX: `assert` is stripped when Python runs with -O, so the
        # record length is now validated with an explicit check, matching
        # the error handling used by the other database loaders.
        if len(c) != CONTACT_LENGTH:
            raise CriticalError("Invalid data in contact database.")

        self.contacts.append(Contact(rx_account    =bytes_to_str( c[   0:1024]),
                                     tx_account    =bytes_to_str( c[1024:2048]),
                                     nick          =bytes_to_str( c[2048:3072]),
                                     tx_fingerprint=              c[3072:3104],
                                     rx_fingerprint=              c[3104:3136],
                                     log_messages  =bytes_to_bool(c[3136:3137]),
                                     file_reception=bytes_to_bool(c[3137:3138]),
                                     notifications =bytes_to_bool(c[3138:3139])))
def load_contacts(self) -> None:
    """Load contacts from encrypted database."""
    ensure_dir(f'{DIR_USER_DATA}/')
    with open(self.file_name, 'rb') as f:
        ciphertext = f.read()

    plaintext = auth_and_decrypt(ciphertext, self.master_key.master_key)

    # Each serialized contact record: three 1024-byte UTF-32 string
    # fields, two 32-byte fingerprints, and three 1-byte booleans.
    record_len = 3139  # 3 * 1024 + 2 * 32 + 3 * 1
    dummy_id   = 'dummy_contact'.encode('utf-32')

    for record in split_byte_string(plaintext, item_len=record_len):
        if record.startswith(dummy_id):
            continue  # Padding entry, not a real contact.

        self.contacts.append(Contact(bytes_to_str(record[   0:1024]),   # rx_account
                                     bytes_to_str(record[1024:2048]),   # tx_account
                                     bytes_to_str(record[2048:3072]),   # nick
                                     record[3072:3104],                 # tx_fingerprint
                                     record[3104:3136],                 # rx_fingerprint
                                     bytes_to_bool(record[3136:3137]),  # log_messages
                                     bytes_to_bool(record[3137:3138]),  # file_reception
                                     bytes_to_bool(record[3138:3139])))  # notifications
def load_settings(self) -> None:
    """Load settings from encrypted database.

    Decrypts the settings database and deserializes each attribute in
    `self.key_list` from the plaintext, consuming fields from the
    front of the byte string. The field width is determined by the
    type of the attribute's current (default) value.

    Raises CriticalError if an attribute has an unsupported type.
    """
    ensure_dir(f'{DIR_USER_DATA}/')
    with open(self.file_name, 'rb') as f:
        ct_bytes = f.read()

    pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)

    # Update settings based on plaintext byte string content.
    # (Cleanup: the unused loop index from `enumerate` was removed,
    # and `self.__getattribute__(key)` replaced with `getattr`.)
    for key in self.key_list:

        attribute = getattr(self, key)

        if isinstance(attribute, bool):
            value    = bytes_to_bool(pt_bytes[0])  # type: Union[bool, int, float, str]
            pt_bytes = pt_bytes[1:]

        elif isinstance(attribute, int):
            value    = bytes_to_int(pt_bytes[:8])
            pt_bytes = pt_bytes[8:]

        elif isinstance(attribute, float):
            value    = bytes_to_double(pt_bytes[:8])
            pt_bytes = pt_bytes[8:]

        elif isinstance(attribute, str):
            value    = bytes_to_str(pt_bytes[:1024])
            pt_bytes = pt_bytes[1024:]  # 255 * 4 = 1020. The four additional bytes is the UTF-32 BOM.

        else:
            raise CriticalError("Invalid data type in settings default values.")

        setattr(self, key, value)
def test_bytes_to_bool(self):
    """Single-byte encodings decode to the matching boolean value."""
    for encoded, expected in ((b'\x00', False), (b'\x01', True)):
        self.assertEqual(bytes_to_bool(encoded), expected)
def _load_groups(self) -> None:
    """Load groups from the encrypted database.

    The function first reads, authenticates and decrypts the group
    database data. It then slices and decodes the header values that
    describe the layout of the rest of the plaintext, strips dummy
    (padding) groups, updates the group database settings if
    necessary, and deserializes each remaining block into a Group
    object. Finally, if needed, it re-stores the database content.
    """
    pt_bytes = self.database.load_database()

    # Slice and decode headers
    group_db_headers, pt_bytes = separate_header(pt_bytes, GROUP_DB_HEADER_LENGTH)

    padding_for_group_db, padding_for_members, number_of_groups, members_in_largest_group \
        = list(map(bytes_to_int, split_byte_string(group_db_headers, ENCODED_INTEGER_LENGTH)))

    # Slice dummy groups
    bytes_per_group = GROUP_STATIC_LENGTH + padding_for_members * ONION_SERVICE_PUBLIC_KEY_LENGTH
    dummy_data_len  = (padding_for_group_db - number_of_groups) * bytes_per_group

    # BUGFIX: the original `pt_bytes[:-dummy_data_len]` evaluated to
    # `pt_bytes[:0]` (an empty string, silently discarding every group)
    # when the database was full, i.e. when dummy_data_len == 0.
    # Computing the end index explicitly handles that case correctly.
    group_data = pt_bytes[:len(pt_bytes) - dummy_data_len]

    update_db = self._check_db_settings(number_of_groups, members_in_largest_group)
    blocks    = split_byte_string(group_data, item_len=bytes_per_group)

    all_pub_keys  = self.contact_list.get_list_of_pub_keys()
    dummy_pub_key = onion_address_to_pub_key(DUMMY_MEMBER)

    # Deserialize group objects
    for block in blocks:
        if len(block) != bytes_per_group:
            raise CriticalError("Invalid data in group database.")

        name_bytes, group_id, log_messages_byte, notification_byte, ser_pub_keys \
            = separate_headers(block, [PADDED_UTF32_STR_LENGTH, GROUP_ID_LENGTH]
                                      + 2*[ENCODED_BOOLEAN_LENGTH])

        pub_key_list   = split_byte_string(ser_pub_keys, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH)
        group_pub_keys = [k for k in pub_key_list if k != dummy_pub_key]
        group_members  = [self.contact_list.get_contact_by_pub_key(k)
                          for k in group_pub_keys if k in all_pub_keys]

        self.groups.append(Group(name=bytes_to_str(name_bytes),
                                 group_id=group_id,
                                 log_messages=bytes_to_bool(log_messages_byte),
                                 notifications=bytes_to_bool(notification_byte),
                                 members=group_members,
                                 settings=self.settings,
                                 store_groups=self.store_groups))

        # NOTE(review): this flags a re-store whenever the contact list is
        # a strict superset of this group's members, which is true for any
        # group that does not contain every contact — confirm the intent
        # was not `not set(group_pub_keys).issubset(all_pub_keys)`.
        # Behavior preserved here.
        update_db |= set(all_pub_keys) > set(group_pub_keys)

    if update_db:
        self.store_groups()
def load_groups(self) -> None:
    """Load groups from encrypted database.

    Decrypts the group database, reads the four 8-byte header
    integers that describe its layout, grows the group-related
    settings if the stored data exceeds them, strips dummy (padding)
    groups and members, and deserializes the remaining data into
    Group objects.
    """
    ensure_dir(f'{DIR_USER_DATA}/')
    with open(self.file_name, 'rb') as f:
        ct_bytes = f.read()

    pt_bytes  = auth_and_decrypt(ct_bytes, self.master_key.master_key)
    update_db = False

    # Slice and decode headers
    padding_for_g = bytes_to_int(pt_bytes[0:8])
    padding_for_m = bytes_to_int(pt_bytes[8:16])
    n_of_actual_g = bytes_to_int(pt_bytes[16:24])
    largest_group = bytes_to_int(pt_bytes[24:32])

    # Grow settings (and schedule a database re-store) when the stored
    # data exceeds the current limits.
    if n_of_actual_g > self.settings.m_number_of_groups:
        self.settings.m_number_of_groups = round_up(n_of_actual_g)
        self.settings.store_settings()
        update_db = True
        print("Group database had {} groups. Increased max number of groups to {}."
              .format(n_of_actual_g, self.settings.m_number_of_groups))

    if largest_group > self.settings.m_members_in_group:
        self.settings.m_members_in_group = round_up(largest_group)
        self.settings.store_settings()
        update_db = True
        print("A group in group database had {} members. Increased max size of groups to {}."
              .format(largest_group, self.settings.m_members_in_group))

    # Strip header bytes
    pt_bytes = pt_bytes[32:]

    # ( no_fields * (padding + BOM) * bytes/char) + booleans
    bytes_per_group = ((1 + padding_for_m) * (255 + 1) * 4) + 2

    # Remove dummy groups. BUGFIX: the original sliced with
    # `pt_bytes[:-(no_dummy_groups * bytes_per_group)]`, which becomes
    # `pt_bytes[:0]` (discarding every group) when the database is full
    # and no_dummy_groups == 0. Compute the end index explicitly.
    no_dummy_groups = padding_for_g - n_of_actual_g
    pt_bytes        = pt_bytes[:len(pt_bytes) - no_dummy_groups * bytes_per_group]

    groups = split_byte_string(pt_bytes, item_len=bytes_per_group)

    for g in groups:
        # Remove padding
        name          = bytes_to_str(g[0:1024])
        log_messages  = bytes_to_bool(g[1024:1025])
        notifications = bytes_to_bool(g[1025:1026])
        members_b     = split_byte_string(g[1026:], item_len=1024)
        members       = [bytes_to_str(m) for m in members_b]

        # Remove dummy members
        members_df = [m for m in members if not m == 'dummy_member']

        # Load contacts based on stored rx_account
        group_members = [self.contact_list.get_contact(m) for m in members_df
                         if self.contact_list.has_contact(m)]

        self.groups.append(Group(name, log_messages, notifications, group_members,
                                 self.settings, self.store_groups))

    if update_db:
        self.store_groups()
def process_message(ts:                 'datetime',
                    assembly_packet_ct: bytes,
                    window_list:        'WindowList',
                    packet_list:        'PacketList',
                    contact_list:       'ContactList',
                    key_list:           'KeyList',
                    group_list:         'GroupList',
                    settings:           'Settings',
                    master_key:         'MasterKey',
                    file_keys:          Dict[bytes, bytes]) -> None:
    """Process received private / group message.

    Separates the contact's public key and origin header from the
    ciphertext, decrypts the assembly packet and feeds it to the
    relevant Packet object. Once the packet is complete, either stores
    the file or displays the message, writing it (or placeholder
    masking entries, on error) to the log file as configured.
    """
    local_window = window_list.get_local_window()

    onion_pub_key, origin, assembly_packet_ct = separate_headers(assembly_packet_ct,
                                                                 [ONION_SERVICE_PUBLIC_KEY_LENGTH,
                                                                  ORIGIN_HEADER_LENGTH])

    if onion_pub_key == LOCAL_PUBKEY:
        raise FunctionReturn("Warning! Received packet masqueraded as a command.", window=local_window)

    if origin not in [ORIGIN_USER_HEADER, ORIGIN_CONTACT_HEADER]:
        raise FunctionReturn("Error: Received packet had an invalid origin-header.", window=local_window)

    assembly_packet = decrypt_assembly_packet(assembly_packet_ct, onion_pub_key, origin,
                                              window_list, contact_list, key_list)

    # File packets are identified by an upper-case assembly packet header.
    p_type  = FILE if assembly_packet[:ASSEMBLY_PACKET_HEADER_LENGTH].isupper() else MESSAGE
    packet  = packet_list.get_packet(onion_pub_key, origin, p_type)
    logging = contact_list.get_contact_by_pub_key(onion_pub_key).log_messages

    def log_masking_packets(completed: bool = False) -> None:
        """Add masking packets to log file.

        If logging and log file masking are enabled, this function will
        in case of erroneous transmissions, store the correct number of
        placeholder data packets to log file to hide the quantity of
        communication that log file observation would otherwise reveal.
        """
        if logging and settings.log_file_masking and (packet.log_masking_ctr or completed):
            no_masking_packets = len(packet.assembly_pt_list) if completed else packet.log_masking_ctr
            for _ in range(no_masking_packets):
                write_log_entry(PLACEHOLDER_DATA, onion_pub_key, settings, master_key, origin)
        packet.log_masking_ctr = 0

    try:
        packet.add_packet(assembly_packet)
    except FunctionReturn:
        log_masking_packets()
        raise
    log_masking_packets()

    if not packet.is_complete:
        return None

    try:
        if p_type == FILE:
            packet.assemble_and_store_file(ts, onion_pub_key, window_list)
            raise FunctionReturn("File storage complete.", output=False)  # Raising allows calling log_masking_packets

        elif p_type == MESSAGE:
            whisper_byte, header, assembled = separate_headers(packet.assemble_message_packet(),
                                                               [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH])
            if len(whisper_byte) != WHISPER_FIELD_LENGTH:
                raise FunctionReturn("Error: Message from contact had an invalid whisper header.")

            whisper = bytes_to_bool(whisper_byte)

            if header == GROUP_MESSAGE_HEADER:
                # The group handler's return value overrides the logging flag.
                logging = process_group_message(assembled, ts, onion_pub_key, origin, whisper,
                                                group_list, window_list)

            elif header == PRIVATE_MESSAGE_HEADER:
                window = window_list.get_window(onion_pub_key)
                window.add_new(ts, assembled.decode(), onion_pub_key, origin, output=True, whisper=whisper)

            elif header == FILE_KEY_HEADER:
                nick = process_file_key_message(assembled, onion_pub_key, origin, contact_list, file_keys)
                raise FunctionReturn(f"Received file decryption key from {nick}", window=local_window)

            else:
                raise FunctionReturn("Error: Message from contact had an invalid header.")

        # Logging
        if whisper:
            # Sender requested this message not be logged.
            raise FunctionReturn("Whisper message complete.", output=False)

        if logging:
            for p in packet.assembly_pt_list:
                write_log_entry(p, onion_pub_key, settings, master_key, origin)

    except (FunctionReturn, UnicodeError):
        log_masking_packets(completed=True)
        raise

    finally:
        packet.clear_assembly_packets()
def access_logs(window:       Union['TxWindow', 'RxWindow'],
                contact_list: 'ContactList',
                group_list:   'GroupList',
                settings:     'Settings',
                master_key:   'MasterKey',
                msg_to_load:  int = 0,
                export:       bool = False) -> None:
    """\
    Load 'msg_to_load' last messages from log database and display or export them.

    The default value of zero for `msg_to_load` means all messages
    for the window will be retrieved from the log database.
    """
    file_name    = f'{DIR_USER_DATA}{settings.software_operation}_logs'
    log_file     = get_logfile(file_name)
    packet_list  = PacketList(settings, contact_list)

    message_log  = []   # type: List[MsgTuple]
    group_msg_id = b''  # Id of the last outgoing group message, for de-duplication.

    # Read fixed-size log entries until the sentinel b'' marks EOF.
    for ct in iter(lambda: log_file.read(LOG_ENTRY_LENGTH), b''):
        plaintext = auth_and_decrypt(ct, master_key.master_key, database=file_name)

        onion_pub_key, timestamp, origin, assembly_packet = separate_headers(
            plaintext, [ONION_SERVICE_PUBLIC_KEY_LENGTH, TIMESTAMP_LENGTH, ORIGIN_HEADER_LENGTH])

        # For contact windows, skip entries that belong to other contacts.
        if window.type == WIN_TYPE_CONTACT and onion_pub_key != window.uid:
            continue

        packet = packet_list.get_packet(onion_pub_key, origin, MESSAGE, log_access=True)
        try:
            packet.add_packet(assembly_packet)
        except FunctionReturn:
            continue
        if not packet.is_complete:
            continue

        whisper_byte, header, message = separate_headers(packet.assemble_message_packet(),
                                                         [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH])
        whisper = bytes_to_bool(whisper_byte)

        if header == PRIVATE_MESSAGE_HEADER and window.type == WIN_TYPE_CONTACT:
            message_log.append((bytes_to_timestamp(timestamp), message.decode(), onion_pub_key,
                                packet.origin, whisper, False))

        elif header == GROUP_MESSAGE_HEADER and window.type == WIN_TYPE_GROUP:
            purp_group_id, message = separate_header(message, GROUP_ID_LENGTH)
            # Skip messages that belong to a different group than the window's.
            if window.group is not None and purp_group_id != window.group.group_id:
                continue

            purp_msg_id, message = separate_header(message, GROUP_MSG_ID_LENGTH)
            if packet.origin == ORIGIN_USER_HEADER:
                # Skip repeated copies of the user's own group message
                # (same message id as the previously accepted one).
                if purp_msg_id == group_msg_id:
                    continue
                group_msg_id = purp_msg_id

            message_log.append((bytes_to_timestamp(timestamp), message.decode(), onion_pub_key,
                                packet.origin, whisper, False))

    log_file.close()
    # msg_to_load == 0 -> slice [-0:] selects the entire message_log.
    print_logs(message_log[-msg_to_load:], export, msg_to_load, window, contact_list, group_list, settings)
def load_groups(self) -> None:
    """Load groups from encrypted database.

    Decrypts the group database, reads the four 8-byte header
    integers that describe its layout, grows group-related settings
    when the stored data exceeds them, strips dummy (padding) groups
    and members, and deserializes the remaining data into Group
    objects. The database is re-stored when settings changed or when
    a stored member no longer exists in the contact database.

    Raises CriticalError if a group record has an invalid length.
    """
    with open(self.file_name, 'rb') as f:
        ct_bytes = f.read()

    pt_bytes  = auth_and_decrypt(ct_bytes, self.master_key.master_key)
    update_db = False

    # Slice and decode headers
    padding_for_group_db    = bytes_to_int(pt_bytes[0:8])
    padding_for_members     = bytes_to_int(pt_bytes[8:16])
    number_of_actual_groups = bytes_to_int(pt_bytes[16:24])
    largest_group           = bytes_to_int(pt_bytes[24:32])

    if number_of_actual_groups > self.settings.max_number_of_groups:
        self.settings.max_number_of_groups = round_up(number_of_actual_groups)
        self.settings.store_settings()
        update_db = True
        print("Group database had {} groups. Increased max number of groups to {}."
              .format(number_of_actual_groups, self.settings.max_number_of_groups))

    if largest_group > self.settings.max_number_of_group_members:
        self.settings.max_number_of_group_members = round_up(largest_group)
        self.settings.store_settings()
        update_db = True
        print("A group in group database had {} members. Increased max size of groups to {}."
              .format(largest_group, self.settings.max_number_of_group_members))

    group_name_field       = 1
    string_fields_in_group = padding_for_members + group_name_field
    bytes_per_group        = string_fields_in_group * PADDED_UTF32_STR_LEN + 2 * BOOLEAN_SETTING_LEN

    # Remove group header and dummy groups. BUGFIX: the original
    # `pt_bytes[GROUP_DB_HEADER_LEN:-dummy_group_data]` discarded every
    # group when dummy_group_data == 0 (full database), because a stop
    # index of -0 means "stop at position 0". The end index is now
    # computed explicitly.
    dummy_group_data = (padding_for_group_db - number_of_actual_groups) * bytes_per_group
    group_data       = pt_bytes[GROUP_DB_HEADER_LEN:len(pt_bytes) - dummy_group_data]

    groups = split_byte_string(group_data, item_len=bytes_per_group)

    for g in groups:
        # BUGFIX: `assert` is stripped under `python -O`; use an explicit
        # check consistent with the other database loaders.
        if len(g) != bytes_per_group:
            raise CriticalError("Invalid data in group database.")

        name          = bytes_to_str(g[0:1024])
        log_messages  = bytes_to_bool(g[1024:1025])
        notifications = bytes_to_bool(g[1025:1026])
        members_bytes = split_byte_string(g[1026:], item_len=PADDED_UTF32_STR_LEN)

        members_w_dummies = [bytes_to_str(m) for m in members_bytes]
        members           = [m for m in members_w_dummies if m != DUMMY_MEMBER]

        # Load contacts based on stored rx_account
        group_members = [self.contact_list.get_contact(m) for m in members
                         if self.contact_list.has_contact(m)]

        # Update group database if any member has been removed from contact database
        if not all(m in self.contact_list.get_list_of_accounts() for m in members):
            update_db = True

        self.groups.append(Group(name, log_messages, notifications, group_members,
                                 self.settings, self.store_groups))

    if update_db:
        self.store_groups()
def process_message(ts:            'datetime',         # Timestamp of received message packet
                    onion_pub_key: bytes,              # Onion address of associated contact
                    origin:        bytes,              # Origin of message (user / contact)
                    logging:       bool,               # When True, message will be logged
                    packet:        'Packet',           # Packet object
                    window_list:   'WindowList',       # WindowList object
                    contact_list:  'ContactList',      # ContactList object
                    group_list:    'GroupList',        # GroupList object
                    message_log:   'MessageLog',       # MessageLog object
                    file_keys:     Dict[bytes, bytes]  # Dictionary of file decryption keys
                    ) -> None:
    """Process message packet.

    The received message might be a private or group message, or it
    might contain decryption key for file received earlier.

    Each received message contains a whisper header that allows the
    sender to request the message to not be logged. This request will
    be obeyed as long as the recipient does not edit the source code
    below. Thus, the sender should not trust a whisper message is
    never logged.
    """
    whisper_byte, header, assembled = separate_headers(packet.assemble_message_packet(),
                                                       [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH])
    if len(whisper_byte) != WHISPER_FIELD_LENGTH:
        raise SoftError("Error: Message from contact had an invalid whisper header.")

    whisper = bytes_to_bool(whisper_byte)

    if header == GROUP_MESSAGE_HEADER:
        # The group handler's return value overrides the logging flag.
        logging = process_group_message(ts, assembled, onion_pub_key, origin, whisper,
                                        group_list, window_list)

    elif header == PRIVATE_MESSAGE_HEADER:
        window = window_list.get_window(onion_pub_key)
        window.add_new(ts, assembled.decode(), onion_pub_key, origin, output=True, whisper=whisper)

    elif header == FILE_KEY_HEADER:
        nick = process_file_key_message(assembled, onion_pub_key, origin, contact_list, file_keys)
        raise SoftError(f"Received file decryption key from {nick}",
                        window=window_list.get_command_window())

    else:
        raise SoftError("Error: Message from contact had an invalid header.")

    # Logging
    if whisper:
        # Honor the sender's request: skip logging entirely.
        raise SoftError("Whisper message complete.", output=False)

    if logging:
        for p in packet.assembly_pt_list:
            write_log_entry(p, onion_pub_key, message_log, origin)