def test_invalid_name_raises_fr(self):
    # Setup
    file_name = str_to_bytes('\x01testfile.txt')
    data = file_name + os.urandom(1000)
    compressed = zlib.compress(data, level=COMPRESSION_LEVEL)
    packet = IMPORTED_FILE_HEADER + encrypt_and_sign(compressed, self.key)

    # Test
    self.assertFR("Error: Received file had an invalid name.",
                  process_imported_file, self.ts, packet, self.window_list, self.settings)
def test_valid_file_without_key_is_cached(self):
    # Setup
    file_ct = encrypt_and_sign(self.compressed, self.file_key)
    file_hash = blake2b(file_ct)
    packet = nick_to_pub_key('Alice') + ORIGIN_CONTACT_HEADER + file_ct

    # Test
    self.assertIsNone(new_file(self.ts, packet, *self.args))
    self.assertEqual(self.file_buf[nick_to_pub_key('Alice') + file_hash], (self.ts, file_ct))
def test_invalid_name_encoding_raises_fr(self):
    # Setup
    file_name = binascii.unhexlify('8095b2f59d650ab7')
    data = file_name + os.urandom(1000)
    compressed = zlib.compress(data, level=COMPRESSION_LEVEL)
    packet = IMPORTED_FILE_HEADER + encrypt_and_sign(compressed, self.key)

    # Test
    self.assertFR("Error: Received file name had invalid encoding.",
                  process_imported_file, self.ts, packet, self.window_list, self.settings)
def test_valid_file_with_key_is_processed(self, _: Any) -> None:
    # Setup
    file_ct = encrypt_and_sign(self.compressed, self.file_key)
    file_hash = blake2b(file_ct)
    packet = nick_to_pub_key('Alice') + ORIGIN_CONTACT_HEADER + file_ct
    self.file_keys = {(nick_to_pub_key('Alice') + file_hash): self.file_key}
    self.args = self.file_keys, self.file_buf, self.contact_list, self.window_list, self.settings

    # Test
    self.assertIsNone(new_file(self.ts, packet, *self.args))
def setUp(self):
    self.unittest_dir = cd_unittest()
    self.ts = datetime.now()
    self.account = nick_to_pub_key('Alice')
    self.file_key = SYMMETRIC_KEY_LENGTH * b'a'
    self.file_ct = encrypt_and_sign(50 * b'a', key=self.file_key)
    self.contact_list = ContactList(nicks=['Alice'])
    self.window_list = WindowList()
    self.settings = Settings()
    self.args = self.file_key, self.contact_list, self.window_list, self.settings
def send_packet(key_list:   'KeyList',
                gateway:    'Gateway',
                log_queue:  'Queue',
                packet:     bytes,
                settings:   'Settings',
                rx_account: str = None,
                tx_account: str = None,
                logging:    bool = None,
                log_as_ph:  bool = None) -> None:
    """Encrypt and send assembly packet.

    :param packet:     Padded plaintext assembly packet
    :param key_list:   Key list object
    :param settings:   Settings object
    :param gateway:    Gateway object
    :param log_queue:  Multiprocessing queue for logged messages
    :param rx_account: Recipient account
    :param tx_account: Sender's account associated with recipient's account
    :param logging:    When True, log the assembly packet
    :param log_as_ph:  When True, log assembly packet as placeholder data
    :return:           None
    """
    if len(packet) != ASSEMBLY_PACKET_LEN:
        raise CriticalError("Invalid assembly packet PT length.")

    if rx_account is None:
        keyset = key_list.get_keyset(LOCAL_ID)
        header = COMMAND_PACKET_HEADER
        trailer = b''
    else:
        keyset = key_list.get_keyset(rx_account)
        header = MESSAGE_PACKET_HEADER
        trailer = tx_account.encode() + US_BYTE + rx_account.encode()

    harac_in_bytes = int_to_bytes(keyset.tx_harac)
    encrypted_harac = encrypt_and_sign(harac_in_bytes, keyset.tx_hek)
    encrypted_message = encrypt_and_sign(packet, keyset.tx_key)
    encrypted_packet = header + encrypted_harac + encrypted_message + trailer

    transmit(encrypted_packet, settings, gateway)
    keyset.rotate_tx_key()

    log_queue.put((logging, log_as_ph, packet, rx_account, settings, key_list.master_key))
def test_valid_import(self):
    # Setup
    file_name = str_to_bytes('testfile.txt')
    data = file_name + os.urandom(1000)
    compressed = zlib.compress(data, level=COMPRESSION_LEVEL)
    packet = IMPORTED_FILE_HEADER + encrypt_and_sign(compressed, self.key)

    # Test
    self.assertIsNone(process_imported_file(self.ts, packet, self.window_list, self.settings))
    self.assertTrue(os.path.isfile(f"{DIR_IMPORTED}testfile.txt"))
def store_database(self, pt_bytes: bytes, replace: bool = True) -> None:
    """Encrypt and store data into database."""
    ct_bytes = encrypt_and_sign(pt_bytes, self.database_key)

    ensure_dir(DIR_USER_DATA)
    self.ensure_temp_write(ct_bytes)

    # Replace the original file with a temp file. (`os.replace` is atomic as per
    # POSIX requirements): https://docs.python.org/3/library/os.html#os.replace
    if replace:
        self.replace_database()
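
# For context, a minimal self-contained sketch of the write-temp-then-`os.replace`
# pattern the comment in store_database() refers to. The function name
# `write_atomically` and the '_temp' suffix are illustrative assumptions, not part
# of this codebase; only the atomicity of `os.replace` on POSIX is taken from the
# documentation linked above.
import os


def write_atomically(path: str, data: bytes) -> None:
    """Write `data` to `path` so readers never observe a partially written file."""
    tmp_path = path + '_temp'
    with open(tmp_path, 'wb') as f:
        f.write(data)
        f.flush()
        os.fsync(f.fileno())     # Ensure the temp file's content has reached the disk.
    os.replace(tmp_path, path)   # Atomic swap: readers see either old or new content.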
def test_successful_command_decryption(self):
    # Setup
    command = byte_padding(b'test')
    encrypted_message = encrypt_and_sign(command, 32 * b'\x01')
    harac_in_bytes = int_to_bytes(1)
    encrypted_harac = encrypt_and_sign(harac_in_bytes, 32 * b'\x01')
    packet = COMMAND_PACKET_HEADER + encrypted_harac + encrypted_message

    window_list = WindowList(nicks=['Alice', 'local'])
    contact_list = ContactList(nicks=['Alice', 'local'])
    key_list = KeyList(nicks=['Alice', 'local'])
    keyset = key_list.get_keyset('local')
    keyset.tx_harac = 1

    # Test
    assembly_pt, account, origin = decrypt_assembly_packet(packet, window_list, contact_list, key_list)
    self.assertEqual(assembly_pt, command)
    self.assertEqual(account, 'local')
    self.assertEqual(origin, ORIGIN_USER_HEADER)
def test_successful_packet_decryption_with_offset(self):
    # Setup
    message = PRIVATE_MESSAGE_HEADER + byte_padding(b'test')
    encrypted_message = encrypt_and_sign(message, hash_chain(32 * b'\x01'))
    harac_in_bytes = int_to_bytes(2)
    encrypted_harac = encrypt_and_sign(harac_in_bytes, 32 * b'\x01')
    packet = (MESSAGE_PACKET_HEADER + encrypted_harac + encrypted_message
              + ORIGIN_CONTACT_HEADER + b'*****@*****.**')

    window_list = WindowList(nicks=['Alice', 'local'])
    contact_list = ContactList(nicks=['Alice', 'local'])
    key_list = KeyList(nicks=['Alice', 'local'])
    keyset = key_list.get_keyset('*****@*****.**')
    keyset.rx_harac = 1

    # Test
    assembly_pt, account, origin = decrypt_assembly_packet(packet, window_list, contact_list, key_list)
    self.assertEqual(assembly_pt, message)
    self.assertEqual(account, '*****@*****.**')
    self.assertEqual(origin, ORIGIN_CONTACT_HEADER)
def test_harac_dos_can_be_interrupted(self):
    # Setup
    encrypted_message = encrypt_and_sign(PRIVATE_MESSAGE_HEADER + byte_padding(b'test'), 32 * b'\x01')
    harac_in_bytes = int_to_bytes(10000)
    encrypted_harac = encrypt_and_sign(harac_in_bytes, 32 * b'\x01')
    packet = (MESSAGE_PACKET_HEADER + encrypted_harac + encrypted_message
              + ORIGIN_CONTACT_HEADER + b'*****@*****.**')

    o_input = builtins.input
    builtins.input = lambda x: 'No'

    window_list = WindowList(nicks=['Alice', 'local'])
    contact_list = ContactList(nicks=['Alice', 'local'])
    key_list = KeyList(nicks=['Alice', 'local'])
    keyset = key_list.get_keyset('*****@*****.**')
    keyset.rx_harac = 3

    # Test
    self.assertFR("Dropped packet from Alice.",
                  decrypt_assembly_packet, packet, window_list, contact_list, key_list)

    # Teardown
    builtins.input = o_input
def split_to_assembly_packets(payload: bytes, p_type: str) -> List[bytes]:
    """Split payload into assembly packets.

    Messages and commands are compressed to reduce transmission time.
    Files have already been compressed at an earlier phase, before B85 encoding.

    If the compressed payload cannot be sent in a single packet, it is split
    into multiple assembly packets with headers. Long messages are encrypted
    with an inner layer of XSalsa20-Poly1305 to provide sender-based control
    over partially transmitted data. Regardless of packet size, files always
    have an inner layer of encryption, added at an earlier phase. Commands do
    not need sender-based control, so they are only delivered with a hash that
    makes the integrity check easy.

    The first assembly packet of a file transmission is prepended with an
    8-byte packet counter that tells the sender and the receiver how many
    packets the file transmission requires.
    """
    s_header = {MESSAGE: M_S_HEADER, FILE: F_S_HEADER, COMMAND: C_S_HEADER}[p_type]
    l_header = {MESSAGE: M_L_HEADER, FILE: F_L_HEADER, COMMAND: C_L_HEADER}[p_type]
    a_header = {MESSAGE: M_A_HEADER, FILE: F_A_HEADER, COMMAND: C_A_HEADER}[p_type]
    e_header = {MESSAGE: M_E_HEADER, FILE: F_E_HEADER, COMMAND: C_E_HEADER}[p_type]

    if p_type in [MESSAGE, COMMAND]:
        payload = zlib.compress(payload, level=COMPRESSION_LEVEL)

    if len(payload) < PADDING_LEN:
        padded = byte_padding(payload)
        packet_list = [s_header + padded]

    else:
        if p_type == MESSAGE:
            msg_key = csprng()
            payload = encrypt_and_sign(payload, msg_key)
            payload += msg_key

        elif p_type == FILE:
            payload = bytes(FILE_PACKET_CTR_LEN) + payload

        elif p_type == COMMAND:
            payload += hash_chain(payload)

        padded = byte_padding(payload)
        p_list = split_byte_string(padded, item_len=PADDING_LEN)

        if p_type == FILE:
            p_list[0] = int_to_bytes(len(p_list)) + p_list[0][FILE_PACKET_CTR_LEN:]

        packet_list = ([l_header + p_list[0]]
                       + [a_header + p for p in p_list[1:-1]]
                       + [e_header + p_list[-1]])

    return packet_list
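
# To make the short/long/append/end header scheme above concrete, here is a
# self-contained sketch of the same splitting pattern. The one-byte headers, the
# 254-byte chunk size and the helper names are illustrative stand-ins, not the
# project's actual constants, and the sketch omits the inner encryption layer and
# the file packet counter described in the docstring.
import os
import zlib
from typing import List

CHUNK_LEN = 254                                          # Illustrative chunk size.
S_HDR, L_HDR, A_HDR, E_HDR = b'S', b'L', b'A', b'E'      # Short / Long / Append / End.


def _pad(data: bytes) -> bytes:
    """PKCS7-style padding up to a multiple of CHUNK_LEN."""
    pad_len = CHUNK_LEN - (len(data) % CHUNK_LEN)
    return data + pad_len * bytes([pad_len])


def split_message_sketch(message: bytes) -> List[bytes]:
    """Compress a message and split it into header-tagged, fixed-size packets."""
    payload = zlib.compress(message, level=9)
    if len(payload) < CHUNK_LEN:
        return [S_HDR + _pad(payload)]                   # Fits in one 'short' packet.
    padded = _pad(payload)
    chunks = [padded[i:i + CHUNK_LEN] for i in range(0, len(padded), CHUNK_LEN)]
    return ([L_HDR + chunks[0]]                          # 'Long' packet opens the series,
            + [A_HDR + c for c in chunks[1:-1]]          # 'append' packets continue it,
            + [E_HDR + chunks[-1]])                      # 'end' packet closes it.


assert len(split_message_sketch(b'short message')) == 1
assert len(split_message_sketch(os.urandom(1000))) > 1   # Random data does not compress.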
def store_groups(self) -> None:
    """Write groups to encrypted database."""
    groups = self.groups + [self.generate_dummy_group()] * (self.settings.max_number_of_groups - len(self.groups))

    pt_bytes = self.generate_group_db_header()
    pt_bytes += b''.join([g.serialize_g() for g in groups])
    ct_bytes = encrypt_and_sign(pt_bytes, self.master_key.master_key)

    ensure_dir(DIR_USER_DATA)
    with open(self.file_name, 'wb+') as f:
        f.write(ct_bytes)
def test_disabled_file_reception_raises_fr(self):
    # Setup
    file_ct = encrypt_and_sign(self.compressed, self.file_key)
    packet = nick_to_pub_key('Alice') + ORIGIN_CONTACT_HEADER + file_ct
    self.contact_list.get_contact_by_address_or_nick('Alice').file_reception = False

    # Test
    self.assert_fr("Alert! Discarded file from Alice as file reception for them is disabled.",
                   new_file, self.ts, packet, *self.args)
def test_decryption_fail_raises_fr(self):
    # Setup
    key = os.urandom(32)
    f_data = encrypt_and_sign(b'filedata', key)
    f_data += key[1:] + b''
    f_data = base64.b85encode(f_data)
    payload = US_BYTE.join([b'filename', b'unused', b'unused', f_data])
    nick = 'Alice'

    # Test
    self.assertFR("Decryption of file data failed.", process_received_file, payload, nick)
def test_missing_file_data_raises_fr(self):
    # Setup
    key = os.urandom(32)
    compressed = zlib.compress(b'', level=9)
    f_data = encrypt_and_sign(compressed, key)
    f_data += key
    f_data = base64.b85encode(f_data)
    payload = US_BYTE.join([b'filename', b'unused', b'unused', f_data])
    nick = 'Alice'

    # Test
    self.assertFR("Received file did not contain data.", process_received_file, payload, nick)
def store_contacts(self) -> None:
    """Write contacts to encrypted database."""
    dummy_contact_bytes = self.generate_dummy_contact()
    number_of_dummies = self.settings.m_number_of_accnts - len(self.contacts)

    pt_bytes = b''.join([c.dump_c() for c in self.contacts])
    pt_bytes += number_of_dummies * dummy_contact_bytes
    ct_bytes = encrypt_and_sign(pt_bytes, self.master_key.master_key)

    ensure_dir(f'{DIR_USER_DATA}/')
    with open(self.file_name, 'wb+') as f:
        f.write(ct_bytes)
def test_old_local_key_packet_raises_fr(self, *_):
    # Setup
    self.key_list.keysets = []

    new_key = os.urandom(SYMMETRIC_KEY_LENGTH)
    new_hek = os.urandom(SYMMETRIC_KEY_LENGTH)
    new_conf_code = os.urandom(CONFIRM_CODE_LENGTH)
    new_packet = encrypt_and_sign(new_key + new_hek + new_conf_code, key=self.new_kek)

    # Test
    self.assertIsNone(process_local_key(self.ts, self.packet, *self.args))
    self.assert_fr("Error: Received old local key packet.",
                   process_local_key, self.ts, self.packet, *self.args)
    self.assertIsNone(process_local_key(self.ts, new_packet, *self.args))
def test_encrypt_and_sign_with_kat(self):
    """Test encryption with official test vectors."""
    # Setup
    o_nacl_utils_random = nacl.utils.random
    nacl.utils.random = lambda _: self.nonce_tv

    # Test
    self.assertEqual(encrypt_and_sign(self.pt_tv, self.key_tv), self.nonce_tv + self.ct_tv)

    # Teardown
    nacl.utils.random = o_nacl_utils_random
def process_file_data(data: bytes) -> bytes:
    """Compress, encrypt and encode file data.

    Compress the file to reduce data transmission time. Add an inner layer
    of encryption to provide sender-based control over partial transmission.
    """
    compressed = zlib.compress(data, level=COMPRESSION_LEVEL)
    file_key = csprng()
    processed = encrypt_and_sign(compressed, key=file_key)
    processed += file_key
    return processed
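
# A rough illustration of how a receiver could undo process_file_data(), assuming
# encrypt_and_sign() behaves like an XSalsa20-Poly1305 SecretBox with the nonce
# prepended (as the split_to_assembly_packets() docstring suggests) and that the
# trailing 32 bytes are the file key. The helper decrypt_file_data_sketch() is
# hypothetical and not part of this codebase.
import zlib

from nacl.secret import SecretBox  # pip install pynacl

KEY_LEN = SecretBox.KEY_SIZE  # 32 bytes


def decrypt_file_data_sketch(processed: bytes) -> bytes:
    """Invert the compress -> encrypt -> append-key processing (illustrative only)."""
    ciphertext, file_key = processed[:-KEY_LEN], processed[-KEY_LEN:]
    compressed = SecretBox(file_key).decrypt(ciphertext)  # Raises CryptoError if tampered.
    return zlib.decompress(compressed)


# Round-trip demo with the same SecretBox-based stand-in for encrypt_and_sign():
demo_key = bytes(KEY_LEN)
demo_packet = SecretBox(demo_key).encrypt(zlib.compress(b'file contents')) + demo_key
assert decrypt_file_data_sketch(demo_packet) == b'file contents'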
def store_keys(self) -> None:
    """Write keys to encrypted database."""
    num_of_dummies = self.settings.m_number_of_accnts - len(self.keysets)
    dummy_keyset = self.generate_dummy_keyset()

    pt_bytes = b''.join([k.dump_k() for k in self.keysets])
    pt_bytes += num_of_dummies * dummy_keyset
    ct_bytes = encrypt_and_sign(pt_bytes, self.master_key.master_key)

    ensure_dir(f'{DIR_USER_DATA}/')
    with open(self.file_name, 'wb+') as f:
        f.write(ct_bytes)
def test_verify_file(self) -> None:
    # Setup
    pt_bytes = os.urandom(MASTERKEY_DB_SIZE)
    ct_bytes = encrypt_and_sign(pt_bytes, self.master_key.master_key)
    with open(self.database_name, 'wb') as f:
        f.write(ct_bytes)

    # Test valid file content returns True.
    self.assertTrue(self.database.verify_file(self.database_name))

    # Test invalid file content returns False.
    tamper_file(self.database_name, tamper_size=1)
    self.assertFalse(self.database.verify_file(self.database_name))
def store_groups(self) -> None:
    """Write groups to encrypted database."""
    dummy_group_bytes = self.generate_dummy_group()
    number_of_dummies = self.settings.m_number_of_groups - len(self.groups)

    pt_bytes = self.generate_header()
    pt_bytes += b''.join([g.dump_g() for g in self.groups])
    pt_bytes += number_of_dummies * dummy_group_bytes
    ct_bytes = encrypt_and_sign(pt_bytes, self.master_key.master_key)

    ensure_dir(f'{DIR_USER_DATA}/')
    with open(self.file_name, 'wb+') as f:
        f.write(ct_bytes)
def test_invalid_compression_raises_fr(self):
    # Setup
    key = os.urandom(32)
    compressed = zlib.compress(b'filedata', level=9)
    compressed = compressed[:-1] + b'a'
    f_data = encrypt_and_sign(compressed, key)
    f_data += key
    f_data = base64.b85encode(f_data)
    payload = US_BYTE.join([b'filename', b'unused', b'unused', f_data])
    nick = 'Alice'

    # Test
    self.assertFR("Decompression of file data failed.", process_received_file, payload, nick)
def test_invalid_content_raises_critical_error(self) -> None:
    # Setup
    invalid_data = b'a'
    pt_bytes = b''.join([k.serialize_k() for k in self.keylist.keysets + self.keylist._dummy_keysets()])
    ct_bytes = encrypt_and_sign(pt_bytes + invalid_data, self.master_key.master_key)

    ensure_dir(DIR_USER_DATA)
    with open(self.file_name, 'wb+') as f:
        f.write(ct_bytes)

    # Test
    with self.assertRaises(SystemExit):
        KeyList(self.master_key, self.settings)
def test_loading_invalid_onion_key_raises_critical_error(self, _):
    # Setup
    ct_bytes = encrypt_and_sign((ONION_SERVICE_PRIVATE_KEY_LENGTH + 1) * b'a',
                                self.master_key.master_key)

    ensure_dir(DIR_USER_DATA)
    with open(f'{DIR_USER_DATA}{TX}_onion_db', 'wb+') as f:
        f.write(ct_bytes)

    # Test
    with self.assertRaises(SystemExit):
        OnionService(self.master_key)
def test_invalid_compression_raises_fr(self):
    # Setup
    data = os.urandom(1000)
    compressed = zlib.compress(data, level=COMPRESSION_LEVEL)
    compressed = compressed[:-2] + b'aa'
    packet = IMPORTED_FILE_HEADER + encrypt_and_sign(compressed, self.key)

    input_list = ['bad', self.key_b58]
    gen = iter(input_list)
    builtins.input = lambda _: str(next(gen))

    # Test
    self.assertFR("Error: Decompression of file data failed.",
                  process_imported_file, self.ts, packet, self.window_list, self.settings)
def queue_message(user_input: Union['UserInput', 'Message'],
                  window:     Union['MockWindow', 'Window'],
                  settings:   'Settings',
                  m_queue:    'Queue',
                  header:     bytes = b'') -> None:
    """Convert the message into a set of assembly packets and queue them.

    :param user_input: UserInput object
    :param window:     Window object
    :param settings:   Settings object
    :param m_queue:    Multiprocessing message queue
    :param header:     Overrides message header with group management header
    :return:           None
    """
    if not header:
        if window.type == 'group':
            timestamp = double_to_bytes(time.time() * 1000)
            header = GROUP_MESSAGE_HEADER + timestamp + window.name.encode() + US_BYTE
        else:
            header = PRIVATE_MESSAGE_HEADER

    plaintext = user_input.plaintext.encode()
    payload = header + plaintext
    payload = zlib.compress(payload, level=9)

    if len(payload) < 255:
        padded = byte_padding(payload)
        packet_list = [M_S_HEADER + padded]
    else:
        msg_key = keygen()
        payload = encrypt_and_sign(payload, msg_key)
        payload += msg_key
        padded = byte_padding(payload)
        p_list = split_byte_string(padded, item_len=255)

        packet_list = ([M_L_HEADER + p_list[0]]
                       + [M_A_HEADER + p for p in p_list[1:-1]]
                       + [M_E_HEADER + p_list[-1]])

    if settings.session_trickle:
        log_m_dictionary = dict((c.rx_account, c.log_messages) for c in window)
        for p in packet_list:
            m_queue.put((p, log_m_dictionary))
    else:
        for c in window:
            log_setting = window.group.log_messages if window.type == 'group' else c.log_messages
            for p in packet_list:
                m_queue.put((p, settings, c.rx_account, c.tx_account, log_setting, window.uid))
def insert_log_entry(self, pt_log_entry: bytes) -> None:
    """Encrypt log entry and insert the ciphertext into the sqlite3 database."""
    ct_log_entry = encrypt_and_sign(pt_log_entry, self.database_key)

    try:
        self.c.execute("""INSERT INTO log_entries (log_entry) VALUES (?)""", (ct_log_entry,))
        self.conn.commit()
    except sqlite3.Error:
        # Re-connect to database and retry the insert.
        self.conn = sqlite3.connect(self.database_name)
        self.c = self.conn.cursor()
        self.insert_log_entry(pt_log_entry)
def test_successful_storage_during_traffic_masking(self, _: Any) -> None:
    # Setup
    self.settings.traffic_masking = True
    self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Bob'))

    compressed = zlib.compress(str_to_bytes("testfile.txt") + b'file_data', level=COMPRESSION_LEVEL)
    file_data = encrypt_and_sign(compressed, self.file_key)

    self.assertIsNone(process_file(self.ts, self.account, file_data, *self.args))
    self.assertEqual(self.window_list.get_window(nick_to_pub_key('Bob')).message_log[0][1],
                     "Stored file from Alice as 'testfile.txt'.")
    self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt'))