Example #1
def receiver_loop(queues:   Dict[bytes, 'Queue'],
                  gateway:  'Gateway',
                  unittest: bool = False
                  ) -> None:
    """Decode received packets and forward them to packet queues."""
    gateway_queue = queues[GATEWAY_QUEUE]

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            if gateway_queue.qsize() == 0:
                time.sleep(0.01)

            _, packet = gateway_queue.get()

            try:
                packet = gateway.detect_errors(packet)
            except FunctionReturn:
                continue

            header, ts_bytes, payload = separate_headers(packet, [DATAGRAM_HEADER_LENGTH, DATAGRAM_TIMESTAMP_LENGTH])

            try:
                ts = datetime.strptime(str(bytes_to_int(ts_bytes)), "%Y%m%d%H%M%S%f")
            except (ValueError, struct.error):
                m_print("Error: Failed to decode timestamp in the received packet.", head=1, tail=1)
                continue

            if header in [MESSAGE_DATAGRAM_HEADER, FILE_DATAGRAM_HEADER,
                          COMMAND_DATAGRAM_HEADER, LOCAL_KEY_DATAGRAM_HEADER]:
                queues[header].put((ts, payload))

            if unittest:
                break
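
The examples on this page repeatedly use the byte-slicing helpers separate_header, separate_headers and split_byte_string. Their definitions are not part of these snippets; a minimal sketch consistent with how they are called (an assumption, not the project's exact code) would be:

from typing import List, Tuple


def separate_header(bytestring: bytes, header_length: int) -> Tuple[bytes, bytes]:
    """Split a byte string into a fixed-length header and the remaining payload."""
    return bytestring[:header_length], bytestring[header_length:]


def separate_headers(bytestring: bytes, header_lengths: List[int]) -> List[bytes]:
    """Split a byte string into one field per listed length, plus the remaining payload."""
    fields = []
    for header_length in header_lengths:
        field, bytestring = separate_header(bytestring, header_length)
        fields.append(field)
    fields.append(bytestring)
    return fields


def split_byte_string(bytestring: bytes, item_len: int) -> List[bytes]:
    """Split a byte string into a list of fixed-length items."""
    return [bytestring[i:i + item_len] for i in range(0, len(bytestring), item_len)]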
Example #2
def add_onion_data(command: bytes, queues: 'QueueDict') -> None:
    """Add Onion Service data.

    Separate onion service private key and public keys for
    pending/existing contacts and add them as contacts.

    The ONION_KEY_QUEUE is read by
        relay.onion.onion_service()
    """
    os_private_key, confirmation_code, allow_req_byte, no_pending_bytes, ser_pub_keys \
        = separate_headers(command, [ONION_SERVICE_PRIVATE_KEY_LENGTH, CONFIRM_CODE_LENGTH,
                                     ENCODED_BOOLEAN_LENGTH, ENCODED_INTEGER_LENGTH])

    no_pending           = bytes_to_int(no_pending_bytes)
    public_key_list      = split_byte_string(ser_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH)
    pending_public_keys  = public_key_list[:no_pending]
    existing_public_keys = public_key_list[no_pending:]

    for onion_pub_key in pending_public_keys:
        add_contact(onion_pub_key, queues, existing=False)
    for onion_pub_key in existing_public_keys:
        add_contact(onion_pub_key, queues, existing=True)

    manage_contact_req(allow_req_byte, queues, notify=False)
    queues[ONION_KEY_QUEUE].put((os_private_key, confirmation_code))
Example #3
def src_incoming(queues:   'QueueDict',
                 gateway:  'Gateway',
                 unittest: bool = False
                 ) -> None:
    """\
    Redirect messages received from Source Computer to appropriate queues.
    """
    packets_from_sc   = queues[GATEWAY_QUEUE]
    packets_to_dc     = queues[DST_MESSAGE_QUEUE]
    commands_to_dc    = queues[DST_COMMAND_QUEUE]
    messages_to_flask = queues[M_TO_FLASK_QUEUE]
    files_to_flask    = queues[F_TO_FLASK_QUEUE]
    commands_to_relay = queues[SRC_TO_RELAY_QUEUE]

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while packets_from_sc.qsize() == 0:
                time.sleep(0.01)

            ts, packet = packets_from_sc.get()  # type: datetime, bytes
            ts_bytes   = int_to_bytes(int(ts.strftime('%Y%m%d%H%M%S%f')[:-4]))

            try:
                packet = gateway.detect_errors(packet)
            except FunctionReturn:
                continue

            header, packet = separate_header(packet, DATAGRAM_HEADER_LENGTH)

            if header == UNENCRYPTED_DATAGRAM_HEADER:
                commands_to_relay.put(packet)

            elif header in [COMMAND_DATAGRAM_HEADER, LOCAL_KEY_DATAGRAM_HEADER]:
                commands_to_dc.put(header + ts_bytes + packet)
                p_type = 'Command  ' if header == COMMAND_DATAGRAM_HEADER else 'Local key'
                rp_print(f"{p_type} to local Receiver", ts)

            elif header in [MESSAGE_DATAGRAM_HEADER, PUBLIC_KEY_DATAGRAM_HEADER]:
                onion_pub_key, payload = separate_header(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
                packet_str             = header.decode() + b85encode(payload)
                queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
                if header == MESSAGE_DATAGRAM_HEADER:
                    packets_to_dc.put(header + ts_bytes + onion_pub_key + ORIGIN_USER_HEADER + payload)

            elif header == FILE_DATAGRAM_HEADER:
                no_contacts_b, payload = separate_header(packet, ENCODED_INTEGER_LENGTH)
                no_contacts            = bytes_to_int(no_contacts_b)
                ser_accounts, file_ct  = separate_header(payload, no_contacts * ONION_SERVICE_PUBLIC_KEY_LENGTH)
                pub_keys               = split_byte_string(ser_accounts, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH)
                for onion_pub_key in pub_keys:
                    queue_to_flask(file_ct, onion_pub_key, files_to_flask, ts, header)

            elif header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER,
                            GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER,
                            GROUP_MSG_EXIT_GROUP_HEADER]:
                process_group_management_message(ts, packet, header, messages_to_flask)

            if unittest:
                break
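
src_incoming encodes the timestamp by truncating %Y%m%d%H%M%S%f to two sub-second digits and packing the resulting integer into bytes; Example #1 parses it back with strptime. A self-contained round trip of that format (using the stdlib int.to_bytes/int.from_bytes in place of the project's int_to_bytes/bytes_to_int helpers) looks like this:

from datetime import datetime

ts = datetime.now()

# Sender side: truncate microseconds to two digits and encode the digits as an 8-byte integer.
ts_bytes = int(ts.strftime('%Y%m%d%H%M%S%f')[:-4]).to_bytes(8, byteorder='big')

# Receiver side: decode the integer and parse the digit string back into a datetime.
decoded = datetime.strptime(str(int.from_bytes(ts_bytes, byteorder='big')), '%Y%m%d%H%M%S%f')

assert decoded.replace(microsecond=0) == ts.replace(microsecond=0)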
Example #4
def decrypt_assembly_packet(packet:        bytes,          # Assembly packet ciphertext
                            onion_pub_key: bytes,          # Onion Service pubkey of associated contact
                            origin:        bytes,          # Direction of packet
                            window_list:   'WindowList',   # WindowList object
                            contact_list:  'ContactList',  # ContactList object
                            key_list:      'KeyList'       # KeyList object
                            ) -> bytes:                    # Decrypted assembly packet
    """Decrypt assembly packet from contact/local Transmitter."""
    ct_harac, ct_assembly_packet = separate_header(packet, header_length=HARAC_CT_LENGTH)
    local_window                = window_list.get_local_window()
    command                     = onion_pub_key == LOCAL_PUBKEY

    p_type    = "command" if command                                      else "packet"
    direction = "from"    if command or (origin == ORIGIN_CONTACT_HEADER) else "sent to"
    nick      = contact_list.get_contact_by_pub_key(onion_pub_key).nick

    # Load keys
    keyset  = key_list.get_keyset(onion_pub_key)
    key_dir = TX if origin == ORIGIN_USER_HEADER else RX

    header_key  = getattr(keyset, f'{key_dir}_hk')  # type: bytes
    message_key = getattr(keyset, f'{key_dir}_mk')  # type: bytes

    if any(k == bytes(SYMMETRIC_KEY_LENGTH) for k in [header_key, message_key]):
        raise FunctionReturn("Warning! Loaded zero-key for packet decryption.")

    # Decrypt hash ratchet counter
    try:
        harac_bytes = auth_and_decrypt(ct_harac, header_key)
    except nacl.exceptions.CryptoError:
        raise FunctionReturn(
            f"Warning! Received {p_type} {direction} {nick} had an invalid hash ratchet MAC.", window=local_window)

    # Catch up with hash ratchet offset
    purp_harac   = bytes_to_int(harac_bytes)
    stored_harac = getattr(keyset, f'{key_dir}_harac')
    offset       = purp_harac - stored_harac
    if offset < 0:
        raise FunctionReturn(
            f"Warning! Received {p_type} {direction} {nick} had an expired hash ratchet counter.", window=local_window)

    process_offset(offset, origin, direction, nick, local_window)
    for harac in range(stored_harac, stored_harac + offset):
        message_key = blake2b(message_key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH)

    # Decrypt packet
    try:
        assembly_packet = auth_and_decrypt(ct_assembly_packet, message_key)
    except nacl.exceptions.CryptoError:
        raise FunctionReturn(f"Warning! Received {p_type} {direction} {nick} had an invalid MAC.", window=local_window)

    # Update message key and harac
    keyset.update_mk(key_dir,
                     blake2b(message_key + int_to_bytes(stored_harac + offset), digest_size=SYMMETRIC_KEY_LENGTH),
                     offset + 1)

    return assembly_packet
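
The catch-up loop above advances the message key one hash ratchet step per counter value: new_key = BLAKE2b(old_key || harac). A minimal sketch of that chain with hashlib, assuming the project's blake2b wrapper simply returns the digest and keys are 32 bytes:

import hashlib

SYMMETRIC_KEY_LENGTH = 32  # assumption: 256-bit symmetric keys


def ratchet_message_key(message_key: bytes, stored_harac: int, offset: int) -> bytes:
    """Advance the message key `offset` steps along the hash ratchet."""
    for harac in range(stored_harac, stored_harac + offset):
        message_key = hashlib.blake2b(message_key + harac.to_bytes(8, byteorder='big'),
                                      digest_size=SYMMETRIC_KEY_LENGTH).digest()
    return message_key


# Example: catch up three steps starting from counter value 5.
key = ratchet_message_key(bytes(SYMMETRIC_KEY_LENGTH), stored_harac=5, offset=3)
assert len(key) == SYMMETRIC_KEY_LENGTH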
Example #5
    def process_long_header(self,
                            packet: bytes,
                            packet_ct: Optional[bytes] = None) -> None:
        """Process first packet of long transmission."""
        if self.long_active:
            self.add_masking_packet_to_log_file(
                increase=len(self.assembly_pt_list))

        if self.type == FILE:
            self.new_file_packet()
            try:
                _, no_p_bytes, time_bytes, size_bytes, name_us_data \
                    = separate_headers(packet, [ASSEMBLY_PACKET_HEADER_LENGTH] + 3*[ENCODED_INTEGER_LENGTH])

                self.packets = bytes_to_int(no_p_bytes)  # added by transmitter.packet.split_to_assembly_packets
                self.time = str(timedelta(seconds=bytes_to_int(time_bytes)))
                self.size = readable_size(bytes_to_int(size_bytes))
                self.name = name_us_data.split(US_BYTE, 1)[0].decode()

                m_print([
                    f'Receiving file from {self.contact.nick}:',
                    f'{self.name} ({self.size})',
                    f'ETA {self.time} ({self.packets} packets)'
                ],
                        bold=True,
                        head=1,
                        tail=1)

            except (struct.error, UnicodeError, ValueError):
                self.add_masking_packet_to_log_file()
                raise SoftError(
                    "Error: Received file packet had an invalid header.")

        self.assembly_pt_list = [packet]
        self.long_active = True
        self.is_complete = False

        if packet_ct is not None:
            self.log_ct_list = [packet_ct]
Example #6
def display_logs(cmd_data: bytes, window_list: 'WindowList',
                 contact_list: 'ContactList', settings: 'Settings',
                 master_key: 'MasterKey') -> None:
    """Display log file for active window."""
    win_uid, no_msg_bytes = cmd_data.split(US_BYTE)
    no_messages = bytes_to_int(no_msg_bytes)
    window = window_list.get_window(win_uid.decode())
    access_history(window,
                   contact_list,
                   settings,
                   master_key,
                   msg_to_load=no_messages)
Example #7
    def load_keys(self) -> None:
        """Load keys from encrypted database."""
        with open(self.file_name, 'rb') as f:
            ct_bytes = f.read()

        pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)
        entries = split_byte_string(pt_bytes, item_len=KEYSET_LENGTH)
        keysets = [e for e in entries if not e.startswith(self.dummy_id)]

        for k in keysets:
            assert len(k) == KEYSET_LENGTH

            self.keysets.append(
                KeySet(rx_account=bytes_to_str(k[0:1024]),
                       tx_key=k[1024:1056],
                       rx_key=k[1056:1088],
                       tx_hek=k[1088:1120],
                       rx_hek=k[1120:1152],
                       tx_harac=bytes_to_int(k[1152:1160]),
                       rx_harac=bytes_to_int(k[1160:1168]),
                       store_keys=self.store_keys))
Example #8
def decrypt_assembly_packet(packet: bytes, window_list: 'WindowList',
                            contact_list: 'ContactList',
                            key_list: 'KeyList') -> Tuple[bytes, str, bytes]:
    """Decrypt assembly packet from contact/local TxM."""
    enc_harac = packet[1:49]
    enc_msg = packet[49:345]
    window = window_list.get_local_window()

    origin, direction, key_dir, p_type, account, nick = get_packet_values(
        packet, window, contact_list)

    # Load keys
    keyset = key_list.get_keyset(account)
    header_key = getattr(keyset, f'{key_dir}_hek')
    message_key = getattr(keyset, f'{key_dir}_key')

    if any(k == bytes(KEY_LENGTH) for k in [header_key, message_key]):
        raise FunctionReturn("Warning! Loaded zero-key for packet decryption.")

    # Decrypt hash ratchet counter
    try:
        harac_bytes = auth_and_decrypt(enc_harac, header_key, soft_e=True)
    except nacl.exceptions.CryptoError:
        raise FunctionReturn(
            f"Warning! Received {p_type} {direction} {nick} had an invalid hash ratchet MAC.",
            window=window)

    # Catch up with hash ratchet offset
    purp_harac = bytes_to_int(harac_bytes)
    stored_harac = getattr(keyset, f'{key_dir}_harac')
    offset = purp_harac - stored_harac
    if offset < 0:
        raise FunctionReturn(
            f"Warning! Received {p_type} {direction} {nick} had an expired hash ratchet counter.",
            window=window)

    process_offset(offset, origin, direction, nick, window)
    for _ in range(offset):
        message_key = hash_chain(message_key)

    # Decrypt packet
    try:
        assembly_packet = auth_and_decrypt(enc_msg, message_key, soft_e=True)
    except nacl.exceptions.CryptoError:
        raise FunctionReturn(
            f"Warning! Received {p_type} {direction} {nick} had an invalid MAC.",
            window=window)

    # Update keys in database
    keyset.update_key(key_dir, hash_chain(message_key), offset + 1)

    return assembly_packet, account, origin
Example #9
def process_file_datagram(ts: 'datetime', packet: bytes, header: bytes,
                          queues: 'QueueDict') -> None:
    """Process file datagram."""
    files_to_flask = queues[F_TO_FLASK_QUEUE]
    no_contacts_b, payload = separate_header(packet, ENCODED_INTEGER_LENGTH)
    no_contacts = bytes_to_int(no_contacts_b)
    ser_accounts, file_ct = separate_header(
        payload, no_contacts * ONION_SERVICE_PUBLIC_KEY_LENGTH)
    pub_keys = split_byte_string(ser_accounts,
                                 item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH)

    for onion_pub_key in pub_keys:
        queue_to_flask(file_ct, onion_pub_key, files_to_flask, ts, header)
Example #10
def process_group_management_message(
        ts: 'datetime', packet: bytes, header: bytes,
        messages_to_flask: 'Queue[Tuple[Union[bytes, str], bytes]]') -> None:
    """Parse and display group management message."""
    header_str = header.decode()
    group_id, packet = separate_header(packet, GROUP_ID_LENGTH)

    if header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER]:
        pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
        for onion_pub_key in pub_keys:
            others = [k for k in pub_keys if k != onion_pub_key]
            packet_str = header_str + b85encode(group_id + b''.join(others))
            queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts,
                           header)

    elif header in [GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]:
        first_list_len_b, packet = separate_header(packet,
                                                   ENCODED_INTEGER_LENGTH)
        first_list_length = bytes_to_int(first_list_len_b)
        pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
        before_adding = remaining = pub_keys[:first_list_length]
        new_in_group = removable = pub_keys[first_list_length:]

        if header == GROUP_MSG_MEMBER_ADD_HEADER:

            packet_str = GROUP_MSG_MEMBER_ADD_HEADER.decode() + b85encode(
                group_id + b''.join(new_in_group))
            for onion_pub_key in before_adding:
                queue_to_flask(packet_str, onion_pub_key, messages_to_flask,
                               ts, header)

            for onion_pub_key in new_in_group:
                other_new = [k for k in new_in_group if k != onion_pub_key]
                packet_str = (
                    GROUP_MSG_INVITE_HEADER.decode() +
                    b85encode(group_id + b''.join(other_new + before_adding)))
                queue_to_flask(packet_str, onion_pub_key, messages_to_flask,
                               ts, header)

        elif header == GROUP_MSG_MEMBER_REM_HEADER:
            packet_str = header_str + b85encode(group_id + b''.join(removable))
            for onion_pub_key in remaining:
                queue_to_flask(packet_str, onion_pub_key, messages_to_flask,
                               ts, header)

    elif header == GROUP_MSG_EXIT_GROUP_HEADER:
        pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
        packet_str = header_str + b85encode(group_id)
        for onion_pub_key in pub_keys:
            queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts,
                           header)
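
The packet_str concatenations above only work if b85encode here is the project's text-returning wrapper rather than the bytes-returning base64.b85encode. A sketch of such a wrapper, assuming that is the case:

import base64


def b85encode(data: bytes) -> str:
    """Base85-encode a byte string and return it as text (assumed thin wrapper around base64.b85encode)."""
    return base64.b85encode(data).decode()


packet_str = 'g' + b85encode(bytes(32))  # hypothetical one-character header followed by an encoded payload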
Example #11
    def load_master_key(self) -> bytes:
        """Derive the master key from password and salt.

        Load the salt, hash, and key derivation settings from the login
        database. Derive the purported master key from the salt and
        entered password. If the BLAKE2b hash of derived master key
        matches the hash in the login database, accept the derived
        master key.
        """
        with open(self.file_name, 'rb') as f:
            data = f.read()

        if len(data) != MASTERKEY_DB_SIZE:
            raise CriticalError(f"Invalid {self.file_name} database size.")

        salt, key_hash, time_bytes, memory_bytes, parallelism_bytes \
            = separate_headers(data, [ARGON2_SALT_LENGTH, BLAKE2_DIGEST_LENGTH,
                                      ENCODED_INTEGER_LENGTH, ENCODED_INTEGER_LENGTH])

        time_cost = bytes_to_int(time_bytes)
        memory_cost = bytes_to_int(memory_bytes)
        parallelism = bytes_to_int(parallelism_bytes)

        while True:
            password = MasterKey.get_password()
            phase("Deriving master key",
                  head=2,
                  offset=len("Password correct"))
            purp_key = argon2_kdf(password, salt, time_cost, memory_cost,
                                  parallelism)

            if blake2b(purp_key) == key_hash:
                phase("Password correct", done=True, delay=1)
                clear_screen()
                return purp_key
            else:
                phase("Invalid password", done=True, delay=1)
                print_on_previous_line(reps=5)
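
argon2_kdf and blake2b above are project helpers. A sketch of the derive-and-verify step using the argon2-cffi and hashlib libraries (the Argon2id variant, the 32-byte output length and the parameter mapping are assumptions):

import hashlib

from argon2.low_level import Type, hash_secret_raw


def derive_and_verify(password: str, salt: bytes, time_cost: int, memory_cost: int,
                      parallelism: int, key_hash: bytes) -> bytes:
    """Derive a purported master key and check it against the stored BLAKE2b hash."""
    purp_key = hash_secret_raw(secret=password.encode(),
                               salt=salt,
                               time_cost=time_cost,
                               memory_cost=memory_cost,
                               parallelism=parallelism,
                               hash_len=32,
                               type=Type.ID)
    if hashlib.blake2b(purp_key, digest_size=32).digest() != key_hash:
        raise ValueError("Invalid password.")
    return purp_key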
Example #12
    def load_keys(self) -> None:
        """Load keys from encrypted database."""
        ensure_dir(f'{DIR_USER_DATA}/')
        with open(self.file_name, 'rb') as f:
            ct_bytes = f.read()

        pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)
        keysets = split_byte_string(pt_bytes,
                                    item_len=1168)  # 1024 + 4 * 32 + 2 * 8
        dummy_id = 'dummy_contact'.encode('utf-32')
        keysets = [k for k in keysets if not k.startswith(dummy_id)]

        for k in keysets:
            rx_account = bytes_to_str(k[0:1024])
            tx_key = k[1024:1056]
            rx_key = k[1056:1088]
            tx_hek = k[1088:1120]
            rx_hek = k[1120:1152]
            tx_harac = bytes_to_int(k[1152:1160])
            rx_harac = bytes_to_int(k[1160:1168])

            self.keysets.append(
                KeySet(rx_account, tx_key, rx_key, tx_hek, rx_hek, tx_harac,
                       rx_harac, self.store_keys))
Example #13
    def load_master_key(self) -> None:
        """Derive master key from password and salt."""
        with open(self.file_name, 'rb') as f:
            data = f.read()
        salt = data[0:32]
        key_hash = data[32:64]
        rounds = bytes_to_int(data[64:72])
        memory = bytes_to_int(data[72:80])
        parallelism = bytes_to_int(data[80:88])

        while True:
            password = MasterKey.get_password()
            phase("Deriving master key", head=2, offset=16)
            purp_key, _ = argon2_kdf(password, salt, rounds, memory,
                                     parallelism)

            if hash_chain(purp_key) == key_hash:
                self.master_key = purp_key
                phase("Password correct", done=True)
                clear_screen(delay=0.5)
                break
            else:
                phase("Invalid password", done=True)
                print_on_previous_line(reps=5, delay=1)
Example #14
    def _load_keys(self) -> None:
        """Load KeySets from the encrypted database.

        This function first reads and decrypts the database content. It
        then splits the plaintext into a list of 176-byte blocks. Each
        block contains the serialized data of one KeySet. Next, the
        function will remove from the list all dummy KeySets (that start
        with the `dummy_id` byte string). The function will then
        populate the `self.keysets` list with KeySet objects, the data
        of which is sliced and decoded from the dummy-free blocks.
        """
        with open(self.file_name, 'rb') as f:
            ct_bytes = f.read()

        pt_bytes = auth_and_decrypt(ct_bytes,
                                    self.master_key.master_key,
                                    database=self.file_name)
        blocks = split_byte_string(pt_bytes, item_len=KEYSET_LENGTH)
        df_blocks = [b for b in blocks if not b.startswith(self.dummy_id)]

        for block in df_blocks:
            if len(block) != KEYSET_LENGTH:
                raise CriticalError("Invalid data in key database.")

            onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk, tx_harac_bytes, rx_harac_bytes \
                = separate_headers(block, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH] + [HARAC_LENGTH])

            self.keysets.append(
                KeySet(onion_pub_key=onion_pub_key,
                       tx_mk=tx_mk,
                       rx_mk=rx_mk,
                       tx_hk=tx_hk,
                       rx_hk=rx_hk,
                       tx_harac=bytes_to_int(tx_harac_bytes),
                       rx_harac=bytes_to_int(rx_harac_bytes),
                       store_keys=self.store_keys))
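
The 176-byte block size mentioned in the docstring follows from the field lengths used in the slice above. A sketch of the assumed constants and the resulting layout check:

ONION_SERVICE_PUBLIC_KEY_LENGTH = 32  # assumption: 32-byte v3 Onion Service public key
SYMMETRIC_KEY_LENGTH            = 32  # assumption: 256-bit symmetric keys
HARAC_LENGTH                    = 8   # assumption: 8-byte hash ratchet counter

KEYSET_LENGTH = (ONION_SERVICE_PUBLIC_KEY_LENGTH
                 + 4 * SYMMETRIC_KEY_LENGTH  # tx_mk, rx_mk, tx_hk, rx_hk
                 + 2 * HARAC_LENGTH)         # tx_harac, rx_harac

assert KEYSET_LENGTH == 176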
Example #15
def process_file_datagram(
    ts: 'datetime',
    packet: bytes,
    header: bytes,
    buf_key: bytes,
) -> None:
    """Process file datagram."""
    no_contacts_b, payload = separate_header(packet, ENCODED_INTEGER_LENGTH)
    no_contacts = bytes_to_int(no_contacts_b)
    ser_accounts, file_ct = separate_header(
        payload, no_contacts * ONION_SERVICE_PUBLIC_KEY_LENGTH)
    pub_keys = split_byte_string(ser_accounts,
                                 item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH)

    for onion_pub_key in pub_keys:
        buffer_to_flask(file_ct, onion_pub_key, ts, header, buf_key, file=True)
Example #16
def export_logs(cmd_data: bytes, ts: 'datetime', window_list: 'WindowList',
                contact_list: 'ContactList', settings: 'Settings',
                master_key: 'MasterKey') -> None:
    """Export log file for active window."""
    win_uid, no_msg_bytes = cmd_data.split(US_BYTE)
    no_messages = bytes_to_int(no_msg_bytes)
    window = window_list.get_window(win_uid.decode())
    access_history(window,
                   contact_list,
                   settings,
                   master_key,
                   msg_to_load=no_messages,
                   export=True)

    local_win = window_list.get_window('local')
    local_win.print_new(ts,
                        f"Exported logfile of {window.type} {window.name}.")
Example #17
def process_group_management_message(
    ts: 'datetime',
    packet: bytes,
    header: bytes,
    buf_key: bytes,
) -> None:
    """Parse and display group management message."""
    header_str = header.decode()
    group_id, packet = separate_header(packet, GROUP_ID_LENGTH)

    if header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER]:
        pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
        for onion_pub_key in pub_keys:
            others = [k for k in pub_keys if k != onion_pub_key]
            packet_str = header_str + b85encode(group_id + b''.join(others))
            buffer_to_flask(packet_str, onion_pub_key, ts, header, buf_key)

    elif header in [GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]:
        first_list_len_b, packet = separate_header(packet,
                                                   ENCODED_INTEGER_LENGTH)
        first_list_length = bytes_to_int(first_list_len_b)
        pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
        before_adding = remaining = pub_keys[:first_list_length]
        new_in_group = removable = pub_keys[first_list_length:]

        if header == GROUP_MSG_MEMBER_ADD_HEADER:

            process_add_or_group_remove_member(ts, header, buf_key, header_str,
                                               group_id, before_adding,
                                               new_in_group)

            for onion_pub_key in new_in_group:
                other_new = [k for k in new_in_group if k != onion_pub_key]
                packet_str = (
                    GROUP_MSG_INVITE_HEADER.decode() +
                    b85encode(group_id + b''.join(other_new + before_adding)))
                buffer_to_flask(packet_str, onion_pub_key, ts, header, buf_key)

        elif header == GROUP_MSG_MEMBER_REM_HEADER:
            process_add_or_group_remove_member(ts, header, buf_key, header_str,
                                               group_id, remaining, removable)

    elif header == GROUP_MSG_EXIT_GROUP_HEADER:
        process_group_exit_header(ts, packet, header, buf_key, header_str,
                                  group_id)
Example #18
def log_command(cmd_data: bytes, ts: 'datetime', window_list: 'WindowList',
                contact_list: 'ContactList', group_list: 'GroupList',
                settings: 'Settings', master_key: 'MasterKey') -> None:
    """Display or export log file for the active window."""
    export = ts is not None
    ser_no_msg, uid = separate_header(cmd_data, ENCODED_INTEGER_LENGTH)
    no_messages = bytes_to_int(ser_no_msg)
    window = window_list.get_window(uid)
    access_logs(window,
                contact_list,
                group_list,
                settings,
                master_key,
                msg_to_load=no_messages,
                export=export)

    if export:
        local_win = window_list.get_local_window()
        local_win.add_new(
            ts,
            f"Exported log file of {window.type} '{window.name}'.",
            output=True)
Example #19
def log_command(cmd_data: bytes, ts: 'datetime', window_list: 'WindowList',
                contact_list: 'ContactList', group_list: 'GroupList',
                settings: 'Settings', master_key: 'MasterKey') -> None:
    """Display or export logfile for active window."""
    export = ts is not None
    win_uid, no_msg_bytes = cmd_data.split(US_BYTE)
    no_messages = bytes_to_int(no_msg_bytes)
    window = window_list.get_window(win_uid.decode())
    access_logs(window,
                contact_list,
                group_list,
                settings,
                master_key,
                msg_to_load=no_messages,
                export=export)

    if export:
        local_win = window_list.get_window(LOCAL_ID)
        local_win.add_new(
            ts,
            f"Exported logfile of {window.type_print} {window.name}.",
            output=True)
Example #20
    def load_settings(self) -> None:
        """Load settings from encrypted database."""
        ensure_dir(f'{DIR_USER_DATA}/')
        with open(self.file_name, 'rb') as f:
            ct_bytes = f.read()

        pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)

        # Update settings based on plaintext byte string content
        for i, key in enumerate(self.key_list):

            attribute = self.__getattribute__(key)

            if isinstance(attribute, bool):
                value = bytes_to_bool(
                    pt_bytes[0])  # type: Union[bool, int, float, str]
                pt_bytes = pt_bytes[1:]

            elif isinstance(attribute, int):
                value = bytes_to_int(pt_bytes[:8])
                pt_bytes = pt_bytes[8:]

            elif isinstance(attribute, float):
                value = bytes_to_double(pt_bytes[:8])
                pt_bytes = pt_bytes[8:]

            elif isinstance(attribute, str):
                value = bytes_to_str(pt_bytes[:1024])
                pt_bytes = pt_bytes[1024:]  # 255 * 4 = 1020. The four additional bytes are the UTF-32 BOM.

            else:
                raise CriticalError(
                    "Invalid data type in settings default values.")

            setattr(self, key, value)
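
load_settings consumes 1 byte per boolean, 8 bytes per integer, 8 bytes per double and 1024 bytes per string. A sketch of the boolean and double decoders it relies on (the big-endian double format is an assumption; bytes_to_int is sketched after Example #23):

import struct


def bytes_to_bool(data) -> bool:
    """Decode a 1-byte field (or a single byte value) into a boolean."""
    return bool(data[0] if isinstance(data, bytes) else data)


def bytes_to_double(data: bytes) -> float:
    """Decode an 8-byte IEEE 754 double."""
    return struct.unpack('>d', data)[0]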
Example #21
    def load_groups(self) -> None:
        """Load groups from encrypted database."""
        ensure_dir(f'{DIR_USER_DATA}/')
        with open(self.file_name, 'rb') as f:
            ct_bytes = f.read()

        pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)
        update_db = False

        # Slice and decode headers
        padding_for_g = bytes_to_int(pt_bytes[0:8])
        padding_for_m = bytes_to_int(pt_bytes[8:16])
        n_of_actual_g = bytes_to_int(pt_bytes[16:24])
        largest_group = bytes_to_int(pt_bytes[24:32])

        if n_of_actual_g > self.settings.m_number_of_groups:
            self.settings.m_number_of_groups = round_up(n_of_actual_g)
            self.settings.store_settings()
            update_db = True
            print(
                "Group database had {} groups. Increased max number of groups to {}."
                .format(n_of_actual_g, self.settings.m_number_of_groups))

        if largest_group > self.settings.m_members_in_group:
            self.settings.m_members_in_group = round_up(largest_group)
            self.settings.store_settings()
            update_db = True
            print(
                "A group in group database had {} members. Increased max size of groups to {}."
                .format(largest_group, self.settings.m_members_in_group))

        # Strip header bytes
        pt_bytes = pt_bytes[32:]

        #                 (      no_fields     * (padding + BOM) * bytes/char) + booleans
        bytes_per_group = ((1 + padding_for_m) * (255 + 1) * 4) + 2

        # Remove dummy groups
        no_dummy_groups = padding_for_g - n_of_actual_g
        pt_bytes = pt_bytes[:-(no_dummy_groups * bytes_per_group)]

        groups = split_byte_string(pt_bytes, item_len=bytes_per_group)

        for g in groups:

            # Remove padding
            name = bytes_to_str(g[0:1024])
            log_messages = bytes_to_bool(g[1024:1025])
            notifications = bytes_to_bool(g[1025:1026])
            members_b = split_byte_string(g[1026:], item_len=1024)
            members = [bytes_to_str(m) for m in members_b]

            # Remove dummy members
            members_df = [m for m in members if not m == 'dummy_member']

            # Load contacts based on stored rx_account
            group_members = [
                self.contact_list.get_contact(m) for m in members_df
                if self.contact_list.has_contact(m)
            ]

            self.groups.append(
                Group(name, log_messages, notifications, group_members,
                      self.settings, self.store_groups))

        if update_db:
            self.store_groups()
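
The bytes_per_group formula expands to one padded UTF-32 field of (255 + 1) * 4 = 1024 bytes for the group name, one such field per member slot, and two one-byte booleans. With a hypothetical member padding of 20:

padding_for_m   = 20  # hypothetical padding value, for illustration only
bytes_per_group = ((1 + padding_for_m) * (255 + 1) * 4) + 2
assert bytes_per_group == 21 * 1024 + 2 == 21506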
Example #22
def decrypt_assembly_packet(
    packet: bytes,  # Assembly packet ciphertext
    onion_pub_key: bytes,  # Onion Service pubkey of associated contact
    origin: bytes,  # Direction of packet
    window_list: 'WindowList',  # WindowList object
    contact_list: 'ContactList',  # ContactList object
    key_list: 'KeyList'  # KeyList object
) -> bytes:  # Decrypted assembly packet
    """Decrypt assembly packet from contact/local Transmitter.

    This function authenticates and decrypts incoming message and
    command datagrams. This function does not authenticate/decrypt
    incoming file and/or local key datagrams.

    While all message datagrams have been implicitly assumed to have
    originated from some contact until this point, to prevent the
    possibility of existential forgeries, the origin of the message will
    be validated at this point with the cryptographic Poly1305-tag.

    As per the cryptographic doom principle, the message won't be even
    decrypted unless the Poly1305 tag of the ciphertext is valid.

    This function also authenticates packets that handle control flow of
    the Receiver program. Like messages, command datagrams have been
    implicitly assumed to be commands until this point. However, unless
    the Poly1305-tag of the purported command is found to be valid with
    the forward secret local key, it will not be even decrypted, let
    alone processed.
    """
    ct_harac, ct_assembly_packet = separate_header(
        packet, header_length=HARAC_CT_LENGTH)
    cmd_win = window_list.get_command_window()
    command = onion_pub_key == LOCAL_PUBKEY

    p_type = "command" if command else "packet"
    direction = "from" if command or (origin
                                      == ORIGIN_CONTACT_HEADER) else "sent to"
    nick = contact_list.get_nick_by_pub_key(onion_pub_key)

    # Load keys
    keyset = key_list.get_keyset(onion_pub_key)
    key_dir = TX if origin == ORIGIN_USER_HEADER else RX

    header_key = getattr(keyset, f'{key_dir}_hk')  # type: bytes
    message_key = getattr(keyset, f'{key_dir}_mk')  # type: bytes

    if any(k == bytes(SYMMETRIC_KEY_LENGTH)
           for k in [header_key, message_key]):
        raise SoftError("Warning! Loaded zero-key for packet decryption.")

    # Decrypt hash ratchet counter
    try:
        harac_bytes = auth_and_decrypt(ct_harac, header_key)
    except nacl.exceptions.CryptoError:
        raise SoftError(
            f"Warning! Received {p_type} {direction} {nick} had an invalid hash ratchet MAC.",
            window=cmd_win)

    # Catch up with hash ratchet offset
    purp_harac = bytes_to_int(harac_bytes)
    stored_harac = getattr(keyset, f'{key_dir}_harac')
    offset = purp_harac - stored_harac
    if offset < 0:
        raise SoftError(
            f"Warning! Received {p_type} {direction} {nick} had an expired hash ratchet counter.",
            window=cmd_win)

    process_offset(offset, origin, direction, nick, cmd_win)
    for harac in range(stored_harac, stored_harac + offset):
        message_key = blake2b(message_key + int_to_bytes(harac),
                              digest_size=SYMMETRIC_KEY_LENGTH)

    # Decrypt packet
    try:
        assembly_packet = auth_and_decrypt(ct_assembly_packet, message_key)
    except nacl.exceptions.CryptoError:
        raise SoftError(
            f"Warning! Received {p_type} {direction} {nick} had an invalid MAC.",
            window=cmd_win)

    # Update message key and harac
    new_key = blake2b(message_key + int_to_bytes(stored_harac + offset),
                      digest_size=SYMMETRIC_KEY_LENGTH)
    keyset.update_mk(key_dir, new_key, offset + 1)

    return assembly_packet
Example #23
    def test_bytes_to_int(self):
        self.assertEqual(bytes_to_int(b'\x00\x00\x00\x00\x00\x00\x00\x01'), 1)
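
The test pins down the integer encoding: 8 bytes, big-endian, unsigned. A minimal sketch of the conversion pair consistent with it (the project's actual helpers may differ in detail):

def int_to_bytes(integer: int) -> bytes:
    """Encode an integer as an 8-byte big-endian byte string."""
    return integer.to_bytes(8, byteorder='big')


def bytes_to_int(byte_string: bytes) -> int:
    """Decode a big-endian byte string into an integer."""
    return int.from_bytes(byte_string, byteorder='big')


assert bytes_to_int(b'\x00\x00\x00\x00\x00\x00\x00\x01') == 1
assert int_to_bytes(1) == b'\x00\x00\x00\x00\x00\x00\x00\x01'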
Example #24
    def load_groups(self) -> None:
        """Load groups from encrypted database."""
        with open(self.file_name, 'rb') as f:
            ct_bytes = f.read()

        pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)
        update_db = False

        # Slice and decode headers
        padding_for_group_db = bytes_to_int(pt_bytes[0:8])
        padding_for_members = bytes_to_int(pt_bytes[8:16])
        number_of_actual_groups = bytes_to_int(pt_bytes[16:24])
        largest_group = bytes_to_int(pt_bytes[24:32])

        if number_of_actual_groups > self.settings.max_number_of_groups:
            self.settings.max_number_of_groups = round_up(
                number_of_actual_groups)
            self.settings.store_settings()
            update_db = True
            print(
                "Group database had {} groups. Increased max number of groups to {}."
                .format(number_of_actual_groups,
                        self.settings.max_number_of_groups))

        if largest_group > self.settings.max_number_of_group_members:
            self.settings.max_number_of_group_members = round_up(largest_group)
            self.settings.store_settings()
            update_db = True
            print(
                "A group in group database had {} members. Increased max size of groups to {}."
                .format(largest_group,
                        self.settings.max_number_of_group_members))

        group_name_field = 1
        string_fields_in_group = padding_for_members + group_name_field
        bytes_per_group = string_fields_in_group * PADDED_UTF32_STR_LEN + 2 * BOOLEAN_SETTING_LEN

        # Remove group header and dummy groups
        dummy_group_data = (padding_for_group_db -
                            number_of_actual_groups) * bytes_per_group
        group_data = pt_bytes[GROUP_DB_HEADER_LEN:-dummy_group_data]

        groups = split_byte_string(group_data, item_len=bytes_per_group)

        for g in groups:
            assert len(g) == bytes_per_group

            name = bytes_to_str(g[0:1024])
            log_messages = bytes_to_bool(g[1024:1025])
            notifications = bytes_to_bool(g[1025:1026])
            members_bytes = split_byte_string(g[1026:],
                                              item_len=PADDED_UTF32_STR_LEN)
            members_w_dummies = [bytes_to_str(m) for m in members_bytes]
            members = [m for m in members_w_dummies if m != DUMMY_MEMBER]

            # Load contacts based on stored rx_account
            group_members = [
                self.contact_list.get_contact(m) for m in members
                if self.contact_list.has_contact(m)
            ]

            # Update group database if any member has been removed from contact database
            if not all(m in self.contact_list.get_list_of_accounts()
                       for m in members):
                update_db = True

            self.groups.append(
                Group(name, log_messages, notifications, group_members,
                      self.settings, self.store_groups))

        if update_db:
            self.store_groups()