Example #1
class GroupList(Iterable[Group], Sized):
    """\
    GroupList object manages TFC's Group objects and the storage of the
    objects in an encrypted database.

    The main purpose of this object is to manage the `self.groups`-list
    that contains TFC's groups. The database is stored on disk in
    encrypted form. Prior to encryption, the database is padded with
    dummy groups. Because each group might have a different number of
    members, each group is also padded with dummy members. The dummy
    groups and members hide the actual number of groups and members that
    could otherwise be revealed by the size of the encrypted database.

    As long as the user sticks to the default settings that limit TFC's
    group database to 50 groups and 50 members per group, the database
    will effectively hide the actual number of groups and the number of
    members in them. The maximum number of groups and number of members
    per group can be changed by editing the `max_number_of_groups` and
    `max_number_of_group_members` settings respectively. Deviating from
    the default settings can, however, in theory, reveal to a physical
    attacker that the user has more than 50 groups or more than 50
    members in a group.

    The GroupList object also provides handy methods with human-readable
    names for making queries to the database.
    """
    def __init__(self, master_key: 'MasterKey', settings: 'Settings',
                 contact_list: 'ContactList') -> None:
        """Create a new GroupList object."""
        self.settings = settings
        self.contact_list = contact_list
        self.groups = []  # type: List[Group]
        self.file_name = f'{DIR_USER_DATA}{settings.software_operation}_groups'
        self.database = TFCDatabase(self.file_name, master_key)

        ensure_dir(DIR_USER_DATA)
        if os.path.isfile(self.file_name):
            self._load_groups()
        else:
            self.store_groups()

    def __iter__(self) -> Iterator[Group]:
        """Iterate over Group objects in `self.groups`."""
        yield from self.groups

    def __len__(self) -> int:
        """Return the number of Group objects in `self.groups`."""
        return len(self.groups)

    def store_groups(self, replace: bool = True) -> None:
        """Write the list of groups to an encrypted database.

        This function will first generate a header that stores
        information about the group database content and padding at the
        moment of calling. Next, the function will serialize every Group
        object (including dummy groups) to form the constant length
        plaintext that will be encrypted and stored in the database.

        By default, TFC has a maximum number of 50 groups with 50
        members. In addition, the group database stores the header that
        contains four 8-byte values. The database plaintext length with
        50 groups, each with 50 members is
            4*8 + 50*(1024 + 4 + 2*1 + 50*32)
          =  32 + 50*2630
          = 131532 bytes.

        The ciphertext includes a 24-byte nonce and a 16-byte tag, so
        the size of the final database is 131572 bytes.
        """
        pt_bytes = self._generate_group_db_header()
        pt_bytes += b''.join(
            [g.serialize_g() for g in (self.groups + self._dummy_groups())])
        self.database.store_database(pt_bytes, replace)

    def _load_groups(self) -> None:
        """Load groups from the encrypted database.

        The function first reads, authenticates and decrypts the group
        database data. Next, it slices and decodes the header values
        that help the function to properly de-serialize the database
        content. The function then removes dummy groups based on header
        data. Next, the function updates the group database settings if
        necessary. It then splits group data based on header data into
        blocks, which are further sliced, and processed if necessary,
        to obtain data required to create Group objects. Finally, if
        needed, the function will update the group database content.
        """
        pt_bytes = self.database.load_database()

        # Slice and decode headers
        group_db_headers, pt_bytes = separate_header(pt_bytes,
                                                     GROUP_DB_HEADER_LENGTH)

        padding_for_group_db, padding_for_members, number_of_groups, members_in_largest_group \
            = list(map(bytes_to_int, split_byte_string(group_db_headers, ENCODED_INTEGER_LENGTH)))

        # Slice dummy groups
        bytes_per_group = GROUP_STATIC_LENGTH + padding_for_members * ONION_SERVICE_PUBLIC_KEY_LENGTH
        dummy_data_len = (padding_for_group_db -
                          number_of_groups) * bytes_per_group
        group_data = pt_bytes[:-dummy_data_len]

        update_db = self._check_db_settings(number_of_groups,
                                            members_in_largest_group)
        blocks = split_byte_string(group_data, item_len=bytes_per_group)

        all_pub_keys = self.contact_list.get_list_of_pub_keys()
        dummy_pub_key = onion_address_to_pub_key(DUMMY_MEMBER)

        # Deserialize group objects
        for block in blocks:
            if len(block) != bytes_per_group:
                raise CriticalError("Invalid data in group database.")

            name_bytes, group_id, log_messages_byte, notification_byte, ser_pub_keys \
                = separate_headers(block, [PADDED_UTF32_STR_LENGTH, GROUP_ID_LENGTH] + 2*[ENCODED_BOOLEAN_LENGTH])

            pub_key_list = split_byte_string(
                ser_pub_keys, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH)
            group_pub_keys = [k for k in pub_key_list if k != dummy_pub_key]
            group_members = [
                self.contact_list.get_contact_by_pub_key(k)
                for k in group_pub_keys if k in all_pub_keys
            ]

            self.groups.append(
                Group(name=bytes_to_str(name_bytes),
                      group_id=group_id,
                      log_messages=bytes_to_bool(log_messages_byte),
                      notifications=bytes_to_bool(notification_byte),
                      members=group_members,
                      settings=self.settings,
                      store_groups=self.store_groups))

            update_db |= set(all_pub_keys) > set(group_pub_keys)

        if update_db:
            self.store_groups()

    def _check_db_settings(self, number_of_actual_groups: int,
                           members_in_largest_group: int) -> bool:
        """\
        Adjust TFC's settings automatically if the loaded group database
        was stored using larger database setting values.

        If settings had to be adjusted, return True so the method
        `self._load_groups` knows to write changes to a new database.
        """
        update_db = False

        if number_of_actual_groups > self.settings.max_number_of_groups:
            self.settings.max_number_of_groups = round_up(
                number_of_actual_groups)
            update_db = True

        if members_in_largest_group > self.settings.max_number_of_group_members:
            self.settings.max_number_of_group_members = round_up(
                members_in_largest_group)
            update_db = True

        if update_db:
            self.settings.store_settings()

        return update_db

    def _generate_group_db_header(self) -> bytes:
        """Generate group database metadata header.

        This function produces a 32-byte bytestring that contains four
        values that allow the Transmitter or Receiver program to
        properly de-serialize the database content:

               `max_number_of_groups` helps slice off dummy groups when
                                      loading the database.

        `max_number_of_group_members` helps split dummy-free group data
                                      into proper length blocks that can
                                      be further sliced and decoded to
                                      data used to build Group objects.

                   `len(self.groups)` helps slice off dummy groups when
                                      loading the database. It also
                                      allows TFC to automatically adjust
                                      the max_number_of_groups setting.
                                      The value is needed, e.g., in
                                      cases where the group database is
                                      swapped to a backup that has a
                                      different number of groups than
                                      TFC's settings expect.

               `self.largest_group()` helps TFC to automatically adjust
                                      the max_number_of_group_members
                                      setting (e.g., in cases like the
                                      one described above).
        """
        return b''.join(
            list(
                map(int_to_bytes, [
                    self.settings.max_number_of_groups,
                    self.settings.max_number_of_group_members,
                    len(self.groups),
                    self.largest_group()
                ])))

    def _generate_dummy_group(self) -> 'Group':
        """Generate a dummy Group object.

        The dummy group simplifies the code around the constant length
        serialization when the data is stored to, or read from the
        database.
        """
        dummy_member = self.contact_list.generate_dummy_contact()

        return Group(name=DUMMY_GROUP,
                     group_id=bytes(GROUP_ID_LENGTH),
                     log_messages=False,
                     notifications=False,
                     members=self.settings.max_number_of_group_members *
                     [dummy_member],
                     settings=self.settings,
                     store_groups=lambda: None)

    def _dummy_groups(self) -> List[Group]:
        """Generate a proper size list of dummy groups for database padding."""
        number_of_dummies = self.settings.max_number_of_groups - len(
            self.groups)
        dummy_group = self._generate_dummy_group()
        return [dummy_group] * number_of_dummies

    def add_group(self, name: str, group_id: bytes, log_messages: bool,
                  notifications: bool, members: List['Contact']) -> None:
        """Add a new group to `self.groups` and write changes to the database."""
        if self.has_group(name):
            self.remove_group_by_name(name)

        self.groups.append(
            Group(name, group_id, log_messages, notifications, members,
                  self.settings, self.store_groups))
        self.store_groups()

    def remove_group_by_name(self, name: str) -> None:
        """Remove the specified group from the group list.

        If a group with the matching name was found and removed, write
        changes to the database.
        """
        for i, g in enumerate(self.groups):
            if g.name == name:
                del self.groups[i]
                self.store_groups()
                break

    def remove_group_by_id(self, group_id: bytes) -> None:
        """Remove the specified group from the group list.

        If a group with the matching group ID was found and removed,
        write changes to the database.
        """
        for i, g in enumerate(self.groups):
            if g.group_id == group_id:
                del self.groups[i]
                self.store_groups()
                break

    def get_group(self, name: str) -> Group:
        """Return Group object based on its name."""
        return next(g for g in self.groups if g.name == name)

    def get_group_by_id(self, group_id: bytes) -> Group:
        """Return Group object based on its group ID."""
        return next(g for g in self.groups if g.group_id == group_id)

    def get_list_of_group_names(self) -> List[str]:
        """Return list of group names."""
        return [g.name for g in self.groups]

    def get_list_of_group_ids(self) -> List[bytes]:
        """Return list of group IDs."""
        return [g.group_id for g in self.groups]

    def get_list_of_hr_group_ids(self) -> List[str]:
        """Return list of human readable (B58 encoded) group IDs."""
        return [b58encode(g.group_id) for g in self.groups]

    def get_group_members(self, group_id: bytes) -> List['Contact']:
        """Return list of group members (Contact objects)."""
        return self.get_group_by_id(group_id).members

    def has_group(self, name: str) -> bool:
        """Return True if group list has a group with the specified name, else False."""
        return any(g.name == name for g in self.groups)

    def has_group_id(self, group_id: bytes) -> bool:
        """Return True if group list has a group with the specified group ID, else False."""
        return any(g.group_id == group_id for g in self.groups)

    def largest_group(self) -> int:
        """Return size of the group that has the most members."""
        return max([0] + [len(g) for g in self.groups])

    def print_groups(self) -> None:
        """Print list of groups.

        A neatly printed group list allows easy group management: it
        lets the user check the active logging and notification
        settings, see which group ID shown by the Relay Program
        corresponds to which group, and see which contacts are in each
        group.
        """
        # Initialize columns
        c1 = ['Group']
        c2 = ['Group ID']
        c3 = ['Logging ']
        c4 = ['Notify']
        c5 = ['Members']

        # Populate columns with group data that has only a single line
        for g in self.groups:
            c1.append(g.name)
            c2.append(b58encode(g.group_id))
            c3.append('Yes' if g.log_messages else 'No')
            c4.append('Yes' if g.notifications else 'No')

        # Calculate the width of single-line columns
        c1w, c2w, c3w, c4w = [
            max(len(v) for v in column) + CONTACT_LIST_INDENT
            for column in [c1, c2, c3, c4]
        ]

        # Create a wrapper for Members-column
        wrapped_members_line_indent = c1w + c2w + c3w + c4w
        members_column_width = max(
            1,
            get_terminal_width() - wrapped_members_line_indent)
        wrapper = textwrap.TextWrapper(width=members_column_width)

        # Populate the Members-column
        for g in self.groups:
            if g.empty():
                c5.append("<Empty group>\n")
            else:
                comma_separated_nicks = ', '.join(
                    sorted([m.nick for m in g.members]))
                members_column_lines = wrapper.fill(
                    comma_separated_nicks).split('\n')

                final_str = members_column_lines[0] + '\n'
                for line in members_column_lines[1:]:
                    final_str += wrapped_members_line_indent * ' ' + line + '\n'

                c5.append(final_str)

        # Align columns by adding whitespace between fields of each line
        lines = [
            f'{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}{f5}'
            for f1, f2, f3, f4, f5 in zip(c1, c2, c3, c4, c5)
        ]

        # Add a terminal-wide line between the column names and the data
        lines.insert(1, get_terminal_width() * '─')

        # Print the group list
        print('\n'.join(lines) + '\n')
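
The size arithmetic in the `store_groups` docstring can be verified with a
short sketch. The constant names below are illustrative stand-ins rather than
TFC's actual constants; the values (8-byte header integers, 1024-byte padded
group name, 4-byte group ID, 1-byte booleans, 32-byte member keys, 24-byte
nonce and 16-byte tag) are taken from the docstrings above.

# Sketch of the group database size arithmetic (assumed constant values).
HEADER_VALUES       = 4     # max groups, max members, group count, largest group
ENCODED_INTEGER_LEN = 8
PADDED_NAME_LEN     = 1024
GROUP_ID_LEN        = 4
BOOLEAN_LEN         = 1
PUB_KEY_LEN         = 32
MAX_GROUPS          = 50
MAX_MEMBERS         = 50
NONCE_LEN           = 24
TAG_LEN             = 16

bytes_per_group = PADDED_NAME_LEN + GROUP_ID_LEN + 2 * BOOLEAN_LEN + MAX_MEMBERS * PUB_KEY_LEN
plaintext_len   = HEADER_VALUES * ENCODED_INTEGER_LEN + MAX_GROUPS * bytes_per_group
database_len    = plaintext_len + NONCE_LEN + TAG_LEN

assert bytes_per_group == 2630
assert plaintext_len   == 131532
assert database_len    == 131572
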
Example #2
class TestTFCDatabase(unittest.TestCase):

    def setUp(self) -> None:
        """Pre-test actions."""
        self.unit_test_dir = cd_unit_test()
        self.database_name = 'unittest_db'
        self.master_key    = MasterKey()
        self.database      = TFCDatabase(self.database_name, self.master_key)

    def tearDown(self) -> None:
        """Post-test actions."""
        cleanup(self.unit_test_dir)

    @mock.patch('os.fsync', return_value=MagicMock)
    def test_write_to_file(self, mock_os_fsync) -> None:
        # Setup
        data = os.urandom(MASTERKEY_DB_SIZE)

        # Test
        self.assertIsNone(self.database.write_to_file(self.database_name, data))

        with open(self.database_name, 'rb') as f:
            stored_data = f.read()
        self.assertEqual(data, stored_data)

        mock_os_fsync.assert_called()

    def test_verify_file(self) -> None:
        # Setup
        pt_bytes = os.urandom(MASTERKEY_DB_SIZE)
        ct_bytes = encrypt_and_sign(pt_bytes, self.master_key.master_key)
        with open(self.database_name, 'wb') as f:
            f.write(ct_bytes)

        # Test valid file content returns True.
        self.assertTrue(self.database.verify_file(self.database_name))

        # Test invalid file content returns False.
        tamper_file(self.database_name, tamper_size=1)
        self.assertFalse(self.database.verify_file(self.database_name))

    def test_ensure_temp_write_raises_critical_error_after_exceeding_retry_limit(self) -> None:
        # Setup
        orig_verify_file          = self.database.verify_file
        self.database.verify_file = MagicMock(side_effect=DB_WRITE_RETRY_LIMIT*[False])

        # Test
        with self.assertRaises(SystemExit):
            self.database.store_database(os.urandom(MASTERKEY_DB_SIZE))

        # Teardown
        self.database.verify_file = orig_verify_file

    def test_ensure_temp_write_succeeds_just_before_limit(self) -> None:
        # Setup
        orig_verify_file          = self.database.verify_file
        self.database.verify_file = MagicMock(side_effect=(DB_WRITE_RETRY_LIMIT-1)*[False] + [True])

        # Test
        self.assertIsNone(self.database.store_database(os.urandom(MASTERKEY_DB_SIZE)))

        # Teardown
        self.database.verify_file = orig_verify_file

    def test_store_database_encrypts_data_with_master_key_and_replaces_temp_file_and_original_file(self) -> None:
        # Setup
        pt_old = os.urandom(MASTERKEY_DB_SIZE)
        ct_old = encrypt_and_sign(pt_old, self.master_key.master_key)
        with open(self.database_name, 'wb') as f:
            f.write(ct_old)

        pt_new = os.urandom(MASTERKEY_DB_SIZE)

        ct_temp = os.urandom(MASTERKEY_DB_SIZE)
        with open(self.database.database_temp, 'wb') as f:
            f.write(ct_temp)

        # Test
        self.assertTrue(os.path.isfile(self.database.database_temp))
        self.assertIsNone(self.database.store_database(pt_new))
        self.assertFalse(os.path.isfile(self.database.database_temp))

        with open(self.database_name, 'rb') as f:
            purp_data = f.read()
        purp_pt = auth_and_decrypt(purp_data, self.master_key.master_key)
        self.assertEqual(purp_pt, pt_new)

    def test_replace_database(self) -> None:
        # Setup
        self.assertFalse(os.path.isfile(self.database.database_name))
        self.assertFalse(os.path.isfile(self.database.database_temp))

        with open(self.database.database_temp, 'wb') as f:
            f.write(b'temp_file')

        self.assertFalse(os.path.isfile(self.database.database_name))
        self.assertTrue(os.path.isfile(self.database.database_temp))

        # Test
        self.assertIsNone(self.database.replace_database())

        self.assertFalse(os.path.isfile(self.database.database_temp))
        self.assertTrue(os.path.isfile(self.database.database_name))

    def test_load_database_ignores_invalid_temp_database(self) -> None:
        # Setup
        pt_old = os.urandom(MASTERKEY_DB_SIZE)
        ct_old = encrypt_and_sign(pt_old, self.master_key.master_key)
        with open(self.database_name, 'wb') as f:
            f.write(ct_old)

        ct_temp = os.urandom(MASTERKEY_DB_SIZE)
        with open(self.database.database_temp, 'wb') as f:
            f.write(ct_temp)

        # Test
        self.assertTrue(os.path.isfile(self.database.database_temp))
        self.assertEqual(self.database.load_database(), pt_old)
        self.assertFalse(os.path.isfile(self.database.database_temp))

    def test_load_database_prefers_valid_temp_database(self) -> None:
        # Setup
        pt_old = os.urandom(MASTERKEY_DB_SIZE)
        ct_old = encrypt_and_sign(pt_old, self.master_key.master_key)
        with open(self.database_name, 'wb') as f:
            f.write(ct_old)

        pt_temp = os.urandom(MASTERKEY_DB_SIZE)
        ct_temp = encrypt_and_sign(pt_temp, self.master_key.master_key)
        with open(self.database.database_temp, 'wb') as f:
            f.write(ct_temp)

        # Test
        self.assertTrue(os.path.isfile(self.database.database_temp))
        self.assertEqual(self.database.load_database(), pt_temp)
        self.assertFalse(os.path.isfile(self.database.database_temp))
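
The tests above exercise a write-then-verify-then-replace pattern: data is
first written to a temporary file, verified, and only then moved over the
original database. The sketch below illustrates that pattern in plain Python;
it is not TFC's `TFCDatabase` implementation, and the byte comparison is a
stand-in for authenticated decryption.

import os

def atomic_store(path: str, data: bytes, retry_limit: int = 5) -> None:
    """Write data to a temp file, verify it, then replace the original file."""
    temp_path = path + '_temp'
    for _ in range(retry_limit):
        with open(temp_path, 'wb') as f:
            f.write(data)
            f.flush()
            os.fsync(f.fileno())
        with open(temp_path, 'rb') as f:
            if f.read() == data:             # stand-in for authenticated verification
                os.replace(temp_path, path)  # atomic rename over the original
                return
    raise RuntimeError("Failed to verify temp file within retry limit.")
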
Example #3
class KeyList(object):
    """\
    KeyList object manages TFC's KeySet objects and the storage of the
    objects in an encrypted database.

    The main purpose of this object is to manage the `self.keysets`-list
    that contains TFC's keys. The database is stored on disk in
    encrypted form. Prior to encryption, the database is padded with
    dummy KeySets. The dummy KeySets hide the number of actual KeySets,
    and thus the number of contacts, that would otherwise be revealed
    by the size of the encrypted database. As long as the user has
    fewer than 50 contacts, the database will effectively hide the
    actual number of contacts.

    The KeySet database is separated from the contact database because
    traffic masking needs to update keys frequently without the risk of
    read/write queue blocking that occurs, e.g., when a contact's
    updated nick is being stored in the database.
    """
    def __init__(self, master_key: 'MasterKey', settings: 'Settings') -> None:
        """Create a new KeyList object."""
        self.master_key = master_key
        self.settings = settings
        self.keysets = []  # type: List[KeySet]
        self.dummy_keyset = self.generate_dummy_keyset()
        self.dummy_id = self.dummy_keyset.onion_pub_key
        self.file_name = f'{DIR_USER_DATA}{settings.software_operation}_keys'
        self.database = TFCDatabase(self.file_name, master_key)

        ensure_dir(DIR_USER_DATA)
        if os.path.isfile(self.file_name):
            self._load_keys()
        else:
            self.store_keys()

    def store_keys(self, replace: bool = True) -> None:
        """Write the list of KeySet objects to an encrypted database.

        This function will first create a list of KeySets and dummy
        KeySets. It will then serialize every KeySet object on that list
        and join the constant length byte strings to form the plaintext
        that will be encrypted and stored in the database.

        By default, TFC has a maximum number of 50 contacts. In
        addition, the database stores the KeySet used to encrypt
        commands from Transmitter to Receiver Program. The plaintext
        length of 51 serialized KeySets is 51*176 = 8976 bytes. The
        ciphertext includes a 24-byte nonce and a 16-byte tag, so the
        size of the final database is 9016 bytes.
        """
        pt_bytes = b''.join(
            [k.serialize_k() for k in self.keysets + self._dummy_keysets()])
        self.database.store_database(pt_bytes, replace)

    def _load_keys(self) -> None:
        """Load KeySets from the encrypted database.

        This function first reads and decrypts the database content. It
        then splits the plaintext into a list of 176-byte blocks. Each
        block contains the serialized data of one KeySet. Next, the
        function will remove from the list all dummy KeySets (that start
        with the `dummy_id` byte string). The function will then
        populate the `self.keysets` list with KeySet objects, the data
        of which is sliced and decoded from the dummy-free blocks.
        """
        pt_bytes = self.database.load_database()
        blocks = split_byte_string(pt_bytes, item_len=KEYSET_LENGTH)
        df_blocks = [b for b in blocks if not b.startswith(self.dummy_id)]

        for block in df_blocks:
            if len(block) != KEYSET_LENGTH:
                raise CriticalError("Invalid data in key database.")

            onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk, tx_harac_bytes, rx_harac_bytes \
                = separate_headers(block, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH] + [HARAC_LENGTH])

            self.keysets.append(
                KeySet(onion_pub_key=onion_pub_key,
                       tx_mk=tx_mk,
                       rx_mk=rx_mk,
                       tx_hk=tx_hk,
                       rx_hk=rx_hk,
                       tx_harac=bytes_to_int(tx_harac_bytes),
                       rx_harac=bytes_to_int(rx_harac_bytes),
                       store_keys=self.store_keys))

    @staticmethod
    def generate_dummy_keyset() -> 'KeySet':
        """Generate a dummy KeySet object.

        The dummy KeySet simplifies the code around the constant length
        serialization when the data is stored to, or read from the
        database.

        In case the dummy KeySet is ever accidentally loaded, it uses a
        set of random keys to prevent decryption by an eavesdropper.
        """
        return KeySet(onion_pub_key=onion_address_to_pub_key(DUMMY_CONTACT),
                      tx_mk=csprng(),
                      rx_mk=csprng(),
                      tx_hk=csprng(),
                      rx_hk=csprng(),
                      tx_harac=INITIAL_HARAC,
                      rx_harac=INITIAL_HARAC,
                      store_keys=lambda: None)

    def _dummy_keysets(self) -> List[KeySet]:
        """\
        Generate a properly sized list of dummy KeySets for database
        padding.

        The additional contact (+1) is the local key.
        """
        number_of_contacts_to_store = self.settings.max_number_of_contacts + 1
        number_of_dummies = number_of_contacts_to_store - len(self.keysets)
        return [self.dummy_keyset] * number_of_dummies

    def add_keyset(self, onion_pub_key: bytes, tx_mk: bytes, rx_mk: bytes,
                   tx_hk: bytes, rx_hk: bytes) -> None:
        """\
        Add a new KeySet to `self.keysets` list and write changes to the
        database.
        """
        if self.has_keyset(onion_pub_key):
            self.remove_keyset(onion_pub_key)

        self.keysets.append(
            KeySet(onion_pub_key=onion_pub_key,
                   tx_mk=tx_mk,
                   rx_mk=rx_mk,
                   tx_hk=tx_hk,
                   rx_hk=rx_hk,
                   tx_harac=INITIAL_HARAC,
                   rx_harac=INITIAL_HARAC,
                   store_keys=self.store_keys))
        self.store_keys()

    def remove_keyset(self, onion_pub_key: bytes) -> None:
        """\
        Remove KeySet from `self.keysets` based on Onion Service public key.

        If the KeySet was found and removed, write changes to the database.
        """
        for i, k in enumerate(self.keysets):
            if k.onion_pub_key == onion_pub_key:
                del self.keysets[i]
                self.store_keys()
                break

    def change_master_key(self, queues: 'QueueDict') -> None:
        """Change the master key and encrypt the database with the new key."""
        key_queue = queues[KEY_MANAGEMENT_QUEUE]
        ack_queue = queues[KEY_MGMT_ACK_QUEUE]

        # Halt the sender loop here until the keys have been replaced by
        # the `input_loop` process and the new master key is delivered.
        ack_queue.put(KDB_HALT_ACK_HEADER)
        while key_queue.qsize() == 0:
            time.sleep(0.001)
        new_master_key = key_queue.get()

        # Replace master key.
        self.database.database_key = new_master_key
        self.master_key.master_key = new_master_key

        # Send new master key back to `input_loop` process to verify it was received.
        ack_queue.put(new_master_key)

    def update_database(self, settings: 'Settings') -> None:
        """Update settings and database size."""
        self.settings = settings
        self.store_keys()

    def get_keyset(self, onion_pub_key: bytes) -> KeySet:
        """\
        Return KeySet object from `self.keysets`-list that matches the
        Onion Service public key used as the selector.
        """
        return next(k for k in self.keysets
                    if k.onion_pub_key == onion_pub_key)

    def get_list_of_pub_keys(self) -> List[bytes]:
        """Return list of Onion Service public keys for KeySets."""
        return [
            k.onion_pub_key for k in self.keysets
            if k.onion_pub_key != LOCAL_PUBKEY
        ]

    def has_keyset(self, onion_pub_key: bytes) -> bool:
        """Return True if KeySet with matching Onion Service public key exists, else False."""
        return any(onion_pub_key == k.onion_pub_key for k in self.keysets)

    def has_rx_mk(self, onion_pub_key: bytes) -> bool:
        """\
        Return True if KeySet with matching Onion Service public key has
        rx-message key, else False.

        When the PSK key exchange option is selected, the rx-message key
        of a newly created contact's KeySet on the Receiver Program is a
        null-byte string. This default value indicates the contact's PSK
        has not yet been imported.
        """
        return self.get_keyset(onion_pub_key).rx_mk != bytes(
            SYMMETRIC_KEY_LENGTH)

    def has_local_keyset(self) -> bool:
        """Return True if local KeySet object exists, else False."""
        return any(k.onion_pub_key == LOCAL_PUBKEY for k in self.keysets)

    def manage(self, queues: 'QueueDict', command: str, *params: Any) -> None:
        """Manage KeyList based on a command.

        The command is delivered from `input_process` to `sender_loop`
        process via the `KEY_MANAGEMENT_QUEUE`.
        """
        if command == KDB_ADD_ENTRY_HEADER:
            self.add_keyset(*params)
        elif command == KDB_REMOVE_ENTRY_HEADER:
            self.remove_keyset(*params)
        elif command == KDB_M_KEY_CHANGE_HALT_HEADER:
            self.change_master_key(queues)
        elif command == KDB_UPDATE_SIZE_HEADER:
            self.update_database(*params)
        else:
            raise CriticalError(
                f"Invalid KeyList management command '{command}'.")
Example #4
class Settings(object):
    """\
    Settings object stores user-adjustable settings (excluding those
    related to the serial interface) in an encrypted database.
    """

    def __init__(self,
                 master_key: 'MasterKey',  # MasterKey object
                 operation:  str,          # Operation mode of the program (Tx or Rx)
                 local_test: bool,         # Local testing setting from command-line argument
                 qubes:      bool = False  # Qubes setting from command-line argument
                 ) -> None:
        """Create a new Settings object.

        The settings below are defaults, and are only to be altered from
        within the program itself. Changes made to the default settings
        are stored in the encrypted settings database, from which they
        are loaded when the program starts.
        """
        # Common settings
        self.disable_gui_dialog            = False
        self.max_number_of_group_members   = 50
        self.max_number_of_groups          = 50
        self.max_number_of_contacts        = 50
        self.log_messages_by_default       = False
        self.accept_files_by_default       = False
        self.show_notifications_by_default = True
        self.log_file_masking              = False
        self.ask_password_for_log_access   = True

        # Transmitter settings
        self.nc_bypass_messages = False
        self.confirm_sent_files = True
        self.double_space_exits = False
        self.traffic_masking    = False
        self.tm_static_delay    = 2.0
        self.tm_random_delay    = 2.0

        # Relay Settings
        self.allow_contact_requests = True

        # Receiver settings
        self.new_message_notify_preview  = False
        self.new_message_notify_duration = 1.0
        self.max_decompress_size         = 100_000_000

        self.master_key         = master_key
        self.software_operation = operation
        self.local_testing_mode = local_test
        self.qubes              = qubes

        self.file_name = f'{DIR_USER_DATA}{operation}_settings'
        self.database  = TFCDatabase(self.file_name, master_key)

        self.all_keys = list(vars(self).keys())
        self.key_list = self.all_keys[:self.all_keys.index('master_key')]
        self.defaults = {k: self.__dict__[k] for k in self.key_list}

        ensure_dir(DIR_USER_DATA)
        if os.path.isfile(self.file_name):
            self.load_settings()
        else:
            self.store_settings()

    def store_settings(self, replace: bool = True) -> None:
        """Store settings to an encrypted database.

        The plaintext in the encrypted database is a constant-length
        bytestring regardless of the stored setting values.
        """
        attribute_list = [self.__getattribute__(k) for k in self.key_list]

        bytes_lst = []
        for a in attribute_list:
            if isinstance(a, bool):
                bytes_lst.append(bool_to_bytes(a))
            elif isinstance(a, int):
                bytes_lst.append(int_to_bytes(a))
            elif isinstance(a, float):
                bytes_lst.append(double_to_bytes(a))
            else:
                raise CriticalError("Invalid attribute type in settings.")

        pt_bytes = b''.join(bytes_lst)
        self.database.store_database(pt_bytes, replace)

    def load_settings(self) -> None:
        """Load settings from the encrypted database."""
        pt_bytes = self.database.load_database()

        # Update settings based on plaintext byte string content
        for key in self.key_list:

            attribute = self.__getattribute__(key)

            if isinstance(attribute, bool):
                value    = bytes_to_bool(pt_bytes[0])  # type: Union[bool, int, float]
                pt_bytes = pt_bytes[ENCODED_BOOLEAN_LENGTH:]

            elif isinstance(attribute, int):
                value    = bytes_to_int(pt_bytes[:ENCODED_INTEGER_LENGTH])
                pt_bytes = pt_bytes[ENCODED_INTEGER_LENGTH:]

            elif isinstance(attribute, float):
                value    = bytes_to_double(pt_bytes[:ENCODED_FLOAT_LENGTH])
                pt_bytes = pt_bytes[ENCODED_FLOAT_LENGTH:]

            else:
                raise CriticalError("Invalid data type in settings default values.")

            setattr(self, key, value)

    def change_setting(self,
                       key:          str,  # Name of the setting
                       value_str:    str,  # Value of the setting
                       contact_list: 'ContactList',
                       group_list:   'GroupList'
                       ) -> None:
        """Parse, update and store new setting value."""
        attribute = self.__getattribute__(key)

        try:
            if isinstance(attribute, bool):
                value = dict(true=True, false=False)[value_str.lower()]  # type: Union[bool, int, float]

            elif isinstance(attribute, int):
                value = int(value_str)
                if value < 0 or value > MAX_INT:
                    raise ValueError

            elif isinstance(attribute, float):
                value = float(value_str)
                if value < 0.0:
                    raise ValueError

            else:
                raise CriticalError("Invalid attribute type in settings.")

        except (KeyError, ValueError):
            raise SoftError(f"Error: Invalid setting value '{value_str}'.", head_clear=True)

        self.validate_key_value_pair(key, value, contact_list, group_list)

        setattr(self, key, value)
        self.store_settings()

    @staticmethod
    def validate_key_value_pair(key:          str,            # Name of the setting
                                value:        'SettingType',  # Value of the setting
                                contact_list: 'ContactList',  # ContactList object
                                group_list:   'GroupList',    # GroupList object
                                ) -> None:
        """Evaluate values for settings that have further restrictions."""
        Settings.validate_database_limit(key, value)
        Settings.validate_max_number_of_group_members(key, value, group_list)
        Settings.validate_max_number_of_groups(key, value, group_list)
        Settings.validate_max_number_of_contacts(key, value, contact_list)
        Settings.validate_new_message_notify_duration(key, value)
        Settings.validate_traffic_masking_delay(key, value, contact_list)

    @staticmethod
    def validate_database_limit(key: str, value: 'SettingType') -> None:
        """Validate setting values for database entry limits."""
        if key in ["max_number_of_group_members", "max_number_of_groups", "max_number_of_contacts"]:
            if value % 10 != 0 or value == 0:
                raise SoftError("Error: Database padding settings must be divisible by 10.", head_clear=True)

    @staticmethod
    def validate_max_number_of_group_members(key:        str,
                                             value:      'SettingType',
                                             group_list: 'GroupList'
                                             ) -> None:
        """Validate setting value for maximum number of group members."""
        if key == "max_number_of_group_members":
            min_size = round_up(group_list.largest_group())
            if value < min_size:
                raise SoftError(f"Error: Can't set the max number of members lower than {min_size}.", head_clear=True)

    @staticmethod
    def validate_max_number_of_groups(key:        str,
                                      value:      'SettingType',
                                      group_list: 'GroupList'
                                      ) -> None:
        """Validate setting value for maximum number of groups."""
        if key == "max_number_of_groups":
            min_size = round_up(len(group_list))
            if value < min_size:
                raise SoftError(f"Error: Can't set the max number of groups lower than {min_size}.", head_clear=True)

    @staticmethod
    def validate_max_number_of_contacts(key:          str,
                                        value:        'SettingType',
                                        contact_list: 'ContactList'
                                        ) -> None:
        """Validate setting value for maximum number of contacts."""
        if key == "max_number_of_contacts":
            min_size = round_up(len(contact_list))
            if value < min_size:
                raise SoftError(f"Error: Can't set the max number of contacts lower than {min_size}.", head_clear=True)

    @staticmethod
    def validate_new_message_notify_duration(key: str, value: 'SettingType') -> None:
        """Validate setting value for duration of new message notification."""
        if key == "new_message_notify_duration" and value < 0.05:
            raise SoftError("Error: Too small value for message notify duration.", head_clear=True)

    @staticmethod
    def validate_traffic_masking_delay(key:          str,
                                       value:        'SettingType',
                                       contact_list: 'ContactList'
                                       ) -> None:
        """Validate setting value for traffic masking delays."""
        if key in ["tm_static_delay", "tm_random_delay"]:

            for key_, name, min_setting in [("tm_static_delay", "static", TRAFFIC_MASKING_MIN_STATIC_DELAY),
                                            ("tm_random_delay", "random", TRAFFIC_MASKING_MIN_RANDOM_DELAY)]:
                if key == key_ and value < min_setting:
                    raise SoftError(f"Error: Can't set {name} delay lower than {min_setting}.", head_clear=True)

            if contact_list.settings.software_operation == TX:
                m_print(["WARNING!", "Changing traffic masking delay can make your endpoint and traffic look unique!"],
                        bold=True, head=1, tail=1)

                if not yes("Proceed anyway?"):
                    raise SoftError("Aborted traffic masking setting change.", head_clear=True)

            m_print("Traffic masking setting will change on restart.", head=1, tail=1)

    def print_settings(self) -> None:
        """\
        Print list of settings, their current and
        default values, and setting descriptions.
        """
        desc_d = {
            # Common settings
            "disable_gui_dialog":            "True replaces GUI dialogs with CLI prompts",
            "max_number_of_group_members":   "Maximum number of members in a group",
            "max_number_of_groups":          "Maximum number of groups",
            "max_number_of_contacts":        "Maximum number of contacts",
            "log_messages_by_default":       "Default logging setting for new contacts/groups",
            "accept_files_by_default":       "Default file reception setting for new contacts",
            "show_notifications_by_default": "Default message notification setting for new contacts/groups",
            "log_file_masking":              "True hides real size of log file during traffic masking",
            "ask_password_for_log_access":   "False disables password prompt when viewing/exporting logs",

            # Transmitter settings
            "nc_bypass_messages":            "False removes Networked Computer bypass interrupt messages",
            "confirm_sent_files":            "False sends files without asking for confirmation",
            "double_space_exits":            "True exits, False clears screen with double space command",
            "traffic_masking":               "True enables traffic masking to hide metadata",
            "tm_static_delay":               "The static delay between traffic masking packets",
            "tm_random_delay":               "Max random delay for traffic masking timing obfuscation",

            # Relay settings
            "allow_contact_requests":        "When False, does not show TFC contact requests",

            # Receiver settings
            "new_message_notify_preview":    "When True, shows a preview of the received message",
            "new_message_notify_duration":   "Number of seconds new message notification appears",
            "max_decompress_size":           "Max size Receiver accepts when decompressing file"}

        # Columns
        c1 = ['Setting name']
        c2 = ['Current value']
        c3 = ['Default value']
        c4 = ['Description']

        terminal_width     = get_terminal_width()
        description_indent = 64

        if terminal_width < description_indent + 1:
            raise SoftError("Error: Screen width is too small.", head_clear=True)

        # Populate columns with setting data
        for key in self.defaults:
            c1.append(key)
            c2.append(str(self.__getattribute__(key)))
            c3.append(str(self.defaults[key]))

            description = desc_d[key]
            wrapper     = textwrap.TextWrapper(width=max(1, (terminal_width - description_indent)))
            desc_lines  = wrapper.fill(description).split('\n')
            desc_string = desc_lines[0]

            for line in desc_lines[1:]:
                desc_string += '\n' + description_indent * ' ' + line

            if len(desc_lines) > 1:
                desc_string += '\n'

            c4.append(desc_string)

        # Calculate column widths
        c1w, c2w, c3w = [max(len(v) for v in column) + SETTINGS_INDENT for column in [c1, c2, c3]]

        # Align columns by adding whitespace between fields of each line
        lines = [f'{f1:{c1w}} {f2:{c2w}} {f3:{c3w}} {f4}' for f1, f2, f3, f4 in zip(c1, c2, c3, c4)]

        # Add a terminal-wide line between the column names and the data
        lines.insert(1, get_terminal_width() * '─')

        # Print the settings
        clear_screen()
        print('\n' + '\n'.join(lines))
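
The fixed-length encoding used by `store_settings` and `load_settings` can be
illustrated with a short sketch. The helper functions below are stand-ins
written with `struct`; TFC's actual `bool_to_bytes`, `int_to_bytes` and
`double_to_bytes` may differ in details such as endianness.

import struct

def bool_to_bytes_sketch(b: bool) -> bytes:
    return bytes([int(b)])          # 1-byte boolean

def int_to_bytes_sketch(i: int) -> bytes:
    return struct.pack('>Q', i)     # 8-byte unsigned integer

def double_to_bytes_sketch(d: float) -> bytes:
    return struct.pack('>d', d)     # 8-byte IEEE 754 double

example_settings = {'traffic_masking':        False,
                    'max_number_of_contacts': 50,
                    'tm_static_delay':        2.0}

serialized = b''
for value in example_settings.values():
    if isinstance(value, bool):          # bool check must precede int check
        serialized += bool_to_bytes_sketch(value)
    elif isinstance(value, int):
        serialized += int_to_bytes_sketch(value)
    elif isinstance(value, float):
        serialized += double_to_bytes_sketch(value)

assert len(serialized) == 1 + 8 + 8      # constant length regardless of values
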
Example #5
class ContactList(Iterable[Contact], Sized):
    """\
    ContactList object manages TFC's Contact objects and the storage of
    the objects in an encrypted database.

    The main purpose of this object is to manage the `self.contacts`
    list that contains TFC's contacts. The database is stored on disk
    in encrypted form. Prior to encryption, the database is padded with
    dummy contacts. The dummy contacts hide the number of actual
    contacts that would otherwise be revealed by the size of the
    encrypted database. As long as the user has fewer than 50 contacts,
    the database will effectively hide the actual number of contacts.
    The maximum number of contacts (and thus the size of the database)
    can be changed by editing the `max_number_of_contacts` setting. This
    can, however, in theory, reveal to a physical attacker that the user
    has more than 50 contacts.

    The ContactList object also provides handy methods with human-
    readable names for making queries to the database.
    """
    def __init__(self, master_key: 'MasterKey', settings: 'Settings') -> None:
        """Create a new ContactList object."""
        self.settings = settings
        self.contacts = []  # type: List[Contact]
        self.dummy_contact = self.generate_dummy_contact()
        self.file_name = f'{DIR_USER_DATA}{settings.software_operation}_contacts'
        self.database = TFCDatabase(self.file_name, master_key)

        ensure_dir(DIR_USER_DATA)
        if os.path.isfile(self.file_name):
            self._load_contacts()
        else:
            self.store_contacts()

    def __iter__(self) -> Iterator[Contact]:
        """Iterate over Contact objects in `self.contacts`."""
        yield from self.contacts

    def __len__(self) -> int:
        """Return the number of contacts in `self.contacts`.

        The Contact object that represents the local key is left out of
        the calculation.
        """
        return len(self.get_list_of_contacts())

    def store_contacts(self, replace: bool = True) -> None:
        """Write the list of contacts to an encrypted database.

        This function will first create a list of contacts and dummy
        contacts. It will then serialize every Contact object on that
        list and join the constant length byte strings to form the
        plaintext that will be encrypted and stored in the database.

        By default, TFC has a maximum number of 50 contacts. In
        addition, the database stores the contact that represents the
        local key (used to encrypt commands from Transmitter to Receiver
        Program). The plaintext length of 51 serialized contacts is
        51*1124 = 57324 bytes. The ciphertext includes a 24-byte nonce
        and a 16-byte tag, so the size of the final database is 57364
        bytes.
        """
        pt_bytes = b''.join(
            [c.serialize_c() for c in self.contacts + self._dummy_contacts()])
        self.database.store_database(pt_bytes, replace)

    def _load_contacts(self) -> None:
        """Load contacts from the encrypted database.

        This function first reads and decrypts the database content. It
        then splits the plaintext into a list of 1124-byte blocks: each
        block contains the serialized data of one contact. Next, the
        function will remove from the list all dummy contacts (that
        start with the dummy contact's public key). The function will then
        populate the `self.contacts` list with Contact objects, the data
        of which is sliced and decoded from the dummy-free blocks.
        """
        pt_bytes = self.database.load_database()
        blocks = split_byte_string(pt_bytes, item_len=CONTACT_LENGTH)
        df_blocks = [
            b for b in blocks
            if not b.startswith(self.dummy_contact.onion_pub_key)
        ]

        for block in df_blocks:
            if len(block) != CONTACT_LENGTH:
                raise CriticalError("Invalid data in contact database.")

            (onion_pub_key, tx_fingerprint, rx_fingerprint, kex_status_byte,
             log_messages_byte, file_reception_byte,
             notifications_byte, nick_bytes) = separate_headers(
                 block,
                 [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 2 * [FINGERPRINT_LENGTH] +
                 [KEX_STATUS_LENGTH] + 3 * [ENCODED_BOOLEAN_LENGTH])

            self.contacts.append(
                Contact(onion_pub_key=onion_pub_key,
                        tx_fingerprint=tx_fingerprint,
                        rx_fingerprint=rx_fingerprint,
                        kex_status=kex_status_byte,
                        log_messages=bytes_to_bool(log_messages_byte),
                        file_reception=bytes_to_bool(file_reception_byte),
                        notifications=bytes_to_bool(notifications_byte),
                        nick=bytes_to_str(nick_bytes)))

    @staticmethod
    def generate_dummy_contact() -> Contact:
        """Generate a dummy Contact object.

        The dummy contact simplifies the code around the constant length
        serialization when the data is stored to, or read from the
        database.
        """
        return Contact(onion_pub_key=onion_address_to_pub_key(DUMMY_CONTACT),
                       nick=DUMMY_NICK,
                       tx_fingerprint=bytes(FINGERPRINT_LENGTH),
                       rx_fingerprint=bytes(FINGERPRINT_LENGTH),
                       kex_status=KEX_STATUS_NONE,
                       log_messages=False,
                       file_reception=False,
                       notifications=False)

    def _dummy_contacts(self) -> List[Contact]:
        """\
        Generate a list of dummy contacts for database padding.

        The number of dummy contacts depends on the number of actual
        contacts.

        The additional contact (+1) is the local contact used to
        represent the presence of the local key on Transmitter Program's
        `input_loop` process side that does not have access to the
        KeyList database that contains the local key.
        """
        number_of_contacts_to_store = self.settings.max_number_of_contacts + 1
        number_of_dummies = number_of_contacts_to_store - len(self.contacts)
        return [self.dummy_contact] * number_of_dummies

    def add_contact(self, onion_pub_key: bytes, nick: str,
                    tx_fingerprint: bytes, rx_fingerprint: bytes,
                    kex_status: bytes, log_messages: bool,
                    file_reception: bool, notifications: bool) -> None:
        """\
        Add a new contact to `self.contacts` list and write changes to
        the database.

        Because TFC's hardware separation prevents automated DH-ratchet,
        the only way for the users to re-negotiate new keys is to start
        a new session by re-adding the contact. If the contact is
        re-added, TFC will need to remove the existing Contact object
        before adding the new one. In such a case, TFC will update the
        nick, kex status, and fingerprints, but it will keep the old
        logging, file reception, and notification settings of the
        contact (as opposed to using the defaults determined by TFC's
        Settings object).
        """
        if self.has_pub_key(onion_pub_key):
            current_contact = self.get_contact_by_pub_key(onion_pub_key)
            log_messages = current_contact.log_messages
            file_reception = current_contact.file_reception
            notifications = current_contact.notifications
            self.remove_contact_by_pub_key(onion_pub_key)

        self.contacts.append(
            Contact(onion_pub_key, nick, tx_fingerprint, rx_fingerprint,
                    kex_status, log_messages, file_reception, notifications))
        self.store_contacts()

    def remove_contact_by_pub_key(self, onion_pub_key: bytes) -> None:
        """Remove the contact that has a matching Onion Service public key.

        If the contact was found and removed, write changes to the database.
        """
        for i, c in enumerate(self.contacts):
            if c.onion_pub_key == onion_pub_key:
                del self.contacts[i]
                self.store_contacts()
                break

    def remove_contact_by_address_or_nick(self, selector: str) -> None:
        """Remove the contact that has a matching nick or Onion Service address.

        If the contact was found and removed, write changes to the database.
        """
        for i, c in enumerate(self.contacts):
            if selector in [c.onion_address, c.nick]:
                del self.contacts[i]
                self.store_contacts()
                break

    def get_contact_by_pub_key(self, onion_pub_key: bytes) -> Contact:
        """\
        Return the Contact object from `self.contacts` list that has the
        matching Onion Service public key.
        """
        return next(c for c in self.contacts
                    if onion_pub_key == c.onion_pub_key)

    def get_contact_by_address_or_nick(self, selector: str) -> Contact:
        """\
        Return the Contact object from `self.contacts` list that has the
        matching nick or Onion Service address.
        """
        return next(c for c in self.contacts
                    if selector in [c.onion_address, c.nick])

    def get_nick_by_pub_key(self, onion_pub_key: bytes) -> str:
        """Return nick of contact that has a matching Onion Service public key."""
        return next(c.nick for c in self.contacts
                    if onion_pub_key == c.onion_pub_key)

    def get_list_of_contacts(self) -> List[Contact]:
        """Return list of Contact objects in `self.contacts` list."""
        return [c for c in self.contacts if c.onion_address != LOCAL_ID]

    def get_list_of_addresses(self) -> List[str]:
        """Return list of contacts' TFC accounts."""
        return [
            c.onion_address for c in self.contacts
            if c.onion_address != LOCAL_ID
        ]

    def get_list_of_nicks(self) -> List[str]:
        """Return list of contacts' nicks."""
        return [c.nick for c in self.contacts if c.onion_address != LOCAL_ID]

    def get_list_of_pub_keys(self) -> List[bytes]:
        """Return list of contacts' public keys."""
        return [
            c.onion_pub_key for c in self.contacts
            if c.onion_address != LOCAL_ID
        ]

    def get_list_of_pending_pub_keys(self) -> List[bytes]:
        """Return list of public keys for contacts that haven't completed key exchange yet."""
        return [
            c.onion_pub_key for c in self.contacts
            if c.kex_status == KEX_STATUS_PENDING
        ]

    def get_list_of_existing_pub_keys(self) -> List[bytes]:
        """Return list of public keys for contacts with whom key exchange has been completed."""
        return [
            c.onion_pub_key for c in self.get_list_of_contacts()
            if c.kex_status in [
                KEX_STATUS_UNVERIFIED, KEX_STATUS_VERIFIED,
                KEX_STATUS_HAS_RX_PSK, KEX_STATUS_NO_RX_PSK
            ]
        ]

    def contact_selectors(self) -> List[str]:
        """Return list of string-type UIDs that can be used to select a contact."""
        return self.get_list_of_addresses() + self.get_list_of_nicks()

    def has_contacts(self) -> bool:
        """Return True if ContactList has any contacts, else False."""
        return any(self.get_list_of_contacts())

    def has_only_pending_contacts(self) -> bool:
        """Return True if ContactList only has pending contacts, else False."""
        return all(c.kex_status == KEX_STATUS_PENDING
                   for c in self.get_list_of_contacts())

    def has_pub_key(self, onion_pub_key: bytes) -> bool:
        """Return True if contact with public key exists, else False."""
        return onion_pub_key in self.get_list_of_pub_keys()

    def has_local_contact(self) -> bool:
        """Return True if the local key has been exchanged, else False."""
        return any(c.onion_address == LOCAL_ID for c in self.contacts)

    def print_contacts(self) -> None:
        """Print the list of contacts.

        A neatly printed contact list allows easy contact management:
        it lets the user check the active logging, file reception, and
        notification settings, as well as which key exchange was used
        and what its current state is. The contact list also shows
        which account displayed by the Relay Program corresponds to
        which nick.
        """
        # Initialize columns
        c1 = ['Contact']
        c2 = ['Account']
        c3 = ['Logging']
        c4 = ['Notify']
        c5 = ['Files ']
        c6 = ['Key Ex']

        # Key exchange status dictionary
        kex_dict = {
            KEX_STATUS_PENDING: f"{ECDHE} (Pending)",
            KEX_STATUS_UNVERIFIED: f"{ECDHE} (Unverified)",
            KEX_STATUS_VERIFIED: f"{ECDHE} (Verified)",
            KEX_STATUS_NO_RX_PSK: f"{PSK}  (No contact key)",
            KEX_STATUS_HAS_RX_PSK: PSK
        }

        # Populate columns with contact data
        for c in self.get_list_of_contacts():
            c1.append(c.nick)
            c2.append(c.short_address)
            c3.append('Yes' if c.log_messages else 'No')
            c4.append('Yes' if c.notifications else 'No')
            c5.append('Accept' if c.file_reception else 'Reject')
            c6.append(kex_dict[c.kex_status])

        # Calculate column widths
        c1w, c2w, c3w, c4w, c5w = [
            max(len(v) for v in column) + CONTACT_LIST_INDENT
            for column in [c1, c2, c3, c4, c5]
        ]

        # Align columns by adding whitespace between fields of each line
        lines = [
            f'{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}{f5:{c5w}}{f6}'
            for f1, f2, f3, f4, f5, f6 in zip(c1, c2, c3, c4, c5, c6)
        ]

        # Add a terminal-wide line between the column names and the data
        lines.insert(1, get_terminal_width() * '─')

        # Print the contact list
        clear_screen()
        print('\n' + '\n'.join(lines) + '\n\n')
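
The contact size arithmetic in the `store_contacts` docstring can be verified
with a short sketch. The constant names are illustrative; the 1024-byte padded
nick length is an assumption consistent with the 1124-byte serialized contact
length stated in `_load_contacts`.

# Sketch of the contact database size arithmetic (assumed constant values).
PUB_KEY_LEN     = 32
FINGERPRINT_LEN = 32
KEX_STATUS_LEN  = 1
BOOLEAN_LEN     = 1
PADDED_NICK_LEN = 1024
MAX_CONTACTS    = 50
NONCE_LEN       = 24
TAG_LEN         = 16

contact_len   = (PUB_KEY_LEN + 2 * FINGERPRINT_LEN + KEX_STATUS_LEN
                 + 3 * BOOLEAN_LEN + PADDED_NICK_LEN)
plaintext_len = (MAX_CONTACTS + 1) * contact_len   # +1 contact for the local key
database_len  = plaintext_len + NONCE_LEN + TAG_LEN

assert contact_len   == 1124
assert plaintext_len == 57324
assert database_len  == 57364
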
Example #6
class OnionService(object):
    """\
    OnionService object manages the persistent Ed25519 key used
    to create a v3 Tor Onion Service on the Networked Computer.

    The key is generated by the Transmitter Program on the Source
    Computer to ensure that even when the Networked Computer runs an
    amnesic Linux distribution like Tails, the long-term private
    signing key is not lost between sessions.

    The Onion Service private key cannot be kept as protected as TFC's
    other private message/header keys (which never leave the Source or
    Destination Computer). This is, however, acceptable, as the Onion
    Service private key is only as secure as the networked endpoint
    anyway.
    """
    def __init__(self, master_key: 'MasterKey') -> None:
        """Create a new OnionService object."""
        self.master_key = master_key
        self.file_name = f'{DIR_USER_DATA}{TX}_onion_db'
        self.database = TFCDatabase(self.file_name, self.master_key)
        self.is_delivered = False
        self.conf_code = csprng(CONFIRM_CODE_LENGTH)

        ensure_dir(DIR_USER_DATA)
        if os.path.isfile(self.file_name):
            self.onion_private_key = self.load_onion_service_private_key()
        else:
            self.onion_private_key = self.new_onion_service_private_key()
            self.store_onion_service_private_key()

        self.public_key = bytes(
            nacl.signing.SigningKey(seed=self.onion_private_key).verify_key)

        self.user_onion_address = pub_key_to_onion_address(self.public_key)
        self.user_short_address = pub_key_to_short_address(self.public_key)

    @staticmethod
    def new_onion_service_private_key() -> bytes:
        """Generate a new Onion Service private key and store it."""
        phase("Generate Tor OS key")
        onion_private_key = csprng(ONION_SERVICE_PRIVATE_KEY_LENGTH)
        phase(DONE)
        return onion_private_key

    def store_onion_service_private_key(self, replace: bool = True) -> None:
        """Store Onion Service private key to an encrypted database."""
        self.database.store_database(self.onion_private_key, replace)

    def load_onion_service_private_key(self) -> bytes:
        """Load the Onion Service private key from the encrypted database."""
        onion_private_key = self.database.load_database()

        if len(onion_private_key) != ONION_SERVICE_PRIVATE_KEY_LENGTH:
            raise CriticalError("Invalid Onion Service private key length.")

        return onion_private_key

    def new_confirmation_code(self) -> None:
        """Generate new confirmation code for Onion Service data."""
        self.conf_code = csprng(CONFIRM_CODE_LENGTH)
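
A minimal sketch of the key derivation step in `OnionService.__init__`: a
32-byte random seed acts as the persistent Ed25519 private key, and the
corresponding public key (from which the v3 Onion Service address is derived)
is obtained with PyNaCl. Here `os.urandom` stands in for TFC's `csprng`.

import os
import nacl.signing

ONION_SERVICE_PRIVATE_KEY_LEN = 32   # Ed25519 seed length

onion_private_key = os.urandom(ONION_SERVICE_PRIVATE_KEY_LEN)
public_key = bytes(nacl.signing.SigningKey(seed=onion_private_key).verify_key)

assert len(public_key) == 32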