# Example no. 1
    def decrypt_container_from_storage(self, container_name_or_idx):
        """
        Return the decrypted content of the container `filename` (which must be in `list_container_names()`,
        or an index suitable for this list).
        """
        if isinstance(container_name_or_idx, int):
            # Resolve the integer index against the sorted list of relative names.
            sorted_names = self.list_container_names(as_sorted=True,
                                                     as_absolute=False)
            relative_name = sorted_names[
                container_name_or_idx]  # Will break if idx is out of bounds
        else:

            assert isinstance(container_name_or_idx,
                              (Path, str)), repr(container_name_or_idx)
            relative_name = Path(container_name_or_idx)

        assert not relative_name.is_absolute(), relative_name

        logger.info("Decrypting container %r from storage", relative_name)

        full_filepath = self._make_absolute(relative_name)
        loaded_container = load_from_json_file(full_filepath)

        decrypted_result = self._decrypt_data_from_container(loaded_container)
        logger.info("Container %r successfully decrypted", relative_name)
        return decrypted_result
def test_serialization_utilities(tmp_path):
    """
    Check that JSON (de)serialization round-trips bytes/str/UUID values, sorts keys,
    and propagates extra json arguments (e.g. `ensure_ascii`) to the underlying dumper.
    """
    uid = uuid.UUID("7c0b18f5-f410-4e83-9263-b38c2328e516")
    data = dict(b=b"xyz", a="hêllo", c=uid)

    serialized_str = dump_to_json_str(data)
    # Keys are sorted
    assert (
        serialized_str
        == r'{"a": "h\u00eallo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_str(serialized_str)
    assert deserialized == data

    serialized_str = dump_to_json_str(
        data, ensure_ascii=False
    )  # Json arguments well propagated
    assert (
        serialized_str
        == r'{"a": "hêllo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_str(serialized_str)
    assert deserialized == data

    serialized_str = dump_to_json_bytes(data)
    # Keys are sorted
    assert (
        serialized_str
        == rb'{"a": "h\u00eallo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_bytes(serialized_str)
    assert deserialized == data

    serialized_str = dump_to_json_bytes(
        data, ensure_ascii=False
    )  # Json arguments well propagated
    assert (
        serialized_str
        == b'{"a": "h\xc3\xaallo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_bytes(serialized_str)
    assert deserialized == data

    tmp_filepath = os.path.join(tmp_path, "dummy_temp_file.dat")
    serialized_str = dump_to_json_file(
        tmp_filepath, data=data, ensure_ascii=True
    )  # Json arguments well propagated
    # BUGFIX: raw-bytes prefix `rb` (like the dump_to_json_bytes case above) — a plain
    # bytes literal treats `\u00ea` as an invalid escape (SyntaxWarning, future error),
    # even though it happens to pass the backslash through unchanged today.
    assert (
        serialized_str
        == rb'{"a": "h\u00eallo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_file(tmp_filepath)
    assert deserialized == data
def load_authentication_device_metadata(authentication_device: dict) -> dict:
    """
    Return the device metadata stored in the given mountpoint, after checking that it contains at least mandatory
    (user and device_uid) fields.

    Raises `ValueError` or json decoding exceptions if device appears initialized, but has corrupted metadata.
    """
    metadata_filepath = _get_metadata_file_path(
        authentication_device=authentication_device)
    device_metadata = load_from_json_file(metadata_filepath)
    _check_authentication_device_metadata(device_metadata)  # Raises if troubles
    return device_metadata
    def list_imported_key_storage_metadata(
            self) -> dict:  # FIXME doesn't return a list??
        """Return a dict mapping key storage UUIDs to the dicts of their metadata.

        Raises if any metadata loading fails.
        """
        def _load_one(storage_uid):
            # Each imported key storage folder carries its own metadata file.
            storage_path = self._get_imported_key_storage_path(
                key_storage_uid=storage_uid)
            return load_from_json_file(
                get_metadata_file_path(storage_path))  # TODO Factorize this ?

        return {
            storage_uid: _load_one(storage_uid)
            for storage_uid in self.list_imported_key_storage_uids()
        }
# Example no. 5
    def get_container_info(self, filepath):
        """Return a text with info about the container algorithms and inner members metadata.

        Returns a short placeholder message when `filepath` is empty, when the
        container file has been deleted, or when its analysis fails.
        """
        if not filepath:
            return "Please select a container"

        filename = os.path.basename(filepath)
        try:
            container = load_from_json_file(filepath)

            info_lines = []

            metadata = extract_metadata_from_container(container)
            if not metadata:
                # BUGFIX: previously we appended this message but then still
                # accessed metadata["members"], crashing into the generic except
                # handler and returning "Container analysis failed" instead.
                info_lines.append(
                    "No metadata found in container regarding inner files.")
            else:
                info_lines.append("MEMBERS:")
                for member_name, member_metadata in sorted(
                        metadata["members"].items()):
                    # TODO later add more info
                    nice_size = self.get_nice_size(member_metadata["size"])
                    info_lines.append("- %s (%s)" % (member_name, nice_size))

            info_lines.append("")

            info_lines.append("KEYCHAIN ID:")
            info_lines.append(str(container.get("keychain_uid",
                                                "<not found>")))

            info_lines.append("")

            info_lines.append("ALGORITHMS:")
            summary = get_encryption_configuration_summary(container)
            info_lines.append(summary)

            return "\n".join(info_lines)

        except FileNotFoundError:
            return "This container was deleted"
        except Exception as exc:
            logging.error("Error when reading container %s: %r", filename, exc)
            return "Container analysis failed"
    def import_key_storage_from_folder(self, key_storage_path: Path):
        """
        Create a local import of a remote key storage folder (which must have a proper metadata file).

        Raises KeyStorageAlreadyExists if this key storage was already imported.
        """
        assert key_storage_path.exists(), key_storage_path

        metadata_filepath = get_metadata_file_path(key_storage_path)
        metadata = load_from_json_file(metadata_filepath)
        # Fails badly if metadata file is corrupted
        storage_uid = metadata["device_uid"]

        already_imported_uids = self.list_imported_key_storage_uids()
        if storage_uid in already_imported_uids:
            raise KeyStorageAlreadyExists(
                "Key storage with UUID %s was already imported locally" %
                storage_uid)

        target_path = self._get_imported_key_storage_path(
            key_storage_uid=storage_uid)
        # Must not fail, due to previous checks
        safe_copy_directory(key_storage_path, target_path)
        assert target_path.exists()
def test_filesystem_container_loading_and_dumping(tmp_path, container_conf):
    """
    Check container dump/load round-trips on the filesystem, both with the ciphertext
    kept inline in the JSON file and with it offloaded to a sibling ".data" file.
    """
    data = b"jhf" * 200

    # Randomize optional fields so both code paths get exercised across runs.
    keychain_uid = random.choice([None, uuid.UUID("450fc293-b702-42d3-ae65-e9cc58e5a62a")])

    metadata = random.choice([None, dict(a=[123])])

    container = encrypt_data_into_container(
        data=data, conf=container_conf, keychain_uid=keychain_uid, metadata=metadata
    )
    container_ciphertext_before_dump = container["data_ciphertext"]

    # Reference copy without ciphertext, for the include_data_ciphertext=False checks below.
    container_without_ciphertext = copy.deepcopy(container)
    del container_without_ciphertext["data_ciphertext"]

    # CASE 1 - MONOLITHIC JSON FILE

    container_filepath = tmp_path / "mycontainer_monolithic.crypt"
    dump_container_to_filesystem(container_filepath, container=container, offload_data_ciphertext=False)
    container_reloaded = load_from_json_file(container_filepath)
    assert container_reloaded["data_ciphertext"] == container_ciphertext_before_dump  # NO OFFLOADING
    assert load_container_from_filesystem(container_filepath) == container  # UNCHANGED from original

    container_truncated = load_container_from_filesystem(container_filepath, include_data_ciphertext=False)
    assert "data_ciphertext" not in container_truncated
    assert container_truncated == container_without_ciphertext

    assert container["data_ciphertext"] == container_ciphertext_before_dump  # Original dict unchanged

    size1 = get_container_size_on_filesystem(container_filepath)
    assert size1

    # NOTE(review): deletion intentionally NOT performed here, so that CASE 2's
    # dump below exercises the overwrite path on an existing file.
    assert container_filepath.exists()
    #delete_container_from_filesystem(container_filepath)
    #assert not container_filepath.exists()

    # CASE 2 - OFFLOADED CIPHERTEXT FILE

    container_filepath = tmp_path / "mycontainer_offloaded.crypt"

    dump_container_to_filesystem(container_filepath, container=container)  # OVERWRITE, with offloading by default
    container_reloaded = load_from_json_file(container_filepath)
    assert container_reloaded["data_ciphertext"] == "[OFFLOADED]"

    # The ciphertext lives in a sibling "<name>.data" file, as raw bytes.
    container_offloaded_filepath = Path(str(container_filepath) + ".data")
    offloaded_data_reloaded = container_offloaded_filepath.read_bytes()
    assert offloaded_data_reloaded == container_ciphertext_before_dump  # WELL OFFLOADED as DIRECT BYTES
    assert load_container_from_filesystem(container_filepath) == container  # UNCHANGED from original

    container_truncated = load_container_from_filesystem(container_filepath, include_data_ciphertext=False)
    assert "data_ciphertext" not in container_truncated
    assert container_truncated == container_without_ciphertext

    assert container["data_ciphertext"] == container_ciphertext_before_dump  # Original dict unchanged

    size2 = get_container_size_on_filesystem(container_filepath)
    assert size2 < size1   # Overhead of base64 encoding in monolithic file!
    assert size1 < size2 + 1000  # Overhead remains limited though

    # Deleting the container must remove both the JSON file and its offloaded data file.
    assert container_filepath.exists()
    assert container_offloaded_filepath.exists()
    delete_container_from_filesystem(container_filepath)
    assert not container_filepath.exists()
    assert not container_offloaded_filepath.exists()