def test_serialization_utilities(tmp_path):

    uid = uuid.UUID("7c0b18f5-f410-4e83-9263-b38c2328e516")
    data = dict(b=b"xyz", a="hêllo", c=uid)

    serialized_str = dump_to_json_str(data)
    # Keys are sorted
    assert (
        serialized_str
        == r'{"a": "h\u00eallo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_str(serialized_str)
    assert deserialized == data

    serialized_str = dump_to_json_str(
        data, ensure_ascii=False
    )  # JSON keyword arguments are properly propagated
    assert (
        serialized_str
        == r'{"a": "hêllo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_str(serialized_str)
    assert deserialized == data

    serialized_str = dump_to_json_bytes(data)
    # Keys are sorted
    assert (
        serialized_str
        == rb'{"a": "h\u00eallo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_bytes(serialized_str)
    assert deserialized == data

    serialized_str = dump_to_json_bytes(
        data, ensure_ascii=False
    )  # JSON keyword arguments are properly propagated
    assert (
        serialized_str
        == b'{"a": "h\xc3\xaallo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_bytes(serialized_str)
    assert deserialized == data

    tmp_filepath = os.path.join(tmp_path, "dummy_temp_file.dat")
    serialized_str = dump_to_json_file(
        tmp_filepath, data=data, ensure_ascii=True
    )  # JSON keyword arguments are properly propagated
    assert (
        serialized_str
        == b'{"a": "h\u00eallo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_file(tmp_filepath)
    assert deserialized == data
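
The asserted strings above pin down the exact wire format: MongoDB Extended JSON in canonical mode, with bytes encoded as "$binary" subtype "00", UUIDs as legacy subtype "03", and keys sorted. As a minimal sketch (an assumption for illustration, not necessarily the project's real implementation), such helpers could be built on pymongo's bson.json_util:

from bson.binary import UuidRepresentation
from bson.json_util import JSONMode, JSONOptions, dumps, loads

# Canonical mode plus the legacy UUID representation matches the
# "subType": "03" seen in the asserts above (requires a recent pymongo)
_JSON_OPTIONS = JSONOptions(
    json_mode=JSONMode.CANONICAL,
    uuid_representation=UuidRepresentation.PYTHON_LEGACY,
)

def dump_to_json_str(data, **extra_options):
    # sort_keys=True yields the deterministic key ordering asserted above;
    # extra options like ensure_ascii are forwarded to json.dumps()
    return dumps(data, sort_keys=True, json_options=_JSON_OPTIONS, **extra_options)

def load_from_json_str(serialized):
    return loads(serialized, json_options=_JSON_OPTIONS)

def dump_to_json_bytes(data, **extra_options):
    return dump_to_json_str(data, **extra_options).encode("utf8")

def load_from_json_bytes(serialized):
    return loads(serialized.decode("utf8"), json_options=_JSON_OPTIONS)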
Example #2
    def decrypt_data(self, container: dict) -> bytes:
        assert isinstance(container, dict), container

        container_format = container["container_format"]
        if container_format != CONTAINER_FORMAT:
            raise ValueError("Unknown container format %s" % container_format)

        container_uid = container["container_uid"]
        del container_uid  # Might be used for logging etc, later...

        keychain_uid = container["keychain_uid"]

        data_current = container["data_ciphertext"]

        for data_encryption_stratum in reversed(
                container["data_encryption_strata"]):

            data_encryption_algo = data_encryption_stratum[
                "data_encryption_algo"]

            for signature_conf in data_encryption_stratum["data_signatures"]:
                self._verify_message_signature(keychain_uid=keychain_uid,
                                               message=data_current,
                                               conf=signature_conf)

            symmetric_key_data = data_encryption_stratum[
                "key_ciphertext"]  # We start fully encrypted, and unravel it
            for key_encryption_stratum in data_encryption_stratum[
                    "key_encryption_strata"]:
                symmetric_key_cipherdict = load_from_json_bytes(
                    symmetric_key_data)  # We remain as bytes all along
                symmetric_key_data = self._decrypt_symmetric_key(
                    keychain_uid=keychain_uid,
                    symmetric_key_cipherdict=symmetric_key_cipherdict,
                    conf=key_encryption_stratum,
                )

            assert isinstance(symmetric_key_data, bytes), symmetric_key_data
            data_cipherdict = load_from_json_bytes(data_current)
            data_current = decrypt_bytestring(
                cipherdict=data_cipherdict,
                key=symmetric_key_data,
                encryption_algo=data_encryption_algo,
            )

        data = data_current  # Now decrypted
        return data
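
For orientation, here is a hypothetical skeleton of the container structure this method walks, using only the keys read above; the algorithm names and nesting depth are illustrative assumptions:

import uuid

EXAMPLE_CONTAINER = dict(
    container_format=CONTAINER_FORMAT,  # checked first, else ValueError is raised
    container_uid=uuid.uuid4(),
    keychain_uid=uuid.uuid4(),
    data_ciphertext=b"<json-encoded data cipherdict>",
    data_encryption_strata=[  # applied in order when encrypting, unraveled in reverse here
        dict(
            data_encryption_algo="AES_CBC",  # assumed algorithm name
            key_ciphertext=b"<json-encoded encrypted symmetric key>",
            key_encryption_strata=[
                dict(key_encryption_algo="RSA_OAEP")  # assumed; real entries also carry escrow info
            ],
            data_signatures=[],  # each entry is verified against the still-encrypted payload
        )
    ],
)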
Example #3
def decrypt(input_container, output_medium):
    """Turn a container file back into its original media file."""
    if not output_medium:
        if input_container.name.endswith(CONTAINER_SUFFIX):
            output_medium_name = input_container.name[:-len(CONTAINER_SUFFIX)]
        else:
            output_medium_name = input_container.name + MEDIUM_SUFFIX
        output_medium = LazyFile(output_medium_name, "wb")

    click.echo("In decrypt: %s" % str(locals()))

    container = load_from_json_bytes(input_container.read())

    medium_content = _do_decrypt(container=container)

    with output_medium:
        output_medium.write(medium_content)
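
LazyFile above is presumably click's click.utils.LazyFile, which defers opening the output file until it is first used; a tiny standalone demonstration of that behaviour (the filename is arbitrary):

from click.utils import LazyFile

lazy_out = LazyFile("demo_output.bin", "wb")  # nothing is opened yet
with lazy_out:
    lazy_out.write(b"payload")  # the underlying file is opened on first use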
Example #4
    def decrypt_container_from_storage(self, container_name_or_idx):
        """
        Return the decrypted content of the container designated by `container_name_or_idx`:
        either a name present in `list_container_names()`, or an index into that sorted list.
        """
        if isinstance(container_name_or_idx, int):
            container_names = self.list_container_names(
                as_sorted_relative_paths=True)
            container_name = container_names[
                container_name_or_idx]  # Raises IndexError if idx is out of bounds
        else:
            assert isinstance(container_name_or_idx,
                              str), repr(container_name_or_idx)
            container_name = container_name_or_idx

        assert not os.path.isabs(container_name), container_name
        with open(os.path.join(self._output_dir, container_name), "rb") as f:
            data = f.read()
        container = load_from_json_bytes(data)
        return self._decrypt_data_from_container(container)
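
Hypothetical call patterns for the method above (the storage instance and container name are placeholders for illustration):

# plaintext = storage.decrypt_container_from_storage(0)  # by index into the sorted listing
# plaintext = storage.decrypt_container_from_storage("some_container.crypt")  # by relative name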
Example #5
def test_gyroscope_sensor():

    from waclient.sensors.gyroscope import get_gyroscope_sensor

    fake_tarfile_aggregator = FakeTarfileRecordsAggregator()

    json_aggregator = JsonDataAggregator(
        max_duration_s=0.5,
        tarfile_aggregator=fake_tarfile_aggregator,
        sensor_name="test_gyroscope",
    )

    sensor = get_gyroscope_sensor(json_aggregator=json_aggregator,
                                  polling_interval_s=0.1)

    sensor.start()

    time.sleep(0.9)

    sensor.stop()
    sensor.join()

    json_aggregator.flush_dataset()
    assert not json_aggregator._current_dataset

    assert len(fake_tarfile_aggregator._test_records) == 2

    print("TEST RECORDS:", fake_tarfile_aggregator._test_records)

    for record in fake_tarfile_aggregator._test_records:
        sensor_entries = load_from_json_bytes(record["data"])
        assert len(sensor_entries) >= 1  # Depends on current user config
        for sensor_entry in sensor_entries:
            assert "rotation_rate_x" in sensor_entry, sensor_entry
            assert "rotation_rate_y" in sensor_entry, sensor_entry
            assert "rotation_rate_z" in sensor_entry, sensor_entry
Example #6
def test_aggregators_thread_safety(tmp_path):

    offload_data_ciphertext = random.choice((True, False))
    container_storage = FakeTestContainerStorage(
        default_encryption_conf={"zesvscc": True},
        containers_dir=tmp_path,
        offload_data_ciphertext=offload_data_ciphertext,
    )

    tarfile_aggregator = TarfileRecordsAggregator(
        container_storage=container_storage, max_duration_s=100)
    json_aggregator = JsonDataAggregator(max_duration_s=1,
                                         tarfile_aggregator=tarfile_aggregator,
                                         sensor_name="some_sensors")

    misc_futures = []

    record_data = "hêllo".encode("utf8")

    with ThreadPoolExecutor(max_workers=30) as executor:
        for burst in range(10):
            for idx in range(100):
                misc_futures.append(
                    executor.submit(json_aggregator.add_data, dict(res=idx)))
                misc_futures.append(
                    executor.submit(json_aggregator.flush_dataset))
                misc_futures.append(
                    executor.submit(
                        tarfile_aggregator.add_record,
                        sensor_name="some_recorder_%s_%s" % (burst, idx),
                        from_datetime=datetime(year=2017,
                                               month=10,
                                               day=11,
                                               tzinfo=timezone.utc),
                        to_datetime=datetime(year=2017,
                                             month=12,
                                             day=1,
                                             tzinfo=timezone.utc),
                        extension=".txt",
                        data=record_data,
                    ))
                misc_futures.append(
                    executor.submit(tarfile_aggregator.finalize_tarfile))
            time.sleep(0.2)

    json_aggregator.flush_dataset()
    tarfile_aggregator.finalize_tarfile()
    container_storage.wait_for_idle_state()

    misc_results = set(future.result() for future in misc_futures)
    assert misc_results == {None}  # No results expected from any of these methods

    container_names = container_storage.list_container_names(as_sorted=True)

    tarfiles_bytes = [
        container_storage.decrypt_container_from_storage(container_name)
        for container_name in container_names
    ]

    tarfiles = [
        TarfileRecordsAggregator.read_tarfile_from_bytestring(bytestring)
        for bytestring in tarfiles_bytes if bytestring
    ]

    tarfiles_count = len(tarfiles)
    print("Tarfiles count:", tarfiles_count)

    total_idx = 0
    txt_count = 0

    for tarfile in tarfiles:
        print("NEW TARFILE")
        members = tarfile.getmembers()
        for member in members:
            print(">>>>", member.name)
            ext = os.path.splitext(member.name)[1]
            record_bytes = tarfile.extractfile(member).read()
            if ext == ".json":
                data_array = load_from_json_bytes(record_bytes)
                total_idx += sum(data["res"] for data in data_array)
            elif ext == ".txt":
                assert record_bytes == record_data
                txt_count += 1
            else:
                raise RuntimeError(ext)

    assert txt_count == 1000
    assert total_idx == 1000 * 99 / 2 == 49500  # 10 bursts x sum(range(100))
Example #7
def test_nominal_recording_toolchain_case():

    config = ConfigParser()  # Empty but OK
    config.setdefaults("usersettings", {
        "record_gyroscope": 1,
        "record_gps": 1,
        "record_microphone": 1
    })

    key_storage_pool = FilesystemKeyStoragePool(INTERNAL_KEYS_DIR)
    encryption_conf = get_encryption_conf("test")
    toolchain = build_recording_toolchain(config,
                                          key_storage_pool=key_storage_pool,
                                          encryption_conf=encryption_conf)
    sensors_manager = toolchain["sensors_manager"]
    data_aggregators = toolchain["data_aggregators"]
    tarfile_aggregators = toolchain["tarfile_aggregators"]
    container_storage = toolchain["container_storage"]

    purge_test_containers()

    # TODO - make this a purge() method of storage!!!
    # CLEANUP of already existing containers
    # for container_name in container_storage.list_container_names(sorted=True):
    #    container_storage._delete_container(container_name)
    # assert not len(container_storage)

    start_recording_toolchain(toolchain)
    time.sleep(2)
    stop_recording_toolchain(toolchain)

    for i in range(2):
        assert not sensors_manager.is_running
        for data_aggregator in data_aggregators:
            assert len(data_aggregator) == 0
        for tarfile_aggregator in tarfile_aggregators:
            assert len(tarfile_aggregator) == 0
        time.sleep(1)

    assert len(container_storage) == 1  # Recording too brief to trigger container rotation
    (container_name,) = container_storage.list_container_names(as_sorted=True)

    tarfile_bytestring = container_storage.decrypt_container_from_storage(
        container_name)

    tar_file = TarfileRecordsAggregator.read_tarfile_from_bytestring(
        tarfile_bytestring)
    tarfile_members = tar_file.getnames()
    assert len(tarfile_members) == 3

    # Gyroscope data

    gyroscope_filenames = [m for m in tarfile_members if "gyroscope" in m]
    assert len(gyroscope_filenames) == 1
    assert gyroscope_filenames[0].endswith(".json")

    json_bytestring = tar_file.extractfile(gyroscope_filenames[0]).read()
    gyroscope_data = load_from_json_bytes(json_bytestring)
    assert isinstance(gyroscope_data, list)
    assert len(gyroscope_data) >= 4
    assert gyroscope_data[0] == {
        "rotation_rate_x": None,
        "rotation_rate_y": None,
        "rotation_rate_z": None,
    }

    # GPS data

    microphone_filenames = [m for m in tarfile_members if "gps" in m]
    assert len(microphone_filenames) == 1
    assert microphone_filenames[0].endswith(".json")

    json_bytestring = tar_file.extractfile(microphone_filenames[0]).read()
    gyroscope_data = load_from_json_bytes(json_bytestring)
    # Fake data pushed by sensor
    assert gyroscope_data == [{
        'altitude': 2.2
    }, {
        'message_type': 'some_message_type',
        'status': 'some_status_value'
    }]

    # Microphone data

    microphone_filenames = [m for m in tarfile_members if "microphone" in m]
    assert len(microphone_filenames) == 1
    assert microphone_filenames[0].endswith(".mp4")

    mp4_bytestring = tar_file.extractfile(microphone_filenames[0]).read()
    assert mp4_bytestring == b"fake_microphone_recording_data"
Example #8
def test_shamir_container_encryption_and_decryption(shamir_container_conf, escrow_dependencies_builder):
    data = b"abc"  # get_random_bytes(random.randint(1, 1000))   # FIXME reactivate ???

    keychain_uid = random.choice([None, uuid.UUID("450fc293-b702-42d3-ae65-e9cc58e5a62a")])

    metadata = random.choice([None, dict(a=[123])])

    container = encrypt_data_into_container(
        data=data, conf=shamir_container_conf, keychain_uid=keychain_uid, metadata=metadata
    )

    assert container["keychain_uid"]
    if keychain_uid:
        assert container["keychain_uid"] == keychain_uid

    escrow_dependencies = gather_escrow_dependencies(containers=[container])
    assert escrow_dependencies == escrow_dependencies_builder(container["keychain_uid"])

    assert isinstance(container["data_ciphertext"], bytes)

    result_data = decrypt_data_from_container(container=container)

    assert result_data == data

    data_encryption_shamir = {}
    # Successively delete shares from the key cipherdict, until too few remain to decrypt
    for data_encryption in container["data_encryption_strata"]:
        for key_encryption in data_encryption["key_encryption_strata"]:
            if key_encryption["key_encryption_algo"] == SHARED_SECRET_MARKER:
                data_encryption_shamir = data_encryption

    key_ciphertext_shares = load_from_json_bytes(data_encryption_shamir["key_ciphertext"])

    # 1 share is deleted

    del key_ciphertext_shares["shares"][-1]

    data_encryption_shamir["key_ciphertext"] = dump_to_json_bytes(key_ciphertext_shares)

    result_data = decrypt_data_from_container(container=container)
    assert result_data == data

    # Another share is deleted

    del key_ciphertext_shares["shares"][-1]

    data_encryption_shamir["key_ciphertext"] = dump_to_json_bytes(key_ciphertext_shares)

    result_data = decrypt_data_from_container(container=container)
    assert result_data == data

    # Another share is deleted and now there aren't enough valid ones to decipher data

    del key_ciphertext_shares["shares"][-1]

    data_encryption_shamir["key_ciphertext"] = dump_to_json_bytes(key_ciphertext_shares)

    with pytest.raises(DecryptionError, match="share.*missing"):
        decrypt_data_from_container(container=container)

    result_metadata = extract_metadata_from_container(container=container)
    assert result_metadata == metadata

    container["container_format"] = "OAJKB"
    with pytest.raises(ValueError, match="Unknown container format"):
        decrypt_data_from_container(container=container)
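
The threshold behaviour exercised above (decryption keeps working as shares are deleted, until too few remain) can be demonstrated directly with PyCryptodome's Shamir primitive, which this library presumably builds on; an assumption for illustration:

from Crypto.Protocol.SecretSharing import Shamir

secret = b"0123456789abcdef"  # Shamir.split() operates on 16-byte secrets
shares = Shamir.split(3, 5, secret)  # threshold 3, 5 shares in total
assert Shamir.combine(shares[:4]) == secret  # losing shares is fine...
assert Shamir.combine(shares[:3]) == secret  # ...down to the threshold
assert Shamir.combine(shares[:2]) != secret  # below it, only garbage comes back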