def test_serialization_utilities(tmp_path):
    """Check the dump/load helpers for JSON str, bytes and file round-trips.

    Covers key sorting, propagation of json kwargs (ensure_ascii), and the
    extended-JSON encoding of bytes ($binary subType 00) and UUID (subType 03).
    """
    uid = uuid.UUID("7c0b18f5-f410-4e83-9263-b38c2328e516")
    data = dict(b=b"xyz", a="hêllo", c=uid)

    serialized_str = dump_to_json_str(data)
    # Keys are sorted
    assert (
        serialized_str
        == r'{"a": "h\u00eallo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_str(serialized_str)
    assert deserialized == data

    serialized_str = dump_to_json_str(
        data, ensure_ascii=False
    )  # Json arguments well propagated
    assert (
        serialized_str
        == r'{"a": "hêllo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_str(serialized_str)
    assert deserialized == data

    serialized_bytes = dump_to_json_bytes(data)
    # Keys are sorted
    assert (
        serialized_bytes
        == rb'{"a": "h\u00eallo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_bytes(serialized_bytes)
    assert deserialized == data

    serialized_bytes = dump_to_json_bytes(
        data, ensure_ascii=False
    )  # Json arguments well propagated
    assert (
        serialized_bytes
        == b'{"a": "h\xc3\xaallo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_bytes(serialized_bytes)
    assert deserialized == data

    tmp_filepath = os.path.join(tmp_path, "dummy_temp_file.dat")
    serialized_bytes = dump_to_json_file(
        tmp_filepath, data=data, ensure_ascii=True
    )  # Json arguments well propagated
    # FIX: use the rb'' raw-bytes prefix (as in the dump_to_json_bytes case above);
    # the previous b'...\u00ea...' form relied on "\u" being an *invalid* bytes
    # escape left verbatim, which raises SyntaxWarning on modern Python.
    assert (
        serialized_bytes
        == rb'{"a": "h\u00eallo", "b": {"$binary": {"base64": "eHl6", "subType": "00"}}, "c": {"$binary": {"base64": "fAsY9fQQToOSY7OMIyjlFg==", "subType": "03"}}}'
    )
    deserialized = load_from_json_file(tmp_filepath)
    assert deserialized == data
def _process_and_store_file(self, filename_base, data):
    """Encrypt `data` into a container and persist it under `filename_base`."""
    encrypted_container = self._encrypt_data_into_container(data)
    serialized_payload = dump_to_json_bytes(encrypted_container, indent=4)
    target_path = self._make_absolute_container_path(filename_base + CONTAINER_SUFFIX)
    # Overwriting an already existing file at this path is accepted behaviour
    with open(target_path, "wb") as output_file:
        output_file.write(serialized_payload)
    self._purge_exceeding_containers()  # Purge only AFTER the new container exists
def encrypt(input_medium, output_container):
    """Turn a media file into a secure container."""
    # Default target: same name as the input, with the container suffix appended
    output_container = output_container or LazyFile(input_medium.name + CONTAINER_SUFFIX, "wb")
    click.echo("In encrypt: %s" % str(locals()))
    payload = _do_encrypt(data=input_medium.read())
    serialized_payload = dump_to_json_bytes(payload, indent=4)
    with output_container as target_stream:
        target_stream.write(serialized_payload)
def _flush_aggregated_data(self):
    """Serialize the pending dataset and hand it over to the tarfile aggregator."""
    if not self._current_start_time:
        # No aggregation period was ever opened, so nothing must be pending
        assert not self._current_dataset
        return
    flush_moment = datetime.now(tz=timezone.utc)
    serialized_dataset = dump_to_json_bytes(self._current_dataset)
    self._tarfile_aggregator.add_record(
        data=serialized_dataset,
        sensor_name=self._sensor_name,
        from_datetime=self._current_start_time,
        to_datetime=flush_moment,
        extension=".json",
    )
    self._current_dataset = None
    super()._flush_aggregated_data()
def encrypt_data(self, data: bytes, *, conf: dict, keychain_uid=None, metadata=None) -> dict:
    """Encrypt `data` into a container dict, following the layered `conf` tree.

    Each entry of conf["data_encryption_strata"] encrypts the current payload
    with a fresh symmetric key; that key is itself wrapped by each of the
    stratum's "key_encryption_strata", and signatures are computed over the
    stratum's ciphertext.

    :param data: plaintext payload to protect
    :param conf: configuration tree; deep-copied so the caller's dict is untouched
    :param keychain_uid: optional UUID reused across containers; generated if None
    :param metadata: optional dict stored as-is (unencrypted) in the container
    :return: container dict with ciphertext, per-stratum key/signature info
    """
    assert metadata is None or isinstance(metadata, dict), metadata
    container_format = CONTAINER_FORMAT
    container_uid = generate_uuid0()  # ALWAYS UNIQUE!
    keychain_uid = (
        keychain_uid or generate_uuid0()
    )  # Might be shared by lots of containers
    conf = copy.deepcopy(conf)  # So that we can manipulate it
    assert isinstance(data, bytes), data
    assert isinstance(conf, dict), conf

    data_current = data  # Initially unencrypted, might remain so if no strata
    result_data_encryption_strata = []

    for data_encryption_stratum in conf["data_encryption_strata"]:
        data_encryption_algo = data_encryption_stratum["data_encryption_algo"]

        logger.debug("Generating symmetric key of type %r", data_encryption_algo)
        symmetric_key = generate_symmetric_key(encryption_algo=data_encryption_algo)

        logger.debug("Encrypting data with symmetric key of type %r", data_encryption_algo)
        data_cipherdict = encrypt_bytestring(
            plaintext=data_current,
            encryption_algo=data_encryption_algo,
            key=symmetric_key,
        )
        assert isinstance(data_cipherdict, dict), data_cipherdict
        # The serialized cipherdict becomes the plaintext of the NEXT stratum
        data_current = dump_to_json_bytes(data_cipherdict)

        symmetric_key_data = (
            symmetric_key
        )  # Initially unencrypted, might remain so if no strata

        result_key_encryption_strata = []
        for key_encryption_stratum in data_encryption_stratum["key_encryption_strata"]:
            # Each key-encryption stratum wraps the (possibly already wrapped) key once more
            symmetric_key_cipherdict = self._encrypt_symmetric_key(
                keychain_uid=keychain_uid,
                symmetric_key_data=symmetric_key_data,
                conf=key_encryption_stratum,
            )
            symmetric_key_data = dump_to_json_bytes(
                symmetric_key_cipherdict
            )  # Remain as bytes all along
            result_key_encryption_strata.append(
                key_encryption_stratum
            )  # Unmodified for now

        data_signatures = []
        for signature_conf in data_encryption_stratum["data_signatures"]:
            # Sign this stratum's ciphertext (not the original plaintext)
            signature_value = self._generate_signature(
                keychain_uid=keychain_uid,
                data_ciphertext=data_current,
                conf=signature_conf,
            )
            # Injected into the deep-copied conf entry, then stored in the result
            signature_conf["signature_value"] = signature_value
            data_signatures.append(signature_conf)

        result_data_encryption_strata.append(
            dict(
                data_encryption_algo=data_encryption_algo,
                key_ciphertext=symmetric_key_data,
                key_encryption_strata=result_key_encryption_strata,
                data_signatures=data_signatures,
            ))

    data_ciphertext = (
        data_current
    )  # New fully encrypted (unless data_encryption_strata is empty)

    return dict(
        container_format=container_format,
        container_uid=container_uid,
        keychain_uid=keychain_uid,
        data_ciphertext=data_ciphertext,
        data_encryption_strata=result_data_encryption_strata,
        metadata=metadata,
    )
def test_shamir_container_encryption_and_decryption(shamir_container_conf, escrow_dependencies_builder):
    """End-to-end check of a shared-secret container, including share-loss tolerance."""
    data = b"abc"  # get_random_bytes(random.randint(1, 1000))  # FIXME reactivate ???
    keychain_uid = random.choice([None, uuid.UUID("450fc293-b702-42d3-ae65-e9cc58e5a62a")])
    metadata = random.choice([None, dict(a=[123])])

    container = encrypt_data_into_container(
        data=data, conf=shamir_container_conf, keychain_uid=keychain_uid, metadata=metadata
    )
    assert container["keychain_uid"]
    if keychain_uid:
        assert container["keychain_uid"] == keychain_uid

    escrow_dependencies = gather_escrow_dependencies(containers=[container])
    assert escrow_dependencies == escrow_dependencies_builder(container["keychain_uid"])

    assert isinstance(container["data_ciphertext"], bytes)
    assert decrypt_data_from_container(container=container) == data

    # Locate the stratum whose key is protected by a shared secret
    data_encryption_shamir = {}
    for data_encryption in container["data_encryption_strata"]:
        for key_encryption in data_encryption["key_encryption_strata"]:
            if key_encryption["key_encryption_algo"] == SHARED_SECRET_MARKER:
                data_encryption_shamir = data_encryption

    key_ciphertext_shares = load_from_json_bytes(data_encryption_shamir["key_ciphertext"])

    def _drop_last_share():
        # Remove one share and write the truncated set back into the container
        del key_ciphertext_shares["shares"][-1]
        data_encryption_shamir["key_ciphertext"] = dump_to_json_bytes(key_ciphertext_shares)

    # Losing one share still allows decryption
    _drop_last_share()
    assert decrypt_data_from_container(container=container) == data

    # Losing a second share is still tolerated
    _drop_last_share()
    assert decrypt_data_from_container(container=container) == data

    # A third loss leaves too few valid shares to rebuild the secret
    _drop_last_share()
    with pytest.raises(DecryptionError, match="share.*missing"):
        decrypt_data_from_container(container=container)

    result_metadata = extract_metadata_from_container(container=container)
    assert result_metadata == metadata

    container["container_format"] = "OAJKB"
    with pytest.raises(ValueError, match="Unknown container format"):
        decrypt_data_from_container(container=container)