def test_put_blob_chunking_required_mult_of_block_size(
        self, resource_group, location, storage_account, storage_account_key):
    """Upload an encrypted blob sized exactly one block past the single-put
    limit, forcing the chunked (parallel) upload path, and verify the
    downloaded bytes round-trip."""
    # parallel tests introduce random order of requests, can only run live
    if not self.is_live:
        pytest.skip("live only")
    self._setup(storage_account.name, storage_account_key)
    self.bsc.key_encryption_key = KeyWrapper('key1')
    self.bsc.require_encryption = True
    # max_single_put_size + max_block_size guarantees chunking with whole blocks.
    content = self.get_random_bytes(
        self.config.max_single_put_size + self.config.max_block_size)
    blob_name = self._get_blob_reference(BlobType.BlockBlob)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)

    # Act
    blob.upload_blob(content, max_concurrency=3)
    blob_content = blob.download_blob().content_as_bytes(max_concurrency=3)

    # Assert
    self.assertEqual(content, blob_content)
async def test_put_with_strict_mode(self, storage_account_name, storage_account_key): qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key, transport=AiohttpTestTransport()) # Arrange queue = await self._create_queue(qsc) kek = KeyWrapper('key1') queue.key_encryption_key = kek queue.require_encryption = True await queue.send_message(u'message') queue.key_encryption_key = None # Assert with self.assertRaises(ValueError) as e: await queue.send_message(u'message') self.assertEqual(str(e.exception), "Encryption required but no key was provided.")
async def test_peek_messages_encrypted_resolver(self, resource_group, location, storage_account, storage_account_key): qsc = QueueServiceClient(self.account_url(storage_account.name, "queue"), storage_account_key, transport=AiohttpTestTransport()) # Arrange qsc.key_encryption_key = KeyWrapper('key1') queue = await self._create_queue(qsc) await queue.send_message(u'encrypted_message_4') key_resolver = KeyResolver() key_resolver.put_key(qsc.key_encryption_key) queue.key_resolver_function = key_resolver.resolve_key queue.key_encryption_key = None # Ensure that the resolver is used # Act li = await queue.peek_messages() # Assert self.assertEqual(li[0].content, u'encrypted_message_4')
def test_update_encrypted_raw_text_message(self, storage_account_name, storage_account_key):
    """Update an encrypted message with encoding policies disabled and verify
    the raw unicode content round-trips."""
    # TODO: Recording doesn't work
    # Arrange
    qsc = QueueServiceClient(
        self.account_url(storage_account_name, "queue"), storage_account_key)
    # No encode/decode policy: the raw text is encrypted directly.
    queue = self._create_queue(
        qsc, message_encode_policy=None, message_decode_policy=None)
    queue.key_encryption_key = KeyWrapper('key1')
    raw_text = u'Update Me'
    queue.send_message(raw_text)
    messages = queue.receive_messages()
    list_result1 = next(messages)

    # Act
    raw_text = u'Updated'
    list_result1.content = raw_text
    queue.update_message(list_result1)
    list_result2 = next(messages)

    # Assert
    self.assertEqual(raw_text, list_result2.content)
async def test_put_blob_chunking_required_range_specified_async(
        self, resource_group, location, storage_account, storage_account_key):
    """Chunked encrypted upload with an explicit length shorter than the
    buffer; only the leading slice should be stored."""
    # parallel tests introduce random order of requests, can only run live
    # NOTE(review): despite the comment, there is no is_live/recording guard
    # here, unlike the sync variants — confirm a decorator handles this.
    await self._setup(storage_account, storage_account_key)
    self.bsc.key_encryption_key = KeyWrapper('key1')
    self.bsc.require_encryption = True
    content = self.get_random_bytes(self.config.max_single_put_size * 2)
    blob_name = self._get_blob_reference(BlobType.BlockBlob)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)

    # Act: upload only the first max_single_put_size + 53 bytes.
    await blob.upload_blob(
        content, length=self.config.max_single_put_size + 53, max_concurrency=3)
    blob_content = await (await blob.download_blob()).content_as_bytes(
        max_concurrency=3)

    # Assert
    self.assertEqual(content[:self.config.max_single_put_size + 53], blob_content)
async def test_get_blob_range_middle_to_end_async(
        self, resource_group, location, storage_account, storage_account_key):
    """Ranged download of an encrypted blob: explicit offset+length covering
    the tail, and an open-ended range from the same offset."""
    await self._setup(storage_account, storage_account_key)
    self.bsc.key_encryption_key = KeyWrapper('key1')
    self.bsc.require_encryption = True
    content = self.get_random_bytes(128)
    blob_name = self._get_blob_reference(BlobType.BlockBlob)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)

    # Act
    await blob.upload_blob(content, max_concurrency=1)
    # Explicit length covering exactly bytes 100..127.
    blob_content = await (await blob.download_blob(
        offset=100, length=28)).content_as_bytes()
    # Open-ended range from the same offset must yield the same bytes.
    blob_content2 = await (await blob.download_blob(offset=100)).content_as_bytes()

    # Assert
    self.assertEqual(content[100:], blob_content)
    self.assertEqual(content[100:], blob_content2)
def test_put_blob_invalid_stream_type(self):
    """Encrypted uploads accept bytes only: text streams raise TypeError on
    both the single-shot and the chunked code paths."""
    # Arrange
    self.bsc.require_encryption = True
    self.bsc.key_encryption_key = KeyWrapper('key1')
    small_stream = StringIO(u'small')
    large_stream = StringIO(u'large' * self.config.max_single_put_size)
    blob_name = self._get_blob_reference(BlobType.BlockBlob)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)

    # Assert
    # Block blob specific single shot
    with self.assertRaises(TypeError) as e:
        blob.upload_blob(small_stream, length=5)
    self.assertTrue('Blob data should be of type bytes.' in str(e.exception))

    # Generic blob chunked
    with self.assertRaises(TypeError) as e:
        blob.upload_blob(large_stream)
    self.assertTrue('Blob data should be of type bytes.' in str(e.exception))
async def test_invalid_methods_fail_page_async(
        self, resource_group, location, storage_account, storage_account_key):
    """Page-blob operations must raise ValueError when client-side
    encryption is configured (page blobs are unsupported)."""
    await self._setup(storage_account, storage_account_key)
    self.bsc.key_encryption_key = KeyWrapper('key1')
    blob_name = self._get_blob_reference(BlobType.PageBlob)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)

    # Assert
    with self.assertRaises(ValueError) as e:
        await blob.upload_page(urandom(512), offset=0, length=512,
                               blob_type=BlobType.PageBlob)
    self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

    with self.assertRaises(ValueError) as e:
        await blob.create_page_blob(512)
    self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
async def _test_put_blob_chunking_required_non_mult_of_block_size_async(
        self):
    """Chunked encrypted upload whose size is NOT a block-size multiple
    (single-put limit + 1 byte), exercising the final partial block."""
    # parallel tests introduce random order of requests, can only run live
    if TestMode.need_recording_file(self.test_mode):
        return

    # Arrange
    await self._setup()
    self.bsc.key_encryption_key = KeyWrapper('key1')
    self.bsc.require_encryption = True
    # One byte past the single-put limit forces the chunked path.
    content = urandom(self.config.max_single_put_size + 1)
    blob_name = self._get_blob_reference(BlobType.BlockBlob)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)

    # Act
    await blob.upload_blob(content, max_concurrency=3)
    blob_content = await (await blob.download_blob()).content_as_bytes(
        max_concurrency=3)

    # Assert
    self.assertEqual(content, blob_content)
def test_update_encrypted_binary_message(self):
    """Update an encrypted binary (base64-encoded) message and verify the
    new payload round-trips."""
    # Arrange
    queue = self._create_queue()
    queue.key_encryption_key = KeyWrapper('key1')
    # Base64 policies so raw bytes can travel through the queue payload.
    queue._config.message_encode_policy = BinaryBase64EncodePolicy()
    queue._config.message_decode_policy = BinaryBase64DecodePolicy()
    binary_message = self.get_random_bytes(100)
    queue.enqueue_message(binary_message)
    messages = queue.receive_messages()
    list_result1 = next(messages)

    # Act: replace the content with fresh random bytes and push the update.
    binary_message = self.get_random_bytes(100)
    list_result1.content = binary_message
    queue.update_message(list_result1)
    list_result2 = next(messages)

    # Assert
    self.assertEqual(binary_message, list_result2.content)
def test_put_blob_chunking_required_range_specified(self):
    """Chunked encrypted upload with an explicit length shorter than the
    buffer; only the leading slice should be stored."""
    # parallel tests introduce random order of requests, can only run live
    if TestMode.need_recording_file(self.test_mode):
        return

    # Arrange
    self.bsc.key_encryption_key = KeyWrapper('key1')
    self.bsc.require_encryption = True
    content = self.get_random_bytes(self.config.max_single_put_size * 2)
    blob_name = self._get_blob_reference(BlobType.BlockBlob)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)

    # Act: upload only the first max_single_put_size + 53 bytes.
    blob.upload_blob(content, length=self.config.max_single_put_size + 53,
                     max_connections=3)
    blob_content = blob.download_blob().content_as_bytes(max_connections=3)

    # Assert
    self.assertEqual(content[:self.config.max_single_put_size + 53], blob_content)
def test_put_blob_range(self, resource_group, location, storage_account, storage_account_key):
    """Encrypted upload of a slice of a larger buffer with an explicit
    length, verifying the chunked-upload range handling."""
    self._setup(storage_account.name, storage_account_key)
    self.bsc.require_encryption = True
    self.bsc.key_encryption_key = KeyWrapper('key1')
    content = b'Random repeats' * self.config.max_single_put_size * 5

    # All page blob uploads call _upload_chunks, so this will test the ability
    # of that function to handle ranges even though it's a small blob
    blob_name = self._get_blob_reference(BlobType.BlockBlob)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)

    # Act: upload from offset 2 with an explicit truncating length.
    blob.upload_blob(content[2:], length=self.config.max_single_put_size + 5,
                     max_concurrency=1)
    blob_content = blob.download_blob().content_as_bytes(max_concurrency=1)

    # Assert
    self.assertEqual(content[2:2 + self.config.max_single_put_size + 5], blob_content)
async def test_encryption_nonmatching_kid(self, storage_account_name, storage_account_key): qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key, transport=AiohttpTestTransport()) # Arrange queue = await self._create_queue(qsc) queue.key_encryption_key = KeyWrapper('key1') await queue.send_message(u'message') # Act queue.key_encryption_key.kid = 'Invalid' # Assert with self.assertRaises(HttpResponseError) as e: messages = [] async for m in queue.receive_messages(): messages.append(m) assert "Decryption failed." in str(e.exception)
def test_update_encrypted_message(self, storage_account_name, storage_account_key):
    """Update an encrypted queue message and verify the new content
    round-trips through a subsequent receive."""
    # TODO: Recording doesn't work
    # Arrange
    qsc = QueueServiceClient(
        self.account_url(storage_account_name, "queue"), storage_account_key)
    queue = self._create_queue(qsc)
    queue.key_encryption_key = KeyWrapper('key1')
    queue.send_message(u'Update Me')

    messages = queue.receive_messages()
    list_result1 = next(messages)
    list_result1.content = u'Updated'

    # Act: re-encrypts the new content under the same KEK.
    # (The original bound the return value to an unused `message` local.)
    queue.update_message(list_result1)
    list_result2 = next(messages)

    # Assert
    self.assertEqual(u'Updated', list_result2.content)
def test_update_encrypted_message(
        self, resource_group, location, storage_account, storage_account_key):
    """Update an encrypted queue message and verify the new content
    round-trips through a subsequent receive."""
    # TODO: Recording doesn't work
    if not self.is_live:
        return

    # Arrange
    qsc = QueueServiceClient(self._account_url(storage_account.name), storage_account_key)
    queue = self._create_queue(qsc)
    queue.key_encryption_key = KeyWrapper('key1')
    queue.enqueue_message(u'Update Me')

    messages = queue.receive_messages()
    list_result1 = next(messages)
    list_result1.content = u'Updated'

    # Act: re-encrypts the new content under the same KEK.
    # (The original bound the return value to an unused `message` local.)
    queue.update_message(list_result1)
    list_result2 = next(messages)

    # Assert
    self.assertEqual(u'Updated', list_result2.content)
async def test_get_blob_to_star_async(self, storage_account_name, storage_account_key):
    """Download an encrypted blob via every reader — chunk iterator, bytes,
    stream, and text — and verify all agree with the original payload."""
    await self._setup(storage_account_name, storage_account_key)
    self.bsc.require_encryption = True
    self.bsc.key_encryption_key = KeyWrapper('key1')
    blob = await self._create_small_blob(BlobType.BlockBlob)

    # Act
    content = await blob.download_blob()
    iter_blob = b""
    async for data in content.chunks():
        iter_blob += data
    bytes_blob = await (await blob.download_blob()).content_as_bytes()
    stream_blob = BytesIO()
    await (await blob.download_blob()).download_to_stream(stream_blob)
    stream_blob.seek(0)
    text_blob = await (await blob.download_blob()).content_as_text()

    # Assert: self.bytes is the payload written by _create_small_blob.
    self.assertEqual(self.bytes, iter_blob)
    self.assertEqual(self.bytes, bytes_blob)
    self.assertEqual(self.bytes, stream_blob.read())
    self.assertEqual(self.bytes.decode(), text_blob)
async def _test_put_blob_range_async(self):
    """Encrypted upload of a slice of a larger buffer with an explicit
    length, verifying the chunked-upload range handling."""
    # Arrange
    await self._setup()
    self.bsc.require_encryption = True
    self.bsc.key_encryption_key = KeyWrapper('key1')
    content = b'Random repeats' * self.config.max_single_put_size * 5

    # All page blob uploads call _upload_chunks, so this will test the ability
    # of that function to handle ranges even though it's a small blob
    blob_name = self._get_blob_reference(BlobType.BlockBlob)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)

    # Act: upload from offset 2 with an explicit truncating length.
    await blob.upload_blob(content[2:], length=self.config.max_single_put_size + 5,
                           max_connections=1)
    blob_content = await (await blob.download_blob()).content_as_bytes(
        max_connections=1)

    # Assert
    self.assertEqual(content[2:2 + self.config.max_single_put_size + 5], blob_content)
def test_invalid_methods_fail_append(self):
    """Append-blob operations must raise ValueError when client-side
    encryption is configured (append blobs are unsupported)."""
    # Arrange
    self.bsc.key_encryption_key = KeyWrapper('key1')
    blob_name = self._get_blob_reference(BlobType.AppendBlob)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)

    # Assert
    with self.assertRaises(ValueError) as e:
        blob.append_block(urandom(32))
    self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

    with self.assertRaises(ValueError) as e:
        blob.create_append_blob()
    self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

    # All append_from operations funnel into append_from_stream, so testing one is sufficient
    with self.assertRaises(ValueError) as e:
        blob.upload_blob(b'To encrypt', blob_type=BlobType.AppendBlob)
    self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
async def test_get_messages_encrypted_resolver(self, storage_account_name, storage_account_key):
    """Receive must decrypt via the key_resolver_function when no KEK is set
    directly on the client."""
    # Arrange
    qsc = QueueServiceClient(
        self.account_url(storage_account_name, "queue"), storage_account_key,
        transport=AiohttpTestTransport())
    qsc.key_encryption_key = KeyWrapper('key1')
    queue = await self._create_queue(qsc)
    await queue.send_message(u'encrypted_message_2')
    key_resolver = KeyResolver()
    key_resolver.put_key(qsc.key_encryption_key)
    queue.key_resolver_function = key_resolver.resolve_key
    queue.key_encryption_key = None  # Ensure that the resolver is used

    # Act: keep the last received message.
    li = None
    async for m in queue.receive_messages():
        li = m

    # Assert
    self.assertEqual(li.content, u'encrypted_message_2')
async def _test_update_encrypted_message(self):
    """Update an encrypted queue message and verify the new content
    round-trips through a subsequent receive."""
    # TODO: Recording doesn't work
    if TestMode.need_recording_file(self.test_mode):
        return

    # Arrange
    queue = await self._create_queue()
    queue.key_encryption_key = KeyWrapper('key1')
    await queue.enqueue_message(u'Update Me')

    messages = []
    async for m in queue.receive_messages():
        messages.append(m)
    list_result1 = messages[0]
    list_result1.content = u'Updated'

    # Act: re-encrypts the new content under the same KEK.
    await queue.update_message(list_result1)

    # Re-receive into a FRESH list. The original test appended the second
    # receive onto `messages` and then read messages[0] — the locally
    # mutated object — so the assertion was a tautology that never checked
    # the server-side update.
    updated = []
    async for m in queue.receive_messages():
        updated.append(m)
    list_result2 = updated[0]

    # Assert
    self.assertEqual(u'Updated', list_result2.content)
async def test_missing_attribute_kek_wrap(self, resource_group, location, storage_account, storage_account_key): qsc = QueueServiceClient(self.account_url(storage_account.name, "queue"), storage_account_key, transport=AiohttpTestTransport()) # Arrange queue = await self._create_queue(qsc) valid_key = KeyWrapper('key1') # Act invalid_key_1 = lambda: None # functions are objects, so this effectively creates an empty object invalid_key_1.get_key_wrap_algorithm = valid_key.get_key_wrap_algorithm invalid_key_1.get_kid = valid_key.get_kid # No attribute wrap_key queue.key_encryption_key = invalid_key_1 with self.assertRaises(AttributeError): await queue.send_message(u'message') invalid_key_2 = lambda: None # functions are objects, so this effectively creates an empty object invalid_key_2.wrap_key = valid_key.wrap_key invalid_key_2.get_kid = valid_key.get_kid # No attribute get_key_wrap_algorithm queue.key_encryption_key = invalid_key_2 with self.assertRaises(AttributeError): await queue.send_message(u'message') invalid_key_3 = lambda: None # functions are objects, so this effectively creates an empty object invalid_key_3.get_key_wrap_algorithm = valid_key.get_key_wrap_algorithm invalid_key_3.wrap_key = valid_key.wrap_key # No attribute get_kid queue.key_encryption_key = invalid_key_3 with self.assertRaises(AttributeError): await queue.send_message(u'message')
async def test_update_encrypted_message(
        self, resource_group, location, storage_account, storage_account_key):
    """Update an encrypted queue message and verify the new content
    round-trips through a subsequent receive."""
    # TODO: Recording doesn't work
    # Guard first: no point constructing the client when not live.
    if not self.is_live:
        return

    # Arrange
    qsc = QueueServiceClient(self._account_url(storage_account.name), storage_account_key,
                             transport=AiohttpTestTransport())
    queue = await self._create_queue(qsc)
    queue.key_encryption_key = KeyWrapper('key1')
    await queue.send_message(u'Update Me')

    messages = []
    async for m in queue.receive_messages():
        messages.append(m)
    list_result1 = messages[0]
    list_result1.content = u'Updated'

    # Act: re-encrypts the new content under the same KEK.
    await queue.update_message(list_result1)

    # Re-receive into a FRESH list. The original test appended the second
    # receive onto `messages` and then read messages[0] — the locally
    # mutated object — so the assertion was a tautology that never checked
    # the server-side update.
    updated = []
    async for m in queue.receive_messages():
        updated.append(m)
    list_result2 = updated[0]

    # Assert
    self.assertEqual(u'Updated', list_result2.content)
async def test_update_encrypted_binary_message(
        self, resource_group, location, storage_account, storage_account_key):
    """Update an encrypted binary (base64-encoded) message and verify the
    new payload round-trips through a subsequent receive."""
    qsc = QueueServiceClient(self._account_url(storage_account.name), storage_account_key,
                             transport=AiohttpTestTransport())
    # Arrange
    queue = await self._create_queue(
        qsc,
        message_encode_policy=BinaryBase64EncodePolicy(),
        message_decode_policy=BinaryBase64DecodePolicy())
    queue.key_encryption_key = KeyWrapper('key1')
    binary_message = self.get_random_bytes(100)
    await queue.send_message(binary_message)

    messages = []
    async for m in queue.receive_messages():
        messages.append(m)
    list_result1 = messages[0]

    # Act: replace the content with fresh random bytes and push the update.
    binary_message = self.get_random_bytes(100)
    list_result1.content = binary_message
    await queue.update_message(list_result1)

    # Re-receive into a FRESH list. The original test appended the second
    # receive onto `messages` and then read messages[0] — the locally
    # mutated object — so the assertion was a tautology that never checked
    # the server-side update.
    updated = []
    async for m in queue.receive_messages():
        updated.append(m)
    list_result2 = updated[0]

    # Assert
    self.assertEqual(binary_message, list_result2.content)
def test_validate_encryption(self, storage_account_name, storage_account_key):
    """Download the raw ciphertext of an encrypted blob and decrypt it
    manually to prove client-side encryption actually took place."""
    self._setup(storage_account_name, storage_account_key)
    self.bsc.require_encryption = True
    kek = KeyWrapper('key1')
    self.bsc.key_encryption_key = kek
    blob = self._create_small_blob(BlobType.BlockBlob)

    # Act: disable decryption so we receive the stored ciphertext as-is.
    blob.require_encryption = False
    blob.key_encryption_key = None
    content = blob.download_blob()
    data = content.content_as_bytes()

    # Parse the encryption envelope stored in the blob's metadata.
    encryption_data = _dict_to_encryption_data(
        loads(content.properties.metadata['encryptiondata']))
    iv = encryption_data.content_encryption_IV
    content_encryption_key = _validate_and_unwrap_cek(encryption_data, kek, None)

    # AES-CBC decrypt and strip PKCS7 padding, mirroring the SDK's own path.
    cipher = _generate_AES_CBC_cipher(content_encryption_key, iv)
    decryptor = cipher.decryptor()
    unpadder = PKCS7(128).unpadder()

    content = decryptor.update(data) + decryptor.finalize()
    content = unpadder.update(content) + unpadder.finalize()

    self.assertEqual(self.bytes, content)
def test_update_encrypted_raw_text_message(self):
    """Update an encrypted message with no-op encoding policies and verify
    the raw unicode content round-trips."""
    # TODO: Recording doesn't work
    if TestMode.need_recording_file(self.test_mode):
        return

    # Arrange
    queue = self._create_queue()
    queue.key_encryption_key = KeyWrapper('key1')
    # No-op policies: the raw text is encrypted directly.
    queue._config.message_encode_policy = NoEncodePolicy()
    queue._config.message_decode_policy = NoDecodePolicy()
    raw_text = u'Update Me'
    queue.enqueue_message(raw_text)
    messages = queue.receive_messages()
    list_result1 = next(messages)

    # Act
    raw_text = u'Updated'
    list_result1.content = raw_text
    queue.update_message(list_result1)
    list_result2 = next(messages)

    # Assert
    self.assertEqual(raw_text, list_result2.content)
def test_missing_attribute_kek_unwrap(self):
    """Download must fail when the KEK lacks either unwrap-side method
    (unwrap_key or get_kid)."""
    # Shared between all services in decrypt_blob
    # Arrange
    self.bsc.require_encryption = True
    valid_key = KeyWrapper('key1')
    self.bsc.key_encryption_key = valid_key
    blob = self._create_small_blob(BlobType.BlockBlob)

    # Act
    # Note that KeyWrapper has a default value for key_id, so these Exceptions
    # are not due to non_matching kids.
    invalid_key_1 = lambda: None  # functions are objects, so this effectively creates an empty object
    invalid_key_1.get_kid = valid_key.get_kid
    # No attribute unwrap_key
    blob.key_encryption_key = invalid_key_1
    with self.assertRaises(HttpResponseError):
        blob.download_blob().content_as_bytes()

    invalid_key_2 = lambda: None  # functions are objects, so this effectively creates an empty object
    invalid_key_2.unwrap_key = valid_key.unwrap_key
    blob.key_encryption_key = invalid_key_2
    # No attribute get_kid
    with self.assertRaises(HttpResponseError):
        blob.download_blob().content_as_bytes()
async def test_put_blob_invalid_stream_type_async(
        self, resource_group, location, storage_account, storage_account_key):
    """Encrypted uploads accept bytes only: text streams raise TypeError on
    both the single-shot and the chunked code paths."""
    await self._setup(storage_account.name, storage_account_key)
    self.bsc.require_encryption = True
    self.bsc.key_encryption_key = KeyWrapper('key1')
    small_stream = StringIO(u'small')
    large_stream = StringIO(u'large' * self.config.max_single_put_size)
    blob_name = self._get_blob_reference(BlobType.BlockBlob)
    blob = self.bsc.get_blob_client(self.container_name, blob_name)

    # Assert
    # Block blob specific single shot
    with self.assertRaises(TypeError) as e:
        await blob.upload_blob(small_stream, length=5)
    self.assertTrue('Blob data should be of type bytes.' in str(e.exception))

    # Generic blob chunked
    with self.assertRaises(TypeError) as e:
        await blob.upload_blob(large_stream)
    self.assertTrue('Blob data should be of type bytes.' in str(e.exception))
async def test_update_encrypted_json_message(
        self, resource_group, location, storage_account, storage_account_key):
    """Update an encrypted JSON message and verify the new payload
    round-trips through a subsequent receive."""
    # TODO: Recording doesn't work
    # Guard first: no point constructing the client when not live.
    if not self.is_live:
        return

    # Arrange
    qsc = QueueServiceClient(self._account_url(storage_account.name), storage_account_key,
                             transport=AiohttpTestTransport())
    queue = await self._create_queue(qsc)
    queue.key_encryption_key = KeyWrapper('key1')
    # No-op policies: the JSON text is encrypted directly.
    queue._config.message_encode_policy = NoEncodePolicy()
    queue._config.message_decode_policy = NoDecodePolicy()

    message_dict = {'val1': 1, 'val2': '2'}
    json_text = dumps(message_dict)
    await queue.enqueue_message(json_text)

    messages = []
    async for m in queue.receive_messages():
        messages.append(m)
    list_result1 = messages[0]

    # Act: mutate the payload and push the update.
    message_dict['val1'] = 0
    message_dict['val2'] = 'updated'
    json_text = dumps(message_dict)
    list_result1.content = json_text
    await queue.update_message(list_result1)

    # Re-receive into a FRESH list. The original test appended the second
    # receive onto `messages` and then read messages[0] — the locally
    # mutated object — so the assertion was a tautology that never checked
    # the server-side update.
    updated = []
    async for m in queue.receive_messages():
        updated.append(m)
    list_result2 = updated[0]

    # Assert
    self.assertEqual(message_dict, loads(list_result2.content))
def test_update_encrypted_json_message(self, storage_account_name, storage_account_key):
    """Update an encrypted JSON message and verify the new payload
    round-trips through a subsequent receive."""
    # TODO: Recording doesn't work
    # Arrange
    qsc = QueueServiceClient(
        self.account_url(storage_account_name, "queue"), storage_account_key)
    # No encode/decode policy: the JSON text is encrypted directly.
    queue = self._create_queue(
        qsc, message_encode_policy=None, message_decode_policy=None)
    queue.key_encryption_key = KeyWrapper('key1')
    message_dict = {'val1': 1, 'val2': '2'}
    json_text = dumps(message_dict)
    queue.send_message(json_text)
    messages = queue.receive_messages()
    list_result1 = next(messages)

    # Act: mutate the payload and push the update.
    message_dict['val1'] = 0
    message_dict['val2'] = 'updated'
    json_text = dumps(message_dict)
    list_result1.content = json_text
    queue.update_message(list_result1)
    list_result2 = next(messages)

    # Assert
    self.assertEqual(message_dict, loads(list_result2.content))
async def test_validate_encryption(self, storage_account_name, storage_account_key): qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key, transport=AiohttpTestTransport()) # Arrange queue = await self._create_queue(qsc) kek = KeyWrapper('key1') queue.key_encryption_key = kek await queue.send_message(u'message') # Act queue.key_encryption_key = None # Message will not be decrypted li = await queue.peek_messages() message = li[0].content message = loads(message) encryption_data = message['EncryptionData'] wrapped_content_key = encryption_data['WrappedContentKey'] wrapped_content_key = _WrappedContentKey( wrapped_content_key['Algorithm'], b64decode( wrapped_content_key['EncryptedKey'].encode(encoding='utf-8')), wrapped_content_key['KeyId']) encryption_agent = encryption_data['EncryptionAgent'] encryption_agent = _EncryptionAgent( encryption_agent['EncryptionAlgorithm'], encryption_agent['Protocol']) encryption_data = _EncryptionData( b64decode(encryption_data['ContentEncryptionIV'].encode( encoding='utf-8')), encryption_agent, wrapped_content_key, {'EncryptionLibrary': VERSION}) message = message['EncryptedMessageContents'] content_encryption_key = kek.unwrap_key( encryption_data.wrapped_content_key.encrypted_key, encryption_data.wrapped_content_key.algorithm) # Create decryption cipher backend = backends.default_backend() algorithm = AES(content_encryption_key) mode = CBC(encryption_data.content_encryption_IV) cipher = Cipher(algorithm, mode, backend) # decode and decrypt data decrypted_data = _decode_base64_to_bytes(message) decryptor = cipher.decryptor() decrypted_data = (decryptor.update(decrypted_data) + decryptor.finalize()) # unpad data unpadder = PKCS7(128).unpadder() decrypted_data = (unpadder.update(decrypted_data) + unpadder.finalize()) decrypted_data = decrypted_data.decode(encoding='utf-8') # Assert self.assertEqual(decrypted_data, u'message')