def _delete_all_barbican_secrets(self):
    """Delete the Barbican secret behind every encrypted document.

    Looks up all documents whose storage policy is ``encrypted`` and
    asks the secrets manager to delete each one's backing secret.
    """
    encrypted_filter = {'metadata.storagePolicy': 'encrypted'}
    # NOTE(felipemonteiro): Don't pass `unique_only` because we want
    # all unique secret references (just the data section), not unique
    # documents, which considers all attributes.
    for encrypted_doc in db_api.document_get_all(**encrypted_filter):
        secrets_manager.SecretsManager.delete(encrypted_doc)
def _test_doc_substitution(self, document_mapping, secret_documents,
                           expected_data):
    """Create documents, run secret substitution and verify the result.

    :param document_mapping: Mapping used to generate the test payload.
    :param secret_documents: Secret documents to store alongside the
        generated document.
    :param expected_data: The ``data`` section expected on the
        substituted document.
    """
    payload = self.document_factory.gen_test(
        document_mapping, global_abstract=False)
    bucket_name = test_utils.rand_name('bucket')
    created_docs = self.create_documents(
        bucket_name, secret_documents + [payload[-1]])

    # The last created document, with its data replaced, is what
    # substitution should produce.
    expected_document = copy.deepcopy(created_docs[-1])
    expected_document['data'] = expected_data

    # All concrete (non-abstract) documents act as substitution sources.
    concrete_docs = db_api.document_get_all(
        **{'metadata.layeringDefinition.abstract': False})
    substitutor = secrets_manager.SecretsSubstitution(concrete_docs)
    substituted_docs = list(substitutor.substitute_all(created_docs))
    self.assertIn(expected_document, substituted_docs)
def _register_data_schemas(cls):
    """Dynamically detect schemas for document validation that have been
    registered by external services via ``DataSchema`` documents.

    Each detected schema is appended to ``cls.schema_versions_info`` with
    its identifier, schema body and version, flagged as registered.
    """
    for data_schema in db_api.document_get_all(
            schema=types.DATA_SCHEMA_SCHEMA):
        name = data_schema['metadata']['name']
        # When the name matches the expected schema pattern, only the
        # first two path segments form the schema identifier.
        if cls.schema_re.match(name):
            schema_id = '/'.join(name.split('/')[:2])
        else:
            schema_id = name
        cls.schema_versions_info.append({
            'id': schema_id,
            'schema': data_schema['data'],
            'version': '1.0',
            'registered': True,
        })
def _retrieve_substitution_sources(self):
    """Return all concrete documents as potential substitution sources."""
    concrete_filter = {'metadata.layeringDefinition.abstract': False}
    return db_api.document_get_all(**concrete_filter)