Example #1
    def _test_doc_substitution(self,
                               document_mapping,
                               substitution_sources,
                               expected_data,
                               encryption_sources=None,
                               cleartext_secrets=True):
        payload = self.document_factory.gen_test(document_mapping,
                                                 global_abstract=False)
        bucket_name = test_utils.rand_name('bucket')
        documents = self.create_documents(bucket_name,
                                          substitution_sources + [payload[-1]])

        expected_document = copy.deepcopy(documents[-1])
        expected_document['data'] = expected_data

        secret_substitution = secrets_manager.SecretsSubstitution(
            encryption_sources=encryption_sources,
            substitution_sources=substitution_sources,
            cleartext_secrets=cleartext_secrets)
        substituted_docs = list(secret_substitution.substitute_all(documents))
        self.assertIn(expected_document, substituted_docs)
Example #2
    def test_show_revision(self):
        payload = [
            base.DocumentFixture.get_minimal_fixture() for _ in range(4)
        ]
        bucket_name = test_utils.rand_name('bucket')
        documents = self.create_documents(bucket_name, payload)

        # Validate that each document points to the same revision.
        revision_ids = set([d['revision_id'] for d in documents])
        self.assertEqual(1, len(revision_ids))

        revision = self.show_revision(documents[0]['revision_id'])
        revision_view = self.view_builder.show(revision)

        expected_attrs = ('id', 'url', 'createdAt', 'validationPolicies',
                          'status')
        for attr in expected_attrs:
            self.assertIn(attr, revision_view)

        self.assertIsInstance(revision_view['validationPolicies'], list)
        self.assertEqual(revision_view['validationPolicies'], [])
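The assertions above only check key presence plus an empty validationPolicies list; a hypothetical rendering of revision_view (all values below are placeholders, not taken from the source) might look like:

# Hypothetical shape of `revision_view`; values are placeholders only.
revision_view = {
    'id': 1,
    'url': 'https://deckhand/api/v1.0/revisions/1',  # placeholder
    'createdAt': '2018-01-01 00:00:00.000000',       # placeholder
    'status': 'success',                             # placeholder
    'validationPolicies': [],
}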
Example #3
    def test_delete_all_documents(self):
        payload = self.documents_factory.gen_test(self.document_mapping)
        bucket_name = test_utils.rand_name('bucket')
        created_documents = self.create_documents(bucket_name, payload)
        self.assertIsInstance(created_documents, list)
        self.assertEqual(3, len(created_documents))

        deleted_documents = self.create_documents(bucket_name, [])

        # Verify that all the expected documents were deleted.
        self.assertEqual(
            sorted([(d['metadata']['name'], d['schema'])
                    for d in created_documents]),
            sorted([(d['name'], d['schema']) for d in deleted_documents]))

        # Verify that each deleted document is flagged as deleted, has a
        # deletion timestamp set, and has had its data cleared.
        for deleted_document in deleted_documents:
            self.assertTrue(deleted_document['deleted'])
            self.assertTrue(deleted_document['deleted_at'])
            self.assertEmpty(deleted_document['data'])
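assertEmpty is not part of stock unittest; it is presumably a small helper on the base test class. A minimal sketch, assuming it simply asserts that a container is empty or falsy, could be:

import unittest


class BaseTestCaseSketch(unittest.TestCase):
    # Hypothetical helper; the real base class may implement this differently.
    def assertEmpty(self, collection):
        self.assertFalse(collection,
                         'Expected %r to be empty.' % (collection,))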
Example #4
    def test_create_validation(self):
        rules = {
            'deckhand:create_cleartext_documents': '@',
            'deckhand:create_validation': '@'
        }
        self.policy.set_rules(rules)

        revision_id = self._create_revision()
        validation_name = test_utils.rand_name('validation')
        resp = self._create_validation(revision_id, validation_name,
                                       VALIDATION_RESULT)

        self.assertEqual(201, resp.status_code)
        expected_body = {
            'status': 'failure',
            'validator': {
                'name': 'promenade',
                'version': '1.1.2'
            }
        }
        self.assertEqual(expected_body, yaml.safe_load(resp.text))
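VALIDATION_RESULT is a module-level fixture not shown here. Judging from the expected response body, it is presumably a YAML payload along these lines (a sketch, not the actual fixture):

# Hypothetical stand-in for the VALIDATION_RESULT fixture referenced above,
# reconstructed from the expected response body.
VALIDATION_RESULT = """
---
status: failure
validator:
  name: promenade
  version: 1.1.2
"""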
Example #5
    def test_show_nonexistent_validation_entry_returns_404(self):
        rules = {'deckhand:create_cleartext_documents': '@',
                 'deckhand:create_validation': '@',
                 'deckhand:show_validation': '@'}
        self.policy.set_rules(rules)

        revision_id = self._create_revision()
        validation_name = test_utils.rand_name('validation')
        resp = self._create_validation(revision_id, validation_name,
                                       VALIDATION_FAILURE_RESULT)
        self.assertEqual(201, resp.status_code)
        expected_error = ('The requested validation entry 5 was not found for '
                          'validation name %s and revision ID %d.' % (
                              validation_name, revision_id))

        resp = self.app.simulate_get(
            '/api/v1.0/revisions/%s/validations/%s/entries/5' % (
                revision_id, validation_name),
            headers={'Content-Type': 'application/x-yaml'})
        self.assertEqual(404, resp.status_code)
        self.assertEqual(expected_error, yaml.safe_load(resp.text)['message'])
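_create_validation is a helper defined elsewhere in the test class. A plausible sketch, assuming it simply POSTs the validation payload to the same validations endpoint exercised above, is:

    # Hypothetical sketch of the _create_validation helper used above.
    def _create_validation(self, revision_id, validation_name, policy):
        return self.app.simulate_post(
            '/api/v1.0/revisions/%s/validations/%s' % (revision_id,
                                                       validation_name),
            headers={'Content-Type': 'application/x-yaml'},
            body=policy)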
Example #6
    def test_list_documents_by_revision_id_and_filters(self):
        payload = self.documents_factory.gen_test(self.document_mapping)
        bucket_name = test_utils.rand_name('bucket')
        document = self.create_documents(bucket_name, payload)[1]

        filters = {
            'schema': document['schema'],
            'metadata.name': document['metadata']['name'],
            'metadata.layeringDefinition.abstract':
                document['metadata']['layeringDefinition']['abstract'],
            'metadata.layeringDefinition.layer':
                document['metadata']['layeringDefinition']['layer']
        }

        documents = self.list_revision_documents(document['revision_id'],
                                                 **filters)

        self.assertEqual(1, len(documents))
        self.assertEqual(document, documents[0])
Example #7
    def test_get_documents_by_revision_id_and_wrong_filters(self):
        payload = base.DocumentFixture.get_minimal_fixture()
        bucket_name = test_utils.rand_name('bucket')
        document = self.create_documents(bucket_name, payload)[0]
        filters = {
            'schema': 'fake_schema',
            'metadata.name': 'fake_meta_name',
            'metadata.layeringDefinition.abstract':
                not document['metadata']['layeringDefinition']['abstract'],
            'metadata.layeringDefinition.layer': 'fake_layer',
            'metadata.label': 'fake_label'
        }

        documents = self.list_revision_documents(
            document['revision_id'], **filters)
        self.assertEmpty(documents)

        for filter_key, filter_val in filters.items():
            documents = self.list_revision_documents(
                document['revision_id'], **{filter_key: filter_val})
            self.assertEmpty(documents)
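Note that the per-key loop has to unpack a dict to pass a dynamically named keyword argument; writing filter_key=filter_val would pass a keyword literally named "filter_key". A standalone illustration:

# Passing a dynamically named keyword argument requires dict unpacking.
def list_docs(**filters):
    return filters


key, value = 'metadata.name', 'fake_meta_name'
assert list_docs(**{key: value}) == {'metadata.name': 'fake_meta_name'}
assert list_docs(key=value) == {'key': 'fake_meta_name'}  # not what we want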
Example #8
    def test_create_show_and_list_many_tags_with_data(self):
        expected_tags = []
        for _ in range(4):
            rand_prefix = test_utils.rand_name(self.__class__.__name__)
            tag = rand_prefix + '-Tag'
            data_key = rand_prefix + '-Key'
            data_val = rand_prefix + '-Val'

            db_api.revision_tag_create(
                self.revision_id, tag, {data_key: data_val})
            expected_tags.append({'tag': tag, 'data': {data_key: data_val}})
        expected_tags = sorted(expected_tags, key=lambda t: t['tag'])

        retrieved_tags = db_api.revision_tag_get_all(self.revision_id)
        self.assertEqual(4, len(retrieved_tags))

        retrieved_tags = [
            {k: t[k] for k in t.keys() if k in ('data', 'tag')}
            for t in retrieved_tags]
        self.assertEqual(expected_tags, retrieved_tags)
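The comprehension above keeps only the tag and data keys because the rows returned by db_api.revision_tag_get_all presumably also carry bookkeeping columns (IDs, timestamps). A hedged illustration of that filtering:

# Illustration only: the bookkeeping columns shown here are assumptions.
retrieved_tag = {
    'tag': 'Tag-abc',
    'data': {'Key-abc': 'Val-abc'},
    'created_at': '2018-01-01 00:00:00',  # assumed extra column
    'updated_at': None,                   # assumed extra column
}
trimmed = {k: retrieved_tag[k] for k in retrieved_tag if k in ('data', 'tag')}
assert trimmed == {'tag': 'Tag-abc', 'data': {'Key-abc': 'Val-abc'}}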
Example #9
    def test_show_validation_entry(self):
        rules = {'deckhand:create_cleartext_documents': '@',
                 'deckhand:create_validation': '@',
                 'deckhand:show_validation': '@'}
        self.policy.set_rules(rules)

        revision_id = self._create_revision()
        validation_name = test_utils.rand_name('validation')
        resp = self._create_validation(revision_id, validation_name,
                                       VALIDATION_FAILURE_RESULT)

        resp = self.app.simulate_get(
            '/api/v1.0/revisions/%s/validations/%s/entries/0' % (
                revision_id, validation_name),
            headers={'Content-Type': 'application/x-yaml'})
        self.assertEqual(200, resp.status_code)

        body = yaml.safe_load(resp.text)
        expected_body = {
            'name': validation_name,
            'status': 'failure',
            'createdAt': None,
            'expiresAfter': None,
            'errors': [
                {
                    'documents': [
                        {
                            'name': 'node-document-name',
                            'schema': 'promenade/Node/v1'
                        }, {
                            'name': 'kubernetes-masters',
                            'schema': 'promenade/Masters/v1'
                        }
                    ],
                    'message': 'Node has master role, but not included in '
                               'cluster masters list.'
                }
            ]
        }
        self.assertEqual(expected_body, body)
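As with VALIDATION_RESULT above, VALIDATION_FAILURE_RESULT is defined elsewhere; based on the expected entry body, it presumably resembles the following YAML (a sketch, not the actual fixture):

# Hypothetical stand-in for the VALIDATION_FAILURE_RESULT fixture,
# reconstructed from the expected entry body above.
VALIDATION_FAILURE_RESULT = """
---
status: failure
errors:
  - documents:
      - name: node-document-name
        schema: promenade/Node/v1
      - name: kubernetes-masters
        schema: promenade/Masters/v1
    message: Node has master role, but not included in cluster masters list.
"""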
Example #10
    def test_layering_documents_with_different_schemas(self):
        """Validate that attempting to layer documents with different schemas
        results in errors.
        """
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test({})

        # The region and site documents should result in no parent being
        # found, since their schemas no longer match their parents' schemas.
        for idx in range(2, 4):  # Only region/site have parent.
            prev_schema = documents[idx]['schema']
            documents[idx]['schema'] = test_utils.rand_name('schema')

            # Escape '[' and ']' so the regex matches them literally.
            expected_err = ("Missing parent document for document %s." %
                            documents[idx]).replace('[', '\\[').replace(
                                ']', '\\]')
            self.assertRaisesRegex(errors.MissingDocumentParent, expected_err,
                                   layering.DocumentLayering, documents)

            # Restore schema for next test run.
            documents[idx]['schema'] = prev_schema
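An alternative to escaping the brackets by hand is re.escape, which escapes every regex metacharacter in the rendered message; a minimal standalone illustration:

import re

rendered = "Missing parent document for document [{'schema': 'x/Y/v1'}]."
# re.escape escapes '[', ']' and every other regex metacharacter, so the
# resulting pattern matches the rendered text literally.
pattern = re.escape(rendered)
assert re.match(pattern, rendered)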
Example #11
    def test_create_duplicate_bucket(self):
        """Validates that creating a bucket with the exact same documents
        references the last revision via orig_revision_id, as in reality
        the bucket is merely a revision history placeholder for the first
        bucket, since nothing was changed.

        Note that this is different from creating a duplicate document or
        creating a duplicate document in a separate bucket.

        """
        bucket_name = test_utils.rand_name('bucket')
        payload = base.DocumentFixture.get_minimal_fixture()

        orig_documents = self.create_documents(bucket_name, [payload])
        duplicate_documents = self.create_documents(bucket_name, [payload])

        self.assertEqual(orig_documents[0]['revision_id'],
                         duplicate_documents[0]['orig_revision_id'])
        self.assertDictItemsAlmostEqual(
            sorted(orig_documents, key=lambda d: d['created_at']),
            sorted(duplicate_documents, key=lambda d: d['created_at']),
            ignore=['created_at', 'updated_at', 'revision_id', 'id'])
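assertDictItemsAlmostEqual is a custom assertion from the base test class. A minimal sketch of what it presumably does, comparing items pairwise while dropping the ignored keys, might be:

import unittest


class BaseTestCaseSketch(unittest.TestCase):
    # Hypothetical helper; the real implementation may differ.
    def assertDictItemsAlmostEqual(self, first, second, ignore=()):
        self.assertEqual(len(first), len(second))
        for expected, actual in zip(first, second):
            expected = {k: v for k, v in expected.items() if k not in ignore}
            actual = {k: v for k, v in actual.items() if k not in ignore}
            self.assertEqual(expected, actual)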
Example #12
    def test_list_multiple_revisions(self):
        docs_count = []
        for _ in range(3):
            doc_count = test_utils.rand_int(3, 9)
            docs_count.append(doc_count)

            payload = [
                base.DocumentFixture.get_minimal_fixture()
                for _ in range(doc_count)
            ]
            bucket_name = test_utils.rand_name('bucket')
            self.create_documents(bucket_name, payload)

        revisions = self.list_revisions()
        revisions_view = self.view_builder.list(revisions)

        self.assertIn('results', revisions_view)
        # Validate that all 3 revisions were returned.
        self.assertEqual(3, revisions_view['count'])

        # Validate that each revision entry in the view has an ID.
        for idx in range(len(docs_count)):
            self.assertIn('id', revisions_view['results'][idx])
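test_utils.rand_name and test_utils.rand_int are small randomness helpers used throughout these tests; minimal sketches (assumed, not the actual utilities) could be:

import random
import uuid


# Hypothetical stand-ins for the test_utils helpers used throughout.
def rand_name(prefix):
    """Return the prefix with a short random suffix appended."""
    return '%s-%s' % (prefix, uuid.uuid4().hex[:8])


def rand_int(minimum, maximum):
    """Return a random integer N such that minimum <= N <= maximum."""
    return random.randint(minimum, maximum)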
Example #13
    def test_get_documents_by_revision_id_and_filters(self):
        payload = self.documents_factory.gen_test(self.document_mapping)
        bucket_name = test_utils.rand_name('bucket')
        created_documents = self.create_documents(bucket_name, payload)

        for document in created_documents[1:]:
            filters = {
                'schema': document['schema'],
                'metadata.name': document['metadata']['name'],
                'metadata.layeringDefinition.abstract':
                    document['metadata']['layeringDefinition']['abstract'],
                'metadata.layeringDefinition.layer':
                    document['metadata']['layeringDefinition']['layer']
            }
            filtered_documents = self.list_revision_documents(
                document['revision_id'], **filters)

            self.assertEqual(1, len(filtered_documents))
            self.assertIsNone(filtered_documents[0].pop('orig_revision_id'))
            self.assertEqual(document, filtered_documents[0])
Example #14
    def _test_document_creation_view(self, count):
        # Test document creation view with the number of documents being
        # created specified by `count`.
        payload = [
            base.DocumentFixture.get_minimal_fixture() for _ in range(count)
        ]
        bucket_name = test_utils.rand_name('bucket')
        created_documents = self.create_documents(bucket_name, payload)
        document_view = self.view_builder.list(created_documents)

        self.assertIsInstance(document_view, list)
        self.assertEqual(count, len(document_view))

        expected_attrs = ('id', 'status', 'metadata', 'data', 'schema')
        for idx in range(count):
            for attr in expected_attrs:
                self.assertIn(attr, document_view[idx])
            for attr in ('bucket', 'revision'):
                self.assertIn(attr, document_view[idx]['status'])

        revision_ids = set([v['status']['revision'] for v in document_view])
        self.assertEqual([1], list(revision_ids))
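Concrete tests would then exercise the helper with different document counts; hypothetical callers might look like:

    # Hypothetical callers of the helper above.
    def test_document_creation_view_single_document(self):
        self._test_document_creation_view(1)

    def test_document_creation_view_many_documents(self):
        self._test_document_creation_view(4)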
Example #15
    def test_revision_diff_multi_bucket_created(self):
        revision_ids = []
        bucket_names = []

        for _ in range(3):
            payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
            bucket_name = test_utils.rand_name('bucket')
            bucket_names.append(bucket_name)
            documents = self.create_documents(bucket_name, payload)
            revision_id = documents[0]['revision_id']
            revision_ids.append(revision_id)

        # Between revision 1 and 0, 1 bucket is created.
        self._verify_buckets_status(
            0, revision_ids[0], {b: 'created' for b in bucket_names[:1]})

        # Between revision 2 and 0, 2 buckets are created.
        self._verify_buckets_status(
            0, revision_ids[1], {b: 'created' for b in bucket_names[:2]})

        # Between revision 3 and 0, 3 buckets are created.
        self._verify_buckets_status(
            0, revision_ids[2], {b: 'created' for b in bucket_names})
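_verify_buckets_status is shared by the revision-diff tests; a plausible sketch, assuming it diffs the two revisions through the database API and compares the resulting bucket-to-status mapping, is:

    # Hypothetical sketch of the _verify_buckets_status helper used above.
    def _verify_buckets_status(self, revision_id, comparison_revision_id,
                               expected):
        # Assumed to return a dict mapping bucket name to its diff status,
        # e.g. {'bucket-123': 'created'}.
        actual = db_api.revision_diff(revision_id, comparison_revision_id)
        self.assertEqual(expected, actual)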
Example #16
    def test_revision_diff_delete_then_recreate(self):
        payload = base.DocumentFixture.get_minimal_fixture()
        bucket_name = test_utils.rand_name('bucket')
        created_documents = self.create_documents(bucket_name, payload)
        revision_id_1 = created_documents[0]['revision_id']

        # Delete the previously created document.
        deleted_documents = self.create_documents(bucket_name, [])
        revision_id_2 = deleted_documents[0]['revision_id']

        # Recreate the previously deleted document.
        recreated_documents = self.create_documents(bucket_name, payload)
        revision_id_3 = recreated_documents[0]['revision_id']

        # Verify that the revision for recreated document compared to revision
        # for deleted document is created, ignoring order.
        self._verify_buckets_status(
            revision_id_2, revision_id_3, {bucket_name: 'created'})

        # Verify that the revision for recreated document compared to revision
        # for created document is unmodified, ignoring order.
        self._verify_buckets_status(
            revision_id_1, revision_id_3, {bucket_name: 'unmodified'})
Example #17
    def test_delete_and_create_document_in_same_payload(self):
        payload = self.documents_factory.gen_test(self.document_mapping)
        bucket_name = test_utils.rand_name('bucket')
        # Create just 1 document.
        self.create_documents(bucket_name, payload[0])

        # Push only payload[1]: this deletes the document from payload[0]
        # and creates the document from payload[1].
        documents = self.create_documents(bucket_name, payload[1])
        # Information about both the deleted and the created document should
        # be returned: the 1st document is the deleted one and the 2nd is
        # the created one.
        self.assertEqual(2, len(documents))
        # Check that deleted doc is formatted correctly.
        self.assertTrue(documents[0]['deleted'])
        self.assertTrue(documents[0]['deleted_at'])
        self.assertEmpty(documents[0]['data'])
        # Check that created doc isn't deleted.
        self.assertFalse(documents[1]['deleted'])

        for idx in range(2):
            self.assertEqual(documents[idx]['schema'], payload[idx]['schema'])
            self.assertEqual(documents[idx]['name'],
                             payload[idx]['metadata']['name'])
Example #18
    def test_create_duplicate_document_same_bucket_raises_exc(self):
        bucket_name = test_utils.rand_name('bucket')
        document = base.DocumentFixture.get_minimal_fixture()
        payload = [document, document.copy()]
        self.assertRaises(errors.DuplicateDocumentExists,
                          self.create_documents, bucket_name, payload)
Example #19
    def gen_test(self,
                 mapping,
                 site_abstract=True,
                 region_abstract=True,
                 global_abstract=True,
                 site_parent_selectors=None):
        """Generate the document template.

        Generate the document template based on the arguments passed to
        the constructor and to this function.

        :param mapping: A list of dictionaries that specify the "data" and
            "actions" parameters for each document. A valid mapping is::

                mapping = {
                    "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
                    "_SITE_DATA_1_": {"data": {"a": {"x": 7, "z": 3}, "b": 4}},
                    "_SITE_ACTIONS_1_": {
                        "actions": [{"method": "merge", "path": path}]}
                }

            Each key must be of the form "_{LAYER_NAME}_{KEY_NAME}_{N}_"
            where:

                - {LAYER_NAME} is the name of the layer ("global", "region",
                    "site")
                - {KEY_NAME} is one of "DATA", "ACTIONS", "NAME", "SCHEMA"
                    or "SUBSTITUTIONS"
                - {N} is the occurrence of the document based on the
                    values in ``docs_per_layer``. If ``docs_per_layer`` is
                    (1, 2) then _GLOBAL_DATA_1_, _SITE_DATA_1_, _SITE_DATA_2_,
                    _SITE_ACTIONS_1_ and _SITE_ACTIONS_2_ must be provided.
                    _GLOBAL_ACTIONS_{N}_ is ignored.

        :type mapping: dict
        :param site_abstract: Whether site layers are abstract (True) or
            concrete (False).
        :type site_abstract: boolean
        :param region_abstract: Whether region layers are abstract (True) or
            concrete (False).
        :type region_abstract: boolean
        :param global_abstract: Whether global layers are abstract (True) or
            concrete (False).
        :type global_abstract: boolean
        :param site_parent_selectors: Override the default parent selector
            for each site. Assuming that ``docs_per_layer`` is (2, 2), for
            example, a valid value is::

                [{'global': 'global1'}, {'global': 'global2'}]

            If not specified, each site will default to the first parent.
        :type site_parent_selectors: list
        :returns: Rendered template of the form specified above.
        """
        rendered_template = [self.layering_policy]
        layer_order = rendered_template[0]['data']['layerOrder']

        for layer_idx in range(self.num_layers):
            for count in range(self.docs_per_layer[layer_idx]):
                layer_template = copy.deepcopy(self.DOCUMENT_TEMPLATE)
                layer_name = layer_order[layer_idx]

                # Set name.
                name_key = "_%s_NAME_%d_" % (layer_name.upper(), count + 1)
                if name_key in mapping:
                    layer_template['metadata']['name'] = mapping[name_key]
                else:
                    layer_template['metadata']['name'] = "%s%d" % (
                        test_utils.rand_name(layer_name), count + 1)

                # Set schema.
                schema_key = "_%s_SCHEMA_%d_" % (layer_name.upper(), count + 1)
                if schema_key in mapping:
                    layer_template['schema'] = mapping[schema_key]

                # Set layer.
                layer_template['metadata']['layeringDefinition'][
                    'layer'] = layer_name

                # Set labels.
                layer_template['metadata']['labels'] = {
                    layer_name: "%s%d" % (layer_name, count + 1)
                }

                # Set parentSelector.
                if layer_name == 'site' and site_parent_selectors:
                    parent_selector = site_parent_selectors[count]
                    layer_template['metadata']['layeringDefinition'][
                        'parentSelector'] = parent_selector
                elif layer_idx > 0:
                    parent_selector = rendered_template[layer_idx]['metadata'][
                        'labels']
                    layer_template['metadata']['layeringDefinition'][
                        'parentSelector'] = parent_selector

                # Set abstract.
                if layer_name == 'site':
                    layer_template['metadata']['layeringDefinition'][
                        'abstract'] = site_abstract
                if layer_name == 'region':
                    layer_template['metadata']['layeringDefinition'][
                        'abstract'] = region_abstract
                if layer_name == 'global':
                    layer_template['metadata']['layeringDefinition'][
                        'abstract'] = global_abstract

                # Set data and actions.
                data_key = "_%s_DATA_%d_" % (layer_name.upper(), count + 1)
                actions_key = "_%s_ACTIONS_%d_" % (layer_name.upper(),
                                                   count + 1)
                sub_key = "_%s_SUBSTITUTIONS_%d_" % (layer_name.upper(),
                                                     count + 1)

                try:
                    layer_template['data'] = mapping[data_key]['data']
                except KeyError as e:
                    LOG.debug(
                        'Could not map %s because it was not found in '
                        'the `mapping` dict.', e.args[0])

                try:
                    layer_template['metadata']['layeringDefinition'][
                        'actions'] = mapping[actions_key]['actions']
                except KeyError as e:
                    LOG.debug(
                        'Could not map %s because it was not found in '
                        'the `mapping` dict.', e.args[0])

                try:
                    layer_template['metadata']['substitutions'] = mapping[
                        sub_key]
                except KeyError as e:
                    LOG.debug(
                        'Could not map %s because it was not found in '
                        'the `mapping` dict.', e.args[0])

                rendered_template.append(layer_template)

        return rendered_template
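Putting the docstring's mapping format to use, a hedged usage sketch (assuming the same factories module import used in Example #10; exact names vary because they are randomized) might look like:

# Usage sketch based on the docstring above; output names are randomized.
doc_factory = factories.DocumentFactory(2, [1, 1])
mapping = {
    "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
    "_SITE_DATA_1_": {"data": {"a": {"x": 7, "z": 3}, "b": 4}},
    "_SITE_ACTIONS_1_": {
        "actions": [{"method": "merge", "path": ".a"}]},
}
documents = doc_factory.gen_test(mapping, site_abstract=False)

# documents[0] is the layering policy; the rest follow layerOrder.
assert documents[0]['schema'].startswith('deckhand/LayeringPolicy')
assert [d['metadata']['layeringDefinition']['layer']
        for d in documents[1:]] == ['global', 'site']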
Example #20
    def __init__(self, num_layers, docs_per_layer):
        """Constructor for ``DocumentFactory``.

        Returns a template whose JSON representation is of the form::

            [{'data': {'layerOrder': ['global', 'region', 'site']},
              'metadata': {'name': 'layering-policy',
                           'schema': 'metadata/Control/v1'},
              'schema': 'deckhand/LayeringPolicy/v1'},
             {'data': {'a': 1, 'b': 2},
              'metadata': {'labels': {'global': 'global1'},
                           'layeringDefinition': {'abstract': True,
                                                  'actions': [],
                                                  'layer': 'global',
                                                  'parentSelector': ''},
                           'name': 'global1',
                           'schema': 'metadata/Document/v1'},
              'schema': 'example/Kind/v1'}
             ...
            ]

        :param num_layers: Total number of layers. Supported values are 1, 2
            and 3.
        :type num_layers: integer
        :param docs_per_layer: The number of documents to be included per
            layer. For example, if ``num_layers`` is 3, then ``docs_per_layer``
            can be (1, 1, 1) for 1 document for each layer or (1, 2, 3) for 1
            doc for the 1st layer, 2 docs for the 2nd layer, and 3 docs for the
            3rd layer.
        :type docs_per_layer: tuple, list
        :raises TypeError: If ``docs_per_layer`` is not the right type.
        :raises ValueError: If ``num_layers`` is not the right value or isn't
            compatible with ``docs_per_layer``.
        """
        # Set up the layering definition's layerOrder.
        if num_layers == 1:
            layer_order = ["global"]
        elif num_layers == 2:
            layer_order = ["global", "site"]
        elif num_layers == 3:
            layer_order = ["global", "region", "site"]
        else:
            raise ValueError("'num_layers' must be a value between 1 - 3.")
        self.layering_policy = copy.deepcopy(self.LAYERING_POLICY_TEMPLATE)
        self.layering_policy['metadata']['name'] = test_utils.rand_name(
            'layering-policy')
        self.layering_policy['data']['layerOrder'] = layer_order
        self.layering_policy['metadata']['layeringDefinition'][
            'layer'] = layer_order[0]

        if not isinstance(docs_per_layer, (list, tuple)):
            raise TypeError("'docs_per_layer' must be a list or tuple "
                            "indicating the number of documents per layer.")
        elif len(docs_per_layer) != num_layers:
            raise ValueError("The number of entries in 'docs_per_layer' must "
                             "be equal to the value of 'num_layers'.")

        for doc_count in docs_per_layer:
            if doc_count < 1:
                raise ValueError(
                    "Each entry in 'docs_per_layer' must be >= 1.")

        self.num_layers = num_layers
        self.docs_per_layer = docs_per_layer
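The argument checks above translate into the following caller-facing behavior; a short sketch, again assuming the factories import used by the tests:

# Illustration of the constructor's argument validation shown above.
factories.DocumentFactory(3, (1, 2, 3))          # OK: 3 layers, 3 counts

try:
    factories.DocumentFactory(4, (1, 1, 1, 1))   # unsupported layer count
except ValueError as exc:
    print(exc)

try:
    factories.DocumentFactory(2, 5)              # not a list or tuple
except TypeError as exc:
    print(exc)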