Example #1
0
    def on_get(self, req, resp, sanitized_params, revision_id):
        """Returns all documents for a `revision_id`.

        Returns a multi-document YAML response containing all the documents
        matching the filters specified via query string parameters. Returned
        documents will be as originally posted with no substitutions or
        layering applied.
        """
        include_encrypted = policy.conditional_authorize(
            'deckhand:list_encrypted_documents', req.context, do_raise=False)

        # Extract sorting controls before the remaining params become filters.
        order_by = sanitized_params.pop('order', None)
        sort_by = sanitized_params.pop('sort', None)

        filters = dict(sanitized_params)
        storage_policies = ['cleartext']
        if include_encrypted:
            storage_policies.append('encrypted')
        filters['metadata.storagePolicy'] = storage_policies
        # Never return deleted documents to the user.
        filters['deleted'] = False

        try:
            documents = db_api.revision_documents_get(revision_id, **filters)
        except errors.RevisionNotFound as e:
            LOG.exception(six.text_type(e))
            raise falcon.HTTPNotFound(description=e.format_message())

        # Sorts by creation date by default.
        documents = utils.multisort(documents, sort_by, order_by)

        resp.status = falcon.HTTP_200
        resp.body = self.view_builder.list(documents)
Example #2
0
    def on_put(self, req, resp, bucket_name=None):
        """Create a revision from the documents PUT to ``bucket_name``.

        Parses the request payload, pre-validates the documents, enforces
        policy for documents that require encryption, encrypts secret
        documents, then persists everything as a new revision.
        """
        data = self.from_yaml(req, expect_list=True, allow_empty=True)
        documents = document_wrapper.DocumentDict.from_list(data)

        # NOTE: Must validate documents before doing policy enforcement,
        # because we expect certain formatting of the documents while doing
        # policy enforcement. If any documents fail basic schema validation
        # raise an exception immediately.
        data_schemas = db_api.revision_documents_get(
            schema=types.DATA_SCHEMA_SCHEMA, deleted=False)
        try:
            doc_validator = document_validation.DocumentValidation(
                documents, data_schemas, pre_validate=True)
            validations = doc_validator.validate_all()
        except deckhand_errors.InvalidDocumentFormat as e:
            # Log, then let the original exception propagate to the caller.
            with excutils.save_and_reraise_exception():
                LOG.exception(e.format_message())

        # The policy check is needed only if at least one document requires
        # encryption, so stop scanning at the first match.
        for document in documents:
            if secrets_manager.SecretsManager.requires_encryption(document):
                policy.conditional_authorize(
                    'deckhand:create_encrypted_documents', req.context)
                break

        documents = self._encrypt_secret_documents(documents)

        created_documents = self._create_revision_documents(
            bucket_name, documents)

        # All created documents share one revision; record the pre-validation
        # results against it.
        if created_documents:
            revision_id = created_documents[0]['revision_id']
            self._create_revision_validations(revision_id, validations)

        resp.body = self.view_builder.list(created_documents)
        resp.status = falcon.HTTP_200
Example #3
0
    def _retrieve_documents_for_rendering(self, revision_id, **filters):
        """Retrieve all necessary documents needed for rendering.

        If a layering policy isn't found in the current revision, retrieve it
        in a subsequent call and add it to the list of documents.
        """
        try:
            documents = db_api.revision_documents_get(revision_id, **filters)
        except errors.RevisionNotFound as e:
            LOG.exception(six.text_type(e))
            raise falcon.HTTPNotFound(description=e.format_message())

        has_layering_policy = any(
            doc['schema'].startswith(types.LAYERING_POLICY_SCHEMA)
            for doc in documents)
        if not has_layering_policy:
            # Look the layering policy up outside this revision's documents.
            try:
                layering_policy = db_api.document_get(
                    deleted=False, schema=types.LAYERING_POLICY_SCHEMA)
            except errors.DocumentNotFound as e:
                # No layering policy exists anywhere; log and carry on with
                # the documents we have.
                LOG.exception(e.format_message())
            else:
                documents.append(layering_policy)

        return documents
Example #4
0
    def on_get(self, req, resp, revision_id):
        """Return the rendered documents for ``revision_id``.

        Documents are rendered (layered/substituted) via the cache-aware
        helper; user filters, sorting, and limiting are applied only after
        rendering completes.
        """
        include_encrypted = policy.conditional_authorize(
            'deckhand:list_encrypted_documents', req.context, do_raise=False)
        filters = {
            'metadata.storagePolicy': ['cleartext'],
            'deleted': False
        }
        if include_encrypted:
            filters['metadata.storagePolicy'].append('encrypted')

        # Defaults to True when the query parameter is absent.
        cleartext_secrets = req.get_param_as_bool('cleartext-secrets')
        if cleartext_secrets is None:
            cleartext_secrets = True
        # Remove it from params so it is not later treated as a user filter.
        req.params.pop('cleartext-secrets', None)
        rendered_documents, cache_hit = common.get_rendered_docs(
            revision_id, cleartext_secrets, **filters)

        # If the rendered documents result set is cached, then post-validation
        # for that result set has already been performed successfully, so it
        # can be safely skipped over as an optimization.
        if not cache_hit:
            data_schemas = db_api.revision_documents_get(
                schema=types.DATA_SCHEMA_SCHEMA, deleted=False)
            validator = document_validation.DocumentValidation(
                rendered_documents, data_schemas, pre_validate=False)
            engine.validate_render(revision_id, rendered_documents, validator)

        # Filters to be applied post-rendering, because many documents are
        # involved in rendering. User filters can only be applied once all
        # documents have been rendered. Note that `layering` module only
        # returns concrete documents, so no filtering for that is needed here.
        order_by = req.params.pop('order', None)
        sort_by = req.params.pop('sort', None)
        # NOTE(review): `limit` is taken straight from query params, so it is
        # presumably a string here — confirm it is sanitized to an int
        # upstream, otherwise the slice below would raise TypeError.
        limit = req.params.pop('limit', None)
        user_filters = req.params.copy()

        if not cleartext_secrets:
            rendered_documents = utils.redact_documents(rendered_documents)

        rendered_documents = [
            d for d in rendered_documents if utils.deepfilter(
                d, **user_filters)]

        if sort_by:
            rendered_documents = utils.multisort(
                rendered_documents, sort_by, order_by)

        if limit is not None:
            rendered_documents = rendered_documents[:limit]

        resp.status = falcon.HTTP_200
        resp.body = self.view_builder.list(rendered_documents)
 def _post_validate(self, documents):
     """Schema-validate documents after rendering.

     Rendering and substitution can break documents, so re-run schema
     validation and translate any failure into an internal server error.
     """
     data_schemas = db_api.revision_documents_get(
         deleted=False, schema=types.DATA_SCHEMA_SCHEMA)
     validator = document_validation.DocumentValidation(
         documents, data_schemas)
     try:
         validator.validate_all()
     except errors.InvalidDocumentFormat as e:
         msg = e.format_message()
         LOG.error('Failed to post-validate rendered documents.')
         LOG.exception(msg)
         raise falcon.HTTPInternalServerError(description=msg)
    def _post_validate(self, rendered_documents):
        """Schema-validate documents after rendering.

        Rendering and substitution can break documents, so re-run schema
        validation afterwards; any failure is surfaced as an internal
        (500-level) ``InvalidDocumentFormat`` error.
        """
        data_schemas = db_api.revision_documents_get(
            schema=types.DATA_SCHEMA_SCHEMA, deleted=False)
        doc_validator = document_validation.DocumentValidation(
            rendered_documents, data_schemas, pre_validate=False)
        try:
            validations = doc_validator.validate_all()
        except errors.InvalidDocumentFormat as e:
            with excutils.save_and_reraise_exception():
                # Post-rendering validation errors likely indicate an internal
                # rendering bug, so override the default code to 500.
                e.code = 500
                LOG.error('Failed to post-validate rendered documents.')
                LOG.exception(e.format_message())
        else:
            failures = []
            diagnostic_keys = (
                'schema_path', 'validation_schema', 'error_section')
            for validation in validations:
                if validation['status'] != 'failure':
                    continue
                for error in validation['errors']:
                    diagnostic = {}
                    for key in error:
                        if key in diagnostic_keys:
                            diagnostic[key] = error[key]
                    failures.append(vm.ValidationMessage(
                        message=error['message'],
                        name=vm.DOCUMENT_POST_RENDERING_FAILURE,
                        doc_schema=error['schema'],
                        doc_name=error['name'],
                        doc_layer=error['layer'],
                        diagnostic=diagnostic))

            if failures:
                raise errors.InvalidDocumentFormat(
                    error_list=failures,
                    reason='Validation'
                )
Example #7
0
    def on_put(self, req, resp, bucket_name=None):
        """Create a revision from the raw YAML documents PUT to a bucket.

        Reads and parses the request body as multi-document YAML,
        pre-validates the documents, enforces policy for encrypted
        documents, then persists everything as a new revision.
        """
        document_data = req.stream.read(req.content_length or 0)
        try:
            documents = list(yaml.safe_load_all(document_data))
        except yaml.YAMLError as e:
            error_msg = ("Could not parse the document into YAML data. "
                         "Details: %s." % e)
            LOG.error(error_msg)
            raise falcon.HTTPBadRequest(description=six.text_type(e))

        # NOTE: Must validate documents before doing policy enforcement,
        # because we expect certain formatting of the documents while doing
        # policy enforcement. If any documents fail basic schema validation
        # raise an exception immediately.
        data_schemas = db_api.revision_documents_get(
            schema=types.DATA_SCHEMA_SCHEMA, deleted=False)
        try:
            doc_validator = document_validation.DocumentValidation(
                documents, data_schemas, pre_validate=True)
            validations = doc_validator.validate_all()
        except deckhand_errors.InvalidDocumentFormat as e:
            LOG.exception(e.format_message())
            raise falcon.HTTPBadRequest(description=e.format_message())

        # The policy check is needed only if at least one document is marked
        # for encryption, so stop scanning at the first match.
        for document in documents:
            if document['metadata'].get('storagePolicy') == 'encrypted':
                policy.conditional_authorize(
                    'deckhand:create_encrypted_documents', req.context)
                break

        self._prepare_secret_documents(documents)

        created_documents = self._create_revision_documents(
            bucket_name, documents, validations)

        resp.body = self.view_builder.list(created_documents)
        resp.status = falcon.HTTP_200
Example #8
0
 def list_revision_documents(self, revision_id, **filters):
     """Fetch the documents for ``revision_id`` and validate each one."""
     docs = db_api.revision_documents_get(revision_id, **filters)
     for doc in docs:
         self.validate_document(doc)
     return docs
Example #9
0
def revision_diff(revision_id, comparison_revision_id, deepdiff=False):
    """Generate the diff between two revisions.

    Generate the diff between the two revisions: `revision_id` and
    `comparison_revision_id`.
    a. When deepdiff=False: A basic comparison of the revisions in terms of
    how the buckets involved have changed is generated. Only buckets with
    existing documents in either of the two revisions in question will be
    reported.
    b. When deepdiff=True: Along with the basic comparison, a deep diff
    between the revisions' modified buckets is generated.

    Only in the case of a basic diff, the ordering of the two revision IDs is
    interchangeable, i.e. no matter the order, the same result is generated.

    The differences include:

        - "created": A bucket has been created between the revisions.
        - "deleted": A bucket has been deleted between the revisions.
        - "modified": A bucket has been modified between the revisions.
                      When deepdiff is enabled, it also includes the deep
                      difference between the revisions.
        - "unmodified": A bucket remains unmodified between the revisions.

    :param revision_id: ID of the first revision.
    :param comparison_revision_id: ID of the second revision.
    :param deepdiff: Whether a deep diff is needed or not.
    :returns: A dictionary, keyed with the bucket IDs, containing any of the
        differences enumerated above.

    Examples Diff::

        # GET /api/v1.0/revisions/6/diff/3
        bucket_a: created
        bucket_b: deleted
        bucket_c: modified
        bucket_d: unmodified

        # GET /api/v1.0/revisions/0/diff/6
        bucket_a: created
        bucket_c: created
        bucket_d: created

        # GET /api/v1.0/revisions/6/diff/6
        bucket_a: unmodified
        bucket_c: unmodified
        bucket_d: unmodified

        # GET /api/v1.0/revisions/0/diff/0
        {}

    Examples DeepDiff::

        # GET /api/v1.0/revisions/3/deepdiff/4
        bucket_a: modified
        bucket_a diff:
          document_changed:
            count: 1
            details:
              ('example/Kind/v1', 'doc-b'):
                data_changed:
                  values_changed:
                    root['foo']: {new_value: 3, old_value: 2}
                metadata_changed: {}

        # GET /api/v1.0/revisions/2/deepdiff/3
        bucket_a: modified
        bucket_a diff:
          document_added:
            count: 1
            details:
            - [example/Kind/v1, doc-c]

        # GET /api/v1.0/revisions/0/deepdiff/0
        {}

        # GET /api/v1.0/revisions/0/deepdiff/3
        bucket_a: created
    """
    if deepdiff:
        # Deep diffs compare the rendered documents rather than raw history.
        docs = (_rendered_doc(revision_id) if revision_id != 0 else [])
        comparison_docs = (_rendered_doc(comparison_revision_id)
                           if comparison_revision_id != 0 else [])
    else:
        # Retrieve document history for each revision. Since `revision_id` of 0
        # doesn't exist, treat it as a special case: empty list.
        docs = (db_api.revision_documents_get(
            revision_id, include_history=True, unique_only=False)
                if revision_id != 0 else [])
        comparison_docs = (db_api.revision_documents_get(
            comparison_revision_id, include_history=True, unique_only=False)
                           if comparison_revision_id != 0 else [])

    # Remove each deleted document and its older counterparts because those
    # documents technically don't exist.
    docs = utils.exclude_deleted_documents(docs)
    comparison_docs = utils.exclude_deleted_documents(comparison_docs)

    revision = db_api.revision_get(revision_id) if revision_id != 0 else None
    comparison_revision = (db_api.revision_get(comparison_revision_id)
                           if comparison_revision_id != 0 else None)

    # Each dictionary below, keyed with the bucket's name, references the list
    # of documents related to each bucket.
    buckets = {}
    comparison_buckets = {}
    for doc in docs:
        buckets.setdefault(doc['bucket_name'], [])
        buckets[doc['bucket_name']].append(doc)
    for doc in comparison_docs:
        comparison_buckets.setdefault(doc['bucket_name'], [])
        comparison_buckets[doc['bucket_name']].append(doc)

    # `shared_buckets` references buckets shared by both `revision_id` and
    # `comparison_revision_id` -- i.e. their intersection.
    shared_buckets = set(buckets.keys()).intersection(
        comparison_buckets.keys())
    # `unshared_buckets` references buckets not shared by both `revision_id`
    # and `comparison_revision_id` -- i.e. their non-intersection.
    unshared_buckets = set(buckets.keys()).union(
        comparison_buckets.keys()) - shared_buckets

    result = {}

    def _compare_buckets(b1, b2):
        # Checks whether buckets' documents are identical by comparing the
        # sorted (data_hash, metadata_hash) pairs of their documents.
        return (sorted([
            (d['data_hash'], d['metadata_hash']) for d in b1
        ]) == sorted([(d['data_hash'], d['metadata_hash']) for d in b2]))

    # If the list of documents for each bucket is identical, then the result
    # is "unmodified", else "modified".
    for bucket_name in shared_buckets:
        unmodified = _compare_buckets(buckets[bucket_name],
                                      comparison_buckets[bucket_name])
        if unmodified:
            result[bucket_name] = 'unmodified'
        else:
            result[bucket_name] = 'modified'
            # If deepdiff enabled
            if deepdiff:
                # find out diff between buckets
                bucket_diff = _diff_buckets(buckets[bucket_name],
                                            comparison_buckets[bucket_name])
                result[bucket_name + ' diff'] = bucket_diff

    for bucket_name in unshared_buckets:
        # If neither revision has documents, then there's nothing to compare.
        # This is always True for revision_id == comparison_revision_id == 0.
        if not any([revision, comparison_revision]):
            break
        # Else if one revision == 0 and the other revision != 0, then the
        # bucket has been created. Which is zero or non-zero doesn't matter.
        elif not all([revision, comparison_revision]):
            result[bucket_name] = 'created'
        # Else if `revision` is newer than `comparison_revision`, then if the
        # `bucket_name` isn't in the `revision` buckets, then it has been
        # deleted. Otherwise it has been created.
        elif revision['created_at'] > comparison_revision['created_at']:
            if bucket_name not in buckets:
                result[bucket_name] = 'deleted'
            elif bucket_name not in comparison_buckets:
                result[bucket_name] = 'created'
        # Else if `comparison_revision` is newer than `revision`, then if the
        # `bucket_name` isn't in the `revision` buckets, then it has been
        # created. Otherwise it has been deleted.
        else:
            if bucket_name not in buckets:
                result[bucket_name] = 'created'
            elif bucket_name not in comparison_buckets:
                result[bucket_name] = 'deleted'

    return result