def test_jsonpath_parse_replace_cache(self):
    """Validate caching for both parsing and replacing functions.

    Patches ``jsonpath_ng.parse`` with a counting wrapper so that we can
    assert the underlying parser is invoked at most once even though the
    utility functions are called repeatedly with the same path.
    """
    path = ".values.endpoints.admin"
    expected = {'values': {'endpoints': {'admin': 'foo'}}}
    # Mock jsonpath_ng to return a monkey-patched parse function that
    # keeps track of call count and yet calls the actual function.
    with mock.patch.object(
            utils, 'jsonpath_ng',  # noqa: H210
            parse=self.fake_jsonpath_ng):
        # Though this is called 3 times, the cached function should only
        # be called once, with the cache returning the cached value early.
        for _ in range(3):
            result = utils.jsonpath_replace({}, 'foo', path)
            self.assertEqual(expected, result)
        # Though this is called 3 times, the cached function should only
        # be called once, with the cache returning the cached value early.
        for _ in range(3):
            result = utils.jsonpath_parse(expected, path)
            self.assertEqual('foo', result)
    # Assert that the actual function was called <= 1 times. (Allow for 0
    # in case CI jobs clash.)
    self.assertThat(self.jsonpath_call_count,
                    MatchesAny(Equals(0), Equals(1)))
def _generate_validation_error_output(self, schema, document, error,
                                      root_path):
    """Build a dictionary of debugging details for a validation failure.

    The returned dictionary contains the following keys:

    * validation_schema: The schema body that was used to validate the
      document.
    * schema_path: The JSON path in the schema where the failure
      originated.
    * name: The document name.
    * schema: The document schema.
    * path: The JSON path in the document where the failure originated.
    * error_section: The "section" in the document above which the error
      originated (i.e. the dict in which ``path`` is found).
    * message: The error message returned by the ``jsonschema`` validator.

    :returns: Dictionary in the above format.
    """
    # Compose the JSON path inside the document at which the failure
    # occurred, rooted at ``root_path``.
    document_segments = [str(segment) for segment in error.path]
    if document_segments:
        path_to_error_in_document = '.'.join(
            [root_path, '.'.join(document_segments)])
    else:
        path_to_error_in_document = root_path

    path_to_error_in_schema = '.' + '.'.join(
        str(segment) for segment in error.schema_path)

    # Strip the final path component to locate the enclosing section;
    # fall back to the document root when nothing remains.
    parent_path_to_error_in_document = '.'.join(
        path_to_error_in_document.split('.')[:-1]) or '.'

    try:
        # NOTE(fmontei): Because validation is performed on fully rendered
        # documents, it is necessary to omit the parts of the data section
        # where substitution may have occurred to avoid exposing any
        # secrets. While this may make debugging a few validation failures
        # more difficult, it is a necessary evil.
        sanitized_document = (
            SecretsSubstitution.sanitize_potential_secrets(error, document))
        parent_error_section = utils.jsonpath_parse(
            sanitized_document, parent_path_to_error_in_document)
    except Exception:
        parent_error_section = (
            'Failed to find parent section above where error occurred.')

    return {
        'validation_schema': schema,
        'schema_path': path_to_error_in_schema,
        'name': document.name,
        'schema': document.schema,
        'layer': document.layer,
        'path': path_to_error_in_document,
        'error_section': parent_error_section,
        'message': error.message,
    }
def _get_schema_parts(document, schema_key='schema'):
    """Split a document's schema string into prefix and version.

    The schema value is expected to look like ``<app>/<kind>/<version>``;
    the prefix is the first two components re-joined with ``/``.

    :param document: Document from which the schema string is read.
    :param schema_key: JSON path at which the schema string is stored.
    :returns: Tuple of (schema prefix, schema version).
    """
    raw_schema = utils.jsonpath_parse(document, schema_key)
    components = raw_schema.split('/')
    prefix = '/'.join(components[:2])
    return prefix, components[2]
def substitute_all(self, documents):
    """Substitute all documents that have a `metadata.substitutions` field.

    Concrete (non-abstract) documents can be used as a source of
    substitution into other documents. This substitution is
    layer-independent, a document in the region layer could insert data
    from a document in the site layer.

    :param documents: List of documents that are candidates for
        substitution.
    :type documents: dict or List[dict]
    :returns: List of fully substituted documents.
    :rtype: Generator[:class:`DocumentDict`]
    :raises SubstitutionSourceNotFound: If a substitution source document
        is referenced by another document but wasn't found.
    :raises UnknownSubstitutionError: If an unknown error occurred during
        substitution.
    """
    documents_to_substitute = []
    if not isinstance(documents, list):
        documents = [documents]

    for document in documents:
        # Normalize plain dicts into DocumentDict wrappers so attribute
        # access (e.g. ``document.substitutions``) works uniformly.
        if not isinstance(document, dd):
            document = dd(document)
        # If the document has substitutions include it.
        if document.substitutions:
            documents_to_substitute.append(document)

    LOG.debug('Performing substitution on following documents: %s',
              ', '.join(['[%s, %s] %s' % d.meta
                         for d in documents_to_substitute]))

    for document in documents_to_substitute:
        # Tracks whether any encrypted source was substituted into this
        # document, so the destination can be redacted afterward.
        redact_dest = False
        LOG.debug('Checking for substitutions for document [%s, %s] %s.',
                  *document.meta)
        for sub in document.substitutions:
            src_schema = sub['src']['schema']
            src_name = sub['src']['name']
            src_path = sub['src']['path']

            if (src_schema, src_name) in self._substitution_sources:
                src_doc = self._substitution_sources[
                    (src_schema, src_name)]
            else:
                message = ('Could not find substitution source document '
                           '[%s] %s among the provided substitution '
                           'sources.' % (src_schema, src_name))
                # Missing sources are fatal only when configured so;
                # otherwise the substitution is skipped with a warning.
                if self._fail_on_missing_sub_src:
                    LOG.error(message)
                    raise errors.SubstitutionSourceNotFound(
                        src_schema=src_schema, src_name=src_name,
                        document_schema=document.schema,
                        document_name=document.name)
                else:
                    LOG.warning(message)
                    continue

            if src_doc.is_encrypted:
                redact_dest = True

            # If the data is a dictionary, retrieve the nested secret
            # via jsonpath_parse, else the secret is the primitive/string
            # stored in the data section itself.
            if isinstance(src_doc.get('data'), dict):
                src_secret = utils.jsonpath_parse(
                    src_doc.get('data', {}), src_path)
            else:
                src_secret = src_doc.get('data')

            self._check_src_secret_is_not_none(src_secret, src_path,
                                               src_doc, document)

            # If the document has storagePolicy == encrypted then resolve
            # the Barbican reference into the actual secret.
            if src_doc.is_encrypted and src_doc.has_barbican_ref:
                src_secret = self.get_unencrypted_data(
                    src_secret, src_doc, document)

            # ``dest`` may be a single mapping or a list of them; track
            # which form was given so redaction below writes back to the
            # correct location.
            if not isinstance(sub['dest'], list):
                dest_array = [sub['dest']]
                dest_is_list = False
            else:
                dest_array = sub['dest']
                dest_is_list = True

            for i, each_dest_path in enumerate(dest_array):
                dest_path = each_dest_path['path']
                dest_pattern = each_dest_path.get('pattern', None)
                dest_recurse = each_dest_path.get('recurse', {})

                # If the source document is encrypted and
                # cleartext_secrets is False, then redact the substitution
                # metadata in the destination document to prevent
                # reverse-engineering of where the sensitive data came
                # from.
                if src_doc.is_encrypted and not self._cleartext_secrets:
                    sub['src']['path'] = dd.redact(src_path)
                    if dest_is_list:
                        sub['dest'][i]['path'] = dd.redact(dest_path)
                    else:
                        sub['dest']['path'] = dd.redact(dest_path)

                LOG.debug('Substituting from schema=%s layer=%s name=%s '
                          'src_path=%s into dest_path=%s, dest_pattern=%s',
                          src_schema, src_doc.layer, src_name, src_path,
                          dest_path, dest_pattern)

                document = self._substitute_one(
                    document,
                    src_doc=src_doc,
                    src_secret=src_secret,
                    dest_path=dest_path,
                    dest_pattern=dest_pattern,
                    dest_recurse=dest_recurse)

        # If we just substituted from an encrypted document
        # into a cleartext document, we need to redact the
        # dest document as well so the secret stays hidden
        if (not document.is_encrypted and redact_dest and
                not self._cleartext_secrets):
            document.storage_policy = 'encrypted'

        yield document
def parent_selector(self):
    """Return the labels used to select this document's parent.

    Falls back to an empty dict when no
    ``metadata.layeringDefinition.parentSelector`` is present.
    """
    selector = utils.jsonpath_parse(
        self, 'metadata.layeringDefinition.parentSelector')
    return selector if selector else {}
def _apply_action(self, action, child_data, overall_data):
    """Apply actions to each layer that is rendered.

    Supported actions include:

        * ``merge`` - a "deep" merge that layers new and modified data
          onto existing data
        * ``replace`` - overwrite data at the specified path and replace
          it with the data given in this document
        * ``delete`` - remove the data at the specified path

    :raises UnsupportedActionMethod: If the layering action isn't found
        among ``self.SUPPORTED_METHODS``.
    :raises MissingDocumentKey: If a layering action path isn't found in
        the child document.
    """
    method = action['method']
    if method not in self._SUPPORTED_METHODS:
        raise errors.UnsupportedActionMethod(
            action=action, document=child_data)

    # Use copy to prevent these data from being updated referentially.
    overall_data = copy.deepcopy(overall_data)
    child_data = copy.deepcopy(child_data)

    # If None is used, then consider it as a placeholder and coerce the
    # data into a dictionary.
    if overall_data is None:
        overall_data = {}
    if child_data is None:
        child_data = {}

    # Normalize the action path: strip a leading ".data"/"$.data" segment
    # (paths are applied against the data section directly) and ensure the
    # result is a rooted JSON path.
    action_path = action['path']
    if action_path.startswith('.data'):
        action_path = action_path[5:]
    elif action_path.startswith('$.data'):
        action_path = action_path[6:]
    if not (action_path.startswith('.') or action_path.startswith('$.')):
        action_path = '.' + action_path

    if method == self._DELETE_ACTION:
        if action_path == '.':
            overall_data.data = {}
        else:
            from_child = utils.jsonpath_parse(overall_data.data,
                                              action_path)
            if from_child is None:
                raise errors.MissingDocumentKey(
                    child_schema=child_data.schema,
                    child_layer=child_data.layer,
                    child_name=child_data.name,
                    parent_schema=overall_data.schema,
                    parent_layer=overall_data.layer,
                    parent_name=overall_data.name,
                    action=action)
            engine_utils.deep_delete(from_child, overall_data.data, None)

    elif method == self._MERGE_ACTION:
        from_overall = utils.jsonpath_parse(overall_data.data, action_path)
        from_child = utils.jsonpath_parse(child_data.data, action_path)
        if from_child is None:
            raise errors.MissingDocumentKey(
                child_schema=child_data.schema,
                child_layer=child_data.layer,
                child_name=child_data.name,
                parent_schema=overall_data.schema,
                parent_layer=overall_data.layer,
                parent_name=overall_data.name,
                action=action)

        # If both the child and parent data are dictionaries, then
        # traditional merging is possible using JSON path resolution.
        # Otherwise, JSON path resolution is not possible, so the only
        # way to perform layering is to prioritize the child data over
        # that of the parent. This applies when the child data is a
        # non-dict, the parent data is a non-dict, or both.
        if all(isinstance(x, dict) for x in (from_overall, from_child)):
            engine_utils.deep_merge(from_overall, from_child)
        else:
            LOG.info('Child data is type: %s for [%s, %s] %s. Parent data '
                     'is type: %s for [%s, %s] %s. Both must be '
                     'dictionaries for regular JSON path merging to work. '
                     'Because this is not the case, child data will be '
                     'prioritized over parent data for "merge" action.',
                     type(from_child), child_data.schema, child_data.layer,
                     child_data.name, type(from_overall),
                     overall_data.schema, overall_data.layer,
                     overall_data.name)
            from_overall = from_child

        if from_overall is not None:
            overall_data.data = utils.jsonpath_replace(
                overall_data.data, from_overall, action_path)
        else:
            overall_data.data = utils.jsonpath_replace(
                overall_data.data, from_child, action_path)
    elif method == self._REPLACE_ACTION:
        from_child = utils.jsonpath_parse(child_data.data, action_path)
        if from_child is None:
            raise errors.MissingDocumentKey(
                child_schema=child_data.schema,
                child_layer=child_data.layer,
                child_name=child_data.name,
                parent_schema=overall_data.schema,
                parent_layer=overall_data.layer,
                parent_name=overall_data.name,
                action=action)
        overall_data.data = utils.jsonpath_replace(
            overall_data.data, from_child, action_path)

    return overall_data
def layer(self):
    """Return the layer declared in ``metadata.layeringDefinition``."""
    layer_path = 'metadata.layeringDefinition.layer'
    return utils.jsonpath_parse(self, layer_path)
def layer_order(self):
    """Return the layer ordering stored under ``data.layerOrder``."""
    order_path = 'data.layerOrder'
    return utils.jsonpath_parse(self, order_path)
def is_abstract(self):
    """Whether ``metadata.layeringDefinition.abstract`` is literally True."""
    abstract = utils.jsonpath_parse(
        self, 'metadata.layeringDefinition.abstract')
    # Identity comparison: only an explicit boolean True counts.
    return abstract is True
def name(self):
    """Return the document name from ``metadata.name``."""
    name_path = 'metadata.name'
    return utils.jsonpath_parse(self, name_path)
def is_replacement(self):
    """Whether ``metadata.replacement`` is literally True."""
    replacement = utils.jsonpath_parse(self, 'metadata.replacement')
    # Identity comparison: only an explicit boolean True counts.
    return replacement is True
def storage_policy(self):
    """Return ``metadata.storagePolicy``, or '' when unset."""
    policy = utils.jsonpath_parse(self, 'metadata.storagePolicy')
    return policy if policy else ''
def actions(self):
    """Return layering actions, or an empty list when unset."""
    found = utils.jsonpath_parse(
        self, 'metadata.layeringDefinition.actions')
    return found if found else []
def substitutions(self):
    """Return ``metadata.substitutions``, or an empty list when unset."""
    subs = utils.jsonpath_parse(self, 'metadata.substitutions')
    return subs if subs else []
def labels(self):
    """Return ``metadata.labels``, or an empty dict when unset."""
    found_labels = utils.jsonpath_parse(self, 'metadata.labels')
    return found_labels if found_labels else {}