Example #1
    def test_patch_equality(self):
        p = jsonpatch.JsonPatch([{
            'op': 'add',
            'path': '/foo',
            'value': 'bar'
        }])
        q = jsonpatch.JsonPatch([{
            'op': 'add',
            'path': '/foo',
            'value': 'bar'
        }])
        different_op = jsonpatch.JsonPatch([{'op': 'remove', 'path': '/foo'}])
        different_path = jsonpatch.JsonPatch([{
            'op': 'add',
            'path': '/bar',
            'value': 'bar'
        }])
        different_value = jsonpatch.JsonPatch([{
            'op': 'add',
            'path': '/foo',
            'value': 'foo'
        }])
        self.assertNotEqual(p, different_op)
        self.assertNotEqual(p, different_path)
        self.assertNotEqual(p, different_value)
        self.assertEqual(p, q)
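A minimal follow-on sketch (ours, not from the test suite above): equality is driven by the operation list, so a patch parsed from a JSON string compares equal to one built from an equivalent list of dicts.

import jsonpatch

# Sketch: JsonPatch.from_string parses a JSON document; the result compares
# equal to a patch built from the same operations as Python dicts.
from_list = jsonpatch.JsonPatch([{'op': 'add', 'path': '/foo', 'value': 'bar'}])
from_string = jsonpatch.JsonPatch.from_string(
    '[{"op": "add", "path": "/foo", "value": "bar"}]')
assert from_list == from_string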
Example #2
    def test_apply_change_to_config_db__multiple_calls__changes_imitated_config_db(
            self):
        # Arrange
        imitated_config_db = Files.CONFIG_DB_AS_JSON
        config_wrapper = gu_common.DryRunConfigWrapper(imitated_config_db)

        changes = [
            gu_common.JsonChange(
                jsonpatch.JsonPatch([{
                    'op': 'remove',
                    'path': '/VLAN'
                }])),
            gu_common.JsonChange(
                jsonpatch.JsonPatch([{
                    'op': 'remove',
                    'path': '/ACL_TABLE'
                }])),
            gu_common.JsonChange(
                jsonpatch.JsonPatch([{
                    'op': 'remove',
                    'path': '/PORT'
                }]))
        ]

        expected = imitated_config_db
        for change in changes:
            # Act
            config_wrapper.apply_change_to_config_db(change)

            actual = config_wrapper.get_config_db_as_json()
            expected = change.apply(expected)

            # Assert
            self.assertDictEqual(expected, actual)
Example #3
    def test_creation_fails_with_invalid_patch(self):
        invalid_patches = [
            {
                'path': '/foo',
                'value': 'bar'
            },
            {
                'op': 0xADD,
                'path': '/foo',
                'value': 'bar'
            },
            {
                'op': 'boo',
                'path': '/foo',
                'value': 'bar'
            },
            {
                'op': 'add',
                'value': 'bar'
            },
        ]
        for patch in invalid_patches:
            with self.assertRaises(jsonpatch.InvalidJsonPatch):
                jsonpatch.JsonPatch([patch])

        with self.assertRaises(jsonpointer.JsonPointerException):
            jsonpatch.JsonPatch([{'op': 'add', 'path': 'foo', 'value': 'bar'}])
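A companion sketch (the helper name is ours): the two failure modes exercised above can be told apart by catching InvalidJsonPatch (a bad or missing "op") separately from jsonpointer.JsonPointerException (a path that is not a valid JSON pointer).

import jsonpatch
import jsonpointer

def describe_patch_error(operations):
    """Classify why a patch document is rejected (illustrative helper)."""
    try:
        jsonpatch.JsonPatch(operations)
    except jsonpatch.InvalidJsonPatch as exc:
        return 'invalid operation: {0}'.format(exc)
    except jsonpointer.JsonPointerException as exc:
        return 'invalid JSON pointer: {0}'.format(exc)
    return 'ok'

print(describe_patch_error([{'path': '/foo', 'value': 'bar'}]))              # missing op
print(describe_patch_error([{'op': 'add', 'path': 'foo', 'value': 'bar'}]))  # bad pointer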
Example #4
def patch_spec(content, region):
    """Patch the spec file"""
    LOGGER.info('Patching spec file for region "%s"', region)

    append_dir = os.path.join(os.path.dirname(__file__), 'data',
                              'ExtendedSpecs', region)
    for _, _, filenames in os.walk(append_dir):
        filenames.sort()
        for filename in fnmatch.filter(filenames, '*.json'):
            LOGGER.info('Processing %s (%s)', filename, region)
            all_patches = jsonpatch.JsonPatch(
                cfnlint.helpers.load_resources(
                    'data/ExtendedSpecs/{}/{}'.format(region, filename)))

            # Process the generic patches 1 by 1 so we can "ignore" failed ones
            for all_patch in all_patches:
                try:
                    jsonpatch.JsonPatch([all_patch]).apply(content,
                                                           in_place=True)
                except jsonpatch.JsonPatchConflict:
                    LOGGER.debug('Patch (%s) not applied in region %s',
                                 all_patch, region)
                except jsonpointer.JsonPointerException:
                    # Debug as the parent element isn't supported in the region
                    LOGGER.debug(
                        'Parent element not found for patch (%s) in region %s',
                        all_patch, region)

    return content
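The per-operation loop above generalises to a small helper (the name is ours, not part of cfn-lint): apply each operation independently and skip the ones whose target does not exist in the current document.

import jsonpatch
import jsonpointer

def apply_patch_leniently(document, operations):
    """Apply each operation on its own, skipping ones that cannot apply."""
    for operation in operations:
        try:
            jsonpatch.JsonPatch([operation]).apply(document, in_place=True)
        except (jsonpatch.JsonPatchConflict, jsonpointer.JsonPointerException):
            # The target (or its parent) is missing from this document, so the
            # operation is skipped instead of aborting the whole patch.
            pass
    return document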
Example #5
    def test_convert_sonic_yang_patch_to_config_db_patch__empty_patch__returns_empty_patch(self):
        # Arrange
        patch_wrapper = gu_common.PatchWrapper(config_wrapper = self.config_wrapper_mock)
        patch = jsonpatch.JsonPatch([])
        expected = jsonpatch.JsonPatch([])

        # Act
        actual = patch_wrapper.convert_sonic_yang_patch_to_config_db_patch(patch)

        # Assert
        self.assertEqual(expected, actual)
Example #6
    def test_patch_hash_equality(self):
        patch1 = jsonpatch.JsonPatch([{
            "op": "add",
            "path": "/a/b/c",
            "value": "foo"
        }])
        patch2 = jsonpatch.JsonPatch([{
            "path": "/a/b/c",
            "op": "add",
            "value": "foo"
        }])
        self.assertEqual(hash(patch1), hash(patch2))
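A follow-on sketch (ours): because hashing and equality are both driven by the operation list, as the tests above exercise, equivalent patches collapse to a single entry in a set.

import jsonpatch

seen = {
    jsonpatch.JsonPatch([{"op": "add", "path": "/a/b/c", "value": "foo"}]),
    jsonpatch.JsonPatch([{"path": "/a/b/c", "op": "add", "value": "foo"}]),
}
assert len(seen) == 1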
Example #7
    def patch(self, load_id, patch):
        """Update an existing load."""

        # TODO (dsulliva)
        # This is a stub. We will need to place reasonable limits on what can
        # be patched as we add to the upgrade system. This portion of the API
        # likely will not be publicly accessible.
        rpc_load = objects.load.get_by_uuid(pecan.request.context, load_id)

        utils.validate_patch(patch)
        patch_obj = jsonpatch.JsonPatch(patch)

        try:
            load = Load(**jsonpatch.apply_patch(rpc_load.as_dict(), patch_obj))

        except utils.JSONPATCH_EXCEPTIONS as e:
            raise exception.PatchError(patch=patch, reason=e)

        fields = objects.load.fields

        for field in fields:
            if rpc_load[field] != getattr(load, field):
                rpc_load[field] = getattr(load, field)

        rpc_load.save()

        return Load.convert_with_links(rpc_load)
Example #8
def patch(src, path_to_patch):
    """
    Apply a JSON Patch file.
    """
    patch = jsonpatch.JsonPatch(read_json(path_to_patch))
    data = apply_patch(read_json(src), patch)
    print(json.dumps(data, indent=DEFAULT_INDENT))
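A self-contained variant of the CLI helper above (read_json, apply_patch and DEFAULT_INDENT are local helpers of that tool and are not shown, so plain json calls stand in for them here):

import json
import jsonpatch

def patch_file(src_path, patch_path, indent=2):
    """Apply a JSON Patch file to a JSON document and print the result."""
    with open(src_path) as f:
        document = json.load(f)
    with open(patch_path) as f:
        operations = json.load(f)
    patched = jsonpatch.JsonPatch(operations).apply(document)
    print(json.dumps(patched, indent=indent))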
Example #9
def json2xml(workspace, output_dir, specroot, dataroot, resultprefix, patch):
    """Convert pyhf JSON back to XML + ROOT files."""
    try:
        import uproot

        assert uproot
    except ImportError:
        log.error("json2xml requires uproot, please install pyhf using the "
                  "xmlio extra: python -m pip install pyhf[xmlio]")
    from .. import writexml

    os.makedirs(output_dir, exist_ok=True)
    with click.open_file(workspace, 'r') as specstream:
        spec = json.load(specstream)
        for pfile in patch:
            patch = json.loads(click.open_file(pfile, 'r').read())
            spec = jsonpatch.JsonPatch(patch).apply(spec)
        os.makedirs(Path(output_dir).joinpath(specroot), exist_ok=True)
        os.makedirs(Path(output_dir).joinpath(dataroot), exist_ok=True)
        with click.open_file(
                Path(output_dir).joinpath(f'{resultprefix}.xml'),
                'w') as outstream:
            outstream.write(
                writexml.writexml(
                    spec,
                    Path(output_dir).joinpath(specroot),
                    Path(output_dir).joinpath(dataroot),
                    resultprefix,
                ).decode('utf-8'))
Example #10
  def load(self, path, attempt_patch=False):
    path = os.path.join(FIXTURES, path)

    with open(path) as f:
      if path.endswith('.json'):
        data = json.load(f, object_pairs_hook=OrderedDict)
      elif path.endswith('.yml'):
        data = yaml.load(f)
      else:
        data = f.read()

    patch = path + '.' + self.client + '.patch'

    if not attempt_patch or not os.path.exists(patch):
      loaded = path
    else:
      for ext in ['.schomd.json', '.csl.json', os.path.splitext(path)[1]]:
        if path.endswith(ext):
          loaded = path[:-len(ext)] + '.' + self.client + ext
          break

      if path.endswith('.json') or path.endswith('.yml'):
        with open(patch) as f:
          data = jsonpatch.JsonPatch(json.load(f)).apply(data)
      else:
        with open(patch) as f:
          dmp = diff_match_patch()
          data = dmp.patch_apply(dmp.patch_fromText(f.read()), data)[0]

    self.loaded(loaded)
    return (data, loaded)
Example #11
    def model(self, **config_kwargs):
        """
        Create a model object with/without patches applied.

        See :func:`pyhf.workspace.Workspace.get_measurement` and :class:`pyhf.pdf.Model` for possible keyword arguments.

        Args:
            patches: A list of JSON patches to apply to the model specification
            config_kwargs: Possible keyword arguments for the measurement and model configuration

        Returns:
            ~pyhf.pdf.Model: A model object adhering to the schema model.json

        """

        poi_name = config_kwargs.pop('poi_name', None)
        measurement_name = config_kwargs.pop('measurement_name', None)
        measurement_index = config_kwargs.pop('measurement_index', None)
        measurement = self.get_measurement(
            poi_name=poi_name,
            measurement_name=measurement_name,
            measurement_index=measurement_index,
        )
        log.debug(f"model being created for measurement {measurement['name']:s}")

        patches = config_kwargs.pop('patches', [])

        modelspec = {
            'channels': self['channels'],
            'parameters': measurement['config']['parameters'],
        }
        for patch in patches:
            modelspec = jsonpatch.JsonPatch(patch).apply(modelspec)

        return Model(modelspec, poi_name=measurement['config']['poi'], **config_kwargs)
Example #12
def convert_patch_to_supported_values(patch):
    supported_ops = ['replace']
    supported_paths = ['/tweeted', '/approved', '/tip']
    new_patch = []

    for oper in list(patch):
        if oper['op'] not in supported_ops:
            raise UpdateTipError(status=UpdateStatus.UNSUPPORTED_OP)

        path = oper['path']
        value = oper['value']

        if not any(path.endswith(supported) for supported in supported_paths):
            raise UpdateTipError(status=UpdateStatus.UNSUPPORTED_PATH)

        if path.endswith('/tweeted'):
            try:
                # Allow null values
                if value is not None:
                    oper['value'] = int(value)
            except Exception:
                # It wasn't worth converting anyway
                raise UpdateTipError(status=UpdateStatus.UNSUPPORTED_VALUE)

        if path.endswith('/approved'):
            # If the tip is explicitly approved or disapproved of, then it is moderated forever more
            new_patch.append({
                'op': 'replace',
                'path': '/moderated',
                'value': True
            })

        new_patch.append(oper)

    return jsonpatch.JsonPatch(new_patch)
Example #13
def json2xml(workspace, output_dir, specroot, dataroot, resultprefix, patch):
    try:
        import uproot

        assert uproot
    except ImportError:
        log.error("json2xml requires uproot, please install pyhf using the "
                  "xmlio extra: pip install pyhf[xmlio] or install uproot "
                  "manually: pip install uproot")
    from . import writexml

    ensure_dirs(output_dir)
    with click.open_file(workspace, 'r') as specstream:
        d = json.load(specstream)
        for pfile in patch:
            patch = json.loads(click.open_file(pfile, 'r').read())
            d = jsonpatch.JsonPatch(patch).apply(d)
        ensure_dirs(os.path.join(output_dir, specroot))
        ensure_dirs(os.path.join(output_dir, dataroot))
        with click.open_file(
                os.path.join(output_dir, '{0:s}.xml'.format(resultprefix)),
                'w') as outstream:
            outstream.write(
                writexml.writexml(
                    d,
                    os.path.join(output_dir, specroot),
                    os.path.join(output_dir, dataroot),
                    resultprefix,
                ).decode('utf-8'))
Example #14
    def patch(self, patch_data):

        if not isinstance(patch_data, (list, tuple)):
            raise WrongType(
                "patch value must be list or tuple type! but got `{0}` type.".format(
                    type(patch_data)
                ),
                type(patch_data),
                None,
            )

        if not bool(self):
            raise Invalid(
                "None object cannot be patched! "
                "Make sure fhir resource value is not empty!"
            )
        try:
            patcher = jsonpatch.JsonPatch(patch_data)
            value = patcher.apply(self._resource_obj.as_json())

            new_value = self._resource_obj.__class__(value)

            object.__setattr__(self, "_resource_obj", new_value)

        except jsonpatch.JsonPatchException as exc:
            return reraise(Invalid, str(exc))
Example #15
def pod_webhook_mutate():
    request_info = request.get_json()
    containers = request_info['request']['object']['spec']['containers']

    replacements = os.environ.get("REGISTRY_REGEX", '')
    replacements = replacements.split(' ') if replacements else []

    patches = []

    for counter, container in enumerate(containers):
        for r in replacements:
            split = r.split('|')
            search = split[0]
            replace = split[1]

            new_image = re.sub(search, replace, container['image'])

            if new_image != container['image']:
                patches.append({
                    'op': 'replace',
                    'path': f'/spec/containers/{counter}/image',
                    'value': new_image
                })
                break

    return admission_response_patch(True,
                                    "Adding nodeSelector ",
                                    json_patch=jsonpatch.JsonPatch(patches))
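For context, a small sketch of the REGISTRY_REGEX format this webhook expects (the values below are illustrative, not from any real deployment): each entry is a "search|replace" regex pair, and entries are separated by spaces.

import re

entry = r"^docker\.io/|registry.internal/"   # one "search|replace" pair
search, replace = entry.split('|')
image = "docker.io/library/nginx:1.25"
new_image = re.sub(search, replace, image)
# new_image == "registry.internal/library/nginx:1.25"; the webhook would emit
# {"op": "replace", "path": "/spec/containers/0/image", "value": new_image}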
Example #16
    def update(self, **kwargs):
        """ Update our internal storage from the file on disk.

            If this is a sequential store it will almost certainly mess up any
            referenced rows
        """
        if 'indent' not in kwargs:
            kwargs["indent"] = 2
        if not os.path.exists(self._db_file):
            # If the file doesn't exist then we don't need to do anything
            return
        # We have to try and patch the existing file
        with open(self._db_file, 'r') as fp:
            on_disk = json.load(fp)
        try:
            patch = jsonpatch.JsonPatch(self._patches)
            patch.apply(on_disk, in_place=True)
        except Exception as e:
            # Use the current POSIX time stamp to make a unique filename
            stamp = int(time.time())
            tmp_db = "{0}.{1}".format(self._db_file, stamp)
            tmp_patches = tmp_db + ".jsonpatch"

            logger.error((
                "Failed to apply patches! Will write current info in {0}, " +
                "{1} files").format(tmp_db, tmp_patches))
            with open(tmp_db, 'w') as fp:
                json.dump(self.to_dict(), fp, **kwargs)
            with open(tmp_patches, 'w') as fp:
                json.dump(self._patches, fp, **kwargs)
            raise e
        self.from_dict(on_disk, "JSON")
Example #17
async def gen_genesis(cfg):
    genesis = {
        "genesis_time": cfg['genesis_time'],
        "chain_id": cfg['chain_id'],
        "consensus_params": {
            "block": {
                "max_bytes": "22020096",
                "max_gas": "-1",
                "time_iota_ms": "1000"
            },
            "evidence": {
                "max_age_num_blocks": "100000",
                "max_age_duration": "20000000000"
            },
            "validator": {
                "pub_key_types": ["ed25519"]
            }
        },
        'validators': [],
    }

    patch = jsonpatch.JsonPatch(cfg['chain_config_patch'])
    cfg['genesis_fingerprint'], genesis = await fix_genesis(
        genesis, patch.apply(app_state_cfg(cfg)))
    return genesis
Example #18
def apply_jsonpatch(doc, patch):
    """Apply a JSON patch, one operation at a time.

    If the patch fails to apply, this allows us to determine which operation
    failed, making the error message a little less cryptic.

    :param doc: The JSON document to patch.
    :param patch: The JSON patch to apply.
    :returns: The result of the patch operation.
    :raises: PatchError if the patch fails to apply.
    :raises: exception.ClientSideError if the patch adds a new root attribute.
    """
    # Prevent removal of root attributes.
    for p in patch:
        if p['op'] == 'add' and p['path'].count('/') == 1:
            if p['path'].lstrip('/') not in doc:
                msg = _('Adding a new attribute (%s) to the root of '
                        'the resource is not allowed')
                raise exception.ClientSideError(msg % p['path'])

    # Apply operations one at a time, to improve error reporting.
    for patch_op in patch:
        try:
            doc = jsonpatch.apply_patch(doc, jsonpatch.JsonPatch([patch_op]))
        except _JSONPATCH_EXCEPTIONS as e:
            raise exception.PatchError(patch=patch_op, reason=e)
    return doc
Example #19
    def patch(self, icommunity_uuid, patch):
        """Update an existing icommunity.

        :param icommunity_uuid: UUID of an icommunity.
        :param patch: a json PATCH document to apply to this icommunity.
        """
        rpc_icommunity = objects.community.get_by_uuid(pecan.request.context,
                                                       icommunity_uuid)
        try:
            icomm = Community(**jsonpatch.apply_patch(rpc_icommunity.as_dict(),
                                                      jsonpatch.JsonPatch(patch)))
        except api_utils.JSONPATCH_EXCEPTIONS as e:
            raise exception.PatchError(patch=patch, reason=e)

        # Update only the fields that have changed
        comm = ""
        for field in objects.community.fields:
            if rpc_icommunity[field] != getattr(icomm, field):
                rpc_icommunity[field] = getattr(icomm, field)
                if field == 'community':
                    comm = rpc_icommunity[field]

        rpc_icommunity.save()

        if comm:
            LOG.debug("Modify community: uuid (%s) community (%s) ",
                      icommunity_uuid, comm)

        return Community.convert_with_links(rpc_icommunity)
Example #20
    def patch(self, itrapdest_uuid, patch):
        """Update an existing itrapdest.

        :param itrapdest_uuid: UUID of an itrapdest.
        :param patch: a json PATCH document to apply to this itrapdest.
        """
        rpc_itrapdest = objects.trapdest.get_by_uuid(pecan.request.context,
                                                     itrapdest_uuid)
        try:
            itrap = TrapDest(**jsonpatch.apply_patch(
                rpc_itrapdest.as_dict(), jsonpatch.JsonPatch(patch)))
        except api_utils.JSONPATCH_EXCEPTIONS as e:
            raise exception.PatchError(patch=patch, reason=e)

        # Update only the fields that have changed
        ip = ""
        for field in objects.trapdest.fields:
            if rpc_itrapdest[field] != getattr(itrap, field):
                rpc_itrapdest[field] = getattr(itrap, field)
                if field == 'ip_address':
                    ip = rpc_itrapdest[field]

        rpc_itrapdest.save()

        if ip:
            LOG.debug("Modify destination IP: uuid (%s), ip (%s",
                      itrapdest_uuid, ip)

        return TrapDest.convert_with_links(rpc_itrapdest)
Example #21
    def update_model(self, request, environment_id, body=None):
        if not body:
            msg = _('Request body is empty: please, provide '
                    'environment object model patch')
            LOG.error(msg)
            raise exc.HTTPBadRequest(msg)
        LOG.debug('Environments:UpdateModel <Id: %(env_id)s, Body: %(body)s>',
                  {
                      'env_id': environment_id,
                      'body': body
                  })
        target = {"environment_id": environment_id}
        policy.check('update_environment', request.context, target)

        session_id = None
        if hasattr(request, 'context') and request.context.session:
            session_id = request.context.session

        get_description = envs.EnvironmentServices.get_environment_description
        env_model = get_description(environment_id, session_id)

        for change in body:
            change['path'] = '/' + '/'.join(change['path'])

        patch = jsonpatch.JsonPatch(body)
        try:
            patch.apply(env_model, in_place=True)
        except jsonpatch.JsonPatchException as e:
            raise exc.HTTPNotFound(str(e))

        save_description = (
            envs.EnvironmentServices.save_environment_description)
        save_description(session_id, env_model)

        return env_model
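The path rewrite above converts list-form paths from the request body into JSON-pointer strings before the patch is built; a standalone illustration (the values are made up):

import jsonpatch

body = [{'op': 'replace', 'path': ['defaultNetworks', 'flat'], 'value': True}]
for change in body:
    change['path'] = '/' + '/'.join(change['path'])
# body[0]['path'] is now '/defaultNetworks/flat'
patch = jsonpatch.JsonPatch(body)
model = {'defaultNetworks': {'flat': False}}
patch.apply(model, in_place=True)
assert model == {'defaultNetworks': {'flat': True}}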
Example #22
    def patch(self, port_uuid, patch):
        """Update an existing port.

        :param port_uuid: UUID of a port.
        :param patch: a json PATCH document to apply to this port.
        """
        if self._from_nodes:
            raise exception.OperationNotPermitted

        rpc_port = objects.Port.get_by_uuid(pecan.request.context, port_uuid)
        try:
            port = Port(**jsonpatch.apply_patch(rpc_port.as_dict(),
                                                jsonpatch.JsonPatch(patch)))
        except api_utils.JSONPATCH_EXCEPTIONS as e:
            raise exception.PatchError(patch=patch, reason=e)

        # Update only the fields that have changed
        for field in objects.Port.fields:
            if rpc_port[field] != getattr(port, field):
                rpc_port[field] = getattr(port, field)

        rpc_node = objects.Node.get_by_uuid(pecan.request.context,
                                            rpc_port.node_id)
        topic = pecan.request.rpcapi.get_topic_for(rpc_node)

        new_port = pecan.request.rpcapi.update_port(pecan.request.context,
                                                    rpc_port, topic)

        return Port.convert_with_links(new_port)
Example #23
    def patch(self, node_uuid, patch):
        """Update an existing node."""
        if self._from_ihosts:
            raise exception.OperationNotPermitted

        rpc_node = objects.node.get_by_uuid(pecan.request.context, node_uuid)

        # replace ihost_uuid with the corresponding internal host id (forihostid)
        patch_obj = jsonpatch.JsonPatch(patch)
        for p in patch_obj:
            if p['path'] == '/ihost_uuid':
                p['path'] = '/forihostid'
                ihost = objects.host.get_by_uuid(pecan.request.context,
                                                 p['value'])
                p['value'] = ihost.id

        try:
            node = Node(**jsonpatch.apply_patch(rpc_node.as_dict(), patch_obj))

        except utils.JSONPATCH_EXCEPTIONS as e:
            raise exception.PatchError(patch=patch, reason=e)

        # Update only the fields that have changed
        for field in objects.node.fields:
            if rpc_node[field] != getattr(node, field):
                rpc_node[field] = getattr(node, field)

        rpc_node.save()
        return Node.convert_with_links(rpc_node)
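A reduced sketch of the rewrite-then-apply pattern used above (the values are illustrative): iterating a JsonPatch yields its operation dicts, which can be edited in place before the patch is applied.

import jsonpatch

patch_obj = jsonpatch.JsonPatch(
    [{'op': 'replace', 'path': '/ihost_uuid', 'value': 'some-uuid'}])
for p in patch_obj:
    if p['path'] == '/ihost_uuid':
        p['path'] = '/forihostid'
        p['value'] = 42
doc = patch_obj.apply({'forihostid': 1})
assert doc == {'forihostid': 42}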
Example #24
    def test_json_patch_from_object(self):
        patch = [{'op': 'add', 'path': '/baz', 'value': 'qux'}]
        res = jsonpatch.JsonPatch(
            patch,
            pointer_cls=CustomJsonPointer,
        )
        self.assertEqual(res.pointer_cls, CustomJsonPointer)
Example #25
def module_add(patch_module):
    if patch_module['value'] == []:
        return

    module_json = patch_module['value']
    if isinstance(module_json, list):
        module_json = module_json[0]
    path = patch_module['path']
    ds_path = datasource_path(path)
    data_source = find_datasource(ds_path.split('/')[1:])

    state = PipelineStateManager().add_module(ds_path, data_source.id,
                                              module_json['type'])
    # If we are adding our first module just find the datasource and the first
    #  module
    parts = path.split('/')[1:]
    if path.endswith('modules'):
        module = find_datasource(parts).modules[0]
        path += '/0'
    else:
        module = find_module(parts)

    module.properties.visibility = True
    updates = module._updates()
    state = jsonpatch.JsonPatch(updates).apply(json.loads(state))

    # Now update the module
    PipelineStateManager().update_module(path, json.dumps(state))

    module_state = PipelineStateManager().serialize_module(path, state['id'])
    new_module = load_module(json.loads(module_state))
    update(new_module, module)
Example #26
def patch(operations):
    if jsonpatch:
        json_patch = jsonpatch.JsonPatch(operations)

        def patcher(doc):
            return json_patch.apply(doc)

    else:

        def patcher(doc):
            # If jsonpatch could not be imported, then `@chaos_utils.patch()`
            # will be disabled, and will silently return values unmodified,
            # without applying the JSON patch operations.
            return doc

    def inner(patched_function):
        def patched_inner(*args, **kwargs):
            return_value = patched_function(*args, **kwargs)
            not_json = False
            if not isinstance(return_value, six.text_type):
                return_value = json.dumps(return_value)
                not_json = True
            return_value = patcher(return_value)
            if not_json:
                return_value = json.loads(return_value)
            return return_value
        return patched_inner
    return inner
Example #27
def pod_webhook_mutate():
    request_info = request.get_json()
    # admission_controller.logger.warning("mutate %s", request_info)
    return admission_response_patch(
        True,
        "Adding nodeSelector ",
        json_patch=jsonpatch.JsonPatch([{
            "op": "replace",
            "path": "/spec/affinity",
            "value": {
                "nodeAffinity": {
                    "requiredDuringSchedulingIgnoredDuringExecution": {
                        "nodeSelectorTerms": [{
                            "matchExpressions": [{
                                "key":
                                "ns-affinity",
                                "operator":
                                "In",
                                "values":
                                [request_info["request"]["namespace"]]
                            }]
                        }]
                    }
                }
            }
        }]))
Example #28
    def patch(self, op: str, path: str, value: Any) -> "ExpectationConfiguration":
        """

        Args:
            op: A jsonpatch operation. One of 'add', 'replace', or 'remove'
            path: A jsonpatch path for the patch operation
            value: The value to patch

        Returns:
            The patched ExpectationConfiguration object
        """
        if op not in ["add", "replace", "remove"]:
            raise ValueError("Op must be either 'add', 'replace', or 'remove'")

        try:
            valid_path = path.split("/")[1]
        except IndexError:
            raise IndexError(
                "Ensure you have a valid jsonpatch path of the form '/path/foo' "
                "(see http://jsonpatch.com/)"
            )

        if valid_path not in self.get_runtime_kwargs().keys():
            raise ValueError("Path not available in kwargs (see http://jsonpatch.com/)")

        # TODO: Call validate_kwargs when implemented
        patch = jsonpatch.JsonPatch([{"op": op, "path": path, "value": value}])

        patch.apply(self.kwargs, in_place=True)
        return self
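What the method above does internally, reduced to a standalone sketch (the values are illustrative): build a single-operation patch and apply it to the kwargs dict in place.

import jsonpatch

kwargs = {"column": "amount", "min_value": 0, "max_value": 10}
patch = jsonpatch.JsonPatch(
    [{"op": "replace", "path": "/max_value", "value": 100}])
patch.apply(kwargs, in_place=True)
assert kwargs["max_value"] == 100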
Example #29
    def model(self, **config_kwargs):
        """
        Create a model object with/without patches applied.

        Args:
            patches: A list of JSON patches to apply to the model specification

        Returns:
            ~pyhf.pdf.Model: A model object adhering to the schema model.json

        """
        measurement = self.get_measurement(**config_kwargs)
        log.debug(
            'model being created for measurement {0:s}'.format(measurement['name'])
        )

        patches = config_kwargs.get('patches', [])

        modelspec = {
            'channels': self['channels'],
            'parameters': measurement['config']['parameters'],
        }
        for patch in patches:
            modelspec = jsonpatch.JsonPatch(patch).apply(modelspec)

        return Model(modelspec, poiname=measurement['config']['poi'], **config_kwargs)
Example #30
def mutate_dockerhub():
    request_info = request.get_json()
    uid = request_info["request"]["uid"]
    obj = request_info["request"]["object"]
    kind = obj["kind"]
    patches = list()
    if kind == "Pod":
        patches = pod(obj)
    elif kind == "Deployment":
        patches = deployment(obj)
    elif kind == "Job":
        patches = job(obj)
    elif kind == "CronJob":
        patches = cronjob(obj)
    elif kind == "ReplicaSet":
        patches = replicaset(obj)
    elif kind == "ReplicationController":
        patches = replicationController(obj)
    elif kind == "DaemonSet":
        patches = daemonset(obj)
    elif kind == "StatefulSet":
        patches = statefulset(obj)
    elif kind == "PodTemplate":
        patches = podtemplate(obj)
    if len(patches) > 0:
        patches.append({
            "op": "add",
            "path": "/metadata/labels/mutated",
            "value": "yes"
        })
    return admission_response_patch(
        uid,
        "Mutating containers images from dockerhub",
        json_patch=jsonpatch.JsonPatch(patches))