def main():
    """Resolve references in a CustomResourceDefinition's OpenAPI v3 schema.

    Reads a CRD (YAML) from a file or stdin, resolves its validation
    schema(s) via parse_and_resolve, optionally applies a JSON patch, and
    writes the result to a file or stdout.

    Raises:
        TypeError: if the input is not a CustomResourceDefinition or uses
            an unsupported apiVersion.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        allow_abbrev=False)
    parser.add_argument(
        '--remove-descriptions', '-r', action='store_true', default=False,
        help='Remove object descriptions from referenced resources to reduce size')
    parser.add_argument('--jsonpatch', '-j', nargs='?', default=None,
                        help='JSON patch to apply on the resolved CRD')
    parser.add_argument('source', help='Source ("-" for stdin)')
    parser.add_argument('destination', help='Destination ("-" for stdout)')
    args = parser.parse_args()

    # Load CRD
    if args.source != '-':
        with open(args.source, 'r', encoding='utf-8') as source_f:
            source = ruamel.yaml.load(source_f, Loader=ruamel.yaml.SafeLoader)
    else:
        source = ruamel.yaml.load(sys.stdin, Loader=ruamel.yaml.SafeLoader)

    # Load JSON patch (if any)
    jsonpatch = None
    if args.jsonpatch:
        with open(args.jsonpatch, 'r', encoding='utf-8') as jsonpatch_f:
            jsonpatch = JsonPatch.from_string(jsonpatch_f.read())

    if source['kind'] != 'CustomResourceDefinition':
        raise TypeError('Input file is not a CustomResourceDefinition.')

    if source['apiVersion'] == 'apiextensions.k8s.io/v1beta1':
        # v1beta1 carries a single schema under spec.validation.
        # NOTE: 'remove_desciptions' matches the (misspelled) keyword in
        # parse_and_resolve's signature, which is not visible here.
        resolved_schema = parse_and_resolve(
            source['spec']['validation']['openAPIV3Schema'],
            remove_desciptions=args.remove_descriptions)
        source['spec']['validation']['openAPIV3Schema'] = resolved_schema
    elif source['apiVersion'] == 'apiextensions.k8s.io/v1':
        # v1 carries one schema per served version.
        for version in source['spec']['versions']:
            resolved_schema = parse_and_resolve(
                version['schema']['openAPIV3Schema'],
                remove_desciptions=args.remove_descriptions)
            version['schema']['openAPIV3Schema'] = resolved_schema
    else:
        # BUG FIX: the original read source['version'], a key that does not
        # exist in a CRD manifest, so this path raised KeyError instead of
        # the intended TypeError. The field checked above is 'apiVersion'.
        raise TypeError(
            'Unsupported CRD version {}'.format(source['apiVersion']))

    if jsonpatch:
        jsonpatch.apply(source, in_place=True)

    # Write result
    if args.destination != '-':
        with open(args.destination, 'w', encoding='utf-8') as destination_f:
            ruamel.yaml.dump(source, destination_f, default_flow_style=False)
    else:
        ruamel.yaml.dump(source, sys.stdout, default_flow_style=False)
def patch_user_api(event, context):
    """Apply a JSON patch to a user record (API-gateway-style handler).

    Parses the patch from the request body, normalizes any /email values
    (lowercase, stripped), writes an audit record including an 'undo'
    patch, and applies the patch to the user.

    Returns:
        dict with statusCode 204 (NO_CONTENT) and an empty JSON body.

    Raises:
        utils.DetailedValueError: if the body is not a valid JSON patch,
            or an /email operation is missing its 'value'.
    """
    logger = event["logger"]
    correlation_id = event["correlation_id"]

    # get info supplied to api call
    user_id = event["pathParameters"]["id"]
    try:
        user_jsonpatch = JsonPatch.from_string(event["body"])
    except InvalidJsonPatch:
        raise utils.DetailedValueError(
            "invalid jsonpatch",
            details={
                "traceback": traceback.format_exc(),
                "correlation_id": correlation_id,
            },
        )

    # convert email to lowercase and strip leading/trailing spaces
    for p in user_jsonpatch:
        if p.get("path") == "/email":
            # BUG FIX: the original accessed p["value"] for .lower() OUTSIDE
            # the try block, so an /email op without a 'value' key escaped as
            # a bare KeyError instead of DetailedValueError; the except only
            # covered the .strip() line, where KeyError could no longer occur.
            try:
                p["value"] = p["value"].lower().strip()
            except KeyError:
                raise utils.DetailedValueError(
                    "invalid jsonpatch",
                    details={
                        "traceback": traceback.format_exc(),
                        "correlation_id": correlation_id,
                    },
                )

    logger.info(
        "API call",
        extra={
            "user_id": user_id,
            "user_jsonpatch": user_jsonpatch,
            "correlation_id": correlation_id,
            "event": event,
        },
    )

    modified_time = utils.now_with_tz()

    # create an audit record of update, inc 'undo' patch
    entity_update = create_user_entity_update(user_id, user_jsonpatch,
                                              modified_time, correlation_id)

    patch_user(user_id, user_jsonpatch, modified_time, correlation_id)

    # on successful update save audit record
    entity_update.save()

    return {"statusCode": HTTPStatus.NO_CONTENT, "body": json.dumps("")}
def test_invalid_bin_wise_modifier(datadir, patch_file):
    """
    Test that bin-wise modifiers will raise an exception if their data shape
    differs from their sample's.
    """
    # FIX: open fixture files with context managers — the original left two
    # file handles unclosed (resource leak / ResourceWarning under pytest).
    with open(datadir.join("spec.json")) as spec_file:
        spec = json.load(spec_file)
    assert pyhf.Model(spec)

    with open(datadir.join(patch_file)) as patch_fp:
        patch = JsonPatch.from_string(patch_fp.read())
    bad_spec = patch.apply(spec)

    with pytest.raises(pyhf.exceptions.InvalidModifier):
        pyhf.Model(bad_spec)
def _patched_torrent(self, hash, patch_path=None):
    """Fetch torrent details from the API and apply a local JSON patch.

    When no patch file exists for this hash yet, an empty one ([]) is
    created first, so every call has a patch to apply.
    """
    if patch_path is None:
        patch_path = join(self.config_path, f'{hash}.json-patch')

    # Query the tracker API for the torrent's details.
    query = self.endpoint / 'ajax.php' % {
        'action': 'torrent',
        'hash': hash.upper()
    }
    result = self._get(query)

    # Seed an empty patch file on first use.
    if not exists(patch_path):
        logger.debug(f'{patch_path} does not exist, creating empty patch')
        with open(patch_path, 'w') as fh:
            dump([], fh)

    logger.trace(f'Applying json patch {patch_path}')
    with open(patch_path, 'r') as fh:
        patch = JsonPatch.from_string(fh.read())
    return patch.apply(result)
def merge_epoch_and_patches(epoch_path, patches_path):
    """Load the original ('0-day') JSON document and apply accumulated patches.

    Each line of the patches file is a standalone JSON patch document;
    they are applied to the epoch object in order, mutating it in place.
    A missing patches file means zero patches.

    Returns:
        The patched object.
    """
    # read the 0-day (original) file
    with open(epoch_path) as epoch_fp:
        obj = json.load(epoch_fp)

    # apply patches, one by one
    patches_applied = 0
    if os.path.exists(patches_path):
        with open(patches_path) as patches_fp:
            for patch_line in patches_fp:
                # ROBUSTNESS FIX: tolerate blank lines (e.g. a stray empty
                # line in the file) instead of crashing in
                # JsonPatch.from_string on non-JSON input.
                if not patch_line.strip():
                    continue
                patch = JsonPatch.from_string(patch_line)
                patch.apply(obj, in_place=True)
                patches_applied += 1
    logger.info('Read %d patchsets from %s and applied them to %s',
                patches_applied, patches_path, epoch_path)
    return obj
def cli(filepath, jsonpatch, file_format):
    """Cocof runs the provided 'jsonpatch' modifications on the
    configuration file given with the 'filepath' argument. Use the
    '--format' option to tell the file format. If not given cocof will
    try to guess the file format based on the file extension. Use '-' as
    filepath for stdin, in which case the output goes to stdout and you
    must provide the format of the data via the '--format' option."""
    # NOTE: this docstring doubles as the click --help text; keep it intact.
    try:
        file_format = guessFileFormat(filepath, file_format)
        data = parse_file(filepath, file_format)
        patch = JsonPatch.from_string(jsonpatch)
        result = patch.apply(data)
        write_to_file(filepath, result, file_format)
    # BUG FIX: catch Exception, not BaseException — the original also
    # swallowed KeyboardInterrupt and SystemExit, making the tool
    # un-interruptible and masking deliberate exits.
    except Exception as e:
        click.secho('Error: Something went wrong. Developer info:',
                    fg='red',
                    err=True)
        click.secho(str(e), err=True)
        return 101  # non-zero exit status signals failure to the shell
    return 0
def compare_json_patches(a, b):
    """Return True if *a* and *b* describe the same JSON patch.

    DOC FIX: the original docstring claimed "Return 0 if ... the same",
    but the function returns the boolean result of JsonPatch equality
    (True when the two patch strings are semantically equal).
    """
    return JsonPatch.from_string(a) == JsonPatch.from_string(b)