def patch_action(action):
    """ :type action: dart.model.action.Action """
    p = JsonPatch(request.get_json())
    sanitized_action = action.copy()
    patched_action = Action.from_dict(p.apply(action.to_dict()))
    # only allow updating fields that are editable
    sanitized_action.data.name = patched_action.data.name
    sanitized_action.data.args = patched_action.data.args
    sanitized_action.data.tags = patched_action.data.tags
    sanitized_action.data.progress = patched_action.data.progress
    sanitized_action.data.order_idx = patched_action.data.order_idx
    sanitized_action.data.on_failure = patched_action.data.on_failure
    sanitized_action.data.on_failure_email = patched_action.data.on_failure_email
    sanitized_action.data.on_success_email = patched_action.data.on_success_email
    sanitized_action.data.extra_data = patched_action.data.extra_data
    # revalidate
    sanitized_action = action_service().default_and_validate_action(sanitized_action)
    return {'results': action_service().patch_action(action, sanitized_action).to_dict()}
def patch(data, patches=None):
    """
    loads a data file into a VSansData obj and returns that.

    **Inputs**

    data (raw[]): datafiles with metadata to patch

    patches (patch_metadata[]:run.filename): patches to be applied, with run.filename used as unique key

    **Returns**

    patched (raw[]): datafiles with patched metadata

    2019-07-26 Brian Maranville
    """
    if patches is None:
        return data

    from jsonpatch import JsonPatch
    from collections import OrderedDict

    # make a master dict of metadata from provided key:
    key = "run.filename"
    master = OrderedDict([(_s(d.metadata[key]), d.metadata) for d in data])
    to_apply = JsonPatch(patches)
    to_apply.apply(master, in_place=True)

    return data
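# A minimal usage sketch (not from the source) of the `patches` argument to
# patch() above: RFC 6902 operations whose paths are rooted at the
# run.filename key used to build `master`. The filename and metadata fields
# below are hypothetical; applying in place mutates the metadata dicts.
from collections import OrderedDict
from jsonpatch import JsonPatch

metadata = {"run.filename": "sans68869.nxs.ngv", "sample": {"description": "old"}}
master = OrderedDict([(metadata["run.filename"], metadata)])
patches = [{"op": "replace",
            "path": "/sans68869.nxs.ngv/sample/description",
            "value": "new"}]
JsonPatch(patches).apply(master, in_place=True)
assert metadata["sample"]["description"] == "new"  # patched in place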
def put(self, id):
    permission = auth.UpdatePatchPermission(id)
    if not permission.can():
        raise auth.PermissionDenied(permission)
    try:
        patch = JsonPatch(parse_json(request))
        affected_entities = patching.validate(patch, database.get_dataset())
    except ResourceError as e:
        return e.response()
    except patching.InvalidPatchError as e:
        if str(e) != 'Could not apply JSON patch to dataset.':
            return {'status': 400, 'message': str(e)}, 400
    db = database.get_db()
    curs = db.cursor()
    curs.execute('''
        UPDATE patch_request
        SET original_patch = ?,
            updated_entities = ?,
            removed_entities = ?,
            updated_by = ?
        WHERE id = ?
        ''', (patch.to_string(),
              json.dumps(sorted(affected_entities['updated'])),
              json.dumps(sorted(affected_entities['removed'])),
              g.identity.id,
              id)
    )
    db.commit()
def partial_update(self, request, *args, **kwargs):
    patch = JsonPatch(request.DATA)
    obj = self.get_object()
    serializer = self.get_serializer(instance=obj)
    doc = serializer.data
    try:
        # `jsonpatch` does not force documents to be array of operations
        # So we have to do it manually
        if not isinstance(request.DATA, list):
            raise JsonPatchException(
                "The patch must be supplied as a list",
            )
        modified = patch.apply(doc)
        # Set the modified data to the request data
        # This will allow us to update the object using it
        request._data = modified
        return super(JsonPatchMixin, self).update(request, *args, **kwargs)
    except JsonPatchException as ex:
        message = force_text(ex)
        # `jsonpatch` does not handle unicode transparently
        # So we have to strip out the `u'` in Python 2
        if "Unknown operation u'" in message and sys.version_info < (3, 0):
            message = message.replace("u'", "'")
        data = {
            "detail": message,
        }
        return response.Response(data, status=400)
def patch_datastore(datastore):
    """ :type datastore: dart.model.datastore.Datastore """
    p = JsonPatch(request.get_json())
    sanitized_datastore = datastore.copy()
    patched_datastore = Datastore.from_dict(p.apply(datastore.to_dict()))
    # only allow updating fields that are editable
    sanitized_datastore.data.name = patched_datastore.data.name
    sanitized_datastore.data.host = patched_datastore.data.host
    sanitized_datastore.data.port = patched_datastore.data.port
    sanitized_datastore.data.connection_url = patched_datastore.data.connection_url
    sanitized_datastore.data.state = patched_datastore.data.state
    sanitized_datastore.data.concurrency = patched_datastore.data.concurrency
    sanitized_datastore.data.args = patched_datastore.data.args
    sanitized_datastore.data.extra_data = patched_datastore.data.extra_data
    sanitized_datastore.data.tags = patched_datastore.data.tags
    # revalidate
    sanitized_datastore = datastore_service().default_and_validate_datastore(sanitized_datastore)
    return {'results': datastore_service().patch_datastore(datastore, sanitized_datastore).to_dict()}
def patch_json(site, config, logger):
    """Update the site JSON configuration file"""
    patching_config = config.get('patching_config')
    output_file = os.path.join(config["build_site_dir"], site,
                               patching_config["viewer_configs"][site])
    input_file = output_file + "_org"
    logger.info("Backup original config")
    is_patched = False
    try:
        os.remove(input_file)
    except OSError:
        pass
    os.rename(output_file, input_file)
    with open(input_file) as file_data:
        json_data_to_patch = json.load(file_data)
    items_to_patch = []
    patch_requests = patching_config.get('patches')
    for request in patch_requests:
        op = request.get('op')
        patch_path = parse(request.get('path'))
        matching_pattern = request.get('pattern')
        replacement = request.get('replacement')
        # Find matches for path
        matches = [(match.value, str(match.full_path))
                   for match in patch_path.find(json_data_to_patch)]
        # Find items matching with value
        filtered = filter(lambda x: is_matched(x, matching_pattern), matches)
        # Prepare patch
        items_to_patch += map(lambda x: prepare_patch(x, matching_pattern, replacement, op),
                              filtered)
    # Patch json
    if len(items_to_patch) > 0:
        logger.info("Patches available")
        patches = JsonPatch(items_to_patch)
        result = patches.apply(json_data_to_patch)
        # Save json
        with open(output_file, 'w') as save_file:
            json.dump(result, save_file, sort_keys=False)
        is_patched = True
        logger.info("Json config of site '%s' patched", site)
    if not is_patched:
        os.rename(input_file, output_file)
        logger.info("Json config of site '%s' not patched", site)
    logger.info("JSON patch process for site '%s' completed", site)
def patch(instance, **kwargs):
    # Create the patch object
    patch = JsonPatch(request.get_json())
    # Get a dictionary instance of the model instance
    data = instance.asdict(exclude_pk=True, **kwargs)
    print('THIS IS THE DATA:', data)
    # Apply the patch to the dictionary instance of the model
    data = patch.apply(data)
    # Apply the patched dictionary back to the model
    instance.fromdict(data)
def put(self, challenge_id):
    try:
        patch = JsonPatch(request.get_json(force=True))
    except (KeyError, AttributeError) as e:
        log("Request missing values", error=e)
        abort(400)
    schema = s.ChallengeSchema()
    challenge = m.Challenge.query.get_or_404(challenge_id)
    data = schema.dump(challenge)
    new_data = patch.apply(data)
    schema.load(new_data, instance=challenge).save()
    return new_data, 200
def __import_patch(self, bg_only_file: Path, patch_file: Path) -> List[Dict]:
    with bg_only_file.open() as json_file:
        bgonly = json.load(json_file)
    with patch_file.open() as json_file:
        patch = JsonPatch(json.load(json_file))
    return [bgonly, patch]
def test_basic_game_info_update(self):
    """Make sure an update is properly parsed and stored"""
    borgia_info = read_game_info(self.borgia_game_path)
    # make some basic changes
    modified_borgia_info = borgia_info
    modified_borgia_info["title"] = "Coucou"
    updated_game_info = update_game_info(self.borgia_game_path, modified_borgia_info)
    # make sure changes have been done
    self.assertEquals(modified_borgia_info["title"], updated_game_info["title"])
    self.assertEquals(updated_game_info["title"], "Coucou")
    # make sure there is a new event in history
    self.assertEquals(len(updated_game_info["history"]), 1)
    event = updated_game_info["history"][0]["patch"]
    print(event)
    # try to apply patch again
    new = JsonPatch(event).apply(modified_borgia_info)
    # make sure the original title is back
    self.assertEquals(new["title"], "Borgia, le jeu malsain")
def persist_new_state(epoch_path, patches_path, new_state, persisted_state_cache=None):
    '''
    Read in the original file, apply patches, and add patch for diff between
    filesystem state and the provided new_state argument.
    '''
    if not os.path.exists(epoch_path):
        with open(epoch_path, 'w') as epoch_fp:
            json.dump(new_state, epoch_fp)
        logger.info('Created file: %s', epoch_path)
    else:
        # load 0-day json and merge with patches.
        cache_key = epoch_path + ':' + patches_path
        persisted_state = persisted_state_cache.get(cache_key) if persisted_state_cache is not None else None
        if not persisted_state:
            logger.debug('Cache miss (%s), loading from epoch-patches pair: %s, %s',
                         cache_key, epoch_path, patches_path)
            persisted_state = merge_epoch_and_patches(epoch_path, patches_path)
        else:
            logger.info('Previous state retrieved from cache: %s', cache_key)
        diff = JsonPatch.from_diff(persisted_state, new_state)
        if len(diff.patch) > 0:
            with open(patches_path, 'a') as patches_fp:
                logger.warn('Writing %d-patch patchset to %s', len(diff.patch), patches_path)
                json.dump(diff.patch, patches_fp)
                patches_fp.write(os.linesep)
        else:
            logger.info('No patches to write to %s', patches_path)
        if persisted_state_cache is not None:
            logger.debug('Adding state to persisted cache: %s', cache_key)
            persisted_state_cache[cache_key] = new_state
def _prune_log_message(self, msg):
    """
    If the "splunk_remove_paths" config item is not set or empty, return
    ``msg`` unaltered. Otherwise, for each RFC6901 JSON Pointer in
    "splunk_remove_paths" that points to an element present in ``msg``,
    remove that element. Return the result.

    :param msg: Splunk-ready message
    :type msg: dict
    :return: msg dict with all ``splunk_remove_paths`` elements removed
    :rtype: dict
    """
    paths = self.config.get('splunk_remove_paths', [])
    if not paths:
        return msg
    patches = []
    for path in paths:
        try:
            resolve_pointer(msg, path)
            patches.append({'op': 'remove', 'path': path})
        except JsonPointerException:
            pass
    if not patches:
        return msg
    msg = JsonPatch(patches).apply(msg)
    return msg
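# A self-contained sketch of the pruning idiom above (field names are made
# up): only pointers that resolve become "remove" ops, so absent paths are
# skipped instead of raising JsonPatchConflict when the patch is applied.
from jsonpatch import JsonPatch
from jsonpointer import resolve_pointer, JsonPointerException

msg = {"host": "web-1", "auth": {"token": "s3cret"}, "event": "login"}
patches = []
for path in ["/auth/token", "/not/present"]:
    try:
        resolve_pointer(msg, path)          # only keep paths that exist
        patches.append({"op": "remove", "path": path})
    except JsonPointerException:
        pass

pruned = JsonPatch(patches).apply(msg)
assert pruned == {"host": "web-1", "auth": {}, "event": "login"}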
def mutate(controller, init_image, processor_addr, req):
    if req["operation"] != "CREATE" or not should_mutate(req["object"]["metadata"]['annotations']):
        return True, ""
    req_obj = copy.deepcopy(req).get('object')
    raw_binds = get_bind_names_from_annotations(req_obj.get('metadata').get('annotations'))
    if len(raw_binds) == 0:
        return True, ""
    initContainer = InitContainer('esk-init', init_image, processor_addr)
    use_default_volume = False
    for bind in raw_binds:
        spec = controller.get_secretbinding_spec(
            bind.get('name'),
            req_obj.get('metadata').get('namespace')).get('spec')
        current_app.logger.debug(bind)
        if bind.get('target') is None:
            bind['target'] = spec.get('target')
        use_default_volume = use_default_volume or bind.get('target').startswith('/esk/secrets')
        initContainer.add_bind(bind, req_obj.get('metadata').get('namespace'), spec)
    renderedContainer = initContainer.get()
    # Add our init container
    if 'initContainers' not in req_obj['spec']:
        req_obj['spec']['initContainers'] = [renderedContainer]
    else:
        req_obj['spec']['initContainers'].append(renderedContainer)
    pod_vols = initContainer.get_volumes()
    if use_default_volume:
        pod_vols.append(__DEFAULT_VOLUME)
    # Add our secret volumes
    if 'volumes' not in req_obj['spec']:
        req_obj['spec']['volumes'] = pod_vols
    else:
        req_obj['spec']['volumes'] += pod_vols
    # Add volumeMounts to every container
    for index in range(0, len(req_obj['spec']['containers'])):
        if 'volumeMounts' not in req_obj['spec']['containers'][index]:
            req_obj['spec']['containers'][index]['volumeMounts'] = []
        req_obj['spec']['containers'][index]['volumeMounts'] += renderedContainer.get('volumeMounts')
    patch = JsonPatch.from_diff(req["object"], req_obj)
    return True, base64.b64encode(str(patch).encode()).decode()
def apply_patch(data, patch: JsonPatch):
    validate_v1(data)
    try:
        data = patch.apply(data)
    except Exception as e:
        print(e)
    # TODO: meta and history stuff.
    return data
def patch(self, request, pk):
    doc = self.get_doc(pk)
    # do JSON-Patch
    patch_data = JsonPatch(request.data)
    try:
        patch_data.apply(doc, in_place=True)
    except Exception as e:
        return Response({'jsonpatch_error': get_exception_detail(e)},
                        status=status.HTTP_400_BAD_REQUEST)
    # validate data after JSON-Patch
    form = self.form_cls(doc)
    if form.is_valid():
        self.engine.save(form.document)
        return Response(status=status.HTTP_204_NO_CONTENT)
    return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)
def patch(self, pk, data):
    doc = self.get_doc(pk)
    # do JSON-Patch
    patch_data = JsonPatch(data)
    try:
        patch_data.apply(doc, in_place=True)
    except Exception as e:
        return Response({'jsonpatch_error': get_exception_detail(e)},
                        status=status.HTTP_400_BAD_REQUEST)
    # validate data after JSON-Patch
    form = self.form_cls(doc)
    if form.is_valid():
        self.engine.save(form.document)
        return Response(status=status.HTTP_204_NO_CONTENT)
    return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)
def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        allow_abbrev=False)
    parser.add_argument(
        '--remove-descriptions', '-r',
        action='store_true',
        default=False,
        help='Remove object descriptions from referenced resources to reduce size')
    parser.add_argument(
        '--jsonpatch', '-j',
        nargs='?',
        default=None,
        help='JSON patch to apply on the resolved CRD')
    parser.add_argument('source', help='Source ("-" for stdin)')
    parser.add_argument('destination', help='Destination ("-" for stdout)')
    args = parser.parse_args()

    # Load CRD
    if args.source != '-':
        with open(args.source, 'r', encoding='utf-8') as source_f:
            source = ruamel.yaml.load(source_f, Loader=ruamel.yaml.SafeLoader)
    else:
        source = ruamel.yaml.load(sys.stdin, Loader=ruamel.yaml.SafeLoader)

    # Load JSON patch (if any)
    jsonpatch = None
    if args.jsonpatch:
        with open(args.jsonpatch, 'r', encoding='utf-8') as jsonpatch_f:
            jsonpatch = JsonPatch.from_string(jsonpatch_f.read())

    if source['kind'] != 'CustomResourceDefinition':
        raise TypeError('Input file is not a CustomResourceDefinition.')

    if source['apiVersion'] == 'apiextensions.k8s.io/v1beta1':
        resolved_schema = parse_and_resolve(
            source['spec']['validation']['openAPIV3Schema'],
            remove_desciptions=args.remove_descriptions)
        source['spec']['validation']['openAPIV3Schema'] = resolved_schema
    elif source['apiVersion'] == 'apiextensions.k8s.io/v1':
        for version in source['spec']['versions']:
            resolved_schema = parse_and_resolve(
                version['schema']['openAPIV3Schema'],
                remove_desciptions=args.remove_descriptions)
            version['schema']['openAPIV3Schema'] = resolved_schema
    else:
        raise TypeError('Unsupported CRD version {}'.format(source['apiVersion']))

    if jsonpatch:
        jsonpatch.apply(source, in_place=True)

    if args.destination != '-':
        with open(args.destination, 'w', encoding='utf-8') as destination_f:
            ruamel.yaml.dump(source, destination_f, default_flow_style=False)
    else:
        ruamel.yaml.dump(source, sys.stdout, default_flow_style=False)
def load_data(datafile):
    with app.app_context():
        with open(datafile) as f:
            data = json.load(f)
        user_id = "initial-data-loader"
        patch = JsonPatch.from_diff({}, data)
        patch_request_id = patching.create_request(patch, user_id)
        patching.merge(patch_request_id, user_id)
        database.commit()
def patch(self, pk, data):
    row = self.get_row(pk)
    doc = self.as_dict(row)
    # do JSON-Patch
    patch_data = JsonPatch(data)
    try:
        patch_data.apply(doc, in_place=True)
    except Exception as e:
        return Response({'jsonpatch_error': get_exception_detail(e)},
                        status=status.HTTP_400_BAD_REQUEST)
    # validate data after JSON-Patch
    form = self.form_cls(doc)
    if form.is_valid():
        self.from_dict(row, doc)
        self.session.commit()
        return Response(status=status.HTTP_204_NO_CONTENT)
    return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)
def patch(self, request, pk):
    row = self.get_row(pk)
    doc = self.as_dict(row)
    # do JSON-Patch
    patch_data = JsonPatch(request.data)
    try:
        patch_data.apply(doc, in_place=True)
    except Exception as e:
        return Response({'jsonpatch_error': get_exception_detail(e)},
                        status=status.HTTP_400_BAD_REQUEST)
    # validate data after JSON-Patch
    form = self.form_cls(doc)
    if form.is_valid():
        self.from_dict(row, doc)
        self.session.commit()
        return Response(status=status.HTTP_204_NO_CONTENT)
    return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)
def diff(self, hash1, hash2=None, txid=None):
    branch = self._branches[txid]
    rev1 = branch[hash1]
    rev2 = branch[hash2] if hash2 else branch._latest
    if rev1.hash == rev2.hash:
        return JsonPatch([])
    else:
        dict1 = message_to_dict(rev1.data)
        dict2 = message_to_dict(rev2.data)
        return make_patch(dict1, dict2)
def load_config(
    config_content: str,
    merge_content: Optional[str] = None,
    patch_content: Optional[str] = None,
) -> _JSONDict:
    config_data = yaml.safe_load(config_content)
    if config_data is None:
        config_data = {}
    if not isinstance(config_data, dict):
        raise SystemExit(f"Invalid configuration format: {type(config_data)!r}")
    if merge_content is not None:
        merge_data = yaml.safe_load(merge_content)
        config_data = merge(config_data, merge_data)
    if patch_content is not None:
        patch_data = yaml.safe_load(patch_content)
        json_patch = JsonPatch(patch_data)
        config_data = json_patch.apply(config_data)
    return cast(_JSONDict, config_data)
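# A hedged example (hypothetical YAML content) of the patch_content path in
# load_config() above: the patch itself can be written in YAML, since
# yaml.safe_load yields the same list-of-operations structure that
# JsonPatch expects.
import yaml
from jsonpatch import JsonPatch

patch_yaml = """
- op: add
  path: /server/port
  value: 8080
"""
config = {"server": {}}
patched = JsonPatch(yaml.safe_load(patch_yaml)).apply(config)
assert patched == {"server": {"port": 8080}}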
def patch_datastore(datastore):
    """ :type datastore: dart.model.datastore.Datastore """
    p = JsonPatch(request.get_json())
    sanitized_datastore = datastore.copy()
    patched_datastore = Datastore.from_dict(p.apply(datastore.to_dict()))
    # only allow updating fields that are editable
    sanitized_datastore.data.name = patched_datastore.data.name
    sanitized_datastore.data.host = patched_datastore.data.host
    sanitized_datastore.data.port = patched_datastore.data.port
    sanitized_datastore.data.connection_url = patched_datastore.data.connection_url
    sanitized_datastore.data.state = patched_datastore.data.state
    sanitized_datastore.data.concurrency = patched_datastore.data.concurrency
    sanitized_datastore.data.args = patched_datastore.data.args
    sanitized_datastore.data.extra_data = patched_datastore.data.extra_data
    sanitized_datastore.data.tags = patched_datastore.data.tags
    # revalidate
    sanitized_datastore = datastore_service().default_and_validate_datastore(sanitized_datastore)
    return {'results': datastore_service().patch_datastore(datastore, sanitized_datastore).to_dict()}
def patch_user_api(event, context):
    logger = event["logger"]
    correlation_id = event["correlation_id"]

    # get info supplied to api call
    user_id = event["pathParameters"]["id"]
    try:
        user_jsonpatch = JsonPatch.from_string(event["body"])
    except InvalidJsonPatch:
        raise utils.DetailedValueError(
            "invalid jsonpatch",
            details={
                "traceback": traceback.format_exc(),
                "correlation_id": correlation_id,
            },
        )

    # convert email to lowercase
    for p in user_jsonpatch:
        if p.get("path") == "/email":
            p["value"] = p["value"].lower()
        # strip leading and trailing spaces
        try:
            p["value"] = p["value"].strip()
        except KeyError:
            raise utils.DetailedValueError(
                "invalid jsonpatch",
                details={
                    "traceback": traceback.format_exc(),
                    "correlation_id": correlation_id,
                },
            )

    logger.info(
        "API call",
        extra={
            "user_id": user_id,
            "user_jsonpatch": user_jsonpatch,
            "correlation_id": correlation_id,
            "event": event,
        },
    )

    modified_time = utils.now_with_tz()

    # create an audit record of update, inc 'undo' patch
    entity_update = create_user_entity_update(user_id, user_jsonpatch, modified_time, correlation_id)

    patch_user(user_id, user_jsonpatch, modified_time, correlation_id)

    # on successful update save audit record
    entity_update.save()

    return {"statusCode": HTTPStatus.NO_CONTENT, "body": json.dumps("")}
def _update(self):
    "Update the level, entities, etc"
    # TODO: Should be possible to be smarter here and not generate
    # the dicts on each update if nothing actually happened.
    self.level.update_entities()
    data = self.level.to_dict()
    if self._cache:
        patch = JsonPatch.from_diff(self._cache, data)
        if patch:
            # something has changed!
            self._cache = data
            return patch
    else:
        self._cache = data
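# Hedged illustration of the change-detection idiom above: an empty diff
# produces no operations, so inspecting the patch (here via its op list,
# which the snippet above relies on being empty/falsy when nothing changed)
# is a cheap "did anything change?" test. Data shapes are made up.
from jsonpatch import JsonPatch

old = {"entities": [{"id": 1, "x": 0}]}
new = {"entities": [{"id": 1, "x": 5}]}

assert len(JsonPatch.from_diff(old, new).patch) > 0   # something changed
assert JsonPatch.from_diff(new, new).patch == []      # nothing to report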
def admission_response_patch(allowed, message, json_patch: jsonpatch.JsonPatch):
    base64_patch = base64.b64encode(json_patch.to_string().encode("utf-8")).decode("utf-8")
    return jsonify({
        "response": {
            "allowed": allowed,
            "status": {"message": message},
            "patchType": "JSONPatch",
            "patch": base64_patch
        }
    })
def test_invalid_bin_wise_modifier(datadir, patch_file):
    """
    Test that bin-wise modifiers will raise an exception if their data shape
    differs from their sample's.
    """
    spec = json.load(open(datadir.join("spec.json")))
    assert pyhf.Model(spec)

    patch = JsonPatch.from_string(open(datadir.join(patch_file)).read())
    bad_spec = patch.apply(spec)

    with pytest.raises(pyhf.exceptions.InvalidModifier):
        pyhf.Model(bad_spec)
def _patched_torrent(self, hash, patch_path=None):
    if patch_path is None:
        patch_path = join(self.config_path, f'{hash}.json-patch')
    result = self._get(self.endpoint / 'ajax.php' % {'action': 'torrent', 'hash': hash.upper()})
    if not exists(patch_path):
        logger.debug(f'{patch_path} does not exist, creating empty patch')
        with open(patch_path, 'w') as patch_file:
            dump([], patch_file)
    logger.trace(f'Applying json patch {patch_path}')
    with open(patch_path, 'r') as patch_file:
        patch = JsonPatch.from_string(patch_file.read())
        result = patch.apply(result)
    return result
def merge_epoch_and_patches(epoch_path, patches_path):
    # read the 0-day (original) file
    with open(epoch_path) as epoch_fp:
        obj = json.load(epoch_fp)
    # apply patches, one by one
    patches_applied = 0
    if os.path.exists(patches_path):
        with open(patches_path) as patches_fp:
            for patch_line in patches_fp:
                patch = JsonPatch.from_string(patch_line)
                patch.apply(obj, in_place=True)
                patches_applied += 1
    logger.info('Read %d patchsets from %s and applied them to %s',
                patches_applied, patches_path, epoch_path)
    return obj
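# A small round-trip sketch of the epoch + patch-log layout shared by
# persist_new_state() and merge_epoch_and_patches() above: a base JSON file
# plus one JSON patchset per line, applied in order. Paths and data here
# are hypothetical; the merge is re-implemented inline to stay
# self-contained.
import json
import os
import tempfile
from jsonpatch import JsonPatch

workdir = tempfile.mkdtemp()
epoch_path = os.path.join(workdir, 'state.json')
patches_path = os.path.join(workdir, 'state.patches')

with open(epoch_path, 'w') as fp:
    json.dump({'count': 0}, fp)
with open(patches_path, 'a') as fp:
    json.dump(JsonPatch.from_diff({'count': 0}, {'count': 1}).patch, fp)
    fp.write(os.linesep)

with open(epoch_path) as fp:
    state = json.load(fp)
with open(patches_path) as fp:
    for line in fp:                               # one patchset per line
        JsonPatch.from_string(line).apply(state, in_place=True)
assert state == {'count': 1}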
def patch_predicate(value, patches):
    patched_value = value
    for patch in patches:
        if "unwind" in patch:
            json_patch_list = [
                convert_to_json_patches(p, patched_value)
                for p in unwind(patch, value)
            ]
            # Flatten list of lists
            json_patch_list = itertools.chain(*json_patch_list)
        else:
            json_patch_list = convert_to_json_patches(patch, patched_value)
        # renamed from `patch` to avoid shadowing the outer loop variable
        for json_patch in json_patch_list:
            try:
                patched_value = JsonPatch(json_patch).apply(patched_value)
            except (JsonPatchTestFailed, JsonPatchConflict):
                pass
    return patched_value
def cli(filepath, jsonpatch, file_format):
    """Cocof runs the provided 'jsonpatch' modifications on the
    configuration file given with the 'filepath' argument. Use the
    '--format' option to tell the file format. If not given cocof will
    try to guess the file format based on the file extension. Use '-' as
    filepath for stdin, in which case the output goes to stdout and you
    must provide the format of the data via the '--format' option."""
    try:
        file_format = guessFileFormat(filepath, file_format)
        data = parse_file(filepath, file_format)
        patch = JsonPatch.from_string(jsonpatch)
        result = patch.apply(data)
        write_to_file(filepath, result, file_format)
    except BaseException as e:
        click.secho('Error: Something went wrong. Developer info:', fg='red', err=True)
        click.secho(str(e), err=True)
        return 101
    return 0
def update_school_preferences(resp):
    response_object = {'status': 'fail', 'message': 'School does not exist.'}
    user = User.query.get(resp)
    school = School.query.get(user.school_id)
    if not school:
        return jsonify(response_object), 400

    response_object = {'status': 'fail', 'message': 'Malformed patch.'}
    # get patch object from client
    patch_raw = request.get_json()
    if not patch_raw or not isinstance(patch_raw, list):
        return jsonify(response_object), 400

    # for any times or dates in the patch object, check correct formatting
    for edit in patch_raw:
        try:
            if str(edit['path']) not in EDITABLE_PREFERENCES:
                return jsonify(response_object), 400
        except KeyError:
            return jsonify(response_object), 400
        if edit['path'] == '/term_dates':
            for halfterm in edit['value']:  # dict
                try:
                    datetime.strptime(halfterm[0], DATE_FORMAT)
                    datetime.strptime(halfterm[1], DATE_FORMAT)
                except ValueError:
                    return jsonify(response_object), 400
        elif edit['path'] == '/period_start_times':
            for period in edit['value']:
                try:
                    datetime.strptime(edit['value'][period], TIME_FORMAT)
                except ValueError:
                    return jsonify(response_object), 400
        elif edit['path'] == '/period_length_in_minutes':
            try:
                int(edit['value'])
            except ValueError as e:
                response_object['message'] = str(e)
                return jsonify(response_object), 400
        elif edit['path'] == '/weeks_timetable':
            try:
                assert int(edit['value']) in [1, 2]
            except (AssertionError, ValueError):
                return jsonify(response_object), 400
        elif edit['path'] == '/days_notice':
            try:
                int(edit['value'])
            except ValueError:
                return jsonify(response_object), 400

    # convert raw JSON from client into JSONPatch format
    patch = JsonPatch(patch_raw)
    # get preferences JSON object from school
    preferences = school.preferences
    # Apply the patch to the dictionary instance of the model
    try:
        preferences_update = patch.apply(preferences)
    except (JsonPatchConflict, JsonPatchException):
        return jsonify(response_object), 400

    change = diff(preferences, preferences_update)
    if not change:
        response_object = {
            'status': 'success',
            'message': '{} preferences unchanged.'.format(school.name)
        }
        return jsonify(response_object), 200

    # check new preferences object for consistency, and process
    try:
        response_object = process_preferences(preferences_update)
    except BaseException as e:
        response_object = {'status': 'fail', 'message': str(e)}

    school.preferences = preferences_update
    db.session.commit()
    response_object = {
        'status': 'success',
        'message': 'Preferences for {} have been updated.'.format(school.name),
        'data': {'school': school.asdict()}
    }
    return jsonify(response_object), 200
def patch_event(event):
    """ :type event: dart.model.event.Event """
    p = JsonPatch(request.get_json())
    return update_event(event, Event.from_dict(p.apply(event.to_dict())))
def patch_subscription(subscription):
    """ :type subscription: dart.model.subscription.Subscription """
    p = JsonPatch(request.get_json())
    return update_subscription(subscription, Subscription.from_dict(p.apply(subscription.to_dict())))
username = "******" password = "******" auth = requests.post(aspace_url + "/users/" + username + "/login?password="******"session"] headers = {"X-ArchivesSpace-Session": session} # get repository # rep = requests.get(aspace_url+"/repositories",headers=headers).json() # print rep # get all resource ids res = requests.get(aspace_url + "/repositories/2/resources?all_ids=True", headers=headers).json() # print res # get resource record record = requests.get(aspace_url + "/repositories/2/resources/2", headers=headers).json() # get elements and values in record for key, value in record.items(): if key == "level": # if the value is collection or something else if value == "collection": # change it to file test = JsonPatch([{"op": "replace", "path": "/level", "value": "file"}]) applyPatch = test.apply(record, in_place=True) updated_level = requests.post( aspace_url + "/repositories/2/resources/2", headers=headers, data=json.dumps(applyPatch) ).json()
def patch(self):
    config_patch = config_patch_schema.load(request.get_json(), many=True)
    config = self._config_service.get_config()
    patched_config = JsonPatch(config_patch).apply(config)
    self._config_service.update_config(patched_config)
    return self._config_service.get_config(), 200
def patch_subscription(subscription):
    """ :type subscription: dart.model.subscription.Subscription """
    p = JsonPatch(request.get_json())
    return update_subscription(subscription, Subscription.from_dict(p.apply(subscription.to_dict())))
def patch_action(action):
    """ :type action: dart.model.action.Action """
    p = JsonPatch(request.get_json())
    return update_action(action, Action.from_dict(p.apply(action.to_dict())))
def compare_json_patches(a, b):
    """Return True if a and b describe the same JSON patch."""
    return JsonPatch.from_string(a) == JsonPatch.from_string(b)
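# Sketch of why the comparison above works: JsonPatch equality is defined
# on the parsed operations, so two serializations that differ only in
# whitespace or key order still compare equal.
from jsonpatch import JsonPatch

a = '[{"op": "add", "path": "/x", "value": 1}]'
b = '[{"path":"/x","value":1,"op":"add"}]'
assert JsonPatch.from_string(a) == JsonPatch.from_string(b)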
def patch_trigger(trigger):
    """ :type trigger: dart.model.trigger.Trigger """
    p = JsonPatch(request.get_json())
    return update_trigger(trigger, Trigger.from_dict(p.apply(trigger.to_dict())))
def patch_workflow(workflow):
    """ :type workflow: dart.model.workflow.Workflow """
    p = JsonPatch(request.get_json())
    return update_workflow(workflow, Workflow.from_dict(p.apply(workflow.to_dict())))
for y in z:
    dtitle = y['title']
    digrecord_uri = y['uri']
    if atitle == dtitle:
        indv_ao = requests.get(aspace_url + recordURI, headers=headers).json()
        # print(indv_ao)
        # print(aspace_url+recordURI)
        dig_instance = [{"instance_type": "digital_object",
                         "jsonmodel_type": "instance",
                         "digital_object": {"ref": digrecord_uri}}]
        # uncommented: the next line depends on `update` being defined here
        update = JsonPatch([{"op": "add", "path": "/instances", "value": dig_instance}])
        applyupdate = update.apply(indv_ao, in_place=True)
        # print(applyupdate)
        newInstances = requests.post(aspace_url + recordURI, headers=headers,
                                     data=json.dumps(applyupdate)).json()
        print(newInstances)

# ------
# link archival objects to digital object components--not actually possible!
# ao=requests.get(aspace_url+"/repositories/2/resources/3/tree",headers=headers).json()
# do=requests.get(aspace_url+"/repositories/2/digital_objects/1/tree",headers=headers).json()
# arch_children=[]
# for x in do["children"]:
atitle = x['title']
arecord = str(x['id'])
arch_records.append(str(arecord))
for z in digital_ob:
    for y in z:
        dtitle = y['title']
        digrecord_uri = y['uri']
        if atitle == dtitle:
            dig_instance = [{"instance_type": "digital_object",
                             "digital_object": {"ref": digrecord_uri}}]
            update = JsonPatch([{"op": "add", "path": "/instances", "value": dig_instance},
                                {"op": "add", "path": "/lock_version", "value": "1"}])
            applyupdate = update.apply(x, in_place=True)
            newInstances = requests.post(aspace_url + "/repositories/2/archival_objects/" + arecord,
                                         headers=headers, data=json.dumps(applyupdate)).json()
            print(newInstances)

# end_children={"id":arecord,"jsonmodel_type":"archival_object"}
def patch_datastore(datastore):
    """ :type datastore: dart.model.datastore.Datastore """
    p = JsonPatch(request.get_json())
    return update_datastore(datastore, Datastore.from_dict(p.apply(datastore.to_dict())))