def test_lists(self):
    """Swapping a list patch toggles the action between 'pull' and 'push'."""
    for action, inverse in (('pull', 'push'), ('push', 'pull')):
        original = (action, '', [1, 2])
        expected = (inverse, '', [1, 2])
        assert next(swap([original])) == expected
def test_addition(self):
    """Swapping toggles 'add'/'remove' while node path and changes are kept."""
    cases = [
        (("add", "", [("a", "b")]), ("remove", "", [("a", "b")])),
        (("remove", "a.b", [("c", "d")]), ("add", "a.b", [("c", "d")])),
    ]
    for original, expected in cases:
        assert next(swap([original])) == expected
def test_addition(self):
    """swap() inverts an 'add' entry into a 'remove' and vice versa."""
    add_patch = ('add', '', [('a', 'b')])
    assert next(swap([add_patch])) == ('remove', '', [('a', 'b')])

    remove_patch = ('remove', 'a.b', [('c', 'd')])
    assert next(swap([remove_patch])) == ('add', 'a.b', [('c', 'd')])
def diff(self):
    """Print a side-by-side, column-aligned diff of the two namelists.

    Computes the dictdiffer diff of ``self.nml1`` vs ``self.nml2`` (and its
    swapped inverse), renders each namelist with its diff annotations into a
    temporary file via ``format_nml``, left-pads the first column, and prints
    the two columns joined by ``|``.
    """
    diff = dictdiffer.diff(self.nml1, self.nml2, expand=True)
    # The swapped diff describes nml2 -> nml1, used to annotate the right column.
    diff_swap = dictdiffer.swap(
        dictdiffer.diff(self.nml1, self.nml2, expand=True))
    f1 = tempfile.NamedTemporaryFile(mode="w+")
    f2 = tempfile.NamedTemporaryFile(mode="w+")
    # f1 = open("nml_diff1", mode="w+")
    # f2 = open("nml_diff2", mode="w+")
    print("Formatting nml1...")
    format_nml(self.nml1, f1, diff)
    print("Formatting nml2...")
    format_nml(self.nml2, f2, diff_swap)
    # Flush and rewind so the formatted output can be read back below.
    f1.flush()
    f2.flush()
    f1.seek(0)
    f2.seek(0)
    # nml1_human_readable = open(args.nml1).read().split("\n")
    # nml2_human_readable = open(args.nml2).read().split("\n")
    nml1_human_readable = f1.read().split("\n")
    nml1_human_readable = [fr"{item}" for item in nml1_human_readable]
    # nml2_human_readable = [fr"{item}" for item in nml2_human_readable]
    # Width of the left column: longest formatted line plus padding.
    longest_line1 = len(max(nml1_human_readable, key=len)) + 3
    # PG: BAD HACK:
    # NOTE(review): the computed width above is immediately overwritten by a
    # module-level global (defined elsewhere in this file) — confirm intent.
    longest_line1 = longest_line_global
    f1.seek(0)
    f2.seek(0)
    # f3 holds the left column padded to a uniform width (ANSI-aware ljust,
    # presumably so escape codes don't skew the padding — defined elsewhere).
    f3 = tempfile.NamedTemporaryFile(mode="w+")
    for line in f1.readlines():
        f3.write(ansi_ljust(line.rstrip("\n"), longest_line1) + "\n")
        # f3.write(line.rstrip("\n").ljust(longest_line1)+"\n")
    f3.seek(0)
    # f4 holds the merged two-column output; zip_longest keeps going when one
    # file has more lines than the other.
    f4 = tempfile.NamedTemporaryFile(mode="w+")
    for old_line, new_line in zip_longest(f3.readlines(), f2.readlines()):
        if old_line and new_line:
            f4.write(old_line.rstrip("\n") + " | " + new_line)
        elif old_line:
            # Right column exhausted: left line with a bare separator.
            f4.write(old_line.rstrip("\n") + " |\n")
        elif new_line:
            # Left column exhausted: blank left cell, then the right line.
            f4.write(" ".ljust(longest_line1) + "| " + new_line)
    f4.seek(0)
    for line in f4.readlines():
        print(line.rstrip("\n"))
def revert(diff_result, destination):
    """
    A helper function that calls the swap function to revert a patched
    dictionary object.

    >>> first = {'a': 'b'}
    >>> second = {'a': 'c'}
    >>> revert(diff(first, second), second)
    {'a': 'b'}
    """
    # Inverting the diff and applying it to the destination undoes the patch.
    return patch(swap(diff_result), destination)
async def patch_to_version( db, data_path: str, otu_id: str, version: Union[str, int]) -> Tuple[Dict, Dict, List]: """Take a joined otu back in time to the passed ``version``. Uses the diffs in the change documents associated withthe otu. :param db: the Virtool database :param data_path: the Virtool data path :param otu_id: the id of the otu to patch :param version: the version to patch to :return: the current joined otu, patched otu, and the ids of changes reverted in the process """ # A list of history_ids reverted to produce the patched entry. reverted_history_ids = list() current = await virtool_core.otus.db.join(db, otu_id) or dict() if "version" in current and current["version"] == version: return current, deepcopy(current), reverted_history_ids patched = deepcopy(current) # Sort the changes by descending timestamp. async for change in db.history.find({"otu.id": otu_id}, sort=[("otu.version", -1)]): if change["otu"][ "version"] == "removed" or change["otu"]["version"] > version: reverted_history_ids.append(change["_id"]) if change["diff"] == "file": change["diff"] = await utils.read_diff_file( data_path, otu_id, change["otu"]["version"]) if change["method_name"] == "remove": patched = change["diff"] elif change["method_name"] == "create": patched = None else: diff = dictdiffer.swap(change["diff"]) patched = dictdiffer.patch(diff, patched) else: break if current == {}: current = None return current, patched, reverted_history_ids
async def patch_to_version(db, otu_id, version): """ Take a joined otu back in time to the passed ``version``. Uses the diffs in the change documents associated with the otu. :param db: the application database client :type db: :class:`~motor.motor_asyncio.AsyncIOMotorClient` :param otu_id: the id of the otu to patch :type otu_id: str :param version: the version to patch to :type version: str or int :return: the current joined otu, patched otu, and the ids of changes reverted in the process :rtype: Coroutine[tuple] """ # A list of history_ids reverted to produce the patched entry. reverted_history_ids = list() current = await virtool.db.otus.join(db, otu_id) or dict() if "version" in current and current["version"] == version: return current, deepcopy(current), reverted_history_ids patched = deepcopy(current) # Sort the changes by descending timestamp. async for change in db.history.find({"otu.id": otu_id}, sort=[("otu.version", -1)]): if change["otu"][ "version"] == "removed" or change["otu"]["version"] > version: reverted_history_ids.append(change["_id"]) if change["method_name"] == "remove": patched = change["diff"] elif change["method_name"] == "create": patched = None else: diff = dictdiffer.swap(change["diff"]) patched = dictdiffer.patch(diff, patched) else: break if current == {}: current = None return current, patched, reverted_history_ids
def patch_otu_to_version(db, settings: dict, otu_id: str, version: Union[str, int]) -> tuple: """ Take a joined otu back in time to the passed ``version``. Uses the diffs in the change documents associated with the otu. :param db: the application database object :param settings: the application settings :param otu_id: the id of the otu to patch :param version: the version to patch to :return: the current joined otu, patched otu, and the ids of changes reverted in the process """ # A list of history_ids reverted to produce the patched entry. reverted_history_ids = list() current = join_otu(db, otu_id) or dict() if "version" in current and current["version"] == version: return current, deepcopy(current), reverted_history_ids patched = deepcopy(current) # Sort the changes by descending timestamp. for change in db.history.find({"otu.id": otu_id}, sort=[("otu.version", -1)]): if change["otu"][ "version"] == "removed" or change["otu"]["version"] > version: reverted_history_ids.append(change["_id"]) if change["diff"] == "file": change["diff"] = read_diff_file(settings["data_path"], otu_id, change["otu"]["version"]) if change["method_name"] == "remove": patched = change["diff"] elif change["method_name"] == "create": patched = None else: diff = dictdiffer.swap(change["diff"]) patched = dictdiffer.patch(diff, patched) else: break if current == {}: current = None return current, patched, reverted_history_ids
async def patch_to_verified(db, otu_id):
    """
    Patch the otu identified by ``otu_id`` back to its most recent verified
    version by reverting history changes in descending version order.

    Returns the current document if it is already verified, ``None`` if a
    'create' change is reached before any verified version is found, and
    implicitly ``None`` when the history is exhausted without one.

    :param db: the application database object
    :param otu_id: the id of the otu to patch
    :return: the patched otu, or ``None``

    """
    # Fall back to an empty dict when the otu no longer exists.
    current = await virtool.otus.db.join(db, otu_id) or dict()

    # Already verified: no patching needed.
    if current and current["verified"]:
        return current

    patched = deepcopy(current)

    # Walk history newest-first, reverting one change at a time.
    async for change in db.history.find({"otu.id": otu_id},
                                        sort=[("otu.version", -1)]):
        if change["method_name"] == "remove":
            # Undoing a removal restores the document stored in the diff.
            patched = change["diff"]
        elif change["method_name"] == "create":
            # Reached the creation without finding a verified version.
            return None
        else:
            # Apply the inverted diff to walk the document backwards.
            diff = dictdiffer.swap(change["diff"])
            patched = dictdiffer.patch(diff, patched)

        # Stop as soon as a verified version is reconstructed.
        if patched["verified"]:
            return patched
async def patch_to_verified(app, otu_id: str) -> Union[dict, None]:
    """
    Patch the OTU identified by `otu_id` to the last verified version.

    Walks the OTU's history newest-first, reverting one change at a time
    until a verified version is reconstructed. Returns ``None`` if a
    'create' change is reached first (and implicitly ``None`` when history
    is exhausted without a verified version).

    :param app: the application object
    :param otu_id: the ID of the OTU to patch
    :return: the patched otu

    """
    db = app["db"]

    # Fall back to an empty dict when the otu no longer exists.
    current = await virtool.otus.db.join(db, otu_id) or dict()

    # Already verified: no patching needed.
    if current and current["verified"]:
        return current

    patched = deepcopy(current)

    async for change in db.history.find({"otu.id": otu_id},
                                        sort=[("otu.version", -1)]):
        # The sentinel "file" means the diff is stored on disk, not inline
        # in the change document.
        if change["diff"] == "file":
            change["diff"] = await virtool.history.utils.read_diff_file(
                app["settings"]["data_path"],
                otu_id,
                change["otu"]["version"]
            )

        if change["method_name"] == "remove":
            # Undoing a removal restores the document stored in the diff.
            patched = change["diff"]
        elif change["method_name"] == "create":
            # Reached the creation without finding a verified version.
            return None
        else:
            # Apply the inverted diff to walk the document backwards.
            diff = dictdiffer.swap(change["diff"])
            patched = dictdiffer.patch(diff, patched)

        # Stop as soon as a verified version is reconstructed.
        if patched["verified"]:
            return patched
def downgrade(self, migration_name: str, graph: Optional[MigrationsGraph] = None):
    """
    Downgrade db to the given migration
    :param migration_name: target migration name
    :param graph: Optional. Migrations graph. If omitted, then it will be
     loaded
    :return:
    """
    if graph is None:
        log.debug('Loading migration files...')
        graph = self.build_graph()

    log.debug('Loading schema from database...')
    left_schema = self.load_db_schema()

    if migration_name not in graph.migrations:
        raise MigrationGraphError(f'Migration {migration_name} not found')

    log.debug('Precalculating schema diffs...')
    # Collect schema diffs across all migrations
    # Forward patches are replayed from an empty schema so that each
    # migration's per-action diffs can later be swapped (inverted) to
    # walk the real schema backwards.
    migration_diffs = {}  # {migration_name: [action1_diff, ...]}
    temp_left_schema = Schema()
    for migration in graph.walk_down(graph.initial, unapplied_only=False):
        migration_diffs[migration.name] = []

        for action in migration.get_actions():
            forward_patch = action.to_schema_patch(temp_left_schema)
            migration_diffs[migration.name].append(forward_patch)
            try:
                temp_left_schema = patch(forward_patch, temp_left_schema)
            except (TypeError, ValueError, KeyError) as e:
                raise ActionError(
                    f"Unable to apply schema patch of {action!r}. More likely that the "
                    f"schema is corrupted. You can use schema repair tools to fix this issue"
                ) from e

    db = self.db
    # Walk applied migrations from the latest one down towards the target.
    for migration in graph.walk_up(graph.last, applied_only=True):
        if migration.name == migration_name:
            break  # We've reached the target migration

        log.info('Downgrading %s...', migration.name)
        action_diffs = zip(migration.get_actions(),
                           migration_diffs[migration.name],
                           range(1, len(migration.get_actions()) + 1))
        # Actions are undone in reverse of their forward order.
        for action_object, action_diff, idx in reversed(
                list(action_diffs)):
            log.debug('> [%d] %s', idx, str(action_object))
            try:
                # swap() inverts the forward diff; patching with it
                # reverts this action's schema change.
                left_schema = patch(list(swap(action_diff)), left_schema)
            except (TypeError, ValueError, KeyError) as e:
                raise ActionError(
                    f"Unable to apply schema patch of {action_object!r}. More likely that the "
                    f"schema is corrupted. You can use schema repair tools to fix this issue"
                ) from e

            # Dummy actions and schema-only runs skip touching real data.
            if not action_object.dummy_action and not runtime_flags.schema_only:
                action_object.prepare(db, left_schema, migration.policy)
                action_object.run_backward()
                action_object.cleanup()

        graph.migrations[migration.name].applied = False

    if not runtime_flags.dry_run:
        log.debug('Writing db schema and migrations graph...')
        self.write_db_schema(left_schema)
        self.write_db_migrations_graph(graph)

    self._verify_schema(left_schema)
def test_changes(self):
    """Swapping a 'change' patch reverses its (old, new) value pair."""
    forward = ('change', '', ('a', 'b'))
    backward = ('change', '', ('b', 'a'))
    assert next(swap([forward])) == backward
"fork_count": 20, "stargazers": ["/users/20", "/users/30", "/users/40"], "settings": { "assignees": [100, 101, 202], } } result = diff(first, second) # 获取的是相对于第一个,第二个更改了什么 print(list(result)) """ [('change', 'title', ('hello', 'hellooo')), ('add', 'stargazers', [(2, '/users/40')]), ('change', ['settings', 'assignees', 2], (201, 202)), ('remove', '', [('test', 1234)])] """ result = diff(first, second) patched = patch(result, first) assert patched == second diff = diff(first, second) diff = list(diff) print("diff:", diff) swapped = swap(result) print("swapped:", list(swapped)) mark = json.dumps(diff, indent=2, ensure_ascii=False) print("更改:", mark)
def test_changes(self):
    """A swapped 'change' entry flips the old and new values."""
    entry = ("change", "", ("a", "b"))
    expected = ("change", "", ("b", "a"))
    assert next(swap([entry])) == expected