def history(id):
    """Render the edit-history page for the post identified by *id*."""
    post = query_db(g.db, "SELECT * FROM posts WHERE id = ?", [id], one=True)
    edits = query_db(g.db, "SELECT * FROM edits WHERE postid = ?", [id])
    for entry in edits:
        # Each row stores a serialized ndiff delta; rebuild both sides,
        # then convert newlines to <br /> for HTML display.
        delta = loads(entry['edit'])
        for key, side in (('old', 1), ('new', 2)):
            text = ''.join(restore(delta, side))
            entry[key] = sub(r'\n', '<br />\n', text)
    return render_template('history.html', post=post, edits=edits)
def wiki_history(title):
    # Render the revision history for the wiki page named *title*.
    # The page row is joined with its creator's username for display.
    page = query_db(g.db, "SELECT pages.id, pages.title, pages.content, pages.created, pages.created_by, pages.edited, users.username FROM pages INNER JOIN users ON pages.created_by = users.id WHERE title = ?", [title], one=True)
    edits = query_db(g.db, "SELECT * FROM page_edits INNER JOIN users ON page_edits.userid = users.id WHERE pageid = ?", [page['id']])
    for edit in edits:
        # The 'edit' column holds a serialized ndiff delta; restore()
        # extracts side 1 (before) and side 2 (after).
        delta = loads(edit['edit'])
        edit['old'] = ''.join(restore(delta, 1))
        edit['new'] = ''.join(restore(delta, 2))
        # Convert newlines to <br /> tags for HTML rendering.
        edit['old'] = sub(r'\n', '<br />\n', edit['old'])
        edit['new'] = sub(r'\n', '<br />\n', edit['new'])
    return render_template('wiki_history.html', page=page, edits=edits, title=title, show_edit=can_edit(page))
def unapply(self, a):
    # Revert this edit on *a*: rebuild the "before" text (side 1 of the
    # stored ndiff delta) at the location described by self.address.
    if not self.address.lines:
        # Edit applies to the whole document; just rebuild side 1.
        return ''.join(difflib.restore(self.diffs, 1))
    # Work on a deep copy so the caller's object is left untouched.
    a1 = copy.deepcopy(a)
    # Navigate to the parent container, then overwrite the addressed field
    # with the reconstructed "before" text.
    a2 = navigate(a1, Address(self.address.lines[:-1]))
    a2[self.address.lines[-1].key] = ''.join(difflib.restore(
        self.diffs, 1))
    return a1
def edited_fragments(self, old_frags, new_frags):
    """Return (old, new) text pairs for each edited region between the two
    fragment sequences.

    Returns an empty list if the diff cannot be computed (best-effort).
    """
    try:
        raw_diff = ndiff(old_frags, new_frags)
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt; keep the best-effort fallback.
        return []
    edits = []
    for edit in self.__diff_fragments(raw_diff):
        # Reconstruct both sides of this localized delta.
        old_edit = '\n'.join(restore(edit, 1))
        new_edit = '\n'.join(restore(edit, 2))
        edits.append((old_edit, new_edit))
    return edits
def edited_fragments(self, old_frags, new_frags):
    """Diff two fragment sequences and return one (old, new) string pair
    per edited region found by self.__diff_fragments.

    Returns [] if ndiff fails, preserving the original best-effort contract.
    """
    try:
        raw_diff = ndiff(old_frags, new_frags)
    except Exception:
        # BUG FIX: narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are no longer trapped.
        return []
    edits = []
    for edit in self.__diff_fragments(raw_diff):
        old_edit = '\n'.join(restore(edit, 1))
        new_edit = '\n'.join(restore(edit, 2))
        edits.append((old_edit, new_edit))
    return edits
def get_sitelevel_changes(self, file_name):
    # Parse a debug log for "<name>list: <values>" records and build a
    # mapping from the original site names to their changed names.
    with open(file_name) as f:
        for i, ln in enumerate(f.readlines()):
            # DEBUG log data
            # (Contain data for site level names and/or data)
            site_level_obj = re.search('\s(\w*)list:\s(.*[^\s+])', ln)
            if site_level_obj is not None:
                sitelevels = site_level_obj.group().strip().split(':')
                if sitelevels[1].strip()[0:6] == 'siteid':
                    pass
                else:
                    self.log_dict[
                        'sitelevels_{}'.format(sitelevels[0])] = sitelevels[1].strip()
            else:
                pass
    # NOTE(review): redundant — the with-statement already closed the file.
    f.close()
    site_to = self.log_dict['sitelevels_changed_site_list']
    site_to_list = site_to.split()
    site_from = self.log_dict['sitelevels_list']
    if ' or ' in site_from:
        print('in or block')
        # Protect literal " or " inside a site name from the whitespace
        # split, then put the spaces back per token.
        # NOTE(review): re.sub('_', ' ') also rewrites any pre-existing
        # underscores in site names — confirm that is acceptable.
        site_from = site_from.replace(' or ', '_or_')
        site_from_list = site_from.split()
        site_from_list = [re.sub('_', ' ', x) for x in site_from_list]
    else:
        site_from_list = site_from.split()
    original_site_list = [None]*len(site_to_list)
    indexer = 0
    # Character-level diff of the two whitespace-joined name strings:
    # 'changed' holds site_to's characters, 'revert' holds site_from's.
    changed = list(difflib.restore(difflib.ndiff(site_to, site_from), 1))
    revert = list(difflib.restore(difflib.ndiff(site_to, site_from), 2))
    print(changed)
    print(revert)
    if len(changed) == len(revert):
        # Walk both character lists in lockstep, rebuilding site_from's
        # words aligned to site_to's word boundaries (spaces in 'changed').
        for i, s in enumerate(changed):
            if i == 0 or s == ' ':
                word = revert[i]
                if i == 0:
                    original_site_list[indexer] = word
                if s == ' ':
                    indexer += 1
            elif i > 0:
                word = word + revert[i]
                original_site_list[indexer] = word
        original_site_list = [x.strip() for x in original_site_list]
        site_from_list = original_site_list
    elif len(site_to_list) == len(site_from_list):
        pass
    change_dict = {}
    for i, value in enumerate(site_from_list):
        change_dict[value] = site_to_list[i]
    return change_dict
def edited_tokens(new_tokens, old_tokens):
    """Return (old, new, start, end) tuples for each edited token span
    between the two token sequences.

    Returns [] if the diff cannot be computed (best-effort).
    """
    try:
        raw_diff = ndiff(new_tokens, old_tokens)
    except Exception:
        # BUG FIX: was a bare `except:`; stop trapping SystemExit and
        # KeyboardInterrupt while keeping the best-effort fallback.
        return []
    edits = []
    for edit, start, end in __diff_tokens(raw_diff):
        # Reconstruct both sides of the localized delta as token strings.
        old_edit = ' '.join(restore(edit, 1))
        new_edit = ' '.join(restore(edit, 2))
        edits.append((old_edit, new_edit, start, end))
    return edits
def restore (self, string_to_restore):
    # Recover both sides of a serialized diff string.
    # Slice off the banner header lines and the end-of-diff trailer that
    # wrap the actual delta lines.
    diffie = string_to_restore.split('\n')[len(self.umit_top_banner):-(len\
(self.end_diff)+1)]
    self.restored1 = []
    # Side 1: the original text's lines (re-append the '\n' split() removed).
    for i in restore (diffie, 1):
        self.restored1.append (i+'\n')
    self.restored2 = []
    # Side 2: the modified text's lines.
    for i in restore (diffie, 2):
        self.restored2.append (i+'\n')
    # NOTE(review): this method shares its name with difflib's restore;
    # the inner restore() calls presumably resolve to a module-level
    # `from difflib import restore` — confirm.
    return self.restored1, self.restored2
def restore(self, string_to_restore):
    """Rebuild both sides of a serialized diff string.

    Strips the banner header and the end-of-diff trailer, reconstructs the
    'before' and 'after' line lists (newlines re-attached), stores them on
    self.restored1/self.restored2 and returns both.
    """
    all_lines = string_to_restore.split('\n')
    delta = all_lines[len(self.top_banner):-(len(self.end_diff) + 1)]
    self.restored1 = [line + '\n' for line in restore(delta, 1)]
    self.restored2 = [line + '\n' for line in restore(delta, 2)]
    return self.restored1, self.restored2
def get_sitelevel_changes(self, file_name):
    # Parse a debug log for "<name>list: <values>" records, then build a
    # dict mapping each original site name to its changed name.
    with open(file_name) as f:
        for i, ln in enumerate(f.readlines()):
            # DEBUG log data
            # (Contain data for site level names and/or data)
            site_level_obj = re.search('\s(\w*)list:\s(.*[^\s+])', ln)
            if site_level_obj is not None:
                sitelevels = site_level_obj.group().strip().split(':')
                if sitelevels[1].strip()[0:6] == 'siteid':
                    pass
                else:
                    self.log_dict['sitelevels_{}'.format(
                        sitelevels[0])] = sitelevels[1].strip()
            else:
                pass
    # NOTE(review): redundant close — the with-block already closed f.
    f.close()
    site_to = self.log_dict['sitelevels_changed_site_list']
    site_to_list = site_to.split()
    site_from = self.log_dict['sitelevels_list']
    if ' or ' in site_from:
        # Shield literal " or " inside site names from the whitespace
        # split, then restore the spaces token by token.
        # NOTE(review): re.sub('_', ' ') also rewrites pre-existing
        # underscores in names — confirm intended.
        site_from = site_from.replace(' or ', '_or_')
        site_from_list = site_from.split()
        site_from_list = [re.sub('_', ' ', x) for x in site_from_list]
    else:
        site_from_list = site_from.split()
    original_site_list = [None] * len(site_to_list)
    indexer = 0
    # Character-level diff of the two joined name strings: 'changed' is
    # site_to's characters (side 1), 'revert' is site_from's (side 2).
    changed = list(difflib.restore(difflib.ndiff(site_to, site_from), 1))
    revert = list(difflib.restore(difflib.ndiff(site_to, site_from), 2))
    if len(changed) == len(revert):
        # Rebuild site_from's words aligned to site_to's word boundaries
        # (spaces observed in 'changed').
        for i, s in enumerate(changed):
            if i == 0 or s == ' ':
                word = revert[i]
                if i == 0:
                    original_site_list[indexer] = word
                if s == ' ':
                    indexer += 1
            elif i > 0:
                word = word + revert[i]
                original_site_list[indexer] = word
        original_site_list = [x.strip() for x in original_site_list]
        site_from_list = original_site_list
    elif len(site_to_list) == len(site_from_list):
        pass
    change_dict = {}
    for i, value in enumerate(site_from_list):
        change_dict[value] = site_to_list[i]
    return change_dict
def __applyChanges(forward=False):
    # Redo (forward=True) or undo (forward=False) one stored change on the
    # current buffer by rebuilding its text from the saved ndiff delta.
    global lines
    global UndoStacks
    global UndoIndex
    changes, stack, undoIndex = UndoStacks[CurrentBuffer]
    change = changes[UndoIndex]
    # NOTE(review): bare expression below is a no-op — dead code or a
    # missing assignment? confirm against version history.
    stack[UndoIndex]
    if forward:
        UndoIndex += 1
        # Side 2 of the delta is the "after" text.
        lines = (''.join(difflib.restore(change, 2))).splitlines(1)
    else:
        UndoIndex -= 1
        # Side 1 of the delta is the "before" text.
        lines = (''.join(difflib.restore(change, 1))).splitlines(1)
    print "Index: %d" % UndoIndex
def build_snap_ent(entry):
    # Build (desired, state) attribute dictionaries for one snapshot entry
    # element: 'desired' holds the configured values, 'state' the values
    # observed on the client (the current_* attributes).
    basefields = []
    if entry.tag in ['Package', 'Service']:
        basefields += ['type']
    desired = dict([(key, u_str(entry.get(key))) for key in basefields])
    state = dict([(key, u_str(entry.get(key))) for key in basefields])
    desired.update([(key, u_str(entry.get(key))) for key in \
                    datafields[entry.tag]])
    if entry.tag == 'ConfigFile' or \
       ((entry.tag == 'Path') and (entry.get('type') == 'file')):
        # File-like entries additionally carry desired/current contents.
        if entry.text == None:
            desired['contents'] = None
        else:
            if entry.get('encoding', 'ascii') == 'ascii':
                desired['contents'] = u_str(entry.text)
            else:
                # Non-ascii payloads are stored base64-encoded.
                desired['contents'] = u_str(binascii.a2b_base64(entry.text))
        if 'current_bfile' in entry.attrib:
            # Full current file contents, base64-encoded.
            state['contents'] = u_str(binascii.a2b_base64( \
                entry.get('current_bfile')))
        elif 'current_bdiff' in entry.attrib:
            # Only a base64-encoded ndiff delta was stored; side 1 of the
            # delta is the current (on-client) content.
            diff = binascii.a2b_base64(entry.get('current_bdiff'))
            state['contents'] = u_str( \
                '\n'.join(difflib.restore(diff.split('\n'), 1)))
    state.update([(key, u_str(entry.get('current_' + key, entry.get(key)))) \
                  for key in datafields[entry.tag]])
    if entry.tag in ['ConfigFile', 'Path'] and entry.get('exists', 'true') == 'false':
        # The entry does not exist on the client: no state to report.
        state = None
    return [desired, state]
def process_with_args(args):
    """Check one MOxUnit .m test file and optionally rewrite it with fixes.

    Exits with status 1 when the file is not a .m file or does not define
    a test suite.
    """
    logging.basicConfig(format='%(message)s', level=args.loglevel)
    filename = args.filename
    if not filename.endswith('.m'):
        logging.error('File "%s" is not .m file, skip' % filename)
        sys.exit(1)
    tks = Tokenizer.from_file(filename)
    diff_generator = get_fix_test_diff(tks)
    if diff_generator is None:
        # BUG FIX: the adjacent string literals lacked a separating space,
        # producing "...a test suitefor MOxUnit...".
        logging.error('File "%s" does not seem to define a test suite '
                      'for MOxUnit, skip' % filename)
        sys.exit(1)
    diff = list(diff_generator)
    if content_changes(diff):
        logging.info('Changes for "%s"\n%s' % (filename,
                                               get_diff_summary(diff)))
        if args.apply:
            # Side 2 of the ndiff delta is the fixed file content.
            fixed_lines = list(difflib.restore(diff, 2))
            with open(filename, 'w') as f:
                f.write('\n'.join(fixed_lines))
            logging.info('File "%s" was rewritten' % filename)
        else:
            logging.info('Changes not applied; use --apply to rewrite "%s"'
                         % filename)
    else:
        logging.info('File "%s" does not require changes' % filename)
def build_snap_ent(entry):
    """Build the (desired, state) attribute dicts for a snapshot entry.

    'desired' holds the configured values; 'state' holds the values
    observed on the client (current_* attributes). Returns [desired, state];
    state is None when the entry is reported as absent on the client.
    """
    basefields = []
    if entry.tag in ['Package', 'Service']:
        basefields += ['type']
    desired = dict([(key, u_str(entry.get(key))) for key in basefields])
    state = dict([(key, u_str(entry.get(key))) for key in basefields])
    desired.update([(key, u_str(entry.get(key)))
                    for key in datafields[entry.tag]])
    if entry.tag == 'ConfigFile' or \
       ((entry.tag == 'Path') and (entry.get('type') == 'file')):
        # File-like entries additionally carry desired/current contents.
        # FIX: compare to None with `is`, not `==`.
        if entry.text is None:
            desired['contents'] = None
        else:
            if entry.get('encoding', 'ascii') == 'ascii':
                desired['contents'] = u_str(entry.text)
            else:
                # Non-ascii payloads are stored base64-encoded.
                desired['contents'] = u_str(b64decode(entry.text))
        if 'current_bfile' in entry.attrib:
            # Full current file contents, base64-encoded.
            state['contents'] = u_str(b64decode(entry.get('current_bfile')))
        elif 'current_bdiff' in entry.attrib:
            # Only a base64 ndiff delta was stored; side 1 of the delta
            # is the current (on-client) content.
            diff = b64decode(entry.get('current_bdiff'))
            state['contents'] = u_str(
                '\n'.join(difflib.restore(diff.split('\n'), 1)))
    state.update([(key, u_str(entry.get('current_' + key, entry.get(key))))
                  for key in datafields[entry.tag]])
    if entry.tag in ['ConfigFile', 'Path'] and entry.get('exists', 'true') == 'false':
        # Entry is absent on the client: no meaningful state.
        state = None
    return [desired, state]
def GetCurrentEntry(self, client, e_type, e_name):
    # PullSource hook: return the current (on-client) values
    # [owner, group, perms, contents-or-None] for one bad entry.
    try:
        c_inst = Client.objects.filter(name=client)[0]
    except IndexError:
        self.logger.error("Unknown client: %s" % client)
        raise Bcfg2.Server.Plugin.PluginExecutionError
    result = c_inst.current_interaction.bad().filter(entry__kind=e_type,
                                                     entry__name=e_name)
    if not result:
        raise Bcfg2.Server.Plugin.PluginExecutionError
    entry = result[0]
    ret = []
    data = ('owner', 'group', 'perms')
    for t in data:
        # Prefer the observed current_* value; fall back to the configured one.
        if getattr(entry.reason, "current_%s" % t) == '':
            ret.append(getattr(entry.reason, t))
        else:
            ret.append(getattr(entry.reason, "current_%s" % t))
    if entry.reason.is_sensitive:
        # Sensitive contents are never exposed.
        raise Bcfg2.Server.Plugin.PluginExecutionError
    elif entry.reason.current_diff != '':
        if entry.reason.is_binary:
            ret.append(binascii.a2b_base64(entry.reason.current_diff))
        else:
            # Side 1 of the stored ndiff delta is the current file content.
            ret.append('\n'.join(difflib.restore(\
                entry.reason.current_diff.split('\n'), 1)))
    elif entry.reason.is_binary:
        # If len is zero the object was too large to store
        raise Bcfg2.Server.Plugin.PluginExecutionError
    else:
        ret.append(None)
    return ret
def GetCurrentEntry(self, client, e_type, e_name):
    # PullSource hook: return the current (on-client) values
    # [owner, group, perms, contents-or-None] for one bad entry.
    try:
        c_inst = Client.objects.filter(name=client)[0]
    except IndexError:
        self.logger.error("Unknown client: %s" % client)
        raise Bcfg2.Server.Plugin.PluginExecutionError
    result = c_inst.current_interaction.bad().filter(entry__kind=e_type,
                                                     entry__name=e_name)
    if not result:
        raise Bcfg2.Server.Plugin.PluginExecutionError
    entry = result[0]
    ret = []
    data = ("owner", "group", "perms")
    for t in data:
        # Prefer the observed current_* value; fall back to the configured one.
        if getattr(entry.reason, "current_%s" % t) == "":
            ret.append(getattr(entry.reason, t))
        else:
            ret.append(getattr(entry.reason, "current_%s" % t))
    if entry.reason.is_sensitive:
        # Sensitive contents are never exposed.
        raise Bcfg2.Server.Plugin.PluginExecutionError
    elif len(entry.reason.unpruned) != 0:
        # presumably extra paths found under the entry — confirm semantics
        # of 'unpruned' against the Reason model.
        ret.append("\n".join(entry.reason.unpruned))
    elif entry.reason.current_diff != "":
        if entry.reason.is_binary:
            ret.append(b64decode(entry.reason.current_diff))
        else:
            # Side 1 of the stored ndiff delta is the current file content.
            ret.append("\n".join(difflib.restore(entry.reason.current_diff.split("\n"), 1)))
    elif entry.reason.is_binary:
        # If len is zero the object was too large to store
        raise Bcfg2.Server.Plugin.PluginExecutionError
    else:
        ret.append(None)
    return ret
def process_with_args(args):
    """Validate one MOxUnit .m test file; rewrite it when --apply is set.

    Exits with status 1 for non-.m files and files that define no test suite.
    """
    logging.basicConfig(format='%(message)s', level=args.loglevel)
    filename = args.filename
    if not filename.endswith('.m'):
        logging.error('File "%s" is not .m file, skip' % filename)
        sys.exit(1)
    tks = Tokenizer.from_file(filename)
    diff_generator = get_fix_test_diff(tks)
    if diff_generator is None:
        # BUG FIX: the implicitly-concatenated literals were missing a
        # space between "suite" and "for".
        logging.error('File "%s" does not seem to define a test suite '
                      'for MOxUnit, skip' % filename)
        sys.exit(1)
    diff = list(diff_generator)
    if content_changes(diff):
        logging.info(
            'Changes for "%s"\n%s' % (filename, get_diff_summary(diff)))
        if args.apply:
            # Side 2 of the ndiff delta is the fixed content.
            fixed_lines = list(difflib.restore(diff, 2))
            with open(filename, 'w') as f:
                f.write('\n'.join(fixed_lines))
            logging.info('File "%s" was rewritten' % filename)
        else:
            logging.info('Changes not applied; use --apply to rewrite "%s"'
                         % filename)
    else:
        logging.info('File "%s" does not require changes' % filename)
def __init__(self, lines):
    """Parse one per-file section of a format-patch diff.

    ``lines`` holds the section's raw lines; lines[2]/lines[3] must be the
    '--- a/...' / '+++ b/...' header pair. Only .java files have their
    before/after contents reconstructed.
    """
    assert lines[2].startswith('--- '), lines[:3]
    assert lines[3].startswith('+++ '), lines[:3]
    self.a_path = lines[2][4:].replace('\n', '').replace('a/', '')
    self.b_path = lines[3][4:].replace('\n', '').replace('b/', '')
    # /dev/null on either side marks file creation or deletion.
    self.new_file = self.a_path == FormatPatchDiff.DEV_NULL
    self.deleted_file = self.b_path == FormatPatchDiff.DEV_NULL
    self.file_name = self.a_path if self.deleted_file else self.b_path
    self.before_contents = ['']
    self.after_contents = ['']
    # IDIOM FIX: "x not in s" instead of "not x in s".
    if '.java' not in self.a_path and '.java' not in self.b_path:
        return
    # Insert a space after each line's prefix character — presumably
    # converting '+x'/'-x' diff lines into the '+ x'/'- x' form that
    # difflib.restore expects; confirm against the patch producer.
    self.normal_diff = list(map(lambda x: x[0] + " " + x[1:], lines[5:]))
    if not self.new_file:
        self.before_contents = list(restore(self.normal_diff, 1))
    if not self.deleted_file:
        self.after_contents = list(restore(self.normal_diff, 2))
def test_apply_blackadder(test_case):
    # End-to-end fixer test: the test-case file stores an ndiff delta;
    # side 1 is the input source, side 2 the expected fixed source.
    # NOTE: Get the file from the test case name (undoing previous slimming)
    lines = list((DATA_DIR / test_case).read_text().splitlines())
    # Construct before/after fixer applied
    unchanged = "\n".join(difflib.restore(lines, 1))  # Lines starting with '-'
    expected = "\n".join(difflib.restore(lines, 2))  # Lines starting with '+'
    # Fixer works as expected
    fixed = blackadder.format_str_override(unchanged, )
    assert fixed == expected
    # Fixer doesn't change critical compiler artifacts in any way
    unchanged_compilation = vyper.compile_code(unchanged, output_formats=OUTPUT_FORMATS)
    fixed_compilation = vyper.compile_code(fixed, output_formats=OUTPUT_FORMATS)
    for artifact in OUTPUT_FORMATS:
        assert unchanged_compilation[artifact] == fixed_compilation[artifact]
def rollback(self):
    # Undo the most recent transaction: restore every attribute captured
    # in it to its previous value.
    transaction_to_rollback = self._transactions.pop()
    for attribute, value in transaction_to_rollback.items():
        self.call_cascade(value)
        if attribute in RESERVED:
            continue
        # TODO: delegate to another method
        if isinstance(value, str):
            # TODO: delegate to another method too
            # Diff the saved string against the current one, then rebuild
            # side 1 (the saved value) and write it back.
            diff = ndiff(value.splitlines(), getattr(self, attribute).splitlines())
            diff = list(diff)
            setattr(self, attribute, "\n".join(restore(diff, 1)))
        else:
            setattr(self, attribute, value)
def GetCurrentEntry(self, client, e_type, e_name):
    """GetCurrentEntry: Used by PullSource.

    Returns the current on-client values [owner, group, mode, contents]
    for one bad entry of the given type and name.
    """
    try:
        c_inst = Client.objects.get(name=client)
    except ObjectDoesNotExist:
        self.logger.error("Unknown client: %s" % client)
        raise PluginExecutionError
    except MultipleObjectsReturned:
        self.logger.error("%s Inconsistency: Multiple entries for %s." %
                          (self.__class__.__name__, client))
        raise PluginExecutionError
    try:
        # Map the entry kind (e.g. "Path") to its model class.
        cls = BaseEntry.entry_from_name(e_type + "Entry")
        result = cls.objects.filter(name=e_name, state=TYPE_BAD,
                                    interaction=c_inst.current_interaction)
    except ValueError:
        self.logger.error("Unhandled type %s" % e_type)
        raise PluginExecutionError
    if not result:
        raise PluginExecutionError
    entry = result[0]
    ret = []
    for p_entry in ('owner', 'group', 'mode'):
        # Prefer the observed current permission; fall back to the target.
        this_entry = getattr(entry.current_perms, p_entry)
        if this_entry == '':
            ret.append(getattr(entry.target_perms, p_entry))
        else:
            ret.append(this_entry)
    if entry.entry_type == 'Path':
        if entry.is_sensitive():
            # Sensitive contents are never exposed.
            raise PluginExecutionError
        elif entry.detail_type == PathEntry.DETAIL_PRUNED:
            ret.append('\n'.join(entry.details))
        elif entry.is_binary():
            ret.append(b64decode(entry.details))
        elif entry.is_diff():
            # Side 1 of the stored ndiff delta is the current content.
            ret.append('\n'.join(difflib.restore(\
                entry.details.split('\n'), 1)))
        elif entry.is_too_large():
            # If len is zero the object was too large to store
            raise PluginExecutionError
        else:
            ret.append(None)
    return ret
def _interactive_patch(src_lines, dst_lines):
    # Interactively merge a diff: for each changed block ask the user
    # whether to take the src version ('y') or keep dst ('n'); 'q' quits.
    differ = difflib.Differ()
    diff = differ.compare(dst_lines, src_lines)
    diff = list(diff)
    indices = list(range(len(diff)))
    output = []
    while indices:
        i = indices.pop(0)
        # Differ prefixes unchanged lines with two spaces.
        if diff[i][:2] != "  ":  # new block
            # roll up the block
            begin = i
            while indices:
                line = diff[indices[0]]
                if line[:2] != "  ":
                    i = indices.pop(0)
                else:
                    # block ended
                    break
            end = i + 1
            # decide what to do
            while True:
                _print_block(diff, begin, end)
                key = input("Apply change [y, n, q]? ").lower().strip()
                if key not in ["y", "n", "q"]:
                    continue
                if key == "q":
                    sys.exit()
                # restore() side 1 = dst (reject change), side 2 = src (accept).
                which = {"n": 1, "y": 2}[key]
                restored = list(difflib.restore(diff[begin:end], which))
                output.extend(restored)
                break
        else:
            # Unchanged line: strip the two-character prefix and keep it.
            output.append(diff[i][2:])
    return output
def GetCurrentEntry(self, client, e_type, e_name):
    """Return [owner, group, perms, contents-or-None] as currently seen on
    the client for the named bad entry."""
    c_inst = Client.objects.filter(name=client)[0]
    result = c_inst.current_interaction.bad().filter(entry__kind=e_type,
                                                     entry__name=e_name)
    if not result:
        raise Bcfg2.Server.Plugin.PluginExecutionError
    entry = result[0]
    ret = []
    for attr in ('owner', 'group', 'perms'):
        observed = getattr(entry.reason, "current_%s" % attr)
        # Fall back to the configured value when nothing was observed.
        ret.append(observed if observed != '' else getattr(entry.reason, attr))
    diff_text = entry.reason.current_diff
    if diff_text != '':
        # Side 1 of the stored ndiff delta is the current file content.
        ret.append('\n'.join(difflib.restore(diff_text.split('\n'), 1)))
    else:
        ret.append(None)
    return ret
def GetCurrentEntry(self, client, e_type, e_name):
    # Return (owner, group, perms, contents) as currently observed on the
    # client for the named bad entry, falling back to configured values.
    curr = self.FindCurrent(client)
    entry = curr.xpath('.//Bad/%s[@name="%s"]' % (e_type, e_name))
    if not entry:
        raise Bcfg2.Server.Plugin.PluginExecutionError
    cfentry = entry[-1]
    owner = cfentry.get('current_owner', cfentry.get('owner'))
    group = cfentry.get('current_group', cfentry.get('group'))
    perms = cfentry.get('current_perms', cfentry.get('perms'))
    if cfentry.get('sensitive') in ['true', 'True']:
        # Never expose sensitive file contents.
        raise Bcfg2.Server.Plugin.PluginExecutionError
    elif 'current_bfile' in cfentry.attrib:
        # Full current contents, base64-encoded.
        contents = binascii.a2b_base64(cfentry.get('current_bfile'))
    elif 'current_bdiff' in cfentry.attrib:
        # Stored as a base64-encoded ndiff delta; side 1 is the current content.
        diff = binascii.a2b_base64(cfentry.get('current_bdiff'))
        contents = '\n'.join(difflib.restore(diff.split('\n'), 1))
    else:
        contents = None
    return (owner, group, perms, contents)
def GetCurrentEntry(self, client, e_type, e_name):
    """Return (owner, group, perms, contents) currently observed on the
    client for the named bad entry."""
    report = self.FindCurrent(client)
    matches = report.xpath('.//Bad/%s[@name="%s"]' % (e_type, e_name))
    if not matches:
        raise Bcfg2.Server.Plugin.PluginExecutionError
    node = matches[-1]
    owner = node.get("current_owner", node.get("owner"))
    group = node.get("current_group", node.get("group"))
    perms = node.get("current_perms", node.get("perms"))
    if node.get("sensitive") in ["true", "True"]:
        # Never expose sensitive contents.
        raise Bcfg2.Server.Plugin.PluginExecutionError
    if "current_bfile" in node.attrib:
        # Full current contents, base64-encoded.
        contents = binascii.a2b_base64(node.get("current_bfile"))
    elif "current_bdiff" in node.attrib:
        # Base64-encoded ndiff delta; side 1 is the current content.
        delta = binascii.a2b_base64(node.get("current_bdiff"))
        contents = "\n".join(difflib.restore(delta.split("\n"), 1))
    else:
        contents = None
    return (owner, group, perms, contents)
import sys
import difflib  # BUG FIX: difflib was used throughout but never imported

# context_diff/unified_diff over plain strings compares them character by
# character (a string is a sequence of 1-char strings).
sys.stdout.writelines(
    difflib.context_diff("e abcd", "abcd abcd", "before", "after"))
print("-----------")
sys.stdout.writelines(
    difflib.unified_diff("e abcd", "abcd abcd", "before", "after"))
print("-----------")
# List of the best "close enough" matches.
# Optional argument n (default 3) caps how many matches are returned;
# n must be greater than 0.
#
# Optional argument cutoff (default 0.6) is a float in [0, 1]; candidates
# whose similarity to word falls below it are ignored.
print(difflib.get_close_matches('appel', ['ape', 'apple', 'peach', 'puppy']))
import keyword
print(difflib.get_close_matches('wheel', keyword.kwlist))
diff = difflib.ndiff("e abcd", "abcd abcd")
print(''.join(diff), end="")
diff = difflib.ndiff('one\ntwo\nthree\n'.splitlines(keepends=True),
                     'ore\ntree\nemu\n'.splitlines(keepends=True))
print(''.join(diff), end="")
print("-----------")
diff = difflib.ndiff('one\ntwo\nthree\n'.splitlines(keepends=True),
                     'ore\ntree\nemu\n'.splitlines(keepends=True))
diff = list(diff)  # materialize the generated delta into a list
print(''.join(difflib.restore(diff, 1)), end="")
print(''.join(difflib.restore(diff, 2)), end="")
#! /usr/bin/env python
def tock(compressed, direction):
    """Decompress a stored object and rebuild one side of its diff.

    direction selects the sequence to restore: 1 for the original text,
    2 for the modified version.
    """
    delta = ensureString(uncompressDiff(compressed))
    return '\n'.join(restore(delta, direction))
def rebuildFile(diff, option):
    """Reconstruct one side of an ndiff delta as a single string.

    option is 1 for the original sequence, 2 for the modified one.
    """
    pieces = restore(diff, option)
    return ''.join(pieces)
def _apply_diff(patch_lines, dest_file): patched = difflib.restore(patch_lines, 2) with open(dest_file, "w") as f: f.writelines(patched)
Optional keyword parameters linejunk and charjunk are filtering functions (or None) linejunk: A function that accepts a single string argument, and returns true if the string is junk, or false if not. The default is None. charjunk: A function that accepts a character (a string of length 1) and returns true if the character is junk, or false if not. """ diff = difflib.ndiff("one\ntwo\nthree\n".splitlines(keepends=True), "ore\ntree\nemu\n".splitlines(keepends=True)) print(" ".join(diff), end="") """ difflib.restore(sequence, which) Return one of the two sequences that generated a delta Given a sequence produced by Differ.compar() or ndiff(), extract lines originating from file 1 or 2 (parameter which), stripping off line prefixes """ diff = difflib.ndiff("one\ntwo\nthree\n".splitlines(keepends=True), "ore\ntree\nemu\n".splitlines(keepends=True)) diff = list(diff) # materialize the generated delta into a list print("".join(difflib.restore(diff, 1)), end="") print("".join(difflib.restore(diff, 2)), end="") """ difflib.unified_diff(a, b, fromfile="", tofile="",
def task(ex):
    """For every ordered pair of items in ex, print the list of characters
    restored from side 1 of their ndiff comparison (i.e. the left item)."""
    for left in ex:
        for right in ex:
            delta = difflib.ndiff(left, right)
            print(list(difflib.restore(delta, 1)))
# Demonstrate difflib.ndiff / difflib.restore on two small line lists.
diff = difflib.ndiff('one\ntwo\nthree\n'.splitlines(keepends=True),
                     'ore\ntree\nemu\n'.splitlines(keepends=True))
print(diff)
print("".join(diff), end=" ")
'''Result
<generator object Differ.compare at 0x101191990>
- one
?  ^
+ ore
?  ^
- two
- three
?    -
+ tree
+ emu
'''
# difflib.restore(sequence, which)
# Return one of the two sequences that generated a delta.
# materialize the generated delta into a list
diff = list(diff)
# TODO (2015-12-31 06:21): empty result — not the expected output. The
# "".join(diff) call above already exhausted the generator, so list(diff)
# is empty and both restore() calls below yield nothing; re-create the
# ndiff before materializing it to fix this.
print(diff)
# restore seq1 form the difflib.ndiff
print(' '.join(difflib.restore(diff, 1)), end=' ')
# restore seq2 form the difflib.ndiff
print(' '.join(difflib.restore(diff, 2)), end=' ')
def restore_diff(diff, obj=1):
    """Join one restored side of an ndiff delta with newlines.

    obj selects the side: 1 (default) for the original text, 2 for the
    revised text.
    """
    restored = difflib.restore(diff, obj)
    return '\n'.join(restored)
def _patch_change_str(self, destination, node, changes):
    # Apply a recorded string change: changes[0] is an ndiff delta and
    # changes[1] is a 0/1 side selector (restore() takes 1 or 2, hence +1).
    self._set_nodes(destination, node, ''.join(difflib.restore(changes[0], changes[1] + 1)))
def get_version(delta):
    """Rebuild the updated text (side 2) encoded in an ndiff delta."""
    fragments = restore(delta, 2)
    return ''.join(fragments)
def get_original(cls, diff):
    """Rebuild the original text (side 1) encoded in an ndiff delta."""
    pieces = restore(diff, 1)
    return ''.join(pieces)
def update_event(self, inp=-1):
    # Node update hook: set output 0 to the sequence restored from the
    # delta on input 0, using the side selector (1 or 2) on input 1.
    self.set_output_val(0, difflib.restore(self.input(0), self.input(1)))
def restore(which):
    """Read an ndiff delta from stdin and write one restored side to stdout.

    which is 1 for the original sequence, 2 for the modified one.
    """
    delta = sys.stdin.readlines()
    sys.stdout.writelines(difflib.restore(delta, which))
def get_revised(cls, diff):
    """Rebuild the revised text (side 2) encoded in an ndiff delta."""
    parts = restore(diff, 2)
    return ''.join(parts)
import sys
import difflib
import keyword  # BUG FIX: keyword.kwlist is used below but was never imported

s1 = ['bacon\n', 'eggs\n', 'ham\n', 'guido\n']
s2 = ['python\n', 'eggy\n', 'hamster\n', 'guido\n']

sys.stdout.writelines(difflib.context_diff(
    s1, s2, fromfile='before.py', tofile='after.py'))
print('-' * 50)
sys.stdout.writelines(difflib.unified_diff(
    s1, s2, fromfile='before.py', tofile='after.py'))

# Closest fuzzy matches for a word against a candidate list.
out = difflib.get_close_matches('appel', ['ape', 'apple', 'peach', 'puppy'])
print(out)
out = difflib.get_close_matches('wheel', keyword.kwlist)
print(out)
out = difflib.get_close_matches('accept', keyword.kwlist)
print(out)
print('-' * 50)

diff = difflib.ndiff('one\ntwo\nthree\n'.splitlines(keepends=True),
                     'ore\ntree\nemu\n'.splitlines(keepends=True))
# print(''.join(diff), end='')
print('-' * 50)
# print(''.join(difflib.restore(list(diff), 1)), end='')
print(''.join(difflib.restore(list(diff), 2)), end='')