def gen_diff(co, handle, precursors, lines, txn):
    """Return the change record for *handle* relative to its precursors.

    Resolves the file contents at each simplified precursor, matches the
    working copy ``lines`` against them, and returns a dict with:

      'matches'  -- one match list per precursor slot (``plen`` entries;
                    slots optimized out by simplify_precursors stay ``[]``)
      'newlines' -- lines present in ``lines`` but in no precursor

    Returns ``None`` when the single precursor already covers the working
    copy exactly (i.e. nothing changed, no diff needs to be stored).
    """
    pres, plen = simplify_precursors(co, handle, co.contents, precursors, txn)

    file_points = []
    for pre, index in pres:
        info = handle_contents_at_point(co, handle, pre, txn)
        file_points.append((info['lines'], info['line points'],
                            info['points']))
    # first element of the resolution tuple (the per-line points) is not
    # needed here -- only the match lists and the genuinely new lines are
    ms, newlines = find_resolution(file_points, lines)[1:]

    # explanation of conditions:
    # 1: check for a merge
    # 2: check if new lines were added by the user
    # 3: safety for the 4th condition
    # 4: check if the first match in the first (only) file covers everything
    if len(pres) > 1 or \
       len(newlines) != 0 or \
       not len(ms[0]) or \
       ms[0][0][2] != len(file_points[0][0]):
        # create a set of correct matches, minus ones which are optimized out
        matches = [[] for i in xrange(plen)]
        for i, (pre, index) in enumerate(pres):
            matches[index] = ms[i]
        return {'matches': matches, 'newlines': newlines}
    return None
def gen_diff(co, handle, precursors, lines, txn):
    """Build the stored diff for *handle* against its precursor heads.

    Gathers the resolved contents of every simplified precursor, runs the
    working-copy ``lines`` through find_resolution, and either returns a
    ``{'matches': ..., 'newlines': ...}`` record or ``None`` when the file
    is unchanged relative to its single precursor.
    """
    pres, plen = simplify_precursors(co, handle, co.contents, precursors, txn)

    file_points = []
    for pre, index in pres:
        info = handle_contents_at_point(co, handle, pre, txn)
        file_points.append((info['lines'], info['line points'],
                            info['points']))
    result, ms, newlines = find_resolution(file_points, lines)

    # Nothing to record when all of the following hold:
    #   - exactly one precursor (this is not a merge)
    #   - the user added no brand-new lines
    #   - there is at least one match (guards the test on ms[0][0])
    #   - that sole file's first match spans the whole precursor file
    if (len(pres) <= 1 and
            len(newlines) == 0 and
            len(ms[0]) and
            ms[0][0][2] == len(file_points[0][0])):
        return None

    # Rebuild the full match table, leaving empty slots for precursors
    # which simplify_precursors optimized away.
    matches = [[] for unused in xrange(plen)]
    pos = 0
    for pre, index in pres:
        matches[index] = ms[pos]
        pos += 1
    return {'matches': matches, 'newlines': newlines}
def update_file(co, handle, pre_heads, rhead, names, dcache, txn):
    """Merge the remote head *rhead* of *handle* into the local state.

    Writes the merged contents (with conflict markers when needed) into
    co.temppath under the hex handle name, records the file mtime in
    co.modtimesdb, and returns 1 if a conflict was produced, else 0.
    Returns 0 immediately when merging is disabled or the file was
    deleted remotely.
    """
    def gen_file_points(prune):
        # Collect (lines, line points, points) for each surviving local
        # precursor head, merging all their point sets into `points`.
        # When `prune` is set, heads already contained in rhead's history
        # are skipped.
        file_points, points = [], ['1']
        true_pre_heads = simplify_precursors(co, handle, co.contents,
                                             pre_heads, txn)[0]
        # don't use pre_heads which are ancestors of rhead
        for pre, index in true_pre_heads:
            if prune and is_ancestor(co, pre, rhead, txn):
                continue
            info = handle_contents_at_point(co, handle, pre, txn,
                                            dcache=dcache)
            # NOTE(review): a None info (unresolvable point) is silently
            # skipped here -- presumably deliberate best-effort behavior
            if info is None:
                continue
            points = dmerge(points, info['points'])
            file_points.append((info['lines'], info['line points'],
                                info['points']))
        return (file_points, points)

    if not co.merge:
        return 0

    rinfo = handle_contents_at_point(co, handle, rhead, txn, dcache=dcache)

    if rinfo.has_key('delete'):
        # File was deleted remotely, we're done
        # XXX: validate remote history
        return 0
    elif co.editsdb.has_key(handle) and \
            bdecode(co.editsdb.get(handle)).has_key('hash'):
        # Local working copy has unrecorded edits: merge the on-disk text
        # against the remote contents.
        lfile = _handle_to_filename(co, handle, names, txn)
        lfile = path.join(co.local, lfile)
        h = open(lfile, 'rb')
        lines = h.read().split('\n')
        h.close()

        file_points, points = gen_file_points(0)
        line_points = find_resolution(file_points, lines)[0]
        # lines with no known origin get the root point '1'
        for i in xrange(len(line_points)):
            if line_points[i] is None:
                line_points[i] = '1'
        olines = find_conflict(lines, line_points, points,
                               rinfo['lines'], rinfo['line points'],
                               rinfo['points'])
    else:
        # No local edits: merge the recorded local heads against remote.
        file_points, points = gen_file_points(1)
        if file_points == []:
            # The remote copy is a superset of local changes
            olines = rinfo['lines']
        else:
            lines, line_points, points = \
                find_conflict_multiple_safe(file_points)
            olines = find_conflict(lines, line_points, points,
                                   rinfo['lines'], rinfo['line points'],
                                   rinfo['points'])

    # Flatten the merge output: plain strings pass through, while a
    # (local, remote) pair becomes a marked conflict region.
    ls, conflict = [], 0
    for l in olines:
        if type(l) is str:
            ls.append(l)
        else:
            conflict = 1
            ls.append('<<<<<<< local')
            ls.extend(l[0])
            ls.append('=======')
            ls.extend(l[1])
            ls.append('>>>>>>> remote')

    # Write the merged result into the temp area, keyed by hex handle.
    lfile = path.join(co.temppath, binascii.hexlify(handle))
    h = open(lfile, 'wb')
    h.write('\n'.join(ls))
    h.close()

    if conflict:
        # mark the file as needing a rehash/re-edit pass
        set_edit(co, handle, {'hash': 1}, txn)

    mtime = int(path.getmtime(lfile))
    co.modtimesdb.put(handle, bencode(mtime), txn=txn)
    return conflict
def update_file(co, handle, pre_heads, rhead, names, dcache, txn):
    """Merge remote head *rhead* of *handle* with the local precursors.

    Produces a merged file (conflict-marked if necessary) in co.temppath,
    updates co.modtimesdb with the new mtime, and returns 1 on conflict,
    0 otherwise (including the no-merge and remote-delete early exits).
    """
    def gen_file_points(prune):
        # Build the (lines, line points, points) triples for each local
        # precursor head; `points` accumulates the union of point sets.
        # With `prune` true, heads that are ancestors of rhead contribute
        # nothing (remote already contains them).
        file_points, points = [], ['1']
        true_pre_heads = simplify_precursors(co, handle, co.contents,
                                             pre_heads, txn)[0]
        # don't use pre_heads which are ancestors of rhead
        for pre, index in true_pre_heads:
            if prune and is_ancestor(co, pre, rhead, txn):
                continue
            info = handle_contents_at_point(co, handle, pre, txn,
                                            dcache=dcache)
            # NOTE(review): None here means the contents could not be
            # resolved at this point; it is skipped rather than raised
            if info is None:
                continue
            points = dmerge(points, info['points'])
            file_points.append((info['lines'], info['line points'],
                                info['points']))
        return (file_points, points)

    if not co.merge:
        return 0

    rinfo = handle_contents_at_point(co, handle, rhead, txn, dcache=dcache)

    if rinfo.has_key('delete'):
        # File was deleted remotely, we're done
        # XXX: validate remote history
        return 0
    elif co.editsdb.has_key(handle) and bdecode(
            co.editsdb.get(handle)).has_key('hash'):
        # There are unrecorded local modifications: read the working file
        # and three-way merge it against the remote version.
        lfile = _handle_to_filename(co, handle, names, txn)
        lfile = path.join(co.local, lfile)
        h = open(lfile, 'rb')
        lines = h.read().split('\n')
        h.close()

        file_points, points = gen_file_points(0)
        line_points = find_resolution(file_points, lines)[0]
        # unattributed lines are assigned the root point '1'
        for i in xrange(len(line_points)):
            if line_points[i] is None:
                line_points[i] = '1'
        olines = find_conflict(lines, line_points, points,
                               rinfo['lines'], rinfo['line points'],
                               rinfo['points'])
    else:
        # Clean working copy: merge recorded local heads with remote.
        file_points, points = gen_file_points(1)
        if file_points == []:
            # The remote copy is a superset of local changes
            olines = rinfo['lines']
        else:
            lines, line_points, points = find_conflict_multiple_safe(
                file_points)
            olines = find_conflict(lines, line_points, points,
                                   rinfo['lines'], rinfo['line points'],
                                   rinfo['points'])

    # Serialize the merge output; tuple entries are (local, remote)
    # alternatives and get standard conflict markers.
    ls, conflict = [], 0
    for l in olines:
        if type(l) is str:
            ls.append(l)
        else:
            conflict = 1
            ls.append('<<<<<<< local')
            ls.extend(l[0])
            ls.append('=======')
            ls.extend(l[1])
            ls.append('>>>>>>> remote')

    # Merged text lands in the temp area under the hexlified handle.
    lfile = path.join(co.temppath, binascii.hexlify(handle))
    h = open(lfile, 'wb')
    h.write('\n'.join(ls))
    h.close()

    if conflict:
        # flag the handle so the conflict must be resolved before commit
        set_edit(co, handle, {'hash': 1}, txn)

    mtime = int(path.getmtime(lfile))
    co.modtimesdb.put(handle, bencode(mtime), txn=txn)
    return conflict