if not state.has_key('delete'): fpre = [] for pre in precursors: fpre.append((pre['lines'], pre['line points'], pre['points'])) if diff is not None: try: lines, line_points = replay(fpre, matches, diff['newlines'], point) except MergeError, msg: raise HistoryError, 'merge error: ' + str(msg) except KeyError: raise HistoryError, 'malformed change' points.append(point) else: lines, line_points, points = find_conflict_multiple_safe(fpre) if lines is None: # XXX: this is a pretty gross hack if len(fpre) == 2: s0 = Set(fpre[0][2]) s1 = Set(fpre[1][2]) if s0 == s1: raise HistoryError, 'merge error' elif s0.issubset(s1): lines, line_points, points = fpre[1] elif s0.issuperset(s1): lines, line_points, points = fpre[0] else: raise HistoryError, 'merge error' else: raise HistoryError, 'merge error'
def update_file(co, handle, pre_heads, rhead, names, dcache, txn):
    """Merge the remote head `rhead` into the local state of one file.

    Writes the merged text (with conflict markers if needed) to a temp file
    named by the hex of `handle` under co.temppath, records the file's mtime
    in co.modtimesdb, and flags the handle as edited when a conflict occurred.

    co        -- repository/checkout object (dbs, paths, merge flag)
    handle    -- opaque file identifier (bytes; hexlified for the temp name)
    pre_heads -- local precursor heads to merge from
    rhead     -- remote head being merged in
    names     -- naming info passed through to _handle_to_filename
    dcache    -- contents cache passed through to handle_contents_at_point
    txn       -- db transaction for all reads/writes

    Returns 1 if the merge produced a conflict, else 0.
    """
    def gen_file_points(prune):
        # Collect (lines, line points, points) for each relevant local head,
        # and the dmerge-union of all their points (seeded with root '1').
        file_points, points = [], ['1']
        true_pre_heads = simplify_precursors(co, handle, co.contents,
                                             pre_heads, txn)[0]
        # don't use pre_heads which are ancestors of rhead
        for pre, index in true_pre_heads:
            if prune and is_ancestor(co, pre, rhead, txn):
                continue
            info = handle_contents_at_point(co, handle, pre, txn,
                                            dcache=dcache)
            # None means no contents at this point; skip it
            if info is None:
                continue
            points = dmerge(points, info['points'])
            file_points.append((info['lines'], info['line points'],
                                info['points']))
        return (file_points, points)

    # Merging disabled on this checkout: nothing to do.
    if not co.merge:
        return 0
    rinfo = handle_contents_at_point(co, handle, rhead, txn, dcache=dcache)
    if rinfo.has_key('delete'):
        # File was deleted remotely, we're done
        # XXX: validate remote history
        return 0
    elif co.editsdb.has_key(handle) and \
            bdecode(co.editsdb.get(handle)).has_key('hash'):
        # The working copy has unrecorded local edits ('hash' flag set):
        # merge against the on-disk text rather than recorded history.
        lfile = _handle_to_filename(co, handle, names, txn)
        lfile = path.join(co.local, lfile)
        h = open(lfile, 'rb')
        lines = h.read().split('\n')
        h.close()
        file_points, points = gen_file_points(0)
        line_points = find_resolution(file_points, lines)[0]
        # Lines with no known resolution point get the root point '1'.
        for i in xrange(len(line_points)):
            if line_points[i] is None:
                line_points[i] = '1'
        olines = find_conflict(lines, line_points, points, rinfo['lines'],
                               rinfo['line points'], rinfo['points'])
    else:
        # No local edits: merge recorded local heads (pruned of ancestors
        # of rhead) against the remote contents.
        file_points, points = gen_file_points(1)
        if file_points == []:
            # The remote copy is a superset of local changes
            olines = rinfo['lines']
        else:
            lines, line_points, points = \
                find_conflict_multiple_safe(file_points)
            olines = find_conflict(lines, line_points, points,
                                   rinfo['lines'], rinfo['line points'],
                                   rinfo['points'])
    # olines holds plain strings (merged lines) interleaved with
    # (local_lines, remote_lines) pairs for conflicting regions.
    ls, conflict = [], 0
    for l in olines:
        if type(l) is str:
            ls.append(l)
        else:
            conflict = 1
            ls.append('<<<<<<< local')
            ls.extend(l[0])
            ls.append('=======')
            ls.extend(l[1])
            ls.append('>>>>>>> remote')
    # Write merge output to the per-handle temp file.
    lfile = path.join(co.temppath, binascii.hexlify(handle))
    h = open(lfile, 'wb')
    h.write('\n'.join(ls))
    h.close()
    if conflict:
        # Mark the handle as edited so the conflict must be resolved.
        set_edit(co, handle, {'hash': 1}, txn)
    mtime = int(path.getmtime(lfile))
    co.modtimesdb.put(handle, bencode(mtime), txn=txn)
    return conflict
def update_file(co, handle, pre_heads, rhead, names, dcache, txn):
    """Merge remote head `rhead` into the local state of a single file.

    The merged text is written to a temp file (hex of `handle` under
    co.temppath), its mtime stored in co.modtimesdb, and the handle is
    flagged as edited when conflict markers were emitted.

    Returns 1 on conflict, 0 otherwise.
    """
    def gen_file_points(prune):
        # Build the list of (lines, line points, points) triples for the
        # local heads, plus the merged point set (seeded with root '1').
        collected, merged_points = [], ['1']
        true_pre_heads = simplify_precursors(co, handle, co.contents,
                                             pre_heads, txn)[0]
        for pre, index in true_pre_heads:
            # don't use pre_heads which are ancestors of rhead
            if prune and is_ancestor(co, pre, rhead, txn):
                continue
            info = handle_contents_at_point(co, handle, pre, txn,
                                            dcache=dcache)
            if info is None:
                continue
            merged_points = dmerge(merged_points, info['points'])
            collected.append((info['lines'], info['line points'],
                              info['points']))
        return (collected, merged_points)

    # Bail out early when merging is disabled for this checkout.
    if not co.merge:
        return 0

    rinfo = handle_contents_at_point(co, handle, rhead, txn, dcache=dcache)
    if rinfo.has_key('delete'):
        # File was deleted remotely, we're done
        # XXX: validate remote history
        return 0

    if co.editsdb.has_key(handle) and \
            bdecode(co.editsdb.get(handle)).has_key('hash'):
        # Unrecorded local edits exist: merge against the on-disk text.
        local_name = path.join(co.local,
                               _handle_to_filename(co, handle, names, txn))
        fh = open(local_name, 'rb')
        try:
            local_lines = fh.read().split('\n')
        finally:
            fh.close()
        file_points, points = gen_file_points(0)
        line_points = find_resolution(file_points, local_lines)[0]
        # Unresolved lines fall back to the root point '1'.
        for i, lp in enumerate(line_points):
            if lp is None:
                line_points[i] = '1'
        olines = find_conflict(local_lines, line_points, points,
                               rinfo['lines'], rinfo['line points'],
                               rinfo['points'])
    else:
        # No local edits: merge the recorded local heads (ancestors of
        # rhead pruned) against the remote contents.
        file_points, points = gen_file_points(1)
        if file_points == []:
            # The remote copy is a superset of local changes
            olines = rinfo['lines']
        else:
            local_lines, line_points, points = \
                find_conflict_multiple_safe(file_points)
            olines = find_conflict(local_lines, line_points, points,
                                   rinfo['lines'], rinfo['line points'],
                                   rinfo['points'])

    # olines mixes plain strings with (local, remote) pairs for conflicts;
    # flatten it, wrapping each pair in standard conflict markers.
    out, conflict = [], 0
    for piece in olines:
        if type(piece) is str:
            out.append(piece)
            continue
        conflict = 1
        out.append('<<<<<<< local')
        out.extend(piece[0])
        out.append('=======')
        out.extend(piece[1])
        out.append('>>>>>>> remote')

    tmp_name = path.join(co.temppath, binascii.hexlify(handle))
    fh = open(tmp_name, 'wb')
    fh.write('\n'.join(out))
    fh.close()

    if conflict:
        # Force the user to resolve before the edit is considered clean.
        set_edit(co, handle, {'hash': 1}, txn)
    co.modtimesdb.put(handle, bencode(int(path.getmtime(tmp_name))),
                      txn=txn)
    return conflict