Example #1
def gen_diff(co, handle, precursors, lines, txn):
    pres, plen = simplify_precursors(co, handle, co.contents, precursors, txn)

    file_points = []
    for pre, index in pres:
        info = handle_contents_at_point(co, handle, pre, txn)
        file_points.append(
            (info['lines'], info['line points'], info['points']))

    # find_resolution() yields one match list per entry in pres (ms) and the
    # lines the user added (newlines); the first return value is unused here
    result, ms, newlines = find_resolution(file_points, lines)

    # explanation of conditions:
    # 1: check for a merge
    # 2: check if new lines were added by the user
    # 3: safety for the 4th condition
    # 4: check if the first match in the first (only) file covers everything
    if len(pres) > 1 or \
           len(newlines) != 0 or \
           not len(ms[0]) or \
           ms[0][0][2] != len(file_points[0][0]):

        # create a set of correct matches, minus ones which are optimized out
        matches = [[] for i in xrange(plen)]
        i = 0
        for pre, index in pres:
            matches[index] = ms[i]
            i += 1

        return {'matches': matches, 'newlines': newlines}

    return None
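
The interesting part of this example is the early-out at the bottom: gen_diff() returns None when there is a single relevant precursor, the user added no lines, and the one match covers that precursor completely, i.e. the file is unchanged. A minimal caller sketch; summarize_diff() is a hypothetical name, not part of Codeville:

def summarize_diff(co, handle, precursors, lines, txn):
    # gen_diff() above returns None for an unchanged file, otherwise a dict
    # with one match list per precursor slot (empty for precursors that were
    # optimized out) and the lines the user added.
    diff = gen_diff(co, handle, precursors, lines, txn)
    if diff is None:
        return 'unchanged'
    return '%d precursor match lists, %d new lines' % (
        len(diff['matches']), len(diff['newlines']))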
Example #2
    def _response_get_diff(self, s, mid, msg, rstate):
        lstate = self.socket[s][Response][rstate['ref']]

        # send out the next one
        lstate['req-outstanding'] -= 1
        self._get_diff(s, rstate['ref'])

        handle = rstate['handle']

        diffs = lstate['diffs']
        diffs[handle][rstate['change']] = msg['diff']
        del self.socket[s][Request][mid]

        lstate['counts'][handle] -= 1
        if lstate['counts'][handle] == 0:
            lstate['count'] -= 1

            # write out the diffs
            WD = WriteDiff(self, handle, lstate['txn'])
            for change, diff in diffs[handle].items():
                WD.write(diff, change)
            WD.close()

            # XXX: suboptimal
            change = handle_last_modified(self, self.contents, handle,
                                          lstate['cur head'], lstate['txn'])
            if change is None or is_ancestor(self, change, lstate['head'],
                                             lstate['txn']):
                handle_contents_at_point(self,
                                         handle,
                                         lstate['head'],
                                         lstate['txn'],
                                         dcache=diffs[handle])
                assert lstate['modified'].has_key(handle)
                lstate['modified'][handle] = 1

            del diffs[handle]

        try:
            if lstate['count'] == 0:
                self._commit_phase_2(s, rstate['ref'])
        except HistoryError, msg:
            self._commit_fail(s, rstate['ref'], str(msg))
            return
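
The bookkeeping above is a two-level countdown: a per-handle count of diff responses still outstanding, and a global count of handles that are not yet complete. A stripped-down model of that accounting; record_response() is illustrative only and not Codeville code:

def record_response(lstate, handle, change, diff):
    # Store the received diff, then update both counters exactly as the
    # method above does.
    lstate['diffs'][handle][change] = diff
    lstate['counts'][handle] -= 1        # one fewer diff outstanding for handle
    if lstate['counts'][handle] == 0:    # every diff for this handle arrived
        lstate['count'] -= 1             # one fewer handle still pending
    return lstate['count'] == 0          # True once all handles are complete

In the real method, reaching zero on the global count is what triggers _commit_phase_2().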
Example #3
    def gen_file_points(prune):
        file_points, points = [], ['1']

        true_pre_heads = simplify_precursors(co, handle, co.contents,
                                             pre_heads, txn)[0]

        # don't use pre_heads which are ancestors of rhead
        for pre, index in true_pre_heads:
            if prune and is_ancestor(co, pre, rhead, txn):
                continue

            info = handle_contents_at_point(co, handle, pre, txn, dcache=dcache)
            if info is None:
                continue
            points = dmerge(points, info['points'])
            file_points.append((info['lines'], info['line points'], info['points']))
        return (file_points, points)
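
Each surviving precursor contributes a (lines, line points, points) triple to file_points, and points accumulates the union of every point set seen so far. This assumes dmerge() performs a duplicate-free union of two point lists, roughly as sketched below; the actual Codeville helper may differ in detail:

def dmerge(la, lb):
    # Assumed behaviour: merge two lists of points, dropping duplicates.
    merged = dict.fromkeys(la)
    merged.update(dict.fromkeys(lb))
    return list(merged.keys())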
Example #4
def update_file(co, handle, pre_heads, rhead, names, dcache, txn):
    def gen_file_points(prune):
        file_points, points = [], ['1']

        true_pre_heads = simplify_precursors(co, handle, co.contents,
                                             pre_heads, txn)[0]

        # don't use pre_heads which are ancestors of rhead
        for pre, index in true_pre_heads:
            if prune and is_ancestor(co, pre, rhead, txn):
                continue

            info = handle_contents_at_point(co, handle, pre, txn, dcache=dcache)
            if info is None:
                continue
            points = dmerge(points, info['points'])
            file_points.append((info['lines'], info['line points'], info['points']))
        return (file_points, points)

    if not co.merge:
        return 0

    rinfo = handle_contents_at_point(co, handle, rhead, txn, dcache=dcache)

    if rinfo.has_key('delete'):
        # File was deleted remotely, we're done
        # XXX: validate remote history
        return 0

    elif co.editsdb.has_key(handle) and bdecode(co.editsdb.get(handle)).has_key('hash'):
        lfile = _handle_to_filename(co, handle, names, txn)
        lfile = path.join(co.local, lfile)

        h = open(lfile, 'rb')
        lines = h.read().split('\n')
        h.close()
        file_points, points = gen_file_points(0)
        line_points = find_resolution(file_points, lines)[0]
        for i in xrange(len(line_points)):
            if line_points[i] is None:
                line_points[i] = '1'

        olines = find_conflict(lines, line_points, points, rinfo['lines'],
                               rinfo['line points'], rinfo['points'])

    else:
        file_points, points = gen_file_points(1)
        if file_points == []:
            # The remote copy is a superset of local changes
            olines = rinfo['lines']
        else:
            lines, line_points, points = find_conflict_multiple_safe(file_points)
            olines = find_conflict(lines, line_points, points, rinfo['lines'],
                                   rinfo['line points'], rinfo['points'])

    ls, conflict = [], 0
    for l in olines:
        if type(l) is str:
            ls.append(l)
        else:
            conflict = 1
            ls.append('<<<<<<< local')
            ls.extend(l[0])
            ls.append('=======')
            ls.extend(l[1])
            ls.append('>>>>>>> remote')
    lfile = path.join(co.temppath, binascii.hexlify(handle))
    h = open(lfile, 'wb')
    h.write('\n'.join(ls))
    h.close()
    if conflict:
        set_edit(co, handle, {'hash': 1}, txn)
    mtime = int(path.getmtime(lfile))
    co.modtimesdb.put(handle, bencode(mtime), txn=txn)

    return conflict
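
find_conflict() yields a mixed list: plain strings for lines that merged cleanly and 2-tuples of (local lines, remote lines) for conflicting regions, which the loop above turns into the familiar conflict markers. A made-up sample showing the resulting text; the data is purely illustrative:

# Made-up input, not real Codeville output.
olines = ['def f(x):', (['    return x + 1'], ['    return x + 2'])]
ls = []
for l in olines:
    if type(l) is str:
        ls.append(l)
    else:
        ls.append('<<<<<<< local')
        ls.extend(l[0])
        ls.append('=======')
        ls.extend(l[1])
        ls.append('>>>>>>> remote')
# '\n'.join(ls) reads:
#   def f(x):
#   <<<<<<< local
#       return x + 1
#   =======
#       return x + 2
#   >>>>>>> remote

Hitting a tuple also sets conflict = 1 in the real function, which later records the file as edited via set_edit().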