Example #1
0
def _add_file(co, rep, parent, required, ltxn):
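    # Register a single path (rep, relative to co.local) with the working copy.
    # If the path already has a handle, check that the recorded type still
    # matches what is on disk and print a warning when 'required' is set.
    # Otherwise allocate a handle (the root handle for rep == ''), record the
    # static type, queue an 'add' edit, and index the entry in allnamesdb under
    # its parent handle plus leaf name; regular files also get modtimesdb and
    # filenamesdb entries. Returns the handle, or None on error.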
    mode = os.lstat(path.join(co.local, rep)).st_mode
    handle = filename_to_handle(co, rep)
    if handle:
        #info = bdecode(co.staticdb.get(handle))
        info = db_get(co, co.staticdb, handle, None)
        if info['type'] == 'dir' and not stat.S_ISDIR(mode):
            print 'error - %s already added as a %s' % (rep, info['type'])
            return None
        if info['type'] == 'file' and not stat.S_ISREG(mode):
            print 'error - %s already added as a %s' % (rep, info['type'])
            return None
        if required:
            print 'warning - %s already added' % (rep, )
    else:
        print 'adding: ' + rep
        if stat.S_ISDIR(mode):
            type = 'dir'
        elif stat.S_ISREG(mode):
            type = 'file'
        else:
            print 'error - unrecognized file type for %s' % (rep, )
            return None
        if rep == '':
            handle = roothandle
        else:
            handle = new_handle(co, ltxn)
        #co.staticdb.put(handle, bencode({'type': type}), txn=ltxn)
        db_put(co, co.staticdb, handle, {'type': type}, ltxn)
        info = {'name': path.split(rep)[1], 'parent': parent, 'add': {}}
        set_edit(co, handle, info, ltxn)
        co.allnamesdb.put(parent + info['name'],
                          handle,
                          flags=db.DB_NODUPDATA,
                          txn=ltxn)
        if type == 'file':
            co.modtimesdb.put(handle, bencode(0), txn=ltxn)
            co.filenamesdb.put(rep, handle, txn=ltxn)
    return handle
Example #2
0
def _add_file(co, rep, parent, required, ltxn):
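    # Register a single path (rep, relative to co.local) with the working copy.
    # If the path already has a handle, check that the recorded type still
    # matches what is on disk and print a warning when 'required' is set.
    # Otherwise allocate a handle (the root handle for rep == ''), record the
    # static type, queue an 'add' edit, and index the entry in allnamesdb under
    # its parent handle plus leaf name; regular files also get modtimesdb and
    # filenamesdb entries. Returns the handle, or None on error.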
    mode = os.lstat(path.join(co.local, rep)).st_mode
    handle = filename_to_handle(co, rep)
    if handle:
        #info = bdecode(co.staticdb.get(handle))
        info = db_get(co, co.staticdb, handle, None)
        if info['type'] == 'dir' and not stat.S_ISDIR(mode):
            print 'error - %s already added as a %s' % (rep, info['type'])
            return None
        if info['type'] == 'file' and not stat.S_ISREG(mode):
            print 'error - %s already added as a %s' % (rep, info['type'])
            return None
        if required:
            print 'warning - %s already added' % (rep,)
    else:
        print 'adding: ' + rep
        if stat.S_ISDIR(mode):
            type = 'dir'
        elif stat.S_ISREG(mode):
            type = 'file'
        else:
            print 'error - unrecognized file type for %s' % (rep,)
            return None
        if rep == '':
            handle = roothandle
        else:
            handle = new_handle(co, ltxn)
        #co.staticdb.put(handle, bencode({'type': type}), txn=ltxn)
        db_put(co, co.staticdb, handle, {'type': type}, ltxn)
        info = {'name': path.split(rep)[1], 'parent': parent, 'add': {}}
        set_edit(co, handle, info, ltxn)
        co.allnamesdb.put(parent + info['name'], handle, flags=db.DB_NODUPDATA, txn=ltxn)
        if type == 'file':
            co.modtimesdb.put(handle, bencode(0), txn=ltxn)
            co.filenamesdb.put(rep, handle, txn=ltxn)
    return handle
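
The sketch below shows one way _add_file might be called. It is a hypothetical illustration, not the caller from the original codebase: it assumes an open working copy co and local transaction ltxn supplied by the surrounding code, and it uses the module-level roothandle (which _add_file itself falls back to for rep == '') as the parent of a top-level directory.

# Hypothetical usage (not from the original codebase): register a top-level
# directory and then a file inside it. co is an open working copy, ltxn an
# open local transaction, and roothandle the module-level root handle that
# _add_file itself uses for rep == ''.
def add_docs_and_readme(co, ltxn):
    dirhandle = _add_file(co, 'docs', roothandle, True, ltxn)
    if dirhandle is None:
        return None
    # the new directory's handle becomes the parent of anything added below it
    return _add_file(co, 'docs/readme.txt', dirhandle, True, ltxn)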
Example #3
0
def gen_changeset(co, files, comment, repohead, txn, tstamp=None):
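    # Build a changeset from the pending per-handle edits in 'files' ((handle,
    # linfo) pairs), write the resulting per-file diffs and the bencoded
    # changeset to the local databases, and advance 'heads' to the new point.
    # 'repohead', the last known repository head, is folded into the precursors
    # (walking back to an ancestor if necessary). Returns the new point (the
    # SHA-1 of the bencoded changeset), or None when no handles changed and
    # there is only a single precursor.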
    def per_file_hash(co, handle, hinfo, precursors, lfile, txn):
        try:
            h = open(lfile, 'rb')
        except IOError:
            raise HistoryError, 'Could not open file ' + lfile
        lines = h.read().split('\n')
        h.close()

        dinfo = gen_diff(co, handle, precursors, lines, txn)
        if hinfo.has_key('add'):
            dinfo['add'] = 1
        if hinfo.has_key('delete'):
            dinfo['delete'] = 1
        try:
            diff = bencode(dinfo)
        except ValueError:
            return None
        mtime = int(path.getmtime(lfile))
        co.modtimesdb.put(handle, bencode(mtime), txn=txn)
        hinfo['hash'] = sha.new(diff).digest()
        return zlib.compress(diff, 6)

    precursors = bdecode(co.linforepo.get('heads'))

    # Include the last known repository head in the list of precursors. This
    # is useful extra information and also forces a merge change to which a
    # comment can then be attached.
    # XXX: can do the wrong thing in odd merge-and-not-update cases
    if repohead is not None and repohead not in precursors:
        while not _is_ancestor(co, repohead, precursors, txn):
            info = bdecode(co.lcrepo.get(repohead, txn=txn))
            try:
                repohead = info['precursors'][0]
            except IndexError:
                repohead = rootnode

        if repohead not in precursors:
            precursors.insert(0, repohead)

    changeset = {'precursors': precursors}

    changeset['handles'] = handles = {}
    adds, nedits, edits, types, names = {}, [], [], {}, {}
    for handle, linfo in files:
        if linfo.has_key('add'):
            adds[handle] = 1
        #types[handle] = bdecode(co.staticdb.get(handle))['type']
        types[handle] = db_get(co, co.staticdb, handle, None)['type']

        handles[handle] = cinfo = {}
        if linfo.has_key('delete'):
            assert not linfo.has_key('add')
            assert not linfo.has_key('hash')
            cinfo['delete'] = 1
        elif linfo.has_key('name') or linfo.has_key('nmerge'):
            nedits.append((handle, linfo))

        if linfo.has_key('add'):
            assert not linfo.has_key('hash')
            cinfo['add'] = {'type': types[handle]}
        elif linfo.has_key('hash') or linfo.has_key('cmerge'):
            assert types[handle] == 'file'
            edits.append(handle)
        co.editsdb.delete(handle, txn=txn)

    # generate the name diffs
    for handle, linfo in nedits:
        # check if this is really a merge or not
        # XXX: theoretically we can trust the 'nmerge' flag as set (and
        # cleared) by _update_helper()
        merge = False
        change = prev_change = None
        for head in precursors:
            change = handle_last_modified(co, co.names, handle, head, txn)
            if change is None:
                continue

            if prev_change is None:
                prev_change = change
                continue

            left_anc = _is_ancestor(co, prev_change, [change], txn)
            right_anc = _is_ancestor(co, change, [prev_change], txn)

            if left_anc:
                prev_change = change
            elif not right_anc:
                merge = True
                break

        # XXX: sanity check for now, we have to do most of the work anyway
        assert not (linfo.has_key('nmerge') ^ merge)

        # no merge, but maybe the user made an explicit change
        if not linfo.has_key('nmerge') and change is not None:
            old_info = handle_name_at_point(co,
                                            handle,
                                            change,
                                            txn,
                                            lookup=False)
            if old_info['name'] == linfo['name'] and \
               old_info['parent'] == linfo['parent']:
                continue

        # looks like we need to include an explicit name change
        cinfo = handles[handle]
        hinfo = handle_name(co, handle, txn)
        cinfo['parent'] = hinfo['parent']
        cinfo['name'] = hinfo['name']
        names[handle] = cinfo

    # generate the diffs
    indices = {}
    for handle in edits:
        lfile = path.join(co.local,
                          _handle_to_filename(co, handle, names, txn))
        diff = per_file_hash(co, handle, handles[handle], precursors, lfile,
                             txn)
        if diff is None:
            continue
        indices[handle] = write_diff(co, handle, diff, txn)

    # clear out things which didn't actually have changes
    for handle, linfo in files:
        if handles[handle] == {}:
            del handles[handle]

    # change all the temporary IDs to permanent, verifiable ones
    ladds, nmap = adds.keys(), {}
    while len(ladds):
        handle = ladds.pop()
        # check if this handle was already dealt with
        if not adds.has_key(handle):
            continue
        parent = handles[handle]['parent']
        # if the parent was also added, it needs to be renumbered first
        if adds.has_key(parent):
            ladds.extend((handle, parent))
            continue
        hinfo = handles[handle]
        # if the parent has been renumbered, pick up the change
        if nmap.has_key(parent):
            hinfo['parent'] = nmap[parent]
        # generate the permanent ID
        if types[handle] == 'file':
            # generate diffs
            fname = _handle_to_filename(co, handle, names, txn)
            lfile = path.join(co.local, fname)
            diff = per_file_hash(co, handle, handles[handle], [], lfile, txn)
            newhandle = create_handle(precursors, hinfo)
            indices[newhandle] = write_diff(co, newhandle, diff, txn)
            # update the db accordingly
            co.modtimesdb.delete(handle, txn=txn)
            mtime = int(path.getmtime(lfile))
            co.modtimesdb.put(newhandle, bencode(mtime), txn=txn)
            co.filenamesdb.put(fname, newhandle, txn=txn)
        else:
            newhandle = create_handle(precursors, hinfo)
        handles[newhandle] = handles[handle]
        names[newhandle] = names[handle]
        types[newhandle] = types[handle]
        del handles[handle]
        del adds[handle]

        # more db updating
        co.staticdb.delete(handle, txn=txn)
        #co.staticdb.put(newhandle, bencode({'type': types[handle]}), txn=txn)
        db_put(co, co.staticdb, newhandle, {'type': types[handle]}, txn)
        nmap[handle] = newhandle
        # XXX: clean up allnamesdb

    # do reparenting of all the non-added files
    for handle in names.keys():
        if nmap.has_key(handle):
            continue
        hinfo = handles[handle]
        if hinfo.has_key('delete'):
            continue
        if nmap.has_key(hinfo['parent']):
            hinfo['parent'] = nmap[hinfo['parent']]

    if changeset['handles'] == {} and len(changeset['precursors']) == 1:
        return None

    # fill in a few other pieces of information
    if comment is not None:
        changeset['comment'] = comment
    changeset['user'] = co.user
    if tstamp is None:
        tstamp = time()
    changeset['time'] = int(tstamp)

    # put together the changeset and calculate the point
    bchangeset = bencode(changeset)
    point = sha.new(bchangeset).digest()

    # write the file locations of the diffs to the db
    for handle, index in indices.items():
        write_index(co, point, handle, index, txn)

    # write the new change to the db and make it the new head
    co.lcrepo.put(point, bchangeset, txn=txn)
    co.linforepo.put('heads', bencode([point]), txn=txn)
    #co.editsdb.truncate(txn=txn)
    return point
Example #4
0
def gen_changeset(co, files, comment, repohead, txn, tstamp=None):
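    # Build a changeset from the pending per-handle edits in 'files' ((handle,
    # linfo) pairs), write the resulting per-file diffs and the bencoded
    # changeset to the local databases, and advance 'heads' to the new point.
    # 'repohead', the last known repository head, is folded into the precursors
    # (walking back to an ancestor if necessary). Returns the new point (the
    # SHA-1 of the bencoded changeset), or None when no handles changed and
    # there is only a single precursor.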
    def per_file_hash(co, handle, hinfo, precursors, lfile, txn):
        try:
            h = open(lfile, 'rb')
        except IOError:
            raise HistoryError, 'Could not open file ' + lfile
        lines = h.read().split('\n')
        h.close()

        dinfo = gen_diff(co, handle, precursors, lines, txn)
        if hinfo.has_key('add'):
            dinfo['add'] = 1
        if hinfo.has_key('delete'):
            dinfo['delete'] = 1
        try:
            diff = bencode(dinfo)
        except ValueError:
            return None
        mtime = int(path.getmtime(lfile))
        co.modtimesdb.put(handle, bencode(mtime), txn=txn)
        hinfo['hash'] = sha.new(diff).digest()
        return zlib.compress(diff, 6)

    precursors = bdecode(co.linforepo.get('heads'))

    # Include the last known repository head in the list of precursors. This
    # is useful extra information and also forces a merge change to which a
    # comment can then be attached.
    # XXX: can do the wrong thing in odd merge-and-not-update cases
    if repohead is not None and repohead not in precursors:
        while not _is_ancestor(co, repohead, precursors, txn):
            info = bdecode(co.lcrepo.get(repohead, txn=txn))
            try:
                repohead = info['precursors'][0]
            except IndexError:
                repohead = rootnode

        if repohead not in precursors:
            precursors.insert(0, repohead)

    changeset = {'precursors': precursors}

    changeset['handles'] = handles = {}
    adds, nedits, edits, types, names = {}, [], [], {}, {}
    for handle, linfo in files:
        if linfo.has_key('add'):
            adds[handle] = 1
        #types[handle] = bdecode(co.staticdb.get(handle))['type']
        types[handle] = db_get(co, co.staticdb, handle, None)['type']

        handles[handle] = cinfo = {}
        if linfo.has_key('delete'):
            assert not linfo.has_key('add')
            assert not linfo.has_key('hash')
            cinfo['delete'] = 1
        elif linfo.has_key('name') or linfo.has_key('nmerge'):
            nedits.append((handle, linfo))

        if linfo.has_key('add'):
            assert not linfo.has_key('hash')
            cinfo['add'] = {'type': types[handle]}
        elif linfo.has_key('hash') or linfo.has_key('cmerge'):
            assert types[handle] == 'file'
            edits.append(handle)
        co.editsdb.delete(handle, txn=txn)

    # generate the name diffs
    for handle, linfo in nedits:
        # check if this is really a merge or not
        # XXX: theoretically we can trust the 'nmerge' flag as set (and
        # cleared) by _update_helper()
        merge = False
        change = prev_change = None
        for head in precursors:
            change = handle_last_modified(co, co.names, handle, head, txn)
            if change is None:
                continue

            if prev_change is None:
                prev_change = change
                continue

            left_anc = _is_ancestor(co, prev_change, [change], txn)
            right_anc = _is_ancestor(co, change, [prev_change], txn)

            if left_anc:
                prev_change = change
            elif not right_anc:
                merge = True
                break

        # XXX: sanity check for now, we have to do most of the work anyway
        assert not (linfo.has_key('nmerge') ^ merge)

        # no merge, but maybe the user made an explicit change
        if not linfo.has_key('nmerge') and change is not None:
            old_info = handle_name_at_point(co, handle, change, txn, lookup=False)
            if old_info['name'] == linfo['name'] and \
               old_info['parent'] == linfo['parent']:
                continue

        # looks like we need to include an explicit name change
        cinfo = handles[handle]
        hinfo = handle_name(co, handle, txn)
        cinfo['parent'] = hinfo['parent']
        cinfo['name'] = hinfo['name']
        names[handle] = cinfo

    # generate the diffs
    indices = {}
    for handle in edits:
        lfile = path.join(co.local, _handle_to_filename(co, handle, names, txn))
        diff = per_file_hash(co, handle, handles[handle], precursors, lfile, txn)
        if diff is None:
            continue
        indices[handle] = write_diff(co, handle, diff, txn)

    # clear out things which didn't actually have changes
    for handle, linfo in files:
        if handles[handle] == {}:
            del handles[handle]

    # change all the temporary IDs to permanent, verifiable ones
    ladds, nmap = adds.keys(), {}
    while len(ladds):
        handle = ladds.pop()
        # check if this handle was already dealt with
        if not adds.has_key(handle):
            continue
        parent = handles[handle]['parent']
        # if the parent was also added, it needs to be renumbered first
        if adds.has_key(parent):
            ladds.extend((handle, parent))
            continue
        hinfo = handles[handle]
        # if the parent has been renumbered, pick up the change
        if nmap.has_key(parent):
            hinfo['parent'] = nmap[parent]
        # generate the permanent ID
        if types[handle] == 'file':
            # generate diffs
            fname = _handle_to_filename(co, handle, names, txn)
            lfile = path.join(co.local, fname)
            diff = per_file_hash(co, handle, handles[handle], [], lfile, txn)
            newhandle = create_handle(precursors, hinfo)
            indices[newhandle] = write_diff(co, newhandle, diff, txn)
            # update the db accordingly
            co.modtimesdb.delete(handle, txn=txn)
            mtime = int(path.getmtime(lfile))
            co.modtimesdb.put(newhandle, bencode(mtime), txn=txn)
            co.filenamesdb.put(fname, newhandle, txn=txn)
        else:
            newhandle = create_handle(precursors, hinfo)
        handles[newhandle] = handles[handle]
        names[newhandle] = names[handle]
        types[newhandle] = types[handle]
        del handles[handle]
        del adds[handle]

        # more db updating
        co.staticdb.delete(handle, txn=txn)
        #co.staticdb.put(newhandle, bencode({'type': types[handle]}), txn=txn)
        db_put(co, co.staticdb, newhandle, {'type': types[handle]}, txn)
        nmap[handle] = newhandle
        # XXX: clean up allnamesdb

    # do reparenting of all the non-added files
    for handle in names.keys():
        if nmap.has_key(handle):
            continue
        hinfo = handles[handle]
        if hinfo.has_key('delete'):
            continue
        if nmap.has_key(hinfo['parent']):
            hinfo['parent'] = nmap[hinfo['parent']]

    if changeset['handles'] == {} and len(changeset['precursors']) == 1:
        return None

    # fill in a few other pieces of information
    if comment is not None:
        changeset['comment'] = comment
    changeset['user'] = co.user
    if tstamp is None:
        tstamp = time()
    changeset['time'] = int(tstamp)

    # put together the changeset and calculate the point
    bchangeset = bencode(changeset)
    point = sha.new(bchangeset).digest()

    # write the file locations of the diffs to the db
    for handle, index in indices.items():
        write_index(co, point, handle, index, txn)

    # write the new change to the db and make it the new head
    co.lcrepo.put(point, bchangeset, txn=txn)
    co.linforepo.put('heads', bencode([point]), txn=txn)
    #co.editsdb.truncate(txn=txn)
    return point
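
The sketch below shows one way gen_changeset might be driven at commit time. It is a minimal illustration under stated assumptions, not the original caller: txn is an already-open transaction, co.editsdb is assumed to map handles to bencoded per-handle edit dictionaries, and repohead is passed as None, which gen_changeset accepts when no repository head is being tracked.

# Hypothetical commit driver (assumptions noted above): gather the pending
# edits from co.editsdb, then generate and store the changeset.
def commit_sketch(co, comment, txn):
    files = []
    cursor = co.editsdb.cursor(txn=txn)
    record = cursor.first()
    while record is not None:
        handle, raw = record
        files.append((handle, bdecode(raw)))   # assumed bencoded edit dicts
        record = cursor.next()
    cursor.close()
    return gen_changeset(co, files, comment, None, txn)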