Example #1
def upload_bfiles(ui, rsrc, rdst, files):
    '''upload big files to the central store'''

    if not files:
        return

    # Don't upload locally. All bfiles are in the system wide cache
    # so the other repo can just get them from there.
    if not rdst.path.startswith('http'):
        return

    store = basestore._open_store(rsrc, rdst.path, put=True)

    at = 0
    for hash in files:
        ui.progress(_('Uploading bfiles'), at, unit='bfile', total=len(files))
        if store.exists(hash):
            at += 1
            continue
        source = bfutil.find_file(rsrc, hash)
        if not source:
            raise util.Abort(_('Missing bfile %s needs to be uploaded') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('Uploading bfiles'), None)
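
These snippets are taken from Mercurial's bfiles extension and omit their module-level imports. A header roughly like the following would make the names they use resolvable; the exact import form for the extension's own bfutil and basestore modules is an assumption based on how they are referenced here.

import os
import errno
import shutil

from mercurial import util, match as match_
from mercurial.i18n import _

import basestore  # bfiles extension module (assumed import form)
import bfutil     # bfiles extension module (assumed import form)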
Example #2
def bfput(ui, repo, *files, **opts):
    '''upload big files to the central store'''

    # XXX should be able to upload uncommitted revisions too!  maybe need
    # --committed/--uncommitted options?
    store = basestore._open_store(ui, repo, put=True)
    pending = bfutil._find_pending_files(
        repo, files, opts, pending=False, committed=True)
    numfiles = numrevs = 0
    for (dirname, filename, revs) in sorted(pending):
        # upload files in order by mtime, then hash, mainly to avoid
        # spurious test failures -- don't want to depend on filesystem
        # order
        sortable = []
        for rev in revs:
            revfile = os.path.join(dirname, filename, rev)
            mtime = os.stat(revfile).st_mtime
            sortable.append((mtime, rev, revfile))
        sortable.sort()                 # by mtime
        for (mtime, rev, revfile) in sortable:
            ui.note('putting %s (rev %s)\n' % (filename, rev))
            path = os.path.join(dirname, filename, rev)
            store.put(path, filename, rev)
            bfutil.unlinkpath(path)
            numrevs += 1
        numfiles += 1
    if numfiles == 0:
        ui.status(_('no pending files to put in store\n'))
        return 1
    else:
        ui.status(_('uploaded %d big files (%d revs total)\n')
                  % (numfiles, numrevs))
        return 0
Example #3
def bfverify(ui, repo, **opts):
    '''Verify that every big file revision in the current changeset
    exists in the central store.  With --contents, also verify that
    the contents of each big file revision are correct (SHA-1 hash
    matches the revision ID).  With --all, check every changeset in
    this repository.'''
    if opts.get('all'):
        revs = xrange(len(repo))
    else:
        revs = ['.']

    store = basestore._open_store(ui, repo)
    return store.verify(revs, contents=opts.get('contents'))
Example #4
def verify_bfiles(ui, repo, all=False, contents=False):
    '''Verify that every big file revision in the current changeset
    exists in the central store.  With --contents, also verify that
    the contents of each big file revision are correct (SHA-1 hash
    matches the revision ID).  With --all, check every changeset in
    this repository.'''
    if all:
        # Pass a list to the function rather than an iterator because we know a list will work.
        revs = range(len(repo))
    else:
        revs = ['.']

    store = basestore._open_store(repo)
    return store.verify(revs, contents=contents)
Example #5
def update_bfiles(ui, repo):
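    '''update working copy big files to match the standins in the
    current changeset: fetch changed files, fix modes, and remove big
    files whose standins are gone'''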
    wlock = repo.wlock()
    try:
        bfdirstate = bfutil.open_bfdirstate(ui, repo)
        s = bfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
                              False, False, False)
        (unsure, modified, added, removed, missing, unknown, ignored,
         clean) = s

        bfiles = bfutil.list_bfiles(repo)
        toget = []
        at = 0
        updated = 0
        removed = 0
        printed = False
        if bfiles:
            ui.status(_('Getting changed bfiles\n'))
            printed = True

        for bfile in bfiles:
            at += 1
            if os.path.exists(repo.wjoin(bfile)) and not os.path.exists(
                    repo.wjoin(bfutil.standin(bfile))):
                os.unlink(repo.wjoin(bfile))
                removed += 1
                bfdirstate.forget(bfutil.unixpath(bfile))
                continue
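            # a standin's content is the SHA-1 hash its big file is expected to have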
            expectedhash = repo[None][bfutil.standin(bfile)].data().strip()
            mode = os.stat(repo.wjoin(bfutil.standin(bfile))).st_mode
            if not os.path.exists(
                    repo.wjoin(bfile)) or expectedhash != bfutil.hashfile(
                        repo.wjoin(bfile)):
                path = bfutil.find_file(repo, expectedhash)
                if not path:
                    toget.append((bfile, expectedhash))
                else:
                    util.makedirs(os.path.dirname(repo.wjoin(bfile)))
                    shutil.copy(path, repo.wjoin(bfile))
                    os.chmod(repo.wjoin(bfile), mode)
                    updated += 1
                    bfdirstate.normal(bfutil.unixpath(bfile))
            elif os.path.exists(repo.wjoin(bfile)) and mode != os.stat(
                    repo.wjoin(bfile)).st_mode:
                os.chmod(repo.wjoin(bfile), mode)
                updated += 1
                bfdirstate.normal(bfutil.unixpath(bfile))

        if toget:
            store = basestore._open_store(repo)
            (success, missing) = store.get(toget)
        else:
            success, missing = [], []

        for (filename, hash) in success:
            mode = os.stat(repo.wjoin(bfutil.standin(filename))).st_mode
            os.chmod(repo.wjoin(filename), mode)
            updated += 1
            bfdirstate.normal(bfutil.unixpath(filename))

        for bfile in bfdirstate:
            if bfile not in bfiles:
                if os.path.exists(repo.wjoin(bfile)):
                    if not printed:
                        ui.status(_('Getting changed bfiles\n'))
                        printed = True
                    os.unlink(repo.wjoin(bfile))
                    removed += 1
                    bfdirstate.forget(bfutil.unixpath(bfile))

        bfdirstate.write()
        if printed:
            ui.status(
                _('%d big files updated, %d removed\n') % (updated, removed))
    finally:
        wlock.release()
Example #6
def revert_bfiles(ui, repo):
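    '''revert working copy big files to the contents recorded by their
    standins and resync the bfiles dirstate'''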
    wlock = repo.wlock()
    try:
        bfdirstate = bfutil.open_bfdirstate(ui, repo)
        s = bfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
                              False, False, False)
        (unsure, modified, added, removed, missing, unknown, ignored,
         clean) = s

        bfiles = bfutil.list_bfiles(repo)
        toget = []
        at = 0
        updated = 0
        for bfile in bfiles:
            if not os.path.exists(repo.wjoin(bfutil.standin(bfile))):
                bfdirstate.remove(bfile)
                continue
            if os.path.exists(
                    repo.wjoin(bfutil.standin(os.path.join(bfile + '.orig')))):
                shutil.copyfile(repo.wjoin(bfile), repo.wjoin(bfile + '.orig'))
            at += 1
            expectedhash = repo[None][bfutil.standin(bfile)].data().strip()
            mode = os.stat(repo.wjoin(bfutil.standin(bfile))).st_mode
            if not os.path.exists(
                    repo.wjoin(bfile)) or expectedhash != bfutil.hashfile(
                        repo.wjoin(bfile)):
                path = bfutil.find_file(repo, expectedhash)
                if path is None:
                    toget.append((bfile, expectedhash))
                else:
                    util.makedirs(os.path.dirname(repo.wjoin(bfile)))
                    shutil.copy(path, repo.wjoin(bfile))
                    os.chmod(repo.wjoin(bfile), mode)
                    updated += 1
                    if bfutil.standin(bfile) not in repo['.']:
                        bfdirstate.add(bfutil.unixpath(bfile))
                    elif expectedhash == repo['.'][bfutil.standin(
                            bfile)].data().strip():
                        bfdirstate.normal(bfutil.unixpath(bfile))
                    else:
                        bfutil.dirstate_normaldirty(bfdirstate,
                                                    bfutil.unixpath(bfile))
            elif os.path.exists(repo.wjoin(bfile)) and mode != os.stat(
                    repo.wjoin(bfile)).st_mode:
                os.chmod(repo.wjoin(bfile), mode)
                updated += 1
                if bfutil.standin(bfile) not in repo['.']:
                    bfdirstate.add(bfutil.unixpath(bfile))
                elif expectedhash == repo['.'][bfutil.standin(
                        bfile)].data().strip():
                    bfdirstate.normal(bfutil.unixpath(bfile))
                else:
                    bfutil.dirstate_normaldirty(bfdirstate,
                                                bfutil.unixpath(bfile))

        if toget:
            store = basestore._open_store(repo)
            (success, missing) = store.get(toget)
        else:
            success, missing = [], []

        for (filename, hash) in success:
            mode = os.stat(repo.wjoin(bfutil.standin(filename))).st_mode
            os.chmod(repo.wjoin(filename), mode)
            updated += 1
            if bfutil.standin(filename) not in repo['.']:
                bfdirstate.add(bfutil.unixpath(filename))
            elif hash == repo['.'][bfutil.standin(filename)].data().strip():
                bfdirstate.normal(bfutil.unixpath(filename))
            else:
                bfutil.dirstate_normaldirty(bfdirstate,
                                            bfutil.unixpath(filename))

        removed = 0
        for bfile in bfdirstate:
            if not os.path.exists(repo.wjoin(bfutil.standin(bfile))):
                if os.path.exists(repo.wjoin(bfile)):
                    os.unlink(repo.wjoin(bfile))
                    removed += 1
                    if bfutil.standin(bfile) in repo['.']:
                        bfdirstate.remove(bfutil.unixpath(bfile))
                    else:
                        bfdirstate.forget(bfutil.unixpath(bfile))
            else:
                state = repo.dirstate[bfutil.standin(bfile)]
                if state == 'n':
                    bfdirstate.normal(bfile)
                elif state == 'r':
                    bfdirstate.remove(bfile)
                elif state == 'a':
                    bfdirstate.add(bfile)
                elif state == '?':
                    bfdirstate.forget(bfile)
        bfdirstate.write()
    finally:
        wlock.release()
Example #7
def bfupdate(ui, repo, *pats, **opts):
    '''get/delete big files for current changeset'''
    # prefix user-supplied paths with '.hgbfiles/'
    matcher = bfutil._get_standin_matcher(repo, pats, opts)

    # Check status of standins (.hgbfiles/...) relative to main dirstate
    # (i.e. detect changes to big files that have been bfadd'ed or
    # bfrefresh'ed, but not committed).
    bfutil._check_standins_status(ui, repo, matcher)

    # Check status of actual big files relative to bfiles dirstate
    # (changes that have not been bfrefresh'ed).
    bfdirstate = bfutil._open_bfdirstate(ui, repo)
    checkexec = bfdirstate._checkexec
    bfmatcher = bfutil._get_matcher(repo, pats, opts, showbad=False)
    bfutil._check_bfiles_status(ui, repo, bfdirstate, matcher=bfmatcher)

    store = basestore._open_store(ui, repo)
    updated = removed = 0

    # Use dirstate.walk() rather than repo.walk() for performance: we do
    # not care about unknown files at all, so don't waste time iterating
    # over them.  Should be safe since users should not create files in
    # .hgbfiles; if they do, we'll ignore them here and that's just
    # fine.
    want_files = []                     # hash has changed
    update_mode = []                    # mode may have changed
    for standin in sorted(bfutil.dirstate_walk(repo.dirstate, matcher)):
        want_hash = bfutil._read_standin(repo, standin)
        filename = bfutil._split_standin(standin)
        latest_hash = bfutil._read_latest(repo, filename)

        if (not latest_hash or
              latest_hash != want_hash or
              not os.path.isfile(repo.wjoin(filename))):
            # big file missing or hash has changed
            want_files.append((filename, want_hash))
        else:
            # file content is up-to-date: need to update mode in case
            # only it changed
            if checkexec:
                oldmode = os.stat(repo.wjoin(filename)).st_mode
                newmode = os.stat(repo.wjoin(standin)).st_mode
                if not bfutil._modes_equal(oldmode, newmode):
                    update_mode.append(filename)

    # Some revisions might be stashed in .hg/bfiles/{pending,committed}
    # -- e.g. anything that has been bfadded, bfrefreshed, or committed,
    # but not yet bfput.  Go there first: should be faster than hitting
    # the central store.
    success1 = []
    still_want = []
    for (filename, hash) in want_files:
        found = False
        for dir in ('pending', 'committed'):
            pfilename = repo.join(os.path.join(
                'bfiles', dir, filename, hash))
            if os.path.isfile(pfilename):
                ui.note(_('getting %s\n') % filename)
                destfile = repo.wjoin(filename)
                util.makedirs(os.path.dirname(destfile))
                shutil.copyfile(pfilename, destfile)
                found = True
                break
        if found:
            success1.append((filename, hash))
        else:
            still_want.append((filename, hash))

    # Now go to the central store for whatever we didn't find locally.
    (success2, missing) = store.get(still_want)
    success = success1 + success2
    del success1, success2
    assert len(success) + len(missing) == len(want_files), \
           ('some requested files not accounted for: '
            'wanted %d files, got %d, missing %d'
            % (len(want_files), len(success), len(missing)))

    # Fix permissions of successfully downloaded files plus any files
    # whose mode changed.
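    # os.umask() returns the previous mask, so set a throwaway value and
    # immediately restore it to read the current umask.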
    umask = os.umask(0777)
    os.umask(umask)
    update_mode = [fn for (fn, hash) in success] + update_mode
    for filename in update_mode:
        mode = os.stat(repo.wjoin(bfutil._standin(filename))).st_mode
        mode = mode & ~umask
        os.chmod(repo.wjoin(filename), mode)
        bfdirstate.normal(filename)

    for (filename, hash) in success:
        bfutil._write_latest(repo, filename, hash)
        ui.debug('bfdirstate.normal(%r)\n' % filename)
        updated += 1

    # Iterate over bfdirstate to find any big files from the
    # previous changeset that are no longer in .hgbfiles: they can be
    # deleted (unless they are now normal files).
    ui.debug('searching for big files to remove\n')
    for filename in bfdirstate:
        standin = repo.wjoin(bfutil._standin(filename).replace('/', os.path.sep))
        ui.debug('maybe removing %s\n' % filename)
        if os.path.exists(standin):
            continue
        if bfutil._is_normal(repo, filename):
            # big file has become a normal file
            continue

        ui.note('removing %s\n' % filename)
        lfilename = filename.replace('/', os.path.sep)
        try:
            bfutil.unlinkpath(repo.wjoin(lfilename))
            bfutil.unlinkpath(bfutil._get_latest(repo, lfilename))
        except OSError, err:
            # if the file is already deleted, who cares?
            if err.errno != errno.ENOENT:
                raise
        bfutil.dirstate_drop(bfdirstate, filename)
        removed += 1
Example #8
def update_bfiles(ui, repo):
    wlock = repo.wlock()
    try:
        bfdirstate = bfutil.open_bfdirstate(ui, repo)
        s = bfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False, False, False)
        (unsure, modified, added, removed, missing, unknown, ignored, clean) = s

        bfiles = bfutil.list_bfiles(repo)
        toget = []
        at = 0
        updated = 0
        removed = 0
        printed = False
        if bfiles:
            ui.status(_('Getting changed bfiles\n'))
            printed = True

        for bfile in bfiles:
            at += 1
            if os.path.exists(repo.wjoin(bfile)) and not os.path.exists(repo.wjoin(bfutil.standin(bfile))):
                os.unlink(repo.wjoin(bfile))
                removed += 1
                bfdirstate.forget(bfutil.unixpath(bfile))
                continue
            expectedhash = repo[None][bfutil.standin(bfile)].data().strip()
            mode = os.stat(repo.wjoin(bfutil.standin(bfile))).st_mode
            if not os.path.exists(repo.wjoin(bfile)) or expectedhash != bfutil.hashfile(repo.wjoin(bfile)):
                path = bfutil.find_file(repo, expectedhash)
                if not path:
                    toget.append((bfile, expectedhash))
                else:
                    util.makedirs(os.path.dirname(repo.wjoin(bfile)))
                    shutil.copy(path, repo.wjoin(bfile))
                    os.chmod(repo.wjoin(bfile), mode)
                    updated += 1
                    bfdirstate.normal(bfutil.unixpath(bfile))
            elif os.path.exists(repo.wjoin(bfile)) and mode != os.stat(repo.wjoin(bfile)).st_mode:
                os.chmod(repo.wjoin(bfile), mode)
                updated += 1
                bfdirstate.normal(bfutil.unixpath(bfile))

        if toget:
            store = basestore._open_store(repo)
            (success, missing) = store.get(toget)
        else:
            success, missing = [], []

        for (filename, hash) in success:
            mode = os.stat(repo.wjoin(bfutil.standin(filename))).st_mode
            os.chmod(repo.wjoin(filename), mode)
            updated += 1
            bfdirstate.normal(bfutil.unixpath(filename))

        for bfile in bfdirstate:
            if bfile not in bfiles:
                if os.path.exists(repo.wjoin(bfile)):
                    if not printed:
                        ui.status(_('Getting changed bfiles\n'))
                        printed = True
                    os.unlink(repo.wjoin(bfile))
                    removed += 1
                    bfdirstate.forget(bfutil.unixpath(bfile))

        bfdirstate.write()
        if printed:
            ui.status(_('%d big files updated, %d removed\n') % (updated, removed))
    finally:
        wlock.release()
Example #9
def revert_bfiles(ui, repo):
    wlock = repo.wlock()
    try:
        bfdirstate = bfutil.open_bfdirstate(ui, repo)
        s = bfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False, False, False)
        (unsure, modified, added, removed, missing, unknown, ignored, clean) = s

        bfiles = bfutil.list_bfiles(repo)
        toget = []
        at = 0
        updated = 0
        for bfile in bfiles:
            if not os.path.exists(repo.wjoin(bfutil.standin(bfile))):
                bfdirstate.remove(bfile)
                continue
            if os.path.exists(repo.wjoin(bfutil.standin(os.path.join(bfile + '.orig')))):
                shutil.copyfile(repo.wjoin(bfile), repo.wjoin(bfile + '.orig'))
            at += 1
            expectedhash = repo[None][bfutil.standin(bfile)].data().strip()
            mode = os.stat(repo.wjoin(bfutil.standin(bfile))).st_mode
            if not os.path.exists(repo.wjoin(bfile)) or expectedhash != bfutil.hashfile(repo.wjoin(bfile)):
                path = bfutil.find_file(repo, expectedhash)
                if path is None:
                    toget.append((bfile, expectedhash))
                else:
                    util.makedirs(os.path.dirname(repo.wjoin(bfile)))
                    shutil.copy(path, repo.wjoin(bfile))
                    os.chmod(repo.wjoin(bfile), mode)
                    updated += 1
                    if bfutil.standin(bfile) not in repo['.']:
                        bfdirstate.add(bfutil.unixpath(bfile))
                    elif expectedhash == repo['.'][bfutil.standin(bfile)].data().strip():
                        bfdirstate.normal(bfutil.unixpath(bfile))
                    else:
                        bfutil.dirstate_normaldirty(bfdirstate, bfutil.unixpath(bfile))
            elif os.path.exists(repo.wjoin(bfile)) and mode != os.stat(repo.wjoin(bfile)).st_mode:
                os.chmod(repo.wjoin(bfile), mode)
                updated += 1
                if bfutil.standin(bfile) not in repo['.']:
                    bfdirstate.add(bfutil.unixpath(bfile))
                elif expectedhash == repo['.'][bfutil.standin(bfile)].data().strip():
                    bfdirstate.normal(bfutil.unixpath(bfile))
                else:
                    bfutil.dirstate_normaldirty(bfdirstate, bfutil.unixpath(bfile))

        if toget:
            store = basestore._open_store(repo)
            (success, missing) = store.get(toget)
        else:
            success, missing = [], []

        for (filename, hash) in success:
            mode = os.stat(repo.wjoin(bfutil.standin(filename))).st_mode
            os.chmod(repo.wjoin(filename), mode)
            updated += 1
            if bfutil.standin(filename) not in repo['.']:
                bfdirstate.add(bfutil.unixpath(filename))
            elif hash == repo['.'][bfutil.standin(filename)].data().strip():
                bfdirstate.normal(bfutil.unixpath(filename))
            else:
                bfutil.dirstate_normaldirty(bfdirstate, bfutil.unixpath(filename))

        removed = 0
        for bfile in bfdirstate:
            if not os.path.exists(repo.wjoin(bfutil.standin(bfile))):
                if os.path.exists(repo.wjoin(bfile)):
                    os.unlink(repo.wjoin(bfile))
                    removed += 1
                    if bfutil.standin(bfile) in repo['.']:
                        bfdirstate.remove(bfutil.unixpath(bfile))
                    else:
                        bfdirstate.forget(bfutil.unixpath(bfile))
            else:
                state = repo.dirstate[bfutil.standin(bfile)]
                if state == 'n':
                    bfdirstate.normal(bfile)
                elif state == 'r':
                    bfdirstate.remove(bfile)
                elif state == 'a':
                    bfdirstate.add(bfile)
                elif state == '?':
                    bfdirstate.forget(bfile)
        bfdirstate.write()
    finally:
        wlock.release()