Example #1
def test_metadata_method():
    with no_lingering_errors(), test_tempdir('bup-tmetadata-') as tmpdir:
        bup_dir = tmpdir + '/bup'
        data_path = tmpdir + '/foo'
        os.mkdir(data_path)
        ex('touch', data_path + '/file')
        ex('ln', '-s', 'file', data_path + '/symlink')
        test_time1 = 13 * 1000000000
        test_time2 = 42 * 1000000000
        utime(data_path + '/file', (0, test_time1))
        lutime(data_path + '/symlink', (0, 0))
        utime(data_path, (0, test_time2))
        ex(bup_path, '-d', bup_dir, 'init')
        ex(bup_path, '-d', bup_dir, 'index', '-v', data_path)
        ex(bup_path, '-d', bup_dir, 'save', '-tvvn', 'test', data_path)
        git.check_repo_or_die(bup_dir)
        top = vfs.RefList(None)
        n = top.lresolve('/test/latest' + resolve_parent(data_path))
        m = n.metadata()
        WVPASS(m.mtime == test_time2)
        WVPASS(len(n.subs()) == 2)
        WVPASS(n.name == 'foo')
        WVPASS(set([x.name for x in n.subs()]) == set(['file', 'symlink']))
        for sub in n:
            if sub.name == 'file':
                m = sub.metadata()
                WVPASS(m.mtime == test_time1)
            elif sub.name == 'symlink':
                m = sub.metadata()
                WVPASS(m.mtime == 0)
Example #2
def test_metadata_method():
    tmpdir = tempfile.mkdtemp(prefix='bup-tmetadata-')
    try:
        bup_dir = tmpdir + '/bup'
        data_path = tmpdir + '/foo'
        os.mkdir(data_path)
        ex('touch', data_path + '/file')
        ex('ln', '-s', 'file', data_path + '/symlink')
        test_time1 = 13 * 1000000000
        test_time2 = 42 * 1000000000
        utime(data_path + '/file', (0, test_time1))
        lutime(data_path + '/symlink', (0, 0))
        utime(data_path, (0, test_time2))
        ex(bup_path, '-d', bup_dir, 'init')
        ex(bup_path, '-d', bup_dir, 'index', '-v', data_path)
        ex(bup_path, '-d', bup_dir, 'save', '-tvvn', 'test', data_path)
        git.check_repo_or_die(bup_dir)
        top = vfs.RefList(None)
        n = top.lresolve('/test/latest' + realpath(data_path))
        m = n.metadata()
        WVPASS(m.mtime == test_time2)
        WVPASS(len(n.subs()) == 2)
        WVPASS(n.name == 'foo')
        WVPASS(set([x.name for x in n.subs()]) == set(['file', 'symlink']))
        for sub in n:
            if sub.name == 'file':
                m = sub.metadata()
                WVPASS(m.mtime == test_time1)
            elif sub.name == 'symlink':
                m = sub.metadata()
                WVPASS(m.mtime == 0)
    finally:
        subprocess.call(['rm', '-rf', tmpdir])
Example #3
def call_save(dirname, optDict, callbacks={}):
    global count, subcount, lastremain

    opt = OptionsDict(optDict)
    flags = {}
    extra = [dirname]


    git.check_repo_or_die()
    if not (opt.tree or opt.commit or opt.name):
        o.fatal("use one or more of -t, -c, -n")
    if not extra:
        o.fatal("no filenames given")

    opt.progress = (istty2 and not opt.quiet)
    opt.smaller = parse_num(opt.smaller or 0)
    if opt.bwlimit:
        client.bwlimit = parse_num(opt.bwlimit)

    if opt.date:
        date = parse_date_or_fatal(opt.date, o.fatal)
    else:
        date = time.time()

    if opt.strip and opt.strip_path:
        o.fatal("--strip is incompatible with --strip-path")

    graft_points = []
    if opt.graft:
        if opt.strip:
            o.fatal("--strip is incompatible with --graft")

        if opt.strip_path:
            o.fatal("--strip-path is incompatible with --graft")

        for (option, parameter) in flags:
            if option == "--graft":
                splitted_parameter = parameter.split('=')
                if len(splitted_parameter) != 2:
                    o.fatal("a graft point must be of the form old_path=new_path")
                old_path, new_path = splitted_parameter
                if not (old_path and new_path):
                    o.fatal("a graft point cannot be empty")
                graft_points.append((realpath(old_path), realpath(new_path)))

    is_reverse = os.environ.get('BUP_SERVER_REVERSE')
    if is_reverse and opt.remote:
        o.fatal("don't use -r in reverse mode; it's automatic")

    if opt.name and opt.name.startswith('.'):
        o.fatal("'%s' is not a valid branch name" % opt.name)
    refname = opt.name and 'refs/heads/%s' % opt.name or None
    if opt.remote or is_reverse:
        try:
            cli = client.Client(opt.remote)
        except client.ClientError, e:
            log('error: %s' % e)
            sys.exit(1)
        oldref = refname and cli.read_ref(refname) or None
        w = cli.new_packwriter(compression_level=opt.compress)
Example #4
def test_metadata_method():
    tmpdir = tempfile.mkdtemp(prefix="bup-tmetadata-")
    try:
        bup_dir = tmpdir + "/bup"
        data_path = tmpdir + "/foo"
        os.mkdir(data_path)
        ex("touch", data_path + "/file")
        ex("ln", "-s", "file", data_path + "/symlink")
        test_time1 = 13 * 1000000000
        test_time2 = 42 * 1000000000
        utime(data_path + "/file", (0, test_time1))
        lutime(data_path + "/symlink", (0, 0))
        utime(data_path, (0, test_time2))
        ex(bup_path, "-d", bup_dir, "init")
        ex(bup_path, "-d", bup_dir, "index", "-v", data_path)
        ex(bup_path, "-d", bup_dir, "save", "-tvvn", "test", data_path)
        git.check_repo_or_die(bup_dir)
        top = vfs.RefList(None)
        n = top.lresolve("/test/latest" + realpath(data_path))
        m = n.metadata()
        WVPASS(m.mtime == test_time2)
        WVPASS(len(n.subs()) == 2)
        WVPASS(n.name == "foo")
        WVPASS(set([x.name for x in n.subs()]) == set(["file", "symlink"]))
        for sub in n:
            if sub.name == "file":
                m = sub.metadata()
                WVPASS(m.mtime == test_time1)
            elif sub.name == "symlink":
                m = sub.metadata()
                WVPASS(m.mtime == 0)
    finally:
        subprocess.call(["rm", "-rf", tmpdir])
Example #5
def set_dir(conn, arg):
    git.check_repo_or_die(arg)
    # OK. we now know the path is a proper repository. Record this path in the
    # environment so that subprocesses inherit it and know where to operate.
    os.environ['BUP_DIR'] = arg
    debug1('bup server: bupdir is %r\n' % git.repodir)
    _set_mode()
    conn.ok()
Example #6
 def run(self):
     self.sock.listen(5)
     while True:
         git.check_repo_or_die()
         top = vfs.RefList(None)
         th = FTPserverThread(top, self.sock.accept())
         th.daemon = True
         th.start()
Example #7
def send_index(conn, name):
    git.check_repo_or_die()
    assert(name.find('/') < 0)
    assert(name.endswith('.idx'))
    idx = git.open_idx(git.repo('objects/pack/%s' % name))
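    # Reply with the .idx as a length-prefixed blob: a 4-byte big-endian
    # size followed by the raw mapped contents of the index file.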
    conn.write(struct.pack('!I', len(idx.map)))
    conn.write(idx.map)
    conn.ok()
Example #8
File: server-cmd.py Project: 3v/bup
def _init_session(reinit_with_new_repopath=None):
    if reinit_with_new_repopath is None and git.repodir:
        return
    git.check_repo_or_die(reinit_with_new_repopath)
    # OK. we now know the path is a proper repository. Record this path in the
    # environment so that subprocesses inherit it and know where to operate.
    os.environ['BUP_DIR'] = git.repodir
    debug1('bup server: bupdir is %r\n' % git.repodir)
    _set_mode()
Example #9
def list_indexes(conn, junk):
    git.check_repo_or_die()
    suffix = ''
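    # In dumb server mode each index name is suffixed with ' load' so the
    # client knows it should fetch that index.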
    if dumb_server_mode:
        suffix = ' load'
    for f in os.listdir(git.repo('objects/pack')):
        if f.endswith('.idx'):
            conn.write('%s%s\n' % (f, suffix))
    conn.ok()
Example #10
def receive_objects_v2(conn, junk):
    global suspended_w
    git.check_repo_or_die()
    suggested = set()
    if suspended_w:
        w = suspended_w
        suspended_w = None
    else:
        if dumb_server_mode:
            w = git.PackWriter(objcache_maker=None)
        else:
            w = git.PackWriter()
    while 1:
        ns = conn.read(4)
        if not ns:
            w.abort()
            raise Exception('object read: expected length header, got EOF\n')
        n = struct.unpack('!I', ns)[0]
        #debug2('expecting %d bytes\n' % n)
        if not n:
            debug1('bup server: received %d object%s.\n'
                   % (w.count, w.count != 1 and "s" or ''))
            fullpath = w.close(run_midx=not dumb_server_mode)
            if fullpath:
                (dir, name) = os.path.split(fullpath)
                conn.write('%s.idx\n' % name)
            conn.ok()
            return
        elif n == 0xffffffff:
            debug2('bup server: receive-objects suspended.\n')
            suspended_w = w
            conn.ok()
            return
            
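        # Each object record is a 20-byte sha1, a 4-byte big-endian crc32,
        # and then the object data itself (all counted in n above).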
        shar = conn.read(20)
        crcr = struct.unpack('!I', conn.read(4))[0]
        n -= 20 + 4
        buf = conn.read(n)  # object sizes in bup are reasonably small
        #debug2('read %d bytes\n' % n)
        _check(w, n, len(buf), 'object read: expected %d bytes, got %d\n')
        if not dumb_server_mode:
            oldpack = w.exists(shar, want_source=True)
            if oldpack:
                assert(not oldpack == True)
                assert(oldpack.endswith('.idx'))
                (dir,name) = os.path.split(oldpack)
                if not (name in suggested):
                    debug1("bup server: suggesting index %s\n"
                           % git.shorten_hash(name))
                    debug1("bup server:   because of object %s\n"
                           % shar.encode('hex'))
                    conn.write('index %s\n' % name)
                    suggested.add(name)
                continue
        nw, crc = w._raw_write((buf,), sha=shar)
        _check(w, crcr, crc, 'object read: expected crc %d, got %d\n')
Example #11
def cat(conn, id):
    git.check_repo_or_die()
    try:
        for blob in git.cat(id):
            conn.write(struct.pack('!I', len(blob)))
            conn.write(blob)
    except KeyError, e:
        log('server: error: %s\n' % e)
        conn.write('\0\0\0\0')
        conn.error(e)
Example #12
File: tgit.py Project: Kelimion/bup
def test_check_repo_or_die():
    git.check_repo_or_die()
    WVPASS('check_repo_or_die')  # if we reach this point the call above passed

    os.rename('pybuptest.tmp/objects/pack', 'pybuptest.tmp/objects/pack.tmp')
    open('pybuptest.tmp/objects/pack', 'w').close()
    try:
        git.check_repo_or_die()
    except SystemExit, e:
        WVPASSEQ(e.code, 14)
Example #13
def test_commit_parsing():
    def showval(commit, val):
        return readpipe(['git', 'show', '-s',
                         '--pretty=format:%s' % val, commit]).strip()
    initial_failures = wvfailure_count()
    orig_cwd = os.getcwd()
    tmpdir = tempfile.mkdtemp(dir=bup_tmp, prefix='bup-tgit-')
    workdir = tmpdir + "/work"
    repodir = workdir + '/.git'
    try:
        readpipe(['git', 'init', workdir])
        os.environ['GIT_DIR'] = os.environ['BUP_DIR'] = repodir
        git.check_repo_or_die(repodir)
        os.chdir(workdir)
        with open('foo', 'w') as f:
            print >> f, 'bar'
        readpipe(['git', 'add', '.'])
        readpipe(['git', 'commit', '-am', 'Do something',
                  '--author', 'Someone <someone@somewhere>',
                  '--date', 'Sat Oct 3 19:48:49 2009 -0400'])
        commit = readpipe(['git', 'show-ref', '-s', 'master']).strip()
        parents = showval(commit, '%P')
        tree = showval(commit, '%T')
        cname = showval(commit, '%cn')
        cmail = showval(commit, '%ce')
        cdate = showval(commit, '%ct')
        coffs = showval(commit, '%ci')
        coffs = coffs[-5:]
        coff = (int(coffs[-4:-2]) * 60 * 60) + (int(coffs[-2:]) * 60)
        if coffs[-5] == '-':
            coff = - coff
        commit_items = git.get_commit_items(commit, git.cp())
        WVPASSEQ(commit_items.parents, [])
        WVPASSEQ(commit_items.tree, tree)
        WVPASSEQ(commit_items.author_name, 'Someone')
        WVPASSEQ(commit_items.author_mail, 'someone@somewhere')
        WVPASSEQ(commit_items.author_sec, 1254613729)
        WVPASSEQ(commit_items.author_offset, -(4 * 60 * 60))
        WVPASSEQ(commit_items.committer_name, cname)
        WVPASSEQ(commit_items.committer_mail, cmail)
        WVPASSEQ(commit_items.committer_sec, int(cdate))
        WVPASSEQ(commit_items.committer_offset, coff)
        WVPASSEQ(commit_items.message, 'Do something\n')
        with open('bar', 'w') as f:
            print >> f, 'baz'
        readpipe(['git', 'add', '.'])
        readpipe(['git', 'commit', '-am', 'Do something else'])
        child = readpipe(['git', 'show-ref', '-s', 'master']).strip()
        parents = showval(child, '%P')
        commit_items = git.get_commit_items(child, git.cp())
        WVPASSEQ(commit_items.parents, [commit])
    finally:
        os.chdir(orig_cwd)
    if wvfailure_count() == initial_failures:
        subprocess.call(['rm', '-rf', tmpdir])
Example #14
def cat(conn, id):
    global cat_pipe
    git.check_repo_or_die()
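    # Create the CatPipe lazily and reuse it across cat requests.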
    if not cat_pipe:
        cat_pipe = git.CatPipe()
    try:
        for blob in cat_pipe.join(id):
            conn.write(struct.pack('!I', len(blob)))
            conn.write(blob)
    except KeyError, e:
        log('server: error: %s\n' % e)
        conn.write('\0\0\0\0')
        conn.error(e)
Example #15
def test_check_repo_or_die():
    initial_failures = wvfailure_count()
    orig_cwd = os.getcwd()
    tmpdir = tempfile.mkdtemp(dir=bup_tmp, prefix='bup-tgit-')
    os.environ['BUP_DIR'] = bupdir = tmpdir + "/bup"
    try:
        os.chdir(tmpdir)
        git.init_repo(bupdir)
        git.check_repo_or_die()
        WVPASS('check_repo_or_die')  # if we reach this point the call above passed

        os.rename(bupdir + '/objects/pack', bupdir + '/objects/pack.tmp')
        open(bupdir + '/objects/pack', 'w').close()
        try:
            git.check_repo_or_die()
        except SystemExit, e:
            WVPASSEQ(e.code, 14)
        else:
            WVFAIL()
        os.unlink(bupdir + '/objects/pack')
        os.rename(bupdir + '/objects/pack.tmp', bupdir + '/objects/pack')

        try:
            git.check_repo_or_die('nonexistantbup.tmp')
        except SystemExit, e:
            WVPASSEQ(e.code, 15)
        else:
            WVFAIL()
    finally:
        os.chdir(orig_cwd)
    if wvfailure_count() == initial_failures:
        subprocess.call(['rm', '-rf', tmpdir])
Example #16
File: tgit.py Project: senseb/bup
def test_check_repo_or_die():
    initial_failures = wvfailure_count()
    orig_cwd = os.getcwd()
    tmpdir = tempfile.mkdtemp(dir=bup_tmp, prefix='bup-tgit-')
    os.environ['BUP_DIR'] = bupdir = tmpdir + "/bup"
    try:
        os.chdir(tmpdir)
        git.init_repo(bupdir)
        git.check_repo_or_die()
        WVPASS('check_repo_or_die')  # if we reach this point the call above passed

        os.rename(bupdir + '/objects/pack', bupdir + '/objects/pack.tmp')
        open(bupdir + '/objects/pack', 'w').close()
        try:
            git.check_repo_or_die()
        except SystemExit as e:
            WVPASSEQ(e.code, 14)
        else:
            WVFAIL()
        os.unlink(bupdir + '/objects/pack')
        os.rename(bupdir + '/objects/pack.tmp', bupdir + '/objects/pack')

        try:
            git.check_repo_or_die('nonexistantbup.tmp')
        except SystemExit as e:
            WVPASSEQ(e.code, 15)
        else:
            WVFAIL()
    finally:
        os.chdir(orig_cwd)
    if wvfailure_count() == initial_failures:
        subprocess.call(['rm', '-rf', tmpdir])
Example #17
File: tgit.py Project: 0xkag/bup
def test_check_repo_or_die():
    with no_lingering_errors(), test_tempdir('bup-tgit-') as tmpdir:
        os.environ['BUP_DIR'] = bupdir = tmpdir + "/bup"
        orig_cwd = os.getcwd()
        try:
            os.chdir(tmpdir)
            git.init_repo(bupdir)
            git.check_repo_or_die()
            WVPASS('check_repo_or_die')  # if we reach this point the call above passed

            os.rename(bupdir + '/objects/pack', bupdir + '/objects/pack.tmp')
            open(bupdir + '/objects/pack', 'w').close()
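            # objects/pack is now a regular file, so the directory is no
            # longer a valid repository and check_repo_or_die() should exit
            # with code 14.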
            try:
                git.check_repo_or_die()
            except SystemExit as e:
                WVPASSEQ(e.code, 14)
            else:
                WVFAIL()
            os.unlink(bupdir + '/objects/pack')
            os.rename(bupdir + '/objects/pack.tmp', bupdir + '/objects/pack')

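            # A repository path that does not exist at all should produce
            # exit code 15.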
            try:
                git.check_repo_or_die('nonexistantbup.tmp')
            except SystemExit as e:
                WVPASSEQ(e.code, 15)
            else:
                WVFAIL()
        finally:
            os.chdir(orig_cwd)
Example #18
File: tmetadata.py Project: bup/bup
def test_metadata_method():
    with no_lingering_errors():
        with test_tempdir('bup-tmetadata-') as tmpdir:
            bup_dir = tmpdir + '/bup'
            data_path = tmpdir + '/foo'
            os.mkdir(data_path)
            ex('touch', data_path + '/file')
            ex('ln', '-s', 'file', data_path + '/symlink')
            test_time1 = 13 * 1000000000
            test_time2 = 42 * 1000000000
            utime(data_path + '/file', (0, test_time1))
            lutime(data_path + '/symlink', (0, 0))
            utime(data_path, (0, test_time2))
            ex(bup_path, '-d', bup_dir, 'init')
            ex(bup_path, '-d', bup_dir, 'index', '-v', data_path)
            ex(bup_path, '-d', bup_dir, 'save', '-tvvn', 'test', data_path)
            git.check_repo_or_die(bup_dir)
            repo = LocalRepo()
            resolved = vfs.resolve(repo,
                                   '/test/latest' + resolve_parent(data_path),
                                   follow=False)
            leaf_name, leaf_item = resolved[-1]
            m = leaf_item.meta
            WVPASS(m.mtime == test_time2)
            WVPASS(leaf_name == 'foo')
            contents = tuple(vfs.contents(repo, leaf_item))
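            # vfs.contents() also yields a '.' entry for the directory itself,
            # hence three entries rather than two.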
            WVPASS(len(contents) == 3)
            WVPASSEQ(frozenset(name for name, item in contents),
                     frozenset(('.', 'file', 'symlink')))
            for name, item in contents:
                if name == 'file':
                    m = item.meta
                    WVPASS(m.mtime == test_time1)
                elif name == 'symlink':
                    m = item.meta
                    WVPASSEQ(m.symlink_target, 'file')
                    WVPASSEQ(m.size, 4)
                    WVPASSEQ(m.mtime, 0)
Example #19
def update_ref(conn, refname):
    git.check_repo_or_die()
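    # The client sends the new value and then the old value, one hex-encoded
    # hash per line.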
    newval = conn.readline().strip()
    oldval = conn.readline().strip()
    git.update_ref(refname, newval.decode('hex'), oldval.decode('hex'))
    conn.ok()
Example #20
File: save.py Project: fakegit/bup
def main(argv):
    handle_ctrl_c()
    opt = opts_from_cmdline(argv)
    client.bwlimit = opt.bwlimit
    git.check_repo_or_die()

    remote_dest = opt.remote or opt.is_reverse
    if not remote_dest:
        repo = git
        cli = nullcontext()
    else:
        try:
            cli = repo = client.Client(opt.remote)
        except client.ClientError as e:
            log('error: %s' % e)
            sys.exit(1)

    # cli creation must be last nontrivial command in each if clause above
    with cli:
        if not remote_dest:
            w = git.PackWriter(compression_level=opt.compress)
        else:
            w = cli.new_packwriter(compression_level=opt.compress)

        with w:
            sys.stdout.flush()
            out = byte_stream(sys.stdout)

            if opt.name:
                refname = b'refs/heads/%s' % opt.name
                parent = repo.read_ref(refname)
            else:
                refname = parent = None

            indexfile = opt.indexfile or git.repo(b'bupindex')
            try:
                msr = index.MetaStoreReader(indexfile + b'.meta')
            except IOError as ex:
                if ex.errno != ENOENT:
                    raise
                log('error: cannot access %r; have you run bup index?'
                    % path_msg(indexfile))
                sys.exit(1)
            with msr, \
                 hlinkdb.HLinkDB(indexfile + b'.hlink') as hlink_db, \
                 index.Reader(indexfile) as reader:
                tree = save_tree(opt, reader, hlink_db, msr, w)
            if opt.tree:
                out.write(hexlify(tree))
                out.write(b'\n')
            if opt.commit or opt.name:
                commit = commit_tree(tree, parent, opt.date, argv, w)
                if opt.commit:
                    out.write(hexlify(commit))
                    out.write(b'\n')

        # packwriter must be closed before we can update the ref
        if opt.name:
            repo.update_ref(refname, commit, parent)

    if saved_errors:
        log('WARNING: %d errors encountered while saving.\n' % len(saved_errors))
        sys.exit(1)
Example #21
def main(argv):
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])

    git.check_repo_or_die()

    tags = [t for sublist in git.tags().values() for t in sublist]

    if opt.delete:
        # git.delete_ref() doesn't complain if a ref doesn't exist.  We
        # could implement this verification but we'd need to read in the
        # contents of the tag file and pass the hash, and we already know
        # about the tag's existence via "tags".
        tag_name = argv_bytes(opt.delete)
        if not opt.force and tag_name not in tags:
            log("error: tag '%s' doesn't exist\n" % path_msg(tag_name))
            sys.exit(1)
        tag_file = b'refs/tags/%s' % tag_name
        git.delete_ref(tag_file)
        sys.exit(0)

    if not extra:
        for t in tags:
            sys.stdout.flush()
            out = byte_stream(sys.stdout)
            out.write(t)
            out.write(b'\n')
        sys.exit(0)
    elif len(extra) != 2:
        o.fatal('expected commit ref and hash')

    tag_name, commit = map(argv_bytes, extra[:2])
    if not tag_name:
        o.fatal("tag name must not be empty.")
    debug1("args: tag name = %s; commit = %s\n"
           % (path_msg(tag_name), commit.decode('ascii')))

    if tag_name in tags and not opt.force:
        log("bup: error: tag '%s' already exists\n" % path_msg(tag_name))
        sys.exit(1)

    if tag_name.startswith(b'.'):
        o.fatal("'%s' is not a valid tag name." % path_msg(tag_name))

    try:
        hash = git.rev_parse(commit)
    except git.GitError as e:
        log("bup: error: %s" % e)
        sys.exit(2)

    if not hash:
        log("bup: error: commit %s not found.\n" % commit.decode('ascii'))
        sys.exit(2)

    pL = git.PackIdxList(git.repo(b'objects/pack'))
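    # Refuse to create the tag if the object is not actually present in the
    # local packs.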
    if not pL.exists(hash):
        log("bup: error: commit %s not found.\n" % commit.decode('ascii'))
        sys.exit(2)

    tag_file = git.repo(b'refs/tags/' + tag_name)
    try:
        tag = open(tag_file, 'wb')
    except OSError as e:
        log("bup: error: could not create tag '%s': %s" % (path_msg(tag_name), e))
        sys.exit(3)
    with tag as tag:
        tag.write(hexlify(hash))
        tag.write(b'\n')
Example #22
        git.check_repo_or_die()
        WVPASS('check_repo_or_die')  # if we reach this point the call above passed

        os.rename(bupdir + '/objects/pack', bupdir + '/objects/pack.tmp')
        open(bupdir + '/objects/pack', 'w').close()
        try:
            git.check_repo_or_die()
        except SystemExit, e:
            WVPASSEQ(e.code, 14)
        else:
            WVFAIL()
        os.unlink(bupdir + '/objects/pack')
        os.rename(bupdir + '/objects/pack.tmp', bupdir + '/objects/pack')

        try:
            git.check_repo_or_die('nonexistantbup.tmp')
        except SystemExit, e:
            WVPASSEQ(e.code, 15)
        else:
            WVFAIL()
    finally:
        os.chdir(orig_cwd)
    if wvfailure_count() == initial_failures:
        subprocess.call(['rm', '-rf', tmpdir])


@wvtest
def test_commit_parsing():
    def showval(commit, val):
        return readpipe(['git', 'show', '-s',
                         '--pretty=format:%s' % val, commit]).strip()
Example #23
File: rm-cmd.py Project: bup/bup
from bup.git import check_repo_or_die
from bup.options import Options
from bup.helpers import die_if_errors, handle_ctrl_c, log
from bup.repo import LocalRepo
from bup.rm import bup_rm

optspec = """
bup rm <branch|save...>
--
#,compress=  set compression level to # (0-9, 9 is highest) [6]
v,verbose    increase verbosity (can be specified multiple times)
unsafe       use the command even though it may be DANGEROUS
"""

handle_ctrl_c()

o = Options(optspec)
opt, flags, extra = o.parse(sys.argv[1:])

if not opt.unsafe:
    o.fatal('refusing to run dangerous, experimental command without --unsafe')

if len(extra) < 1:
    o.fatal('no paths specified')

check_repo_or_die()
repo = LocalRepo()
bup_rm(repo, extra, compression=opt.compress, verbosity=opt.verbose)
die_if_errors()
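A minimal sketch of the pattern shared by most of the examples above (my own illustration, not taken from the bup source): verify the repository once at startup, then resolve paths inside it.

from bup import git

git.check_repo_or_die()  # exits with code 14 or 15 if there is no usable repository
packdir = git.repo(b'objects/pack')  # newer bup uses bytes paths; older versions use str
print(packdir)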
Example #24
def set_dir(conn, arg):
    git.check_repo_or_die(arg)
    debug1('bup server: bupdir is %r\n' % git.repodir)
    _set_mode()
    conn.ok()
Example #25
def main():
    o = options.Options(optspec)
    opt, flags, extra = o.parse(sys.argv[1:])
    verbosity = (opt.verbose or 0) if not opt.quiet else -1
    if opt.remote:
        opt.remote = argv_bytes(opt.remote)
    if opt.outdir:
        opt.outdir = argv_bytes(opt.outdir)

    git.check_repo_or_die()

    if not extra:
        o.fatal('must specify at least one filename to restore')

    exclude_rxs = parse_rx_excludes(flags, o.fatal)

    owner_map = {}
    for map_type in ('user', 'group', 'uid', 'gid'):
        owner_map[map_type] = parse_owner_mappings(map_type, flags, o.fatal)

    if opt.outdir:
        mkdirp(opt.outdir)
        os.chdir(opt.outdir)

    if opt.remote:
        opt.remote = argv_bytes(opt.remote)
    repo = from_opts(opt, reverse=False)
    top = fsencode(os.getcwd())
    hardlinks = {}
    for path in [argv_bytes(x) for x in extra]:
        if not valid_restore_path(path):
            add_error("path %r doesn't include a branch and revision" % path)
            continue
        try:
            resolved = vfs.resolve(repo, path, want_meta=True, follow=False)
        except vfs.IOError as e:
            add_error(e)
            continue
        if len(resolved) == 3 and resolved[2][0] == b'latest':
            # Follow latest symlink to the actual save
            try:
                resolved = vfs.resolve(repo,
                                       b'latest',
                                       parent=resolved[:-1],
                                       want_meta=True)
            except vfs.IOError as e:
                add_error(e)
                continue
            # Rename it back to 'latest'
            resolved = tuple(elt if i != 2 else (b'latest', ) + elt[1:]
                             for i, elt in enumerate(resolved))
        path_parent, path_name = os.path.split(path)
        leaf_name, leaf_item = resolved[-1]
        if not leaf_item:
            add_error('error: cannot access %r in %r' %
                      ('/'.join(name for name, item in resolved), path))
            continue
        if not path_name or path_name == b'.':
            # Source is /foo/what/ever/ or /foo/what/ever/. -- extract
            # what/ever/* to the current directory, and if name == '.'
            # (i.e. /foo/what/ever/.), then also restore what/ever's
            # metadata to the current directory.
            treeish = vfs.item_mode(leaf_item)
            if not treeish:
                add_error('%r cannot be restored as a directory' % path)
            else:
                items = vfs.contents(repo, leaf_item, want_meta=True)
                dot, leaf_item = next(items, None)
                assert dot == b'.'
                for sub_name, sub_item in items:
                    restore(repo, b'', sub_name, sub_item, top, opt.sparse,
                            opt.numeric_ids, owner_map, exclude_rxs, verbosity,
                            hardlinks)
                if path_name == b'.':
                    leaf_item = vfs.augment_item_meta(repo,
                                                      leaf_item,
                                                      include_size=True)
                    apply_metadata(leaf_item.meta, b'.', opt.numeric_ids,
                                   owner_map)
        else:
            restore(repo, b'', leaf_name, leaf_item, top, opt.sparse,
                    opt.numeric_ids, owner_map, exclude_rxs, verbosity,
                    hardlinks)

    if verbosity >= 0:
        progress('Restoring: %d, done.\n' % total_restored)
    die_if_errors()
Example #26
def main():
    o = options.Options(optspec)
    opt, flags, extra = o.parse(sys.argv[1:])
    verbosity = opt.verbose if not opt.quiet else -1

    git.check_repo_or_die()

    if not extra:
        o.fatal('must specify at least one filename to restore')

    exclude_rxs = parse_rx_excludes(flags, o.fatal)

    owner_map = {}
    for map_type in ('user', 'group', 'uid', 'gid'):
        owner_map[map_type] = parse_owner_mappings(map_type, flags, o.fatal)

    if opt.outdir:
        mkdirp(opt.outdir)
        os.chdir(opt.outdir)

    repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()
    top = os.getcwd()
    hardlinks = {}
    for path in extra:
        if not valid_restore_path(path):
            add_error("path %r doesn't include a branch and revision" % path)
            continue
        try:
            resolved = vfs.lresolve(repo, path, want_meta=True)
        except vfs.IOError as e:
            add_error(e)
            continue
        path_parent, path_name = os.path.split(path)
        leaf_name, leaf_item = resolved[-1]
        if not leaf_item:
            add_error('error: cannot access %r in %r' %
                      ('/'.join(name for name, item in resolved), path))
            continue
        if not path_name or path_name == '.':
            # Source is /foo/what/ever/ or /foo/what/ever/. -- extract
            # what/ever/* to the current directory, and if name == '.'
            # (i.e. /foo/what/ever/.), then also restore what/ever's
            # metadata to the current directory.
            treeish = vfs.item_mode(leaf_item)
            if not treeish:
                add_error('%r cannot be restored as a directory' % path)
            else:
                items = vfs.contents(repo, leaf_item, want_meta=True)
                dot, leaf_item = next(items, None)
                assert (dot == '.')
                for sub_name, sub_item in items:
                    restore(repo, '', sub_name, sub_item, top, opt.sparse,
                            opt.numeric_ids, owner_map, exclude_rxs, verbosity,
                            hardlinks)
                if path_name == '.':
                    leaf_item = vfs.augment_item_meta(repo,
                                                      leaf_item,
                                                      include_size=True)
                    apply_metadata(leaf_item.meta, '.', opt.numeric_ids,
                                   owner_map)
        else:
            restore(repo, '', leaf_name, leaf_item, top, opt.sparse,
                    opt.numeric_ids, owner_map, exclude_rxs, verbosity,
                    hardlinks)

    if verbosity >= 0:
        progress('Restoring: %d, done.\n' % total_restored)
    die_if_errors()
Example #27
def main(argv):
    global opt, par2_ok

    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    opt.verbose = opt.verbose or 0

    par2_setup()
    if opt.par2_ok:
        if par2_ok:
            sys.exit(0)  # 'true' in sh
        else:
            sys.exit(1)
    if opt.disable_par2:
        par2_ok = 0

    git.check_repo_or_die()

    if extra:
        extra = [argv_bytes(x) for x in extra]
    else:
        debug('fsck: No filenames given: checking all packs.\n')
        extra = glob.glob(git.repo(b'objects/pack/*.pack'))

    sys.stdout.flush()
    out = byte_stream(sys.stdout)
    code = 0
    count = 0
    outstanding = {}
    for name in extra:
        if name.endswith(b'.pack'):
            base = name[:-5]
        elif name.endswith(b'.idx'):
            base = name[:-4]
        elif name.endswith(b'.par2'):
            base = name[:-5]
        elif os.path.exists(name + b'.pack'):
            base = name
        else:
            raise Exception('%r is not a pack file!' % name)
        (dir, last) = os.path.split(base)
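        # Note whether recovery data exists; an empty .par2 file counts as absent.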
        par2_exists = os.path.exists(base + b'.par2')
        if par2_exists and os.stat(base + b'.par2').st_size == 0:
            par2_exists = 0
        sys.stdout.flush()  # Not sure we still need this, but it'll flush out too
        debug('fsck: checking %r (%s)\n' %
              (last, par2_ok and par2_exists and 'par2' or 'git'))
        if not opt.verbose:
            progress('fsck (%d/%d)\r' % (count, len(extra)))

        if not opt.jobs:
            nc = do_pack(base, last, par2_exists, out)
            code = code or nc
            count += 1
        else:
            while len(outstanding) >= opt.jobs:
                (pid, nc) = os.wait()
                nc >>= 8
                if pid in outstanding:
                    del outstanding[pid]
                    code = code or nc
                    count += 1
            pid = os.fork()
            if pid:  # parent
                outstanding[pid] = 1
            else:  # child
                try:
                    sys.exit(do_pack(base, last, par2_exists, out))
                except Exception as e:
                    log('exception: %r\n' % e)
                    sys.exit(99)

    while len(outstanding):
        (pid, nc) = os.wait()
        nc >>= 8
        if pid in outstanding:
            del outstanding[pid]
            code = code or nc
            count += 1
        if not opt.verbose:
            progress('fsck (%d/%d)\r' % (count, len(extra)))

    if istty2:
        debug('fsck done.           \n')
    sys.exit(code)
Example #28
def set_dir(conn, arg):
    git.check_repo_or_die(arg)
    debug1('bup server: bupdir is %r\n' % git.repodir)
    _set_mode()
    conn.ok()
Example #29
def list_indexes(conn, junk):
    git.check_repo_or_die()
    for f in os.listdir(git.repo('objects/pack')):
        if f.endswith('.idx'):
            conn.write('%s\n' % f)
    conn.ok()
Example #30
def set_dir(conn, arg):
    git.check_repo_or_die(arg)
    log('bup server: bupdir is %r\n' % git.repodir)
    conn.ok()
Example #31
def test_commit_parsing(tmpdir):
    def restore_env_var(name, val):
        if val is None:
            del environ[name]
        else:
            environ[name] = val

    def showval(commit, val):
        return readpipe(
            [b'git', b'show', b'-s',
             b'--pretty=format:%s' % val, commit]).strip()

    orig_cwd = os.getcwd()
    workdir = tmpdir + b'/work'
    repodir = workdir + b'/.git'
    orig_author_name = environ.get(b'GIT_AUTHOR_NAME')
    orig_author_email = environ.get(b'GIT_AUTHOR_EMAIL')
    orig_committer_name = environ.get(b'GIT_COMMITTER_NAME')
    orig_committer_email = environ.get(b'GIT_COMMITTER_EMAIL')
    environ[b'GIT_AUTHOR_NAME'] = b'bup test'
    environ[b'GIT_COMMITTER_NAME'] = environ[b'GIT_AUTHOR_NAME']
    environ[b'GIT_AUTHOR_EMAIL'] = b'bup@a425bc70a02811e49bdf73ee56450e6f'
    environ[b'GIT_COMMITTER_EMAIL'] = environ[b'GIT_AUTHOR_EMAIL']
    try:
        readpipe([b'git', b'init', workdir])
        environ[b'GIT_DIR'] = environ[b'BUP_DIR'] = repodir
        git.check_repo_or_die(repodir)
        os.chdir(workdir)
        with open('foo', 'w') as f:
            print('bar', file=f)
        readpipe([b'git', b'add', b'.'])
        readpipe([
            b'git', b'commit', b'-am', b'Do something', b'--author',
            b'Someone <someone@somewhere>', b'--date',
            b'Sat Oct 3 19:48:49 2009 -0400'
        ])
        commit = readpipe([b'git', b'show-ref', b'-s', b'master']).strip()
        parents = showval(commit, b'%P')
        tree = showval(commit, b'%T')
        cname = showval(commit, b'%cn')
        cmail = showval(commit, b'%ce')
        cdate = showval(commit, b'%ct')
        coffs = showval(commit, b'%ci')
        coffs = coffs[-5:]
        coff = (int(coffs[-4:-2]) * 60 * 60) + (int(coffs[-2:]) * 60)
        if bytes_from_byte(coffs[-5]) == b'-':
            coff = -coff
        commit_items = git.get_commit_items(commit, git.cp())
        WVPASSEQ(commit_items.parents, [])
        WVPASSEQ(commit_items.tree, tree)
        WVPASSEQ(commit_items.author_name, b'Someone')
        WVPASSEQ(commit_items.author_mail, b'someone@somewhere')
        WVPASSEQ(commit_items.author_sec, 1254613729)
        WVPASSEQ(commit_items.author_offset, -(4 * 60 * 60))
        WVPASSEQ(commit_items.committer_name, cname)
        WVPASSEQ(commit_items.committer_mail, cmail)
        WVPASSEQ(commit_items.committer_sec, int(cdate))
        WVPASSEQ(commit_items.committer_offset, coff)
        WVPASSEQ(commit_items.message, b'Do something\n')
        with open(b'bar', 'wb') as f:
            f.write(b'baz\n')
        readpipe([b'git', b'add', b'.'])
        readpipe([b'git', b'commit', b'-am', b'Do something else'])
        child = readpipe([b'git', b'show-ref', b'-s', b'master']).strip()
        parents = showval(child, b'%P')
        commit_items = git.get_commit_items(child, git.cp())
        WVPASSEQ(commit_items.parents, [commit])
    finally:
        os.chdir(orig_cwd)
        restore_env_var(b'GIT_AUTHOR_NAME', orig_author_name)
        restore_env_var(b'GIT_AUTHOR_EMAIL', orig_author_email)
        restore_env_var(b'GIT_COMMITTER_NAME', orig_committer_name)
        restore_env_var(b'GIT_COMMITTER_EMAIL', orig_committer_email)
Example #32
def main(argv):
    global opt
    signal.signal(signal.SIGTERM, handle_sigterm)

    UnixAddress = namedtuple('UnixAddress', ['path'])
    InetAddress = namedtuple('InetAddress', ['host', 'port'])

    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])

    if len(extra) > 1:
        o.fatal("at most one argument expected")

    if len(extra) == 0:
        address = InetAddress(host='127.0.0.1', port=8080)
    else:
        bind_url = extra[0]
        if bind_url.startswith('unix://'):
            address = UnixAddress(path=bind_url[len('unix://'):])
        else:
            addr_parts = extra[0].split(':', 1)
            if len(addr_parts) == 1:
                host = '127.0.0.1'
                port = addr_parts[0]
            else:
                host, port = addr_parts
            try:
                port = int(port)
            except (TypeError, ValueError) as ex:
                o.fatal('port must be an integer, not %r' % port)
            address = InetAddress(host=host, port=port)

    git.check_repo_or_die()

    settings = dict(
        debug=1,
        template_path=resource_path(b'web').decode('utf-8'),
        static_path=resource_path(b'web/static').decode('utf-8'),
    )

    # Disable buffering on stdout, for debug messages
    try:
        sys.stdout._line_buffering = True
    except AttributeError:
        sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)

    application = tornado.web.Application([
        (r"(?P<path>/.*)", BupRequestHandler, dict(repo=LocalRepo())),
    ], **settings)

    http_server = HTTPServer(application)
    io_loop_pending = IOLoop.instance()

    if isinstance(address, InetAddress):
        sockets = tornado.netutil.bind_sockets(address.port, address.host)
        http_server.add_sockets(sockets)
        print('Serving HTTP on %s:%d...' % sockets[0].getsockname()[0:2])
        if opt.browser:
            browser_addr = 'http://' + address[0] + ':' + str(address[1])
            io_loop_pending.add_callback(lambda: webbrowser.open(browser_addr))
    elif isinstance(address, UnixAddress):
        unix_socket = bind_unix_socket(address.path)
        http_server.add_socket(unix_socket)
        print('Serving HTTP on filesystem socket %r' % address.path)
    else:
        log('error: unexpected address %r\n' % address)
        sys.exit(1)

    io_loop = io_loop_pending
    io_loop.start()

    if saved_errors:
        log('WARNING: %d errors encountered while saving.\n' %
            len(saved_errors))
        sys.exit(1)
Example #33
def read_ref(conn, refname):
    git.check_repo_or_die()
    r = git.read_ref(refname)
    conn.write('%s\n' % (r or '').encode('hex'))
    conn.ok()
Example #34
File: get-cmd.py Project: ccorn/bup
def main():
    handle_ctrl_c()
    is_reverse = environ.get(b'BUP_SERVER_REVERSE')
    opt = parse_args(sys.argv)
    git.check_repo_or_die()
    src_dir = opt.source or git.repo()
    if opt.bwlimit:
        client.bwlimit = parse_num(opt.bwlimit)
    if is_reverse and opt.remote:
        misuse("don't use -r in reverse mode; it's automatic")
    if opt.remote:
        opt.remote = argv_bytes(opt.remote)
    if opt.remote or is_reverse:
        dest_repo = RemoteRepo(opt.remote)
    else:
        dest_repo = LocalRepo()

    with dest_repo as dest_repo:
        with LocalRepo(repo_dir=src_dir) as src_repo:
            with dest_repo.new_packwriter(compression_level=opt.compress) as writer:
                # Resolve and validate all sources and destinations,
                # implicit or explicit, and do it up-front, so we can
                # fail before we start writing (for any obviously
                # broken cases).
                target_items = resolve_targets(opt.target_specs,
                                               src_repo, dest_repo)

                updated_refs = {}  # ref_name -> (original_ref, tip_commit(bin))
                no_ref_info = (None, None)

                handlers = {'ff': handle_ff,
                            'append': handle_append,
                            'force-pick': handle_pick,
                            'pick': handle_pick,
                            'new-tag': handle_new_tag,
                            'replace': handle_replace,
                            'unnamed': handle_unnamed}

                for item in target_items:
                    debug1('get-spec: %r\n' % (item.spec,))
                    debug1('get-src: %s\n' % loc_desc(item.src))
                    debug1('get-dest: %s\n' % loc_desc(item.dest))
                    dest_path = item.dest and item.dest.path
                    if dest_path:
                        if dest_path.startswith(b'/.tag/'):
                            dest_ref = b'refs/tags/%s' % dest_path[6:]
                        else:
                            dest_ref = b'refs/heads/%s' % dest_path[1:]
                    else:
                        dest_ref = None

                    dest_hash = item.dest and item.dest.hash
                    orig_ref, cur_ref = updated_refs.get(dest_ref, no_ref_info)
                    orig_ref = orig_ref or dest_hash
                    cur_ref = cur_ref or dest_hash

                    handler = handlers[item.spec.method]
                    item_result = handler(item, src_repo, writer, opt)
                    if len(item_result) > 1:
                        new_id, tree = item_result
                    else:
                        new_id = item_result[0]

                    if not dest_ref:
                        log_item(item.spec.src, item.src.type, opt)
                    else:
                        updated_refs[dest_ref] = (orig_ref, new_id)
                        if dest_ref.startswith(b'refs/tags/'):
                            log_item(item.spec.src, item.src.type, opt, tag=new_id)
                        else:
                            log_item(item.spec.src, item.src.type, opt,
                                     tree=tree, commit=new_id)

        # Only update the refs at the very end, once the writer is
        # closed, so that if something goes wrong above, the old refs
        # will be undisturbed.
        for ref_name, info in items(updated_refs):
            orig_ref, new_ref = info
            try:
                dest_repo.update_ref(ref_name, new_ref, orig_ref)
                if opt.verbose:
                    new_hex = hexlify(new_ref)
                    if orig_ref:
                        orig_hex = hexlify(orig_ref)
                        log('updated %r (%s -> %s)\n' % (ref_name, orig_hex, new_hex))
                    else:
                        log('updated %r (%s)\n' % (ref_name, new_hex))
            except (git.GitError, client.ClientError) as ex:
                add_error('unable to update ref %r: %s' % (ref_name, ex))

    if saved_errors:
        log('WARNING: %d errors encountered while saving.\n' % len(saved_errors))
        sys.exit(1)
Example #35
def main():
    o = options.Options(optspec)
    opt, flags, extra = o.parse(sys.argv[1:])
    verbosity = opt.verbose if not opt.quiet else -1
    
    git.check_repo_or_die()

    if not extra:
        o.fatal('must specify at least one filename to restore')

    exclude_rxs = parse_rx_excludes(flags, o.fatal)

    owner_map = {}
    for map_type in ('user', 'group', 'uid', 'gid'):
        owner_map[map_type] = parse_owner_mappings(map_type, flags, o.fatal)

    if opt.outdir:
        mkdirp(opt.outdir)
        os.chdir(opt.outdir)

    repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()
    top = os.getcwd()
    hardlinks = {}
    for path in extra:
        if not valid_restore_path(path):
            add_error("path %r doesn't include a branch and revision" % path)
            continue
        try:
            resolved = vfs.resolve(repo, path, want_meta=True, follow=False)
        except vfs.IOError as e:
            add_error(e)
            continue
        if len(resolved) == 3 and resolved[2][0] == 'latest':
            # Follow latest symlink to the actual save
            try:
                resolved = vfs.resolve(repo, 'latest', parent=resolved[:-1],
                                       want_meta=True)
            except vfs.IOError as e:
                add_error(e)
                continue
            # Rename it back to 'latest'
            resolved = tuple(elt if i != 2 else ('latest',) + elt[1:]
                             for i, elt in enumerate(resolved))
        path_parent, path_name = os.path.split(path)
        leaf_name, leaf_item = resolved[-1]
        if not leaf_item:
            add_error('error: cannot access %r in %r'
                      % ('/'.join(name for name, item in resolved),
                         path))
            continue
        if not path_name or path_name == '.':
            # Source is /foo/what/ever/ or /foo/what/ever/. -- extract
            # what/ever/* to the current directory, and if name == '.'
            # (i.e. /foo/what/ever/.), then also restore what/ever's
            # metadata to the current directory.
            treeish = vfs.item_mode(leaf_item)
            if not treeish:
                add_error('%r cannot be restored as a directory' % path)
            else:
                items = vfs.contents(repo, leaf_item, want_meta=True)
                dot, leaf_item = next(items, None)
                assert(dot == '.')
                for sub_name, sub_item in items:
                    restore(repo, '', sub_name, sub_item, top,
                            opt.sparse, opt.numeric_ids, owner_map,
                            exclude_rxs, verbosity, hardlinks)
                if path_name == '.':
                    leaf_item = vfs.augment_item_meta(repo, leaf_item,
                                                      include_size=True)
                    apply_metadata(leaf_item.meta, '.',
                                   opt.numeric_ids, owner_map)
        else:
            restore(repo, '', leaf_name, leaf_item, top,
                    opt.sparse, opt.numeric_ids, owner_map,
                    exclude_rxs, verbosity, hardlinks)

    if verbosity >= 0:
        progress('Restoring: %d, done.\n' % total_restored)
    die_if_errors()
Example #36
 def create(self, repo_dir=None):
     # FIXME: this is not ideal, we should somehow
     # be able to call the constructor instead?
     git.init_repo(repo_dir)
     git.check_repo_or_die(repo_dir)
Example #37
        WVPASS('check_repo_or_die')  # if we reach this point the call above passed

        os.rename(bupdir + '/objects/pack', bupdir + '/objects/pack.tmp')
        open(bupdir + '/objects/pack', 'w').close()
        try:
            git.check_repo_or_die()
        except SystemExit, e:
            WVPASSEQ(e.code, 14)
        else:
            WVFAIL()
        os.unlink(bupdir + '/objects/pack')
        os.rename(bupdir + '/objects/pack.tmp', bupdir + '/objects/pack')

        try:
            git.check_repo_or_die('nonexistantbup.tmp')
        except SystemExit, e:
            WVPASSEQ(e.code, 15)
        else:
            WVFAIL()
    finally:
        os.chdir(orig_cwd)
    if wvfailure_count() == initial_failures:
        subprocess.call(['rm', '-rf', tmpdir])


@wvtest
def test_commit_parsing():
    def restore_env_var(name, val):
        if val is None:
            del os.environ[name]
Example #38
def update_ref(conn, refname):
    git.check_repo_or_die()
    newval = conn.readline().strip()
    oldval = conn.readline().strip()
    git.update_ref(refname, newval.decode('hex'), oldval.decode('hex'))
    conn.ok()
Example #39
def test_commit_parsing():
    def restore_env_var(name, val):
        if val is None:
            del os.environ[name]
        else:
            os.environ[name] = val

    def showval(commit, val):
        return readpipe(
            ['git', 'show', '-s',
             '--pretty=format:%s' % val, commit]).strip()

    initial_failures = wvfailure_count()
    orig_cwd = os.getcwd()
    tmpdir = tempfile.mkdtemp(dir=bup_tmp, prefix='bup-tgit-')
    workdir = tmpdir + "/work"
    repodir = workdir + '/.git'
    orig_author_name = os.environ.get('GIT_AUTHOR_NAME')
    orig_author_email = os.environ.get('GIT_AUTHOR_EMAIL')
    orig_committer_name = os.environ.get('GIT_COMMITTER_NAME')
    orig_committer_email = os.environ.get('GIT_COMMITTER_EMAIL')
    os.environ['GIT_AUTHOR_NAME'] = 'bup test'
    os.environ['GIT_COMMITTER_NAME'] = os.environ['GIT_AUTHOR_NAME']
    os.environ['GIT_AUTHOR_EMAIL'] = 'bup@a425bc70a02811e49bdf73ee56450e6f'
    os.environ['GIT_COMMITTER_EMAIL'] = os.environ['GIT_AUTHOR_EMAIL']
    try:
        readpipe(['git', 'init', workdir])
        os.environ['GIT_DIR'] = os.environ['BUP_DIR'] = repodir
        git.check_repo_or_die(repodir)
        os.chdir(workdir)
        with open('foo', 'w') as f:
            print >> f, 'bar'
        readpipe(['git', 'add', '.'])
        readpipe([
            'git', 'commit', '-am', 'Do something', '--author',
            'Someone <someone@somewhere>', '--date',
            'Sat Oct 3 19:48:49 2009 -0400'
        ])
        commit = readpipe(['git', 'show-ref', '-s', 'master']).strip()
        parents = showval(commit, '%P')
        tree = showval(commit, '%T')
        cname = showval(commit, '%cn')
        cmail = showval(commit, '%ce')
        cdate = showval(commit, '%ct')
        coffs = showval(commit, '%ci')
        coffs = coffs[-5:]
        coff = (int(coffs[-4:-2]) * 60 * 60) + (int(coffs[-2:]) * 60)
        if coffs[-5] == '-':
            coff = -coff
        commit_items = git.get_commit_items(commit, git.cp())
        WVPASSEQ(commit_items.parents, [])
        WVPASSEQ(commit_items.tree, tree)
        WVPASSEQ(commit_items.author_name, 'Someone')
        WVPASSEQ(commit_items.author_mail, 'someone@somewhere')
        WVPASSEQ(commit_items.author_sec, 1254613729)
        WVPASSEQ(commit_items.author_offset, -(4 * 60 * 60))
        WVPASSEQ(commit_items.committer_name, cname)
        WVPASSEQ(commit_items.committer_mail, cmail)
        WVPASSEQ(commit_items.committer_sec, int(cdate))
        WVPASSEQ(commit_items.committer_offset, coff)
        WVPASSEQ(commit_items.message, 'Do something\n')
        with open('bar', 'w') as f:
            print >> f, 'baz'
        readpipe(['git', 'add', '.'])
        readpipe(['git', 'commit', '-am', 'Do something else'])
        child = readpipe(['git', 'show-ref', '-s', 'master']).strip()
        parents = showval(child, '%P')
        commit_items = git.get_commit_items(child, git.cp())
        WVPASSEQ(commit_items.parents, [commit])
    finally:
        os.chdir(orig_cwd)
        restore_env_var('GIT_AUTHOR_NAME', orig_author_name)
        restore_env_var('GIT_AUTHOR_EMAIL', orig_author_email)
        restore_env_var('GIT_COMMITTER_NAME', orig_committer_name)
        restore_env_var('GIT_COMMITTER_EMAIL', orig_committer_email)
    if wvfailure_count() == initial_failures:
        subprocess.call(['rm', '-rf', tmpdir])
Example #40
 def __init__(self, repo_dir):
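     # When --force-repo is given, ignore the caller-supplied repo_dir and let
     # check_repo_or_die() fall back to BUP_DIR or the default repository.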
     if opt.force_repo:
         repo_dir = None
     git.check_repo_or_die(repo_dir)
     LocalRepo.__init__(self, repo_dir)
Example #41
File: index.py Project: gdt/bup
def main(argv):
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])

    if not (opt.modified or \
            opt['print'] or \
            opt.status or \
            opt.update or \
            opt.check or \
            opt.clear):
        opt.update = 1
    if (opt.fake_valid or opt.fake_invalid) and not opt.update:
        o.fatal('--fake-{in,}valid are meaningless without -u')
    if opt.fake_valid and opt.fake_invalid:
        o.fatal('--fake-valid is incompatible with --fake-invalid')
    if opt.clear and opt.indexfile:
        o.fatal('cannot clear an external index (via -f)')

    # FIXME: remove this once we account for timestamp races, i.e. index;
    # touch new-file; index.  It's possible for this to happen quickly
    # enough that new-file ends up with the same timestamp as the first
    # index, and then bup will ignore it.
    tick_start = time.time()
    time.sleep(1 - (tick_start - int(tick_start)))

    git.check_repo_or_die()

    handle_ctrl_c()

    if opt.verbose is None:
        opt.verbose = 0

    if opt.indexfile:
        indexfile = argv_bytes(opt.indexfile)
    else:
        indexfile = git.repo(b'bupindex')

    if opt.check:
        log('check: starting initial check.\n')
        check_index(index.Reader(indexfile), opt.verbose)

    if opt.clear:
        log('clear: clearing index.\n')
        clear_index(indexfile, opt.verbose)

    sys.stdout.flush()
    out = byte_stream(sys.stdout)

    if opt.update:
        if not extra:
            o.fatal('update mode (-u) requested but no paths given')
        extra = [argv_bytes(x) for x in extra]
        excluded_paths = parse_excludes(flags, o.fatal)
        exclude_rxs = parse_rx_excludes(flags, o.fatal)
        xexcept = index.unique_resolved_paths(extra)
        for rp, path in index.reduce_paths(extra):
            update_index(rp,
                         excluded_paths,
                         exclude_rxs,
                         indexfile,
                         check=opt.check,
                         check_device=opt.check_device,
                         xdev=opt.xdev,
                         xdev_exceptions=xexcept,
                         fake_valid=opt.fake_valid,
                         fake_invalid=opt.fake_invalid,
                         out=out,
                         verbose=opt.verbose)

    if opt['print'] or opt.status or opt.modified:
        extra = [argv_bytes(x) for x in extra]
        for name, ent in index.Reader(indexfile).filter(extra or [b'']):
            if (opt.modified
                    and (ent.is_valid() or ent.is_deleted() or not ent.mode)):
                continue
            line = b''
            if opt.status:
                if ent.is_deleted():
                    line += b'D '
                elif not ent.is_valid():
                    if ent.sha == index.EMPTY_SHA:
                        line += b'A '
                    else:
                        line += b'M '
                else:
                    line += b'  '
            if opt.hash:
                line += hexlify(ent.sha) + b' '
            if opt.long:
                line += b'%7s %7s ' % (oct(ent.mode).encode('ascii'),
                                       oct(ent.gitmode).encode('ascii'))
            out.write(line + (name or b'./') + b'\n')

    if opt.check and (opt['print'] or opt.status or opt.modified
                      or opt.update):
        log('check: starting final check.\n')
        check_index(index.Reader(indexfile), opt.verbose)

    if saved_errors:
        log('WARNING: %d errors encountered.\n' % len(saved_errors))
        sys.exit(1)
Example #42
def main(argv):
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])

    git.check_repo_or_die()

    global repo

    sys.stdout.flush()
    out = byte_stream(sys.stdout)
    stdin = byte_stream(sys.stdin)
    repo = LocalRepo()
    pwd = vfs.resolve(repo, b'/')
    rv = 0

    def inputiter(f):
        if os.isatty(f.fileno()):
            while 1:
                prompt = b'bup %s> ' % (b'/'.join(name for name, item in pwd) or b'/', )
                if hasattr(_helpers, 'readline'):
                    try:
                        yield _helpers.readline(prompt)
                    except EOFError:
                        print()  # Clear the line for the terminal's next prompt
                        break
                else:
                    out.write(prompt)
                    out.flush()
                    read_line = f.readline()
                    if not read_line:
                        print('')
                        break
                    yield read_line
        else:
            for line in f:
                yield line


    if extra:
        lines = (argv_bytes(arg) for arg in extra)
    else:
        if hasattr(_helpers, 'readline'):
            _helpers.set_completer_word_break_characters(b' \t\n\r/')
            _helpers.set_attempted_completion_function(attempt_completion)
            _helpers.set_completion_entry_function(enter_completion)
            if sys.platform.startswith('darwin'):
                # MacOS uses a slightly incompatible clone of libreadline
                _helpers.parse_and_bind(b'bind ^I rl_complete')
            _helpers.parse_and_bind(b'tab: complete')
        lines = inputiter(stdin)

    for line in lines:
        if not line.strip():
            continue
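        # shquote.quotesplit yields (start_offset, word) pairs; keep only the words.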
        words = [word for (wordstart,word) in shquote.quotesplit(line)]
        cmd = words[0].lower()
        #log('execute: %r %r\n' % (cmd, parm))
        try:
            if cmd == b'ls':
                do_ls(repo, pwd, words[1:], out)
                out.flush()
            elif cmd == b'cd':
                np = pwd
                for parm in words[1:]:
                    res = vfs.resolve(repo, parm, parent=np)
                    _, leaf_item = res[-1]
                    if not leaf_item:
                        raise Exception('%s does not exist'
                                        % path_msg(b'/'.join(name for name, item
                                                             in res)))
                    if not stat.S_ISDIR(vfs.item_mode(leaf_item)):
                        raise Exception('%s is not a directory' % path_msg(parm))
                    np = res
                pwd = np
            elif cmd == b'pwd':
                if len(pwd) == 1:
                    out.write(b'/')
                out.write(b'/'.join(name for name, item in pwd) + b'\n')
                out.flush()
            elif cmd == b'cat':
                for parm in words[1:]:
                    res = vfs.resolve(repo, parm, parent=pwd)
                    _, leaf_item = res[-1]
                    if not leaf_item:
                        raise Exception('%s does not exist' %
                                        path_msg(b'/'.join(name for name, item
                                                           in res)))
                    with vfs.fopen(repo, leaf_item) as srcfile:
                        write_to_file(srcfile, out)
                out.flush()
            elif cmd == b'get':
                if len(words) not in [2,3]:
                    rv = 1
                    raise Exception('Usage: get <filename> [localname]')
                rname = words[1]
                (dir,base) = os.path.split(rname)
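                # Default the local name to the remote basename when no
                # localname argument is given.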
                lname = len(words) > 2 and words[2] or base
                res = vfs.resolve(repo, rname, parent=pwd)
                _, leaf_item = res[-1]
                if not leaf_item:
                    raise Exception('%s does not exist' %
                                    path_msg(b'/'.join(name for name, item in res)))
                with vfs.fopen(repo, leaf_item) as srcfile:
                    with open(lname, 'wb') as destfile:
                        log('Saving %s\n' % path_msg(lname))
                        write_to_file(srcfile, destfile)
            elif cmd == b'mget':
                for parm in words[1:]:
                    dir, base = os.path.split(parm)

                    res = vfs.resolve(repo, dir, parent=pwd)
                    _, dir_item = res[-1]
                    if not dir_item:
                        raise Exception('%s does not exist' % path_msg(dir))
                    for name, item in vfs.contents(repo, dir_item):
                        if name == b'.':
                            continue
                        if fnmatch.fnmatch(name, base):
                            if stat.S_ISLNK(vfs.item_mode(item)):
                                deref = vfs.resolve(repo, name, parent=res)
                                deref_name, deref_item = deref[-1]
                                if not deref_item:
                                    raise Exception('%s does not exist' %
                                                    path_msg(b'/'.join(name for name, item
                                                                      in deref)))
                                item = deref_item
                            with vfs.fopen(repo, item) as srcfile:
                                with open(name, 'wb') as destfile:
                                    log('Saving %s\n' % path_msg(name))
                                    write_to_file(srcfile, destfile)
            elif cmd == b'help' or cmd == b'?':
                out.write(b'Commands: ls cd pwd cat get mget help quit\n')
                out.flush()
            elif cmd in (b'quit', b'exit', b'bye'):
                break
            else:
                rv = 1
                raise Exception('no such command %r' % cmd)
        except Exception as e:
            rv = 1
            log('error: %s\n' % e)
            # raise  # (re-raising here would abort the shell on the first error)

    sys.exit(rv)
Example #43
def main(argv):

    # Hack around lack of nonlocal vars in python 2
    _nonlocal = {}

    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])

    if opt.indexfile:
        opt.indexfile = argv_bytes(opt.indexfile)
    if opt.name:
        opt.name = argv_bytes(opt.name)
    if opt.remote:
        opt.remote = argv_bytes(opt.remote)
    if opt.strip_path:
        opt.strip_path = argv_bytes(opt.strip_path)

    git.check_repo_or_die()
    if not (opt.tree or opt.commit or opt.name):
        o.fatal("use one or more of -t, -c, -n")
    if not extra:
        o.fatal("no filenames given")

    extra = [argv_bytes(x) for x in extra]

    opt.progress = (istty2 and not opt.quiet)
    opt.smaller = parse_num(opt.smaller or 0)
    if opt.bwlimit:
        client.bwlimit = parse_num(opt.bwlimit)

    if opt.date:
        date = parse_date_or_fatal(opt.date, o.fatal)
    else:
        date = time.time()

    if opt.strip and opt.strip_path:
        o.fatal("--strip is incompatible with --strip-path")

    graft_points = []
    if opt.graft:
        if opt.strip:
            o.fatal("--strip is incompatible with --graft")

        if opt.strip_path:
            o.fatal("--strip-path is incompatible with --graft")

        for (option, parameter) in flags:
            if option == "--graft":
                parameter = argv_bytes(parameter)
                splitted_parameter = parameter.split(b'=')
                if len(splitted_parameter) != 2:
                    o.fatal("a graft point must be of the form old_path=new_path")
                old_path, new_path = splitted_parameter
                if not (old_path and new_path):
                    o.fatal("a graft point cannot be empty")
                graft_points.append((resolve_parent(old_path),
                                     resolve_parent(new_path)))

    is_reverse = environ.get(b'BUP_SERVER_REVERSE')
    if is_reverse and opt.remote:
        o.fatal("don't use -r in reverse mode; it's automatic")

    name = opt.name
    if name and not valid_save_name(name):
        o.fatal("'%s' is not a valid branch name" % path_msg(name))
    refname = name and b'refs/heads/%s' % name or None
    if opt.remote or is_reverse:
        try:
            cli = client.Client(opt.remote)
        except client.ClientError as e:
            log('error: %s' % e)
            sys.exit(1)
        oldref = refname and cli.read_ref(refname) or None
        w = cli.new_packwriter(compression_level=opt.compress)
    else:
        cli = None
        oldref = refname and git.read_ref(refname) or None
        w = git.PackWriter(compression_level=opt.compress)

    handle_ctrl_c()


    # Metadata is stored in a file named .bupm in each directory.  The
    # first metadata entry will be the metadata for the current directory.
    # The remaining entries will be for each of the other directory
    # elements, in the order they're listed in the index.
    #
    # Since the git tree elements are sorted according to
    # git.shalist_item_sort_key, the metalist items are accumulated as
    # (sort_key, metadata) tuples, and then sorted when the .bupm file is
    # created.  The sort_key should really be computed from the element's
    # mangled name and git mode (i.e. after hashsplitting), but the code
    # currently uses the element's real name and mode instead.
    # This makes things a bit more difficult when reading it back, see
    # vfs.ordered_tree_entries().
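    # For example, in a directory containing a regular file 'a.txt' and a
    # subdirectory 'b', this directory's .bupm holds its own metadata first
    # (the empty sort key), then the metadata for 'a.txt'; 'b' is omitted
    # because a tree's metadata is stored in the .bupm inside 'b' itself.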

    # Maintain a stack of information representing the current location in
    # the archive being constructed.  The current path is recorded in
    # parts, which will be something like
    #      [StackDir(name=''), StackDir(name='home'), StackDir(name='someuser')],
    # and the accumulated content and metadata for files in the dirs is stored
    # in the .items member of the StackDir.

    stack = []


    def _push(part, metadata):
        # Enter a new archive directory -- make it the current directory.
        item = StackDir(part, metadata)
        stack.append(item)


    def _pop(force_tree=None, dir_metadata=None):
        # Leave the current archive directory and add its tree to its parent.
        item = stack.pop()
        # FIXME: only test if collision is possible (i.e. given --strip, etc.)?
        if force_tree:
            tree = force_tree
        else:
            names_seen = set()
            clean_list = []
            for x in item.items:
                name = x.name
                if name in names_seen:
                    parent_path = b'/'.join(x.name for x in stack) + b'/'
                    add_error('error: ignoring duplicate path %s in %s'
                              % (path_msg(name), path_msg(parent_path)))
                else:
                    names_seen.add(name)
                    clean_list.append(x)

            # if set, overrides the original metadata pushed for this dir.
            if dir_metadata is None:
                dir_metadata = item.meta
            metalist = [(b'', dir_metadata)]
            metalist += [(git.shalist_item_sort_key((entry.mode, entry.name, None)),
                          entry.meta)
                         for entry in clean_list if entry.mode != GIT_MODE_TREE]
            metalist.sort(key = lambda x: x[0])
            metadata = BytesIO(b''.join(m[1].encode() for m in metalist))
            mode, id = hashsplit.split_to_blob_or_tree(w.new_blob, w.new_tree,
                                                       [metadata],
                                                       keep_boundaries=False)
            shalist = [(mode, b'.bupm', id)]
            shalist += [(entry.gitmode,
                         git.mangle_name(entry.name, entry.mode, entry.gitmode),
                         entry.oid)
                        for entry in clean_list]

            tree = w.new_tree(shalist)
        if stack:
            stack[-1].append(item.name, GIT_MODE_TREE, GIT_MODE_TREE, tree, None)
        return tree


    _nonlocal['count'] = 0
    _nonlocal['subcount'] = 0
    _nonlocal['lastremain'] = None

    def progress_report(n):
        _nonlocal['subcount'] += n
        cc = _nonlocal['count'] + _nonlocal['subcount']
        pct = total and (cc*100.0/total) or 0
        now = time.time()
        elapsed = now - tstart
        kps = elapsed and int(cc/1024./elapsed)
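        # Round the displayed rate down to roughly two significant digits so it
        # doesn't jitter.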
        kps_frac = 10 ** int(math.log(kps+1, 10) - 1)
        kps = int(kps/kps_frac)*kps_frac
        if cc:
            remain = elapsed*1.0/cc * (total-cc)
        else:
            remain = 0.0
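        # Ignore small (<5%) upward drifts in the estimate so the ETA doesn't creep.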
        if (_nonlocal['lastremain'] and (remain > _nonlocal['lastremain'])
              and ((remain - _nonlocal['lastremain'])/_nonlocal['lastremain'] < 0.05)):
            remain = _nonlocal['lastremain']
        else:
            _nonlocal['lastremain'] = remain
        hours = int(remain/60/60)
        mins = int(remain/60 - hours*60)
        secs = int(remain - hours*60*60 - mins*60)
        if elapsed < 30:
            remainstr = ''
            kpsstr = ''
        else:
            kpsstr = '%dk/s' % kps
            if hours:
                remainstr = '%dh%dm' % (hours, mins)
            elif mins:
                remainstr = '%dm%d' % (mins, secs)
            else:
                remainstr = '%ds' % secs
        qprogress('Saving: %.2f%% (%d/%dk, %d/%d files) %s %s\r'
                  % (pct, cc/1024, total/1024, fcount, ftotal,
                     remainstr, kpsstr))


    indexfile = opt.indexfile or git.repo(b'bupindex')
    r = index.Reader(indexfile)
    try:
        msr = index.MetaStoreReader(indexfile + b'.meta')
    except IOError as ex:
        if ex.errno != EACCES:
            raise
        log('error: cannot access %r; have you run bup index?'
            % path_msg(indexfile))
        sys.exit(1)
    hlink_db = hlinkdb.HLinkDB(indexfile + b'.hlink')

    def already_saved(ent):
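        # Trust an index entry only if it's marked valid and the object really
        # exists in the destination repository.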
        return ent.is_valid() and w.exists(ent.sha) and ent.sha

    def wantrecurse_pre(ent):
        return not already_saved(ent)

    def wantrecurse_during(ent):
        return not already_saved(ent) or ent.sha_missing()

    def find_hardlink_target(hlink_db, ent):
        if hlink_db and not stat.S_ISDIR(ent.mode) and ent.nlink > 1:
            link_paths = hlink_db.node_paths(ent.dev, ent.ino)
            if link_paths:
                return link_paths[0]

    total = ftotal = 0
    if opt.progress:
        for (transname,ent) in r.filter(extra, wantrecurse=wantrecurse_pre):
            if not (ftotal % 10024):
                qprogress('Reading index: %d\r' % ftotal)
            exists = ent.exists()
            hashvalid = already_saved(ent)
            ent.set_sha_missing(not hashvalid)
            if not opt.smaller or ent.size < opt.smaller:
                if exists and not hashvalid:
                    total += ent.size
            ftotal += 1
        progress('Reading index: %d, done.\n' % ftotal)
        hashsplit.progress_callback = progress_report

    # Root collisions occur when strip or graft options map more than one
    # path to the same directory (paths which originally had separate
    # parents).  When that situation is detected, use empty metadata for
    # the parent.  Otherwise, use the metadata for the common parent.
    # Collision example: "bup save ... --strip /foo /foo/bar /bar".

    # FIXME: Add collision tests, or handle collisions some other way.

    # FIXME: Detect/handle strip/graft name collisions (other than root),
    # i.e. if '/foo/bar' and '/bar' both map to '/'.

    first_root = None
    root_collision = None
    tstart = time.time()
    fcount = 0
    lastskip_name = None
    lastdir = b''
    for (transname,ent) in r.filter(extra, wantrecurse=wantrecurse_during):
        (dir, file) = os.path.split(ent.name)
        exists = (ent.flags & index.IX_EXISTS)
        hashvalid = already_saved(ent)
        wasmissing = ent.sha_missing()
        oldsize = ent.size
        if opt.verbose:
            if not exists:
                status = 'D'
            elif not hashvalid:
                if ent.sha == index.EMPTY_SHA:
                    status = 'A'
                else:
                    status = 'M'
            else:
                status = ' '
            if opt.verbose >= 2:
                log('%s %-70s\n' % (status, path_msg(ent.name)))
            elif not stat.S_ISDIR(ent.mode) and lastdir != dir:
                if not lastdir.startswith(dir):
                    log('%s %-70s\n' % (status, path_msg(os.path.join(dir, b''))))
                lastdir = dir

        if opt.progress:
            progress_report(0)
        fcount += 1

        if not exists:
            continue
        if opt.smaller and ent.size >= opt.smaller:
            if exists and not hashvalid:
                if opt.verbose:
                    log('skipping large file "%s"\n' % path_msg(ent.name))
                lastskip_name = ent.name
            continue

        assert(dir.startswith(b'/'))
        if opt.strip:
            dirp = stripped_path_components(dir, extra)
        elif opt.strip_path:
            dirp = stripped_path_components(dir, [opt.strip_path])
        elif graft_points:
            dirp = grafted_path_components(graft_points, dir)
        else:
            dirp = path_components(dir)

        # At this point, dirp contains a representation of the archive
        # path that looks like [(archive_dir_name, real_fs_path), ...].
        # So given "bup save ... --strip /foo/bar /foo/bar/baz", dirp
        # might look like this at some point:
        #   [('', '/foo/bar'), ('baz', '/foo/bar/baz'), ...].

        # This dual representation supports stripping/grafting, where the
        # archive path may not have a direct correspondence with the
        # filesystem.  The root directory is represented by an initial
        # component named '', and any component that doesn't have a
        # corresponding filesystem directory (due to grafting, for
        # example) will have a real_fs_path of None, i.e. [('', None),
        # ...].

        if first_root is None:
            first_root = dirp[0]
        elif first_root != dirp[0]:
            root_collision = True

        # If switching to a new sub-tree, finish the current sub-tree.
        while [x.name for x in stack] > [x[0] for x in dirp]:
            _pop()

        # If switching to a new sub-tree, start a new sub-tree.
        for path_component in dirp[len(stack):]:
            dir_name, fs_path = path_component
            # Not indexed, so just grab the FS metadata or use empty metadata.
            try:
                meta = metadata.from_path(fs_path, normalized=True) \
                    if fs_path else metadata.Metadata()
            except (OSError, IOError) as e:
                add_error(e)
                lastskip_name = dir_name
                meta = metadata.Metadata()
            _push(dir_name, meta)

        if not file:
            if len(stack) == 1:
                continue # We're at the top level -- keep the current root dir
            # Since there's no filename, this is a subdir -- finish it.
            oldtree = already_saved(ent) # may be None
            newtree = _pop(force_tree = oldtree)
            if not oldtree:
                if lastskip_name and lastskip_name.startswith(ent.name):
                    ent.invalidate()
                else:
                    ent.validate(GIT_MODE_TREE, newtree)
                ent.repack()
            if exists and wasmissing:
                _nonlocal['count'] += oldsize
            continue

        # it's not a directory
        if hashvalid:
            meta = msr.metadata_at(ent.meta_ofs)
            meta.hardlink_target = find_hardlink_target(hlink_db, ent)
            # Restore the times that were cleared to 0 in the metastore.
            (meta.atime, meta.mtime, meta.ctime) = (ent.atime, ent.mtime, ent.ctime)
            stack[-1].append(file, ent.mode, ent.gitmode, ent.sha, meta)
        else:
            id = None
            hlink = find_hardlink_target(hlink_db, ent)
            try:
                meta = metadata.from_path(ent.name, hardlink_target=hlink,
                                          normalized=True,
                                          after_stat=after_nondir_metadata_stat)
            except (OSError, IOError) as e:
                add_error(e)
                lastskip_name = ent.name
                continue
            if stat.S_IFMT(ent.mode) != stat.S_IFMT(meta.mode):
                # The mode changed since we indexed the file; this is bad.
                # This can cause two issues:
                # 1) We e.g. think the file is a regular file, but now it's
                #    something else (a device, socket, FIFO or symlink, etc.)
                #    and _read_ from it when we shouldn't.
                # 2) We then record it as valid, but don't update the index
                #    metadata, and on a subsequent save it has 'hashvalid'
                #    but is recorded as the file type from the index, when
                #    the content is something else ...
                # Avoid all of these consistency issues by just skipping such
                # things - it really ought not to happen anyway.
                add_error("%s: mode changed since indexing, skipping." % path_msg(ent.name))
                lastskip_name = ent.name
                continue
            if stat.S_ISREG(ent.mode):
                try:
                    # If the file changes while we're reading it, our read may
                    # stop early even though the stat() above already recorded a
                    # different size.  Recalculate meta.size from what we actually
                    # read so the repository records an accurate size, even if the
                    # other stat() data is slightly older than the file content
                    # (we can't fix that; it's inherently racy, but we can at
                    # least avoid the size mismatch).
                    meta.size = 0
                    def new_blob(data):
                        meta.size += len(data)
                        return w.new_blob(data)
                    before_saving_regular_file(ent.name)
                    with hashsplit.open_noatime(ent.name) as f:
                        (mode, id) = hashsplit.split_to_blob_or_tree(
                                                new_blob, w.new_tree, [f],
                                                keep_boundaries=False)
                except (IOError, OSError) as e:
                    add_error('%s: %s' % (ent.name, e))
                    lastskip_name = ent.name
            elif stat.S_ISDIR(ent.mode):
                assert(0)  # handled above
            elif stat.S_ISLNK(ent.mode):
                mode, id = (GIT_MODE_SYMLINK, w.new_blob(meta.symlink_target))
            else:
                # Everything else should be fully described by its
                # metadata, so just record an empty blob, so the paths
                # in the tree and .bupm will match up.
                (mode, id) = (GIT_MODE_FILE, w.new_blob(b''))

            if id:
                ent.validate(mode, id)
                ent.repack()
                stack[-1].append(file, ent.mode, ent.gitmode, id, meta)

        if exists and wasmissing:
            _nonlocal['count'] += oldsize
            _nonlocal['subcount'] = 0


    if opt.progress:
        pct = total and _nonlocal['count']*100.0/total or 100
        progress('Saving: %.2f%% (%d/%dk, %d/%d files), done.    \n'
                 % (pct, _nonlocal['count']/1024, total/1024, fcount, ftotal))

    while len(stack) > 1: # _pop() all the parts above the root
        _pop()

    # Finish the root directory.
    # When there's a collision, use empty metadata for the root.
    tree = _pop(dir_metadata = metadata.Metadata() if root_collision else None)

    sys.stdout.flush()
    out = byte_stream(sys.stdout)

    if opt.tree:
        out.write(hexlify(tree))
        out.write(b'\n')
    if opt.commit or name:
        if compat.py_maj > 2:
            # Strip b prefix from python 3 bytes reprs to preserve previous format
            msgcmd = b'[%s]' % b', '.join([repr(argv_bytes(x))[1:].encode('ascii')
                                           for x in argv])
        else:
            msgcmd = repr(argv)
        msg = b'bup save\n\nGenerated by command:\n%s\n' % msgcmd
        userline = (b'%s <%s@%s>' % (userfullname(), username(), hostname()))
        commit = w.new_commit(tree, oldref, userline, date, None,
                              userline, date, None, msg)
        if opt.commit:
            out.write(hexlify(commit))
            out.write(b'\n')

    msr.close()
    w.close()  # must close before we can update the ref

    if opt.name:
        if cli:
            cli.update_ref(refname, commit, oldref)
        else:
            git.update_ref(refname, commit, oldref)

    if cli:
        cli.close()

    if saved_errors:
        log('WARNING: %d errors encountered while saving.\n' % len(saved_errors))
        sys.exit(1)
Example #44
n,name=    name of backup set to update (if any)
d,date=    date for the commit (seconds since the epoch)
v,verbose  increase log output (can be used more than once)
q,quiet    don't show progress meter
smaller=   only back up files smaller than n bytes
bwlimit=   maximum bytes/sec to transmit to server
f,indexfile=  the name of the index file (normally BUP_DIR/bupindex)
strip      strips the path to every filename given
strip-path= path-prefix to be stripped when saving
graft=     a graft point *old_path*=*new_path* (can be used more than once)
#,compress=  set compression level to # (0-9, 9 is highest) [1]
"""
o = options.Options(optspec)
(opt, flags, extra) = o.parse(sys.argv[1:])

git.check_repo_or_die()
if not (opt.tree or opt.commit or opt.name):
    o.fatal("use one or more of -t, -c, -n")
if not extra:
    o.fatal("no filenames given")

opt.progress = (istty2 and not opt.quiet)
opt.smaller = parse_num(opt.smaller or 0)
if opt.bwlimit:
    client.bwlimit = parse_num(opt.bwlimit)

if opt.date:
    date = parse_date_or_fatal(opt.date, o.fatal)
else:
    date = time.time()
Example #45
File: tgit.py Project: 0xkag/bup
def test_commit_parsing():

    def restore_env_var(name, val):
        if val is None:
            del os.environ[name]
        else:
            os.environ[name] = val

    def showval(commit, val):
        return readpipe(['git', 'show', '-s',
                         '--pretty=format:%s' % val, commit]).strip()

    with no_lingering_errors(), test_tempdir('bup-tgit-') as tmpdir:
        orig_cwd = os.getcwd()
        workdir = tmpdir + "/work"
        repodir = workdir + '/.git'
        orig_author_name = os.environ.get('GIT_AUTHOR_NAME')
        orig_author_email = os.environ.get('GIT_AUTHOR_EMAIL')
        orig_committer_name = os.environ.get('GIT_COMMITTER_NAME')
        orig_committer_email = os.environ.get('GIT_COMMITTER_EMAIL')
        os.environ['GIT_AUTHOR_NAME'] = 'bup test'
        os.environ['GIT_COMMITTER_NAME'] = os.environ['GIT_AUTHOR_NAME']
        os.environ['GIT_AUTHOR_EMAIL'] = 'bup@a425bc70a02811e49bdf73ee56450e6f'
        os.environ['GIT_COMMITTER_EMAIL'] = os.environ['GIT_AUTHOR_EMAIL']
        try:
            readpipe(['git', 'init', workdir])
            os.environ['GIT_DIR'] = os.environ['BUP_DIR'] = repodir
            git.check_repo_or_die(repodir)
            os.chdir(workdir)
            with open('foo', 'w') as f:
                print >> f, 'bar'
            readpipe(['git', 'add', '.'])
            readpipe(['git', 'commit', '-am', 'Do something',
                      '--author', 'Someone <someone@somewhere>',
                      '--date', 'Sat Oct 3 19:48:49 2009 -0400'])
            commit = readpipe(['git', 'show-ref', '-s', 'master']).strip()
            parents = showval(commit, '%P')
            tree = showval(commit, '%T')
            cname = showval(commit, '%cn')
            cmail = showval(commit, '%ce')
            cdate = showval(commit, '%ct')
            coffs = showval(commit, '%ci')
            coffs = coffs[-5:]
            coff = (int(coffs[-4:-2]) * 60 * 60) + (int(coffs[-2:]) * 60)
            if coffs[-5] == '-':
                coff = - coff
            commit_items = git.get_commit_items(commit, git.cp())
            WVPASSEQ(commit_items.parents, [])
            WVPASSEQ(commit_items.tree, tree)
            WVPASSEQ(commit_items.author_name, 'Someone')
            WVPASSEQ(commit_items.author_mail, 'someone@somewhere')
            WVPASSEQ(commit_items.author_sec, 1254613729)
            WVPASSEQ(commit_items.author_offset, -(4 * 60 * 60))
            WVPASSEQ(commit_items.committer_name, cname)
            WVPASSEQ(commit_items.committer_mail, cmail)
            WVPASSEQ(commit_items.committer_sec, int(cdate))
            WVPASSEQ(commit_items.committer_offset, coff)
            WVPASSEQ(commit_items.message, 'Do something\n')
            with open('bar', 'w') as f:
                print >> f, 'baz'
            readpipe(['git', 'add', '.'])
            readpipe(['git', 'commit', '-am', 'Do something else'])
            child = readpipe(['git', 'show-ref', '-s', 'master']).strip()
            parents = showval(child, '%P')
            commit_items = git.get_commit_items(child, git.cp())
            WVPASSEQ(commit_items.parents, [commit])
        finally:
            os.chdir(orig_cwd)
            restore_env_var('GIT_AUTHOR_NAME', orig_author_name)
            restore_env_var('GIT_AUTHOR_EMAIL', orig_author_email)
            restore_env_var('GIT_COMMITTER_NAME', orig_committer_name)
            restore_env_var('GIT_COMMITTER_EMAIL', orig_committer_email)
Example #46
def receive_objects(conn, junk):
    global suspended_w
    git.check_repo_or_die()
    suggested = {}
    if suspended_w:
        w = suspended_w
        suspended_w = None
    else:
        w = git.PackWriter()
    while 1:
        ns = conn.read(4)
        if not ns:
            w.abort()
            raise Exception('object read: expected length header, got EOF\n')
        n = struct.unpack('!I', ns)[0]
        #log('expecting %d bytes\n' % n)
        if not n:
            log('bup server: received %d object%s.\n' 
                % (w.count, w.count!=1 and "s" or ''))
            fullpath = w.close()
            if fullpath:
                (dir, name) = os.path.split(fullpath)
                conn.write('%s.idx\n' % name)
            conn.ok()
            return
        elif n == 0xffffffff:
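            # 0xffffffff is a sentinel length: the client is suspending the
            # upload rather than finishing it.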
            log('bup server: receive-objects suspended.\n')
            suspended_w = w
            conn.ok()
            return
            
        buf = conn.read(n)  # object sizes in bup are reasonably small
        #log('read %d bytes\n' % n)
        if len(buf) < n:
            w.abort()
            raise Exception('object read: expected %d bytes, got %d\n'
                            % (n, len(buf)))
        (type, content) = git._decode_packobj(buf)
        sha = git.calc_hash(type, content)
        oldpack = w.exists(sha)
        # FIXME: we only suggest a single index per cycle, because the client
        # is currently too dumb to download more than one per cycle anyway.
        # Actually we should fix the client, but this is a minor optimization
        # on the server side.
        if not suggested and \
          oldpack and (oldpack == True or oldpack.endswith('.midx')):
            # FIXME: we shouldn't really have to know about midx files
            # at this layer.  But exists() on a midx doesn't return the
            # packname (since it doesn't know)... probably we should just
            # fix that deficiency of midx files eventually, although it'll
            # make the files bigger.  This method is certainly not very
            # efficient.
            w.objcache.refresh(skip_midx = True)
            oldpack = w.objcache.exists(sha)
            log('new suggestion: %r\n' % oldpack)
            assert(oldpack)
            assert(oldpack != True)
            assert(not oldpack.endswith('.midx'))
            w.objcache.refresh(skip_midx = False)
        if not suggested and oldpack:
            assert(oldpack.endswith('.idx'))
            (dir,name) = os.path.split(oldpack)
            if not (name in suggested):
                log("bup server: suggesting index %s\n" % name)
                conn.write('index %s\n' % name)
                suggested[name] = 1
        else:
            w._raw_write([buf])
Example #47
def read_ref(conn, refname):
    git.check_repo_or_die()
    r = git.read_ref(refname)
    conn.write('%s\n' % (r or '').encode('hex'))
    conn.ok()
Example #48
from bup.git import check_repo_or_die
from bup.options import Options
from bup.helpers import die_if_errors, handle_ctrl_c, log
from bup.repo import LocalRepo
from bup.rm import bup_rm

optspec = """
bup rm <branch|save...>
--
#,compress=  set compression level to # (0-9, 9 is highest) [6]
v,verbose    increase verbosity (can be specified multiple times)
unsafe       use the command even though it may be DANGEROUS
"""

handle_ctrl_c()

o = Options(optspec)
opt, flags, extra = o.parse(sys.argv[1:])

if not opt.unsafe:
    o.fatal('refusing to run dangerous, experimental command without --unsafe')

if len(extra) < 1:
    o.fatal('no paths specified')

check_repo_or_die()
repo = LocalRepo()
bup_rm(repo, extra, compression=opt.compress, verbosity=opt.verbose)
die_if_errors()
Example #49
def main(argv):
    o = options.Options(optspec)
    opt, flags, roots = o.parse_bytes(argv[1:])
    roots = [argv_bytes(x) for x in roots]

    if not opt.unsafe:
        o.fatal(
            'refusing to run dangerous, experimental command without --unsafe')

    now = int(time()) if opt.wrt is None else opt.wrt
    if not isinstance(now, int):
        o.fatal('--wrt value ' + str(now) + ' is not an integer')

    period_start = {}
    for period, extent in (('all', opt.keep_all_for), ('dailies',
                                                       opt.keep_dailies_for),
                           ('monthlies', opt.keep_monthlies_for),
                           ('yearlies', opt.keep_yearlies_for)):
        if extent:
            secs = period_as_secs(extent.encode('ascii'))
            if not secs:
                o.fatal('%r is not a valid period' % extent)
            period_start[period] = now - secs

    if not period_start:
        o.fatal('at least one keep argument is required')

    period_start = defaultdict(lambda: float('inf'), period_start)

    if opt.verbose:
        epoch_ymd = strftime('%Y-%m-%d-%H%M%S', localtime(0))
        for kind in ['all', 'dailies', 'monthlies', 'yearlies']:
            period_utc = period_start[kind]
            if period_utc != float('inf'):
                if not (period_utc > float('-inf')):
                    log('keeping all ' + kind + '\n')
                else:
                    try:
                        when = strftime('%Y-%m-%d-%H%M%S',
                                        localtime(period_utc))
                        log('keeping ' + kind + ' since ' + when + '\n')
                    except ValueError as ex:
                        if period_utc < 0:
                            log('keeping %s since %d seconds before %s\n' %
                                (kind, abs(period_utc), epoch_ymd))
                        elif period_utc > 0:
                            log('keeping %s since %d seconds after %s\n' %
                                (kind, period_utc, epoch_ymd))
                        else:
                            log('keeping %s since %s\n' % (kind, epoch_ymd))

    git.check_repo_or_die()

    # This could be more efficient, but for now just build the whole list
    # in memory and let bup_rm() do some redundant work.

    def parse_info(f):
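        # Each git.rev_list record's body is the commit's author timestamp
        # (the b'%at' format below); parse it as an integer.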
        author_secs = f.readline().strip()
        return int(author_secs)

    sys.stdout.flush()
    out = byte_stream(sys.stdout)

    removals = []
    for branch, branch_id in branches(roots):
        die_if_errors()
        saves = ((utc, unhexlify(oidx)) for (
            oidx,
            utc) in git.rev_list(branch_id, format=b'%at', parse=parse_info))
        for keep_save, (utc, id) in classify_saves(saves, period_start):
            assert (keep_save in (False, True))
            # FIXME: base removals on hashes
            if opt.pretend:
                out.write((b'+ ' if keep_save else b'- ') +
                          save_name(branch, utc) + b'\n')
            elif not keep_save:
                removals.append(save_name(branch, utc))

    if not opt.pretend:
        die_if_errors()
        with LocalRepo() as repo:
            bup_rm(repo,
                   removals,
                   compression=opt.compress,
                   verbosity=opt.verbose)
        if opt.gc:
            die_if_errors()
            bup_gc(threshold=opt.gc_threshold,
                   compression=opt.compress,
                   verbosity=opt.verbose)

    die_if_errors()
Example #50
File: tag-cmd.py Project: 0xkag/bup
handle_ctrl_c()

optspec = """
bup tag
bup tag [-f] <tag name> <commit>
bup tag [-f] -d <tag name>
--
d,delete=   Delete a tag
f,force     Overwrite existing tag, or ignore missing tag when deleting
"""

o = options.Options(optspec)
(opt, flags, extra) = o.parse(sys.argv[1:])

git.check_repo_or_die()

tags = [t for sublist in git.tags().values() for t in sublist]

if opt.delete:
    # git.delete_ref() doesn't complain if a ref doesn't exist.  We
    # could implement this verification but we'd need to read in the
    # contents of the tag file and pass the hash, and we already know
    # about the tag's existence via "tags".
    if not opt.force and opt.delete not in tags:
        log("error: tag '%s' doesn't exist\n" % opt.delete)
        sys.exit(1)
    tag_file = 'refs/tags/%s' % opt.delete
    git.delete_ref(tag_file)
    sys.exit(0)
Example #51
def main(argv):
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    if opt.name: opt.name = argv_bytes(opt.name)
    if opt.remote: opt.remote = argv_bytes(opt.remote)
    if opt.verbose is None: opt.verbose = 0

    if not (opt.blobs or opt.tree or opt.commit or opt.name or opt.noop
            or opt.copy):
        o.fatal("use one or more of -b, -t, -c, -n, --noop, --copy")
    if opt.copy and (opt.blobs or opt.tree):
        o.fatal('--copy is incompatible with -b, -t')
    if (opt.noop or opt.copy) and (opt.commit or opt.name):
        o.fatal('--noop and --copy are incompatible with -c, -n')
    if opt.blobs and (opt.tree or opt.commit or opt.name):
        o.fatal('-b is incompatible with -t, -c, -n')
    if extra and opt.git_ids:
        o.fatal("don't provide filenames when using --git-ids")

    if opt.verbose >= 2:
        git.verbose = opt.verbose - 1
        opt.bench = 1

    max_pack_size = None
    if opt.max_pack_size:
        max_pack_size = parse_num(opt.max_pack_size)
    max_pack_objects = None
    if opt.max_pack_objects:
        max_pack_objects = parse_num(opt.max_pack_objects)

    if opt.fanout:
        hashsplit.fanout = parse_num(opt.fanout)
    if opt.blobs:
        hashsplit.fanout = 0
    if opt.bwlimit:
        client.bwlimit = parse_num(opt.bwlimit)
    if opt.date:
        date = parse_date_or_fatal(opt.date, o.fatal)
    else:
        date = time.time()

    # Hack around lack of nonlocal vars in python 2
    total_bytes = [0]

    def prog(filenum, nbytes):
        total_bytes[0] += nbytes
        if filenum > 0:
            qprogress('Splitting: file #%d, %d kbytes\r' %
                      (filenum + 1, total_bytes[0] // 1024))
        else:
            qprogress('Splitting: %d kbytes\r' % (total_bytes[0] // 1024))

    is_reverse = environ.get(b'BUP_SERVER_REVERSE')
    if is_reverse and opt.remote:
        o.fatal("don't use -r in reverse mode; it's automatic")
    start_time = time.time()

    if opt.name and not valid_save_name(opt.name):
        o.fatal("'%r' is not a valid branch name." % opt.name)
    refname = opt.name and b'refs/heads/%s' % opt.name or None

    if opt.noop or opt.copy:
        cli = pack_writer = oldref = None
    elif opt.remote or is_reverse:
        git.check_repo_or_die()
        cli = client.Client(opt.remote)
        oldref = refname and cli.read_ref(refname) or None
        pack_writer = cli.new_packwriter(compression_level=opt.compress,
                                         max_pack_size=max_pack_size,
                                         max_pack_objects=max_pack_objects)
    else:
        git.check_repo_or_die()
        cli = None
        oldref = refname and git.read_ref(refname) or None
        pack_writer = git.PackWriter(compression_level=opt.compress,
                                     max_pack_size=max_pack_size,
                                     max_pack_objects=max_pack_objects)

    input = byte_stream(sys.stdin)

    if opt.git_ids:
        # the input is actually a series of git object ids that we should retrieve
        # and split.
        #
        # This is a bit messy, but basically it converts from a series of
        # CatPipe.get() iterators into a series of file-type objects.
        # It would be less ugly if either CatPipe.get() returned a file-like object
        # (not very efficient), or split_to_shalist() expected an iterator instead
        # of a file.
        cp = git.CatPipe()

        class IterToFile:
            def __init__(self, it):
                self.it = iter(it)

            def read(self, size):
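                # Ignore the requested size; hand back whatever chunk the
                # iterator yields next.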
                v = next(self.it, None)
                return v or b''

        def read_ids():
            while 1:
                line = input.readline()
                if not line:
                    break
                if line:
                    line = line.strip()
                try:
                    it = cp.get(line.strip())
                    next(it, None)  # skip the file info
                except KeyError as e:
                    add_error('error: %s' % e)
                    continue
                yield IterToFile(it)

        files = read_ids()
    else:
        # the input either comes from a series of files or from stdin.
        files = extra and (open(argv_bytes(fn), 'rb')
                           for fn in extra) or [input]

    if pack_writer:
        new_blob = pack_writer.new_blob
        new_tree = pack_writer.new_tree
    elif opt.blobs or opt.tree:
        # --noop mode
        new_blob = lambda content: git.calc_hash(b'blob', content)
        new_tree = lambda shalist: git.calc_hash(b'tree',
                                                 git.tree_encode(shalist))

    sys.stdout.flush()
    out = byte_stream(sys.stdout)

    if opt.blobs:
        shalist = hashsplit.split_to_blobs(new_blob,
                                           files,
                                           keep_boundaries=opt.keep_boundaries,
                                           progress=prog)
        for (sha, size, level) in shalist:
            out.write(hexlify(sha) + b'\n')
            reprogress()
    elif opt.tree or opt.commit or opt.name:
        if opt.name:  # insert dummy_name which may be used as a restore target
            mode, sha = \
                hashsplit.split_to_blob_or_tree(new_blob, new_tree, files,
                                                keep_boundaries=opt.keep_boundaries,
                                                progress=prog)
            splitfile_name = git.mangle_name(b'data', hashsplit.GIT_MODE_FILE,
                                             mode)
            shalist = [(mode, splitfile_name, sha)]
        else:
            shalist = hashsplit.split_to_shalist(
                new_blob,
                new_tree,
                files,
                keep_boundaries=opt.keep_boundaries,
                progress=prog)
        tree = new_tree(shalist)
    else:
        last = 0
        it = hashsplit.hashsplit_iter(files,
                                      keep_boundaries=opt.keep_boundaries,
                                      progress=prog)
        for (blob, level) in it:
            hashsplit.total_split += len(blob)
            if opt.copy:
                out.write(blob)  # write the raw bytes; str(blob) would emit the bytes repr on Python 3
            megs = hashsplit.total_split // 1024 // 1024
            if not opt.quiet and last != megs:
                last = megs

    if opt.verbose:
        log('\n')
    if opt.tree:
        out.write(hexlify(tree) + b'\n')
    if opt.commit or opt.name:
        msg = b'bup split\n\nGenerated by command:\n%r\n' % compat.get_argvb()
        ref = opt.name and (b'refs/heads/%s' % opt.name) or None
        userline = b'%s <%s@%s>' % (userfullname(), username(), hostname())
        commit = pack_writer.new_commit(tree, oldref, userline, date, None,
                                        userline, date, None, msg)
        if opt.commit:
            out.write(hexlify(commit) + b'\n')

    if pack_writer:
        pack_writer.close()  # must close before we can update the ref

    if opt.name:
        if cli:
            cli.update_ref(refname, commit, oldref)
        else:
            git.update_ref(refname, commit, oldref)

    if cli:
        cli.close()

    secs = time.time() - start_time
    size = hashsplit.total_split
    if opt.bench:
        log('bup: %.2f kbytes in %.2f secs = %.2f kbytes/sec\n' %
            (size / 1024, secs, size / 1024 / secs))

    if saved_errors:
        log('WARNING: %d errors encountered while saving.\n' %
            len(saved_errors))
        sys.exit(1)