Example #1
    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed; return None to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                return None
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)

            # If one file is missing, likely all files from this rev are
            if path is None:
                cachelfiles(ui, rsrc, ctx.node())
                path = lfutil.findfile(rsrc, hash)

                if path is None:
                    raise util.Abort(_("missing largefile '%s' from revision %s") % (f, node.hex(ctx.node())))

            data = ""
            fd = None
            try:
                fd = open(path, "rb")
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(repo, f, data, "l" in fctx.flags(), "x" in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)
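
A minimal sketch of the miss-then-refill pattern used above: look up the hash, repopulate the local cache for the whole revision on a miss, retry once, then give up. lookup and fill_cache are hypothetical stand-ins for lfutil.findfile and cachelfiles.

def find_with_refill(cache, key, lookup, fill_cache):
    path = lookup(cache, key)
    if path is None:
        # if one file is missing, likely all files from this rev are,
        # so refill the cache once before retrying
        fill_cache(cache)
        path = lookup(cache, key)
    if path is None:
        raise RuntimeError("missing file for key %r" % key)
    return path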
Example #2
 def _getfile(self, tmpfile, filename, hash):
     path = lfutil.findfile(self.remote, hash)
     if not path:
         raise basestore.StoreError(filename, hash, self.url,
             _("can't get file locally"))
     with open(path, 'rb') as fd:
         return lfutil.copyandhash(fd, tmpfile)
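
lfutil.copyandhash above copies the opened largefile into tmpfile and returns a hash of the copied bytes. A rough sketch of such a helper, assuming a chunked SHA-1 digest (largefiles identifies content by SHA-1 hex); this is an illustration, not Mercurial's implementation.

import hashlib

def copyandhash(fd, tmpfile):
    # stream in chunks so large files are never read into memory at once
    h = hashlib.sha1()
    while True:
        chunk = fd.read(128 * 1024)
        if not chunk:
            break
        h.update(chunk)
        tmpfile.write(chunk)
    return h.hexdigest()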
Example #3
 def _getfile(self, tmpfile, filename, hash):
     path = lfutil.findfile(self.remote, hash)
     if not path:
         raise basestore.StoreError(filename, hash, self.url,
                                    _("can't get file locally"))
     with open(path, 'rb') as fd:
         return lfutil.copyandhash(fd, tmpfile)
Example #4
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    retval = store.exists(files)
    files = filter(lambda h: not retval[h], files)
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    for hash in files:
        ui.progress(_('uploading largefiles'),
                    at,
                    unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(
                _('largefile %s missing from store'
                  ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)
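
The loop above first issues a single batched statlfile round trip (store.exists) and only uploads hashes the store reports as absent. A minimal sketch of that filtering step; store_exists is a hypothetical analog of store.exists returning a hash -> bool mapping.

def missing_hashes(store_exists, hashes):
    present = store_exists(hashes)  # one round trip for the whole batch
    return [h for h in hashes if not present[h]]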
Example #5
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    retval = store.exists(files)
    files = filter(lambda h: not retval[h], files)
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)
Example #6
def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing).  cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        lfiles = set(lfiles) & set(filelist)
    toget = []

    for lfile in lfiles:
        try:
            expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        except IOError as err:
            if err.errno == errno.ENOENT:
                continue # node must be None and standin wasn't found in wctx
            raise
        if not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])
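
store.get(toget) is expected to return the (cached, missing) pair promised by the docstring. A minimal sketch of that contract; fetchone is a hypothetical per-file download helper returning True on success.

def get(toget, fetchone):
    cached, missing = [], []
    for lfile, expectedhash in toget:
        if fetchone(lfile, expectedhash):
            cached.append(lfile)
        else:
            missing.append(lfile)
    return cached, missing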
Example #7
    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)
            ### TODO: What if the file is not cached?
            data = ""
            fd = None
            try:
                fd = open(path, "rb")
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, "l" in fctx.flags(), "x" in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo.ui, ctx, f, revmap)
Example #8
def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing).  cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        lfiles = set(lfiles) & set(filelist)
    toget = []

    for lfile in lfiles:
        try:
            expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        except IOError as err:
            if err.errno == errno.ENOENT:
                continue  # node must be None and standin wasn't found in wctx
            raise
        if not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])
Example #9
def cachelfiles(ui, repo, node):
    """cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing).  cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found."""
    lfiles = lfutil.listlfiles(repo, node)
    toget = []

    for lfile in lfiles:
        # If we are mid-merge, then we have to trust the standin that is in the
        # working copy to have the correct hashvalue.  This is because the
        # original hg.merge() already updated the standin as part of the normal
        # merge process -- we just have to update the largefile to match.
        if getattr(repo, "_ismerging", False) and os.path.exists(repo.wjoin(lfutil.standin(lfile))):
            expectedhash = lfutil.readstandin(repo, lfile)
        else:
            expectedhash = repo[node][lfutil.standin(lfile)].data().strip()

        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'.  in this case,
        # it will not be in any store, so don't look for it.
        if (
            not os.path.exists(repo.wjoin(lfile)) or expectedhash != lfutil.hashfile(repo.wjoin(lfile))
        ) and not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])
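
The condition above compares the expected hash from the standin against lfutil.hashfile of the working copy. A plausible analog of hashfile, assuming a chunked SHA-1 over the file contents (a sketch, not Mercurial's code).

import hashlib

def hashfile(path):
    h = hashlib.sha1()
    with open(path, 'rb') as fd:
        while True:
            chunk = fd.read(128 * 1024)
            if not chunk:
                break
            h.update(chunk)
    return h.hexdigest()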
Example #10
def cachelfiles(ui, repo, node):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing).  cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    toget = []

    for lfile in lfiles:
        # If we are mid-merge, then we have to trust the standin that is in the
        # working copy to have the correct hashvalue.  This is because the
        # original hg.merge() already updated the standin as part of the normal
        # merge process -- we just have to update the largefile to match.
        if (getattr(repo, "_ismerging", False) and
             os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
            expectedhash = lfutil.readstandin(repo, lfile)
        else:
            expectedhash = repo[node][lfutil.standin(lfile)].data().strip()

        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'.  in this case,
        # it will not be in any store, so don't look for it.
        if ((not os.path.exists(repo.wjoin(lfile)) or
             expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and
            not lfutil.findfile(repo, expectedhash)):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])
Example #11
def cachelfiles(ui, repo, node):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing).  cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    toget = []

    for lfile in lfiles:
        expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'.  in this case,
        # it will not be in any store, so don't look for it.
        if ((not os.path.exists(repo.wjoin(lfile)) or
             expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and
            not lfutil.findfile(repo, expectedhash)):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])
Example #12
    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)
            ### TODO: What if the file is not cached?
            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo.ui, ctx, f, revmap)
Example #13
def statlfile(repo, proto, sha):
    '''Return '2\n' if the largefile is missing, '0\n' if it seems to be in
    good condition.

    The value 1 is reserved for mismatched checksum, but that is too expensive
    to be verified on every stat and must be caught by running 'hg verify'
    server side.'''
    filename = lfutil.findfile(repo, sha)
    if not filename:
        return '2\n'
    return '0\n'
Example #14
def statlfile(repo, proto, sha):
    '''Server command for checking if a largefile is present - returns '2\n' if
    the largefile is missing, '0\n' if it seems to be in good condition.

    The value 1 is reserved for mismatched checksum, but that is too expensive
    to be verified on every stat and must be caught by running 'hg verify'
    server side.'''
    filename = lfutil.findfile(repo, sha)
    if not filename:
        return '2\n'
    return '0\n'
Example #15
def statlfile(repo, proto, sha):
    '''Return '2\n' if the largefile is missing, '1\n' if it has a
    mismatched checksum, or '0\n' if it is in good condition'''
    filename = lfutil.findfile(repo, sha)
    if not filename:
        return '2\n'
    fd = None
    try:
        fd = open(filename, 'rb')
        return lfutil.hexsha1(fd) == sha and '0\n' or '1\n'
    finally:
        if fd:
            fd.close()
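
This variant verifies the checksum instead of reserving code '1'. The same three-way status protocol ('2' missing, '1' mismatch, '0' good) in a generic sketch; find_by_hash is a hypothetical analog of lfutil.findfile.

import hashlib

def stat_by_hash(find_by_hash, repo, sha):
    filename = find_by_hash(repo, sha)
    if not filename:
        return '2\n'  # missing entirely
    h = hashlib.sha1()
    with open(filename, 'rb') as fd:
        while True:
            chunk = fd.read(128 * 1024)
            if not chunk:
                break
            h.update(chunk)
    return '0\n' if h.hexdigest() == sha else '1\n'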
Example #16
def statlfile(repo, proto, sha):
    """Return '2\n' if the largefile is missing, '1\n' if it has a
    mismatched checksum, or '0\n' if it is in good condition"""
    filename = lfutil.findfile(repo, sha)
    if not filename:
        return "2\n"
    fd = None
    try:
        fd = open(filename, "rb")
        return lfutil.hexsha1(fd) == sha and "0\n" or "1\n"
    finally:
        if fd:
            fd.close()
Example #17
    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)

            # If one file is missing, likely all files from this rev are
            if path is None:
                cachelfiles(ui, rsrc, ctx.node())
                path = lfutil.findfile(rsrc, hash)

                if path is None:
                    raise util.Abort(
                        _("missing largefile '%s' from revision %s") %
                        (f, node.hex(ctx.node())))

            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(repo, f, data, 'l' in fctx.flags(), 'x'
                                      in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)
Example #18
                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(_("missing largefile for '%s' in %s")
                                          % (realname, realrev))
                    return util.readfile(path), f[1]
Example #19
                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(
                            _("missing largefile for '%s' in %s") %
                            (realname, realrev))
                    return util.readfile(path), f[1]
Example #20
def hgsubrepoarchive(orig, repo, ui, archiver, prefix, match=None):
    repo._get(repo._state + ('hg', ))
    rev = repo._state[1]
    ctx = repo._repo[rev]

    lfcommands.cachelfiles(ui, repo._repo, ctx.node())

    def write(name, mode, islink, getdata):
        # At this point, the standin has been replaced with the largefile name,
        # so the normal matcher works here without the lfutil variants.
        if match and not match(f):
            return
        data = getdata()

        archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        if lfutil.isstandin(f):
            path = lfutil.findfile(repo._repo, getdata().strip())
            if path is None:
                raise util.Abort(
                    _('largefile %s not found in repo store or system cache') %
                    lfutil.splitstandin(f))
            f = lfutil.splitstandin(f)

            def getdatafn():
                fd = None
                try:
                    fd = open(os.path.join(prefix, path), 'rb')
                    return fd.read()
                finally:
                    if fd:
                        fd.close()

            getdata = getdatafn

        write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        submatch = match_.narrowmatcher(subpath, match)
        sub.archive(ui, archiver,
                    os.path.join(prefix, repo._path) + '/', submatch)
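
The mode expression 'x' in ff and 0755 or 0644 above is the Python 2 and-or conditional idiom with old-style octal literals; in modern Python syntax the same choice reads:

mode = 0o755 if 'x' in ff else 0o644  # executable vs. regular file permissions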
Example #21
def hgsubrepoarchive(orig, repo, ui, archiver, prefix, match=None):
    repo._get(repo._state + ('hg',))
    rev = repo._state[1]
    ctx = repo._repo[rev]

    lfcommands.cachelfiles(ui, repo._repo, ctx.node())

    def write(name, mode, islink, getdata):
        # At this point, the standin has been replaced with the largefile name,
        # so the normal matcher works here without the lfutil variants.
        if match and not match(f):
            return
        data = getdata()

        archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        if lfutil.isstandin(f):
            path = lfutil.findfile(repo._repo, getdata().strip())
            if path is None:
                raise util.Abort(
                    _('largefile %s not found in repo store or system cache')
                    % lfutil.splitstandin(f))
            f = lfutil.splitstandin(f)

            def getdatafn():
                fd = None
                try:
                    fd = open(os.path.join(prefix, path), 'rb')
                    return fd.read()
                finally:
                    if fd:
                        fd.close()

            getdata = getdatafn

        write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        submatch = match_.narrowmatcher(subpath, match)
        sub.archive(ui, archiver, os.path.join(prefix, repo._path) + '/',
                    submatch)
Example #22
    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)
            ### TODO: What if the file is not cached?
            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]
            data = fctx.data()
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
Example #23
def getlfile(repo, proto, sha):
    '''Retrieve a largefile from the repository-local cache or system
    cache.'''
    filename = lfutil.findfile(repo, sha)
    if not filename:
        raise util.Abort(_('requested largefile %s not present in cache') % sha)
    f = open(filename, 'rb')
    length = os.fstat(f.fileno())[6]

    # Since we can't set an HTTP content-length header here, and
    # Mercurial core provides no way to give the length of a streamres
    # (and reading the entire file into RAM would be ill-advised), we
    # just send the length on the first line of the response, like the
    # ssh proto does for string responses.
    def generator():
        yield '%d\n' % length
        for chunk in util.filechunkiter(f):
            yield chunk
    return wireproto.streamres(generator())
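
The length-prefix workaround described in the comment is reusable wherever a content-length header is unavailable: emit the byte count on the first line, then the body in fixed-size chunks so the receiver knows where the payload ends. A generic sketch:

import os

def stream_with_length(path, chunksize=128 * 1024):
    with open(path, 'rb') as f:
        yield '%d\n' % os.fstat(f.fileno()).st_size  # length on the first line
        while True:
            chunk = f.read(chunksize)
            if not chunk:
                break
            yield chunk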
Example #24
def uploadlfiles(ui, rsrc, rdst, files):
    """upload largefiles to the central store"""

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    files = filter(lambda h: not store.exists(h), files)
    for hash in files:
        ui.progress(_("uploading largefiles"), at, unit="largefile", total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_("largefile %s missing from store" " (needs to be uploaded)") % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_("uploading largefiles"), None)
Example #25
                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise util.Abort(_("missing largefile for '%s' in %s")
                                          % (realname, realrev))
                    fp = open(path, 'rb')

                    try:
                        return (fp.read(), f[1])
                    finally:
                        fp.close()
Example #26
                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise util.Abort(
                            _("missing largefile for '%s' in %s") %
                            (realname, realrev))
                    fp = open(path, 'rb')

                    try:
                        return (fp.read(), f[1])
                    finally:
                        fp.close()
Example #27
def overridearchive(orig,
                    repo,
                    dest,
                    node,
                    kind,
                    decode=True,
                    matchfn=None,
                    prefix=None,
                    mtime=None,
                    subrepos=None):
    # No need to lock because we are only reading history and
    # largefile caches, neither of which are modified.
    lfcommands.cachelfiles(repo.ui, repo, node)

    if kind not in archival.archivers:
        raise util.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]

    if kind == 'files':
        if prefix:
            raise util.Abort(_('cannot give prefix when archiving to files'))
    else:
        prefix = archival.tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        if matchfn and not matchfn(name):
            return
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta", True):

        def metadata():
            base = 'repo: %s\nnode: %s\nbranch: %s\n' % (hex(
                repo.changelog.node(0)), hex(node), ctx.branch())

            tags = ''.join('tag: %s\n' % t for t in ctx.tags()
                           if repo.tagtype(t) == 'global')
            if not tags:
                repo.ui.pushbuffer()
                opts = {
                    'template': '{latesttag}\n{latesttagdistance}',
                    'style': '',
                    'patch': None,
                    'git': None
                }
                cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
                ltags, dist = repo.ui.popbuffer().split('\n')
                tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
                tags += 'latesttagdistance: %s\n' % dist

            return base + tags

        write('.hg_archival.txt', 0644, False, metadata)

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        if lfutil.isstandin(f):
            path = lfutil.findfile(repo, getdata().strip())
            if path is None:
                raise util.Abort(
                    _('largefile %s not found in repo store or system cache') %
                    lfutil.splitstandin(f))
            f = lfutil.splitstandin(f)

            def getdatafn():
                fd = None
                try:
                    fd = open(path, 'rb')
                    return fd.read()
                finally:
                    if fd:
                        fd.close()

            getdata = getdatafn
        write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.sub(subpath)
            submatch = match_.narrowmatcher(subpath, matchfn)
            sub.archive(repo.ui, archiver, prefix, submatch)

    archiver.done()
Example #28
def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
            prefix=None, mtime=None, subrepos=None):
    # No need to lock because we are only reading history and
    # largefile caches, neither of which are modified.
    lfcommands.cachelfiles(repo.ui, repo, node)

    if kind not in archival.archivers:
        raise util.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]

    if kind == 'files':
        if prefix:
            raise util.Abort(
                _('cannot give prefix when archiving to files'))
    else:
        prefix = archival.tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        if matchfn and not matchfn(name):
            return
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta", True):
        def metadata():
            base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
                hex(repo.changelog.node(0)), hex(node), ctx.branch())

            tags = ''.join('tag: %s\n' % t for t in ctx.tags()
                           if repo.tagtype(t) == 'global')
            if not tags:
                repo.ui.pushbuffer()
                opts = {'template': '{latesttag}\n{latesttagdistance}',
                        'style': '', 'patch': None, 'git': None}
                cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
                ltags, dist = repo.ui.popbuffer().split('\n')
                tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
                tags += 'latesttagdistance: %s\n' % dist

            return base + tags

        write('.hg_archival.txt', 0644, False, metadata)

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        if lfutil.isstandin(f):
            path = lfutil.findfile(repo, getdata().strip())
            if path is None:
                raise util.Abort(
                    _('largefile %s not found in repo store or system cache')
                    % lfutil.splitstandin(f))
            f = lfutil.splitstandin(f)

            def getdatafn():
                fd = None
                try:
                    fd = open(path, 'rb')
                    return fd.read()
                finally:
                    if fd:
                        fd.close()

            getdata = getdatafn
        write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.sub(subpath)
            submatch = match_.narrowmatcher(subpath, matchfn)
            sub.archive(repo.ui, archiver, prefix, submatch)

    archiver.done()
Example #29
def override_archive(orig, repo, dest, node, kind, decode=True, matchfn=None, prefix=None, mtime=None, subrepos=None):
    # No need to lock because we are only reading history and
    # largefile caches, neither of which are modified.
    lfcommands.cachelfiles(repo.ui, repo, node)

    if kind not in archival.archivers:
        raise util.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]

    if kind == "files":
        if prefix:
            raise util.Abort(_("cannot give prefix when archiving to files"))
    else:
        prefix = archival.tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        if matchfn and not matchfn(name):
            return
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta", True):

        def metadata():
            base = "repo: %s\nnode: %s\nbranch: %s\n" % (hex(repo.changelog.node(0)), hex(node), ctx.branch())

            tags = "".join("tag: %s\n" % t for t in ctx.tags() if repo.tagtype(t) == "global")
            if not tags:
                repo.ui.pushbuffer()
                opts = {"template": "{latesttag}\n{latesttagdistance}", "style": "", "patch": None, "git": None}
                cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
                ltags, dist = repo.ui.popbuffer().split("\n")
                tags = "".join("latesttag: %s\n" % t for t in ltags.split(":"))
                tags += "latesttagdistance: %s\n" % dist

            return base + tags

        write(".hg_archival.txt", 0644, False, metadata)

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        if lfutil.isstandin(f):
            path = lfutil.findfile(repo, getdata().strip())
            if path is None:
                raise util.Abort(_("largefile %s not found in repo store or system cache") % lfutil.splitstandin(f))
            f = lfutil.splitstandin(f)

            def getdatafn():
                fd = None
                try:
                    fd = open(path, "rb")
                    return fd.read()
                finally:
                    if fd:
                        fd.close()

            getdata = getdatafn
        write(f, "x" in ff and 0755 or 0644, "l" in ff, getdata)

    if subrepos:
        for subpath in ctx.substate:
            sub = ctx.sub(subpath)
            sub.archive(repo.ui, archiver, prefix)

    archiver.done()