Example #1
    def _batch(self, pointers, localstore, action):
        if action not in ['upload', 'download']:
            raise error.ProgrammingError('invalid Git-LFS action: %s' % action)

        response = self._batchrequest(pointers, action)
        objects = self._extractobjects(response, pointers, action)
        total = sum(x.get('size', 0) for x in objects)
        sizes = {}
        for obj in objects:
            sizes[obj.get('oid')] = obj.get('size', 0)
        topic = {
            'upload': _('lfs uploading'),
            'download': _('lfs downloading')
        }[action]
        if len(objects) > 1:
            self.ui.note(
                _('lfs: need to transfer %d objects (%s)\n') %
                (len(objects), util.bytecount(total)))
        self.ui.progress(topic, 0, total=total)

        def transfer(chunk):
            for obj in chunk:
                objsize = obj.get('size', 0)
                if self.ui.verbose:
                    if action == 'download':
                        msg = _('lfs: downloading %s (%s)\n')
                    elif action == 'upload':
                        msg = _('lfs: uploading %s (%s)\n')
                    self.ui.note(msg %
                                 (obj.get('oid'), util.bytecount(objsize)))
                retry = self.retry
                while True:
                    try:
                        self._basictransfer(obj, action, localstore)
                        yield 1, obj.get('oid')
                        break
                    except socket.error as ex:
                        if retry > 0:
                            self.ui.note(
                                _('lfs: failed: %r (remaining retry %d)\n') %
                                (ex, retry))
                            retry -= 1
                            continue
                        raise

        # Until https multiplexing gets sorted out
        if self.ui.configbool('experimental', 'lfs.worker-enable'):
            oids = worker.worker(self.ui, 0.1, transfer, (),
                                 sorted(objects, key=lambda o: o.get('oid')))
        else:
            oids = transfer(sorted(objects, key=lambda o: o.get('oid')))

        processed = 0
        for _one, oid in oids:
            processed += sizes[oid]
            self.ui.progress(topic, processed, total=total)
            self.ui.note(_('lfs: processed: %s\n') % oid)
        self.ui.progress(topic, pos=None, total=total)
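All of these examples share the same worker.worker contract. Here is a minimal sketch of that pattern, with illustrative names (process, runall, and items are not from the source above): the callable receives one chunk of the items and yields a (progress, identifier) pair per item, and worker.worker forks helper processes only when the estimated cost (weight * number of items) makes that worthwhile, otherwise running the generator inline.

from mercurial import worker

def process(chunk):
    for item in chunk:
        # per-item work goes here
        yield 1, item  # a (progress units, identifier) pair, like transfer()

def runall(ui, items):
    # 0.1 mirrors the per-item weight the LFS example above passes
    for _count, ident in worker.worker(ui, 0.1, process, (), items):
        ui.note('processed: %s\n' % ident)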
Example #2
def runworker(ui, fn, wargs, items):
    # 0.04 is the cost per argument, so with at least 100 files on a
    # 4-core machine the linear cost outweighs the drawback of
    # spawning. The 'force' setting overrides this by using a
    # ridiculously high weight.
    weight = 0.0  # disable worker
    useworker = ui.config('p4fastimport', 'useworker')
    if useworker == 'force':
        weight = 100000.0  # force worker
    elif util.parsebool(useworker or ''):
        weight = 0.04  # normal weight

    # Fix duplicated messages before
    # https://www.mercurial-scm.org/repo/hg-committed/rev/9d3d56aa1a9f
    ui.flush()
    return worker.worker(ui, weight, fn, wargs, items)
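A rough restatement of the arithmetic in the comment above, assuming worker.worker weighs the linear cost of inline work against a fixed spawning overhead (the exact threshold lives inside mercurial.worker and varies across versions, so 0.15 here is only an assumed figure):

weight, nitems = 0.04, 100                 # the 'normal' weight with 100 files
linearcost = weight * nitems               # 4.0 units of inline work
spawnthreshold = 0.15                      # assumed fixed overhead
useworkers = linearcost > spawnthreshold   # True: forking pays off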
Example #3
def verify(ui, repo, args=None, **opts):
    '''verify current revision against Subversion repository
    '''

    if repo is None:
        raise error.RepoError("There is no Mercurial repository"
                              " here (.hg not found)")

    ctx = repo[opts.get('rev', '.')]
    if 'close' in ctx.extra():
        ui.write('cannot verify closed branch')
        return 0
    convert_revision = ctx.extra().get('convert_revision')
    if convert_revision is None or not convert_revision.startswith('svn:'):
        raise error.Abort('revision %s not from SVN' % ctx)

    if args:
        url = repo.ui.expandpath(args[0])
    else:
        url = repo.ui.expandpath('default')

    svn = svnrepo.svnremoterepo(ui, url).svn
    meta = repo.svnmeta(svn.uuid, svn.subdir)
    srev, branch, branchpath = meta.get_source_rev(ctx=ctx)

    branchpath = branchpath[len(svn.subdir.lstrip('/')):]
    branchurl = ('%s/%s' % (url, branchpath)).strip('/')

    ui.write('verifying %s against %s@%i\n' % (ctx, branchurl, srev))

    def diff_file(path, svndata):
        fctx = ctx[path]

        if ui.verbose and not fctx.isbinary():
            svndesc = '%s/%s/%s@%d' % (svn.svn_url, branchpath, path, srev)
            hgdesc = '%s@%s' % (path, ctx)

            for c in difflib.unified_diff(svndata.splitlines(True),
                                          fctx.data().splitlines(True),
                                          svndesc, hgdesc):
                ui.note(c)

    if opts.get('stupid', ui.configbool('hgsubversion', 'stupid')):
        svnfiles = set()
        result = 0

        hgfiles = set(ctx) - util.ignoredfiles

        def verifydata(svndata):
            svnworker = svnrepo.svnremoterepo(ui, url).svn

            i = 0
            res = True
            for fn, type in svndata:
                i += 1
                if type != 'f':
                    continue

                fp = fn
                if branchpath:
                    fp = branchpath + '/' + fn
                data, mode = svnworker.get_file(posixpath.normpath(fp), srev)
                try:
                    fctx = ctx[fn]
                except error.LookupError:
                    yield i, "%s\0%r" % (fn, res)
                    continue

                if not fctx.data() == data:
                    ui.write('difference in: %s\n' % fn)
                    diff_file(fn, data)
                    res = False
                if not fctx.flags() == mode:
                    ui.write('wrong flags for: %s\n' % fn)
                    res = False
                yield i, "%s\0%r" % (fn, res)

        if url.startswith('file://'):
            perarg = 0.00001
        else:
            perarg = 0.000001

        svndata = svn.list_files(branchpath, srev)
        w = worker.worker(repo.ui, perarg, verifydata, (), tuple(svndata))
        i = 0
        for _, t in w:
            compathacks.progress(ui, 'verify', i, total=len(hgfiles))
            i += 1
            # results come back as text, so the bool arrives as its repr()
            fn, ok = t.split('\0', 1)
            if ok != 'True':
                result = 1
            svnfiles.add(fn)

        if hgfiles != svnfiles:
            unexpected = hgfiles - svnfiles
            for f in sorted(unexpected):
                ui.write('unexpected file: %s\n' % f)
            missing = svnfiles - hgfiles
            for f in sorted(missing):
                ui.write('missing file: %s\n' % f)
            result = 1

        compathacks.progress(ui, 'verify', None, total=len(hgfiles))

    else:

        class VerifyEditor(svnwrap.Editor):
            """editor that verifies a repository against the given context."""
            def __init__(self, ui, ctx):
                self.ui = ui
                self.ctx = ctx
                self.unexpected = set(ctx) - util.ignoredfiles
                self.missing = set()
                self.failed = False

                self.total = len(self.unexpected)
                self.seen = 0

            def open_root(self, base_revnum, pool=None):
                pass

            def add_directory(self,
                              path,
                              parent_baton,
                              copyfrom_path,
                              copyfrom_revision,
                              pool=None):
                self.file = None
                self.props = None

            def open_directory(self,
                               path,
                               parent_baton,
                               base_revision,
                               pool=None):
                self.file = None
                self.props = None

            def add_file(self,
                         path,
                         parent_baton=None,
                         copyfrom_path=None,
                         copyfrom_revision=None,
                         file_pool=None):

                if path in self.unexpected:
                    self.unexpected.remove(path)
                    self.file = path
                    self.props = {}
                else:
                    self.total += 1
                    self.missing.add(path)
                    self.failed = True
                    self.file = None
                    self.props = None

                self.seen += 1
                compathacks.progress(self.ui,
                                     'verify',
                                     self.seen,
                                     total=self.total)

            def open_file(self, path, base_revnum):
                raise NotImplementedError()

            def apply_textdelta(self, file_baton, base_checksum, pool=None):
                stream = svnwrap.SimpleStringIO(closing=False)
                handler = svnwrap.apply_txdelta('', stream)
                if not callable(handler):
                    raise error.Abort('Error in Subversion bindings: '
                                      'cannot call handler!')

                def txdelt_window(window):
                    handler(window)
                    # window being None means we're done
                    if window:
                        return

                    fctx = self.ctx[self.file]
                    hgdata = fctx.data()
                    svndata = stream.getvalue()

                    if 'svn:executable' in self.props:
                        if fctx.flags() != 'x':
                            self.ui.write('wrong flags for: %s\n' % self.file)
                            self.failed = True
                    elif 'svn:special' in self.props:
                        hgdata = 'link ' + hgdata
                        if fctx.flags() != 'l':
                            self.ui.write('wrong flags for: %s\n' % self.file)
                            self.failed = True
                    elif fctx.flags():
                        self.ui.write('wrong flags for: %s\n' % self.file)
                        self.failed = True

                    if hgdata != svndata:
                        self.ui.write('difference in: %s\n' % self.file)
                        diff_file(self.file, svndata)
                        self.failed = True

                if self.file is not None:
                    return txdelt_window

            def change_dir_prop(self, dir_baton, name, value, pool=None):
                pass

            def change_file_prop(self, file_baton, name, value, pool=None):
                if self.props is not None:
                    self.props[name] = value

            def close_file(self, file_baton, checksum, pool=None):
                pass

            def close_directory(self, dir_baton, pool=None):
                pass

            def delete_entry(self, path, revnum, pool=None):
                raise NotImplementedError()

            def check(self):
                compathacks.progress(self.ui, 'verify', None, total=self.total)

                for f in self.unexpected:
                    self.ui.write('unexpected file: %s\n' % f)
                    self.failed = True
                for f in self.missing:
                    self.ui.write('missing file: %s\n' % f)
                    self.failed = True
                return not self.failed

        v = VerifyEditor(ui, ctx)
        svnrepo.svnremoterepo(ui, branchurl).svn.get_revision(srev, v)
        if v.check():
            result = 0
        else:
            result = 1

    return result
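One subtlety in verifydata is that the older worker implementation hands each yielded item back to the parent as plain text, so the boolean in the "%s\0%r" payload arrives as the string 'True' or 'False'. Any nonempty string is truthy, which is why the consuming loop has to compare against the literal text. A sketch with a hypothetical payload:

payload = 'some/file\0False'    # what the parent process receives
fn, ok = payload.split('\0', 1)
assert bool(ok)                 # 'False' is still a nonempty, truthy string
failed = (ok != 'True')         # the comparison the loop above relies on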
Example #4
def fix(ui, repo, *pats, **opts):
    """rewrite file content in changesets or working directory

    Runs any configured tools to fix the content of files. Only affects files
    with changes, unless file arguments are provided. Only affects changed lines
    of files, unless the --whole flag is used. Some tools may always affect the
    whole file regardless of --whole.

    If --working-dir is used, files with uncommitted changes in the working copy
    will be fixed. Note that no backups are made.

    If revisions are specified with --source, those revisions and their
    descendants will be checked, and they may be replaced with new revisions
    that have fixed file content. By automatically including the descendants,
    no merging, rebasing, or evolution will be required. If an ancestor of the
    working copy is included, then the working copy itself will also be fixed,
    and the working copy will be updated to the fixed parent.

    When determining what lines of each file to fix at each revision, the whole
    set of revisions being fixed is considered, so that fixes to earlier
    revisions are not forgotten in later ones. The --base flag can be used to
    override this default behavior, though it is not usually desirable to do so.
    """
    opts = pycompat.byteskwargs(opts)
    cmdutil.check_at_most_one_arg(opts, b'all', b'source', b'rev')
    cmdutil.check_incompatible_arguments(
        opts, b'working_dir', [b'all', b'source']
    )

    with repo.wlock(), repo.lock(), repo.transaction(b'fix'):
        revstofix = getrevstofix(ui, repo, opts)
        basectxs = getbasectxs(repo, opts, revstofix)
        workqueue, numitems = getworkqueue(
            ui, repo, pats, opts, revstofix, basectxs
        )
        basepaths = getbasepaths(repo, opts, workqueue, basectxs)
        fixers = getfixers(ui)

        # Rather than letting each worker independently fetch the files
        # (which also would add complications for shared/keepalive
        # connections), prefetch them all first.
        _prefetchfiles(repo, workqueue, basepaths)

        # There are no data dependencies between the workers fixing each file
        # revision, so we can use all available parallelism.
        def getfixes(items):
            for rev, path in items:
                ctx = repo[rev]
                olddata = ctx[path].data()
                metadata, newdata = fixfile(
                    ui, repo, opts, fixers, ctx, path, basepaths, basectxs[rev]
                )
                # Don't waste memory/time passing unchanged content back, but
                # produce one result per item either way.
                yield (
                    rev,
                    path,
                    metadata,
                    newdata if newdata != olddata else None,
                )

        results = worker.worker(
            ui, 1.0, getfixes, tuple(), workqueue, threadsafe=False
        )

        # We have to hold on to the data for each successor revision in memory
        # until all its parents are committed. We ensure this by committing and
        # freeing memory for the revisions in some topological order. This
        # leaves a little bit of memory efficiency on the table, but also makes
        # the tests deterministic. It might also be considered a feature since
        # it makes the results more easily reproducible.
        filedata = collections.defaultdict(dict)
        aggregatemetadata = collections.defaultdict(list)
        replacements = {}
        wdirwritten = False
        commitorder = sorted(revstofix, reverse=True)
        with ui.makeprogress(
            topic=_(b'fixing'), unit=_(b'files'), total=sum(numitems.values())
        ) as progress:
            for rev, path, filerevmetadata, newdata in results:
                progress.increment(item=path)
                for fixername, fixermetadata in filerevmetadata.items():
                    aggregatemetadata[fixername].append(fixermetadata)
                if newdata is not None:
                    filedata[rev][path] = newdata
                    hookargs = {
                        b'rev': rev,
                        b'path': path,
                        b'metadata': filerevmetadata,
                    }
                    repo.hook(
                        b'postfixfile',
                        throw=False,
                        **pycompat.strkwargs(hookargs)
                    )
                numitems[rev] -= 1
                # Apply the fixes for this and any other revisions that are
                # ready and sitting at the front of the queue. Using a loop here
                # prevents the queue from being blocked by the first revision to
                # be ready out of order.
                while commitorder and not numitems[commitorder[-1]]:
                    rev = commitorder.pop()
                    ctx = repo[rev]
                    if rev == wdirrev:
                        writeworkingdir(repo, ctx, filedata[rev], replacements)
                        wdirwritten = bool(filedata[rev])
                    else:
                        replacerev(ui, repo, ctx, filedata[rev], replacements)
                    del filedata[rev]

        cleanup(repo, replacements, wdirwritten)
        hookargs = {
            b'replacements': replacements,
            b'wdirwritten': wdirwritten,
            b'metadata': aggregatemetadata,
        }
        repo.hook(b'postfix', throw=True, **pycompat.strkwargs(hookargs))
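The commit loop above is a small out-of-order drain: file results arrive in whatever order the workers finish, but revisions must be committed parents-first, so each completed revision waits until it reaches the front of the queue. The same pattern in isolation, with hypothetical drain and commit names:

import collections

def drain(results, numitems, commit):
    # numitems maps rev -> number of file results still outstanding
    pending = collections.defaultdict(dict)
    order = sorted(numitems, reverse=True)  # order.pop() yields the oldest rev
    for rev, path, data in results:
        pending[rev][path] = data
        numitems[rev] -= 1
        # commit every revision at the front of the queue that is complete
        while order and not numitems[order[-1]]:
            done = order.pop()
            commit(done, pending.pop(done, {}))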
Example #5
def verify(ui, repo, args=None, **opts):
    '''verify current revision against Subversion repository
    '''

    if repo is None:
        raise error.RepoError("There is no Mercurial repository"
                              " here (.hg not found)")

    ctx = repo[opts.get('rev', '.')]
    if 'close' in ctx.extra():
        ui.write('cannot verify closed branch')
        return 0
    convert_revision = ctx.extra().get('convert_revision')
    if convert_revision is None or not convert_revision.startswith('svn:'):
        raise hgutil.Abort('revision %s not from SVN' % ctx)

    if args:
        url = repo.ui.expandpath(args[0])
    else:
        url = repo.ui.expandpath('default')

    svn = svnrepo.svnremoterepo(ui, url).svn
    meta = repo.svnmeta(svn.uuid, svn.subdir)
    srev, branch, branchpath = meta.get_source_rev(ctx=ctx)

    branchpath = branchpath[len(svn.subdir.lstrip('/')):]
    branchurl = ('%s/%s' % (url, branchpath)).strip('/')

    ui.write('verifying %s against %s@%i\n' % (ctx, branchurl, srev))

    def diff_file(path, svndata):
        fctx = ctx[path]

        if ui.verbose and not fctx.isbinary():
            svndesc = '%s/%s/%s@%d' % (svn.svn_url, branchpath, path, srev)
            hgdesc = '%s@%s' % (path, ctx)

            for c in difflib.unified_diff(svndata.splitlines(True),
                                          fctx.data().splitlines(True),
                                          svndesc, hgdesc):
                ui.note(c)

    if opts.get('stupid', ui.configbool('hgsubversion', 'stupid')):
        svnfiles = set()
        result = 0

        hgfiles = set(ctx) - util.ignoredfiles

        def verifydata(svndata):
            svnworker = svnrepo.svnremoterepo(ui, url).svn

            i = 0
            res = True
            for fn, type in svndata:
                i += 1
                if type != 'f':
                    continue

                fp = fn
                if branchpath:
                    fp = branchpath + '/' + fn
                data, mode = svnworker.get_file(posixpath.normpath(fp), srev)
                try:
                    fctx = ctx[fn]
                except error.LookupError:
                    yield i, "%s\0%r" % (fn, res)
                    continue

                if not fctx.data() == data:
                    ui.write('difference in: %s\n' % fn)
                    diff_file(fn, data)
                    res = False
                if not fctx.flags() == mode:
                    ui.write('wrong flags for: %s\n' % fn)
                    res = False
                yield i, "%s\0%r" % (fn, res)

        if url.startswith('file://'):
            perarg = 0.00001
        else:
            perarg = 0.000001

        svndata = svn.list_files(branchpath, srev)
        w = worker.worker(repo.ui, perarg, verifydata, (), tuple(svndata))
        i = 0
        for _, t in w:
            ui.progress('verify', i, total=len(hgfiles))
            i += 1
            # results come back as text, so the bool arrives as its repr()
            fn, ok = t.split('\0', 1)
            if ok != 'True':
                result = 1
            svnfiles.add(fn)

        if hgfiles != svnfiles:
            unexpected = hgfiles - svnfiles
            for f in sorted(unexpected):
                ui.write('unexpected file: %s\n' % f)
            missing = svnfiles - hgfiles
            for f in sorted(missing):
                ui.write('missing file: %s\n' % f)
            result = 1

        ui.progress('verify', None, total=len(hgfiles))

    else:
        class VerifyEditor(svnwrap.Editor):
            """editor that verifies a repository against the given context."""
            def __init__(self, ui, ctx):
                self.ui = ui
                self.ctx = ctx
                self.unexpected = set(ctx) - util.ignoredfiles
                self.missing = set()
                self.failed = False

                self.total = len(self.unexpected)
                self.seen = 0

            def open_root(self, base_revnum, pool=None):
                pass

            def add_directory(self, path, parent_baton, copyfrom_path,
                              copyfrom_revision, pool=None):
                self.file = None
                self.props = None

            def open_directory(self, path, parent_baton, base_revision, pool=None):
                self.file = None
                self.props = None

            def add_file(self, path, parent_baton=None, copyfrom_path=None,
                         copyfrom_revision=None, file_pool=None):

                if path in self.unexpected:
                    self.unexpected.remove(path)
                    self.file = path
                    self.props = {}
                else:
                    self.total += 1
                    self.missing.add(path)
                    self.failed = True
                    self.file = None
                    self.props = None

                self.seen += 1
                self.ui.progress('verify', self.seen, total=self.total)

            def open_file(self, path, base_revnum):
                raise NotImplementedError()

            def apply_textdelta(self, file_baton, base_checksum, pool=None):
                stream = svnwrap.SimpleStringIO(closing=False)
                handler = svnwrap.apply_txdelta('', stream)
                if not callable(handler):
                    raise hgutil.Abort('Error in Subversion bindings: '
                                       'cannot call handler!')
                def txdelt_window(window):
                    handler(window)
                    # window being None means we're done
                    if window:
                        return

                    fctx = self.ctx[self.file]
                    hgdata = fctx.data()
                    svndata = stream.getvalue()

                    if 'svn:executable' in self.props:
                        if fctx.flags() != 'x':
                            self.ui.warn('wrong flags for: %s\n' % self.file)
                            self.failed = True
                    elif 'svn:special' in self.props:
                        hgdata = 'link ' + hgdata
                        if fctx.flags() != 'l':
                            self.ui.warn('wrong flags for: %s\n' % self.file)
                            self.failed = True
                    elif fctx.flags():
                        self.ui.warn('wrong flags for: %s\n' % self.file)
                        self.failed = True

                    if hgdata != svndata:
                        self.ui.warn('difference in: %s\n' % self.file)
                        diff_file(self.file, svndata)
                        self.failed = True

                if self.file is not None:
                    return txdelt_window

            def change_dir_prop(self, dir_baton, name, value, pool=None):
                pass

            def change_file_prop(self, file_baton, name, value, pool=None):
                if self.props is not None:
                    self.props[name] = value

            def close_file(self, file_baton, checksum, pool=None):
                pass

            def close_directory(self, dir_baton, pool=None):
                pass

            def delete_entry(self, path, revnum, pool=None):
                raise NotImplementedError()

            def check(self):
                self.ui.progress('verify', None, total=self.total)

                for f in self.unexpected:
                    self.ui.warn('unexpected file: %s\n' % f)
                    self.failed = True
                for f in self.missing:
                    self.ui.warn('missing file: %s\n' % f)
                    self.failed = True
                return not self.failed

        v = VerifyEditor(ui, ctx)
        svnrepo.svnremoterepo(ui, branchurl).svn.get_revision(srev, v)
        if v.check():
            result = 0
        else:
            result = 1

    return result
Example #6
def fix(ui, repo, *pats, **opts):
    """rewrite file content in changesets or working directory

    Runs any configured tools to fix the content of files. Only affects files
    with changes, unless file arguments are provided. Only affects changed lines
    of files, unless the --whole flag is used. Some tools may always affect the
    whole file regardless of --whole.

    If revisions are specified with --rev, those revisions will be checked, and
    they may be replaced with new revisions that have fixed file content.  It is
    desirable to specify all descendants of each specified revision, so that the
    fixes propagate to the descendants. If all descendants are fixed at the same
    time, no merging, rebasing, or evolution will be required.

    If --working-dir is used, files with uncommitted changes in the working copy
    will be fixed. If the checked-out revision is also fixed, the working
    directory will update to the replacement revision.

    When determining what lines of each file to fix at each revision, the whole
    set of revisions being fixed is considered, so that fixes to earlier
    revisions are not forgotten in later ones. The --base flag can be used to
    override this default behavior, though it is not usually desirable to do so.
    """
    opts = pycompat.byteskwargs(opts)
    if opts['all']:
        if opts['rev']:
            raise error.Abort(_('cannot specify both "--rev" and "--all"'))
        opts['rev'] = ['not public() and not obsolete()']
        opts['working_dir'] = True
    with repo.wlock(), repo.lock(), repo.transaction('fix'):
        revstofix = getrevstofix(ui, repo, opts)
        basectxs = getbasectxs(repo, opts, revstofix)
        workqueue, numitems = getworkqueue(ui, repo, pats, opts, revstofix,
                                           basectxs)
        fixers = getfixers(ui)

        # There are no data dependencies between the workers fixing each file
        # revision, so we can use all available parallelism.
        def getfixes(items):
            for rev, path in items:
                ctx = repo[rev]
                olddata = ctx[path].data()
                newdata = fixfile(ui, opts, fixers, ctx, path, basectxs[rev])
                # Don't waste memory/time passing unchanged content back, but
                # produce one result per item either way.
                yield (rev, path, newdata if newdata != olddata else None)
        results = worker.worker(ui, 1.0, getfixes, tuple(), workqueue,
                                threadsafe=False)

        # We have to hold on to the data for each successor revision in memory
        # until all its parents are committed. We ensure this by committing and
        # freeing memory for the revisions in some topological order. This
        # leaves a little bit of memory efficiency on the table, but also makes
        # the tests deterministic. It might also be considered a feature since
        # it makes the results more easily reproducible.
        filedata = collections.defaultdict(dict)
        replacements = {}
        wdirwritten = False
        commitorder = sorted(revstofix, reverse=True)
        with ui.makeprogress(topic=_('fixing'), unit=_('files'),
                             total=sum(numitems.values())) as progress:
            for rev, path, newdata in results:
                progress.increment(item=path)
                if newdata is not None:
                    filedata[rev][path] = newdata
                numitems[rev] -= 1
                # Apply the fixes for this and any other revisions that are
                # ready and sitting at the front of the queue. Using a loop here
                # prevents the queue from being blocked by the first revision to
                # be ready out of order.
                while commitorder and not numitems[commitorder[-1]]:
                    rev = commitorder.pop()
                    ctx = repo[rev]
                    if rev == wdirrev:
                        writeworkingdir(repo, ctx, filedata[rev], replacements)
                        wdirwritten = bool(filedata[rev])
                    else:
                        replacerev(ui, repo, ctx, filedata[rev], replacements)
                    del filedata[rev]

        cleanup(repo, replacements, wdirwritten)
Example #7
    def _batch(self, pointers, localstore, action):
        if action not in [b'upload', b'download']:
            raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action)

        response = self._batchrequest(pointers, action)
        objects = self._extractobjects(response, pointers, action)
        total = sum(x.get(b'size', 0) for x in objects)
        sizes = {}
        for obj in objects:
            sizes[obj.get(b'oid')] = obj.get(b'size', 0)
        topic = {
            b'upload': _(b'lfs uploading'),
            b'download': _(b'lfs downloading'),
        }[action]
        if len(objects) > 1:
            self.ui.note(
                _(b'lfs: need to transfer %d objects (%s)\n')
                % (len(objects), util.bytecount(total))
            )

        def transfer(chunk):
            for obj in chunk:
                objsize = obj.get(b'size', 0)
                if self.ui.verbose:
                    if action == b'download':
                        msg = _(b'lfs: downloading %s (%s)\n')
                    elif action == b'upload':
                        msg = _(b'lfs: uploading %s (%s)\n')
                    self.ui.note(
                        msg % (obj.get(b'oid'), util.bytecount(objsize))
                    )
                retry = self.retry
                while True:
                    try:
                        self._basictransfer(obj, action, localstore)
                        yield 1, obj.get(b'oid')
                        break
                    except socket.error as ex:
                        if retry > 0:
                            self.ui.note(
                                _(b'lfs: failed: %r (remaining retry %d)\n')
                                % (stringutil.forcebytestr(ex), retry)
                            )
                            retry -= 1
                            continue
                        raise

        # Until https multiplexing gets sorted out
        if self.ui.configbool(b'experimental', b'lfs.worker-enable'):
            oids = worker.worker(
                self.ui,
                0.1,
                transfer,
                (),
                sorted(objects, key=lambda o: o.get(b'oid')),
            )
        else:
            oids = transfer(sorted(objects, key=lambda o: o.get(b'oid')))

        with self.ui.makeprogress(topic, total=total) as progress:
            progress.update(0)
            processed = 0
            blobs = 0
            for _one, oid in oids:
                processed += sizes[oid]
                blobs += 1
                progress.update(processed)
                self.ui.note(_(b'lfs: processed: %s\n') % oid)

        if blobs > 0:
            if action == b'upload':
                self.ui.status(
                    _(b'lfs: uploaded %d files (%s)\n')
                    % (blobs, util.bytecount(processed))
                )
            elif action == b'download':
                self.ui.status(
                    _(b'lfs: downloaded %d files (%s)\n')
                    % (blobs, util.bytecount(processed))
                )
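Compared with Example #1, this version reports progress through the ui.makeprogress context manager rather than bare ui.progress calls. The consumption pattern in miniature (the topic name is illustrative; sizes and oids are as in the code above, with hg >= 5.x byte strings):

with ui.makeprogress(b'lfs transfer', total=total) as progress:
    done = 0
    for _one, oid in oids:
        done += sizes[oid]
        progress.update(done)  # an absolute position, not an increment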
Example #8
def cmd_format_source(ui, repo, tool, *pats, **opts):
    """register a tool to format source files during merges and rebases

    Record a mapping from the given file pattern FILES to a source formatting
    tool TOOL. Mappings are stored in the version-controlled file
    (automatically committed when format-source is used) .hg-format-source in
    the root of the checkout. The mapping causes TOOL to be run on FILES during
    future merge and rebase operations.

    The actual command run for TOOL needs to be registered in the config. See
    :hg:`help -e format-source` for details.

    """
    if repo.getcwd():
        msg = _("format-source must be run from repository root")
        hint = _("cd %s") % repo.root
        raise error.Abort(msg, hint=hint)

    if not pats:
        raise error.Abort(_('no files specified'))

    # XXX We only support 'glob' patterns for now; the recursive behavior
    # of the various other pattern types is a bit wonky.
    for pattern in pats:
        if not pattern.startswith('glob:'):
            msg = _("format-source only supports explicit 'glob' patterns "
                    "for now ('%s')")
            msg %= pattern
            hint = _('maybe try with "glob:%s"') % pattern
            raise error.Abort(msg, hint=hint)

    # lock the repo to make sure no content is changed
    with repo.wlock():
        # formatting tool
        if ' ' in tool:
            raise error.Abort(_("tool name cannot contain space: '%s'") % tool)

        # if the tool was not specified in the config, maybe we can fall back
        # to the Mozilla Firefox in-tree clang-format tool
        if should_use_default(repo, tool):
            shell_tool, tool_config_files, file_ext = return_default_clang_format(
                repo)
        else:
            shell_tool = repo.ui.config('format-source', tool)
            tool_config_files = repo.ui.configlist('format-source',
                                                   '%s:configpaths' % tool)
            file_ext = tuple(
                repo.ui.configlist('format-source', '%s:fileext' % tool))

        if not shell_tool:
            msg = _("unknown format tool: %s (no 'format-source.%s' config)")
            raise error.Abort(msg % (tool, tool))
        if not file_ext:
            msg = _("no 'format-source.%s:fileext' config present") % tool
            raise error.Abort(msg)
        cmdutil.bailifchanged(repo)
        cmdutil.checkunfinished(repo, commit=True)
        wctx = repo[None]
        # files to be formatted
        matcher = scmutil.match(wctx, pats, opts)
        files = list(wctx.matches(matcher))

        if util.versiontuple(n=2) >= (4, 7):
            # In 4.7 we have ui.makeprogress
            with ui.makeprogress(_('formatting'),
                                 unit=_('files'),
                                 total=len(files)) as progress:
                proc = worker.worker(ui, 0.1, batchformat,
                                     (repo, wctx, tool, shell_tool, file_ext),
                                     files)
                for filepath in proc:
                    progress.increment(item=filepath)
        else:
            proc = worker.worker(ui, 0.1, batchformat,
                                 (repo, wctx, tool, shell_tool, file_ext),
                                 files)
            # Wait for everything to finish
            for filepath in proc:
                pass

        # update the storage to mark formatted file as formatted
        with repo.wvfs(file_storage_path, mode='ab') as storage:
            for pattern in pats:
                # XXX if pattern was relative, we need to reroot it from the
                # repository root. For now we constrained the command to run
                # at the root of the repository.
                data = {
                    'tool': encoding.unifromlocal(tool),
                    'pattern': encoding.unifromlocal(pattern)
                }
                if tool_config_files:
                    data['configpaths'] = [
                        encoding.unifromlocal(path)
                        for path in tool_config_files
                    ]
                entry = json.dumps(data, sort_keys=True)
                assert '\n' not in entry
                storage.write('%s\n' % entry)

        if file_storage_path not in wctx:
            storage_matcher = scmutil.match(wctx,
                                            ['path:' + file_storage_path])
            cmdutil.add(ui, repo, storage_matcher, '', True)

        # commit the whole
        with repo.lock():
            commit_patterns = ['path:' + file_storage_path]
            commit_patterns.extend(pats)
            return commands._docommit(ui, repo, *commit_patterns, **opts)
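The util.versiontuple check above is the compatibility hinge: ui.makeprogress only exists on Mercurial 4.7 and later, so older hosts simply drain the worker without reporting progress. The gate condensed into a sketch (runandreport is a hypothetical name; _ is assumed to be the usual i18n helper from the surrounding module):

from mercurial import util, worker

def runandreport(ui, fn, staticargs, items):
    proc = worker.worker(ui, 0.1, fn, staticargs, items)
    if util.versiontuple(n=2) >= (4, 7):  # ui.makeprogress arrived in 4.7
        with ui.makeprogress(_('formatting'), unit=_('files'),
                             total=len(items)) as progress:
            for item in proc:
                progress.increment(item=item)
    else:
        for item in proc:  # just wait for everything to finish
            pass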