Example #1
def debuginstall(ui, fm):
    fm.write(
        b"fsmonitor-watchman",
        _(b"fsmonitor checking for watchman binary... (%s)\n"),
        ui.configpath(b"fsmonitor", b"watchman_exe"),
    )
    root = tempfile.mkdtemp()
    c = watchmanclient.client(ui, root)
    err = None
    try:
        v = c.command(b"version")
        fm.write(
            b"fsmonitor-watchman-version",
            _(b" watchman binary version %s\n"),
            pycompat.bytestr(v["version"]),
        )
    except watchmanclient.Unavailable as e:
        err = stringutil.forcebytestr(e)
    fm.condwrite(
        err,
        b"fsmonitor-watchman-error",
        _(b" watchman binary missing or broken: %s\n"),
        err,
    )
    return 1 if err else 0
Example #2
def transfer(chunk):
    for obj in chunk:
        objsize = obj.get(b'size', 0)
        if self.ui.verbose:
            if action == b'download':
                msg = _(b'lfs: downloading %s (%s)\n')
            elif action == b'upload':
                msg = _(b'lfs: uploading %s (%s)\n')
            self.ui.note(
                msg % (obj.get(b'oid'), util.bytecount(objsize))
            )
        retry = self.retry
        while True:
            try:
                self._basictransfer(obj, action, localstore)
                yield 1, obj.get(b'oid')
                break
            except socket.error as ex:
                if retry > 0:
                    self.ui.note(
                        _(b'lfs: failed: %r (remaining retry %d)\n')
                        % (stringutil.forcebytestr(ex), retry)
                    )
                    retry -= 1
                    continue
                raise
Example #3
def _watchmantofsencoding(path):
    """Fix path to match watchman and local filesystem encoding

    watchman's paths encoding can differ from filesystem encoding. For example,
    on Windows, it's always utf-8.
    """
    try:
        decoded = path.decode(_watchmanencoding)
    except UnicodeDecodeError as e:
        raise error.Abort(stringutil.forcebytestr(e),
                          hint=b'watchman encoding error')

    try:
        encoded = decoded.encode(_fsencoding, 'strict')
    except UnicodeEncodeError as e:
        raise error.Abort(stringutil.forcebytestr(e))

    return encoded
Example #4
def sendfile(self, filename, hash):
    self.ui.debug(b'remotestore: sendfile(%s, %s)\n' % (filename, hash))
    try:
        with lfutil.httpsendfile(self.ui, filename) as fd:
            return self._put(hash, fd)
    except IOError as e:
        raise error.Abort(
            _(b'remotestore: could not open file %s: %s') %
            (filename, stringutil.forcebytestr(e)))
Example #5
    def _getfile(self, tmpfile, filename, hash):
        try:
            chunks = self._get(hash)
        except urlerr.httperror as e:
            # 401s get converted to error.Aborts; everything else is fine being
            # turned into a StoreError
            raise basestore.StoreError(filename, hash, self.url,
                                       stringutil.forcebytestr(e))
        except urlerr.urlerror as e:
            # This usually indicates a connection problem, so don't
            # keep trying with the other files... they will probably
            # all fail too.
            raise error.Abort(b'%s: %s' %
                              (urlutil.hidepassword(self.url), e.reason))
        except IOError as e:
            raise basestore.StoreError(filename, hash, self.url,
                                       stringutil.forcebytestr(e))

        return lfutil.copyandhash(chunks, tmpfile)
Example #6
def _command(self, *args):
    watchmanargs = (args[0], self._root) + args[1:]
    try:
        if self._watchmanclient is None:
            self._firsttime = False
            watchman_exe = self._ui.configpath(b'fsmonitor',
                                               b'watchman_exe')
            self._watchmanclient = pywatchman.client(
                timeout=self._timeout,
                useImmutableBser=True,
                binpath=procutil.tonativestr(watchman_exe),
            )
        return self._watchmanclient.query(*watchmanargs)
    except pywatchman.CommandError as ex:
        if 'unable to resolve root' in ex.msg:
            raise WatchmanNoRoot(self._root,
                                 stringutil.forcebytestr(ex.msg))
        raise Unavailable(stringutil.forcebytestr(ex.msg))
    except pywatchman.WatchmanError as ex:
        raise Unavailable(stringutil.forcebytestr(ex))
Example #7
    def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
                 filter=None):
        '''apply the patch in patchfile to the repository as a transplant'''
        (manifest, user, (time, timezone), files, message) = cl[:5]
        date = "%d %d" % (time, timezone)
        extra = {'transplant_source': node}
        if filter:
            (user, date, message) = self.filter(filter, node, cl, patchfile)

        if log:
            # we don't translate messages inserted into commits
            message += '\n(transplanted from %s)' % nodemod.hex(node)

        self.ui.status(_('applying %s\n') % nodemod.short(node))
        self.ui.note('%s %s\n%s\n' % (user, date, message))

        if not patchfile and not merge:
            raise error.Abort(_('can only omit patchfile if merging'))
        if patchfile:
            try:
                files = set()
                patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
                files = list(files)
            except Exception as inst:
                seriespath = os.path.join(self.path, 'series')
                if os.path.exists(seriespath):
                    os.unlink(seriespath)
                p1 = repo.dirstate.p1()
                p2 = node
                self.log(user, date, message, p1, p2, merge=merge)
                self.ui.write(stringutil.forcebytestr(inst) + '\n')
                raise TransplantError(_('fix up the working directory and run '
                                        'hg transplant --continue'))
        else:
            files = None
        if merge:
            p1, p2 = repo.dirstate.parents()
            repo.setparents(p1, node)
            m = match.always(repo.root, '')
        else:
            m = match.exact(repo.root, '', files)

        n = repo.commit(message, user, date, extra=extra, match=m,
                        editor=self.getcommiteditor())
        if not n:
            self.ui.warn(_('skipping emptied changeset %s\n') %
                           nodemod.short(node))
            return None
        if not merge:
            self.transplants.set(n, node)

        return n
Example #8
def _handleunavailable(ui, state, ex):
    """Exception handler for Watchman interaction exceptions"""
    if isinstance(ex, watchmanclient.Unavailable):
        # experimental config: fsmonitor.verbose
        if ex.warn and ui.configbool(b'fsmonitor', b'verbose'):
            if b'illegal_fstypes' not in stringutil.forcebytestr(ex):
                ui.warn(stringutil.forcebytestr(ex) + b'\n')
        if ex.invalidate:
            state.invalidate()
        # experimental config: fsmonitor.verbose
        if ui.configbool(b'fsmonitor', b'verbose'):
            ui.log(
                b'fsmonitor',
                b'Watchman unavailable: %s\n',
                stringutil.forcebytestr(ex.msg),
            )
    else:
        ui.log(
            b'fsmonitor',
            b'Watchman exception: %s\n',
            stringutil.forcebytestr(ex),
        )
Example #9
def do_relink(src, dst, files, ui):
    def relinkfile(src, dst):
        bak = dst + b'.bak'
        os.rename(dst, bak)
        try:
            util.oslink(src, dst)
        except OSError:
            os.rename(bak, dst)
            raise
        os.remove(bak)

    CHUNKLEN = 65536
    relinked = 0
    savedbytes = 0

    progress = ui.makeprogress(_(b'relinking'),
                               unit=_(b'files'),
                               total=len(files))
    pos = 0
    for f, sz in files:
        pos += 1
        source = os.path.join(src, f)
        tgt = os.path.join(dst, f)
        # Binary mode, so that read() works correctly, especially on Windows
        sfp = open(source, b'rb')
        dfp = open(tgt, b'rb')
        sin = sfp.read(CHUNKLEN)
        while sin:
            din = dfp.read(CHUNKLEN)
            if sin != din:
                break
            sin = sfp.read(CHUNKLEN)
        sfp.close()
        dfp.close()
        if sin:
            ui.debug(b'not linkable: %s\n' % f)
            continue
        try:
            relinkfile(source, tgt)
            progress.update(pos, item=f)
            relinked += 1
            savedbytes += sz
        except OSError as inst:
            ui.warn(b'%s: %s\n' % (tgt, stringutil.forcebytestr(inst)))

    progress.complete()

    ui.status(
        _(b'relinked %d files (%s reclaimed)\n') %
        (relinked, util.bytecount(savedbytes)))
Example #10
@contextlib.contextmanager  # used via `with logservicecall(...)`; see Example #17
def logservicecall(logger, service, **kwargs):
    start = time.time()
    logger(service, eventtype=b'start', **kwargs)
    try:
        yield
        logger(service,
               eventtype=b'success',
               elapsedms=(time.time() - start) * 1000,
               **kwargs)
    except Exception as e:
        logger(service,
               eventtype=b'failure',
               elapsedms=(time.time() - start) * 1000,
               errormsg=stringutil.forcebytestr(e),
               **kwargs)
        raise
Example #11
def hook(ui, repo, hooktype, node=None, **kwargs):
    """add comment to bugzilla for each changeset that refers to a
    bugzilla bug id. only add a comment once per bug, so same change
    seen multiple times does not fill bug with duplicate data."""
    if node is None:
        raise error.Abort(
            _(b'hook type %s does not pass a changeset id') % hooktype
        )
    try:
        bz = bugzilla(ui, repo)
        ctx = repo[node]
        bugs = bz.find_bugs(ctx)
        if bugs:
            for bug in bugs:
                bz.update(bug, bugs[bug], ctx)
            bz.notify(bugs, stringutil.email(ctx.user()))
    except Exception as e:
        raise error.Abort(_(b'Bugzilla error: %s') % stringutil.forcebytestr(e))
Example #12
def handlechangegroup_widen(op, inpart):
    """Changegroup exchange handler which restores temporarily-stripped nodes"""
    # We saved a bundle with stripped node data we must now restore.
    # This approach is based on mercurial/repair.py@6ee26a53c111.
    repo = op.repo
    ui = op.ui

    chgrpfile = op._widen_bundle
    del op._widen_bundle
    vfs = repo.vfs

    ui.note(_(b"adding branch\n"))
    f = vfs.open(chgrpfile, b"rb")
    try:
        gen = exchange.readbundle(ui, f, chgrpfile, vfs)
        # silence internal shuffling chatter
        override = {(b'ui', b'quiet'): True}
        if ui.verbose:
            override = {}
        with ui.configoverride(override):
            if isinstance(gen, bundle2.unbundle20):
                with repo.transaction(b'strip') as tr:
                    bundle2.processbundle(repo, gen, lambda: tr)
            else:
                gen.apply(
                    repo, b'strip', b'bundle:' + vfs.join(chgrpfile), True
                )
    finally:
        f.close()

    # remove undo files
    for undovfs, undofile in repo.undofiles():
        try:
            undovfs.unlink(undofile)
        except OSError as e:
            if e.errno != errno.ENOENT:
                ui.warn(
                    _(b'error removing %s: %s\n')
                    % (undovfs.join(undofile), stringutil.forcebytestr(e))
                )

    # Remove partial backup only if there were no exceptions
    op._widen_uninterr.__exit__(None, None, None)
    vfs.unlink(chgrpfile)
Example #13
    def _lookup(repo, proto, key):
        localkey = encoding.tolocal(key)

        if isinstance(localkey, str) and _scratchbranchmatcher(localkey):
            scratchnode = repo.bundlestore.index.getnode(localkey)
            if scratchnode:
                return "%d %s\n" % (1, scratchnode)
            else:
                return "%d %s\n" % (0, 'scratch branch %s not found' % localkey)
        else:
            try:
                r = hex(repo.lookup(localkey))
                return "%d %s\n" % (1, r)
            except Exception as inst:
                if repo.bundlestore.index.getbundle(localkey):
                    return "%d %s\n" % (1, localkey)
                else:
                    r = stringutil.forcebytestr(inst)
                    return "%d %s\n" % (0, r)
Example #14
def _urlerrorreason(urlerror):
    '''Create a friendly message for the given URLError to be used in an
    LfsRemoteError message.
    '''
    inst = urlerror

    if isinstance(urlerror.reason, Exception):
        inst = urlerror.reason

    if util.safehasattr(inst, b'reason'):
        try:  # usually it is in the form (errno, strerror)
            reason = inst.reason.args[1]
        except (AttributeError, IndexError):
            # it might be anything, for example a string
            reason = inst.reason
        if isinstance(reason, pycompat.unicode):
            # SSLError of Python 2.7.9 contains a unicode
            reason = encoding.unitolocal(reason)
        return reason
    elif getattr(inst, "strerror", None):
        return encoding.strtolocal(inst.strerror)
    else:
        return stringutil.forcebytestr(urlerror)
Example #15
    def _batchrequest(self, pointers, action):
        """Get metadata about objects pointed by pointers for given action

        Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
        See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
        """
        objects = [
            {r'oid': pycompat.strurl(p.oid()), r'size': p.size()}
            for p in pointers
        ]
        requestdata = pycompat.bytesurl(
            json.dumps(
                {r'objects': objects, r'operation': pycompat.strurl(action),}
            )
        )
        url = b'%s/objects/batch' % self.baseurl
        batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
        batchreq.add_header(r'Accept', r'application/vnd.git-lfs+json')
        batchreq.add_header(r'Content-Type', r'application/vnd.git-lfs+json')
        try:
            with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
                rawjson = rsp.read()
        except util.urlerr.httperror as ex:
            hints = {
                400: _(
                    b'check that lfs serving is enabled on %s and "%s" is '
                    b'supported'
                )
                % (self.baseurl, action),
                404: _(b'the "lfs.url" config may be used to override %s')
                % self.baseurl,
            }
            hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
            raise LfsRemoteError(
                _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
                hint=hint,
            )
        except util.urlerr.urlerror as ex:
            hint = (
                _(b'the "lfs.url" config may be used to override %s')
                % self.baseurl
            )
            raise LfsRemoteError(
                _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
            )
        try:
            response = pycompat.json_loads(rawjson)
        except ValueError:
            raise LfsRemoteError(
                _(b'LFS server returns invalid JSON: %s')
                % rawjson.encode("utf-8")
            )

        if self.ui.debugflag:
            self.ui.debug(b'Status: %d\n' % rsp.status)
            # lfs-test-server and hg serve return headers in different order
            headers = pycompat.bytestr(rsp.info()).strip()
            self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))

            if r'objects' in response:
                response[r'objects'] = sorted(
                    response[r'objects'], key=lambda p: p[r'oid']
                )
            self.ui.debug(
                b'%s\n'
                % pycompat.bytesurl(
                    json.dumps(
                        response,
                        indent=2,
                        separators=(r'', r': '),
                        sort_keys=True,
                    )
                )
            )

        def encodestr(x):
            if isinstance(x, pycompat.unicode):
                return x.encode('utf-8')
            return x

        return pycompat.rapply(encodestr, response)
Example #16
    def _basictransfer(self, obj, action, localstore):
        """Download or upload a single object using basic transfer protocol

        obj: dict, an object description returned by batch API
        action: string, one of ['upload', 'download']
        localstore: blobstore.local

        See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
        basic-transfers.md
        """
        oid = obj[b'oid']
        href = obj[b'actions'][action].get(b'href')
        headers = obj[b'actions'][action].get(b'header', {}).items()

        request = util.urlreq.request(pycompat.strurl(href))
        if action == b'upload':
            # If uploading blobs, read data from local blobstore.
            if not localstore.verify(oid):
                raise error.Abort(
                    _(b'detected corrupt lfs object: %s') % oid,
                    hint=_(b'run hg verify'),
                )
            request.data = filewithprogress(localstore.open(oid), None)
            request.get_method = lambda: r'PUT'
            request.add_header(r'Content-Type', r'application/octet-stream')
            request.add_header(r'Content-Length', len(request.data))

        for k, v in headers:
            request.add_header(pycompat.strurl(k), pycompat.strurl(v))

        response = b''
        try:
            with contextlib.closing(self.urlopener.open(request)) as req:
                ui = self.ui  # Shorten debug lines
                if self.ui.debugflag:
                    ui.debug(b'Status: %d\n' % req.status)
                    # lfs-test-server and hg serve return headers in different
                    # order
                    headers = pycompat.bytestr(req.info()).strip()
                    ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))

                if action == b'download':
                    # If downloading blobs, store downloaded data to local
                    # blobstore
                    localstore.download(oid, req)
                else:
                    while True:
                        data = req.read(1048576)
                        if not data:
                            break
                        response += data
                    if response:
                        ui.debug(b'lfs %s response: %s' % (action, response))
        except util.urlerr.httperror as ex:
            if self.ui.debugflag:
                self.ui.debug(
                    b'%s: %s\n' % (oid, ex.read())
                )  # XXX: also bytes?
            raise LfsRemoteError(
                _(b'LFS HTTP error: %s (oid=%s, action=%s)')
                % (stringutil.forcebytestr(ex), oid, action)
            )
        except util.urlerr.urlerror as ex:
            hint = _(b'attempted connection to %s') % pycompat.bytesurl(
                util.urllibcompat.getfullurl(request)
            )
            raise LfsRemoteError(
                _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
            )
Example #17
def storebundle(op, params, bundlefile):
    log = _getorcreateinfinitepushlogger(op)
    parthandlerstart = time.time()
    log(scratchbranchparttype, eventtype=b'start')
    index = op.repo.bundlestore.index
    store = op.repo.bundlestore.store
    op.records.add(scratchbranchparttype + b'_skippushkey', True)

    bundle = None
    try:  # guards bundle
        bundlepath = b"bundle:%s+%s" % (op.repo.root, bundlefile)
        bundle = hg.repository(op.repo.ui, bundlepath)

        bookmark = params.get(b'bookmark')
        bookprevnode = params.get(b'bookprevnode', b'')
        force = params.get(b'force')

        if bookmark:
            oldnode = index.getnode(bookmark)
        else:
            oldnode = None
        bundleheads = bundle.revs(b'heads(bundle())')
        if bookmark and len(bundleheads) > 1:
            raise error.Abort(
                _(b'cannot push more than one head to a scratch branch'))

        revs = _getrevs(bundle, oldnode, force, bookmark)

        # Notify the user of what is being pushed
        plural = b's' if len(revs) > 1 else b''
        op.repo.ui.warn(_(b"pushing %d commit%s:\n") % (len(revs), plural))
        maxoutput = 10
        for i in range(0, min(len(revs), maxoutput)):
            firstline = bundle[revs[i]].description().split(b'\n')[0][:50]
            op.repo.ui.warn(b"    %s  %s\n" % (revs[i], firstline))

        if len(revs) > maxoutput + 1:
            op.repo.ui.warn(b"    ...\n")
            firstline = bundle[revs[-1]].description().split(b'\n')[0][:50]
            op.repo.ui.warn(b"    %s  %s\n" % (revs[-1], firstline))

        nodesctx = [bundle[rev] for rev in revs]
        inindex = lambda rev: bool(index.getbundle(bundle[rev].hex()))
        if bundleheads:
            newheadscount = sum(not inindex(rev) for rev in bundleheads)
        else:
            newheadscount = 0
        # If there's a bookmark specified, there should be only one head,
        # so we choose the last node, which will be that head.
        # If a bug or malicious client allows there to be a bookmark
        # with multiple heads, we will place the bookmark on the last head.
        bookmarknode = nodesctx[-1].hex() if nodesctx else None
        key = None
        if newheadscount:
            with open(bundlefile, b'rb') as f:
                bundledata = f.read()
                with logservicecall(log,
                                    b'bundlestore',
                                    bundlesize=len(bundledata)):
                    bundlesizelimit = 100 * 1024 * 1024  # 100 MB
                    if len(bundledata) > bundlesizelimit:
                        error_msg = (b'bundle is too big: %d bytes. ' +
                                     b'max allowed size is 100 MB')
                        raise error.Abort(error_msg % (len(bundledata), ))
                    key = store.write(bundledata)

        with logservicecall(log, b'index', newheadscount=newheadscount), index:
            if key:
                index.addbundle(key, nodesctx)
            if bookmark:
                index.addbookmark(bookmark, bookmarknode)
                _maybeaddpushbackpart(op, bookmark, bookmarknode, bookprevnode,
                                      params)
        log(
            scratchbranchparttype,
            eventtype=b'success',
            elapsedms=(time.time() - parthandlerstart) * 1000,
        )

    except Exception as e:
        log(
            scratchbranchparttype,
            eventtype=b'failure',
            elapsedms=(time.time() - parthandlerstart) * 1000,
            errormsg=stringutil.forcebytestr(e),
        )
        raise
    finally:
        if bundle:
            bundle.close()
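All of the examples above share the same pattern: an exception (or other arbitrary object) is converted to bytes with stringutil.forcebytestr() so it can be interpolated into a bytes-formatted message for ui output or error.Abort. The snippet below is a minimal, self-contained sketch of that pattern in plain Python; its forcebytestr() is a simplified stand-in for Mercurial's helper, not the actual implementation.

def forcebytestr(obj):
    """Best-effort conversion of an arbitrary object (e.g. an exception) to bytes."""
    try:
        return str(obj).encode('ascii')
    except UnicodeEncodeError:
        # Non-ASCII text; fall back to UTF-8 (Mercurial's real helper uses the
        # local string encoding instead).
        return str(obj).encode('utf-8')

try:
    open('/nonexistent/path', 'rb')
except IOError as e:
    # Same shape as the examples above: a bytes message %-formatted with the
    # byte-stringified exception.
    msg = b'could not open file: %s\n' % forcebytestr(e)
    print(msg.decode('utf-8', 'replace'))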