Example #1
def build_audit_path(repo):
    # Under Mercurial's demandimport, a module that failed to import only
    # raises ImportError when one of its attributes is first accessed, which
    # is what the fallback chain below relies on.
    try:
        # Mercurial 2.9 and later
        return pathutil.pathauditor(repo.root)
    except ImportError:
        try:
            # Mercurial 1.9 to 2.9
            return scmutil.pathauditor(repo.root)
        except ImportError:
            # Mercurial < 1.9
            return getattr(repo.opener, 'audit_path',
                           util.path_auditor(repo.root))
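
The auditor objects returned here are callable in each of these Mercurial versions: invoking one with a working-directory-relative path raises if the path escapes the repository root, points into .hg, or crosses a symlinked directory. A minimal usage sketch (audited_join and its call pattern are illustrative, not part of the original code):

def audited_join(repo, relpath):
    # Validate the relative path before building an absolute name under the
    # working directory; unsafe paths raise inside the auditor.
    auditor = build_audit_path(repo)
    auditor(relpath)
    return repo.wjoin(relpath)
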
Example #2
class gitdirstate(dirstate.dirstate):
    @dirstate.rootcache('.hgignore')
    def _ignore(self):
        files = [self._join('.hgignore')]
        for name, path in self._ui.configitems("ui"):
            if name == 'ignore' or name.startswith('ignore.'):
                files.append(util.expandpath(path))
        patterns = []
        # Only use .gitignore if there's no .hgignore
        try:
            fp = open(files[0])
            fp.close()
        except IOError:
            fns = self._finddotgitignores()
            for fn in fns:
                d = os.path.dirname(fn)
                fn = self.pathto(fn)
                fp = open(fn)
                try:
                    pats, warnings = gignorepats(None, fp, root=d)
                finally:
                    fp.close()
                for warning in warnings:
                    self._ui.warn("%s: %s\n" % (fn, warning))
                patterns.extend(pats)
        return ignore.ignore(self._root,
                             files,
                             self._ui.warn,
                             extrapatterns=patterns)

    def _finddotgitignores(self):
        """A copy of dirstate.walk. This is called from the new _ignore method,
        which is called by dirstate.walk, which would cause infinite recursion, 
        except _finddotgitignores calls the superclass _ignore directly."""
        match = matchmod.match(self._root, self.getcwd(),
                               ['relglob:.gitignore'])
        #TODO: need subrepos?
        subrepos = []
        unknown = True
        ignored = False
        full = True

        def fwarn(f, msg):
            self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
            return False

        ignore = super(gitdirstate, self)._ignore
        dirignore = self._dirignore
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif not unknown:
            # if unknown and ignored are False, skip step 2
            ignore = util.always
            dirignore = util.always

        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = osutil.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if matchfn == match.exact:  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.files() and not match.anypats():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            skipstep3 = False
        else:
            normalize = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d)]
        wadd = work.append

        # step 2: visit subdirectories
        while work:
            nd = work.pop()
            skip = None
            if nd == '.':
                nd = ''
            else:
                skip = '.hg'
            try:
                entries = listdir(join(nd), stat=True, skip=skip)
            except OSError, inst:
                if inst.errno in (errno.EACCES, errno.ENOENT):
                    fwarn(nd, inst.strerror)
                    continue
                raise
            for f, kind, st in entries:
                if normalize:
                    nf = normalize(nd and (nd + "/" + f) or f, True, True)
                else:
                    nf = nd and (nd + "/" + f) or f
                if nf not in results:
                    if kind == dirkind:
                        if not ignore(nf):
                            if matchtdir:
                                matchtdir(nf)
                            wadd(nf)
                        if nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None
                    elif kind == regkind or kind == lnkkind:
                        if nf in dmap:
                            if matchalways or matchfn(nf):
                                results[nf] = st
                        elif (matchalways or matchfn(nf)) and not ignore(nf):
                            results[nf] = st
                    elif nf in dmap and (matchalways or matchfn(nf)):
                        results[nf] = None

        for s in subrepos:
            del results[s]
        del results['.hg']

        # step 3: report unseen items in the dmap hash
        if not skipstep3 and not exact:
            if not results and matchalways:
                visit = dmap.keys()
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked the full directory tree
                # above. So if a file is not seen, it was either a) not
                # matching matchfn, b) ignored, c) missing, or d) under a
                # symlink directory.
                audit_path = pathutil.pathauditor(self._root)

                for nf in iter(visit):
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    if audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat everything we missed.
                nf = iter(visit).next
                for st in util.statfiles([join(i) for i in visit]):
                    results[nf()] = st
        return results.keys()
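
The method above mirrors dirstate.walk's three-step structure: step 1 resolves explicitly named files, step 2 pops directories off a work list and scans them while honoring the ignore predicate, and step 3 reports dirstate entries the scan never reached. A self-contained miniature of the traversal idea, independent of Mercurial's APIs (find_files, is_ignored, and wanted_name are illustrative names, not part of the original code):

import os
import stat

def find_files(root, is_ignored, wanted_name):
    # Illustrative traversal only: start from the root (step 1 stand-in),
    # pop directories off a work list and scan them (step 2), skipping
    # ignored directories and '.hg'. Step 3 (reporting tracked-but-unseen
    # dirstate entries) has no equivalent here because there is no dmap.
    results = []
    work = ['']
    while work:
        nd = work.pop()
        for name in os.listdir(os.path.join(root, nd) if nd else root):
            nf = os.path.join(nd, name) if nd else name
            mode = os.lstat(os.path.join(root, nf)).st_mode
            if stat.S_ISDIR(mode):
                if name != '.hg' and not is_ignored(nf):
                    work.append(nf)
            elif name == wanted_name:
                results.append(nf)
    return sorted(results)

For instance, find_files(repo_root, lambda p: False, '.gitignore') returns every .gitignore below repo_root, much as _finddotgitignores collects candidates before handing them to gignorepats.
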
Example #3
def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True):
    """Replacement for dirstate.walk, hooking into Watchman.

    Whenever full is False, ignored is False, and the Watchman client is
    available, use Watchman combined with saved state to possibly return only a
    subset of files."""

    def bail():
        return orig(match, subrepos, unknown, ignored, full=True)

    if full or ignored or not self._watchmanclient.available():
        return bail()
    state = self._fsmonitorstate
    clock, ignorehash, notefiles = state.get()
    if not clock:
        if state.walk_on_invalidate:
            return bail()
        # Initial NULL clock value, see
        # https://facebook.github.io/watchman/docs/clockspec.html
        clock = "c:0:0"
        notefiles = []

    def fwarn(f, msg):
        self._ui.warn("%s: %s\n" % (self.pathto(f), msg))
        return False

    def badtype(mode):
        kind = _("unknown")
        if stat.S_ISCHR(mode):
            kind = _("character device")
        elif stat.S_ISBLK(mode):
            kind = _("block device")
        elif stat.S_ISFIFO(mode):
            kind = _("fifo")
        elif stat.S_ISSOCK(mode):
            kind = _("socket")
        elif stat.S_ISDIR(mode):
            kind = _("directory")
        return _("unsupported file type (type is %s)") % kind

    ignore = self._ignore
    dirignore = self._dirignore
    if unknown:
        if _hashignore(ignore) != ignorehash and clock != "c:0:0":
            # ignore list changed -- can't rely on Watchman state any more
            if state.walk_on_invalidate:
                return bail()
            notefiles = []
            clock = "c:0:0"
    else:
        # always ignore
        ignore = util.always
        dirignore = util.always

    matchfn = match.matchfn
    matchalways = match.always()
    dmap = self._map
    nonnormalset = getattr(self, "_nonnormalset", None)

    copymap = self._copymap
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    normcase = util.normcase
    fresh_instance = False

    exact = skipstep3 = False
    if matchfn == match.exact:  # match.exact
        exact = True
        dirignore = util.always  # skip step 2
    elif match.files() and not match.anypats():  # match.match, no patterns
        skipstep3 = True

    if not exact and self._checkcase:
        # note that even though we could receive directory entries, we're only
        # interested in checking if a file with the same name exists. So only
        # normalize files if possible.
        normalize = self._normalizefile
        skipstep3 = False
    else:
        normalize = None

    # step 1: find all explicit files
    results, work, dirsnotfound = self._walkexplicit(match, subrepos)

    skipstep3 = skipstep3 and not (work or dirsnotfound)
    work = [d for d in work if not dirignore(d[0])]

    if not work and (exact or skipstep3):
        for s in subrepos:
            del results[s]
        del results[".hg"]
        return results

    # step 2: query Watchman
    try:
        # Use the user-configured timeout for the query.
        # Add a little slack over the top of the user query to allow for
        # overheads while transferring the data
        self._watchmanclient.settimeout(state.timeout + 0.1)
        result = self._watchmanclient.command(
            "query",
            {
                "fields": ["mode", "mtime", "size", "exists", "name"],
                "since": clock,
                "expression": ["not", ["anyof", ["dirname", ".hg"], ["name", ".hg", "wholename"]]],
                "sync_timeout": int(state.timeout * 1000),
                "empty_on_fresh_instance": state.walk_on_invalidate,
            },
        )
    except Exception as ex:
        _handleunavailable(self._ui, state, ex)
        self._watchmanclient.clearconnection()
        return bail()
    else:
        # We need to propagate the last observed clock up so that we
        # can use it for our next query
        state.setlastclock(result["clock"])
        if result["is_fresh_instance"]:
            if state.walk_on_invalidate:
                state.invalidate()
                return bail()
            fresh_instance = True
            # Ignore any prior notable files from the state info
            notefiles = []

    # for file paths which require normalization and we encounter a case
    # collision, we store our own foldmap
    if normalize:
        foldmap = dict((normcase(k), k) for k in results)

    switch_slashes = os.sep == "\\"
    # The order of the results is, strictly speaking, undefined.
    # For case changes on a case insensitive filesystem we may receive
    # two entries, one with exists=True and another with exists=False.
    # The exists=True entries in the same response should be interpreted
    # as being happens-after the exists=False entries due to the way that
    # Watchman tracks files.  We use this property to reconcile deletes
    # for name case changes.
    for entry in result["files"]:
        fname = entry["name"]
        if switch_slashes:
            fname = fname.replace("\\", "/")
        if normalize:
            normed = normcase(fname)
            fname = normalize(fname, True, True)
            foldmap[normed] = fname
        fmode = entry["mode"]
        fexists = entry["exists"]
        kind = getkind(fmode)

        if not fexists:
            # if marked as deleted and we don't already have a change
            # record, mark it as deleted.  If we already have an entry
            # for fname then it was either part of walkexplicit or was
            # an earlier result that was a case change
            if fname not in results and fname in dmap and (matchalways or matchfn(fname)):
                results[fname] = None
        elif kind == dirkind:
            if fname in dmap and (matchalways or matchfn(fname)):
                results[fname] = None
        elif kind == regkind or kind == lnkkind:
            if fname in dmap:
                if matchalways or matchfn(fname):
                    results[fname] = entry
            elif (matchalways or matchfn(fname)) and not ignore(fname):
                results[fname] = entry
        elif fname in dmap and (matchalways or matchfn(fname)):
            results[fname] = None

    # step 3: query notable files we don't already know about
    # XXX try not to iterate over the entire dmap
    if normalize:
        # any notable files that have changed case will already be handled
        # above, so just check membership in the foldmap
        notefiles = set((normalize(f, True, True) for f in notefiles if normcase(f) not in foldmap))
    visit = set((f for f in notefiles if (f not in results and matchfn(f) and (f in dmap or not ignore(f)))))

    if nonnormalset is not None and not fresh_instance:
        if matchalways:
            visit.update(f for f in nonnormalset if f not in results)
            visit.update(f for f in copymap if f not in results)
        else:
            visit.update(f for f in nonnormalset if f not in results and matchfn(f))
            visit.update(f for f in copymap if f not in results and matchfn(f))
    else:
        if matchalways:
            visit.update(
                f for f, st in dmap.iteritems() if (f not in results and (st[2] < 0 or st[0] != "n" or fresh_instance))
            )
            visit.update(f for f in copymap if f not in results)
        else:
            visit.update(
                f
                for f, st in dmap.iteritems()
                if (f not in results and (st[2] < 0 or st[0] != "n" or fresh_instance) and matchfn(f))
            )
            visit.update(f for f in copymap if f not in results and matchfn(f))

    audit = pathutil.pathauditor(self._root).check
    auditpass = [f for f in visit if audit(f)]
    auditpass.sort()
    auditfail = visit.difference(auditpass)
    for f in auditfail:
        results[f] = None

    nf = iter(auditpass).next
    for st in util.statfiles([join(f) for f in auditpass]):
        f = nf()
        if st or f in dmap:
            results[f] = st

    for s in subrepos:
        del results[s]
    del results[".hg"]
    return results
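
overridewalk receives the original dirstate.walk as its first argument, so it has to be installed as a wrapper around an existing dirstate. One plausible way to wire it up, sketched here under the assumption that self._watchmanclient and self._fsmonitorstate have already been attached to the dirstate instance (hookdirstate and watchmandirstate are illustrative names, not the fsmonitor extension's actual code):

def hookdirstate(dirstate):
    # Illustrative only: replace the instance's class with a subclass whose
    # walk() delegates to overridewalk, passing the original implementation
    # as `orig` so that bail() can fall back to a full rewalk.
    class watchmandirstate(dirstate.__class__):
        def walk(self, match, subrepos, unknown, ignored, full=True):
            orig = super(watchmandirstate, self).walk
            return overridewalk(orig, self, match, subrepos, unknown,
                                ignored, full=full)

    dirstate.__class__ = watchmandirstate
    return dirstate
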
Example #4
def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True):
    '''Replacement for dirstate.walk, hooking into Watchman.

    Whenever full is False, ignored is False, and the Watchman client is
    available, use Watchman combined with saved state to possibly return only a
    subset of files.'''
    def bail():
        return orig(match, subrepos, unknown, ignored, full=True)

    if full or ignored or not self._watchmanclient.available():
        return bail()
    state = self._fsmonitorstate
    clock, ignorehash, notefiles = state.get()
    if not clock:
        if state.walk_on_invalidate:
            return bail()
        # Initial NULL clock value, see
        # https://facebook.github.io/watchman/docs/clockspec.html
        clock = 'c:0:0'
        notefiles = []

    def fwarn(f, msg):
        self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
        return False

    def badtype(mode):
        kind = _('unknown')
        if stat.S_ISCHR(mode):
            kind = _('character device')
        elif stat.S_ISBLK(mode):
            kind = _('block device')
        elif stat.S_ISFIFO(mode):
            kind = _('fifo')
        elif stat.S_ISSOCK(mode):
            kind = _('socket')
        elif stat.S_ISDIR(mode):
            kind = _('directory')
        return _('unsupported file type (type is %s)') % kind

    ignore = self._ignore
    dirignore = self._dirignore
    if unknown:
        if _hashignore(ignore) != ignorehash and clock != 'c:0:0':
            # ignore list changed -- can't rely on Watchman state any more
            if state.walk_on_invalidate:
                return bail()
            notefiles = []
            clock = 'c:0:0'
    else:
        # always ignore
        ignore = util.always
        dirignore = util.always

    matchfn = match.matchfn
    matchalways = match.always()
    dmap = self._map
    nonnormalset = getattr(self, '_nonnormalset', None)

    copymap = self._copymap
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    normcase = util.normcase
    fresh_instance = False

    exact = skipstep3 = False
    if matchfn == match.exact:  # match.exact
        exact = True
        dirignore = util.always  # skip step 2
    elif match.files() and not match.anypats():  # match.match, no patterns
        skipstep3 = True

    if not exact and self._checkcase:
        # note that even though we could receive directory entries, we're only
        # interested in checking if a file with the same name exists. So only
        # normalize files if possible.
        normalize = self._normalizefile
        skipstep3 = False
    else:
        normalize = None

    # step 1: find all explicit files
    results, work, dirsnotfound = self._walkexplicit(match, subrepos)

    skipstep3 = skipstep3 and not (work or dirsnotfound)
    work = [d for d in work if not dirignore(d[0])]

    if not work and (exact or skipstep3):
        for s in subrepos:
            del results[s]
        del results['.hg']
        return results

    # step 2: query Watchman
    try:
        # Use the user-configured timeout for the query.
        # Add a little slack over the top of the user query to allow for
        # overheads while transferring the data
        self._watchmanclient.settimeout(state.timeout + 0.1)
        result = self._watchmanclient.command('query', {
            'fields': ['mode', 'mtime', 'size', 'exists', 'name'],
            'since': clock,
            'expression': [
                'not', [
                    'anyof', ['dirname', '.hg'],
                    ['name', '.hg', 'wholename']
                ]
            ],
            'sync_timeout': int(state.timeout * 1000),
            'empty_on_fresh_instance': state.walk_on_invalidate,
        })
    except Exception as ex:
        _handleunavailable(self._ui, state, ex)
        self._watchmanclient.clearconnection()
        return bail()
    else:
        # We need to propagate the last observed clock up so that we
        # can use it for our next query
        state.setlastclock(result['clock'])
        if result['is_fresh_instance']:
            if state.walk_on_invalidate:
                state.invalidate()
                return bail()
            fresh_instance = True
            # Ignore any prior notable files from the state info
            notefiles = []

    # for file paths which require normalization and we encounter a case
    # collision, we store our own foldmap
    if normalize:
        foldmap = dict((normcase(k), k) for k in results)

    switch_slashes = os.sep == '\\'
    # The order of the results is, strictly speaking, undefined.
    # For case changes on a case insensitive filesystem we may receive
    # two entries, one with exists=True and another with exists=False.
    # The exists=True entries in the same response should be interpreted
    # as being happens-after the exists=False entries due to the way that
    # Watchman tracks files.  We use this property to reconcile deletes
    # for name case changes.
    for entry in result['files']:
        fname = entry['name']
        if switch_slashes:
            fname = fname.replace('\\', '/')
        if normalize:
            normed = normcase(fname)
            fname = normalize(fname, True, True)
            foldmap[normed] = fname
        fmode = entry['mode']
        fexists = entry['exists']
        kind = getkind(fmode)

        if not fexists:
            # if marked as deleted and we don't already have a change
            # record, mark it as deleted.  If we already have an entry
            # for fname then it was either part of walkexplicit or was
            # an earlier result that was a case change
            if fname not in results and fname in dmap and (
                    matchalways or matchfn(fname)):
                results[fname] = None
        elif kind == dirkind:
            if fname in dmap and (matchalways or matchfn(fname)):
                results[fname] = None
        elif kind == regkind or kind == lnkkind:
            if fname in dmap:
                if matchalways or matchfn(fname):
                    results[fname] = entry
            elif (matchalways or matchfn(fname)) and not ignore(fname):
                results[fname] = entry
        elif fname in dmap and (matchalways or matchfn(fname)):
            results[fname] = None

    # step 3: query notable files we don't already know about
    # XXX try not to iterate over the entire dmap
    if normalize:
        # any notable files that have changed case will already be handled
        # above, so just check membership in the foldmap
        notefiles = set((normalize(f, True, True) for f in notefiles
                         if normcase(f) not in foldmap))
    visit = set((f for f in notefiles if (f not in results and matchfn(f)
                                          and (f in dmap or not ignore(f)))))

    if nonnormalset is not None and not fresh_instance:
        if matchalways:
            visit.update(f for f in nonnormalset if f not in results)
            visit.update(f for f in copymap if f not in results)
        else:
            visit.update(f for f in nonnormalset
                         if f not in results and matchfn(f))
            visit.update(f for f in copymap
                         if f not in results and matchfn(f))
    else:
        if matchalways:
            visit.update(f for f, st in dmap.iteritems()
                         if (f not in results and
                             (st[2] < 0 or st[0] != 'n' or fresh_instance)))
            visit.update(f for f in copymap if f not in results)
        else:
            visit.update(f for f, st in dmap.iteritems()
                         if (f not in results and
                             (st[2] < 0 or st[0] != 'n' or fresh_instance)
                             and matchfn(f)))
            visit.update(f for f in copymap
                         if f not in results and matchfn(f))

    audit = pathutil.pathauditor(self._root).check
    auditpass = [f for f in visit if audit(f)]
    auditpass.sort()
    auditfail = visit.difference(auditpass)
    for f in auditfail:
        results[f] = None

    nf = iter(auditpass).next
    for st in util.statfiles([join(f) for f in auditpass]):
        f = nf()
        if st or f in dmap:
            results[f] = st

    for s in subrepos:
        del results[s]
    del results['.hg']
    return results
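
Step 2 above issues the query through the extension's client wrapper; the same request can be sent directly with the pywatchman bindings. A standalone sketch (query_changes_since, the 10-second timeout, and the omission of watch-project relative_path handling are all illustrative choices, not part of the original code):

import pywatchman

def query_changes_since(root, clock):
    # Ask Watchman for everything that changed under `root` since `clock`,
    # excluding the .hg directory, mirroring the query built above.
    client = pywatchman.client(timeout=10.0)
    try:
        watch = client.query('watch-project', root)['watch']
        return client.query('query', watch, {
            'fields': ['mode', 'mtime', 'size', 'exists', 'name'],
            'since': clock,
            'expression': ['not', ['anyof', ['dirname', '.hg'],
                                   ['name', '.hg', 'wholename']]],
        })
    finally:
        client.close()

The returned dictionary carries the 'files', 'clock', and 'is_fresh_instance' keys that overridewalk consumes.
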
Example #5
    def _finddotgitignores(self):
        """A copy of dirstate.walk. This is called from the new _ignore method,
        which is called by dirstate.walk, which would cause infinite recursion,
        except _finddotgitignores calls the superclass _ignore directly."""
        match = matchmod.match(self._root, self.getcwd(),
                               [b'relglob:.gitignore'])
        # TODO: need subrepos?
        subrepos = []
        unknown = True
        ignored = False

        def fwarn(f, msg):
            self._ui.warn(b'%s: %s\n' % (
                self.pathto(f),
                pycompat.sysbytes(msg),
            ))
            return False

        ignore = super()._ignore
        dirignore = self._dirignore
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif not unknown:
            # if unknown and ignored are False, skip step 2
            ignore = util.always
            dirignore = util.always

        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if matchfn == match.exact:  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.files() and not match.anypats():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            skipstep3 = False
        else:
            normalize = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [nd for nd, d in work if not dirignore(d)]
        wadd = work.append

        # step 2: visit subdirectories
        while work:
            nd = work.pop()
            skip = None
            if nd != b'':
                skip = b'.hg'
            try:
                entries = util.listdir(join(nd), stat=True, skip=skip)
            except OSError as inst:
                if inst.errno in (errno.EACCES, errno.ENOENT):
                    fwarn(nd, inst.strerror)
                    continue
                raise
            for f, kind, st in entries:
                if normalize:
                    nf = normalize(nd and (nd + b"/" + f) or f, True, True)
                else:
                    nf = nd and (nd + b"/" + f) or f
                if nf not in results:
                    if kind == dirkind:
                        if not ignore(nf):
                            if matchtdir:
                                matchtdir(nf)
                            wadd(nf)
                        if nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None
                    elif kind == regkind or kind == lnkkind:
                        if nf in dmap:
                            if matchalways or matchfn(nf):
                                results[nf] = st
                        elif (matchalways or matchfn(nf)) and not ignore(nf):
                            results[nf] = st
                    elif nf in dmap and (matchalways or matchfn(nf)):
                        results[nf] = None

        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: report unseen items in the dmap hash
        if not skipstep3 and not exact:
            if not results and matchalways:
                visit = dmap.keys()
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked the full directory tree
                # above. So if a file is not seen, it was either a) not
                # matching matchfn, b) ignored, c) missing, or d) under a
                # symlink directory.
                audit_path = pathutil.pathauditor(self._root)

                for nf in iter(visit):
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    if audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat everything we missed.
                nf = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(nf)] = st
        return results.keys()
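
In the final branch, util.statfiles stats every still-unvisited path in one pass and yields None for paths that no longer exist, and the loop pairs those results back to visit in order. A self-contained sketch of the same "stat everything we missed" idea using plain os calls (stat_missing is an illustrative name, not part of the original code):

import os

def stat_missing(root, paths):
    # Illustrative: lstat each remaining path, mapping missing files to
    # None, which is what the walk reports for entries that disappeared.
    results = {}
    for p in paths:
        try:
            results[p] = os.lstat(os.path.join(root, p))
        except OSError:
            results[p] = None
    return results
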
Example #6
def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True):
    """Replacement for dirstate.walk, hooking into Watchman.

    Whenever full is False, ignored is False, and the Watchman client is
    available, use Watchman combined with saved state to possibly return only a
    subset of files."""
    def bail(reason):
        self._ui.debug(b'fsmonitor: fallback to core status, %s\n' % reason)
        return orig(match, subrepos, unknown, ignored, full=True)

    if full:
        return bail(b'full rewalk requested')
    if ignored:
        return bail(b'listing ignored files')
    if not self._watchmanclient.available():
        return bail(b'client unavailable')
    state = self._fsmonitorstate
    clock, ignorehash, notefiles = state.get()
    if not clock:
        if state.walk_on_invalidate:
            return bail(b'no clock')
        # Initial NULL clock value, see
        # https://facebook.github.io/watchman/docs/clockspec.html
        clock = b'c:0:0'
        notefiles = []

    ignore = self._ignore
    dirignore = self._dirignore
    if unknown:
        if _hashignore(ignore) != ignorehash and clock != b'c:0:0':
            # ignore list changed -- can't rely on Watchman state any more
            if state.walk_on_invalidate:
                return bail(b'ignore rules changed')
            notefiles = []
            clock = b'c:0:0'
    else:
        # always ignore
        ignore = util.always
        dirignore = util.always

    matchfn = match.matchfn
    matchalways = match.always()
    dmap = self._map
    if util.safehasattr(dmap, '_map'):
        # for better performance, directly access the inner dirstate map if the
        # standard dirstate implementation is in use.
        dmap = dmap._map
    nonnormalset = self._map.nonnormalset

    copymap = self._map.copymap
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    normcase = util.normcase
    fresh_instance = False

    exact = skipstep3 = False
    if match.isexact():  # match.exact
        exact = True
        dirignore = util.always  # skip step 2
    elif match.prefix():  # match.match, no patterns
        skipstep3 = True

    if not exact and self._checkcase:
        # note that even though we could receive directory entries, we're only
        # interested in checking if a file with the same name exists. So only
        # normalize files if possible.
        normalize = self._normalizefile
        skipstep3 = False
    else:
        normalize = None

    # step 1: find all explicit files
    results, work, dirsnotfound = self._walkexplicit(match, subrepos)

    skipstep3 = skipstep3 and not (work or dirsnotfound)
    work = [d for d in work if not dirignore(d[0])]

    if not work and (exact or skipstep3):
        for s in subrepos:
            del results[s]
        del results[b'.hg']
        return results

    # step 2: query Watchman
    try:
        # Use the user-configured timeout for the query.
        # Add a little slack over the top of the user query to allow for
        # overheads while transferring the data
        self._watchmanclient.settimeout(state.timeout + 0.1)
        result = self._watchmanclient.command(
            b'query',
            {
                b'fields': [b'mode', b'mtime', b'size', b'exists', b'name'],
                b'since': clock,
                b'expression': [
                    b'not',
                    [
                        b'anyof',
                        [b'dirname', b'.hg'],
                        [b'name', b'.hg', b'wholename'],
                    ],
                ],
                b'sync_timeout': int(state.timeout * 1000),
                b'empty_on_fresh_instance': state.walk_on_invalidate,
            },
        )
    except Exception as ex:
        _handleunavailable(self._ui, state, ex)
        self._watchmanclient.clearconnection()
        return bail(b'exception during run')
    else:
        # We need to propagate the last observed clock up so that we
        # can use it for our next query
        state.setlastclock(pycompat.sysbytes(result[b'clock']))
        if result[b'is_fresh_instance']:
            if state.walk_on_invalidate:
                state.invalidate()
                return bail(b'fresh instance')
            fresh_instance = True
            # Ignore any prior notable files from the state info
            notefiles = []

    # for file paths which require normalization and we encounter a case
    # collision, we store our own foldmap
    if normalize:
        foldmap = {normcase(k): k for k in results}

    switch_slashes = pycompat.ossep == b'\\'
    # The order of the results is, strictly speaking, undefined.
    # For case changes on a case insensitive filesystem we may receive
    # two entries, one with exists=True and another with exists=False.
    # The exists=True entries in the same response should be interpreted
    # as being happens-after the exists=False entries due to the way that
    # Watchman tracks files.  We use this property to reconcile deletes
    # for name case changes.
    for entry in result[b'files']:
        fname = entry[b'name']

        # Watchman always gives us a str. Normalize to bytes on Python 3
        # using Watchman's encoding, if needed.
        if not isinstance(fname, bytes):
            fname = fname.encode(_watchmanencoding)

        if _fixencoding:
            fname = _watchmantofsencoding(fname)

        if switch_slashes:
            fname = fname.replace(b'\\', b'/')
        if normalize:
            normed = normcase(fname)
            fname = normalize(fname, True, True)
            foldmap[normed] = fname
        fmode = entry[b'mode']
        fexists = entry[b'exists']
        kind = getkind(fmode)

        if b'/.hg/' in fname or fname.endswith(b'/.hg'):
            return bail(b'nested-repo-detected')

        if not fexists:
            # if marked as deleted and we don't already have a change
            # record, mark it as deleted.  If we already have an entry
            # for fname then it was either part of walkexplicit or was
            # an earlier result that was a case change
            if (fname not in results and fname in dmap
                    and (matchalways or matchfn(fname))):
                results[fname] = None
        elif kind == dirkind:
            if fname in dmap and (matchalways or matchfn(fname)):
                results[fname] = None
        elif kind == regkind or kind == lnkkind:
            if fname in dmap:
                if matchalways or matchfn(fname):
                    results[fname] = entry
            elif (matchalways or matchfn(fname)) and not ignore(fname):
                results[fname] = entry
        elif fname in dmap and (matchalways or matchfn(fname)):
            results[fname] = None

    # step 3: query notable files we don't already know about
    # XXX try not to iterate over the entire dmap
    if normalize:
        # any notable files that have changed case will already be handled
        # above, so just check membership in the foldmap
        notefiles = {
            normalize(f, True, True)
            for f in notefiles if normcase(f) not in foldmap
        }
    visit = {
        f
        for f in notefiles
        if (f not in results and matchfn(f) and (f in dmap or not ignore(f)))
    }

    if not fresh_instance:
        if matchalways:
            visit.update(f for f in nonnormalset if f not in results)
            visit.update(f for f in copymap if f not in results)
        else:
            visit.update(f for f in nonnormalset
                         if f not in results and matchfn(f))
            visit.update(f for f in copymap if f not in results and matchfn(f))
    else:
        if matchalways:
            visit.update(f for f, st in pycompat.iteritems(dmap)
                         if f not in results)
            visit.update(f for f in copymap if f not in results)
        else:
            visit.update(f for f, st in pycompat.iteritems(dmap)
                         if f not in results and matchfn(f))
            visit.update(f for f in copymap if f not in results and matchfn(f))

    audit = pathutil.pathauditor(self._root, cached=True).check
    auditpass = [f for f in visit if audit(f)]
    auditpass.sort()
    auditfail = visit.difference(auditpass)
    for f in auditfail:
        results[f] = None

    nf = iter(auditpass)
    for st in util.statfiles([join(f) for f in auditpass]):
        f = next(nf)
        if st or f in dmap:
            results[f] = st

    for s in subrepos:
        del results[s]
    del results[b'.hg']
    return results
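
The ignorehash comparison near the top of overridewalk only needs a stable fingerprint of the ignore rules: if the fingerprint recorded in the saved state differs from the current one, the cached Watchman data cannot be trusted. A plausible sketch of such a helper, assuming (as Mercurial's matchers do) that repr() of the ignore matcher reflects its patterns rather than its identity; this is not necessarily the extension's exact implementation:

import hashlib

def _hashignore(ignore):
    # Fingerprint the ignore matcher; any change in the ignore rules between
    # runs produces a different digest and invalidates the saved clock.
    sha1 = hashlib.sha1()
    sha1.update(repr(ignore).encode('utf-8'))
    return sha1.hexdigest().encode('ascii')
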
Example #7
    def __init__(self, baseui, path, create=False, intents=None):
        # type: (uimod.ui, str, bool, Any) -> None
        self.requirements = set()
        self.filtername = None
        self._phasedefaults = []
        self._bookmarks = {}
        self.obsstore = False
        self._revbranchcache = None
        # generic mapping between names and nodes
        self.names = gitnamespaces()

        # wvfs: rooted at the repository root, used to access the working copy
        self.wvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
        # vfs: rooted at .hg, used to access repo files outside of .hg/store
        self.vfs = None
        # svfs: usually rooted at .hg/store, used to access repository history
        # If this is a shared repository, this vfs may point to another
        # repository's .hg/store directory.
        self.svfs = None
        self.root = self.wvfs.base
        self.path = self.wvfs.join(".git")
        self.origroot = path

        # This is only used by context.workingctx.match in order to
        # detect files in subrepos.
        self.auditor = pathutil.pathauditor(self.root,
                                            callback=self._checknested)
        # This is only used by context.basectx.match in order to detect
        # files in subrepos.
        self.nofsauditor = pathutil.pathauditor(self.root,
                                                callback=self._checknested,
                                                realfs=False,
                                                cached=True)

        self.baseui = baseui
        self.ui = baseui.copy()
        self.ui.copy = baseui.copy  # prevent copying repo configuration
        self.vfs = vfsmod.vfs(self.path, cacheaudited=True)
        if (self.ui.configbool('devel', 'all-warnings')
                or self.ui.configbool('devel', 'check-locks')):
            self.vfs.audit = self._getvfsward(self.vfs.audit)

        try:
            self.ui.readconfig(self.vfs.join("hgrc"), self.root)
            # self._loadextensions()
        except IOError:
            pass

        color.setup(self.ui)

        if not self.vfs.isdir():
            if create:
                pygit2.init_repository(self.path, False)
            else:
                raise error.RepoError(_("repository %s not found") % path)
        elif create:
            raise error.RepoError(_("repository %s already exists") % path)

        self._repo = pygit2.Repository(self.path)

        # FIXME: move to propertycache
        self.dirstate = gitdirstate(self, self.ui, self.root)
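
The constructor above delegates actual repository creation and opening to pygit2. A minimal sketch of just those calls (open_or_create is an illustrative helper that mirrors the create/exists checks in __init__, not part of the original code):

import os
import pygit2

def open_or_create(path, create=False):
    # Mirror the __init__ logic above: initialize a non-bare repository at
    # the .git path when asked to create, otherwise open the existing one.
    gitdir = os.path.join(path, '.git')
    if not os.path.isdir(gitdir):
        if not create:
            raise IOError('repository %s not found' % path)
        pygit2.init_repository(gitdir, False)
    elif create:
        raise IOError('repository %s already exists' % path)
    return pygit2.Repository(gitdir)
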