def getbundle(self, source, **kwargs):
    """Request a bundle of remote changes and return an unbundler for it.

    Keyword arguments are encoded per their declared type in gboptsmap
    before being sent over the wire; the reply is wrapped in a bundle2
    unbundler when the caller advertised an HG2x capability, otherwise
    in a plain cg1 changegroup unpacker.
    """
    self.requirecap('getbundle', _('look up remote changes'))
    opts = {}
    for key, value in kwargs.iteritems():
        if value is None:
            # unset options are simply not transmitted
            continue
        keytype = gboptsmap.get(key)
        # every keyword we accept must be declared in gboptsmap
        assert keytype is not None, 'unexpected'
        if keytype == 'nodes':
            encoded = encodelist(value)
        elif keytype == 'csv':
            encoded = ','.join(value)
        elif keytype == 'boolean':
            encoded = '%i' % bool(value)
        elif keytype == 'plain':
            encoded = value
        else:
            raise KeyError('unknown getbundle option type %s' % keytype)
        opts[key] = encoded
    f = self._callcompressable("getbundle", **opts)
    bundlecaps = kwargs.get('bundlecaps')
    if bundlecaps is None:
        bundlecaps = ()  # kwargs could have it to None
    if util.any(cap.startswith('HG2') for cap in bundlecaps):
        return bundle2.getunbundler(self.ui, f)
    return changegroupmod.cg1unpacker(f, 'UN')
def system(self, cmd, environ=None, cwd=None, onerr=None, errprefix=None):
    '''execute shell command with appropriate output stream. command
    output will be redirected if fout is not stdout.

    environ: extra environment variables for the child process
             (default: none).
    cwd: working directory for the command, or None for the current one.
    onerr/errprefix: forwarded to util.system for error handling.
    '''
    # Fix: avoid a shared mutable default argument; treat None as "no
    # extra environment". Callers using the old default get identical
    # behavior ({} is still what util.system receives).
    if environ is None:
        environ = {}
    out = self.fout
    # If any pushed buffer captures subprocess output (s[1] flag), route
    # the child's output through self so it is buffered as well.
    if util.any(s[1] for s in self._bufferstates):
        out = self
    return util.system(cmd, environ=environ, cwd=cwd, onerr=onerr,
                       errprefix=errprefix, out=out)
def computemutable(repo):
    """compute the set of revision that should be filtered when used a server

    Secret and hidden changeset should not pretend to be here."""
    assert not repo.changelog.filteredrevs
    # Fast path: with no non-public phase roots at all, nothing can be
    # mutable, so skip the revset work entirely on huge repos.
    if not util.any(repo._phasecache.phaseroots[1:]):
        return frozenset()
    getphase = repo._phasecache.phase
    maymutable = filterrevs(repo, 'base')
    # keep only revisions whose phase is non-public (truthy phase value)
    return frozenset(r for r in maymutable if getphase(repo, r))
def spin():
    """ Spin the 'event loop', and never return.

    Each pass runs every registered ParallelContext; then either blocks
    waiting for a finished job (invoking its callback with the exit code)
    or asserts that some context still has pending work queued.
    """
    while True:
        # Snapshot the context set before iterating — presumably c.run()
        # can add/remove contexts from _allcontexts (TODO confirm).
        clist = list(ParallelContext._allcontexts)
        for c in clist:
            c.run()
        # Only block when at least one context actually has a running job.
        dowait = util.any((len(c.running) for c in ParallelContext._allcontexts))
        if dowait:
            # Wait on local jobs first for perf
            for job, cb in ParallelContext._waitany(ParallelContext._condition):
                cb(job.exitcode)
        else:
            # Nothing is running, so something must still be pending —
            # otherwise this loop would spin forever with no work to do.
            assert any(len(c.pending) for c in ParallelContext._allcontexts)
def spin():
    """ Spin the 'event loop', and never return.

    Each pass runs every registered ParallelContext, reaps any child
    process that has already exited (via .poll()), and otherwise blocks in
    os.waitpid (through ParallelContext._waitany) for the next child,
    dispatching its callback with the converted status.
    """
    while True:
        # Snapshot the context set before iterating — presumably c.run()
        # can mutate _allcontexts (TODO confirm).
        clist = list(ParallelContext._allcontexts)
        for c in clist:
            c.run()

        # In python 2.4, subprocess instances wait on child processes under the hood when they are created... this
        # unfortunate behavior means that before using os.waitpid, we need to check the status using .poll()
        # see http://bytes.com/groups/python/675403-os-wait-losing-child
        found = False
        for c in clist:
            for i in xrange(0, len(c.running)):
                p, cb = c.running[i]
                result = p.poll()
                if result != None:
                    # Child already reaped by subprocess; deliver its
                    # result and restart the scan (the list was mutated).
                    del c.running[i]
                    cb(result)
                    found = True
                    break
            if found:
                break
        if found:
            # Re-run the outer loop: more children may be ready without
            # ever blocking in waitpid.
            continue

        # Only block when at least one context actually has a running job.
        dowait = util.any((len(c.running) for c in ParallelContext._allcontexts))
        if dowait:
            pid, status = ParallelContext._waitany()
            result = statustoresult(status)
            # Find which context owns the reaped pid and fire its callback.
            for c in ParallelContext._allcontexts:
                for i in xrange(0, len(c.running)):
                    p, cb = c.running[i]
                    if ParallelContext._comparepid(pid, p):
                        del c.running[i]
                        cb(result)
                        found = True
                        break
                if found:
                    break
        else:
            # Nothing is running, so something must still be pending —
            # otherwise this loop would spin forever with no work to do.
            assert any(len(c.pending) for c in ParallelContext._allcontexts)
def __init__(self, path, matcher):
    """Build a matcher narrowed to the subrepo rooted at ``path``.

    Copies the parent matcher's configuration, keeps only the parent's
    file list entries that live under ``path``, and delegates matchfn to
    the parent with the subrepo prefix re-attached.
    """
    self._root = matcher._root
    self._cwd = matcher._cwd
    self._path = path
    self._matcher = matcher
    self._always = matcher._always
    self._pathrestricted = matcher._pathrestricted

    prefix = path + "/"
    # strip the subrepo prefix from every parent file under this path
    self._files = [f[len(prefix):] for f in matcher._files
                   if f.startswith(prefix)]

    # If the parent repo had a path to this subrepo and no patterns are
    # specified, this submatcher always matches.
    if not self._always and not matcher._anypats:
        self._always = util.any(f == path for f in matcher._files)

    self._anypats = matcher._anypats
    self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn)
    self._fmap = set(self._files)
def resolve(self, makefile, variables, fd, setting):
    """Write the words of the second argument that match none of the
    patterns in the first argument, space-joined, to ``fd``."""
    # Build Pattern objects from the first argument's expansion.
    patterns = []
    for p in self._arguments[0].resolvesplit(makefile, variables, setting):
        patterns.append(data.Pattern(p))
    # Keep only the words no pattern matches.
    kept = []
    for w in self._arguments[1].resolvesplit(makefile, variables, setting):
        if not util.any(p.match(w) for p in patterns):
            kept.append(w)
    fd.write(' '.join(kept))
def guesskind(dest):
    """Guess the archive kind from ``dest``'s file extension, or None."""
    for kind, extensions in exts.iteritems():
        for ext in extensions:
            if dest.endswith(ext):
                return kind
    return None
def resolvedeps(self, makefile, targetstack, rulestack, recursive):
    """
    Resolve the actual path of this target, using vpath if necessary.

    Recursively resolve dependencies of this target. This means finding implicit
    rules which match the target, if appropriate.

    Figure out whether this target needs to be rebuild, and set self.outofdate
    appropriately.

    @param targetstack is the current stack of dependencies being resolved. If
           this target is already in targetstack, bail to prevent infinite
           recursion.
    @param rulestack is the current stack of implicit rules being used to resolve
           dependencies. A rule chain cannot use the same implicit rule twice.
    """
    assert makefile.parsingfinished

    if self.target in targetstack:
        raise ResolutionError("Recursive dependency: %s -> %s" % (
            " -> ".join(targetstack), self.target))

    # Extend the stack non-destructively: callers keep their own list.
    targetstack = targetstack + [self.target]

    indent = getindent(targetstack)

    _log.info("%sConsidering target '%s'", indent, self.target)

    self.resolvevpath(makefile)

    # Sanity-check our rules. If we're single-colon, only one rule should have commands
    ruleswithcommands = self.ruleswithcommands()
    if len(self.rules) and not self.isdoublecolon():
        if ruleswithcommands > 1:
            # In GNU make this is a warning, not an error. I'm going to be stricter.
            # TODO: provide locations
            raise DataError("Target '%s' has multiple rules with commands." % self.target)

    # No explicit rule provides commands: go hunting for an implicit rule.
    if ruleswithcommands == 0:
        self.resolveimplicitrule(makefile, targetstack, rulestack)

    # If a target is mentioned, but doesn't exist, has no commands and no
    # prerequisites, it is special and exists just to say that targets which
    # depend on it are always out of date. This is like .FORCE but more
    # compatible with other makes.
    # Otherwise, we don't know how to make it.
    # NOTE(review): when the first clause holds, self.rules is empty, so the
    # util.any(...) generator iterates nothing and the third clause is
    # vacuously true — it never affects the outcome here. Possibly it was
    # meant to inspect a different rule collection; confirm against upstream.
    if not len(self.rules) and self.mtime is None and not util.any((len(rule.prerequisites) > 0 for rule in self.rules)):
        raise ResolutionError("No rule to make target '%s' needed by %r" % (self.target,
            targetstack))

    if recursive:
        for r in self.rules:
            newrulestack = rulestack + [r]
            for d in r.prerequisites:
                dt = makefile.gettarget(d)
                # explicit targets are resolved elsewhere; skip them here
                if dt.explicit:
                    continue
                dt.resolvedeps(makefile, targetstack, newrulestack, True)

    # Pull in any pattern-specific variables that apply to this target.
    for v in makefile.getpatternvariablesfor(self.target):
        self.variables.merge(v)
def resolveimplicitrule(self, makefile, targetstack, rulestack): """ Try to resolve an implicit rule to build this target. """ # The steps in the GNU make manual Implicit-Rule-Search.html are very detailed. I hope they can be trusted. indent = getindent(targetstack) _log.info("%sSearching for implicit rule to make '%s'", indent, self.target) dir, s, file = util.strrpartition(self.target, '/') dir = dir + s candidates = [] # list of PatternRuleInstance hasmatch = util.any((r.hasspecificmatch(file) for r in makefile.implicitrules)) for r in makefile.implicitrules: if r in rulestack: _log.info("%s %s: Avoiding implicit rule recursion", indent, r.loc) continue if not len(r.commands): continue for ri in r.matchesfor(dir, file, hasmatch): candidates.append(ri) newcandidates = [] for r in candidates: depfailed = None for p in r.prerequisites: t = makefile.gettarget(p) t.resolvevpath(makefile) if not t.explicit and t.mtime is None: depfailed = p break if depfailed is not None: if r.doublecolon: _log.info("%s Terminal rule at %s doesn't match: prerequisite '%s' not mentioned and doesn't exist.", indent, r.loc, depfailed) else: newcandidates.append(r) continue _log.info("%sFound implicit rule at %s for target '%s'", indent, r.loc, self.target) self.rules.append(r) return # Try again, but this time with chaining and without terminal (double-colon) rules for r in newcandidates: newrulestack = rulestack + [r.prule] depfailed = None for p in r.prerequisites: t = makefile.gettarget(p) try: t.resolvedeps(makefile, targetstack, newrulestack, True) except ResolutionError: depfailed = p break if depfailed is not None: _log.info("%s Rule at %s doesn't match: prerequisite '%s' could not be made.", indent, r.loc, depfailed) continue _log.info("%sFound implicit rule at %s for target '%s'", indent, r.loc, self.target) self.rules.append(r) return _log.info("%sCouldn't find implicit rule to remake '%s'", indent, self.target)
def ismatchany(self):
    """Return True if any of this rule's target patterns matches anything."""
    for t in self.targetpatterns:
        if t.ismatchany():
            return True
    return False