def refresh(self):
    """Reload the hgwebdir configuration and repository list.

    Re-reads paths/collections from self.conf (a config file path, a
    list/tuple of (name, path) pairs, or a dict) and rebuilds self.repos
    plus the web.* display settings.  Calls are throttled to at most one
    rescan per self.refreshinterval seconds.
    """
    # Throttle: skip the rescan if the last one is recent enough.
    if self.lastrefresh + self.refreshinterval > time.time():
        return
    if self.baseui:
        u = self.baseui.copy()
    else:
        u = ui.ui()
        u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir')
        u.setconfig('ui', 'nontty', 'true', 'hgwebdir')
        # displaying bundling progress bar while serving feels wrong and may
        # break some wsgi implementations.
        u.setconfig('progress', 'disable', 'true', 'hgweb')

    if not isinstance(self.conf, (dict, list, tuple)):
        # self.conf is a config file path; remap its [paths] section so it
        # does not collide with the ui's own [paths].
        map = {'paths': 'hgweb-paths'}
        if not os.path.exists(self.conf):
            raise util.Abort(_('config file %s not found!') % self.conf)
        u.readconfig(self.conf, remap=map, trust=True)
        paths = []
        for name, ignored in u.configitems('hgweb-paths'):
            # configlist allows one name to map to several path patterns
            for path in u.configlist('hgweb-paths', name):
                paths.append((name, path))
    elif isinstance(self.conf, (list, tuple)):
        paths = self.conf
    elif isinstance(self.conf, dict):
        paths = self.conf.items()

    repos = findrepos(paths)
    # [collections] entries: walk each root and publish every repo found
    # under it, keyed by its path relative to the collection prefix.
    for prefix, root in u.configitems('collections'):
        prefix = util.pconvert(prefix)
        for path in scmutil.walkrepos(root, followsym=True):
            repo = os.path.normpath(path)
            name = util.pconvert(repo)
            if name.startswith(prefix):
                name = name[len(prefix):]
            repos.append((name.lstrip('/'), repo))

    self.repos = repos
    self.ui = u
    encoding.encoding = self.ui.config('web', 'encoding',
                                       encoding.encoding)
    self.style = self.ui.config('web', 'style', 'paper')
    self.templatepath = self.ui.config('web', 'templates', None)
    self.stripecount = self.ui.config('web', 'stripes', 1)
    if self.stripecount:
        self.stripecount = int(self.stripecount)
    self._baseurl = self.ui.config('web', 'baseurl')
    # Normalize web.prefix: no leading or trailing slash.
    prefix = self.ui.config('web', 'prefix', '')
    if prefix.startswith('/'):
        prefix = prefix[1:]
    if prefix.endswith('/'):
        prefix = prefix[:-1]
    self.prefix = prefix
    self.lastrefresh = time.time()
def refresh(self):
    """Rescan configuration and rebuild the list of served repositories.

    No-op when invoked within self.refreshinterval seconds of the last
    refresh.  self.conf may be a config file path, a list/tuple of
    (name, path) pairs, or a dict of the same.
    """
    if self.lastrefresh + self.refreshinterval > time.time():
        return
    if self.baseui:
        u = self.baseui.copy()
    else:
        u = ui.ui()
        u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir')
        u.setconfig('ui', 'nontty', 'true', 'hgwebdir')
        # displaying bundling progress bar while serving feels wrong and may
        # break some wsgi implementations.
        u.setconfig('progress', 'disable', 'true', 'hgweb')

    if not isinstance(self.conf, (dict, list, tuple)):
        # Read path entries from the config file, remapped into a private
        # section name.
        map = {'paths': 'hgweb-paths'}
        if not os.path.exists(self.conf):
            raise util.Abort(_('config file %s not found!') % self.conf)
        u.readconfig(self.conf, remap=map, trust=True)
        paths = []
        for name, ignored in u.configitems('hgweb-paths'):
            for path in u.configlist('hgweb-paths', name):
                paths.append((name, path))
    elif isinstance(self.conf, (list, tuple)):
        paths = self.conf
    elif isinstance(self.conf, dict):
        paths = self.conf.items()

    repos = findrepos(paths)
    # Expand [collections]: every repository below a collection root is
    # served under its path relative to the (slash-normalized) prefix.
    for prefix, root in u.configitems('collections'):
        prefix = util.pconvert(prefix)
        for path in scmutil.walkrepos(root, followsym=True):
            repo = os.path.normpath(path)
            name = util.pconvert(repo)
            if name.startswith(prefix):
                name = name[len(prefix):]
            repos.append((name.lstrip('/'), repo))

    self.repos = repos
    self.ui = u
    encoding.encoding = self.ui.config('web', 'encoding',
                                       encoding.encoding)
    self.style = self.ui.config('web', 'style', 'paper')
    self.templatepath = self.ui.config('web', 'templates', None)
    self.stripecount = self.ui.config('web', 'stripes', 1)
    if self.stripecount:
        self.stripecount = int(self.stripecount)
    self._baseurl = self.ui.config('web', 'baseurl')
    # web.prefix is stored without surrounding slashes.
    prefix = self.ui.config('web', 'prefix', '')
    if prefix.startswith('/'):
        prefix = prefix[1:]
    if prefix.endswith('/'):
        prefix = prefix[:-1]
    self.prefix = prefix
    self.lastrefresh = time.time()
def snapshot(repo, files, ctx):
    '''snapshot files as of some revision

    Copies the given files from ctx into a per-diff temporary directory
    and returns (base, fns_and_mtime) where base is the snapshot dir and
    fns_and_mtime lists (copy, working-copy path, mtime) tuples for
    working-directory snapshots (used to detect edits made in the tool).
    '''
    dirname = os.path.basename(repo.root) or 'root'
    # _diffCount is a module-level counter — presumably keeps concurrent
    # diff sessions from sharing a directory; verify at module scope.
    dirname += '.%d' % _diffCount
    if ctx.rev() is not None:
        dirname += '.%d' % ctx.rev()
    base = os.path.join(qtlib.gettempdir(), dirname)
    fns_and_mtime = []
    if not os.path.exists(base):
        os.mkdir(base)
    for fn in files:
        wfn = util.pconvert(fn)
        if not wfn in ctx:
            # File doesn't exist; could be a bogus modify
            continue
        dest = os.path.join(base, wfn)
        if os.path.exists(dest):
            # File has already been snapshot
            continue
        destdir = os.path.dirname(dest)
        if not os.path.isdir(destdir):
            os.makedirs(destdir)
        data = repo.wwritedata(wfn, ctx[wfn].data())
        f = open(dest, 'wb')
        f.write(data)
        f.close()
        if ctx.rev() is None:
            # Working copy: remember the mtime so later edits in the
            # external tool can be copied back.
            fns_and_mtime.append((dest, repo.wjoin(fn),
                                  os.lstat(dest).st_mtime))
        else:
            # Make file read/only, to indicate it's static (archival) nature
            os.chmod(dest, stat.S_IREAD)
    return base, fns_and_mtime
def parsedefinitions(ui, repo, svnroot, exts):
    """Return (targetdir, revision, source) tuples. Fail if nested
    targetdirs are detected. source is an svn project URL.
    """
    defs = []
    for base in sorted(exts):
        for line in exts[base]:
            if not line.strip() or line.lstrip().startswith('#'):
                # Ignore comments and blank lines
                continue
            try:
                path, rev, source, pegrev, norevline = parsedefinition(line)
            except BadDefinition:
                # Apply % OUTSIDE _(): the catalog must be keyed on the
                # literal format string, not the already-formatted text
                # (otherwise translation never matches).
                ui.warn(_('ignoring invalid external definition: %r\n')
                        % line)
                continue
            source = resolvesource(ui, svnroot, source)
            if source is None:
                continue
            # Normalize the target path relative to the repository root.
            wpath = hgutil.pconvert(os.path.join(base, path))
            wpath = canonpath(repo.root, '', wpath)
            defs.append((wpath, rev, source, pegrev, norevline, base))
    # Check target dirs are not nested
    defs.sort()
    for i, d in enumerate(defs):
        for d2 in defs[i + 1:]:
            if d2[0].startswith(d[0] + '/'):
                raise hgutil.Abort(
                    _('external directories cannot nest:\n%s\n%s')
                    % (d[0], d2[0]))
    return defs
def snapshot_wdir(ui, repo, files, tmproot):
    '''snapshot files from working directory.
    if not using snapshot, -I/-X does not work and recursive diff
    in tools like kdiff3 and meld displays too many files.'''
    repo_root = repo.root
    dirname = os.path.basename(repo_root)
    if dirname == "":
        dirname = "root"
    base = os.path.join(tmproot, dirname)
    os.mkdir(base)
    ui.note(_('making snapshot of %d files from working dir\n') %
            (len(files)))
    # (copy path, original path, copy mtime) — callers can compare mtimes
    # afterwards to detect edits made inside the external tool.
    fns_and_mtime = []
    for fn in files:
        # dirstate paths always use '/', even on Windows
        wfn = util.pconvert(fn)
        ui.note('  %s\n' % wfn)
        dest = os.path.join(base, wfn)
        destdir = os.path.dirname(dest)
        if not os.path.isdir(destdir):
            os.makedirs(destdir)
        fp = open(dest, 'wb')
        # Stream in chunks to avoid loading large files wholly in memory.
        for chunk in util.filechunkiter(repo.wopener(wfn)):
            fp.write(chunk)
        fp.close()
        fns_and_mtime.append((dest, os.path.join(repo_root, fn),
                              os.path.getmtime(dest)))
    return dirname, fns_and_mtime
def snapshot_node(ui, repo, files, node, tmproot):
    '''
    snapshot files as of some revision
    (adapted from Extdiff extension)

    Copies the requested files from the given revision into a
    "<reponame>.<shortrev>" directory under tmproot and returns that
    directory's name.
    '''
    ctx = repo[node]
    mf = ctx.manifest()
    dirname = os.path.basename(repo.root)
    if dirname == '':
        dirname = 'root'
    dirname = '%s.%s' % (dirname, str(ctx))
    base = os.path.join(tmproot, dirname)
    try:
        os.mkdir(base)
    except OSError:
        # Fix: only swallow mkdir failures (typically: directory already
        # exists from a previous snapshot) instead of a bare except that
        # would also hide KeyboardInterrupt and programming errors.
        pass
    ui.note(_('making snapshot of %d files from rev %s\n') %
            (len(files), str(ctx)))
    for fn in files:
        if not fn in mf:
            # skipping new file after a merge ?
            continue
        wfn = util.pconvert(fn)
        ui.note('  %s\n' % wfn)
        dest = os.path.join(base, wfn)
        destdir = os.path.dirname(dest)
        if not os.path.isdir(destdir):
            os.makedirs(destdir)
        data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
        open(dest, 'wb').write(data)
    return dirname
def parsedefinitions(ui, repo, svnroot, exts):
    """Return (targetdir, revision, source) tuples. Fail if nested
    targetdirs are detected. source is an svn project URL.
    """
    defs = []
    for base in sorted(exts):
        for line in exts[base]:
            if not line.strip() or line.lstrip().startswith('#'):
                # Ignore comments and blank lines
                continue
            try:
                path, rev, source, pegrev, norevline = parsedefinition(line)
            except BadDefinition:
                # Fix: format outside _() so gettext looks up the literal
                # message, not the already-interpolated string.
                ui.warn(_('ignoring invalid external definition: %r\n')
                        % line)
                continue
            source = resolvesource(ui, svnroot, source)
            if source is None:
                continue
            # Canonicalize the external's target path against the repo root.
            wpath = hgutil.pconvert(os.path.join(base, path))
            wpath = hgutil.canonpath(repo.root, '', wpath)
            defs.append((wpath, rev, source, pegrev, norevline, base))
    # Check target dirs are not nested
    defs.sort()
    for i, d in enumerate(defs):
        for d2 in defs[i+1:]:
            if d2[0].startswith(d[0] + '/'):
                raise hgutil.Abort(_('external directories cannot nest:\n%s\n%s')
                                   % (d[0], d2[0]))
    return defs
def generate_text_diffs(self, row):
    """Build a highlighted diff for the file at *row* of the file model.

    Large diffs are delegated to chunks.check_max_diff; merge working
    directories get one diff section per parent.  NOTE(review): the
    visible body ends inside the merge branch — the non-merge path is
    presumably below this excerpt; confirm against the full file.
    """
    wfile = self.filemodel[row][FM_PATH]
    pfile = util.pconvert(wfile)
    # check_max_diff returns pre-rendered lines for oversized/binary
    # files — TODO confirm; if so, skip the patch.diff work entirely.
    lines = chunks.check_max_diff(self.get_ctx(), pfile)
    if lines:
        return self.diff_highlight_buffer(lines)
    matcher = cmdutil.matchfiles(self.repo, [pfile])
    opts = patch.diffopts(self.ui, self.opts)
    opts.git = True
    difftext = []
    if self.is_merge():
        wctx = self.repo[None]
        pctx1, pctx2 = wctx.parents()
        difftext = [_('===== Diff to first parent %d:%s =====\n') % (
                    pctx1.rev(), str(pctx1))]
        try:
            for s in patch.diff(self.repo, pctx1.node(), None,
                                match=matcher, opts=opts):
                difftext.extend(s.splitlines(True))
            difftext.append(_('\n===== Diff to second parent %d:%s =====\n') % (
                            pctx2.rev(), str(pctx2)))
            for s in patch.diff(self.repo, pctx2.node(), None,
                                match=matcher, opts=opts):
                difftext.extend(s.splitlines(True))
        except (IOError, error.RepoError, error.LookupError, util.Abort), e:
            # Diff failures are reported in the status bar, not raised.
            self.stbar.set_text(str(e))
def function(tree, srcpath, opts):
    """Per-tree worker: clone *tree* from the forest source if missing.

    Closes over snapfile, toproot, forest, source, ui and qclone from the
    enclosing scope.  NOTE(review): the nesting of the first if/else was
    reconstructed from mangled source — confirm against upstream.
    """
    if snapfile:
        # Snapshot mode: pin the revision recorded for this tree.
        opts['rev'] = tree.revs
    else:
        destpath = relpath(os.path.abspath(os.curdir), tree.root)
    rpath = util.pconvert(relpath(toproot, tree.root))
    if not srcpath:
        srcpath = forest.top().getpath(source)
    if srcpath:
        srcpath = '/'.join((srcpath, rpath))
    else:
        ui.warn(_("warning: %s\n") % _("repository %s not found")
                % source[0])
    try:
        tree.getrepo(ui)
    except RepoError:
        # Need to clone
        quiet = ui.quiet
        try:
            ui.quiet = True  # Hack to shut up qclone's ui.status()
            qclone(ui=ui, source=srcpath, sroot=source,
                   dest=destpath, rpath=rpath, opts=opts)
        except util.Abort, err:
            ui.warn(_("skipped: %s\n") % err)
        ui.quiet = quiet
    return
def parsedefinitions(ui, repo, svnroot, exts):
    """Return (targetdir, revision, source) tuples. Fail if nested
    targetdirs are detected. source is an svn project URL.
    """
    defs = []
    for base in sorted(exts):
        for line in exts[base]:
            try:
                path, rev, source, pegrev = parsedefinition(line)
            except BadDefinition:
                # Fix: interpolate OUTSIDE _() so translation lookup uses
                # the literal message; also newline-terminate the warning
                # as ui.warn messages conventionally are.
                ui.warn(_('ignoring invalid external definition: %r\n')
                        % line)
                continue
            if re_scheme.search(source):
                # Fully qualified URL: use as-is.
                pass
            elif source.startswith('^/'):
                # '^/' is svn shorthand for "relative to the repo root".
                source = svnroot + source[1:]
            else:
                # Same i18n/newline fix as above.
                ui.warn(_('ignoring unsupported non-fully qualified '
                          'external: %r\n') % source)
                continue
            wpath = hgutil.pconvert(os.path.join(base, path))
            wpath = hgutil.canonpath(repo.root, '', wpath)
            defs.append((wpath, rev, source, pegrev))
    # Check target dirs are not nested
    defs.sort()
    for i, d in enumerate(defs):
        for d2 in defs[i+1:]:
            if d2[0].startswith(d[0] + '/'):
                raise hgutil.Abort(_('external directories cannot nest:\n%s\n%s')
                                   % (d[0], d2[0]))
    return defs
def snapshot_node(ui, repo, files, node, tmproot):
    """
    snapshot files as of some revision
    (adapted from Extdiff extension)

    Copies the requested files from *node* into a
    "<reponame>.<shortnode>" directory under tmproot and returns that
    directory's name.
    """
    mf = repo.changectx(node).manifest()
    dirname = os.path.basename(repo.root)
    if dirname == "":
        dirname = "root"
    dirname = "%s.%s" % (dirname, short(node))
    base = os.path.join(tmproot, dirname)
    try:
        os.mkdir(base)
    except OSError:
        # Fix: catch only mkdir failures (typically "already exists")
        # rather than a bare except that also hides KeyboardInterrupt
        # and real programming errors.
        pass
    ui.note(_("making snapshot of %d files from rev %s\n") %
            (len(files), short(node)))
    for fn in files:
        if not fn in mf:
            # skipping new file after a merge ?
            continue
        wfn = util.pconvert(fn)
        ui.note("  %s\n" % wfn)
        dest = os.path.join(base, wfn)
        destdir = os.path.dirname(dest)
        if not os.path.isdir(destdir):
            os.makedirs(destdir)
        data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
        open(dest, "wb").write(data)
    return dirname
def snapshot(repo, files, ctx, tmproot):
    '''snapshot files as of some revision

    Returns (base, fns_and_mtime): base is the snapshot directory;
    fns_and_mtime lists (copy, working-copy path, mtime) tuples, filled
    only for working-directory snapshots so callers can detect edits
    made inside the external tool and copy them back.
    '''
    dirname = os.path.basename(repo.root) or 'root'
    if ctx.rev() is not None:
        dirname = '%s.%s' % (dirname, str(ctx))
    base = os.path.join(tmproot, dirname)
    os.mkdir(base)
    fns_and_mtime = []
    for fn in files:
        # dirstate paths always use '/', even on Windows
        wfn = util.pconvert(fn)
        if not wfn in ctx:
            # File doesn't exist; could be a bogus modify
            continue
        dest = os.path.join(base, wfn)
        destdir = os.path.dirname(dest)
        if not os.path.isdir(destdir):
            os.makedirs(destdir)
        data = repo.wwritedata(wfn, ctx[wfn].data())
        f = open(dest, 'wb')
        f.write(data)
        f.close()
        if ctx.rev() is None:
            fns_and_mtime.append((dest, repo.wjoin(fn),
                                  os.path.getmtime(dest)))
        elif os.name != 'nt':
            # Make file read/only, to indicate it's static (archival) nature
            os.chmod(dest, stat.S_IREAD)
    return base, fns_and_mtime
def printconfigs(env):
    """Load a fresh ui under *env* and dump every config item.

    Each line has the form ``section.name=value # source``; a trailing
    blank line terminates the dump.
    """
    encoding.environ = env
    # Invalidate the cached rc component list so it is rebuilt from `env`.
    rcutil._rccomponents = None
    loaded = uimod.ui.load()
    for section, name, value in loaded.walkconfig():
        origin = loaded.configsource(section, name)
        line = '%s.%s=%s # %s' % (section, name, value,
                                  util.pconvert(origin))
        print(line)
    print('')
def _hgwebdir_refresh(self):
    """Wrapped hgwebdir refresh that also publishes nested repositories.

    Delegates the normal rescan to _hgwebdir_refresh_parent, then walks
    each [collections] root again to append every nested repo (from
    repo.nested) that is not already listed.
    """
    # Same throttling as the stock refresh.
    if self.lastrefresh + self.refreshinterval > time.time():
        return
    _hgwebdir_refresh_parent(self)
    for prefix, root in self.ui.configitems('collections'):
        prefix = util.pconvert(prefix)
        for path in walkrepos(root, followsym=True):
            repo = hg.repository(self.ui, path)
            for npath in repo.nested:
                npath = os.path.normpath(os.path.join(path, npath))
                # Serve the nested repo under its prefix-relative name.
                name = util.pconvert(npath)
                if name.startswith(prefix):
                    name = name[len(prefix):]
                # NOTE: `repo` is rebound here from repository object to
                # (name, path) tuple.
                repo = (name.lstrip('/'), npath)
                if repo not in self.repos:
                    self.repos.append(repo)
    self.lastrefresh = time.time()
def seed(ui, snapshot=None, source='default', **opts):
    """populate a forest according to a snapshot file.

    Populate an empty local forest according to a snapshot file.

    Given a snapshot file, clone any non-existant directory from the
    provided path-alias.  This defaults to cloning from the 'default'
    path.

    Unless the --tip option is set, this command will clone the revision
    specified in the snapshot file.

    Look at the help text for the clone command for more information.
    """
    snapfile = snapshot or opts['snapfile']
    if not snapfile:
        raise cmdutil.ParseError("fseed", _("invalid arguments"))
    forest = Forest(snapfile=snapfile)
    # NOTE(review): `tip` is read but never used in this visible body —
    # presumably consumed by qclone via opts; confirm.
    tip = opts['tip']
    dest = opts['root']
    if not dest:
        dest = os.curdir
    # The top-level repo is not cloned here; only its subtrees are.
    forest.trees.remove(forest.top())
    dest = os.path.normpath(dest)
    for tree in forest.trees:
        srcpath = tree.getpath([source])
        if not srcpath:
            ui.status("[%s]\n" % util.pconvert(tree.root))
            ui.warn(_("skipped: path alias %s not defined\n") % source)
            ui.status("\n")
            continue
        srcpath = urltopath(srcpath)
        if tree.root == ".":
            destpath = dest
        else:
            destpath = os.path.join(dest, tree.root)
        # Pin the revision recorded in the snapshot file for this tree.
        opts['rev'] = tree.revs
        try:
            qclone(ui=ui, source=srcpath, sroot=None, dest=destpath,
                   rpath=util.pconvert(tree.root), opts=opts)
        except util.Abort, err:
            ui.warn(_("skipped: %s\n") % err)
        ui.status("\n")
def splitstandin(filename):
    """Return the big-file path for standin *filename*, or None.

    Dirstate always uses '/' as the separator, even on Windows, so the
    local separator is normalized first — the name may come from an
    external source such as the command line.
    """
    head, sep, tail = util.pconvert(filename).partition('/')
    if sep and head == shortname:
        return tail
    return None
def splitstandin(filename):
    """Strip the standin prefix from *filename*; None if it is no standin.

    Comparison happens on '/'-separated paths (what dirstate uses on all
    platforms), so any local separators are converted up front.
    """
    normalized = util.pconvert(filename)
    prefix, slash, rest = normalized.partition('/')
    if slash and prefix == shortname:
        return rest
    return None
def onCurrentChange(self, index, old):
    'Connected to treeview "currentChanged" signal'
    # Ignore selection changes that do not map to a file row.
    row = index.model().getRow(index)
    if row is None:
        return
    path, status, mst, upath, ext, sz = row
    # dirstate paths always use '/', even on Windows
    wfile = util.pconvert(path)
    # Diff against the chosen patch parent when one is set, otherwise
    # let setContext fall back to its own default.
    pctx = self.pctx and self.pctx.p1() or None
    self.fileview.setContext(self.repo[None], pctx)
    self.fileview.displayFile(wfile, status)
def onCurrentChange(self, index, old):
    'Connected to treeview "currentChanged" signal'
    row = index.model().getRow(index)
    if row is None:
        # Not a file row (e.g. selection cleared) — nothing to display.
        return
    path, status, mst, upath, ext, sz = row
    # Normalize to '/'-separated repo path before display.
    wfile = util.pconvert(path)
    pctx = self.pctx and self.pctx.p1() or None
    self.fileview.setContext(self.repo[None], pctx)
    self.fileview.displayFile(wfile, status)
def shortreponame(ui):
    """Return web.name, but only when set in the repository's own hgrc."""
    name = ui.config('web', 'name')
    if name:
        # configsource yields "path:line"; a per-repo setting lives in
        # <repo>/.hg/hgrc, anything else is a global default.
        src = ui.configsource('web', 'name')
        if '/.hg/hgrc:' in util.pconvert(src):
            return name
        # global web.name will set the same name to all repositories
        ui.debug('ignoring global web.name defined at %s\n' % src)
    return None
def refresh(self):
    """Reload hgwebdir configuration and rebuild the repository list.

    Skipped when called within self.refreshinterval seconds of the
    previous refresh.  self.conf may be a config file path, a list/tuple
    of (name, path) pairs, or a dict of the same.
    """
    if self.lastrefresh + self.refreshinterval > time.time():
        return
    if self.baseui:
        u = self.baseui.copy()
    else:
        u = ui.ui()
        u.setconfig('ui', 'report_untrusted', 'off')
        u.setconfig('ui', 'interactive', 'off')

    if not isinstance(self.conf, (dict, list, tuple)):
        # Config file: remap its [paths] into a private section so it
        # does not clash with the ui's own [paths].
        map = {'paths': 'hgweb-paths'}
        if not os.path.exists(self.conf):
            raise util.Abort(_('config file %s not found!') % self.conf)
        u.readconfig(self.conf, remap=map, trust=True)
        paths = u.configitems('hgweb-paths')
    elif isinstance(self.conf, (list, tuple)):
        paths = self.conf
    elif isinstance(self.conf, dict):
        paths = self.conf.items()

    repos = findrepos(paths)
    # Expand [collections]: serve every repo below each root under its
    # prefix-relative name.
    for prefix, root in u.configitems('collections'):
        prefix = util.pconvert(prefix)
        for path in util.walkrepos(root, followsym=True):
            repo = os.path.normpath(path)
            name = util.pconvert(repo)
            if name.startswith(prefix):
                name = name[len(prefix):]
            repos.append((name.lstrip('/'), repo))

    self.repos = repos
    self.ui = u
    encoding.encoding = self.ui.config('web', 'encoding',
                                       encoding.encoding)
    self.style = self.ui.config('web', 'style', 'paper')
    self.templatepath = self.ui.config('web', 'templates', None)
    self.stripecount = self.ui.config('web', 'stripes', 1)
    if self.stripecount:
        self.stripecount = int(self.stripecount)
    self._baseurl = self.ui.config('web', 'baseurl')
    self.lastrefresh = time.time()
def refresh(self):
    """Rescan config and repositories, at most once per refresh interval.

    Accepts self.conf as a config file path, a list/tuple of
    (name, path) pairs, or a dict; rebuilds self.repos and the cached
    web.* display settings.
    """
    if self.lastrefresh + self.refreshinterval > time.time():
        return
    if self.baseui:
        u = self.baseui.copy()
    else:
        u = ui.ui()
        u.setconfig('ui', 'report_untrusted', 'off')
        u.setconfig('ui', 'interactive', 'off')

    if not isinstance(self.conf, (dict, list, tuple)):
        # Read (name, path) pairs from the config file's [paths] section,
        # remapped to avoid clashing with the ui's own [paths].
        map = {'paths': 'hgweb-paths'}
        if not os.path.exists(self.conf):
            raise util.Abort(_('config file %s not found!') % self.conf)
        u.readconfig(self.conf, remap=map, trust=True)
        paths = u.configitems('hgweb-paths')
    elif isinstance(self.conf, (list, tuple)):
        paths = self.conf
    elif isinstance(self.conf, dict):
        paths = self.conf.items()

    repos = findrepos(paths)
    for prefix, root in u.configitems('collections'):
        prefix = util.pconvert(prefix)
        for path in util.walkrepos(root, followsym=True):
            repo = os.path.normpath(path)
            name = util.pconvert(repo)
            if name.startswith(prefix):
                name = name[len(prefix):]
            repos.append((name.lstrip('/'), repo))

    self.repos = repos
    self.ui = u
    encoding.encoding = self.ui.config('web', 'encoding',
                                       encoding.encoding)
    self.style = self.ui.config('web', 'style', 'paper')
    self.templatepath = self.ui.config('web', 'templates', None)
    self.stripecount = self.ui.config('web', 'stripes', 1)
    if self.stripecount:
        self.stripecount = int(self.stripecount)
    self._baseurl = self.ui.config('web', 'baseurl')
    self.lastrefresh = time.time()
def shortreponame(ui):
    """Return the per-repository web.name setting, ignoring global ones."""
    name = ui.config('web', 'name')
    if not name:
        return None
    # "path:line" of the definition; only honor it when it comes from the
    # repository's own .hg/hgrc.
    src = ui.configsource('web', 'name')
    if '/.hg/hgrc:' in util.pconvert(src):
        return name
    # global web.name will set the same name to all repositories
    ui.debug('ignoring global web.name defined at %s\n' % src)
    return None
def addrepocontentstotree(roote, ctx, toproot=''):
    """Recursively add ctx's subrepos and files under tree element *roote*.

    Closes over self (_statusfilter, _subinfo) from the enclosing scope.
    toproot is the '/'-separated path of ctx's repo relative to the
    topmost repo ('' for the top).  Returns roote.
    """
    subpaths = ctx.substate.keys()
    for path in subpaths:
        # Subrepos are only shown when 'S' is in the status filter.
        if not 'S' in self._statusfilter:
            break
        # Create (or find) one tree element per path component.
        e = roote
        pathelements = hglib.tounicode(path).split('/')
        for p in pathelements[:-1]:
            if not p in e:
                e.addchild(p)
            e = e[p]
        p = pathelements[-1]
        if not p in e:
            e.addchild(p)
        e = e[p]
        e.setstatus('S')

        # If the subrepo exists in the working directory
        # and it is a mercurial subrepo,
        # add the files that it contains to the tree as well, according
        # to the status filter
        abspath = os.path.join(ctx._repo.root, path)
        if os.path.isdir(abspath):
            # Add subrepo files to the tree
            substate = ctx.substate[path]
            # Add the subrepo info to the _subinfo dictionary:
            # The value is the subrepo context, while the key is
            # the path of the subrepo relative to the topmost repo
            if toproot:
                # Note that we cannot use os.path.join() because we
                # need path items to be separated by "/"
                toprelpath = '/'.join([toproot, path])
            else:
                toprelpath = path
            toprelpath = util.pconvert(toprelpath)
            self._subinfo[toprelpath] = \
                {'substate': substate, 'ctx': None}
            srev = substate[1]
            sub = ctx.sub(path)
            if srev and isinstance(sub, hgsubrepo):
                srepo = sub._repo
                if srev in srepo:
                    sctx = srepo[srev]
                    self._subinfo[toprelpath]['ctx'] = sctx
                    # Add the subrepo contents to the tree
                    e = addrepocontentstotree(e, sctx, toprelpath)
    # Add regular files to the tree
    status, uncleanpaths, files = getctxtreeinfo(ctx)
    addfilestotree(roote, files, status, uncleanpaths)
    return roote
def addrepocontentstotree(roote, ctx, toproot=''):
    """Populate tree element *roote* with ctx's subrepos and files.

    Recurses into mercurial subrepos present in the working directory,
    recording each one in self._subinfo keyed by its '/'-separated path
    relative to the topmost repo.  Returns roote.
    """
    subpaths = ctx.substate.keys()
    for path in subpaths:
        # Stop adding subrepos entirely when filtered out.
        if not 'S' in self._statusfilter:
            break
        e = roote
        pathelements = hglib.tounicode(path).split('/')
        for p in pathelements[:-1]:
            if not p in e:
                e.addchild(p)
            e = e[p]
        p = pathelements[-1]
        if not p in e:
            e.addchild(p)
        e = e[p]
        e.setstatus('S')

        # If the subrepo exists in the working directory
        # and it is a mercurial subrepo,
        # add the files that it contains to the tree as well, according
        # to the status filter
        abspath = os.path.join(ctx._repo.root, path)
        if os.path.isdir(abspath):
            # Add subrepo files to the tree
            substate = ctx.substate[path]
            # Add the subrepo info to the _subinfo dictionary:
            # The value is the subrepo context, while the key is
            # the path of the subrepo relative to the topmost repo
            if toproot:
                # Note that we cannot use os.path.join() because we
                # need path items to be separated by "/"
                toprelpath = '/'.join([toproot, path])
            else:
                toprelpath = path
            toprelpath = util.pconvert(toprelpath)
            self._subinfo[toprelpath] = \
                {'substate': substate, 'ctx': None}
            srev = substate[1]
            sub = ctx.sub(path)
            if srev and isinstance(sub, hgsubrepo):
                srepo = sub._repo
                if srev in srepo:
                    sctx = srepo[srev]
                    self._subinfo[toprelpath]['ctx'] = sctx
                    # Add the subrepo contents to the tree
                    e = addrepocontentstotree(e, sctx, toprelpath)
    # Add regular files to the tree
    status, uncleanpaths, files = getctxtreeinfo(ctx)
    addfilestotree(roote, files, status, uncleanpaths)
    return roote
def standin(filename):
    '''Return the repo-relative path to the standin for the specified big
    file.'''
    # Notes:
    # 1) Most callers want an absolute path, but _createstandin() needs
    #    it repo-relative so lfadd() can pass it to repoadd().  So the
    #    caller is responsible for repo.wjoin()-ing when needed.
    # 2) Join with '/' because that's what dirstate always uses, even on
    #    Windows; convert any local separators first in case the name
    #    came from an external source (like the command line).
    return '/'.join((shortname, util.pconvert(filename)))
def standin(filename):
    '''Return the repo-relative path to the standin for the specified big
    file.'''
    # Notes:
    # 1) Callers that need an absolute path must repo.wjoin() the result;
    #    _createstandin() relies on the repo-relative form for repoadd().
    # 2) The separator is always '/', matching dirstate on every
    #    platform; normalize first since the name may come from the
    #    command line.
    normalized = util.pconvert(filename)
    return '/'.join((shortname, normalized))
def web_path(path, config):
    '''strip leading slashes from local path, turn into web-safe path.'''
    # Normalize to '/'-separated form, then drop config.web_strip leading
    # components (or stop early when none remain).
    result = util.pconvert(path)
    remaining = config.web_strip
    while remaining > 0:
        head, sep, tail = result.partition('/')
        if not sep:
            break
        result = tail
        remaining -= 1
    return result
def standin(filename):
    '''Return the repo-relative path to the standin for the specified big
    file.'''
    # Notes:
    # 1) Some callers want an absolute path, but for instance
    #    addlargefiles needs it repo-relative so it can be passed to
    #    repo[None].add() — so leave wjoin()-ing to the caller.
    # 2) The separator is '/', matching dirstate on every platform;
    #    normalize any local separators first since the name may come
    #    from an external source (like the command line).
    return ''.join((shortnameslash, util.pconvert(filename)))
def standin(filename):
    '''Return the repo-relative path to the standin for the specified big
    file.'''
    # Notes:
    # 1) Kept repo-relative on purpose (addlargefiles feeds it to
    #    repo[None].add()); callers wanting an absolute path must
    #    repo.wjoin() it themselves.
    # 2) dirstate always uses '/' — convert local separators up front in
    #    case the name came from the command line.
    converted = util.pconvert(filename)
    return '%s%s' % (shortnameslash, converted)
def _ignore_file(self, stat, file):
    """Append a glob pattern for *file* to the repo's .hgignore.

    On write failure the user is shown a prompt instead of an exception;
    the status view is reloaded afterwards.  Always returns True.
    """
    ignore = open(self.repo.wjoin('.hgignore'), 'a')
    try:
        try:
            # Store the '/'-normalized path as a glob pattern.
            ignore.write('glob:' + util.pconvert(file) + '\n')
        except IOError:
            Prompt('Ignore Failed', 'Could not update .hgignore',
                   self).run()
    finally:
        ignore.close()
    self.reload_status()
    return True
def _listbookmarks(self, pattern):
    """Yield (bookmark, node) pairs whose names match *pattern* (bytes).

    A trailing ``*`` is translated into a ``re:`` prefix match before
    being handed to stringutil.stringmatcher.  Bookmark names are the
    '/'-separated file paths below self._bookmarkmap.
    """
    if pattern.endswith(b'*'):
        pattern = b're:^' + pattern[:-1] + b'.*'
    kind, pat, matcher = stringutil.stringmatcher(pattern)
    # Length of the map directory prefix plus its trailing separator.
    prefixlen = len(self._bookmarkmap) + 1
    for dirpath, _, books in self._repo.vfs.walk(self._bookmarkmap):
        for book in books:
            bookmark = os.path.join(dirpath, book)[prefixlen:]
            # Match on '/'-separated names regardless of platform.
            bookmark = util.pconvert(bookmark)
            if not matcher(bookmark):
                continue
            yield bookmark, self._read(os.path.join(dirpath, book))
def strip(self, path):
    '''strip leading slashes from local path, turn into web-safe path.'''
    # Convert to '/'-separated form, then peel off self.stripcount
    # leading components, stopping early once no separator remains.
    result = util.pconvert(path)
    remaining = self.stripcount
    while remaining > 0:
        head, sep, tail = result.partition('/')
        if not sep:
            break
        result = tail
        remaining -= 1
    return result
def webroot(root):
    '''strip leading prefix of repo root and turn into url-safe path.'''
    # bugzilla.strip says how many leading path components to drop.
    remaining = int(self.ui.config('bugzilla', 'strip', 0))
    result = util.pconvert(root)
    while remaining > 0:
        head, sep, tail = result.partition('/')
        if not sep:
            break
        result = tail
        remaining -= 1
    return result
def webroot(root):
    '''strip leading prefix of repo root and turn into url-safe path.'''
    # Drop as many leading components as bugzilla.strip requests,
    # operating on the '/'-normalized root.
    converted = util.pconvert(root)
    count = int(self.ui.config('bugzilla', 'strip', 0))
    while count > 0:
        before, slash, after = converted.partition('/')
        if not slash:
            break
        converted = after
        count -= 1
    return converted
def urlrepos(prefix, roothead, paths):
    """yield url paths and filesystem paths from a list of repo paths

    >>> list(urlrepos('hg', '/opt', ['/opt/r', '/opt/r/r', '/opt']))
    [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')]
    >>> list(urlrepos('', '/opt', ['/opt/r', '/opt/r/r', '/opt']))
    [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')]
    """
    for fspath in paths:
        fspath = os.path.normpath(fspath)
        # Drop the shared root head, normalize separators, then build the
        # URL path and trim leading/trailing slashes.
        tail = util.pconvert(fspath[len(roothead):]).lstrip('/')
        urlpath = (prefix + '/' + tail).strip('/')
        yield urlpath, fspath
def strip(self, path):
    '''strip leading slashes from local path, turn into web-safe path.'''
    # Peel self.stripcount leading '/'-separated components off the
    # normalized path; stop early when nothing is left to strip.
    converted = util.pconvert(path)
    count = self.stripcount
    while count > 0:
        before, slash, after = converted.partition('/')
        if not slash:
            break
        converted = after
        count -= 1
    return converted
def webroot(root):
    """strip leading prefix of repo root and turn into url-safe path."""
    # bugzilla.strip gives the number of leading components to remove.
    remaining = int(self.ui.config("bugzilla", "strip", 0))
    result = util.pconvert(root)
    while remaining > 0:
        head, sep, tail = result.partition("/")
        if not sep:
            break
        result = tail
        remaining -= 1
    return result
def fileData(self, index):
    """Returns the displayable file data at the given index"""
    repo = self._repoagent.rawRepo()
    if not index.isValid():
        return filedata.createNullData(repo)
    path, status, mst, upath, ext, sz = self.rows[index.row()]
    # '/'-separated repo path, as dirstate uses on all platforms.
    wfile = util.pconvert(path)
    ctx = self.workingContext
    # Prefer the explicit patch parent; fall back to ctx's first parent.
    pctx = self._pctx and self._pctx.p1() or ctx.p1()
    if status == 'S':
        # 'S' marks a subrepository entry.
        return filedata.createSubrepoData(ctx, pctx, wfile)
    else:
        return filedata.createFileData(ctx, pctx, wfile, status, None, mst)
def fileData(self, index):
    """Returns the displayable file data at the given index"""
    repo = self._repoagent.rawRepo()
    if not index.isValid():
        # No selection: hand back an empty placeholder.
        return filedata.createNullData(repo)
    path, status, mst, upath, ext, sz = self.rows[index.row()]
    wfile = util.pconvert(path)
    # Working directory context; diff base is the patch parent if set.
    ctx = repo[None]
    pctx = self._pctx and self._pctx.p1() or ctx.p1()
    if status == 'S':
        # Subrepository row.
        return filedata.createSubrepoData(ctx, pctx, wfile)
    else:
        return filedata.createFileData(ctx, pctx, wfile, status, None, mst)
def urlrepos(prefix, roothead, paths):
    """yield url paths and filesystem paths from a list of repo paths

    >>> conv = lambda seq: [(v, util.pconvert(p)) for v,p in seq]
    >>> conv(urlrepos('hg', '/opt', ['/opt/r', '/opt/r/r', '/opt']))
    [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')]
    >>> conv(urlrepos('', '/opt', ['/opt/r', '/opt/r/r', '/opt']))
    [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')]
    """
    for fspath in paths:
        fspath = os.path.normpath(fspath)
        # Strip the common root head, force '/' separators, then glue the
        # prefix on and trim stray slashes at either end.
        tail = util.pconvert(fspath[len(roothead):]).lstrip('/')
        urlpath = (prefix + '/' + tail).strip('/')
        yield urlpath, fspath
def function(tree, destpath, opts):
    """Per-tree worker: run `hg outgoing` for *tree* against *destpath*.

    When destpath is empty it is derived from the forest's top-level
    path alias plus this tree's relative path.  Closes over forest,
    dest and ui from the enclosing scope.
    """
    if not destpath:
        destpath = forest.top().getpath(dest)
        if destpath:
            # Append this tree's path relative to the forest top,
            # '/'-separated as required for URLs.
            rpath = util.pconvert(relpath(forest.top().root, tree.root))
            destpath = '/'.join((destpath, rpath))
        else:
            ui.warn(_("skipped: %s\n") % _("repository %s not found")
                    % dest[0])
            return
    try:
        commands.outgoing(ui, tree.repo, destpath, **opts)
    except Exception, err:
        # Best-effort per tree: report and continue with the next one.
        ui.warn(_("skipped: %s\n") % err)
def strip(self, path):
    """strip leading slashes from local path, turn into web-safe path."""
    result = util.pconvert(path)
    remaining = self.stripcount
    # A negative strip count means "strip everything".
    if remaining < 0:
        return ""
    while remaining > 0:
        head, sep, tail = result.partition("/")
        if not sep:
            break
        result = tail
        remaining -= 1
    return result
def _walkstreamfiles(orig, repo):
    """Wrapped store.walkstreamfiles honoring shallow-clone semantics.

    Yields (name, encoded name, size) entries.  When the client is
    shallow, filelog revlogs are withheld (unless matched by the shallow
    pattern) and only tree/top files are streamed; a shallow server
    refuses to stream to a full client.
    """
    if state.shallowremote:
        # if we are shallow ourselves, stream our local commits
        if shallowrepo.requirement in repo.requirements:
            striplen = len(repo.store.path) + 1
            readdir = repo.store.rawvfs.readdir
            # Iterative walk of the store's data/ directory.
            visit = [os.path.join(repo.store.path, 'data')]
            while visit:
                p = visit.pop()
                for f, kind, st in readdir(p, stat=True):
                    fp = p + '/' + f
                    if kind == stat.S_IFREG:
                        # Skip revlog files (.i index / .d data).
                        if not fp.endswith('.i') and not fp.endswith('.d'):
                            n = util.pconvert(fp[striplen:])
                            yield (store.decodedir(n), n, st.st_size)
                    if kind == stat.S_IFDIR:
                        visit.append(fp)

        shallowtrees = repo.ui.configbool('remotefilelog', 'shallowtrees',
                                          False)
        if 'treemanifest' in repo.requirements and not shallowtrees:
            # Tree manifest revlogs live under meta/ and are still sent.
            for (u, e, s) in repo.store.datafiles():
                if (u.startswith('meta/') and
                        (u.endswith('.i') or u.endswith('.d'))):
                    yield (u, e, s)

        # Return .d and .i files that do not match the shallow pattern
        match = state.match
        if match and not match.always():
            for (u, e, s) in repo.store.datafiles():
                f = u[5:-2]  # trim data/... and .i/.d
                if not state.match(f):
                    yield (u, e, s)

        for x in repo.store.topfiles():
            if shallowtrees and x[0][:15] == '00manifesttree.':
                continue
            if state.noflatmf and x[0][:11] == '00manifest.':
                continue
            yield x
    elif shallowrepo.requirement in repo.requirements:
        # don't allow cloning from a shallow repo to a full repo
        # since it would require fetching every version of every
        # file in order to create the revlogs.
        raise error.Abort(_("Cannot clone from a shallow repo "
                            "to a full repo."))
    else:
        for x in orig(repo):
            yield x
def function(tree, srcpath, opts):
    """Per-tree worker: run `hg incoming` for *tree* against *srcpath*.

    An empty srcpath is derived from the forest top's path alias plus
    this tree's relative path.  Closes over forest, source and ui from
    the enclosing scope.
    """
    if not srcpath:
        srcpath = forest.top().getpath(source)
        if srcpath:
            # '/'-separated tree path relative to the forest top.
            rpath = util.pconvert(relpath(forest.top().root, tree.root))
            srcpath = '/'.join((srcpath, rpath))
        else:
            ui.warn(_("skipped: %s\n") % _("repository %s not found")
                    % source[0])
            return
    try:
        commands.incoming(ui, tree.repo, srcpath, **opts)
    except Exception, err:
        # Best-effort per tree: report and continue with the next one.
        ui.warn(_("skipped: %s\n") % err)
def _walkstreamfiles(orig, repo, matcher=None):
    """Wrapped store.walkstreamfiles honoring shallow-clone semantics.

    Yields (filetype, name, encoded name, size) entries.  A shallow
    server withholds filelog revlogs (unless matched by the shallow
    pattern); a shallow repo refuses to stream to a full clone.
    """
    if state.shallowremote:
        # if we are shallow ourselves, stream our local commits
        if shallowutil.isenabled(repo):
            striplen = len(repo.store.path) + 1
            readdir = repo.store.rawvfs.readdir
            # Iterative walk of the store's data/ directory.
            visit = [os.path.join(repo.store.path, b'data')]
            while visit:
                p = visit.pop()
                for f, kind, st in readdir(p, stat=True):
                    fp = p + b'/' + f
                    if kind == stat.S_IFREG:
                        # Skip revlog files (.i index / .d data).
                        if not fp.endswith(b'.i') and not fp.endswith(
                                b'.d'):
                            n = util.pconvert(fp[striplen:])
                            d = store.decodedir(n)
                            t = store.FILETYPE_OTHER
                            yield (t, d, n, st.st_size)
                    if kind == stat.S_IFDIR:
                        visit.append(fp)

        if scmutil.istreemanifest(repo):
            # Tree manifest revlogs live under meta/ and are still sent.
            for (t, u, e, s) in repo.store.datafiles():
                if u.startswith(b'meta/') and (u.endswith(b'.i') or
                                               u.endswith(b'.d')):
                    yield (t, u, e, s)

        # Return .d and .i files that do not match the shallow pattern
        match = state.match
        if match and not match.always():
            for (t, u, e, s) in repo.store.datafiles():
                f = u[5:-2]  # trim data/... and .i/.d
                if not state.match(f):
                    yield (t, u, e, s)

        for x in repo.store.topfiles():
            if state.noflatmf and x[1][:11] == b'00manifest.':
                continue
            yield x
    elif shallowutil.isenabled(repo):
        # don't allow cloning from a shallow repo to a full repo
        # since it would require fetching every version of every
        # file in order to create the revlogs.
        raise error.Abort(
            _(b"Cannot clone from a shallow repo to a full repo."))
    else:
        for x in orig(repo, matcher):
            yield x
def function(tree, srcpath, opts):
    # Per-tree worker: fetch (pull + merge) changes into one tree of the
    # forest.  NOTE(review): Python 2 syntax (``except Exception, err``);
    # relies on closure variables ``ui``, ``forest`` and ``source`` from
    # the enclosing command function -- confirm against the full file.
    if not srcpath:
        # No explicit source: derive it from the top-level repo's source
        # plus this tree's path relative to the forest root.
        srcpath = forest.top().getpath(source)
        if srcpath:
            rpath = util.pconvert(relpath(forest.top().root, tree.root))
            srcpath = '/'.join((srcpath, rpath))
        else:
            # chained %: first fills "skipped: %s" with the message
            # template, then fills the remaining %s with source[0]
            ui.warn(_("skipped: %s\n") %
                    _("repository %s not found") % source[0])
            return
    try:
        fetch.fetch(ui, tree.getrepo(ui), srcpath, **opts)
    except Exception, err:
        # best-effort: report and continue with the other trees
        ui.warn(_("skipped: %s\n") % err)
    # NOTE(review): explicit __del__ call looks like a forced release /
    # rollback of any pending transaction -- fragile; confirm intent.
    tree.repo.transaction().__del__()
def issvnurl(ui, url):
    """Return True if url (or one of its parent paths) is a Subversion
    repository, probing with the protocol-specific checker registered in
    ``protomap``."""
    try:
        proto, path = url.split('://', 1)
        if proto == 'file':
            # translate the URL path portion back to a filesystem path
            path = urllib.url2pathname(path)
    except ValueError:
        # no "scheme://" prefix: treat the whole url as a local path
        proto = 'file'
        path = os.path.abspath(url)
    if proto == 'file':
        path = util.pconvert(path)
    probe = protomap.get(proto, lambda *args: False)
    # walk up the path one component at a time until a probe succeeds
    candidate = path
    while '/' in candidate:
        if probe(ui, candidate, proto):
            return True
        candidate, _sep, _tail = candidate.rpartition('/')
    return False
def snapshot(ui, repo, files, node, tmproot, listsubrepos):
    """Write *files* as of *node* into a fresh directory under *tmproot*.

    Without a real on-disk snapshot, -I/-X does not work and recursive
    diff in tools like kdiff3 and meld displays too many files.

    Returns (dirname, fnsandstat); fnsandstat holds
    (snapshot_path, working_path, lstat_result) tuples for
    working-directory snapshots (node is None), used by callers to detect
    files the external tool modified.
    """
    name = os.path.basename(repo.root)
    if name == b"":
        name = b"root"
    if node is not None:
        name = b'%s.%s' % (name, short(node))
    base = os.path.join(tmproot, name)
    os.mkdir(base)

    if node is None:
        ui.note(_(b'making snapshot of %d files from working directory\n')
                % (len(files)))
    else:
        ui.note(_(b'making snapshot of %d files from rev %s\n')
                % (len(files), short(node)))

    snapshotstats = []
    if files:
        # keep archive metadata (.hg_archival.txt) out of the snapshot
        repo.ui.setconfig(b"ui", b"archivemeta", False)
        archival.archive(repo, base, node, b'files',
                         match=scmutil.matchfiles(repo, files),
                         subrepos=listsubrepos)
        for fn in sorted(files):
            wfn = util.pconvert(fn)
            ui.note(b' %s\n' % wfn)
            if node is None:
                dest = os.path.join(base, wfn)
                snapshotstats.append(
                    (dest, repo.wjoin(fn), os.lstat(dest)))
    return name, snapshotstats
def clientfetch(repo, paths, lastnodemap=None, peer=None):
    """download annotate cache from the server for paths

    ``lastnodemap`` optionally maps path -> last node we already have, so
    the server can send incremental data.  When no ``peer`` is given, one
    is opened via ``annotatepeer`` and the call recurses with it.
    Downloaded entries are written beneath ``repo.vfs`` (i.e. .hg/).
    """
    if not paths:
        return
    if peer is None:
        with annotatepeer(repo) as peer:
            return clientfetch(repo, paths, lastnodemap, peer)
    if lastnodemap is None:
        lastnodemap = {}
    ui = repo.ui
    results = []
    with peer.commandexecutor() as batcher:
        ui.debug(b'fastannotate: requesting %d files\n' % len(paths))
        # queue all requests first so the executor can batch them
        for p in paths:
            results.append(
                batcher.callcommand(
                    b'getannotate',
                    {
                        b'path': p,
                        b'lastnode': lastnodemap.get(p)
                    },
                ))
        for result in results:
            r = result.result()
            # TODO: pconvert these paths on the server?
            r = {util.pconvert(p): v for p, v in pycompat.iteritems(r)}
            for path in sorted(r):
                # ignore malicious paths
                # (must stay under fastannotate/ and contain no "..")
                if not path.startswith(b'fastannotate/') or b'/../' in (
                        path + b'/'):
                    ui.debug(b'fastannotate: ignored malicious path %s\n'
                             % path)
                    continue
                content = r[path]
                if ui.debugflag:
                    ui.debug(b'fastannotate: writing %d bytes to %s\n'
                             % (len(content), path))
                repo.vfs.makedirs(os.path.dirname(path))
                with repo.vfs(path, b'wb') as f:
                    f.write(content)
def issvnurl(ui, url):
    """Return True if url (or one of its parent paths) is a Subversion
    repository, probing with the protocol-specific checker from
    ``protomap``."""
    try:
        proto, path = url.split('://', 1)
        if proto == 'file':
            # On Windows a file URL may look like /C%3a/path: a leading
            # slash, a drive letter, then a percent-encoded colon.
            # Rewrite it to C:/path before url2pathname.
            if (os.name == 'nt' and path[:1] == '/' and
                    path[1:2].isalpha() and
                    path[2:6].lower() == '%3a/'):
                path = path[:2] + ':/' + path[6:]
            path = urllib.url2pathname(path)
    except ValueError:
        # no "scheme://" prefix: treat the whole url as a local path
        proto = 'file'
        path = os.path.abspath(url)
    if proto == 'file':
        path = util.pconvert(path)
    check = protomap.get(proto, lambda *args: False)
    # walk up the path one component at a time until a check succeeds
    while '/' in path:
        if check(ui, path, proto):
            return True
        path = path.rsplit('/', 1)[0]
    return False
def snapshot(ui, repo, files, node, tmproot):
    '''Materialize *files* at revision *node* (or the working directory
    when node is None) into a new directory under *tmproot*.

    Without a real snapshot, -I/-X does not work and recursive diff in
    tools like kdiff3 and meld displays too many files.

    Returns (dirname, fns_and_mtime); the mtime list lets callers detect
    files the external tool modified.'''
    snapdir = os.path.basename(repo.root) or "root"
    if node is not None:
        snapdir = '%s.%s' % (snapdir, short(node))
    base = os.path.join(tmproot, snapdir)
    os.mkdir(base)
    if node is None:
        ui.note(_('making snapshot of %d files from working directory\n')
                % (len(files)))
    else:
        ui.note(_('making snapshot of %d files from rev %s\n')
                % (len(files), short(node)))
    wopener = scmutil.opener(base)
    ctx = repo[node]
    fns_and_mtime = []
    for fn in sorted(files):
        wfn = util.pconvert(fn)
        if wfn not in ctx:
            # File doesn't exist; could be a bogus modify
            continue
        ui.note(' %s\n' % wfn)
        fctx = ctx[wfn]
        data = repo.wwritedata(wfn, fctx.data())
        dest = os.path.join(base, wfn)
        flags = fctx.flags()
        if 'l' in flags:
            wopener.symlink(data, wfn)
        else:
            wopener.write(wfn, data)
        if 'x' in flags:
            util.setflags(dest, False, True)
        if node is None:
            # remember mtimes so the caller can copy back edited files
            fns_and_mtime.append((dest, repo.wjoin(fn),
                                  os.lstat(dest).st_mtime))
    return snapdir, fns_and_mtime
def classifycfgpath(path):
    """assign sort order to configuration file in path

    >>> classifycfgpath("/etc/mercurial/hgrc")
    1
    >>> classifycfgpath("repo/.hg/projrc")
    2
    >>> classifycfgpath(util.expandpath("~/.hgrc"))
    3
    >>> classifycfgpath("repo/.hg/hgrc")
    4
    """
    # keep only the file part of a "path:..." annotation, if any
    stripped = path.rsplit(":", 1)[0]
    if stripped in classifycfgpath.systemrcpath:
        return SYSTEMRC
    elif util.pconvert(stripped).endswith(".hg/projrc"):
        return PROJRC
    elif stripped in userrcpath():
        return USERRC
    else:
        # .hg/hgrc, file in $HGRCPATH, or an included file
        return HGRC
def findrepos(paths):
    """Expand (prefix, root) configuration pairs into a flat list of
    (name, path) repositories.

    "foo = /bar/*" makes every subrepo of /bar/ to be mounted as
    foo/subrepo, and "foo = /bar/**" also recurses into the
    subdirectories (remember to use it without working dir).
    """
    globmap = {'*': False, '**': True}
    repos = []
    for prefix, root in cleannames(paths):
        roothead, roottail = os.path.split(root)
        if roottail not in globmap:
            # plain path, no glob suffix: mount it directly
            repos.append((prefix, root))
            continue
        recurse = globmap[roottail]
        roothead = os.path.normpath(roothead)
        for path in util.walkrepos(roothead, followsym=True,
                                   recurse=recurse):
            path = os.path.normpath(path)
            name = util.pconvert(path[len(roothead):]).strip('/')
            if prefix:
                name = prefix + '/' + name
            repos.append((name, path))
    return repos
def snapshot(repo, files, ctx):
    '''snapshot files as of some revision

    Writes *files* from changectx *ctx* into a per-diff temp directory
    and returns (base, fns_and_mtime); the mtime list is only filled for
    working-directory snapshots (ctx.rev() is None) so callers can detect
    files modified by an external tool.
    '''
    # NOTE(review): _diffCount is module state incremented elsewhere;
    # it keeps successive diff snapshots in distinct directories.
    dirname = os.path.basename(repo.root) or 'root'
    dirname += '.%d' % _diffCount
    if ctx.rev() is not None:
        dirname += '.%d' % ctx.rev()
    base = os.path.join(qtlib.gettempdir(), dirname)
    fns_and_mtime = []
    if not os.path.exists(base):
        os.makedirs(base)
    for fn in files:
        wfn = util.pconvert(fn)
        if not wfn in ctx:
            # File doesn't exist; could be a bogus modify
            continue
        dest = os.path.join(base, wfn)
        if os.path.exists(dest):
            # File has already been snapshot
            continue
        destdir = os.path.dirname(dest)
        try:
            if not os.path.isdir(destdir):
                os.makedirs(destdir)
            fctx = ctx[wfn]
            data = repo.wwritedata(wfn, fctx.data())
            f = open(dest, 'wb')
            f.write(data)
            f.close()
            if 'x' in fctx.flags():
                util.setflags(dest, False, True)
            if ctx.rev() is None:
                fns_and_mtime.append((dest, repo.wjoin(fn),
                                      os.lstat(dest).st_mtime))
            else:
                # Make file read/only, to indicate it's static (archival) nature
                os.chmod(dest, stat.S_IREAD)
        except EnvironmentError:
            # best-effort: skip files we cannot write and keep going
            pass
    return base, fns_and_mtime
def _walkstreamfiles(orig, repo):
    """Wrap the store's stream-clone file walker for shallow repositories.

    Yields (decodedname, encodedname, size) tuples like ``orig``.  When
    serving a shallow client, stream loose remotefilelog data files, the
    revlogs that do not match the shallow pattern, and the top-level
    store files; refuse to serve a full clone from a shallow repo;
    otherwise defer to ``orig``.
    """
    if state.shallowremote:
        # if we are shallow ourselves, stream our local commits
        if shallowrepo.requirement in repo.requirements:
            striplen = len(repo.store.path) + 1
            readdir = repo.store.rawvfs.readdir
            visit = [os.path.join(repo.store.path, 'data')]
            while visit:
                p = visit.pop()
                for f, kind, st in readdir(p, stat=True):
                    fp = p + '/' + f
                    if kind == stat.S_IFREG:
                        # skip revlog files; only ship loose data files
                        if not fp.endswith('.i') and not fp.endswith('.d'):
                            n = util.pconvert(fp[striplen:])
                            yield (store.decodedir(n), n, st.st_size)
                    if kind == stat.S_IFDIR:
                        visit.append(fp)

        # Return .d and .i files that do not match the shallow pattern.
        # BUGFIX: the old code did
        #   match = state.match or match.always(repo.root, '')
        # which references the local name ``match`` on its own right-hand
        # side and raises UnboundLocalError whenever state.match is falsy;
        # the loop then also called state.match(f) (None in that case).
        # An always() matcher would yield nothing here anyway, so skip the
        # loop entirely unless a restricting matcher is configured --
        # matching the corrected form used elsewhere in this file.
        match = state.match
        if match and not match.always():
            for (u, e, s) in repo.store.datafiles():
                f = u[5:-2]  # trim data/... and .i/.d
                if not match(f):
                    yield (u, e, s)

        for x in repo.store.topfiles():
            yield x
    elif shallowrepo.requirement in repo.requirements:
        # don't allow cloning from a shallow repo to a full repo
        # since it would require fetching every version of every
        # file in order to create the revlogs.
        raise util.Abort(
            _("Cannot clone from a shallow repo " +
              "to a full repo."))
    else:
        for x in orig(repo):
            yield x
def snapshot_node(ui, repo, files, node, tmproot):
    '''Dump *files* as of revision *node* into a new directory under
    *tmproot* and return the directory name.  Files absent from the
    revision's manifest are silently skipped.'''
    manifest = repo.changectx(node).manifest()
    snapdir = os.path.basename(repo.root)
    if snapdir == "":
        snapdir = "root"
    snapdir = '%s.%s' % (snapdir, short(node))
    base = os.path.join(tmproot, snapdir)
    os.mkdir(base)
    ui.note(_('making snapshot of %d files from rev %s\n')
            % (len(files), short(node)))
    for fn in files:
        if fn not in manifest:
            # skipping new file after a merge ?
            continue
        wfn = util.pconvert(fn)
        ui.note(' %s\n' % wfn)
        dest = os.path.join(base, wfn)
        parent = os.path.dirname(dest)
        if not os.path.isdir(parent):
            os.makedirs(parent)
        data = repo.wwritedata(wfn, repo.file(wfn).read(manifest[wfn]))
        fobj = open(dest, 'wb')
        fobj.write(data)
        fobj.close()
    return snapdir
def _walk(self, relpath, recurse, allfiles=False):
    """modifies walk to return non .i/.d files so streaming clones can send
    remotefilelog store/data files

    With allfiles=True every regular file is reported; otherwise only
    files ending in .d/.i are.  Returns a sorted list of
    (decodedname, encodedname, size) tuples.
    """
    root = self.path
    if relpath:
        root += '/' + relpath
    striplen = len(self.path) + 1
    entries = []
    if self.rawvfs.isdir(root):
        readdir = self.rawvfs.readdir
        pending = [root]
        while pending:
            cur = pending.pop()
            for name, kind, st in readdir(cur, stat=True):
                full = cur + '/' + name
                if kind == stat.S_IFDIR:
                    if recurse:
                        pending.append(full)
                elif kind == stat.S_IFREG:
                    if allfiles or name[-2:] in ('.d', '.i'):
                        rel = util.pconvert(full[striplen:])
                        entries.append(
                            (storemod.decodedir(rel), rel, st.st_size))
    entries.sort()
    return entries
def unixpath(path):
    '''Return a version of path normalized for use with the lfdirstate:
    os.path.normpath'ed and converted to forward slashes.'''
    normalized = os.path.normpath(path)
    return util.pconvert(normalized)