def rootedmatch(repo, ctx, patterns):
    """match patterns against the root of a repository"""
    # rework of basectx.match to ignore current working directory
    # Only a case insensitive filesystem needs magic to translate user input
    # to actual case in the filesystem.
    icasefs = not util.fscasesensitive(repo.root)
    if util.safehasattr(match, 'icasefsmatcher'):  # < hg 4.3
        if icasefs:
            return match.icasefsmatcher(repo.root, repo.root, patterns,
                                        default='glob', auditor=repo.auditor,
                                        ctx=ctx)
        else:
            return match.match(repo.root, repo.root, patterns,
                               default='glob', auditor=repo.auditor, ctx=ctx)
    else:
        return match.match(repo.root, repo.root, patterns, default='glob',
                           auditor=repo.auditor, ctx=ctx, icasefs=icasefs)
def new_commit(orig_commit, ui, repo, *pats, **opts):
    if opts['message'] or opts['logfile'] or opts.get('amend'):
        # don't act if user already specified a message, or for amend commits
        return orig_commit(ui, repo, *pats, **opts)
    # check if changelog changed
    logname = ui.config('changelog', 'filename', 'CHANGES')
    if pats:
        match = matchmod.match(repo.root, repo.getcwd(), pats,
                               opts.get('include'), opts.get('exclude'))
        if logname not in match:
            # changelog is not mentioned
            return orig_commit(ui, repo, *pats, **opts)
    logmatch = matchmod.match(repo.root, repo.getcwd(), [logname])
    logmatch.bad = lambda f, msg: None  # don't complain if file is missing
    # get diff of changelog
    log = []
    for chunk in patch.diff(repo, None, None, match=logmatch):
        for line in chunk.splitlines():
            # naive: all added lines are the changelog
            if line.startswith('+') and not line.startswith('+++'):
                log.append(line[1:].rstrip().expandtabs())
    log = normalize_log(log)
    # always let the user edit the message
    opts['force_editor'] = True
    opts['edit'] = True
    opts['message'] = log
    return orig_commit(ui, repo, *pats, **opts)
def gignore(root, files, warn, extrapatterns=None):
    allpats = []
    pats = []
    if ignoremod:
        pats = ignore.readpats(root, files, warn)
        for f, patlist in pats:
            allpats.extend(patlist)
    else:
        allpats.extend(['include:%s' % f for f in files])
    if extrapatterns:
        allpats.extend(extrapatterns)
    if not allpats:
        return util.never
    try:
        ignorefunc = matchmod.match(root, '', [], allpats)
    except util.Abort:
        # re-raise, naming the offending source (ignore file or extra pattern)
        for f, patlist in pats:
            try:
                matchmod.match(root, '', [], patlist)
            except util.Abort, inst:
                raise util.Abort('%s: %s' % (f, inst[0]))
        if extrapatterns:
            try:
                matchmod.match(root, '', [], extrapatterns)
            except util.Abort, inst:
                raise util.Abort('%s: %s' % ('extra patterns', inst[0]))
    return ignorefunc
def __init__(self, ui, root, data):
    self._decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
    self._encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}

    self.cfg = config.config()
    # Our files should not be touched. The pattern must be
    # inserted first to override a '** = native' pattern.
    self.cfg.set('patterns', '.hg*', 'BIN', 'eol')
    # We can then parse the user's patterns.
    self.cfg.parse('.hgeol', data)

    isrepolf = self.cfg.get('repository', 'native') != 'CRLF'
    self._encode['NATIVE'] = isrepolf and 'to-lf' or 'to-crlf'
    iswdlf = ui.config('eol', 'native', pycompat.oslinesep) in ('LF', '\n')
    self._decode['NATIVE'] = iswdlf and 'to-lf' or 'to-crlf'

    include = []
    exclude = []
    self.patterns = []
    for pattern, style in self.cfg.items('patterns'):
        key = style.upper()
        if key == 'BIN':
            exclude.append(pattern)
        else:
            include.append(pattern)
        m = match.match(root, '', [pattern])
        self.patterns.append((pattern, key, m))
    # This will match the files for which we need to care
    # about inconsistent newlines.
    self.match = match.match(root, '', [], include, exclude)
def gignore(root, files, warn, extrapatterns=None):
    allpats = []
    pats = [(f, ['include:%s' % f]) for f in files]
    for f, patlist in pats:
        allpats.extend(patlist)
    if extrapatterns:
        allpats.extend(extrapatterns)
    if not allpats:
        return util.never
    try:
        ignorefunc = matchmod.match(root, '', [], allpats)
    except error.Abort:
        for f, patlist in pats:
            try:
                matchmod.match(root, '', [], patlist)
            except error.Abort, inst:
                # in this case, patlist is ['include: FILE'], and
                # inst[0] should already include FILE
                raise
        if extrapatterns:
            try:
                matchmod.match(root, '', [], extrapatterns)
            except error.Abort, inst:
                raise error.Abort('%s: %s' % ('extra patterns', inst[0]))
    return ignorefunc
def testVisitchildrensetIncludeInclude(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
    m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
    im = matchmod.intersectmatchers(m1, m2)
    self.assertEqual(im.visitchildrenset(b'.'), {b'dir'})
    self.assertEqual(im.visitchildrenset(b'dir'), b'this')
    self.assertEqual(im.visitchildrenset(b'dir/subdir'), set())
    self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
    self.assertEqual(im.visitchildrenset(b'folder'), set())
    self.assertEqual(im.visitchildrenset(b'dir/subdir/z'), set())
    self.assertEqual(im.visitchildrenset(b'dir/subdir/x'), set())
def testVisitdirIncludeInclude(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
    m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
    im = matchmod.intersectmatchers(m1, m2)
    self.assertEqual(im.visitdir(b'.'), True)
    self.assertEqual(im.visitdir(b'dir'), True)
    self.assertFalse(im.visitdir(b'dir/subdir'))
    self.assertFalse(im.visitdir(b'dir/foo'))
    self.assertFalse(im.visitdir(b'folder'))
    self.assertFalse(im.visitdir(b'dir/subdir/z'))
    self.assertFalse(im.visitdir(b'dir/subdir/x'))
def add_glob(self, widget):
    newglob = hglib.fromutf(self.glob_entry.get_text())
    if newglob == '':
        return
    newglob = 'glob:' + newglob
    try:
        match.match(self.repo.root, '', [], [newglob])
    except util.Abort, inst:
        gdialog.Prompt(_('Invalid glob expression'), str(inst),
                       self).run()
        return
def add_regexp(self, widget):
    newregexp = hglib.fromutf(self.regexp_entry.get_text())
    if newregexp == '':
        return
    try:
        match.match(self.repo.root, '', [], ['relre:' + newregexp])
        re.compile(newregexp)
    except (util.Abort, re.error), inst:
        gdialog.Prompt(_('Invalid regexp expression'), str(inst),
                       self).run()
        return
def testVisitchildrensetIncludeInclude4(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
    m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir/z'])
    um = matchmod.unionmatcher([m1, m2])
    self.assertEqual(um.visitchildrenset(b'.'), {b'dir'})
    self.assertEqual(um.visitchildrenset(b'dir'), {b'subdir'})
    self.assertEqual(um.visitchildrenset(b'dir/subdir'), {b'x', b'z'})
    self.assertEqual(um.visitchildrenset(b'dir/foo'), set())
    self.assertEqual(um.visitchildrenset(b'folder'), set())
    self.assertEqual(um.visitchildrenset(b'dir/subdir/z'), b'all')
    self.assertEqual(um.visitchildrenset(b'dir/subdir/x'), b'all')
def testVisitchildrensetIncludeInclude2(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
    m2 = matchmod.match(b'', b'', include=[b'path:folder'])
    um = matchmod.unionmatcher([m1, m2])
    self.assertEqual(um.visitchildrenset(b'.'), {b'folder', b'dir'})
    self.assertEqual(um.visitchildrenset(b'dir'), {b'subdir'})
    self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
    self.assertEqual(um.visitchildrenset(b'dir/foo'), set())
    self.assertEqual(um.visitchildrenset(b'folder'), b'all')
    # OPT: These next two could be 'all' instead of 'this'.
    self.assertEqual(um.visitchildrenset(b'dir/subdir/z'), b'this')
    self.assertEqual(um.visitchildrenset(b'dir/subdir/x'), b'this')
def testVisitdirIncludeInclude2(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
    m2 = matchmod.match(b'', b'', include=[b'path:folder'])
    im = matchmod.intersectmatchers(m1, m2)
    # FIXME: is True correct here?
    self.assertEqual(im.visitdir(b'.'), True)
    self.assertFalse(im.visitdir(b'dir'))
    self.assertFalse(im.visitdir(b'dir/subdir'))
    self.assertFalse(im.visitdir(b'dir/foo'))
    self.assertFalse(im.visitdir(b'folder'))
    self.assertFalse(im.visitdir(b'dir/subdir/z'))
    self.assertFalse(im.visitdir(b'dir/subdir/x'))
def testVisitchildrensetIncludeInclude2(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
    m2 = matchmod.match(b'', b'', include=[b'path:folder'])
    im = matchmod.intersectmatchers(m1, m2)
    # FIXME: is set() correct here?
    self.assertEqual(im.visitchildrenset(b'.'), set())
    self.assertEqual(im.visitchildrenset(b'dir'), set())
    self.assertEqual(im.visitchildrenset(b'dir/subdir'), set())
    self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
    self.assertEqual(im.visitchildrenset(b'folder'), set())
    self.assertEqual(im.visitchildrenset(b'dir/subdir/z'), set())
    self.assertEqual(im.visitchildrenset(b'dir/subdir/x'), set())
def testVisitdirIncludeInclude4(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
    m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir/z'])
    um = matchmod.unionmatcher([m1, m2])
    # OPT: these next three could probably be False as well.
    self.assertEqual(um.visitdir(b'.'), True)
    self.assertEqual(um.visitdir(b'dir'), True)
    self.assertEqual(um.visitdir(b'dir/subdir'), True)
    self.assertFalse(um.visitdir(b'dir/foo'))
    self.assertFalse(um.visitdir(b'folder'))
    self.assertEqual(um.visitdir(b'dir/subdir/z'), b'all')
    self.assertEqual(um.visitdir(b'dir/subdir/x'), b'all')
def testVisitchildrensetIncludeInclude3(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
    m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
    um = matchmod.unionmatcher([m1, m2])
    self.assertEqual(um.visitchildrenset(b'.'), {b'dir'})
    self.assertEqual(um.visitchildrenset(b'dir'), {b'subdir'})
    self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
    self.assertEqual(um.visitchildrenset(b'dir/foo'), set())
    self.assertEqual(um.visitchildrenset(b'folder'), set())
    self.assertEqual(um.visitchildrenset(b'dir/subdir/x'), b'all')
    # OPT: this should probably be 'all' not 'this'.
    self.assertEqual(um.visitchildrenset(b'dir/subdir/z'), b'this')
def testVisitdirIncludeInclude3(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
    m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
    um = matchmod.unionmatcher([m1, m2])
    self.assertEqual(um.visitdir(b'.'), True)
    self.assertEqual(um.visitdir(b'dir'), True)
    self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
    self.assertFalse(um.visitdir(b'dir/foo'))
    self.assertFalse(um.visitdir(b'folder'))
    self.assertEqual(um.visitdir(b'dir/subdir/x'), b'all')
    # OPT: this should probably be 'all' not True.
    self.assertEqual(um.visitdir(b'dir/subdir/z'), True)
def testVisitchildrensetIncludeInclude4(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
    m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir/z'])
    im = matchmod.intersectmatchers(m1, m2)
    # OPT: these next two could probably be set() as well.
    self.assertEqual(im.visitchildrenset(b'.'), {b'dir'})
    self.assertEqual(im.visitchildrenset(b'dir'), {b'subdir'})
    self.assertEqual(im.visitchildrenset(b'dir/subdir'), set())
    self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
    self.assertEqual(im.visitchildrenset(b'folder'), set())
    self.assertEqual(im.visitchildrenset(b'dir/subdir/z'), set())
    self.assertEqual(im.visitchildrenset(b'dir/subdir/x'), set())
def testVisitdirIncludeInclude4(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
    m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir/z'])
    im = matchmod.intersectmatchers(m1, m2)
    # OPT: these next three could probably be False as well.
    self.assertEqual(im.visitdir(b'.'), True)
    self.assertEqual(im.visitdir(b'dir'), True)
    self.assertEqual(im.visitdir(b'dir/subdir'), True)
    self.assertFalse(im.visitdir(b'dir/foo'))
    self.assertFalse(im.visitdir(b'folder'))
    self.assertFalse(im.visitdir(b'dir/subdir/z'))
    self.assertFalse(im.visitdir(b'dir/subdir/x'))
def testVisitchildrensetIncludeInclude3(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
    m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
    im = matchmod.intersectmatchers(m1, m2)
    self.assertEqual(im.visitchildrenset(b'.'), {b'dir'})
    self.assertEqual(im.visitchildrenset(b'dir'), {b'subdir'})
    self.assertEqual(im.visitchildrenset(b'dir/subdir'), {b'x'})
    self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
    self.assertEqual(im.visitchildrenset(b'folder'), set())
    self.assertEqual(im.visitchildrenset(b'dir/subdir/z'), set())
    # OPT: this should probably be 'all' not 'this'.
    self.assertEqual(im.visitchildrenset(b'dir/subdir/x'), b'this')
def testVisitdirIncludeInclude3(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
    m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
    im = matchmod.intersectmatchers(m1, m2)
    self.assertEqual(im.visitdir(b'.'), True)
    self.assertEqual(im.visitdir(b'dir'), True)
    self.assertEqual(im.visitdir(b'dir/subdir'), True)
    self.assertFalse(im.visitdir(b'dir/foo'))
    self.assertFalse(im.visitdir(b'folder'))
    self.assertFalse(im.visitdir(b'dir/subdir/z'))
    # OPT: this should probably be 'all' not True.
    self.assertEqual(im.visitdir(b'dir/subdir/x'), True)
def testVisitchildrensetIncludeInclude(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
    m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
    dm = matchmod.differencematcher(m1, m2)
    self.assertEqual(dm.visitchildrenset(b'.'), {b'dir'})
    self.assertEqual(dm.visitchildrenset(b'dir'), {b'subdir'})
    self.assertEqual(dm.visitchildrenset(b'dir/subdir'), b'all')
    self.assertEqual(dm.visitchildrenset(b'dir/foo'), set())
    self.assertEqual(dm.visitchildrenset(b'folder'), set())
    # OPT: We should probably return set() for these; we don't because
    # patternmatcher.visitdir() (our m2) doesn't return 'all' for subdirs of
    # an 'all' pattern, just 'this'.
    self.assertEqual(dm.visitchildrenset(b'dir/subdir/z'), b'this')
    self.assertEqual(dm.visitchildrenset(b'dir/subdir/x'), b'this')
def testVisitdirIncludeInclude(self):
    m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
    m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
    dm = matchmod.differencematcher(m1, m2)
    self.assertEqual(dm.visitdir(b'.'), True)
    self.assertEqual(dm.visitdir(b'dir'), True)
    self.assertEqual(dm.visitdir(b'dir/subdir'), True)
    self.assertFalse(dm.visitdir(b'dir/foo'))
    self.assertFalse(dm.visitdir(b'folder'))
    # OPT: We should probably return False for these; we don't because
    # patternmatcher.visitdir() (our m2) doesn't return 'all' for subdirs of
    # an 'all' pattern, just True.
    self.assertEqual(dm.visitdir(b'dir/subdir/z'), True)
    self.assertEqual(dm.visitdir(b'dir/subdir/x'), True)
def addEntry(self):
    newfilter = hglib.fromunicode(self.le.text()).strip()
    if newfilter == '':
        return
    self.le.clear()
    if self.recombo.currentIndex() == 0:
        test = 'glob:' + newfilter
        try:
            match.match(self.repo.root, '', [], [test])
            self.insertFilters([newfilter], False)
        except util.Abort, inst:
            qtlib.WarningMsgBox(_('Invalid glob expression'), str(inst),
                                parent=self)
        return
def post_save(cls, request, form, template_path):
    dir = os.path.dirname(template_path) + os.sep
    file = os.path.basename(template_path)

    if request.user.first_name and request.user.last_name:
        author = "%s %s" % (request.user.first_name, request.user.last_name)
    else:
        author = request.user.username

    message = form.cleaned_data['commitmessage'] or '--'

    path = TEMPLATESADMIN_HG_ROOT
    if path is None:
        for template_dir in settings.TEMPLATE_DIRS:
            if dir.startswith(template_dir):
                if path is None or len(template_dir) > len(path):
                    path = template_dir

    if path is None:
        raise TemplatesAdminException(
            "Could not find template base directory")

    uio = ui.ui()
    uio.setconfig('ui', 'interactive', False)
    uio.setconfig('ui', 'report_untrusted', False)
    uio.setconfig('ui', 'quiet', True)

    repo = hg.repository(uio, path=path)
    filter = match.match(repo.root, dir, [file])
    repo.commit(match=filter, text=message,
                user="******" % (author, request.user.email))

    return ("Template '%s' was committed successfully into mercurial "
            "repository." % file)
def makechangegroup(orig, repo, outgoing, version, source, *args, **kwargs):
    if not shallowutil.isenabled(repo):
        return orig(repo, outgoing, version, source, *args, **kwargs)

    original = repo.shallowmatch
    try:
        # if serving, only send files the client has patterns for
        if source == 'serve':
            bundlecaps = kwargs.get(r'bundlecaps')
            includepattern = None
            excludepattern = None
            for cap in (bundlecaps or []):
                if cap.startswith("includepattern="):
                    raw = cap[len("includepattern="):]
                    if raw:
                        includepattern = raw.split('\0')
                elif cap.startswith("excludepattern="):
                    raw = cap[len("excludepattern="):]
                    if raw:
                        excludepattern = raw.split('\0')
            if includepattern or excludepattern:
                repo.shallowmatch = match.match(repo.root, '', None,
                                                includepattern,
                                                excludepattern)
            else:
                repo.shallowmatch = match.always()
        return orig(repo, outgoing, version, source, *args, **kwargs)
    finally:
        repo.shallowmatch = original
def buildmatch(ui, repo, user, key):
    '''return tuple of (match function, list enabled).'''
    if not ui.has_section(key):
        ui.debug('acl: %s not enabled\n' % key)
        return None

    pats = [pat for pat, users in ui.configitems(key)
            if _usermatch(ui, user, users)]
    ui.debug('acl: %s enabled, %d entries for user %s\n'
             % (key, len(pats), user))

    # Branch-based ACL
    if not repo:
        if pats:
            # If there's an asterisk (meaning "any branch"), always return True;
            # Otherwise, test if b is in pats
            if '*' in pats:
                return util.always
            return lambda b: b in pats
        return util.never

    # Path-based ACL
    if pats:
        return match.match(repo.root, '', pats)
    return util.never
def getchangegroup(orig, repo, source, heads=None, common=None,
                   bundlecaps=None):
    if not requirement in repo.requirements:
        return orig(repo, source, heads=heads, common=common,
                    bundlecaps=bundlecaps)

    original = repo.shallowmatch
    try:
        # if serving, only send files the client has patterns for
        if source == 'serve':
            includepattern = None
            excludepattern = None
            for cap in (bundlecaps or []):
                if cap.startswith("includepattern="):
                    raw = cap[len("includepattern="):]
                    if raw:
                        includepattern = raw.split('\0')
                elif cap.startswith("excludepattern="):
                    raw = cap[len("excludepattern="):]
                    if raw:
                        excludepattern = raw.split('\0')
            if includepattern or excludepattern:
                repo.shallowmatch = match.match(repo.root, '', None,
                                                includepattern,
                                                excludepattern)
            else:
                repo.shallowmatch = match.always(repo.root, '')
        return orig(repo, source, heads, common, bundlecaps)
    finally:
        repo.shallowmatch = original
def get_gitdiff(filenode_old, filenode_new):
    """Returns mercurial style git diff between given
    ``filenode_old`` and ``filenode_new``.
    """
    for filenode in (filenode_old, filenode_new):
        if not isinstance(filenode, FileNode):
            raise VCSError("Given object should be FileNode object, not %s"
                           % filenode.__class__)

    repo = filenode_new.changeset.repository
    old_raw_id = getattr(filenode_old.changeset, 'raw_id', '0' * 40)
    new_raw_id = getattr(filenode_new.changeset, 'raw_id', '0' * 40)

    root = filenode_new.changeset.repository.path
    file_filter = match(root, '', [filenode_new.path])
    if isinstance(repo, MercurialRepository):
        vcs_gitdiff = patch.diff(repo._repo, old_raw_id, new_raw_id,
                                 match=file_filter, opts=diffopts(git=True))
    else:
        vcs_gitdiff = repo._get_diff(old_raw_id, new_raw_id,
                                     filenode_new.path)

    return vcs_gitdiff
def __init__(self, ui, root, data):
    self._decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
    self._encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}

    self.cfg = config.config()
    # Our files should not be touched. The pattern must be
    # inserted first to override a '** = native' pattern.
    self.cfg.set('patterns', '.hg*', 'BIN')
    # We can then parse the user's patterns.
    self.cfg.parse('.hgeol', data)

    isrepolf = self.cfg.get('repository', 'native') != 'CRLF'
    self._encode['NATIVE'] = isrepolf and 'to-lf' or 'to-crlf'
    iswdlf = ui.config('eol', 'native', os.linesep) in ('LF', '\n')
    self._decode['NATIVE'] = iswdlf and 'to-lf' or 'to-crlf'

    include = []
    exclude = []
    for pattern, style in self.cfg.items('patterns'):
        key = style.upper()
        if key == 'BIN':
            exclude.append(pattern)
        else:
            include.append(pattern)
    # This will match the files for which we need to care
    # about inconsistent newlines.
    self.match = match.match(root, '', [], include, exclude)
def testSetGetNodeSuffix(self):
    clean = self.parsemanifest(A_SHORT_MANIFEST)
    m = self.parsemanifest(A_SHORT_MANIFEST)
    h = m[b'foo']
    f = m.flags(b'foo')
    want = h + b'a'
    # Merge code wants to set 21-byte fake hashes at times
    m[b'foo'] = want
    self.assertEqual(want, m[b'foo'])
    self.assertEqual([(b'bar/baz/qux.py', BIN_HASH_2),
                      (b'foo', BIN_HASH_1 + b'a')],
                     list(m.iteritems()))
    # Sometimes it even tries a 22-byte fake hash, but we can
    # return 21 and it'll work out
    m[b'foo'] = want + b'+'
    self.assertEqual(want, m[b'foo'])
    # make sure the suffix survives a copy
    match = matchmod.match(b'', b'', [b're:foo'])
    m2 = m.matches(match)
    self.assertEqual(want, m2[b'foo'])
    self.assertEqual(1, len(m2))
    m2 = m.copy()
    self.assertEqual(want, m2[b'foo'])
    # suffix with iteration
    self.assertEqual([(b'bar/baz/qux.py', BIN_HASH_2),
                      (b'foo', want)],
                     list(m.iteritems()))
    # shows up in diff
    self.assertEqual({b'foo': ((want, f), (h, b''))}, m.diff(clean))
    self.assertEqual({b'foo': ((h, b''), (want, f))}, clean.diff(m))
def promptForLfiles(parent, ui, repo, files):
    lfiles = []
    uself = 'largefiles' in repo.extensions()
    section = 'largefiles'
    try:
        minsize = int(ui.config(section, 'minsize', default='10'))
    except ValueError:
        minsize = 10
    patterns = ui.config(section, 'patterns', default=())
    if patterns:
        patterns = patterns.split(' ')
        matcher = match.match(repo.root, '', list(patterns))
    else:
        matcher = None
    for wfile in files:
        if matcher and matcher(wfile):
            # patterns always take precedence over size
            lfiles.append(wfile)
        else:
            # check for minimal size
            filesize = os.path.getsize(repo.wjoin(wfile))
            if filesize > minsize * 1024 * 1024:
                lfiles.append(wfile)
    if lfiles:
        ret = LfilesPrompt(parent, files).run()
        if ret == 0:
            # add as largefiles/bfiles
            for lfile in lfiles:
                files.remove(lfile)
        elif ret == 1:
            # add as normal files
            lfiles = []
        elif ret == 2:
            return None
    return files, lfiles
def _findeditor(repo, files):
    '''returns tuple of editor name and editor path.

    tools matched by pattern are returned as (name, toolpath)
    tools detected by search are returned as (name, toolpath)
    tortoisehg.editor is returned as (None, tortoisehg.editor)
    HGEDITOR or ui.editor are returned as (None, ui.editor)

    So the first return value is an [editor-tool] name or None, and the
    second return value is a toolpath or a user configured command line.
    '''
    ui = repo.ui

    # first check for tool specified by file patterns.  The first file pattern
    # which matches one of the files being edited selects the editor
    for pat, tool in ui.configitems("editor-patterns"):
        mf = match.match(repo.root, '', [pat])
        toolpath = _findtool(ui, tool)
        if mf(files[0]) and toolpath:
            return (tool, util.shellquote(toolpath))

    # then editor-tools
    tools = {}
    for k, v in ui.configitems("editor-tools"):
        t = k.split('.')[0]
        if t not in tools:
            try:
                priority = int(_toolstr(ui, t, "priority", "0"))
            except ValueError, e:
                priority = -100
            tools[t] = priority
def get_status(repo, rev1=None, rev2=None, files=None, clean=None,
               ignored=None):
    if rev1 is None and rev2 is None:
        rev1 = '.'
    if hasattr(repo, 'status'):
        if not files:
            m = None
        else:
            m = match.match(None, None, files, exact=True)
        status = repo.status(rev1, rev2, match=m, clean=clean,
                             ignored=ignored, unknown=True)
        return {
            'modified': status[0],
            'added': status[1],
            'removed': status[2],
            'deleted': status[3],
            'unknown': status[4],
            'ignored': status[5],
            'clean': status[6],
        }
    else:
        vim_throw('statuns', repo.path)
def isignored(ui, repo, path, **opts):
    """Find the ignore rule for the file or directory given by the PATH
    parameter
    """
    if not os.path.exists(path):
        raise util.Abort("Specified path does not exist")
    if not os.path.exists("{0}/.hgignore".format(repo.root)):
        raise util.Abort(".hgignore file not found")

    pats = ignore.readpats(repo.root, ['.hgignore'], 0)
    allpats = []
    for f, patlist in pats:
        allpats.extend(patlist)
    if not allpats:
        return util.never

    for p in allpats:
        matcher = match.match(repo.root, '', [], [p])
        if matcher(path):
            print("Path '{0}' is ignored by:".format(path))
            print(p)
            return
    print("Path '{0}' is not ignored".format(path))
def dirty(repo, filepath):
    if not hasattr(repo, '__getitem__'):
        vim_throw('statuns', repo.path)
    m = match.match(None, None, [filepath], exact=True)
    status = repo.status(match=m, unknown=True)
    if any(status[:-2]):
        vim.command('let r=1')
def testSetGetNodeSuffix(self):
    clean = self.parsemanifest(A_SHORT_MANIFEST)
    m = self.parsemanifest(A_SHORT_MANIFEST)
    h = m['foo']
    f = m.flags('foo')
    want = h + 'a'
    # Merge code wants to set 21-byte fake hashes at times
    m['foo'] = want
    self.assertEqual(want, m['foo'])
    self.assertEqual([('bar/baz/qux.py', BIN_HASH_2),
                      ('foo', BIN_HASH_1 + 'a')],
                     list(m.iteritems()))
    # Sometimes it even tries a 22-byte fake hash, but we can
    # return 21 and it'll work out
    m['foo'] = want + '+'
    self.assertEqual(want, m['foo'])
    # make sure the suffix survives a copy
    match = matchmod.match('', '', ['re:foo'])
    m2 = m.matches(match)
    self.assertEqual(want, m2['foo'])
    self.assertEqual(1, len(m2))
    m2 = m.copy()
    self.assertEqual(want, m2['foo'])
    # suffix with iteration
    self.assertEqual([('bar/baz/qux.py', BIN_HASH_2),
                      ('foo', want)],
                     list(m.iteritems()))
    # shows up in diff
    self.assertEqual({'foo': ((want, f), (h, ''))}, m.diff(clean))
    self.assertEqual({'foo': ((h, ''), (want, f))}, clean.diff(m))
def makechangegroup(orig, repo, outgoing, version, source, *args, **kwargs):
    if not requirement in repo.requirements:
        return orig(repo, outgoing, version, source, *args, **kwargs)

    original = repo.shallowmatch
    try:
        # if serving, only send files the client has patterns for
        if source == 'serve':
            bundlecaps = kwargs.get('bundlecaps')
            includepattern = None
            excludepattern = None
            for cap in (bundlecaps or []):
                if cap.startswith("includepattern="):
                    raw = cap[len("includepattern="):]
                    if raw:
                        includepattern = raw.split('\0')
                elif cap.startswith("excludepattern="):
                    raw = cap[len("excludepattern="):]
                    if raw:
                        excludepattern = raw.split('\0')
            if includepattern or excludepattern:
                repo.shallowmatch = match.match(repo.root, '', None,
                                                includepattern,
                                                excludepattern)
            else:
                repo.shallowmatch = match.always(repo.root, '')
        return orig(repo, outgoing, version, source, *args, **kwargs)
    finally:
        repo.shallowmatch = original
def status(self, path=None):
    '''Return status of repository or file.

    Without file argument: returns status of the repository:

    :"D?": dirty (tracked modified files: added, removed, deleted, modified),
    :"?U": untracked-dirty (added, but not tracked files)
    :None: clean (status is empty)

    With file argument: returns status of this file: "M"odified, "A"dded,
    "R"emoved, "D"eleted (removed from filesystem, but still tracked),
    "U"nknown, "I"gnored, (None)Clean.
    '''
    repo = self._repo()
    if path:
        m = match.match(None, None, [path], exact=True)
        statuses = repo.status(match=m, unknown=True, ignored=True)
        for status, paths in zip(self.statuses, statuses):
            if paths:
                return status
        return None
    else:
        resulting_status = 0
        for status, paths in zip(self.repo_statuses,
                                 repo.status(unknown=True)):
            if paths:
                resulting_status |= status
        return self.repo_statuses_str[resulting_status]
def affects(self, opts, fixctx, path):
    """Should this fixer run on the file at the given path and context?"""
    repo = fixctx.repo()
    matcher = matchmod.match(
        repo.root, repo.root, [self._pattern], ctx=fixctx
    )
    return matcher(path)
def _makematcher(repo, ctx, pat, changedonly):
    cwd = ''  # always relative to repo root
    patterns = []
    if pat and ':' not in pat and '*' not in pat:
        # mimic case-insensitive partial string match
        patterns.append('relre:(?i)' + re.escape(pat))
    elif pat:
        patterns.append(pat)

    include = []
    if changedonly:
        include.extend('path:%s' % p for p in ctx.files())
        if not include:
            # no match
            return matchmod.exact(repo.root, cwd, [])

    try:
        return matchmod.match(repo.root, cwd, patterns, include=include,
                              default='relglob', auditor=repo.auditor,
                              ctx=ctx)
    except (error.Abort, error.ParseError):
        # no match
        return matchmod.exact(repo.root, cwd, [])
def __init__(self, ui, root, data):
    self._decode = {"LF": "to-lf", "CRLF": "to-crlf", "BIN": "is-binary"}
    self._encode = {"LF": "to-lf", "CRLF": "to-crlf", "BIN": "is-binary"}

    self.cfg = config.config()
    # Our files should not be touched. The pattern must be
    # inserted first to override a '** = native' pattern.
    self.cfg.set("patterns", ".hg*", "BIN", "eol")
    # We can then parse the user's patterns.
    self.cfg.parse(".hgeol", data)

    isrepolf = self.cfg.get("repository", "native") != "CRLF"
    self._encode["NATIVE"] = isrepolf and "to-lf" or "to-crlf"
    iswdlf = ui.config("eol", "native", os.linesep) in ("LF", "\n")
    self._decode["NATIVE"] = iswdlf and "to-lf" or "to-crlf"

    include = []
    exclude = []
    for pattern, style in self.cfg.items("patterns"):
        key = style.upper()
        if key == "BIN":
            exclude.append(pattern)
        else:
            include.append(pattern)
    # This will match the files for which we need to care
    # about inconsistent newlines.
    self.match = match.match(root, "", [], include, exclude)
def testMatchesFull(self):
    '''Tests matches() for what should be a full match.'''
    m = self.parsemanifest(A_DEEPER_MANIFEST)

    match = matchmod.match('/', '', [''])
    m2 = m.matches(match)

    self.assertEqual(m.keys(), m2.keys())
def testMatchException(self):
    m = self.parsemanifest(A_SHORT_MANIFEST)
    match = matchmod.match('', '', ['re:.*'])

    def filt(path):
        if path == 'foo':
            assert False
        return True
    match.matchfn = filt
    self.assertRaises(AssertionError, m.matches, match)
def testMatchesNonexistentDirectory(self):
    '''Tests matches() for a relpath match on a directory that
    doesn't actually exist.'''
    m = self.parsemanifest(A_DEEPER_MANIFEST)

    match = matchmod.match('/', '', ['a/f'], default='relpath')
    m2 = m.matches(match)

    self.assertEqual([], m2.keys())
def check_not_modified_since(rev, file):
    key = rev + ':' + file
    if key in status_cache:
        return status_cache[key]
    r = file in repo.status(node1=rev, clean=True,
                            match=match.match(None, None, [file],
                                              exact=True))[6]
    status_cache[key] = r
    return r
def add_largefiles(ui, repo, *pats, **opts):
    large = opts.pop("large", None)
    lfsize = lfutil.getminsize(ui, lfutil.islfilesrepo(repo),
                               opts.pop("lfsize", None))

    lfmatcher = None
    if lfutil.islfilesrepo(repo):
        lfpats = ui.configlist(lfutil.longname, "patterns", default=[])
        if lfpats:
            lfmatcher = match_.match(repo.root, "", list(lfpats))

    lfnames = []
    m = scmutil.match(repo[None], pats, opts)
    m.bad = lambda x, y: None
    wctx = repo[None]
    for f in repo.walk(m):
        exact = m.exact(f)
        lfile = lfutil.standin(f) in wctx
        nfile = f in wctx
        exists = lfile or nfile

        # Don't warn the user when they attempt to add a normal tracked file.
        # The normal add code will do that for us.
        if exact and exists:
            if lfile:
                ui.warn(_("%s already a largefile\n") % f)
            continue

        if exact or not exists:
            abovemin = (lfsize and
                        os.lstat(repo.wjoin(f)).st_size >=
                        lfsize * 1024 * 1024)
            if large or abovemin or (lfmatcher and lfmatcher(f)):
                lfnames.append(f)
                if ui.verbose or not exact:
                    ui.status(_("adding %s as a largefile\n") % m.rel(f))

    bad = []
    standins = []

    # Need to lock, otherwise there could be a race condition between
    # when standins are created and added to the repo.
    wlock = repo.wlock()
    try:
        if not opts.get("dry_run"):
            lfdirstate = lfutil.openlfdirstate(ui, repo)
            for f in lfnames:
                standinname = lfutil.standin(f)
                lfutil.writestandin(repo, standinname, hash="",
                                    executable=lfutil.getexecutable(
                                        repo.wjoin(f)))
                standins.append(standinname)
                if lfdirstate[f] == "r":
                    lfdirstate.normallookup(f)
                else:
                    lfdirstate.add(f)
            lfdirstate.write()
            bad += [lfutil.splitstandin(f)
                    for f in lfutil.repo_add(repo, standins)
                    if f in m.files()]
    finally:
        wlock.release()
    return bad
def testMatchesExactLarge(self):
    '''Tests matches() for files matching a large list of exact files.
    '''
    m = self.parsemanifest(A_HUGE_MANIFEST)

    flist = m.keys()[80:300]
    match = matchmod.match('/', '', flist, exact=True)
    m2 = m.matches(match)

    self.assertEqual(flist, m2.keys())
def testMatchesExactPath(self):
    '''Tests matches() on an exact match on a directory, which should
    result in an empty manifest because you can't perform an exact match
    against a directory.'''
    m = self.parsemanifest(A_DEEPER_MANIFEST)

    match = matchmod.match('/', '', ['a/b'], exact=True)
    m2 = m.matches(match)

    self.assertEqual([], m2.keys())
def testMatchesWithPattern(self):
    '''Tests matches() for files matching a pattern that reside
    deeper than the specified directory.'''
    m = self.parsemanifest(A_DEEPER_MANIFEST)

    match = matchmod.match('/', '', ['a/b/*/*.txt'])
    m2 = m.matches(match)

    self.assertEqual(
        ['a/b/c/bar.txt', 'a/b/c/foo.txt', 'a/b/d/ten.txt'],
        m2.keys())
def testMatchesDirectory(self):
    '''Tests matches() on a relpath match on a directory, which should
    match against all files within said directory.'''
    m = self.parsemanifest(A_DEEPER_MANIFEST)

    match = matchmod.match('/', '', ['a/b'], default='relpath')
    m2 = m.matches(match)

    self.assertEqual([
        'a/b/c/bar.py', 'a/b/c/bar.txt', 'a/b/c/foo.py', 'a/b/c/foo.txt',
        'a/b/d/baz.py', 'a/b/d/qux.py', 'a/b/d/ten.txt', 'a/b/dog.py',
        'a/b/fish.py'], m2.keys())
def testMatchesCwd(self):
    '''Tests matches() on a relpath match with the current directory ('.')
    when not in the root directory.'''
    m = self.parsemanifest(A_DEEPER_MANIFEST)

    match = matchmod.match('/', 'a/b', ['.'], default='relpath')
    m2 = m.matches(match)

    self.assertEqual([
        'a/b/c/bar.py', 'a/b/c/bar.txt', 'a/b/c/foo.py', 'a/b/c/foo.txt',
        'a/b/d/baz.py', 'a/b/d/qux.py', 'a/b/d/ten.txt', 'a/b/dog.py',
        'a/b/fish.py'], m2.keys())
def __init__(self, ui, repo, inc, exc):
    self.ui = ui
    self.repo = repo
    self.match = match.match(repo.root, '', [], inc, exc)
    self.restrict = kwtools['hgcmd'] in restricted.split()
    self.postcommit = False

    kwmaps = self.ui.configitems('keywordmaps')
    if kwmaps:  # override default templates
        self.templates = dict(kwmaps)
    else:
        self.templates = _defaultkwmaps(self.ui)
def checkhook(ui, repo, node=None, **kwargs):
    """
    :param ui: ui object
    :type ui: mercurial.ui.ui
    :param repo: repository object
    :type repo: mercurial.repo
    :return: False if there was no problem, True if the commit/push
             should be canceled
    :rtype: bool
    """
    configFiles = ui.configlist('checkmeta', 'pattern_files',
                                default=".hgmeta")
    mandatoryChecks = set(ui.configlist('checkmeta', 'mandatory'))

    lastRev = None
    filesToCheck = set()
    if node is None:
        for fileName in repo[node]:
            filesToCheck.add(fileName)
    else:
        lastRev = len(repo) - 1
        # checking a group of revisions
        for rev in xrange(repo[node].rev(), len(repo)):
            for fileName in repo[rev].files():
                filesToCheck.add(fileName)

    patterns = readPatternFiles(ui, datas=[
        repo[lastRev].filectx(fn).data() for fn in configFiles
    ])
    matchPatterns = {
        match.match(repo.root, '', [], [pattern]): check
        for pattern, check in patterns.iteritems()
    }

    # for each file affected by the transaction, find the matching pattern and
    # run all connected tests
    for fileName in filesToCheck:
        checksRun, success = runTests(ui, fileName, matchPatterns,
                                      repo[lastRev].filectx(fileName).data())
        if not success:
            return True
        missingChecks = mandatoryChecks - set(checksRun)
        if len(missingChecks) > 0:
            ui.warn(_("Mandatory checks not run for {0}: {1}\n").format(
                fileName, ", ".join(missingChecks)))
            return True
    return False
def buildmatch(ui, repo, user, key):
    '''return tuple of (match function, list enabled).'''
    if not ui.has_section(key):
        ui.debug(_('acl: %s not enabled\n') % key)
        return None

    pats = [pat for pat, users in ui.configitems(key)
            if user in users.replace(',', ' ').split()]
    ui.debug(_('acl: %s enabled, %d entries for user %s\n')
             % (key, len(pats), user))
    if pats:
        return match.match(repo.root, '', pats)
    return match.exact(repo.root, '', [])