def _format_link_file(self, formatter, ns, target, label, fullmatch=None):
    """Return a link tag for a Java stack-trace style reference such as
    org.example.package.Class.method(Class.java:123).

    Resolves the fully qualified class name to a ``.java`` file under the
    configured source paths of each repository; returns the plain label
    when no matching node exists (i.e. not rendered as a missing link).
    """
    fqcn = fullmatch.group("fqcn")
    classname = fullmatch.group("classname")  # captured but unused here
    line = fullmatch.group("lineno")
    # Turn the optional line number into a '#L<n>' URL fragment, else ''.
    line = line and '#L%s' % line or ""
    rm = RepositoryManager(self.env)
    self.get_psf()  # force parse so self._srcpathes is populated
    # search repositories by fully qualified class name
    if fqcn:
        fqcn_filename = fqcn.replace('.', '/') + ".java"
        for reponame in self._srcpathes.keys():
            repo = rm.get_repository(reponame)
            if not repo:
                continue
            for srcpath in self._srcpathes[reponame]:
                try:
                    if repo.has_node(srcpath + fqcn_filename, None):
                        return tag.a(label,
                                     href=formatter.href.browser(
                                         reponame + '/' + srcpath +
                                         fqcn_filename) + line,
                                     class_="source")
                except Exception, e:
                    # GIT throws exception if MBCS used in fqcn
                    self.env.log.error(e)
                    pass
    # implemented but not fit i feel
    # for prefix in self._prefixes.keys():
    #     if fqcn.startswith(prefix):
    #         href = self._prefixes[prefix] + fqcn.replace('.', '/') + '.html'
    #         return tag.a(label, href=href, class_="file")
    return label  # not missing resource
def _format_link(self, formatter, ns, match, label, fullmatch=None):
    """Render a TracLinks ``log:`` link (namespaces log, log1, log2).

    Resolves *match* to a repository path plus revision range(s) and
    returns an ``<a>`` tag pointing at the log view; on error, returns
    a 'missing source' link carrying the error message as its tooltip.
    """
    if ns == 'log1':
        # InterTrac form: delegate to the shorthand helper first.
        groups = fullmatch.groupdict()
        it_log = groups.get('it_log')
        revs = groups.get('log_revs')
        path = groups.get('log_path') or '/'
        target = '%s%s@%s' % (it_log, path, revs)
        # prepending it_log is needed, as the helper expects it there
        intertrac = formatter.shorthand_intertrac_helper(
            'log', target, label, fullmatch)
        if intertrac:
            return intertrac
        path, query, fragment = formatter.split_link(path)
    else:
        assert ns in ('log', 'log2')
        if ns == 'log':
            match, query, fragment = formatter.split_link(match)
        else:
            query = fragment = ''
            # log2 form is '<revs>/<path>': swap the two components.
            match = ''.join(reversed(match.split('/', 1)))
        path = match
        revs = ''
        if self.LOG_LINK_RE.match(match):
            # Split on the first ':' or '@' separating path from revs.
            indexes = [sep in match and match.index(sep) for sep in ':@']
            idx = min([i for i in indexes if i is not False])
            path, revs = match[:idx], match[idx+1:]
    rm = RepositoryManager(self.env)
    try:
        reponame, repos, path = rm.get_repository_by_path(path)
        if not reponame:
            # Fall back to the context's default repository.
            reponame = rm.get_default_repository(formatter.context)
            if reponame is not None:
                repos = rm.get_repository(reponame)
        if repos:
            if 'LOG_VIEW' in formatter.perm(repos.resource):
                reponame = repos.reponame or None
                path = path or '/'
                revranges = RevRanges(repos, revs)
                if revranges.has_ranges():
                    href = formatter.href.log(reponame, path,
                                              revs=unicode(revranges))
                else:
                    # try to resolve if single rev
                    repos.normalize_rev(revs)
                    href = formatter.href.log(reponame, path,
                                              rev=revs or None)
                if query and '?' in href:
                    query = '&' + query[1:]
                return tag.a(label, class_='source',
                             href=href + query + fragment)
            errmsg = _("No permission to view change log")
        elif reponame:
            errmsg = _("Repository '%(repo)s' not found", repo=reponame)
        else:
            errmsg = _("No default repository defined")
    except TracError as e:
        errmsg = to_unicode(e)
    return tag.a(label, class_='missing source', title=errmsg)
def pre_process_request(self, req, handler):
    """Pre-process the request by adding 'Zip Archive' links into the
    alternative format links of the browser view.

    The link is constructed from the latest revision number, taken from
    the default repository.

    :param Request req: Trac request
    :param object handler: existing handler
    :returns: Handler, unmodified
    """
    # Add link only in /browser or /browser/?rev= pages
    if (self.browser_regx.match(req.path_info)
            and 'BROWSER_VIEW' in req.perm
            and 'FILE_VIEW' in req.perm):
        # Get default repository and its type
        rm = RepositoryManager(self.env)
        repo = rm.get_repository('')
        repo_type = rm.repository_type
        # BUGFIX: get_repository('') returns None when no default
        # repository is configured; the original raised AttributeError
        # on repo.get_youngest_rev() below.
        if repo is None:
            return handler
        # Construct the export revision from the request or repo head.
        latest_rev = plaintext(str(req.args.get('rev',
                                                repo.get_youngest_rev())))
        # Use Trac's internal implementation for Subversion
        if repo_type == 'svn':
            return handler
        # For other types, iterate supported formats
        for format, info in self.formats.items():
            add_link(req, 'alternate',
                     req.href('export/archive', rev=latest_rev,
                              format=format),
                     _(info['desc']), info['mime'], info['ext'])
    return handler
def _get_source(self, formatter, source_obj, dest_format):
    """Resolve *source_obj* to a repository using whichever API the
    running Trac provides.

    Trac 0.12+ exposes ``get_repository_by_path``; on 0.11 that
    attribute is missing, so fall back to the single-repository lookup.
    """
    repos_mgr = RepositoryManager(self.env)
    try:  # 0.12+
        repos_name, repos, source_obj = repos_mgr.get_repository_by_path(
            source_obj)
    # The exception object was bound but never used; the bare form also
    # matches the sibling helper and is Python 3 compatible.
    except AttributeError:  # 0.11
        repos = repos_mgr.get_repository(formatter.req.authname)
def _read_source_from_repos(self, formatter, src_path):
    """Resolve *src_path* to a repository using whichever API the
    running Trac provides.

    Trac 0.12+ exposes ``get_repository_by_path``; on 0.11 that
    attribute is missing, so fall back to the single-repository lookup.
    """
    repos_mgr = RepositoryManager(self.env)
    try:  # 0.12+
        repos_name, repos, source_obj = repos_mgr.get_repository_by_path(
            src_path)
    # The exception object was bound but never used; the bare form also
    # matches the sibling helper and is Python 3 compatible.
    except AttributeError:  # 0.11
        repos = repos_mgr.get_repository(formatter.req.authname)
def _format_link(self, formatter, ns, match, label, fullmatch=None):
    """Render a TracLinks ``log:`` link (namespaces log, log1, log2).

    Resolves *match* to a repository path plus revision range(s) and
    returns an ``<a>`` tag pointing at the log view; on error, returns
    a 'missing source' link carrying the error message as its tooltip.
    """
    if ns == 'log1':
        groups = fullmatch.groupdict()
        it_log = groups.get('it_log')
        revs = groups.get('log_revs')
        path = groups.get('log_path') or '/'
        target = '%s%s@%s' % (it_log, path, revs)
        # prepending it_log is needed, as the helper expects it there
        intertrac = formatter.shorthand_intertrac_helper(
            'log', target, label, fullmatch)
        if intertrac:
            return intertrac
        path, query, fragment = formatter.split_link(path)
    else:
        assert ns in ('log', 'log2')
        if ns == 'log':
            match, query, fragment = formatter.split_link(match)
        else:
            query = fragment = ''
            match = ''.join(reversed(match.split('/', 1)))
        path = match
        revs = ''
        if self.LOG_LINK_RE.match(match):
            # Split on the first ':' or '@' separating path from revs.
            indexes = [sep in match and match.index(sep) for sep in ':@']
            idx = min([i for i in indexes if i is not False])
            path, revs = match[:idx], match[idx+1:]
    rm = RepositoryManager(self.env)
    try:
        reponame, repos, path = rm.get_repository_by_path(path)
        if not reponame:
            reponame = rm.get_default_repository(formatter.context)
            if reponame is not None:
                repos = rm.get_repository(reponame)
        if repos:
            if 'LOG_VIEW' in formatter.perm:
                reponame = repos.reponame or None
                path = path or '/'
                revranges = RevRanges(repos, revs)
                if revranges.has_ranges():
                    href = formatter.href.log(reponame, path,
                                              revs=unicode(revranges))
                else:
                    # try to resolve if single rev
                    repos.normalize_rev(revs)
                    href = formatter.href.log(reponame, path,
                                              rev=revs or None)
                if query and '?' in href:
                    query = '&' + query[1:]
                return tag.a(label, class_='source',
                             href=href + query + fragment)
            errmsg = _("No permission to view change log")
        elif reponame:
            errmsg = _("Repository '%(repo)s' not found", repo=reponame)
        else:
            errmsg = _("No default repository defined")
    except TracError as e:
        errmsg = to_unicode(e)
    # BUGFIX: errmsg was computed but the function fell off the end and
    # returned None for every error case.  Return the 'missing source'
    # link, consistent with the sibling _format_link implementations.
    return tag.a(label, class_='missing source', title=errmsg)
def _render_repository_index(self, context, all_repositories, order, desc):
    """Build the template data dict for the repository index view.

    Collects one entry per visible repository as a 6-tuple
    (reponame, repoinfo, repos, youngest, errmsg, raw_href), then sorts
    by *order* ('date', 'author' or name), reversed when *desc*.
    """
    # Color scale for the age column
    timerange = custom_colorizer = None
    if self.color_scale:
        custom_colorizer = self.get_custom_colorizer()
    rm = RepositoryManager(self.env)
    repositories = []
    for reponame, repoinfo in all_repositories.iteritems():
        # Skip the default (unnamed) alias and hidden repositories.
        if not reponame or as_bool(repoinfo.get('hidden')):
            continue
        try:
            repos = rm.get_repository(reponame)
        except TracError as err:
            # Broken repository: keep an entry carrying the error text.
            entry = (reponame, repoinfo, None, None,
                     exception_to_unicode(err), None)
        else:
            if repos:
                if not repos.is_viewable(context.perm):
                    continue
                try:
                    youngest = repos.get_changeset(repos.youngest_rev)
                except NoSuchChangeset:
                    youngest = None
                if self.color_scale and youngest:
                    # Track the overall age range for the colorizer.
                    if not timerange:
                        timerange = TimeRange(youngest.date)
                    else:
                        timerange.insert(youngest.date)
                raw_href = self._get_download_href(context.href, repos,
                                                   None, None)
                entry = (reponame, repoinfo, repos, youngest, None,
                         raw_href)
            else:
                entry = (reponame, repoinfo, None, None, u"\u2013", None)
        if entry[4] is not None:  # Check permission in case of error
            root = Resource('repository', reponame).child('source', '/')
            if 'BROWSER_VIEW' not in context.perm(root):
                continue
        repositories.append(entry)

    # Ordering of repositories (Python 2 tuple-parameter unpacking).
    if order == 'date':
        def repo_order((reponame, repoinfo, repos, youngest, err, href)):
            return (youngest.date if youngest else to_datetime(0),
                    embedded_numbers(reponame.lower()))
    elif order == 'author':
        def repo_order((reponame, repoinfo, repos, youngest, err, href)):
            return (youngest.author.lower() if youngest else '',
                    embedded_numbers(reponame.lower()))
    else:
        def repo_order((reponame, repoinfo, repos, youngest, err, href)):
            return embedded_numbers(reponame.lower())
    repositories = sorted(repositories, key=repo_order, reverse=desc)

    return {'repositories' : repositories,
            'timerange': timerange,
            'colorize_age': custom_colorizer}
def _render_repository_index(self, context, all_repositories, order, desc):
    """Build the template data dict for the repository index view.

    Collects one entry per visible repository as a 6-tuple
    (reponame, repoinfo, repos, youngest, errmsg, raw_href), then sorts
    by *order* ('date', 'author' or name), reversed when *desc*.
    """
    # Color scale for the age column
    timerange = custom_colorizer = None
    if self.color_scale:
        custom_colorizer = self.get_custom_colorizer()
    rm = RepositoryManager(self.env)
    repositories = []
    for reponame, repoinfo in all_repositories.iteritems():
        # Skip the default (unnamed) alias and hidden repositories.
        if not reponame or as_bool(repoinfo.get("hidden")):
            continue
        try:
            repos = rm.get_repository(reponame)
        except TracError as err:
            # Broken repository: keep an entry carrying the error text.
            entry = (reponame, repoinfo, None, None,
                     exception_to_unicode(err), None)
        else:
            if repos:
                if not repos.is_viewable(context.perm):
                    continue
                try:
                    youngest = repos.get_changeset(repos.youngest_rev)
                except NoSuchChangeset:
                    youngest = None
                if self.color_scale and youngest:
                    # Track the overall age range for the colorizer.
                    if not timerange:
                        timerange = TimeRange(youngest.date)
                    else:
                        timerange.insert(youngest.date)
                raw_href = self._get_download_href(context.href, repos,
                                                   None, None)
                entry = (reponame, repoinfo, repos, youngest, None,
                         raw_href)
            else:
                entry = (reponame, repoinfo, None, None, u"\u2013", None)
        if entry[4] is not None:  # Check permission in case of error
            root = Resource("repository", reponame).child(self.realm, "/")
            if "BROWSER_VIEW" not in context.perm(root):
                continue
        repositories.append(entry)

    # Ordering of repositories (Python 2 tuple-parameter unpacking).
    if order == "date":
        def repo_order((reponame, repoinfo, repos, youngest, err, href)):
            return (youngest.date if youngest else to_datetime(0),
                    embedded_numbers(reponame.lower()))
    elif order == "author":
        def repo_order((reponame, repoinfo, repos, youngest, err, href)):
            return (youngest.author.lower() if youngest else "",
                    embedded_numbers(reponame.lower()))
    else:
        def repo_order((reponame, repoinfo, repos, youngest, err, href)):
            return embedded_numbers(reponame.lower())
    repositories = sorted(repositories, key=repo_order, reverse=desc)

    return {"repositories": repositories,
            "timerange": timerange,
            "colorize_age": custom_colorizer}
def check_permission(self, action, username, resource, perm):
    """Check *action* against the parsed authz file.

    Returns True to allow, False to deny, or None to defer to other
    permission policies.  Only permissions listed in
    ``self._handled_perms`` for the resource's realm are handled here.
    """
    realm = resource.realm if resource else None
    if (realm, action) in self._handled_perms:
        authz, users = self._get_authz_info()
        if authz is None:
            # Authz file missing/unreadable: deny everything.
            return False
        # Map the Trac username onto the authz principal names.
        if username == 'anonymous':
            usernames = ('$anonymous', '*')
        else:
            usernames = (username, '$authenticated', '*')
        if resource is None:
            # Realm-wide check: allow if the user appears anywhere.
            return True if users & set(usernames) else None
        rm = RepositoryManager(self.env)
        try:
            repos = rm.get_repository(resource.parent.id)
        except TracError:
            return True  # Allow error to be displayed in the repo index
        if repos is None:
            return True
        # Look up both the repository-specific module and the global one.
        modules = [resource.parent.id or self.authz_module_name]
        if modules[0]:
            modules.append('')

        def check_path(path):
            # Normalize to a '/'-terminated path within the repo scope.
            path = '/' + join(repos.scope, path)
            if path != '/':
                path += '/'
            # Allow access to parent directories of allowed resources
            if any(section.get(user) is True
                   for module in modules
                   for spath, section in authz.get(module, {}).iteritems()
                   if spath.startswith(path)
                   for user in usernames):
                return True
            # Walk from resource up parent directories
            for spath in parent_iter(path):
                for module in modules:
                    section = authz.get(module, {}).get(spath)
                    if section:
                        for user in usernames:
                            result = section.get(user)
                            if result is not None:
                                return result

        if realm == 'source':
            return check_path(resource.id)
        elif realm == 'changeset':
            # A changeset is viewable if any touched path is viewable
            # (or if it touched no paths at all).
            changes = list(repos.get_changeset(resource.id).get_changes())
            if not changes or any(check_path(change[0])
                                  for change in changes):
                return True
def check_permission(self, action, username, resource, perm):
    """Check *action* against the parsed authz file.

    Returns True to allow, False to deny, or None to defer to other
    permission policies.  Only permissions listed in
    ``self._handled_perms`` for the resource's realm are handled here.
    """
    realm = resource.realm if resource else None
    if (realm, action) in self._handled_perms:
        authz, users = self._get_authz_info()
        if authz is None:
            # Authz file missing/unreadable: deny everything.
            return False
        # Map the Trac username onto the authz principal names.
        if username == 'anonymous':
            usernames = ('$anonymous', '*')
        else:
            usernames = (username, '$authenticated', '*')
        if resource is None:
            # Realm-wide check: allow if the user appears anywhere.
            return True if users & set(usernames) else None
        rm = RepositoryManager(self.env)
        try:
            repos = rm.get_repository(resource.parent.id)
        except TracError:
            return True  # Allow error to be displayed in the repo index
        if repos is None:
            return True
        # Look up both the repository-specific module and the global one.
        modules = [resource.parent.id or self.authz_module_name]
        if modules[0]:
            modules.append('')

        def check_path(path):
            # Normalize to a '/'-terminated path within the repo scope.
            path = '/' + join(repos.scope, path)
            if path != '/':
                path += '/'
            # Allow access to parent directories of allowed resources
            if any(section.get(user) is True
                   for module in modules
                   for spath, section in authz.get(module, {}).iteritems()
                   if spath.startswith(path)
                   for user in usernames):
                return True
            # Walk from resource up parent directories
            for spath in parent_iter(path):
                for module in modules:
                    section = authz.get(module, {}).get(spath)
                    if section:
                        for user in usernames:
                            result = section.get(user)
                            if result is not None:
                                return result

        if realm == 'source':
            return check_path(resource.id)
        elif realm == 'changeset':
            # A changeset is viewable if any touched path is viewable
            # (or if it touched no paths at all).
            changes = list(repos.get_changeset(resource.id).get_changes())
            if not changes or any(check_path(change[0])
                                  for change in changes):
                return True
def _format_link(self, formatter, ns, match, label, fullmatch=None):
    """Render a TracLinks ``log:`` link (namespaces log, log1, log2).

    Resolves *match* to a repository path plus revision range(s) and
    returns an ``<a>`` tag pointing at the log view; on error, returns
    a 'missing source' link carrying the error message as its tooltip.
    """
    if ns == "log1":
        # InterTrac form: delegate to the shorthand helper first.
        groups = fullmatch.groupdict()
        it_log = groups.get("it_log")
        revs = groups.get("log_revs")
        path = groups.get("log_path") or "/"
        target = "%s%s@%s" % (it_log, path, revs)
        # prepending it_log is needed, as the helper expects it there
        intertrac = formatter.shorthand_intertrac_helper("log", target,
                                                         label, fullmatch)
        if intertrac:
            return intertrac
        path, query, fragment = formatter.split_link(path)
    else:
        assert ns in ("log", "log2")
        if ns == "log":
            match, query, fragment = formatter.split_link(match)
        else:
            query = fragment = ""
            # log2 form is '<revs>/<path>': swap the two components.
            match = "".join(reversed(match.split("/", 1)))
        path = match
        revs = ""
        if self.LOG_LINK_RE.match(match):
            # Split on the first ':' or '@' separating path from revs.
            indexes = [sep in match and match.index(sep) for sep in ":@"]
            idx = min([i for i in indexes if i is not False])
            path, revs = match[:idx], match[idx + 1 :]
    rm = RepositoryManager(self.env)
    try:
        reponame, repos, path = rm.get_repository_by_path(path)
        if not reponame:
            # Fall back to the context's default repository.
            reponame = rm.get_default_repository(formatter.context)
            if reponame is not None:
                repos = rm.get_repository(reponame)
        if repos:
            if "LOG_VIEW" in formatter.perm:
                reponame = repos.reponame or None
                path = path or "/"
                revranges = RevRanges(repos, revs)
                if revranges.has_ranges():
                    href = formatter.href.log(reponame, path,
                                              revs=unicode(revranges))
                else:
                    # try to resolve if single rev
                    repos.normalize_rev(revs)
                    href = formatter.href.log(reponame, path,
                                              rev=revs or None)
                if query and "?" in href:
                    query = "&" + query[1:]
                return tag.a(label, class_="source",
                             href=href + query + fragment)
            errmsg = _("No permission to view change log")
        elif reponame:
            errmsg = _("Repository '%(repo)s' not found", repo=reponame)
        else:
            errmsg = _("No default repository defined")
    except TracError as e:
        errmsg = to_unicode(e)
    return tag.a(label, class_="missing source", title=errmsg)
def get_psf(self):
    """Parse the attached "team project set" (Eclipse IDE) and return a
    mapping { Eclipse project name: repository URL path }.

    Lazily populated on first call; subsequent calls return the cached
    ``self._psf``.  As a side effect also fills ``self._srcpathes``
    (repository name -> list of Java source roots), either from each
    project's ``.classpath`` file or from the ``[wiki] source_path``
    config option when no PSF attachment exists.
    """
    if self._psf is None:
        self._psf = {}
        psfResource = Resource('wiki', 'TeamProjectSet').child(
            'attachment', 'projectSet.psf')
        if (self.compmgr[AttachmentModule].resource_exists(psfResource)):
            psf = Attachment(self.env, psfResource)

            # expat callback: each <project reference="..."> entry is a
            # comma-separated list; presumably field 1 is the URL and
            # field 2 the project name -- TODO confirm PSF format.
            def startElement(name, attrs):
                if name == 'project':
                    attr = attrs.get('reference', "").split(',')
                    self._psf[attr[2]] = urlparse(
                        attr[1]).path  # trim leading scheme/port

            reader = expat.ParserCreate()
            reader.StartElementHandler = startElement
            reader.ParseFile(psf.open())
            # specify checkout dir in server subversion directory
            rm = RepositoryManager(self.env)
            repos = rm.get_all_repositories()
            for projectname in self._psf.keys():
                path = self._psf.get(projectname) + '/.project'
                for reponame in repos:
                    repo = rm.get_repository(reponame)
                    if not repo:
                        continue
                    # Try progressively left-stripped variants of the
                    # path until one exists in this repository.
                    for npath in self.iter_lstrip(path):
                        if not repo.has_node(npath, None):
                            continue
                        # Strip trailing '/.project' (9 characters).
                        self._psf[projectname] = npath[:-9]
                        # search .classpath here
                        npath = npath[:-9] + '/.classpath'
                        if not repo.has_node(npath, None):
                            continue
                        node = repo.get_node(npath, None)
                        srcpathes = self.parse_classpath(
                            node.get_content())
                        # Prefix each classpath entry with the project
                        # dir ('.classpath' is 10 characters).
                        self._srcpathes[repo.reponame] = map(
                            lambda x: npath[:-10] + x, srcpathes)
        else:
            # TeamProjectSet not found
            for repo in self.config.getlist('wiki', 'source_path',
                                            sep=';'):
                # expected: "svn: trunk/theproject/src/main/java trunk/theproject/src/test/java;"
                repo = repo.split(':')
                repo, srcpaths = len(repo) < 2 and (
                    "", repo[0]
                ) or repo  # no leading reponame, use default repo
                self._srcpathes[repo] = self._srcpathes.get(repo, [])
                self._srcpathes[repo].extend([
                    s.rstrip('/') + '/' for s in srcpaths.split(' ') if s
                ])
    return self._psf  # { project_name: repository_url, ... }
def expand_macro(self, formatter, name, args):
    """Render the macro: look up *args* as a node path in every
    configured repository and return the javadoc rendering for the
    first repository containing it.

    :param formatter: wiki formatter (unused here)
    :param name: invoked macro name (unused here)
    :param args: macro argument, interpreted as a repository path
    :returns: javadoc markup, or a plain "not found" message
    """
    path = unicode(args)
    rm = RepositoryManager(self.env)
    for repo_name in rm.get_all_repositories():
        repo = rm.get_repository(repo_name)
        # BUGFIX: get_repository() may return None for a broken or
        # unsupported entry; the original raised AttributeError on
        # repo.has_node().
        if repo is None:
            continue
        if repo.has_node(path):
            return self.get_javadoc(repo, repo_name, path)
    return "No file found for %s" % (path)
def _get_source(self, formatter, source_obj, dest_format):
    """Resolve *source_obj* to a repository node.

    Tries the Trac 0.12+ multi-repository API first and falls back to
    the 0.11 single-repository API.  Returns a
    ``(system_message, None, None)`` triple when the node lookup fails.
    NOTE(review): no success-path return is visible in this chunk --
    presumably the function continues elsewhere; confirm against the
    full file.
    """
    repos_mgr = RepositoryManager(self.env)
    try:  # 0.12+
        repos_name, repos, source_obj = \
            repos_mgr.get_repository_by_path(source_obj)
    except AttributeError:  # 0.11
        repos = repos_mgr.get_repository(formatter.req.authname)
    # Split an optional revision specifier off the path.
    path, rev = _split_path(source_obj)
    try:
        node = repos.get_node(path, rev)
    except (NoSuchChangeset, NoSuchNode), e:
        return system_message(e), None, None
def _suggest_source(self, req, term):
    """Return autocomplete candidates for a source path/revision *term*.

    Three phases depending on what the user typed so far:
    repository-name prefix (no '/' or '@' yet), a revision prefix after
    '@', or a file/directory name inside the typed parent directory.
    Only entries the requesting user may view are suggested.
    """
    def suggest_revs(repos, node, search_rev):
        # Yield revision labels whose text starts with *search_rev*:
        # first named quickjump entries (tags/branches), then the short
        # revs of the node's recent history.
        if search_rev:
            for category, names, path, rev in repos.get_quickjump_entries(
                    None):
                if path and path != '/':
                    # skip jumps to other paths
                    # (like SVN's 'trunk', 'branches/...', 'tags/...' folders)
                    continue
                # Multiple Mercurial tags on same revision are comma-separated:
                for name in names.split(', '):
                    if ' ' in name:
                        # use first token, e.g. '1.0' from '1.0 (tip)'
                        name = name.split(' ', 1)[0]
                    if name.startswith(search_rev):
                        yield name
            for r in node.get_history(10):
                rev = repos.short_rev(r[1])
                if str(rev).startswith(search_rev):
                    yield rev

    rm = RepositoryManager(self.env)
    if term.find('/') == -1 and term.find('@') == -1:
        # Phase 1: no path yet -- complete repository names.
        lower_term = term.lower()
        completions = sorted(
            reponame + '/'
            for reponame in rm.get_all_repositories()
            if reponame.lower().startswith(lower_term)
            and 'BROWSER_VIEW' in req.perm('repository', reponame))
    else:
        pos = term.find('/')
        if pos == -1:
            pos = term.find('@')
        reponame, path = term[:pos], term[pos:]
        repos = rm.get_repository(reponame)
        completions = []
        if repos is not None:
            if path.find('@') != -1:
                # Phase 2: complete a revision after '@'.
                path, search_rev = path.rsplit('@', 1)
                node = repos.get_node(path, repos.youngest_rev)
                if node.can_view(req.perm):
                    completions.extend(
                        '%s%s@%s' % (reponame, path, rev)
                        for rev in suggest_revs(repos, node, search_rev))
            else:
                # Phase 3: complete an entry name in the parent dir.
                dir, filename = path.rsplit('/', 1)
                node = repos.get_node(dir or '/', repos.youngest_rev)
                completions = sorted(
                    '%s/%s%s' % (reponame, n.path.lstrip('/'),
                                 '/' if n.isdir else '')
                    for n in node.get_entries()
                    if n.name.startswith(filename) and n.can_view(req.perm))
    return completions
def register_repository(self, repository, name=None):
    """Register a repository with Trac.

    :param repository: project repository object (provides ``project``,
        ``name``, ``type`` and ``path`` attributes)
    :param name: optional name to register under in Trac; defaults to
        the repository's own name
    :returns: True on success, False on any failure (already
        registered, unsupported type, missing environment, or error
        while adding/synchronising)
    """
    project = repository.project
    tracname = name if name is not None else repository.name
    # Refuse to register the same repository twice for this project.
    if repository.name in project.data.get('plugins', {}).get(
            'trac', {}).get(repository.type, {}):
        logger.error(
            "Repository %s:%s is already registered in project %s",
            repository.type, repository.name, project.name)
        return False
    if repository.type not in self.typemap:
        logger.error("Repository type %s is not supported in Trac",
                     repository.type)
        return False
    if not self.has_env(project):
        logger.warning(
            "Tried to add repository %s:%s to Trac of project %s, but there is no environment",
            repository.type, repository.name, project.name)
        return False
    try:
        env = Environment(self.get_env_path(project))
        DbRepositoryProvider(env).add_repository(
            tracname, repository.path, self.typemap[repository.type])
        # save mapping in project
        project.data.setdefault('plugins', {}).setdefault(
            'trac', {}).setdefault(
                repository.type, {})[repository.name] = tracname
        project.save()
        # Synchronise repository
        rm = RepositoryManager(env)
        repos = rm.get_repository(tracname)
        repos.sync(lambda rev: logger.debug("Synced revision: %s", rev),
                   clean=True)
        return True
    except Exception:
        # BUGFIX: message read "occured while addingrepository"; also
        # dropped the unused 'as e' binding -- logger.exception()
        # records the traceback itself.
        logger.exception(
            "Exception occurred while adding repository %s:%s to Trac of project %s",
            repository.type, repository.name, project.name)
        return False
def __init__(self, env, context):
    """Set up annotation state for the versioned resource in *context*
    and register the blame script/stylesheets on its request."""
    self.env = env
    self.context = context
    resource = context.resource
    self.path = resource.id
    self.rev = resource.version
    manager = RepositoryManager(self.env)
    self.repos = manager.get_repository(resource.parent.id)
    # per-render state
    self.prev_chgset = None
    self.chgset_data = {}
    request = context.req
    add_script(request, 'common/js/blame.js')
    for sheet in ('common/css/changeset.css', 'common/css/diff.css'):
        add_stylesheet(request, sheet)
    self.reset()
def get_timeline_events(self, req, start, stop, filters):
    """Yield the superclass's timeline events, dropping changeset
    events whose changesets are not on an allowed branch.

    Branch filters come from the '[github] branches' option (or
    '<reponame>.branches' for named repositories); an event is kept
    only if every changeset in it passes allow_revision().
    """
    rm = RepositoryManager(self.env)
    events = super(GitHubBrowser, self).get_timeline_events(
        req, start, stop, filters)
    for event in events:
        # Non-changeset events pass through untouched.
        if event[0] != 'changeset':
            yield event
            continue
        allow = True
        # NOTE(review): event[3][0] appears to be the list of changeset
        # data tuples, with changeset[2][0] the repository name --
        # confirm against the parent class's event structure.
        for changeset in event[3][0]:
            reponame = changeset[2][0]
            repos = rm.get_repository(reponame)
            key = ('branches' if is_default(reponame)
                   else '%s.branches' % reponame)
            branches = self.config.getlist('github', key, sep=' ')
            if branches:
                allow = allow and allow_revision(changeset[0].rev, repos,
                                                 branches)
        if allow:
            yield event
def _format_link(self, formatter, ns, target, label, fullmatch=None): """ returns a tag for Resource: Projectname/path/to/src/org/example/package/Class.java Line: 123 """ # search repository rm = RepositoryManager(self.env) line = fullmatch.group('line') line = line and '#L%s' % line or "" # option 1: search with unmodified path path = fullmatch.group('path') reponame, repos, npath = rm.get_repository_by_path( path) # @UnusedVariable node = get_allowed_node(repos, npath, None, formatter.perm) if node: return tag.a(label, href=formatter.href.browser(path) + line, class_="source") # option 2: search with "/trunk/" + path path = "/trunk/" + fullmatch.group('path') reponame, repos, npath = rm.get_repository_by_path( path) # @UnusedVariable node = get_allowed_node(repos, npath, None, formatter.perm) if node: return tag.a(label, href=formatter.href.browser(path) + line, class_="source") # option 3: heuristic search in repositories for subversion projectname, trailing = fullmatch.group('path').lstrip('/').split( '/', 1) psf = self.get_psf() if projectname in psf: # subversion can checkout in the middle of repository path = psf[projectname] + '/' + trailing repos = rm.get_all_repositories() for npath, reponame in product(self.iter_lstrip(path), repos): repo = rm.get_repository(reponame) node = get_allowed_node(repo, npath, None, formatter.perm) if node: return tag.a(label, class_="source", href=formatter.href.browser(repo.reponame + '/' + node.path) + line) return tag.a(label, class_='missing source')
def _do_resync(self, reponame):
    """Rebuild the codereviewer_map cache from the revision table.

    :param reponame: a repository name, or '*' to resync every real
        repository.
    :raises TracError: when the named repository does not exist.
    """
    rm = RepositoryManager(self.env)
    if reponame == '*':
        repositories = rm.get_real_repositories()
    else:
        if is_default(reponame):
            reponame = ''
        repos = rm.get_repository(reponame)
        if repos is None:
            raise TracError(
                _("Repository '%(repo)s' not found",
                  repo=reponame or '(default)'))
        repositories = [repos]
    Changeset = namedtuple('changeset', 'repos rev message author date')
    for repos in sorted(repositories, key=lambda r: r.reponame):
        printout(
            _('Resyncing repository history for %(reponame)s... ',
              reponame=repos.reponame or '(default)'))
        # Drop and rebuild the map entries for this repo atomically.
        with self.env.db_transaction as db:
            db(
                """
                DELETE FROM codereviewer_map WHERE repo=%s
                """, (repos.reponame, ))
            for time, author, message, rev in db(
                    """
                    SELECT time, author, message, rev FROM revision
                    WHERE repos=%s ORDER BY time
                    """, (repos.id, )):
                cset = Changeset(repos, rev, message, author,
                                 from_utimestamp(time))
                self._map(repos.reponame, cset)
                self._sync_feedback(rev)
        # Report how many revisions were processed for this repo.
        for cnt, in self.env.db_query(
                "SELECT count(rev) FROM revision WHERE repos=%s",
                (repos.id, )):
            printout(
                ngettext('%(num)s revision cached.',
                         '%(num)s revisions cached.', num=cnt))
    printout(_("Done."))
def register_repository(self, repository, name=None):
    """Register a repository with Trac.

    :param repository: project repository object (provides ``project``,
        ``name``, ``type`` and ``path`` attributes)
    :param name: optional name to register under in Trac; defaults to
        the repository's own name
    :returns: True on success, False on any failure (already
        registered, unsupported type, missing environment, or error
        while adding/synchronising)
    """
    project = repository.project
    tracname = name if name is not None else repository.name
    # Refuse to register the same repository twice for this project.
    if repository.name in project.data.get('plugins', {}).get(
            'trac', {}).get(repository.type, {}):
        logger.error("Repository %s:%s is already registered in project %s",
                     repository.type, repository.name, project.name)
        return False
    if repository.type not in self.typemap:
        logger.error("Repository type %s is not supported in Trac",
                     repository.type)
        return False
    if not self.has_env(project):
        logger.warning("Tried to add repository %s:%s to Trac of project %s, but there is no environment",
                       repository.type, repository.name, project.name)
        return False
    try:
        env = Environment(self.get_env_path(project))
        DbRepositoryProvider(env).add_repository(
            tracname, repository.path, self.typemap[repository.type])
        # save mapping in project
        project.data.setdefault('plugins', {}).setdefault(
            'trac', {}).setdefault(
                repository.type, {})[repository.name] = tracname
        project.save()
        # Synchronise repository
        rm = RepositoryManager(env)
        repos = rm.get_repository(tracname)
        repos.sync(lambda rev: logger.debug("Synced revision: %s", rev),
                   clean=True)
        return True
    except Exception:
        # BUGFIX: message read "occured while addingrepository"; also
        # dropped the unused 'as e' binding -- logger.exception()
        # records the traceback itself.
        logger.exception(
            "Exception occurred while adding repository %s:%s to Trac of project %s",
            repository.type, repository.name, project.name)
        return False
def pre_process_request(self, req, handler):
    """Pre-process the request by adding 'Zip Archive' links into the
    alternative format links of the browser view.

    The link is constructed from the latest revision number, taken from
    the default repository.

    :param Request req: Trac request
    :param object handler: existing handler
    :returns: Handler, unmodified
    """
    # Add link only in /browser or /browser/?rev= pages
    if (self.browser_regx.match(req.path_info)
            and 'BROWSER_VIEW' in req.perm
            and 'FILE_VIEW' in req.perm):
        # Get default repository and its type
        rm = RepositoryManager(self.env)
        repo = rm.get_repository('')
        repo_type = rm.repository_type
        # Construct the export revision from the request or repo head.
        try:
            latest_rev = plaintext(
                str(req.args.get('rev', repo.get_youngest_rev())))
        except Exception:
            # BUGFIX: the original bare 'except: pass' swallowed the
            # failure but left latest_rev unbound, crashing with
            # NameError at add_link() below.  Bail out instead.
            return handler
        # Use Trac's internal implementation for Subversion
        if repo_type == 'svn':
            return handler
        # For other types, iterate supported formats
        for format, info in self.formats.items():
            add_link(
                req, 'alternate',
                req.href('export/archive', rev=latest_rev, format=format),
                _(info['desc']), info['mime'], info['ext'])
    return handler
def _render_repository_index(self, context, all_repositories, order, desc):
    """Collect the per-repository entries for the repository index view.

    Each entry is a 5-tuple (reponame, repoinfo, repos, youngest,
    errmsg).  NOTE(review): this variant ends after collecting entries,
    with *order* and *desc* unused and no return statement -- the
    ordering/return logic presumably lives elsewhere; confirm against
    the full file.
    """
    # Color scale for the age column
    timerange = custom_colorizer = None
    if self.color_scale:
        custom_colorizer = self.get_custom_colorizer()
    rm = RepositoryManager(self.env)
    repositories = []
    for reponame, repoinfo in all_repositories.iteritems():
        # Skip the default (unnamed) alias and hidden repositories.
        if not reponame or as_bool(repoinfo.get('hidden')):
            continue
        try:
            repos = rm.get_repository(reponame)
            if repos:
                if not repos.can_view(context.perm):
                    continue
                try:
                    youngest = repos.get_changeset(repos.youngest_rev)
                except NoSuchChangeset:
                    youngest = None
                if self.color_scale and youngest:
                    # Track the overall age range for the colorizer.
                    if not timerange:
                        timerange = TimeRange(youngest.date)
                    else:
                        timerange.insert(youngest.date)
                entry = (reponame, repoinfo, repos, youngest, None)
            else:
                entry = (reponame, repoinfo, None, None, u"\u2013")
        except TracError, err:
            # Broken repository: keep an entry carrying the error text.
            entry = (reponame, repoinfo, None, None,
                     exception_to_unicode(err))
        if entry[-1] is not None:  # Check permission in case of error
            root = Resource('repository', reponame).child('source', '/')
            if 'BROWSER_VIEW' not in context.perm(root):
                continue
        repositories.append(entry)
def process_request(self, req):
    """Serve autocomplete suggestions as JSON.

    The 'strategy' request argument selects the completion domain
    (linkresolvers, ticket, wikipage, macro, source) and 'q' is the
    typed prefix.  Each branch sends its completions via
    self._send_json(); the trailing raise only fires for an unknown
    strategy (presumably _send_json terminates the request -- confirm).
    """
    strategy = req.args.get('strategy')
    term = req.args.get('q')
    if strategy == 'linkresolvers':
        # Complete TracLinks resolver names (e.g. 'ticket:', 'wiki:').
        wiki = WikiSystem(self.env)
        completions = []
        for provider in wiki.syntax_providers:
            for name, resolver in provider.get_link_resolvers():
                if name.startswith(term):
                    completions.append(name)
        self._send_json(req, completions)
    elif strategy == 'ticket':
        # Complete ticket ids by numeric prefix, newest first.
        with self.env.db_query as db:
            rows = db("""
                SELECT id, summary FROM ticket
                WHERE %s %s
                ORDER BY changetime DESC LIMIT 10
                """ % (db.cast('id', 'text'), db.prefix_match()),
                (db.prefix_match_value(term), ))
            completions = [{
                'id': row[0],
                'summary': row[1],
            } for row in rows
                if 'TICKET_VIEW' in req.perm(Resource('ticket', row[0]))]
            self._send_json(req, completions)
    elif strategy == 'wikipage':
        # Complete wiki page names by prefix.
        with self.env.db_query as db:
            rows = db("""
                SELECT name FROM wiki
                WHERE name %s
                GROUP BY name ORDER BY name LIMIT 10
                """ % db.prefix_match(), (db.prefix_match_value(term), ))
            completions = [row[0] for row in rows
                           if 'WIKI_VIEW' in req.perm(
                               Resource('wiki', row[0]))]
            self._send_json(req, completions)
    elif strategy == 'macro':
        # Complete macro names; a unique match gets a full HTML
        # description, multiple matches get one-line summaries.
        resource = Resource()
        context = web_context(req, resource)
        wiki = WikiSystem(self.env)
        macros = []
        for provider in wiki.macro_providers:
            names = list(provider.get_macros() or [])
            for name in names:
                if name.startswith(term):
                    macros.append((name, provider))
        completions = []
        if len(macros) == 1:
            name, provider = macros[0]
            descr = provider.get_macro_description(name)
            if isinstance(descr, (tuple, list)):
                # (domain, message) pair: translate before rendering.
                descr = dgettext(descr[0], to_unicode(descr[1]))
            descr = format_to_html(self.env, context, descr)
            completions.append({
                'name': name,
                'description': descr,
            })
        else:
            for name, provider in macros:
                descr = provider.get_macro_description(name)
                if isinstance(descr, (tuple, list)):
                    descr = dgettext(descr[0], to_unicode(descr[1]))
                descr = format_to_oneliner(self.env, context, descr,
                                           shorten=True)
                completions.append({
                    'name': name,
                    'description': descr,
                })
        self._send_json(req, completions)
    elif strategy == 'source':
        # Complete repository names, then paths, then '@rev' suffixes.
        rm = RepositoryManager(self.env)
        completions = []
        if term.find('/') == -1:
            for reponame, repoinfo in rm.get_all_repositories().iteritems():
                if 'BROWSER_VIEW' in req.perm(
                        Resource('repository', reponame)):
                    if len(term) == 0 or reponame.lower().startswith(
                            term.lower()):
                        completions.append(reponame+'/')
        else:
            reponame, path = term.split('/', 1)
            repos = rm.get_repository(reponame)
            if repos is not None:
                if path.find('@') != -1:
                    # Complete a revision after '@' from recent history.
                    path, search_rev = path.rsplit('@', 1)
                    node = repos.get_node(path, repos.youngest_rev)
                    if node.can_view(req.perm):
                        for r in node.get_history(10):
                            if str(r[1]).startswith(search_rev):
                                completions.append(
                                    '%s/%s@%s' % (reponame, path, r[1]))
                else:
                    # Complete an entry name inside the parent dir.
                    if path.find('/') != -1:
                        dir, filename = path.rsplit('/', 1)
                    else:
                        dir, filename = '/', path
                    node = repos.get_node(dir, repos.youngest_rev)
                    completions = ['%s/%s%s' % (reponame, n.path,
                                                '/' if n.isdir else '')
                                   for n in node.get_entries()
                                   if n.can_view(req.perm)
                                   and n.name.startswith(filename)]
        self._send_json(req, completions)
    raise TracError()
def render_property_diff(self, name, old_context, old_props, new_context,
                         new_props, options):
    """Render the diff of a merge-tracking property as an HTML list item.

    The property value is expected to be lines of ``path:revranges``
    (presumably svnmerge/mergeinfo-style data -- the exact property
    names come from the caller). Revisions are compared per source path
    and shown as added/removed (and (non-)inheritable) ranges.
    """
    # Build 5 columns table showing modifications on merge sources
    # || source || added || removed || added (ni) || removed (ni) ||
    # || source || removed ||
    rm = RepositoryManager(self.env)
    repos = rm.get_repository(old_context.resource.parent.id)

    def parse_sources(props):
        # Map scoped source path -> (inheritable revs, non-inheritable
        # revs), both as plain sets for easy set algebra below.
        sources = {}
        for line in props[name].splitlines():
            path, revs = line.split(':', 1)
            spath = _path_within_scope(repos.scope, path)
            if spath is not None:
                inheritable, non_inheritable = _partition_inheritable(revs)
                sources[spath] = (set(Ranges(inheritable)),
                                  set(Ranges(non_inheritable)))
        return sources

    old_sources = parse_sources(old_props)
    new_sources = parse_sources(new_props)
    # Go through new sources, detect modified ones or added ones
    # Label wording flips for the "blocked" flavour of the property.
    blocked = name.endswith('blocked')
    added_label = [_("merged: "), _("blocked: ")][blocked]
    removed_label = [_("reverse-merged: "), _("un-blocked: ")][blocked]
    added_ni_label = _("marked as non-inheritable: ")
    removed_ni_label = _("unmarked as non-inheritable: ")

    def revs_link(revs, context):
        # Note: reads `spath` from the enclosing loop (late binding).
        if revs:
            revs = to_ranges(revs)
            # Zero-width space after commas allows line wrapping.
            return _get_revs_link(revs.replace(',', u',\u200b'),
                                  context, spath, revs)

    modified_sources = []
    for spath, (new_revs, new_revs_ni) in new_sources.iteritems():
        if spath in old_sources:
            (old_revs, old_revs_ni), status = old_sources.pop(spath), None
        else:
            old_revs = old_revs_ni = set()
            status = _(' (added)')
        added = new_revs - old_revs
        removed = old_revs - new_revs
        added_ni = new_revs_ni - old_revs_ni
        removed_ni = old_revs_ni - new_revs_ni
        try:
            # Restrict to revisions that actually touched the source
            # node, to avoid linking irrelevant revisions.
            all_revs = set(repos._get_node_revs(spath))
            # TODO: also pass first_rev here, for getting a smaller set
            # (this is an optimization fix, result is already correct)
            added &= all_revs
            removed &= all_revs
            added_ni &= all_revs
            removed_ni &= all_revs
        except NoSuchNode:
            pass
        if added or removed:
            modified_sources.append((
                spath, [_get_source_link(spath, new_context), status],
                added and tag(added_label, revs_link(added, new_context)),
                removed and tag(removed_label,
                                revs_link(removed, old_context)),
                added_ni and tag(added_ni_label,
                                 revs_link(added_ni, new_context)),
                removed_ni and tag(removed_ni_label,
                                   revs_link(removed_ni, old_context))
            ))
    # Go through remaining old sources, those were deleted
    removed_sources = []
    for spath, old_revs in old_sources.iteritems():
        removed_sources.append((spath, _get_source_link(spath, old_context)))
    if modified_sources or removed_sources:
        modified_sources.sort()
        removed_sources.sort()
        changes = tag.table(tag.tbody(
            [tag.tr(tag.td(c) for c in cols[1:])
             for cols in modified_sources],
            [tag.tr(tag.td(src), tag.td(_('removed'), colspan=4))
             for spath, src in removed_sources]), class_='props')
    else:
        changes = tag.em(_(' (with no actual effect on merging)'))
    return tag.li(tag_('Property %(prop)s changed', prop=tag.strong(name)),
                  changes)
def render_property_diff(self, name, old_context, old_props, new_context,
                         new_props, options):
    """Render the diff of a merge-tracking property as an HTML list item.

    Variant that batches node lookups: changed sources are collected
    first, then ``repos._get_changed_revs()`` is queried once for all of
    them to filter out revisions that did not touch each source path.
    """
    # Build 5 columns table showing modifications on merge sources
    # || source || added || removed || added (ni) || removed (ni) ||
    # || source || removed ||
    rm = RepositoryManager(self.env)
    repos = rm.get_repository(old_context.resource.parent.id)

    def parse_sources(props):
        # Map scoped source path -> (inheritable revs, non-inheritable
        # revs), both as plain sets for the set algebra below.
        sources = {}
        for line in props[name].splitlines():
            path, revs = line.split(':', 1)
            spath = _path_within_scope(repos.scope, path)
            if spath is not None:
                inheritable, non_inheritable = _partition_inheritable(revs)
                sources[spath] = (set(Ranges(inheritable)),
                                  set(Ranges(non_inheritable)))
        return sources

    old_sources = parse_sources(old_props)
    new_sources = parse_sources(new_props)
    # Go through new sources, detect modified ones or added ones
    # Label wording flips for the "blocked" flavour of the property.
    blocked = name.endswith('blocked')
    added_label = [_("merged: "), _("blocked: ")][blocked]
    removed_label = [_("reverse-merged: "), _("un-blocked: ")][blocked]
    added_ni_label = _("marked as non-inheritable: ")
    removed_ni_label = _("unmarked as non-inheritable: ")
    sources = []
    changed_revs = {}
    changed_nodes = []
    for spath, (new_revs, new_revs_ni) in new_sources.iteritems():
        new_spath = spath not in old_sources
        if new_spath:
            old_revs = old_revs_ni = set()
        else:
            old_revs, old_revs_ni = old_sources.pop(spath)
        added = new_revs - old_revs
        removed = old_revs - new_revs
        # unless new revisions differ from old revisions
        if not added and not removed:
            continue
        added_ni = new_revs_ni - old_revs_ni
        removed_ni = old_revs_ni - new_revs_ni
        revs = sorted(added | removed | added_ni | removed_ni)
        try:
            # Remember (node at newest rev, oldest rev) for the batched
            # changed-revs lookup below.
            node = repos.get_node(spath, revs[-1])
            changed_nodes.append((node, revs[0]))
        except NoSuchNode:
            pass
        sources.append((spath, new_spath, added, removed, added_ni,
                        removed_ni))
    if changed_nodes:
        changed_revs = repos._get_changed_revs(changed_nodes)

    def revs_link(revs, context):
        # Note: reads `spath` from the enclosing loop (late binding).
        if revs:
            revs = to_ranges(revs)
            # Zero-width space after commas allows line wrapping.
            return _get_revs_link(revs.replace(',', u',\u200b'),
                                  context, spath, revs)

    modified_sources = []
    for spath, new_spath, added, removed, added_ni, removed_ni in sources:
        if spath in changed_revs:
            # Restrict to revisions that actually touched the source.
            revs = set(changed_revs[spath])
            added &= revs
            removed &= revs
            added_ni &= revs
            removed_ni &= revs
        if added or removed:
            if new_spath:
                status = _(" (added)")
            else:
                status = None
            modified_sources.append((
                spath, [_get_source_link(spath, new_context), status],
                added and tag(added_label, revs_link(added, new_context)),
                removed and tag(removed_label,
                                revs_link(removed, old_context)),
                added_ni and tag(added_ni_label,
                                 revs_link(added_ni, new_context)),
                removed_ni and tag(removed_ni_label,
                                   revs_link(removed_ni, old_context))
            ))
    # Go through remaining old sources, those were deleted
    removed_sources = []
    for spath, old_revs in old_sources.iteritems():
        removed_sources.append((spath, _get_source_link(spath, old_context)))
    if modified_sources or removed_sources:
        modified_sources.sort()
        removed_sources.sort()
        changes = tag.table(tag.tbody(
            [tag.tr(tag.td(c) for c in cols[1:])
             for cols in modified_sources],
            [tag.tr(tag.td(src), tag.td(_('removed'), colspan=4))
             for spath, src in removed_sources]), class_='props')
    else:
        changes = tag.em(_(' (with no actual effect on merging)'))
    return tag.li(tag_('Property %(prop)s changed', prop=tag.strong(name)),
                  changes)
def render_property_diff(self, name, old_context, old_props, new_context,
                         new_props, options):
    """Render the diff of a merge-tracking property as an HTML list item.

    Variant that also understands ``svn:mergeinfo`` (one source per
    line) in addition to whitespace-separated svnmerge-style values,
    and batches node lookups via ``repos._get_changed_revs()``.
    """
    # Build 5 columns table showing modifications on merge sources
    # || source || added || removed || added (ni) || removed (ni) ||
    # || source || removed ||
    rm = RepositoryManager(self.env)
    repos = rm.get_repository(old_context.resource.parent.id)

    def parse_sources(props):
        # Map scoped source path -> (inheritable revs, non-inheritable
        # revs). svn:mergeinfo separates sources with newlines; the
        # other merge properties use whitespace.
        sources = {}
        value = props[name]
        lines = value.splitlines() if name == 'svn:mergeinfo' \
                else value.split()
        for line in lines:
            path, revs = line.split(':', 1)
            spath = _path_within_scope(repos.scope, path)
            if spath is not None:
                inheritable, non_inheritable = _partition_inheritable(revs)
                sources[spath] = (set(Ranges(inheritable)),
                                  set(Ranges(non_inheritable)))
        return sources

    old_sources = parse_sources(old_props)
    new_sources = parse_sources(new_props)
    # Go through new sources, detect modified ones or added ones
    # Label wording flips for the "blocked" flavour of the property.
    blocked = name.endswith('blocked')
    added_label = [_("merged: "), _("blocked: ")][blocked]
    removed_label = [_("reverse-merged: "), _("un-blocked: ")][blocked]
    added_ni_label = _("marked as non-inheritable: ")
    removed_ni_label = _("unmarked as non-inheritable: ")
    sources = []
    changed_revs = {}
    changed_nodes = []
    for spath, (new_revs, new_revs_ni) in new_sources.iteritems():
        new_spath = spath not in old_sources
        if new_spath:
            old_revs = old_revs_ni = set()
        else:
            old_revs, old_revs_ni = old_sources.pop(spath)
        added = new_revs - old_revs
        removed = old_revs - new_revs
        # unless new revisions differ from old revisions
        if not added and not removed:
            continue
        added_ni = new_revs_ni - old_revs_ni
        removed_ni = old_revs_ni - new_revs_ni
        revs = sorted(added | removed | added_ni | removed_ni)
        try:
            # Remember (node at newest rev, oldest rev) for the batched
            # changed-revs lookup below.
            node = repos.get_node(spath, revs[-1])
            changed_nodes.append((node, revs[0]))
        except NoSuchNode:
            pass
        sources.append(
            (spath, new_spath, added, removed, added_ni, removed_ni))
    if changed_nodes:
        changed_revs = repos._get_changed_revs(changed_nodes)

    def revs_link(revs, context):
        # Note: reads `spath` from the enclosing loop (late binding).
        if revs:
            revs = to_ranges(revs)
            # Zero-width space after commas allows line wrapping.
            return _get_revs_link(revs.replace(',', u',\u200b'),
                                  context, spath, revs)

    modified_sources = []
    for spath, new_spath, added, removed, added_ni, removed_ni in sources:
        if spath in changed_revs:
            # Restrict to revisions that actually touched the source.
            revs = set(changed_revs[spath])
            added &= revs
            removed &= revs
            added_ni &= revs
            removed_ni &= revs
        if added or removed:
            if new_spath:
                status = _(" (added)")
            else:
                status = None
            modified_sources.append(
                (spath, [_get_source_link(spath, new_context), status],
                 added and tag(added_label, revs_link(added, new_context)),
                 removed and tag(removed_label,
                                 revs_link(removed, old_context)),
                 added_ni and tag(added_ni_label,
                                  revs_link(added_ni, new_context)),
                 removed_ni and tag(removed_ni_label,
                                    revs_link(removed_ni, old_context))))
    # Go through remaining old sources, those were deleted
    removed_sources = []
    for spath, old_revs in old_sources.iteritems():
        removed_sources.append(
            (spath, _get_source_link(spath, old_context)))
    if modified_sources or removed_sources:
        modified_sources.sort()
        removed_sources.sort()
        changes = tag.table(tag.tbody([
            tag.tr(tag.td(c) for c in cols[1:])
            for cols in modified_sources
        ], [
            tag.tr(tag.td(src), tag.td(_('removed'), colspan=4))
            for spath, src in removed_sources
        ]), class_='props')
    else:
        changes = tag.em(_(' (with no actual effect on merging)'))
    return tag.li(tag_('Property %(prop)s changed', prop=tag.strong(name)),
                  changes)
class RepositoryManager(Component):
    """Adds creation, modification and deletion of repositories.

    This class extends Trac's `RepositoryManager` and adds some
    capabilities that allow users to create and manage repositories.
    The original `RepositoryManager` *just* allows adding and removing
    existing repositories from Trac's database, which means that still
    someone must do some shell work on the server.

    To work nicely together with manually created and added
    repositories a new `ManagedRepository` class is used to mark the
    ones that can be handled by this module. It also implements
    forking, if the connector supports that, which creates instances of
    `ForkedRepository`.
    """

    base_dir = Option('repository-manager', 'base_dir', 'repositories',
        doc="""The base folder in which repositories will be created.
        """)

    owner_as_maintainer = BoolOption('repository-manager',
        'owner_as_maintainer', True,
        doc="""If true, the owner will have the role of a maintainer,
        too. Otherwise, he will only act as an administrator for his
        repositories.
        """)

    connectors = ExtensionPoint(IAdministrativeRepositoryConnector)

    # Replaced in __init__ with Trac's own RepositoryManager instance.
    manager = None

    # Role names used for the per-repository permission lists.
    roles = ('maintainer', 'writer', 'reader')

    def __init__(self):
        self.manager = TracRepositoryManager(self.env)

    def get_supported_types(self):
        """Return the list of supported repository types."""
        # Only types that both a connector (with priority >= 0) and
        # Trac itself support.
        types = set(type for connector in self.connectors
                    for (type, prio) in connector.get_supported_types() or []
                    if prio >= 0)
        return list(types & set(self.manager.get_supported_types()))

    def get_forkable_types(self):
        """Return the list of forkable repository types."""
        return list(type for type in self.get_supported_types()
                    if self.can_fork(type))

    def can_fork(self, type):
        """Return whether the given repository type can be forked."""
        return self._get_repository_connector(type).can_fork(type)

    def can_delete_changesets(self, type):
        """Return whether the given repository type can delete
        changesets."""
        return self._get_repository_connector(type).can_delete_changesets(type)

    def can_ban_changesets(self, type):
        """Return whether the given repository type can ban
        changesets."""
        return self._get_repository_connector(type).can_ban_changesets(type)

    def get_forkable_repositories(self):
        """Return a dictionary of repository information, indexed by
        name and including only repositories that can be forked."""
        repositories = self.manager.get_all_repositories()
        result = {}
        for key in repositories:
            if repositories[key]['type'] in self.get_forkable_types():
                result[key] = repositories[key]['name']
        return result

    def get_managed_repositories(self):
        """Return the list of existing managed repositories."""
        repositories = self.manager.get_all_repositories()
        result = {}
        for key in repositories:
            try:
                # Conversion raises for repositories not created by
                # this manager; those are silently skipped.
                self.get_repository(repositories[key]['name'], True)
                result[key] = repositories[key]['name']
            except:
                pass
        return result

    def get_repository(self, name, convert_to_managed=False):
        """Retrieve the appropriate repository for the given name.

        Converts the found repository into a `ManagedRepository`, if
        requested. In that case, expect an exception if the found
        repository was not created using this `RepositoryManager`.
        """
        repo = self.manager.get_repository(name)
        if repo and convert_to_managed:
            convert_managed_repository(self.env, repo)
        return repo

    def get_repository_by_id(self, id, convert_to_managed=False):
        """Retrieve a matching `Repository` for the given id."""
        repositories = self.manager.get_all_repositories()
        for name, info in repositories.iteritems():
            if info['id'] == int(id):
                return self.get_repository(name, convert_to_managed)
        return None

    def get_repository_by_path(self, path):
        """Retrieve a matching `Repository` for the given path."""
        return self.manager.get_repository_by_path(path)

    def get_base_directory(self, type):
        """Get the base directory for the given repository type."""
        return os.path.join(self.env.path, self.base_dir, type)

    def create(self, repo):
        """Create a new empty repository.

         * Checks if the new repository can be created and added
         * Prepares the filesystem
         * Uses an appropriate connector to create and initialize the
           repository
         * Postprocesses the filesystem (modes)
         * Inserts everything into the database and synchronizes Trac
        """
        if self.get_repository(repo['name']) or os.path.lexists(repo['dir']):
            raise TracError(_("Repository or directory already exists."))
        self._prepare_base_directory(repo['dir'])
        self._get_repository_connector(repo['type']).create(repo)
        self._adjust_modes(repo['dir'])
        with self.env.db_transaction as db:
            id = self.manager.get_repository_id(repo['name'])
            # One empty entry per role ('maintainers', 'writers', ...).
            roles = list((id, role + 's', '') for role in self.roles)
            db.executemany(
                "INSERT INTO repository (id, name, value) VALUES (%s, %s, %s)",
                [(id, 'dir', repo['dir']),
                 (id, 'type', repo['type']),
                 (id, 'owner', repo['owner'])] + roles)
            self.manager.reload_repositories()
        self.manager.get_repository(repo['name']).sync(None, True)
        self.update_auth_files()

    def fork_local(self, repo):
        """Fork a local repository.

         * Checks if the new repository can be created and added
         * Checks if the origin exists and can be forked
         * The filesystem is obviously already prepared
         * Uses an appropriate connector to fork the repository
         * Postprocesses the filesystem (modes)
         * Inserts everything into the database and synchronizes Trac
        """
        if self.get_repository(repo['name']) or os.path.lexists(repo['dir']):
            raise TracError(_("Repository or directory already exists."))
        origin = self.get_repository(repo['origin'], True)
        if not origin:
            raise TracError(_("Origin for local fork does not exist."))
        if origin.type != repo['type']:
            raise TracError(_("Fork of local repository must have same type "
                              "as origin."))
        repo.update({'origin_url': 'file://' + origin.directory})
        self._prepare_base_directory(repo['dir'])
        self._get_repository_connector(repo['type']).fork(repo)
        self._adjust_modes(repo['dir'])
        with self.env.db_transaction as db:
            id = self.manager.get_repository_id(repo['name'])
            roles = list((id, role + 's', '') for role in self.roles)
            db.executemany(
                "INSERT INTO repository (id, name, value) VALUES (%s, %s, %s)",
                [(id, 'dir', repo['dir']),
                 (id, 'type', repo['type']),
                 (id, 'owner', repo['owner']),
                 (id, 'description', origin.description),
                 (id, 'origin', origin.id),
                 (id, 'inherit_readers', True)] + roles)
            self.manager.reload_repositories()
        self.manager.get_repository(repo['name']).sync(None, True)
        self.update_auth_files()

    def modify(self, repo, data):
        """Modify an existing repository."""
        convert_managed_repository(self.env, repo)
        if repo.directory != data['dir']:
            shutil.move(repo.directory, data['dir'])
        with self.env.db_transaction as db:
            db.executemany(
                "UPDATE repository SET value = %s WHERE id = %s AND name = %s",
                [(data[key], repo.id, key) for key in data])
            self.manager.reload_repositories()
        if repo.directory != data['dir']:
            # The directory moved: resync the freshly reloaded repo.
            repo = self.get_repository(data['name'])
            repo.sync(clean=True)
        self.update_auth_files()

    def remove(self, repo, delete):
        """Remove an existing repository.

        Depending on the parameter delete this method also removes the
        repository from the filesystem. This can not be undone.
        """
        convert_managed_repository(self.env, repo)
        if delete:
            shutil.rmtree(repo.directory)
        with self.env.db_transaction as db:
            db("DELETE FROM repository WHERE id = %d" % repo.id)
            db("DELETE FROM revision WHERE repos = %d" % repo.id)
            db("DELETE FROM node_change WHERE repos = %d" % repo.id)
        self.manager.reload_repositories()
        self.update_auth_files()

    def delete_changeset(self, repo, rev, ban):
        """Delete a changeset from a managed repository, if supported.

        Depending on the parameter ban this method also marks the
        changeset to be kept out of the repository. That features needs
        special support by the used scm.
        """
        convert_managed_repository(self.env, repo)
        self._get_repository_connector(repo.type).delete_changeset(
            repo, rev, ban)

    def add_role(self, repo, role, subject):
        """Add a role for the given repository."""
        assert role in self.roles
        convert_managed_repository(self.env, repo)
        role_attr = '_' + role + 's'
        setattr(repo, role_attr, getattr(repo, role_attr) | set([subject]))
        self._update_roles_in_db(repo)

    def revoke_roles(self, repo, roles):
        """Revoke a list of `role, subject` pairs."""
        convert_managed_repository(self.env, repo)
        for role, subject in roles:
            role_attr = '_' + role + 's'
            # NOTE(review): `config` is computed but never used; the
            # setattr below recomputes the same difference.
            config = getattr(repo, role_attr)
            config = config - set([subject])
            setattr(repo, role_attr,
                    getattr(repo, role_attr) - set([subject]))
        self._update_roles_in_db(repo)

    def update_auth_files(self):
        """Rewrites all configured auth files for all managed
        repositories.
        """
        types = self.get_supported_types()
        # Collect only repositories that convert to managed ones.
        all_repositories = []
        for repo in self.manager.get_real_repositories():
            try:
                convert_managed_repository(self.env, repo)
                all_repositories.append(repo)
            except:
                pass
        for type in types:
            repos = [repo for repo in all_repositories if repo.type == type]
            self._get_repository_connector(type).update_auth_files(repos)
        authz_source_file = AuthzSourcePolicy(self.env).authz_file
        if authz_source_file:
            authz_source_path = os.path.join(self.env.path,
                                             authz_source_file)
            authz = ConfigParser()
            # Names starting with '@' denote groups; expand them into
            # a [groups] section.
            groups = set()
            for repo in all_repositories:
                groups |= {name for name in repo.maintainers()
                           if name[0] == '@'}
                groups |= {name for name in repo.writers()
                           if name[0] == '@'}
                groups |= {name for name in repo.readers()
                           if name[0] == '@'}
            authz.add_section('groups')
            for group in groups:
                members = expand_user_set(self.env, [group])
                authz.set('groups', group[1:], ', '.join(sorted(members)))
            authenticated = sorted({u[0] for u in
                                    self.env.get_known_users()})
            authz.set('groups', 'authenticated', ', '.join(authenticated))
            for repo in all_repositories:
                section = repo.reponame + ':/'
                authz.add_section(section)
                r = repo.maintainers() | repo.writers() | repo.readers()

                def apply_user_list(users, action):
                    # Wildcards take precedence over individual users.
                    if not users:
                        return
                    if 'anonymous' in users:
                        authz.set(section, '*', action)
                        return
                    if 'authenticated' in users:
                        authz.set(section, '@authenticated', action)
                        return
                    for user in sorted(users):
                        authz.set(section, user, action)

                apply_user_list(r, 'r')
            self._prepare_base_directory(authz_source_path)
            with open(authz_source_path, 'wb') as authz_file:
                authz.write(authz_file)
            try:
                # Best effort: rw for user and group only.
                modes = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP
                os.chmod(authz_source_path, modes)
            except:
                pass

    ### Private methods

    def _get_repository_connector(self, repo_type):
        """Get the matching connector with maximum priority."""
        return max(((connector, type, prio)
                    for connector in self.connectors
                    for (type, prio) in connector.get_supported_types()
                    if prio >= 0 and type == repo_type),
                   key=lambda x: x[2])[0]

    def _prepare_base_directory(self, directory):
        """Create the base directories and set the correct modes."""
        base = os.path.dirname(directory)
        original_umask = os.umask(0)
        try:
            os.makedirs(base, stat.S_IRWXU | stat.S_IRWXG)
        except OSError, e:
            if e.errno == errno.EEXIST and os.path.isdir(base):
                pass
            else:
                raise
        finally:
            # NOTE(review): SOURCE is truncated here -- the `finally`
            # suite (presumably restoring `original_umask` via
            # os.umask) is missing from this view; confirm against the
            # full file.
def _get_source(self, formatter, source_obj, dest_format):
    """Resolve *source_obj* to a repository, tolerating both Trac APIs.

    Trac 0.12+ exposes ``get_repository_by_path()``; on Trac 0.11 that
    attribute is missing, so fall back to a plain repository lookup.
    """
    manager = RepositoryManager(self.env)
    try:
        # Trac 0.12 and later: resolve the repository from the path.
        repos_name, repos, source_obj = \
            manager.get_repository_by_path(source_obj)
    except AttributeError as e:
        # Trac 0.11: no get_repository_by_path() on the manager.
        repos = manager.get_repository(formatter.req.authname)
def check_permission(self, action, username, resource, perm):
    """Check *action* for *username* on *resource* against the parsed
    authz file.

    Returns True (allow), False (deny) or None (no opinion) following
    Trac's IPermissionPolicy convention -- TODO confirm this class
    implements IPermissionPolicy; the signature matches.
    """
    realm = resource.realm if resource else None
    if (realm, action) in self._handled_perms:
        authz, users = self._get_authz_info()
        if authz is None:
            # Authz file missing/unparsable: deny outright.
            return False
        # The wildcard identities an authz rule may match, most
        # specific first.
        if username == 'anonymous':
            usernames = '$anonymous', '*'
        else:
            usernames = username, '$authenticated', '*'
        if resource is None:
            # No specific resource: allow if the user appears anywhere
            # in the authz file, otherwise abstain.
            return True if users & set(usernames) else None
        rm = RepositoryManager(self.env)
        try:
            repos = rm.get_repository(resource.parent.id)
        except TracError:
            return True  # Allow error to be displayed in the repo index
        if repos is None:
            return True
        # Authz sections may be qualified by a module (repository)
        # name; also consult the unqualified sections as fallback.
        modules = [resource.parent.id or self.authz_module_name]
        if modules[0]:
            modules.append('')

        def check_path_0(spath):
            # Evaluate the authz sections matching this exact path.
            # Returns True (readable), False (explicitly denied) or
            # None (no applicable rule).
            sections = [authz.get(module, {}).get(spath)
                        for module in modules]
            sections = [section for section in sections if section]
            denied = False
            for user in usernames:
                for section in sections:
                    if user in section:
                        if section[user]:
                            return True
                        denied = True
                        # Don't check section without module name
                        # because the section with module name defines
                        # the user's permissions.
                        break
            if denied:
                # All matching identities lack read permission.
                return False

        def check_path(path):
            path = '/' + pathjoin(repos.scope, path)
            if path != '/':
                path += '/'
            # Allow access to parent directories of allowed resources
            for spath in set(sum((list(authz.get(module, {}))
                                  for module in modules), [])):
                if spath.startswith(path):
                    result = check_path_0(spath)
                    if result is True:
                        return True
            # Walk from resource up parent directories
            for spath in parent_iter(path):
                result = check_path_0(spath)
                if result is not None:
                    return result

        if realm == 'source':
            return check_path(resource.id)
        elif realm == 'changeset':
            # A changeset is viewable if any of its changed paths is,
            # or if it contains no changes at all.
            changes = list(repos.get_changeset(resource.id).get_changes())
            if not changes or any(check_path(change[0])
                                  for change in changes):
                return True
def process_request(self, req):
    """Handle a repository export (archive download) request.

    Builds an archive of the default repository at the requested
    revision and format. Subversion is delegated to Trac's changeset
    zip export via redirect; git and hg are archived locally.

    :raises TracError: on unsupported format/VCS or archive failure
    :raises HTTPNotFound: when the requested revision does not exist
    """
    req.perm.require('BROWSER_VIEW')
    req.perm.require('FILE_VIEW')
    # Get default repository and its type
    rm = RepositoryManager(self.env)
    repo = rm.get_repository('')
    repo_type = rm.repository_type
    svn_path = 'trunk'
    # NOTE: `format` shadows the builtin; kept for compatibility.
    format = plaintext(req.args.get('format', 'zip'))
    # Get revision info. For svn it's in format: <revnum>/<path>
    revision = plaintext(str(req.args.get('rev', repo.get_youngest_rev())))
    if repo_type == 'svn':
        # For svn the 'rev' argument actually carries the path part;
        # always export the youngest revision.
        revision = repo.get_youngest_rev()
        svn_path = req.args.get('rev', svn_path)
    # Validate if given revision really exists
    try:
        revision = repo.normalize_rev(revision)
    except NoSuchChangeset:
        raise HTTPNotFound('No such changeset')
    # Validate format
    if format not in self.formats:
        raise TracError('Format is not supported')
    # Load project object based on current environment
    env_name = conf.resolveProjectName(self.env)
    repo_type = self.env.config.get('trac', 'repository_type')
    repo_dir = conf.getEnvironmentVcsPath(env_name)
    project = Project.get(env_name=env_name)
    if repo_type not in conf.supported_scm_systems:
        raise TracError('Non-supported VCS type')
    # Create temporary file where the archive will be written.
    # NOTE(review): delete=False means this file is not removed here --
    # presumably cleaned up further down or by a cron job; verify.
    tempfd = tempfile.NamedTemporaryFile(delete=False)
    # Dump the repository per type, into defined location
    try:
        if repo_type == 'git':
            # Use short revision format
            # NOTE(review): revision is truncated twice ([:6] below is
            # already a no-op after this line).
            revision = revision[:6]
            prefix = '%s-%s' % (env_name, revision[:6])
            self._archive_git(repo_dir, revision, format, tempfd.name,
                              prefix)
        elif repo_type == 'hg':
            # In case of both local:global revision format, use only global
            if ':' in revision:
                revision = revision.split(':', 1)[1]
            prefix = '%s-%s' % (env_name, revision[:6])
            self._archive_hg(repo_dir, revision, format, tempfd.name,
                             prefix)
        elif repo_type == 'svn':
            assert format == 'zip', \
                'Only zip format is supported for subversion'
            # Redirect to Trac's internal changeset functionality
            # Example: https://localhost/svnproject/changeset/4/trunk?old_path=%2F&format=zip
            changeset_href = Href('/%s/changeset' % env_name)
            return req.redirect(changeset_href(revision, svn_path,
                                               old_path='/', format='zip'))
    # Redirect raises RequestDone: re-raise it
    except RequestDone:
        raise
    except Exception, err:
        self.env.log.exception('Repository dump failed: %s' % err)
        raise TracError('Repository archive failed - please try again later')
def _read_source_from_repos(self, formatter, src_path):
    """Locate the repository holding *src_path*, across Trac versions.

    Uses ``get_repository_by_path()`` when available (Trac 0.12+) and
    falls back to a direct repository lookup on Trac 0.11.
    """
    manager = RepositoryManager(self.env)
    try:
        # Trac 0.12 and later
        repos_name, repos, source_obj = \
            manager.get_repository_by_path(src_path)
    except AttributeError as e:
        # Trac 0.11 fallback
        repos = manager.get_repository(formatter.req.authname)
def render_property_diff(self, name, old_context, old_props, new_context,
                         new_props, options):
    """Render the diff of a merge-tracking property as an HTML list item.

    The property value is expected to be lines of ``path:revranges``
    (presumably svnmerge/mergeinfo-style data -- the exact property
    names come from the caller). Revisions are compared per source path
    and shown as added/removed (and (non-)inheritable) ranges.
    """
    # Build 5 columns table showing modifications on merge sources
    # || source || added || removed || added (ni) || removed (ni) ||
    # || source || removed ||
    rm = RepositoryManager(self.env)
    repos = rm.get_repository(old_context.resource.parent.id)

    def parse_sources(props):
        # Map scoped source path -> (inheritable revs, non-inheritable
        # revs), both as plain sets for the set algebra below.
        sources = {}
        for line in props[name].splitlines():
            path, revs = line.split(':', 1)
            spath = _path_within_scope(repos.scope, path)
            if spath is not None:
                inheritable, non_inheritable = _partition_inheritable(revs)
                sources[spath] = (set(Ranges(inheritable)),
                                  set(Ranges(non_inheritable)))
        return sources

    old_sources = parse_sources(old_props)
    new_sources = parse_sources(new_props)
    # Go through new sources, detect modified ones or added ones
    # Label wording flips for the "blocked" flavour of the property.
    blocked = name.endswith('blocked')
    added_label = [_("merged: "), _("blocked: ")][blocked]
    removed_label = [_("reverse-merged: "), _("un-blocked: ")][blocked]
    added_ni_label = _("marked as non-inheritable: ")
    removed_ni_label = _("unmarked as non-inheritable: ")

    def revs_link(revs, context):
        # Note: reads `spath` from the enclosing loop (late binding).
        if revs:
            revs = to_ranges(revs)
            # Zero-width space after commas allows line wrapping.
            return _get_revs_link(revs.replace(',', u',\u200b'),
                                  context, spath, revs)

    modified_sources = []
    for spath, (new_revs, new_revs_ni) in new_sources.iteritems():
        if spath in old_sources:
            (old_revs, old_revs_ni), status = old_sources.pop(spath), None
        else:
            old_revs = old_revs_ni = set()
            status = _(' (added)')
        added = new_revs - old_revs
        removed = old_revs - new_revs
        added_ni = new_revs_ni - old_revs_ni
        removed_ni = old_revs_ni - new_revs_ni
        try:
            # Restrict to revisions that actually touched the source
            # node, to avoid linking irrelevant revisions.
            all_revs = set(repos._get_node_revs(spath))
            # TODO: also pass first_rev here, for getting a smaller set
            # (this is an optimization fix, result is already correct)
            added &= all_revs
            removed &= all_revs
            added_ni &= all_revs
            removed_ni &= all_revs
        except NoSuchNode:
            pass
        if added or removed:
            modified_sources.append(
                (spath, [_get_source_link(spath, new_context), status],
                 added and tag(added_label, revs_link(added, new_context)),
                 removed and tag(removed_label,
                                 revs_link(removed, old_context)),
                 added_ni and tag(added_ni_label,
                                  revs_link(added_ni, new_context)),
                 removed_ni and tag(removed_ni_label,
                                    revs_link(removed_ni, old_context))))
    # Go through remaining old sources, those were deleted
    removed_sources = []
    for spath, old_revs in old_sources.iteritems():
        removed_sources.append(
            (spath, _get_source_link(spath, old_context)))
    if modified_sources or removed_sources:
        modified_sources.sort()
        removed_sources.sort()
        changes = tag.table(tag.tbody([
            tag.tr(tag.td(c) for c in cols[1:])
            for cols in modified_sources
        ], [
            tag.tr(tag.td(src), tag.td(_('removed'), colspan=4))
            for spath, src in removed_sources
        ]), class_='props')
    else:
        changes = tag.em(_(' (with no actual effect on merging)'))
    return tag.li(tag_('Property %(prop)s changed', prop=tag.strong(name)),
                  changes)