def __init__(self, path, rev, msg=None):
    """Raise a localized 'no such node' error for `path` at `rev`.

    When `msg` is given it is prepended to the standard message.
    """
    details = (_("No node %(path)s at revision %(rev)s",
                 path=path, rev=rev)
               if msg is None else
               _("%(msg)s: No node %(path)s at revision %(rev)s",
                 msg=msg, path=path, rev=rev))
    ResourceNotFound.__init__(self, details, _('No such node'))
def putAttachment(self, req, ticket, filename, description, data,
                  replace=True):
    """ Add an attachment, optionally (and defaulting to) overwriting an
    existing one. Returns filename."""
    # Refuse to attach to a ticket that does not exist.
    if not model.Ticket(self.env, ticket).exists:
        raise ResourceNotFound('Ticket "%s" does not exist' % ticket)
    if replace:
        # Best-effort removal of a previous attachment with the same
        # name; any TracError (e.g. no such attachment) is ignored.
        try:
            existing = Attachment(self.env, 'ticket', ticket, filename)
            req.perm(existing.resource).require('ATTACHMENT_DELETE')
            existing.delete()
        except TracError:
            pass
    created = Attachment(self.env, 'ticket', ticket)
    req.perm(created.resource).require('ATTACHMENT_CREATE')
    created.author = req.authname
    created.description = description
    payload = data.data
    created.insert(filename, StringIO(payload), len(payload))
    return created.filename
class BrowserModule(Component):
    """Repository browser: renders the repository index, directory
    listings and file views, and contributes navigation, permissions
    and contextual links for source browsing."""

    implements(INavigationContributor, IPermissionRequestor, IRequestHandler,
               IWikiSyntaxProvider, IHTMLPreviewAnnotator, IWikiMacroProvider)

    property_renderers = ExtensionPoint(IPropertyRenderer)

    downloadable_paths = ListOption('browser', 'downloadable_paths',
                                    '/trunk, /branches/*, /tags/*',
        doc="""List of repository paths that can be downloaded.
        Leave this option empty if you want to disable all downloads,
        otherwise set it to a comma-separated list of authorized paths
        (those paths are glob patterns, i.e. "*" can be used as a wild
        card). In a multi-repository environment, the path must be
        qualified with the repository name if the path does not point to
        the default repository (e.g. /reponame/trunk). Note that a
        simple prefix matching is performed on the paths, so aliases
        won't get automatically resolved. (''since 0.10'')""")

    color_scale = BoolOption('browser', 'color_scale', True,
        doc="""Enable colorization of the ''age'' column.
        This uses the same color scale as the source code annotation:
        blue is older, red is newer. (''since 0.11'')""")

    # Default RGB endpoints for the age color scale.
    NEWEST_COLOR = (255, 136, 136)

    newest_color = Option('browser', 'newest_color', repr(NEWEST_COLOR),
        doc="""(r,g,b) color triple to use for the color corresponding
        to the newest color, for the color scale used in ''blame'' or
        the browser ''age'' column if `color_scale` is enabled.
        (''since 0.11'')""")

    OLDEST_COLOR = (136, 136, 255)

    oldest_color = Option('browser', 'oldest_color', repr(OLDEST_COLOR),
        doc="""(r,g,b) color triple to use for the color corresponding
        to the oldest color, for the color scale used in ''blame'' or
        the browser ''age'' column if `color_scale` is enabled.
        (''since 0.11'')""")

    intermediate_point = Option('browser', 'intermediate_point', '',
        doc="""If set to a value between 0 and 1 (exclusive), this will
        be the point chosen to set the `intermediate_color` for
        interpolating the color value. (''since 0.11'')""")

    intermediate_color = Option('browser', 'intermediate_color', '',
        doc="""(r,g,b) color triple to use for the color corresponding
        to the intermediate color, if two linear interpolations are used
        for the color scale (see `intermediate_point`). If not set, the
        intermediate color between `oldest_color` and `newest_color`
        will be used. (''since 0.11'')""")

    render_unsafe_content = BoolOption('browser', 'render_unsafe_content',
                                       'false',
        """Whether raw files should be rendered in the browser, or only
        made downloadable. Pretty much any file may be interpreted as
        HTML by the browser, which allows a malicious user to create a
        file containing cross-site scripting attacks. For open
        repositories where anyone can check-in a file, it is recommended
        to leave this option disabled (which is the default).""")

    hidden_properties = ListOption('browser', 'hide_properties', 'svk:merge',
        doc="""Comma-separated list of version control properties to
        hide from the repository browser. (''since 0.9'')""")

    # public methods

    def get_custom_colorizer(self):
        """Returns a converter for values from [0.0, 1.0] to a RGB triple."""

        def interpolate(old, new, value):
            # Provides a linearly interpolated color triple for `value`
            # which must be a floating point value between 0.0 and 1.0
            return tuple([int(b + (a - b) * value) for a, b in zip(new, old)])

        def parse_color(rgb, default):
            # Get three ints out of a `rgb` string or return `default`
            try:
                t = tuple([int(v) for v in re.split(r'(\d+)', rgb)[1::2]])
                return t if len(t) == 3 else default
            except ValueError:
                return default

        newest_color = parse_color(self.newest_color, self.NEWEST_COLOR)
        oldest_color = parse_color(self.oldest_color, self.OLDEST_COLOR)
        try:
            intermediate = float(self.intermediate_point)
        except ValueError:
            intermediate = None

        if intermediate:
            # Two linear segments meeting at `intermediate`; midpoint of
            # the endpoints is used when no intermediate_color is set.
            intermediate_color = parse_color(self.intermediate_color, None)
            if not intermediate_color:
                intermediate_color = tuple([(a + b) / 2 for a, b
                                            in zip(newest_color,
                                                   oldest_color)])
            def colorizer(value):
                if value <= intermediate:
                    value = value / intermediate
                    return interpolate(oldest_color, intermediate_color,
                                       value)
                else:
                    value = (value - intermediate) / (1.0 - intermediate)
                    return interpolate(intermediate_color, newest_color,
                                       value)
        else:
            # Single linear interpolation across the whole [0, 1] range.
            def colorizer(value):
                return interpolate(oldest_color, newest_color, value)
        return colorizer

    # INavigationContributor methods

    def get_active_navigation_item(self, req):
        """Identify the 'browser' main navigation entry as active."""
        return 'browser'

    def get_navigation_items(self, req):
        """Yield the 'Browse Source' main navigation link, but only when
        the user may view the browser and real repositories exist."""
        rm = RepositoryManager(self.env)
        if 'BROWSER_VIEW' in req.perm and rm.get_real_repositories():
            yield ('mainnav', 'browser',
                   tag.a(_('Browse Source'), href=req.href.browser()))

    # IPermissionRequestor methods

    def get_permission_actions(self):
        """Declare the permissions this module checks."""
        return ['BROWSER_VIEW', 'FILE_VIEW']

    # IRequestHandler methods

    def match_request(self, req):
        """Match /export, /browser and /file URLs.

        /export/<rev>/<path> is rewritten into a raw-format browser
        request; legacy /file URLs are permanently redirected to
        /browser.
        """
        match = re.match(r'/(export|browser|file)(/.*)?$', req.path_info)
        if match:
            mode, path = match.groups()
            if mode == 'export':
                if path and '/' in path:
                    path_elts = path.split('/', 2)
                    if len(path_elts) != 3:
                        # e.g. "/export/123" with no path component
                        return False
                    path = path_elts[2]
                    req.args['rev'] = path_elts[1]
                    req.args['format'] = 'raw'
            elif mode == 'file':
                # Legacy URL: redirect permanently to the browser.
                req.redirect(req.href.browser(path, rev=req.args.get('rev'),
                                              format=req.args.get('format')),
                             permanent=True)
            req.args['path'] = path or '/'
            return True

    def process_request(self, req):
        """Render the repository index, a directory listing or a file view.

        Raises ResourceNotFound for unknown repositories or paths, and
        redirects for repository aliases and 'preselected' jumps.
        """
        req.perm.require('BROWSER_VIEW')

        presel = req.args.get('preselected')
        if presel and (presel + '/').startswith(req.href.browser() + '/'):
            req.redirect(presel)

        path = req.args.get('path', '/')
        rev = req.args.get('rev', '')
        # '' and 'head' (any case) both mean "latest revision".
        if rev.lower() in ('', 'head'):
            rev = None
        order = req.args.get('order', 'name').lower()
        desc = req.args.has_key('desc')
        xhr = req.get_header('X-Requested-With') == 'XMLHttpRequest'

        rm = RepositoryManager(self.env)
        all_repositories = rm.get_all_repositories()
        reponame, repos, path = rm.get_repository_by_path(path)

        # Repository index
        show_index = not reponame and path == '/'
        if show_index:
            # Hide the default repository on the index page if it is
            # marked hidden or not viewable by this user.
            if repos and (as_bool(all_repositories[''].get('hidden'))
                          or not repos.is_viewable(req.perm)):
                repos = None

        if not repos and reponame:
            raise ResourceNotFound(_("Repository '%(repo)s' not found",
                                     repo=reponame))
        if reponame and reponame != repos.reponame:  # Redirect alias
            qs = req.query_string
            req.redirect(req.href.browser(repos.reponame or None, path)
                         + ('?' + qs if qs else ''))
        reponame = repos.reponame if repos else None

        # Find node for the requested path/rev
        context = web_context(req)
        node = None
        display_rev = lambda rev: rev
        if repos:
            try:
                if rev:
                    rev = repos.normalize_rev(rev)
                # If `rev` is `None`, we'll try to reuse `None` consistently,
                # as a special shortcut to the latest revision.
                rev_or_latest = rev or repos.youngest_rev
                node = get_existing_node(req, repos, path, rev_or_latest)
            except NoSuchChangeset, e:
                raise ResourceNotFound(e.message,
                                       _('Invalid changeset number'))
            context = context.child(repos.resource.child('source', path,
                                                     version=rev_or_latest))
            display_rev = repos.display_rev

        # Prepare template data
        path_links = get_path_links(req.href, reponame, path, rev,
                                    order, desc)

        repo_data = dir_data = file_data = None
        if show_index:
            repo_data = self._render_repository_index(
                                    context, all_repositories, order, desc)
        if node:
            if node.isdir:
                dir_data = self._render_dir(req, repos, node, rev,
                                            order, desc)
            elif node.isfile:
                file_data = self._render_file(req, context, repos, node, rev)

        if not repos and not (repo_data and repo_data['repositories']):
            raise ResourceNotFound(_("No node %(path)s", path=path))

        # Properties and quickjump entries are only needed for full
        # page renders, not for XHR (partial) requests.
        quickjump_data = properties_data = None
        if node and not xhr:
            properties_data = self.render_properties(
                    'browser', context, node.get_properties())
            quickjump_data = list(repos.get_quickjump_entries(rev))

        data = {
            'context': context, 'reponame': reponame, 'repos': repos,
            'repoinfo': all_repositories.get(reponame or ''),
            'path': path, 'rev': node and node.rev, 'stickyrev': rev,
            'display_rev': display_rev,
            'created_path': node and node.created_path,
            'created_rev': node and node.created_rev,
            'properties': properties_data,
            'path_links': path_links,
            'order': order, 'desc': 1 if desc else None,
            'repo': repo_data, 'dir': dir_data, 'file': file_data,
            'quickjump_entries': quickjump_data,
            'wiki_format_messages': \
                self.config['changeset'].getbool('wiki_format_messages'),
            'xhr': xhr,
        }
        if xhr:  # render and return the content only
            return 'dir_entries.html', data, None

        if dir_data or repo_data:
            add_script(req, 'common/js/expand_dir.js')
            add_script(req, 'common/js/keyboard_nav.js')

        # Links for contextual navigation
        if node:
            if node.isfile:
                # prev/up/next navigation across the file's revisions.
                prev_rev = repos.previous_rev(rev=node.created_rev,
                                              path=node.created_path)
                if prev_rev:
                    href = req.href.browser(reponame,
                                            node.created_path, rev=prev_rev)
                    add_link(req, 'prev', href,
                             _('Revision %(num)s',
                               num=display_rev(prev_rev)))
                if rev is not None:
                    add_link(req, 'up', req.href.browser(reponame,
                                                         node.created_path))
                next_rev = repos.next_rev(rev=node.created_rev,
                                          path=node.created_path)
                if next_rev:
                    href = req.href.browser(reponame, node.created_path,
                                            rev=next_rev)
                    add_link(req, 'next', href,
                             _('Revision %(num)s',
                               num=display_rev(next_rev)))
                prevnext_nav(req, _('Previous Revision'),
                             _('Next Revision'), _('Latest Revision'))
            else:
                if path != '/':
                    add_link(req, 'up', path_links[-2]['href'],
                             _('Parent directory'))
            add_ctxtnav(req, tag.a(_('Last Change'),
                        href=req.href.changeset(node.created_rev, reponame,
                                                node.created_path)))
            if node.isfile:
                annotate = data['file']['annotate']
                if annotate:
                    add_ctxtnav(req, _('Normal'),
                                title=_('View file without annotations'),
                                href=req.href.browser(reponame,
                                                      node.created_path,
                                                      rev=rev))
                if annotate != 'blame':
                    add_ctxtnav(req, _('Blame'),
                                title=_('Annotate each line with the last '
                                        'changed revision '
                                        '(this can be time consuming...)'),
                                href=req.href.browser(reponame,
                                                      node.created_path,
                                                      rev=rev,
                                                      annotate='blame'))
            add_ctxtnav(req, _('Revision Log'),
                        href=req.href.log(reponame, path, rev=rev))
            path_url = repos.get_path_url(path, rev)
            if path_url:
                # Protocol-relative URLs get the request's scheme.
                if path_url.startswith('//'):
                    path_url = req.scheme + ':' + path_url
                add_ctxtnav(req, _('Repository URL'), href=path_url)

        add_stylesheet(req, 'common/css/browser.css')
        return 'browser.html', data, None
def process_request(self, req):
    """Render the revision log for a path.

    Supports three modes ('stop_on_copy', 'follow_copy',
    'path_history'), explicit revision ranges via `revs`, and three
    output formats (HTML, 'rss', 'changelog' plain text).
    """
    req.perm.require('LOG_VIEW')

    mode = req.args.get('mode', 'stop_on_copy')
    path = req.args.get('path', '/')
    rev = req.args.get('rev')
    stop_rev = req.args.get('stop_rev')
    revs = req.args.get('revs')
    format = req.args.get('format')
    verbose = req.args.get('verbose')
    limit = int(req.args.get('limit') or self.default_log_limit)

    rm = RepositoryManager(self.env)
    reponame, repos, path = rm.get_repository_by_path(path)

    if not repos:
        raise ResourceNotFound(
            _("Repository '%(repo)s' not found", repo=reponame))

    if reponame != repos.reponame:  # Redirect alias
        qs = req.query_string
        req.redirect(req.href.log(repos.reponame or None, path)
                     + ('?' + qs if qs else ''))
    normpath = repos.normalize_path(path)
    # if `revs` parameter is given, then we're restricted to the
    # corresponding revision ranges.
    # If not, then we're considering all revisions since `rev`,
    # on that path, in which case `revranges` will be None.
    revranges = None
    if revs:
        try:
            revranges = Ranges(revs)
            rev = revranges.b
        except ValueError:
            pass
    rev = unicode(repos.normalize_rev(rev))
    display_rev = repos.display_rev

    # The `history()` method depends on the mode:
    # * for ''stop on copy'' and ''follow copies'', it's `Node.history()`
    #   unless explicit ranges have been specified
    # * for ''show only add, delete'' we're using
    #   `Repository.get_path_history()`
    cset_resource = repos.resource.child('changeset')
    show_graph = False
    if mode == 'path_history':
        def history():
            # Only yield changesets the user is allowed to view.
            for h in repos.get_path_history(path, rev):
                if 'CHANGESET_VIEW' in req.perm(cset_resource(id=h[1])):
                    yield h
    elif revranges:
        def history():
            # Walk each requested (a, b] range from newest to oldest,
            # emitting `(path, rev, None)` separator entries between
            # non-contiguous ranges.
            prevpath = path
            expected_next_item = None
            ranges = list(revranges.pairs)
            ranges.reverse()
            for (a, b) in ranges:
                a = repos.normalize_rev(a)
                b = repos.normalize_rev(b)
                while not repos.rev_older_than(b, a) and b != a:
                    node = get_existing_node(req, repos, prevpath, b)
                    node_history = list(node.get_history(2))
                    p, rev, chg = node_history[0]
                    if repos.rev_older_than(rev, a):
                        break  # simply skip, no separator
                    if 'CHANGESET_VIEW' in req.perm(
                            cset_resource(id=rev)):
                        if expected_next_item:
                            # check whether we're continuing previous range
                            np, nrev, nchg = expected_next_item
                            if rev != nrev:  # no, we need a separator
                                yield (np, nrev, None)
                        yield node_history[0]
                    prevpath = node_history[-1][0]  # follow copy
                    b = repos.previous_rev(rev)
                    if len(node_history) > 1:
                        expected_next_item = node_history[-1]
                    else:
                        expected_next_item = None
            if expected_next_item:
                yield (expected_next_item[0], expected_next_item[1], None)
    else:
        # Graph display only for the whole repository root, non-verbose,
        # and backends with branching (non-linear) changesets.
        show_graph = path == '/' and not verbose \
                     and not repos.has_linear_changesets
        def history():
            node = get_existing_node(req, repos, path, rev)
            for h in node.get_history():
                if 'CHANGESET_VIEW' in req.perm(cset_resource(id=h[1])):
                    yield h

    # -- retrieve history, asking for limit+1 results
    info = []
    depth = 1
    previous_path = normpath
    count = 0
    for old_path, old_rev, old_chg in history():
        if stop_rev and repos.rev_older_than(old_rev, stop_rev):
            break
        old_path = repos.normalize_path(old_path)

        item = {
            'path': old_path, 'rev': old_rev, 'existing_rev': old_rev,
            'change': old_chg, 'depth': depth,
        }

        if old_chg == Changeset.DELETE:
            # A deleted node's content lives in the previous revision.
            item['existing_rev'] = repos.previous_rev(old_rev, old_path)
        if not (mode == 'path_history' and old_chg == Changeset.EDIT):
            info.append(item)
        if old_path and old_path != previous_path and \
                not (mode == 'path_history' and old_path == normpath):
            # The path changed: we followed a copy/rename.
            depth += 1
            item['depth'] = depth
            item['copyfrom_path'] = old_path
            if mode == 'stop_on_copy':
                break
            elif mode == 'path_history':
                depth -= 1
        if old_chg is None:  # separator entry
            stop_limit = limit
        else:
            count += 1
            stop_limit = limit + 1
        if count >= stop_limit:
            break
        previous_path = old_path
    if info == []:
        node = get_existing_node(req, repos, path, rev)
        if repos.rev_older_than(stop_rev, node.created_rev):
            # FIXME: we should send a 404 error here
            raise TracError(_(
                "The file or directory '%(path)s' doesn't "
                "exist at revision %(rev)s or at any previous revision.",
                path=path, rev=display_rev(rev)), _('Nonexistent path'))

    # Generate graph data
    graph = {}
    if show_graph:
        threads, vertices, columns = \
            make_log_graph(repos, (item['rev'] for item in info))
        graph.update(threads=threads, vertices=vertices, columns=columns,
                     colors=self.graph_colors, line_width=0.04,
                     dot_radius=0.1)
        add_script(req, 'common/js/excanvas.js', ie_if='IE')
        add_script(req, 'common/js/log_graph.js')
        add_script_data(req, graph=graph)

    def make_log_href(path, **args):
        # Build a /log link preserving mode/limit/verbose; drop `rev`
        # when it is the youngest revision (canonical URL).
        link_rev = rev
        if rev == str(repos.youngest_rev):
            link_rev = None
        params = {'rev': link_rev, 'mode': mode, 'limit': limit}
        params.update(args)
        if verbose:
            params['verbose'] = verbose
        return req.href.log(repos.reponame or None, path, **params)

    if format in ('rss', 'changelog'):
        info = [i for i in info if i['change']]  # drop separators
        if info and count > limit:
            del info[-1]
    elif info and count >= limit:
        # stop_limit reached, there _might_ be some more
        next_rev = info[-1]['rev']
        next_path = info[-1]['path']
        next_revranges = None
        if revranges:
            next_revranges = str(revranges.truncate(next_rev))
        if next_revranges or not revranges:
            older_revisions_href = make_log_href(next_path, rev=next_rev,
                                                 revs=next_revranges)
            add_link(req, 'next', older_revisions_href,
                     _('Revision Log (restarting at %(path)s, rev. '
                       '%(rev)s)',
                       path=next_path, rev=display_rev(next_rev)))
        # only show fully 'limit' results, use `change == None` as a marker
        info[-1]['change'] = None

    revisions = [i['rev'] for i in info]
    changes = get_changes(repos, revisions, self.log)
    extra_changes = {}
    if format == 'changelog':
        # Prepare per-changeset wrapped message and file/action lists
        # for the plain-text ChangeLog rendering.
        for rev in revisions:
            changeset = changes[rev]
            cs = {}
            cs['message'] = wrap(changeset.message, 70,
                                 initial_indent='\t',
                                 subsequent_indent='\t')
            files = []
            actions = []
            for cpath, kind, chg, bpath, brev in changeset.get_changes():
                files.append(bpath if chg == Changeset.DELETE else cpath)
                actions.append(chg)
            cs['files'] = files
            cs['actions'] = actions
            extra_changes[rev] = cs

    data = {
        'context': web_context(req, 'source', path,
                               parent=repos.resource),
        'reponame': repos.reponame or None, 'repos': repos,
        'path': path, 'rev': rev, 'stop_rev': stop_rev,
        'display_rev': display_rev, 'revranges': revranges,
        'mode': mode, 'verbose': verbose, 'limit': limit,
        'items': info, 'changes': changes,
        'extra_changes': extra_changes, 'graph': graph,
        'wiki_format_messages':
            self.config['changeset'].getbool('wiki_format_messages')
    }

    if format == 'changelog':
        return 'revisionlog.txt', data, 'text/plain'
    elif format == 'rss':
        data['email_map'] = Chrome(self.env).get_email_map()
        data['context'] = web_context(req, 'source', path,
                                      parent=repos.resource, absurls=True)
        return 'revisionlog.rss', data, 'application/rss+xml'

    # Group items into ranges split at separator entries (change is
    # None) for the HTML template.
    item_ranges = []
    range = []
    for item in info:
        if item['change'] is None:  # separator
            if range:  # start new range
                range.append(item)
                item_ranges.append(range)
                range = []
        else:
            range.append(item)
    if range:
        item_ranges.append(range)
    data['item_ranges'] = item_ranges

    add_stylesheet(req, 'common/css/diff.css')
    add_stylesheet(req, 'common/css/browser.css')

    path_links = get_path_links(req.href, repos.reponame, path, rev)
    if path_links:
        data['path_links'] = path_links
    if path != '/':
        add_link(req, 'up', path_links[-2]['href'], _('Parent directory'))

    rss_href = make_log_href(path, format='rss', revs=revs,
                             stop_rev=stop_rev)
    add_link(req, 'alternate', auth_link(req, rss_href), _('RSS Feed'),
             'application/rss+xml', 'rss')
    changelog_href = make_log_href(path, format='changelog', revs=revs,
                                   stop_rev=stop_rev)
    add_link(req, 'alternate', changelog_href, _('ChangeLog'),
             'text/plain')

    add_ctxtnav(req, _('View Latest Revision'),
                href=req.href.browser(repos.reponame or None, path))
    if 'next' in req.chrome['links']:
        next = req.chrome['links']['next'][0]
        add_ctxtnav(req, tag.span(tag.a(_('Older Revisions'),
                                        href=next['href']),
                                  Markup(' &rarr;')))

    return 'revisionlog.html', data, None
def __init__(self, rev):
    """Signal that changeset `rev` does not exist in the repository."""
    message = _('No changeset %(rev)s in the repository', rev=rev)
    ResourceNotFound.__init__(self, message, _('No such changeset'))
def __init__(self, rev):
    """Raise 'no such changeset' for the missing revision `rev`."""
    title = _('No such changeset')
    ResourceNotFound.__init__(
        self, _('No changeset %(rev)s in the repository', rev=rev), title)
def process_request(self, req):
    """Process the request.

    Redirects to today's "page of the day" wiki page in edit mode,
    creating its initial text from a template (user-specific template
    preferred) with placeholder tokens substituted.
    """
    base = self.get_mypage_base(req.authname)
    tzinfo = getattr(req, 'tz', None)
    now = datetime.now(tzinfo or localtz)
    today = format_date(now, 'iso8601', tzinfo)
    today_page_name = base + today
    today_page = WikiPage(self.env, today_page_name)
    # If today's page already exists, just open it for editing.
    if today_page.exists:
        req.redirect(req.href.wiki(today_page_name, action='edit'))

    # create page of the day for today
    if 'WIKI_CREATE' not in req.perm(today_page.resource):
        raise ResourceNotFound(_("Can't create the page of the day."))

    ws = WikiSystem(self.env)

    def get_page_text(pagename):
        # Return the page text if it exists and is viewable by the
        # requesting user; implicitly returns None otherwise.
        if ws.has_page(pagename):
            page = WikiPage(self.env, pagename)
            if 'WIKI_VIEW' in req.perm(page.resource):
                self.log.debug("get_page_text(%s) -> %s",
                               pagename, page.text)
                return page.text
        self.log.debug("get_page_text(%s) -> None", pagename)

    # retrieve page template (user-specific variant wins over the
    # shared one)
    template_name = 'PageTemplates/MyPage'
    mytemplate_name = '/'.join([template_name, req.authname])
    template_text = get_page_text(mytemplate_name)
    if template_text is None:
        template_text = get_page_text(template_name)

    text = last_page_text = last_page_quoted = None
    if template_text is not None:
        # retrieve previous "page of the day", if any
        all_mypages = self.get_all_mypages(base)
        # bisect locates where today's page would sort; the entry just
        # before it is the most recent earlier page.
        last = bisect(all_mypages, today_page_name) - 1
        self.log.debug("Pos of today %s in %r is %d",
                       today_page_name, all_mypages, last)
        last_page_name = all_mypages[last] if last >= 0 else None
        last_page_link = ''
        if last_page_name:
            last_page_link = '[[%s]]' % last_page_name
            last_page_text = get_page_text(last_page_name)
        if last_page_text is not None:
            # Quote the previous page's text wiki-style ('> ' prefix).
            last_page_quoted = '\n'.join(
                ['> ' + line for line in last_page_text.splitlines()])
        today_user = user_time(req, format_date, now, tzinfo=tzinfo)
        author = req.session.get('name') or get_reporter_id(req)
        # Substitute each placeholder token with its computed value.
        text = template_text \
            .replace(self.tokens['date'][0], today_user) \
            .replace(self.tokens['isodate'][0], today) \
            .replace(self.tokens['user'][0], req.authname) \
            .replace(self.tokens['author'][0], author) \
            .replace(self.tokens['lp_link'][0], last_page_link) \
            .replace(self.tokens['lp_name'][0], last_page_name or '') \
            .replace(self.tokens['lp_text'][0], last_page_text or '') \
            .replace(self.tokens['lp_quoted'][0], last_page_quoted or '')
    req.redirect(req.href.wiki(today_page_name, action='edit', text=text))
def _upgrade_db(self, db):
    """(Re)build the svn_revmap table from the revision-map file.

    Parses `self.revmap` — alternating git hash / commit message /
    git-svn-id lines, newest first — and inserts one
    (svn_rev, git_hash, commit_msg) row per mapping.  Returns 0 when
    the revmap feature is disabled; raises ResourceNotFound if the
    revmap file is missing.

    Fix: the revmap file descriptor was never closed — it is now
    released in a `finally` block, even when parsing raises.
    """
    # open the revision map
    if int(self.enable_revmap) == 0:
        return 0
    try:
        revmap_fd = open(self.revmap, 'rb')
    except IOError:
        raise ResourceNotFound(
            _("revision map '%(revmap)s' not found", revmap=self.revmap))
    try:
        cursor = db.cursor()
        # Drop any stale table; roll back if it didn't exist so the
        # connection stays usable.
        try:
            cursor.execute("DROP TABLE svn_revmap;")
        except Exception:
            db.rollback()
        db_backend, _unused = DatabaseManager(self.env)._get_connector()
        cursor = db.cursor()
        for table in self.SCHEMA:
            for stmt in db_backend.to_sql(table):
                self.env.log.debug(stmt)
                cursor.execute(stmt)
        insert_count = 0
        prev_rev = 0
        git_hash = revmap_fd.readline()[0:-1]
        while 1:
            # make sure this line is the hash
            if not re.match(r'[0-9a-f]{40}', git_hash):
                raise Exception("expecting hash, found '%s'" % git_hash)
            line = revmap_fd.readline()[0:-1]
            if line.startswith('git-svn-id:'):
                commit_msg = '<no commit message>'
            else:
                # slurp lines into the commit message until there's a
                # blank line, a line starting with git-svn-id or a hash
                commit_msg = ''
                while not re.match(r'[0-9a-f]{40}', line) and \
                        not line.startswith('git-svn-id:'):
                    if len(line) > 0:
                        if not commit_msg:
                            commit_msg = line
                        else:
                            commit_msg = commit_msg + " " + line
                    line = revmap_fd.readline()[0:-1]
            if not line.startswith('git-svn-id:'):
                raise Exception("expected git-svn-id, got '%s'" % line)
            svn_rev_match = re.match(r'^git-svn-id:.*@(\d+) ', line)
            svn_rev = int(svn_rev_match.group(1))
            insert_query = ("INSERT INTO svn_revmap (svn_rev, git_hash, "
                            "commit_msg) VALUES (%s, %s, %s);")
            self.env.log.debug(insert_query
                               % (svn_rev, git_hash, commit_msg))
            cursor.execute(insert_query,
                           (svn_rev, git_hash, commit_msg.decode('utf-8')))
            # NOTE(review): prev_rev starts at 0, so the very first
            # iteration usually logs a spurious "gap" — confirm intended.
            if prev_rev - 1 != svn_rev:
                self.env.log.debug("found a gap between r%d and r%d"
                                   % (prev_rev, svn_rev))
            prev_rev = svn_rev
            insert_count += 1
            if svn_rev == 1:
                break
            # Skip blank lines between records to reach the next hash.
            git_hash = revmap_fd.readline()[0:-1]
            while len(git_hash) == 0:
                git_hash = revmap_fd.readline()[0:-1]
    finally:
        # Always release the file handle (was previously leaked).
        revmap_fd.close()
    self.env.log.debug("inserted %d mappings into svn_revmap"
                       % insert_count)
def get_first_remaining_changeset():
    # Closure: `self`, `ticket`, `changesets` and `blocking_when` are
    # captured from the enclosing scope (not visible in this chunk).
    # Returns the first review on `ticket` whose changeset is still in
    # `changesets` and is not older than `blocking_when`; raises
    # ResourceNotFound when no such review remains.
    for review in self.get_reviews(ticket):
        if review.changeset in changesets and \
                review.changeset_when >= blocking_when:
            return review  # changeset exists on path
    raise ResourceNotFound("Not found for #%s" % ticket)
def process_request(self, req):
    """Render the repository index, a directory listing, a file view,
    or a zip download of a directory (newer, Python-3-era variant of
    the browser request handler; returns (template, data) 2-tuples).
    """
    presel = req.args.get('preselected')
    if presel and (presel + '/').startswith(req.href.browser() + '/'):
        req.redirect(presel)

    path = req.args.get('path', '/')
    rev = req.args.get('rev', '')
    # '' and 'head' (any case) both mean "latest revision".
    if rev.lower() in ('', 'head'):
        rev = None
    format = req.args.get('format')
    order = req.args.get('order', 'name').lower()
    desc = 'desc' in req.args

    rm = RepositoryManager(self.env)
    all_repositories = rm.get_all_repositories()
    reponame, repos, path = rm.get_repository_by_path(path)

    # Repository index
    show_index = not reponame and path == '/'
    if show_index:
        # Hide the default repository on the index if flagged hidden
        # or not viewable by this user.
        if repos and (as_bool(all_repositories[''].get('hidden'))
                      or not repos.is_viewable(req.perm)):
            repos = None

    if not repos and reponame:
        raise ResourceNotFound(_("Repository '%(repo)s' not found",
                                 repo=reponame))

    if reponame and reponame != repos.reponame:  # Redirect alias
        qs = req.query_string
        req.redirect(req.href.browser(repos.reponame or None, path)
                     + ('?' + qs if qs else ''))
    reponame = repos.reponame if repos else None

    # Find node for the requested path/rev
    context = web_context(req)
    node = None
    changeset = None
    display_rev = lambda rev: rev
    if repos:
        try:
            if rev:
                rev = repos.normalize_rev(rev)
            # If `rev` is `None`, we'll try to reuse `None` consistently,
            # as a special shortcut to the latest revision.
            rev_or_latest = rev or repos.youngest_rev
            node = get_existing_node(req, repos, path, rev_or_latest)
        except NoSuchChangeset as e:
            raise ResourceNotFound(e, _('Invalid changeset number'))
        if node:
            try:
                # use changeset instance to retrieve branches and tags
                changeset = repos.get_changeset(node.rev)
            except NoSuchChangeset:
                pass
        context = context.child(repos.resource.child(
            self.realm, path, version=rev_or_latest))
        display_rev = repos.display_rev

    # Prepare template data
    path_links = get_path_links(req.href, reponame, path, rev,
                                order, desc)

    repo_data = dir_data = file_data = None
    if show_index:
        repo_data = self._render_repository_index(
            context, all_repositories, order, desc)
    if node:
        if not node.is_viewable(req.perm):
            raise PermissionError('BROWSER_VIEW' if node.isdir
                                  else 'FILE_VIEW', node.resource,
                                  self.env)
        if node.isdir:
            if format in ('zip',):  # extension point here...
                # Sends the archive and raises RequestDone internally.
                self._render_zip(req, context, repos, node, rev)
                # not reached
            dir_data = self._render_dir(req, repos, node, rev,
                                        order, desc)
        elif node.isfile:
            file_data = self._render_file(req, context, repos, node, rev)

    if not repos and not (repo_data and repo_data['repositories']):
        # If no viewable repositories, check permission instead of
        # repos.is_viewable()
        req.perm.require('BROWSER_VIEW')
        if show_index:
            raise ResourceNotFound(_("No viewable repositories"))
        else:
            raise ResourceNotFound(_("No node %(path)s", path=path))

    # Properties and quickjump entries only for full page renders.
    quickjump_data = properties_data = None
    if node and not req.is_xhr:
        properties_data = self.render_properties(
            'browser', context, node.get_properties())
        quickjump_data = list(repos.get_quickjump_entries(rev))

    data = {
        'context': context, 'reponame': reponame, 'repos': repos,
        'repoinfo': all_repositories.get(reponame or ''),
        'path': path, 'rev': node and node.rev, 'stickyrev': rev,
        'display_rev': display_rev, 'changeset': changeset,
        'created_path': node and node.created_path,
        'created_rev': node and node.created_rev,
        'properties': properties_data,
        'path_links': path_links,
        'order': order, 'desc': 1 if desc else None,
        'repo': repo_data, 'dir': dir_data, 'file': file_data,
        'quickjump_entries': quickjump_data,
        'wiki_format_messages':
            self.config['changeset'].getbool('wiki_format_messages'),
    }
    if req.is_xhr:  # render and return the content only
        return 'dir_entries.html', data

    if dir_data or repo_data:
        add_script(req, 'common/js/expand_dir.js')
        add_script(req, 'common/js/keyboard_nav.js')

    # Links for contextual navigation
    if node:
        if node.isfile:
            # prev/up/next navigation across the file's revisions.
            prev_rev = repos.previous_rev(rev=node.created_rev,
                                          path=node.created_path)
            if prev_rev:
                href = req.href.browser(reponame,
                                        node.created_path, rev=prev_rev)
                add_link(req, 'prev', href,
                         _('Revision %(num)s',
                           num=display_rev(prev_rev)))
            if rev is not None:
                add_link(req, 'up', req.href.browser(reponame,
                                                     node.created_path))
            next_rev = repos.next_rev(rev=node.created_rev,
                                      path=node.created_path)
            if next_rev:
                href = req.href.browser(reponame, node.created_path,
                                        rev=next_rev)
                add_link(req, 'next', href,
                         _('Revision %(num)s',
                           num=display_rev(next_rev)))
            prevnext_nav(req, _('Previous Revision'),
                         _('Next Revision'), _('Latest Revision'))
        else:
            if path != '/':
                add_link(req, 'up', path_links[-2]['href'],
                         _('Parent directory'))
        add_ctxtnav(req, tag.a(_('Last Change'),
                    href=req.href.changeset(node.created_rev, reponame,
                                            node.created_path)))
        if node.isfile:
            annotate = data['file']['annotate']
            if annotate:
                add_ctxtnav(req, _('Normal'),
                            title=_('View file without annotations'),
                            href=req.href.browser(reponame,
                                                  node.created_path,
                                                  rev=rev))
            if annotate != 'blame':
                add_ctxtnav(req, _('Blame'),
                            title=_('Annotate each line with the last '
                                    'changed revision '
                                    '(this can be time consuming...)'),
                            href=req.href.browser(reponame,
                                                  node.created_path,
                                                  rev=rev,
                                                  annotate='blame'))
        add_ctxtnav(req, _('Revision Log'),
                    href=req.href.log(reponame, path, rev=rev))
        path_url = repos.get_path_url(path, rev)
        if path_url:
            # Protocol-relative URLs get the request's scheme.
            if path_url.startswith('//'):
                path_url = req.scheme + ':' + path_url
            add_ctxtnav(req, _('Repository URL'), href=path_url)

    add_stylesheet(req, 'common/css/browser.css')
    return 'browser.html', data
def process_request(self, req):
    """Dispatch cloud-resource ("droplet") requests.

    Lazily builds the Chef/AWS API wrappers and the droplet singletons
    on first use, then routes POST actions (create/delete/save/audit/
    execute) and GET views (edit/new/delete/progress/grid/view).
    """
    req.perm.require('CLOUD_VIEW')

    # setup cloud droplets
    if not hasattr(self, 'droplets'):
        # setup chefapi and cloudapi
        chefapi = Chef(self.chef_base_path, self.aws_keypair_pem,
                       self.aws_username, self.chef_boot_run_list,
                       self.chef_boot_sudo, self.chef_boot_version,
                       self.log)
        cloudapi = Aws(self.aws_key, self.aws_secret, self.aws_keypair,
                       self.aws_security_groups, self.rds_username,
                       self.rds_password, self.log)
        # instantiate each droplet (singletons)
        self.droplets = {}
        self.titles = Droplet.titles(self.env)
        for _order, droplet_name, _title in self.titles:
            self.droplets[droplet_name] = Droplet.new(
                self.env, droplet_name, chefapi, cloudapi,
                self.field_handlers, self.log)

    # ensure at least one droplet exists
    if not self.droplets:
        raise ResourceNotFound(
            _("No cloud resources found in trac.ini."),
            _('Missing Cloud Resource'))

    droplet_name = req.args.get('droplet_name', '')
    id = req.args.get('id', '')
    action = req.args.get('action', 'view')
    file = req.args.get('file', '')

    # Fall back to the configured default resource, then to the first
    # titled droplet (redirecting to its canonical URL).
    # NOTE(review): original indentation was lost; the redirect is
    # assumed to apply only when no default_resource is set — confirm.
    if not droplet_name:
        droplet_name = self.default_resource
    if not droplet_name:
        _order, droplet_name, _title = self.titles[0]
        req.redirect(req.href.cloud(droplet_name))

    # check for valid kind
    if droplet_name not in self.droplets:
        raise ResourceNotFound(
            _("Cloud resource '%(droplet_name)s' does not exist.",
              droplet_name=droplet_name),
            _('Invalid Cloud Resource'))

    # retrieve the droplet
    droplet = self.droplets[droplet_name]

    # route the request
    if req.method == 'POST':
        # NOTE(review): the POST branches are expected to redirect;
        # if one falls through, `template` below is unbound — confirm.
        if 'cancel' in req.args:
            req.redirect(req.href.cloud(droplet_name, id))
        elif action == 'new':
            droplet.create(req)
        elif action == 'delete':
            droplet.delete(req, id)
        elif action == 'edit':
            droplet.save(req, id)
        elif action == 'audit' or 'audit' in req.args:
            droplet.audit(req, id)
        elif action == 'execute' or 'execute' in req.args:
            droplet.execute(req, id)
    else:  # req.method == 'GET':
        if action in ('edit', 'new'):
            template, data, content_type = droplet.render_edit(req, id)
            Chrome(self.env).add_wiki_toolbars(req)
        elif action == 'delete':
            template, data, content_type = droplet.render_delete(req, id)
        elif action == 'progress':
            template, data, content_type = droplet.render_progress(
                req, file)
        elif id == '':
            template, data, content_type = droplet.render_grid(req)
            if content_type:  # i.e. alternate format
                return template, data, content_type
        else:
            template, data, content_type = droplet.render_view(req, id)
            if content_type:  # i.e. alternate format
                return template, data, content_type

    # add contextual nav
    for _order, droplet_name, title in self.titles:
        add_ctxtnav(req, title, href=req.href.cloud(droplet_name))

    add_stylesheet(req, 'common/css/report.css')  # reuse css
    return template, data, None
def __init__(self, path, rev, msg=None):
    """Raise 'no such node' for `path` at `rev`, optionally prefixed
    with `msg` (non-localized message variant)."""
    prefix = '%s: ' % msg if msg else ''
    ResourceNotFound.__init__(
        self,
        "%sNo node %s at revision %s" % (prefix, path, rev),
        _('No such node'))
def process_request(self, req):
    """Handle crashdump web requests.

    Routes on the ``action`` request argument:

    - ``crash_list``       -- render the crash list page
    - ``view`` (default)   -- render a single crash report (or a sub-page
                              selected via the ``params`` argument)
    - ``sysinfo_report``   -- render the parsed system-information report
    - ``systeminfo_raw``   -- send the raw system-information blob
    - ``delete``           -- delete the crash and render a confirmation
    - ``minidump_*`` / ``coredump_*`` / ``raw`` / ``xml`` / ``html`` /
      ``text``             -- send the matching dump/report file

    :param req: the Trac `Request` object
    :return: ``(template, data, metadata)`` tuple, or the result of a
        file-send helper for download actions
    :raises ResourceNotFound: for unknown crashes, sub-pages or actions
    """
    # Jinja2-based Trac wants explicit content-type metadata; older
    # (Genshi) Trac expects None.
    if crashdump_use_jinja2:
        metadata = {'content_type': 'text/html'}
    else:
        metadata = None
    action = req.args.get('action', 'view')

    if action == 'crash_list':
        page = req.args.getint('page', 1)
        default_max = self.items_per_page
        max = req.args.getint('max')  # NOTE: shadows builtin `max`
        limit = as_int(max, default_max, min=0)  # explicit max takes precedence
        offset = (page - 1) * limit
        sort_col = req.args.get('sort', '')
        asc = req.args.getint('asc', 0, min=0, max=1)
        title = ''
        description = ''
        data = {
            'action': 'crash_list',
            'max': limit,
            'numrows': 0,
            'title': title,
            'description': description,
            'message': None,
            'paginator': None
        }
        req_status = req.args.get('status') or 'active'
        # NOTE(review): the status filter is disabled — all crashes are
        # returned regardless of the requested status.
        #results = CrashDump.query(env=self.env, status=req_status)
        results = CrashDump.query(env=self.env, status=None)
        data['results'] = results
        limit_offset = 0
        # NOTE(review): `limit_offset` is hard-coded to 0, so
        # `need_paginator` is always falsy and the whole paginator branch
        # below is dead code; `need_reorder` is likewise always False.
        need_paginator = limit > 0 and limit_offset
        need_reorder = limit_offset is None
        numrows = len(results)
        paginator = None
        if need_paginator:
            # NOTE(review): `num_items` is not defined anywhere in this
            # function — this line would raise NameError if the branch
            # were ever reached (probably meant `numrows`).
            paginator = Paginator(results, page - 1, limit, num_items)
            data['paginator'] = paginator
            if paginator.has_next_page:
                add_link(req, 'next', report_href(page=page + 1),
                         _('Next Page'))
            if paginator.has_previous_page:
                add_link(req, 'prev', report_href(page=page - 1),
                         _('Previous Page'))
            pagedata = []
            shown_pages = paginator.get_shown_pages(21)
            for p in shown_pages:
                pagedata.append([
                    report_href(page=p), None, str(p),
                    _('Page %(num)d', num=p)
                ])
            fields = ['href', 'class', 'string', 'title']
            paginator.shown_pages = [
                dict(zip(fields, p)) for p in pagedata
            ]
            paginator.current_page = {
                'href': None,
                'class': 'current',
                'string': str(paginator.page + 1),
                'title': None
            }
            numrows = paginator.num_items
        data['paginator'] = paginator
        add_script_data(req, {'comments_prefs': self._get_prefs(req)})
        if not crashdump_use_jinja2:
            add_script(req, 'common/js/folding.js')
        add_script(req, 'crashdump/crashdump.js')
        add_stylesheet(req, 'crashdump/crashdump.css')
        return 'list.html', data, metadata

    # Single-crash actions: resolve the crash object by UUID or id.
    start = time.time()  # used to report DB lookup time in the page data
    if 'crashuuid' in req.args:
        crashobj = CrashDump.find_by_uuid(self.env, req.args['crashuuid'])
        if not crashobj:
            raise ResourceNotFound(
                _("Crash %(id)s does not exist.",
                  id=req.args['crashuuid']),
                _("Invalid crash identifier"))
    elif 'crashid' in req.args:
        crashobj = CrashDump.find_by_id(self.env, req.args['crashid'])
        if not crashobj:
            raise ResourceNotFound(
                _("Crash %(id)s does not exist.",
                  id=req.args['crashid']),
                _("Invalid crash identifier"))
    else:
        raise ResourceNotFound(_("No crash identifier specified."))
    end = time.time()

    xhr = req.get_header('X-Requested-With') == 'XMLHttpRequest'  # NOTE(review): unused
    #req.perm('crash', id, version).require('TICKET_VIEW')
    params = _get_list_from_args(req.args, 'params', None)
    self.log.debug('process_request %s:%s-%s' %
                   (action, type(params), params))

    if action is None or action == 'view':
        data = self._prepare_data(req, crashobj)
        xmlfile = data['xmlfile'] if 'xmlfile' in data else None
        data['dbtime'] = end - start
        field_changes = {}
        data.update({
            'action': action,
            'params': params,
            # Store a timestamp for detecting "mid air collisions"
            'start_time': crashobj['changetime']
        })
        self._insert_crashdump_data(req, crashobj, data,
                                    get_reporter_id(req, 'author'),
                                    field_changes)
        if params is None:
            # Full report page.
            add_script_data(req,
                            {'comments_prefs': self._get_prefs(req)})
            if not crashdump_use_jinja2:
                add_script(req, 'common/js/folding.js')
            add_script(req, 'crashdump/crashdump.js')
            add_stylesheet(req, 'crashdump/crashdump.css')
            data['show_delete_crash'] = self.show_delete_crash
            linked_tickets = []
            for tkt_id in crashobj.linked_tickets:
                a = self._link_ticket_by_id(req, tkt_id)
                if a:
                    linked_tickets.append(a)
            data['linked_tickets'] = linked_tickets
            return 'report.html', data, metadata
        else:
            # Sub-page of the report, selected by the first param.
            if params[0] in [
                    'sysinfo', 'sysinfo_ex', 'fast_protect_version_info',
                    'exception', 'memory_blocks', 'memory_regions',
                    'modules', 'threads', 'stackdumps', 'file_info'
            ]:
                return params[0] + '.html', data, metadata
            elif params[0] == 'memory_block':
                # Second param selects the memory block by base address.
                block_base = safe_list_get_as_int(params, 1, 0)
                memory_block = None
                for b in data['memory_blocks']:
                    if b.base == block_base:
                        memory_block = b
                        break
                data.update({
                    'memory_block': memory_block,
                    'memory_block_base': block_base
                })
                return 'memory_block.html', data, metadata
            elif params[0] == 'stackdump':
                # Second param selects the stack dump by thread id.
                threadid = safe_list_get_as_int(params, 1, 0)
                stackdump = None
                if threadid in data['stackdumps']:
                    stackdump = data['stackdumps'][threadid]
                self.log.debug('stackdump %s' % stackdump)
                data.update({'stackdump': stackdump,
                             'threadid': threadid})
                return 'stackdump.html', data, metadata
            else:
                raise ResourceNotFound(
                    _("Invalid sub-page request %(param)s for crash %(uuid)s.",
                      param=str(params[0]), uuid=str(crashobj.uuid)))
    elif action == 'sysinfo_report':
        data = self._prepare_data(req, crashobj)
        data['dbtime'] = end - start
        if 'xmlreport' in data:
            xmlfile = data['xmlreport']
            data['sysinfo_report'] = None
            # NOTE(review): `string` is not a defined name here — this
            # isinstance check would raise NameError for a non-XMLReport
            # value (probably meant `str`).
            if isinstance(xmlfile, XMLReport) or (isinstance(
                    xmlfile, string) and os.path.isfile(xmlfile)):
                try:
                    data['sysinfo_report'] = SystemInfoReport(
                        xmlreport=xmlfile)
                except SystemInfoReport.SystemInfoReportException as e:
                    data['xmlfile_error'] = str(e)
            else:
                data['xmlfile_error'] = _(
                    "XML file %(file)s is unavailable", file=xmlfile)
        data.update({
            'action': action,
            'params': params,
            # Store a timestamp for detecting "mid air collisions"
            'start_time': crashobj['changetime']
        })
        if params is None:
            # Full system-information report page.
            add_script_data(req,
                            {'comments_prefs': self._get_prefs(req)})
            if not crashdump_use_jinja2:
                add_script(req, 'common/js/folding.js')
            add_script(req, 'crashdump/crashdump.js')
            add_stylesheet(req, 'crashdump/crashdump.css')
            linked_tickets = []
            for tkt_id in crashobj.linked_tickets:
                a = self._link_ticket_by_id(req, tkt_id)
                if a:
                    linked_tickets.append(a)
            data['linked_tickets'] = linked_tickets
            return 'sysinfo_report.html', data, metadata
        else:
            # Sub-page of the system-information report.
            if params[0] in [
                    'sysinfo', 'sysinfo_ex', 'sysinfo_opengl',
                    'sysinfo_env', 'sysinfo_terra4d_dirs', 'sysinfo_cpu',
                    'sysinfo_locale', 'sysinfo_network',
                    'sysinfo_rawdata'
            ]:
                return params[0] + '.html', data, metadata
            else:
                raise ResourceNotFound(
                    _("Invalid sub-page request %(param)s for crash %(uuid)s.",
                      param=str(params[0]), uuid=str(crashobj.uuid)))
    elif action == 'systeminfo_raw':
        # Send the raw system-information blob as a download.
        data = self._prepare_data(req, crashobj)
        xmlfile = data['xmlfile'] if 'xmlfile' in data else None
        data['dbtime'] = end - start
        fast_protect_system_info = data[
            'fast_protect_system_info'] if 'fast_protect_system_info' in data else None
        if fast_protect_system_info:
            if crashobj['crashhostname']:
                filename = "%s_%s.terra4d-system-info" % (str(
                    crashobj.uuid), str(crashobj['crashhostname']))
            else:
                filename = "%s.terra4d-system-info" % str(crashobj.uuid)
            if fast_protect_system_info.rawdata:
                return self._send_data(
                    req, fast_protect_system_info.rawdata.raw,
                    filename=filename)
        raise ResourceNotFound(
            _("No system information available for crash %(uuid)s.",
              uuid=str(crashobj.uuid)))
    elif action == 'delete':
        add_script_data(req, {'comments_prefs': self._get_prefs(req)})
        add_script(req, 'crashdump/crashdump.js')
        add_stylesheet(req, 'crashdump/crashdump.css')
        # Capture id/uuid for the confirmation page before deleting.
        data = {'id': crashobj.id, 'uuid': crashobj.uuid}
        crashobj.delete(self.dumpdata_dir)
        return 'deleted.html', data, metadata
    elif action == 'minidump_raw':
        return self._send_file(req, crashobj, 'minidumpfile')
    elif action == 'minidump_text':
        return self._send_file(req, crashobj, 'minidumpreporttextfile')
    elif action == 'minidump_xml':
        return self._send_file(req, crashobj, 'minidumpreportxmlfile')
    elif action == 'minidump_html':
        return self._send_file(req, crashobj, 'minidumpreporthtmlfile')
    elif action == 'coredump_raw':
        return self._send_file(req, crashobj, 'coredumpfile')
    elif action == 'coredump_text':
        return self._send_file(req, crashobj, 'coredumpreporttextfile')
    elif action == 'coredump_xml':
        return self._send_file(req, crashobj, 'coredumpreportxmlfile')
    elif action == 'coredump_html':
        return self._send_file(req, crashobj, 'coredumpreporthtmlfile')
    elif action == 'raw':
        # Generic variants prefer the minidump file, falling back to the
        # coredump; if neither exists, fall through to the error below.
        if crashobj['minidumpfile']:
            return self._send_file(req, crashobj, 'minidumpfile')
        elif crashobj['coredumpfile']:
            return self._send_file(req, crashobj, 'coredumpfile')
    elif action == 'xml':
        if crashobj['minidumpreportxmlfile']:
            return self._send_file(req, crashobj,
                                   'minidumpreportxmlfile')
        elif crashobj['coredumpreportxmlfile']:
            return self._send_file(req, crashobj,
                                   'coredumpreportxmlfile')
    elif action == 'html':
        if crashobj['minidumpreporthtmlfile']:
            return self._send_file(req, crashobj,
                                   'minidumpreporthtmlfile')
        elif crashobj['coredumpreporthtmlfile']:
            return self._send_file(req, crashobj,
                                   'coredumpreporthtmlfile')
    elif action == 'text':
        if crashobj['minidumpreporttextfile']:
            return self._send_file(req, crashobj,
                                   'minidumpreporttextfile')
        elif crashobj['coredumpreporttextfile']:
            return self._send_file(req, crashobj,
                                   'coredumpreporttextfile')
    # Unknown action, or a raw/xml/html/text request with no file present.
    raise ResourceNotFound(
        _("Invalid action %(action)s for crash %(uuid)s specified.",
          action=str(action), uuid=str(crashobj.uuid)))
class RemoteTicket(object):
    '''Local proxy for a ticket in a remote Trac system.

    Ticket values are read from a local cache table (``remote_tickets``)
    when fresh, and re-fetched over XML-RPC from the remote Trac when
    missing or older than the configured cache TTL.
    '''

    # All fields from Trac ticket table except id
    remote_fields = [
        'time', 'changetime', 'component',  #'severity',
        'priority', 'owner', 'reporter', 'cc', 'version', 'milestone',
        'status', 'resolution', 'summary', 'description', 'keywords',
    ]
    table_fields = remote_fields + ['cachetime', 'remote_name', 'id']
    # Index of 'cachetime' in a row fetched with `table_fields` ordering.
    cachetime_pos = -3

    def __init__(self, env, remote_name, tkt_id, refresh=False):
        """Load the remote ticket, from cache unless *refresh* is set.

        :param env: the Trac environment
        :param remote_name: configured name of the remote Trac instance
        :param tkt_id: ticket id on the remote instance
        :param refresh: when true, bypass the cache and re-fetch
        """
        self.env = env
        self.remote_name = remote_name
        self.id = int(tkt_id)
        self.fields = [
            f for f in TicketSystem(self.env).get_ticket_fields()
            if f['name'] in RemoteTicket.remote_fields
        ]
        self.time_fields = [
            f['name'] for f in self.fields if f['type'] == 'time'
        ]
        self.values = {}
        if not refresh:
            self._fetch_ticket()
        else:
            self._refresh_ticket()

    def _fetch_ticket(self):
        """Populate ``self.values`` from the cache, refreshing if the
        cached row is missing or older than the cache TTL."""
        rts = RemoteTicketSystem(self.env)
        db = self.env.get_read_db()
        cursor = db.cursor()
        # Try to retrieve remote ticket from cache
        cursor.execute(
            '''SELECT %s FROM remote_tickets
               WHERE remote_name=%%s and id=%%s
            ''' % (', '.join(self.table_fields)),
            (self.remote_name, self.id))
        row = cursor.fetchone()
        # Remote ticket not in cache: refresh populates self.values and
        # saves; return here -- the code below would crash on row=None.
        if not row:
            self._refresh_ticket()
            return
        self._cachetime = from_utimestamp(row[self.cachetime_pos])
        ttl = timedelta(seconds=int(rts.cache_ttl) // 1000,
                        microseconds=int(rts.cache_ttl) % 1000 * 1000)
        # Cached remote ticket is too old: refresh already populated
        # self.values; return so the stale row does not overwrite it.
        if self._cachetime < datetime.now(utc) - ttl:
            self._refresh_ticket()
            return
        # Cached ticket is valid, populate instance
        for name, value in zip(self.remote_fields, row):
            if name in self.time_fields:
                self.values[name] = from_utimestamp(value)
            elif value is None:
                self.values[name] = empty
            else:
                self.values[name] = value

    def _refresh_ticket(self):
        """Fetch the ticket over XML-RPC, update ``self.values`` and
        save it to the cache table.

        :raises ResourceNotFound: when the remote server cannot be
            contacted or the ticket cannot be retrieved
        """
        rts = RemoteTicketSystem(self.env)
        remote_trac = rts.get_remote_trac(self.remote_name)['url']
        xmlrpc_addr = Href(remote_trac).rpc()
        server = xmlrpclib.ServerProxy(xmlrpc_addr)
        try:
            tkt_vals = server.ticket.get(self.id)
        except xmlrpclib.ProtocolError as e:
            msg = ("Could not contact remote Trac '%s' at %s. "
                   "Received error %s, %s")
            log = ("XML-RPC ProtocolError contacting Trac %s at %s, "
                   "errcode=%s, errmsg='%s'")
            args = (self.remote_name, xmlrpc_addr, e.errcode, e.errmsg)
            self.env.log.warn(log, *args)
            raise ResourceNotFound(msg % args, "Uncontactable server")
        except xmlrpclib.Fault as e:
            msg = ("Could not retrieve remote ticket %s:#%s. "
                   "Received fault %s, %s")
            log = ("XML-RPC Fault contacting Trac %s at %s, "
                   "faultCode=%s, faultString='%s'")
            args = (self.remote_name, self.id, e.faultCode, e.faultString)
            self.env.log.warn(log, *args)
            raise ResourceNotFound(msg % args, "Remote ticket unavailable")
        except socket.error as e:
            msg = ("Could not connect to remote Trac '%s' at %s. "
                   "Reason: %s")
            log = ("Network error connecting to remote Trac '%s' at '%s'. "
                   "Error: %s")
            args = (self.remote_name, xmlrpc_addr, e.args)
            self.env.log.warn(log, *args)
            raise ResourceNotFound(msg % args, "Network error")
        except Exception as e:
            msg = ("Unknown exception contacting remote Trac '%s' at %s. "
                   "Exception args: %s %s %s")
            args = (self.remote_name, xmlrpc_addr, e, type(e), e.args)
            self.env.log.error(msg, *args)
            raise
        # Convert from DateTime used by xmlrpclib to datetime used by trac
        for k in self.time_fields:
            tkt_vals[3][k] = parse_date(tkt_vals[3][k].value, utc)
        self.values.update(tkt_vals[3])
        self._cachetime = datetime.now(utc)
        self.save()
def process_request(self, req): presel = req.args.get('preselected') if presel and (presel + '/').startswith(req.href.browser() + '/'): req.redirect(presel) path = req.args.get('path', '/') rev = req.args.get('rev', '') if rev.lower() in ('', 'head'): rev = None format = req.args.get('format') order = req.args.get('order', 'name').lower() desc = 'desc' in req.args xhr = req.get_header('X-Requested-With') == 'XMLHttpRequest' rm = RepositoryManager(self.env) all_repositories = rm.get_all_repositories() reponame, repos, path = rm.get_repository_by_path(path) # Repository index show_index = not reponame and path == '/' if show_index: if repos and (as_bool(all_repositories[''].get('hidden')) or not repos.is_viewable(req.perm)): repos = None if not repos and reponame: raise ResourceNotFound( _("Repository '%(repo)s' not found", repo=reponame)) if reponame and reponame != repos.reponame: # Redirect alias qs = req.query_string req.redirect( req.href.browser(repos.reponame or None, path) + ('?' + qs if qs else '')) reponame = repos.reponame if repos else None # Find node for the requested path/rev context = web_context(req) node = None changeset = None display_rev = lambda rev: rev if repos: try: if rev: rev = repos.normalize_rev(rev) # If `rev` is `None`, we'll try to reuse `None` consistently, # as a special shortcut to the latest revision. rev_or_latest = rev or repos.youngest_rev node = get_existing_node(req, repos, path, rev_or_latest) except NoSuchChangeset, e: raise ResourceNotFound(e, _('Invalid changeset number')) if node: try: # use changeset instance to retrieve branches and tags changeset = repos.get_changeset(node.rev) except NoSuchChangeset: pass context = context.child( repos.resource.child('source', path, version=rev_or_latest)) display_rev = repos.display_rev