def expand_macro(self, formatter, name, args):
    from trac.config import Option
    section_filter = key_filter = ''
    args, kw = parse_args(args)
    if args:
        section_filter = args.pop(0).strip()
    if args:
        key_filter = args.pop(0).strip()

    registry = Option.get_registry(self.compmgr)
    sections = {}
    for (section, key), option in registry.iteritems():
        if section.startswith(section_filter):
            sections.setdefault(section, {})[key] = option

    return tag.div(class_='tracini')(
        (tag.h3(tag.code('[%s]' % section), id='%s-section' % section),
         tag.table(class_='wiki')(
             tag.tbody(tag.tr(tag.td(tag.tt(option.name)),
                              tag.td(format_to_oneliner(
                                  self.env, formatter.context,
                                  to_unicode(option.__doc__))))
                       for option in sorted(sections[section].itervalues(),
                                            key=lambda o: o.name)
                       if option.name.startswith(key_filter))))
        for section in sorted(sections))
def __getattribute__(self, name):
    if name == '__class__':
        return Option
    value = Option.__getattribute__(self, name)
    if name == '__doc__':
        value = dgettext(doc_domain, value)
    return value
def iterate(self, compmgr=None, defaults=True):
    """Iterate over the options in this section.

    If `compmgr` is specified, only return default option values for
    components that are enabled in the given `ComponentManager`.
    """
    options = set()
    name_str = self.name
    for setting in ProductSetting.select(self.env,
                                         where={'product': self.product,
                                                'section': name_str}):
        option = self.optionxform(setting.option)
        options.add(option)
        yield option
    for parent in self.config.parents:
        for option in parent[self.name].iterate(defaults=False):
            loption = self.optionxform(option)
            if loption not in options:
                options.add(loption)
                yield option
    if defaults:
        for section, option in Option.get_registry(compmgr).keys():
            if section == self.name and \
                    self.optionxform(option) not in options:
                yield option
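# A minimal usage sketch (not part of the original code): collecting the
# effective option names exposed by a section object that implements the
# iterate() method above.  `section` and `compmgr` are assumed to exist.
def effective_option_names(section, compmgr=None):
    """Return the sorted, de-duplicated option names visible in `section`."""
    return sorted(set(section.iterate(compmgr=compmgr)))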
def expand_macro(self, formatter, name, args):
    from trac.config import ConfigSection, Option
    section_filter = key_filter = ''
    args, kw = parse_args(args)
    if args:
        section_filter = args.pop(0).strip()
    if args:
        key_filter = args.pop(0).strip()

    def getdoc(option_or_section):
        doc = to_unicode(option_or_section.__doc__)
        if doc:
            doc = dgettext(option_or_section.doc_domain, doc)
        return doc

    registry = ConfigSection.get_registry(self.compmgr)
    sections = dict((name, getdoc(section))
                    for name, section in registry.iteritems()
                    if name.startswith(section_filter))

    registry = Option.get_registry(self.compmgr)
    options = {}
    for (section, key), option in registry.iteritems():
        if section.startswith(section_filter):
            options.setdefault(section, {})[key] = option
            sections.setdefault(section, '')

    def default_cell(option):
        default = option.default
        if default is True:
            default = 'true'
        elif default is False:
            default = 'false'
        elif default == 0:
            default = '0.0' if isinstance(default, float) else '0'
        elif default:
            default = ', '.join(to_unicode(val) for val in default) \
                      if isinstance(default, (list, tuple)) \
                      else to_unicode(default)
        else:
            return tag.td(_("(no default)"), class_='nodefault')
        return tag.td(tag.code(default), class_='default')

    return tag.div(class_='tracini')(
        (tag.h3(tag.code('[%s]' % section), id='%s-section' % section),
         format_to_html(self.env, formatter.context, section_doc),
         tag.table(class_='wiki')(tag.tbody(
             tag.tr(tag.td(tag.tt(option.name)),
                    tag.td(format_to_oneliner(
                        self.env, formatter.context, getdoc(option))),
                    default_cell(option))
             for option in sorted(options.get(section, {}).itervalues(),
                                  key=lambda o: o.name)
             if option.name.startswith(key_filter))))
        for section, section_doc in sorted(sections.iteritems()))
def _update_sample_config(self):
    filename = os.path.join(self.env.config_file_path + ".sample")
    if not os.path.isfile(filename):
        return
    config = Configuration(filename)
    for (section, name), option in Option.get_registry().iteritems():
        config.set(section, name, option.dumps(option.default))
    try:
        config.save()
        self.log.info("Wrote sample configuration file with the new "
                      "settings and their default values: %s", filename)
    except IOError as e:
        self.log.warn("Couldn't write sample configuration file (%s)", e,
                      exc_info=True)
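# Standalone sketch of the same "dump defaults to a sample file" idea,
# without any Trac imports, to make the control flow explicit.  The
# `registry` dict stands in for Option.get_registry(); the names and values
# used in the commented call are made up.
def write_sample_config(filename, registry):
    sections = {}
    for (section, name), default in registry.items():
        sections.setdefault(section, []).append((name, default))
    with open(filename, 'w') as f:
        for section in sorted(sections):
            f.write('[%s]\n' % section)
            for name, default in sorted(sections[section]):
                f.write('%s = %s\n' % (name, default))
            f.write('\n')

# write_sample_config('trac.ini.sample',
#                     {('trac', 'default_charset'): 'utf-8'})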
def expand_macro(self, formatter, name, content):
    from trac.config import ConfigSection, Option
    section_filter = key_filter = ''
    args, kw = parse_args(content)
    if args:
        section_filter = args.pop(0).strip()
    if args:
        key_filter = args.pop(0).strip()

    def getdoc(option_or_section):
        doc = to_unicode(option_or_section.__doc__)
        if doc:
            doc = dgettext(option_or_section.doc_domain, doc)
        return doc

    registry = ConfigSection.get_registry(self.compmgr)
    sections = dict((name, getdoc(section))
                    for name, section in registry.iteritems()
                    if name.startswith(section_filter))

    registry = Option.get_registry(self.compmgr)
    options = {}
    for (section, key), option in registry.iteritems():
        if section.startswith(section_filter):
            options.setdefault(section, {})[key] = option
            sections.setdefault(section, '')

    def default_cell(option):
        default = option.default
        if default is not None and default != '':
            return tag.td(tag.code(option.dumps(default)),
                          class_='default')
        else:
            return tag.td(_("(no default)"), class_='nodefault')

    return tag.div(class_='tracini')(
        (tag.h3(tag.code('[%s]' % section), id='%s-section' % section),
         format_to_html(self.env, formatter.context, section_doc),
         tag.table(class_='wiki')(tag.tbody(
             tag.tr(tag.td(tag.tt(option.name)),
                    tag.td(format_to_oneliner(
                        self.env, formatter.context, getdoc(option))),
                    default_cell(option),
                    class_='odd' if idx % 2 else 'even')
             for idx, option in enumerate(sorted(
                 options.get(section, {}).itervalues(),
                 key=lambda o: o.name))
             if option.name.startswith(key_filter))))
        for section, section_doc in sorted(sections.iteritems()))
def expand_macro(self, formatter, name, args):
    from trac.config import ConfigSection, Option
    section_filter = key_filter = ''
    args, kw = parse_args(args)
    if args:
        section_filter = args.pop(0).strip()
    if args:
        key_filter = args.pop(0).strip()

    registry = ConfigSection.get_registry(self.compmgr)
    sections = dict((name, dgettext(section.doc_domain,
                                    to_unicode(section.__doc__)))
                    for name, section in registry.iteritems()
                    if name.startswith(section_filter))

    registry = Option.get_registry(self.compmgr)
    options = {}
    for (section, key), option in registry.iteritems():
        if section.startswith(section_filter):
            options.setdefault(section, {})[key] = option
            sections.setdefault(section, '')

    return tag.div(class_='tracini')(
        (tag.h3(tag.code('[%s]' % section), id='%s-section' % section),
         format_to_html(self.env, formatter.context, section_doc),
         tag.table(class_='wiki')(tag.tbody(
             tag.tr(tag.td(tag.tt(option.name)),
                    tag.td(format_to_oneliner(
                        self.env, formatter.context,
                        dgettext(option.doc_domain,
                                 to_unicode(option.__doc__)))),
                    tag.td(tag.code(option.default or 'false')
                           if option.default or option.default is False
                           else _("(no default)"),
                           class_=('default'
                                   if option.default or
                                      option.default is False
                                   else 'nodefault')))
             for option in sorted(options.get(section, {}).itervalues(),
                                  key=lambda o: o.name)
             if option.name.startswith(key_filter))))
        for section, section_doc in sorted(sections.iteritems()))
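# The three expand_macro variants above share the same filter handling.
# A standalone illustration of it (parse_args is simplified here to a plain
# comma split, so no Trac import is needed):
def split_filters(content):
    args = [a.strip() for a in (content or '').split(',') if a.strip()]
    section_filter = args[0] if args else ''
    key_filter = args[1] if len(args) > 1 else ''
    return section_filter, key_filter

# split_filters('trac, default_')  ->  ('trac', 'default_')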
class PlantUmlMacro(WikiMacroBase):
    """
    A wiki processor that renders PlantUML diagrams in wiki text.

    Example:
    {{{
    {{{
    #!PlantUML
    @startuml
    Alice -> Bob: Authentication Request
    Bob --> Alice: Authentication Response
    Alice -> Bob: Another authentication Request
    Alice <-- Bob: another authentication Response
    @enduml
    }}}
    }}}

    Results in:
    {{{
    #!PlantUML
    @startuml
    Alice -> Bob: Authentication Request
    Bob --> Alice: Authentication Response
    Alice -> Bob: Another authentication Request
    Alice <-- Bob: another authentication Response
    @enduml
    }}}
    """

    implements(IRequestHandler)

    plantuml_jar = Option('plantuml', 'plantuml_jar', '',
        """Path to the PlantUML jar file. The jar file can be downloaded from
        the [http://plantuml.sourceforge.net/download.html PlantUML] site.""")

    java_bin = Option('plantuml', 'java_bin', 'java',
        """Path to the Java binary file. The default is `java`, which assumes
        that the Java binary is on the search path.""")

    def __init__(self):
        self.abs_img_dir = os.path.join(os.path.abspath(self.env.path),
                                        img_dir)
        if not os.path.isdir(self.abs_img_dir):
            os.makedirs(self.abs_img_dir)

    def expand_macro(self, formatter, name, content):
        if not self.plantuml_jar:
            return system_message(_("Installation error: plantuml_jar "
                                    "option not defined in trac.ini"))
        if not os.path.exists(self.plantuml_jar):
            return system_message(_("Installation error: plantuml.jar not "
                                    "found at '%s'") % self.plantuml_jar)

        # Trac 0.12 supports expand_macro(self, formatter, name, content,
        # args), which allows us to readily differentiate between a
        # WikiProcessor and a WikiMacro call. To support Trac 0.11, some
        # additional work is required.
        try:
            args = formatter.code_processor.args
        except AttributeError:
            args = None

        path = None
        if not args or 'path' not in args:
            # Could be a WikiProcessor or a WikiMacro call
            if content.strip().startswith("@startuml"):
                markup = content
                path = None
            else:
                path = content
                if not path:
                    return system_message(_("Path not specified"))
        elif args:
            # WikiProcessor with args
            path = args.get('path')
            if not path:
                return system_message(_("Path not specified"))

        if path:
            markup, exists = self._read_source_from_repos(formatter, path)
            if not exists:
                return system_message(
                    _("File not found in repository: %s") % path)
        else:
            if not content:
                return system_message(_("No UML text defined"))
            markup = content.encode('utf-8').strip()

        img_id = hashlib.sha1(markup).hexdigest()
        if not self._is_img_existing(img_id):
            cmd = ('%s -jar -Djava.awt.headless=true "%s" -charset UTF-8 '
                   '-pipe' % (self.java_bin, self.plantuml_jar))
            p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)
            (img_data, stderr) = p.communicate(input=markup)
            if p.returncode != 0:
                return system_message(_("Error running plantuml: '%s'")
                                      % stderr)
            self._write_img_to_file(img_id, img_data)

        link = formatter.href('plantuml', id=img_id)
        return tag.img(src=link)

    def get_macros(self):
        yield 'plantuml'  # WikiProcessor syntax
        yield 'PlantUml'  # WikiMacro syntax
        yield 'PlantUML'  # deprecated, retained for backward compatibility

    # IRequestHandler

    def match_request(self, req):
        return re.match(r'/plantuml?$', req.path_info)

    def process_request(self, req):
        img_id = req.args.get('id')
        img_data = self._read_img_from_file(img_id)
        req.send(img_data, 'image/png', status=200)
        return ""

    # Internal

    def _get_img_path(self, img_id):
        img_path = os.path.join(self.abs_img_dir, img_id)
        img_path += '.png'
        return img_path

    def _is_img_existing(self, img_id):
        img_path = self._get_img_path(img_id)
        return os.path.isfile(img_path)

    def _write_img_to_file(self, img_id, data):
        img_path = self._get_img_path(img_id)
        with open(img_path, 'wb') as img_file:
            img_file.write(data)

    def _read_img_from_file(self, img_id):
        img_path = self._get_img_path(img_id)
        with open(img_path, 'rb') as img_file:
            img_data = img_file.read()
        return img_data

    def _read_source_from_repos(self, formatter, src_path):
        repos_mgr = RepositoryManager(self.env)
        try:  # Trac 0.12+
            repos_name, repos, source_obj = \
                repos_mgr.get_repository_by_path(src_path)
        except AttributeError:  # Trac 0.11
            repos = repos_mgr.get_repository(formatter.req.authname)
        path, rev = _split_path(src_path)
        if repos.has_node(path, rev):
            node = repos.get_node(path, rev)
            content = node.get_content().read()
            exists = True
        else:
            rev = rev or repos.get_youngest_rev()
            # TODO: use `raise NoSuchNode(path, rev)`
            content = system_message(_("No such node '%s' at revision '%s'")
                                     % (path, rev))
            exists = False
        return (content, exists)
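# _split_path() is called in _read_source_from_repos() above but is not part
# of the excerpt.  A plausible (purely hypothetical) implementation splits an
# optional "@rev" suffix off the repository path:
def _split_path(src_path):
    """Split 'path/to/file@rev' into (path, rev); rev is None if absent."""
    if '@' in src_path:
        path, rev = src_path.rsplit('@', 1)
        return path, rev or None
    return src_path, None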
def __init__(self, section, name, default=None, choices='', doc=''):
    Option.__init__(self, section, name, default, doc)
    self.choices = filter(None, [c.strip() for c in choices.split(',')])
class FullBlogAnnouncement(Component):
    """Send announcements on blog events."""

    implements(IBlogChangeListener)
    implements(IAnnouncementFormatter)
    implements(IAnnouncementEmailDecorator)

    blog_email_subject = Option(
        'fullblog-announcement', 'blog_email_subject',
        _("Blog: ${blog.name} ${action}"),
        """Format string for the blog email subject.

        This is a mini genshi template that is passed the blog_post and
        action objects.
        """)

    # IBlogChangeListener interface

    def blog_post_changed(self, postname, version):
        """Called when a new blog post 'postname' with 'version' is added.

        version==1 denotes a new post, version>1 is a new version of an
        existing post.
        """
        blog_post = BlogPost(self.env, postname, version)
        action = 'post created'
        if version > 1:
            action = 'post changed'
        announcer = AnnouncementSystem(self.env)
        announcer.send(
            BlogChangeEvent(blog_post, action,
                            self.env.abs_href.blog(blog_post.name)))

    def blog_post_deleted(self, postname, version, fields):
        """Called when a blog post is deleted.

        version==0 means all versions (or the last remaining version) are
        deleted; any version>0 denotes a specific version only. `fields` is
        a dict with the pre-existing values of the blog post. If all (or the
        last) version is deleted, the dict will contain the contents of the
        'current' version.
        """
        blog_post = BlogPost(self.env, postname, version)
        announcer = AnnouncementSystem(self.env)
        announcer.send(
            BlogChangeEvent(blog_post, 'post deleted',
                            self.env.abs_href.blog(blog_post.name)))

    def blog_comment_added(self, postname, number):
        """Called when blog comment number N on post 'postname' is added."""
        blog_post = BlogPost(self.env, postname, 0)
        blog_comment = BlogComment(self.env, postname, number)
        announcer = AnnouncementSystem(self.env)
        announcer.send(
            BlogChangeEvent(blog_post, 'comment created',
                            self.env.abs_href.blog(blog_post.name),
                            blog_comment))

    def blog_comment_deleted(self, postname, number, fields):
        """Called when blog post comment 'number' is deleted.

        number==0 denotes that all comments are deleted and `fields` will be
        empty (usually follows a delete of the blog post). number>0 denotes
        that a specific comment is deleted, and `fields` will contain the
        values of the fields as they existed pre-delete.
        """
        blog_post = BlogPost(self.env, postname, 0)
        announcer = AnnouncementSystem(self.env)
        announcer.send(
            BlogChangeEvent(blog_post, 'comment deleted',
                            self.env.abs_href.blog(blog_post.name),
                            fields))

    # IAnnouncementEmailDecorator

    def decorate_message(self, event, message, decorates=None):
        if event.realm == "blog":
            template = NewTextTemplate(self.blog_email_subject.encode('utf8'))
            subject = template.generate(blog=event.blog_post,
                                        action=event.category).render(
                                            'text', encoding=None)
            set_header(message, 'Subject', subject)
        return next_decorator(event, message, decorates)

    # IAnnouncementFormatter interface

    def styles(self, transport, realm):
        if realm == 'blog':
            yield 'text/plain'

    def alternative_style_for(self, transport, realm, style):
        if realm == 'blog' and style != 'text/plain':
            return 'text/plain'

    def format(self, transport, realm, style, event):
        if realm == 'blog' and style == 'text/plain':
            return self._format_plaintext(event)

    def _format_plaintext(self, event):
        blog_post = event.blog_post
        blog_comment = event.blog_comment
        data = dict(
            name=blog_post.name,
            author=event.author,
            time=event.timestamp,
            category=event.category,
            version=event.version,
            link=event.remote_addr,
            title=blog_post.title,
            body=blog_post.body,
            comment=event.comment,
        )
        chrome = Chrome(self.env)
        dirs = []
        for provider in chrome.template_providers:
            dirs += provider.get_templates_dirs()
        templates = TemplateLoader(dirs, variable_lookup='lenient')
        template = templates.load('fullblog_plaintext.txt',
                                  cls=NewTextTemplate)
        if template:
            stream = template.generate(**data)
            output = stream.render('text')
            return output
class RequestDispatcher(Component): """Web request dispatcher. This component dispatches incoming requests to registered handlers. Besides, it also takes care of user authentication and request pre- and post-processing. """ required = True authenticators = ExtensionPoint(IAuthenticator) handlers = ExtensionPoint(IRequestHandler) filters = OrderedExtensionsOption( 'trac', 'request_filters', IRequestFilter, doc="""Ordered list of filters to apply to all requests (''since 0.10'').""") default_handler = ExtensionOption( 'trac', 'default_handler', IRequestHandler, 'WikiModule', """Name of the component that handles requests to the base URL. Options include `TimelineModule`, `RoadmapModule`, `BrowserModule`, `QueryModule`, `ReportModule`, `TicketModule` and `WikiModule`. The default is `WikiModule`. (''since 0.9'')""") default_timezone = Option('trac', 'default_timezone', '', """The default timezone to use""") default_language = Option( 'trac', 'default_language', '', """The preferred language to use if no user preference has been set. (''since 0.12.1'') """) default_date_format = Option( 'trac', 'default_date_format', '', """The date format. Valid options are 'iso8601' for selecting ISO 8601 format, or leave it empty which means the default date format will be inferred from the browser's default language. (''since 1.0'') """) use_xsendfile = BoolOption( 'trac', 'use_xsendfile', 'false', """When true, send a `X-Sendfile` header and no content when sending files from the filesystem, so that the web server handles the content. This requires a web server that knows how to handle such a header, like Apache with `mod_xsendfile` or lighttpd. (''since 1.0'') """) # Public API def authenticate(self, req): for authenticator in self.authenticators: authname = authenticator.authenticate(req) if authname: return authname else: return 'anonymous' def dispatch(self, req): """Find a registered handler that matches the request and let it process it. In addition, this method initializes the data dictionary passed to the the template and adds the web site chrome. """ self.log.debug('Dispatching %r', req) chrome = Chrome(self.env) # Setup request callbacks for lazily-evaluated properties req.callbacks.update({ 'authname': self.authenticate, 'chrome': chrome.prepare_request, 'perm': self._get_perm, 'session': self._get_session, 'locale': self._get_locale, 'lc_time': self._get_lc_time, 'tz': self._get_timezone, 'form_token': self._get_form_token, 'use_xsendfile': self._get_use_xsendfile, }) try: try: # Select the component that should handle the request chosen_handler = None try: for handler in self.handlers: if handler.match_request(req): chosen_handler = handler break if not chosen_handler: if not req.path_info or req.path_info == '/': chosen_handler = self.default_handler # pre-process any incoming request, whether a handler # was found or not chosen_handler = self._pre_process_request( req, chosen_handler) except TracError, e: raise HTTPInternalError(e) if not chosen_handler: if req.path_info.endswith('/'): # Strip trailing / and redirect target = req.path_info.rstrip('/').encode('utf-8') if req.query_string: target += '?' 
+ req.query_string req.redirect(req.href + target, permanent=True) raise HTTPNotFound('No handler matched request to %s', req.path_info) req.callbacks['chrome'] = partial(chrome.prepare_request, handler=chosen_handler) # Protect against CSRF attacks: we validate the form token # for all POST requests with a content-type corresponding # to form submissions if req.method == 'POST': ctype = req.get_header('Content-Type') if ctype: ctype, options = cgi.parse_header(ctype) if ctype in ('application/x-www-form-urlencoded', 'multipart/form-data') and \ req.args.get('__FORM_TOKEN') != req.form_token: if self.env.secure_cookies and req.scheme == 'http': msg = _('Secure cookies are enabled, you must ' 'use https to submit forms.') else: msg = _('Do you have cookies enabled?') raise HTTPBadRequest( _('Missing or invalid form token.' ' %(msg)s', msg=msg)) # Process the request and render the template resp = chosen_handler.process_request(req) if resp: if len(resp) == 2: # old Clearsilver template and HDF data self.log.error( "Clearsilver template are no longer " "supported (%s)", resp[0]) raise TracError( _("Clearsilver templates are no longer supported, " "please contact your Trac administrator.")) # Genshi template, data, content_type = \ self._post_process_request(req, *resp) if 'hdfdump' in req.args: req.perm.require('TRAC_ADMIN') # debugging helper - no need to render first out = StringIO() pprint(data, out) req.send(out.getvalue(), 'text/plain') output = chrome.render_template(req, template, data, content_type) req.send(output, content_type or 'text/html') else: self._post_process_request(req) except RequestDone: # Give the session a chance to persist changes after a send() req.session.save() raise except: # post-process the request in case of errors err = sys.exc_info() try: self._post_process_request(req) except RequestDone: raise except Exception, e: self.log.error( "Exception caught while post-processing" " request: %s", exception_to_unicode(e, traceback=True)) raise err[0], err[1], err[2]
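# For context, a minimal IRequestHandler that a dispatcher like the one above
# could select.  This is a sketch; the component name and URL are invented
# for the example.
from trac.core import Component, implements
from trac.web.api import IRequestHandler

class HelloWorldModule(Component):
    implements(IRequestHandler)

    def match_request(self, req):
        return req.path_info == '/helloworld'

    def process_request(self, req):
        req.send('Hello, world!', 'text/plain')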
def get_estimation_suffix():
    return Option('estimation-tools', 'estimation_suffix', 'h',
                  doc="""Suffix used for estimations. Defaults to 'h'""")
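# Hedged usage sketch: a factory like get_estimation_suffix() is typically
# assigned at class level so the returned Option descriptor registers itself
# on the component.  The component below is hypothetical.
from trac.core import Component

class EstimationExample(Component):
    estimation_suffix = get_estimation_suffix()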
class MapDashboard(Component): implements(IRequestHandler, INavigationContributor) ### configuration options openlayers_url = Option('geo', 'openlayers_url', 'http://openlayers.org/api/2.8-rc2/OpenLayers.js', "URL of OpenLayers JS to use") dashboard_tickets = IntOption( 'geo', 'dashboard_tickets', '6', "number of tickets to display on the dashboard map") display_cloud = BoolOption( 'geo', 'display_cloud', 'true', "whether to display the cloud on the map dashboard") dashboard = ListOption('geo', 'dashboard', 'activeissues', "which viewports to display on the dashboard") marker_style = OrderedExtensionsOption( 'geo', 'marker_style', IMapMarkerStyle, '', include_missing=False, doc="component to use to set feature style") def panels(self): """return the panel configuration""" retval = [] # XXX ugly hack because self.dashboard doesn't return # a list for no apparent reason for panel in self.env.config.getlist('geo', 'dashboard'): defaults = {'label': panel, 'query': None} config = {} for key, default in defaults.items(): config[key] = self.env.config.get('geo', '%s.%s' % (panel, key)) or default if config['query'] is not None: config['id'] = panel retval.append(config) return retval ### methods for IRequestHandler """Extension point interface for request handlers.""" def match_request(self, req): """Return whether the handler wants to process the given request.""" return req.path_info.strip('/') == 'map' def process_request(self, req): """Process the request. For ClearSilver, return a (template_name, content_type) tuple, where `template` is the ClearSilver template to use (either a `neo_cs.CS` object, or the file name of the template), and `content_type` is the MIME type of the content. For Genshi, return a (template_name, data, content_type) tuple, where `data` is a dictionary of substitutions for the template. For both templating systems, "text/html" is assumed if `content_type` is `None`. Note that if template processing should not occur, this method can simply send the response itself and not return anything. 
""" # get the GeoTicket component assert self.env.is_component_enabled(GeoTicket) geoticket = GeoTicket(self.env) # add the query script add_script(req, 'common/js/query.js') # get the panel configuration config = self.panels() # build the panels panels = [] located_tickets = geoticket.tickets_with_location() for panel in config: # query the tickets query_string = panel['query'] query = Query.from_string(self.env, query_string) # decide the date to sort by if query.order == 'time': date_to_display = 'time_created' else: date_to_display = 'time_changed' results = query.execute(req) n_tickets = len(results) results = [ result for result in results if result['id'] in located_tickets ] locations = [] tickets = [] results = results[:self.dashboard_tickets] for result in results: ticket = Ticket(self.env, result['id']) try: address, (lat, lon) = geoticket.locate_ticket(ticket) content = geoticket.feature_content(req, ticket) # style for the markers style = {} for extension in self.marker_style: style.update(extension.style(ticket, req, **style)) style = style or None locations.append({ 'latitude': lat, 'longitude': lon, 'style': style, 'content': Markup(content) }) tickets.append(ticket) except GeolocationException: continue title = panel['label'] panels.append({ 'title': title, 'id': panel['id'], 'locations': Markup(simplejson.dumps(locations)), 'tickets': tickets, 'n_tickets': n_tickets, 'date_to_display': date_to_display, 'query_href': query.get_href(req.href) }) # add the tag cloud, if enabled cloud = None if self.display_cloud: if TagCloudMacro is None: self.log.warn( "[geo] display_cloud is set but the TagsPlugin is not installed" ) else: formatter = Formatter(self.env, Context.from_request(req)) macro = TagCloudMacro(self.env) cloud = macro.expand_macro(formatter, 'TagCloud', '') add_stylesheet(req, 'tags/css/tractags.css') add_stylesheet(req, 'tags/css/tagcloud.css') # compile data for the genshi template data = dict(panels=panels, cloud=cloud, openlayers_url=self.openlayers_url) return ('mapdashboard.html', data, 'text/html') ### methods for INavigationContributor """Extension point interface for components that contribute items to the navigation. """ def get_active_navigation_item(self, req): """This method is only called for the `IRequestHandler` processing the request. It should return the name of the navigation item that should be highlighted as active/current. """ return 'map' def get_navigation_items(self, req): """Should return an iterable object over the list of navigation items to add, each being a tuple in the form (category, name, text). """ yield ('mainnav', 'map', tag.a('Map', href=req.href.map(), accesskey='M'))
def __init__(self, section, name, default=None, sep=',', itemsep=':',
             doc=''):
    Option.__init__(self, section, name, default, doc)
    self.sep = sep
    self.itemsep = itemsep
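# Illustration of how the two separators configured above are typically
# applied when a raw ini value is parsed.  The parsing itself is not part of
# the excerpt; this is an assumed sketch.
def parse_pairs(raw, sep=',', itemsep=':'):
    """'a:1, b:2' -> [('a', '1'), ('b', '2')]"""
    pairs = []
    for item in raw.split(sep):
        item = item.strip()
        if item:
            key, _unused, value = item.partition(itemsep)
            pairs.append((key.strip(), value.strip()))
    return pairs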
class SlackNotifcationPlugin(Component):
    implements(ITicketChangeListener)

    webhook = Option('slack', 'webhook', 'https://hooks.slack.com/services/',
                     doc="Incoming webhook for Slack")
    channel = Option('slack', 'channel', '#Trac',
                     doc="Channel name on Slack")
    username = Option('slack', 'username', 'Trac-Bot',
                      doc="Username of the bot for Slack notifications")
    fields = Option('slack', 'fields', 'type,component,resolution',
                    doc="Fields to include in Slack notification")
    authmap = Option('slack', 'authmap', '',
                     doc="Map Trac authors to name, Slack user ID "
                         "(preferred) and/or email address "
                         "(<TracUsername>:<Name>,<@SlackUserID>,<email>;...)")

    def mapAuth(self, values):
        author = values.get('author', None)
        if not author:
            return

        # Make sure the author formatting is correct.
        author = re.sub(r' <.*', u'', author)

        if not self.authmap:
            values['author'] = author
            return

        try:
            for am in self.authmap.strip().split(";"):
                au, ad = am.strip().split(":")
                if not au:
                    continue
                if author != au:
                    continue
                if not ad:
                    continue

                ad = ad.strip().split(",")
                if len(ad) > 1 and ad[1]:
                    author = "<%s>" % (ad[1])
                    break
                if len(ad) > 0 and ad[0]:
                    author = ad[0]
                if len(ad) > 2 and ad[2]:
                    author = "<mailto:%s|%s>" % (ad[2], author)
                break
        except Exception as err:
            self.log.warning("failed to map author: %s" % (str(err)))

        values['author'] = author

    def notify(self, ntype, values):
        # values['type'] = ntype
        self.mapAuth(values)

        #template = u'%(project)s/%(branch)s %(rev)s %(author)s: %(logmsg)s'
        #template = u'%(project)s %(rev)s %(author)s: %(logmsg)s'
        template = (u'_%(project)s_ :ticket:\n%(type)s ticket '
                    u'<%(url)s|%(id)s>: %(summary)s '
                    u'[*%(action)s* by %(author)s]')

        attachments = []

        if values['action'] == u'closed':
            template += u' :white_check_mark:'
        if values['action'] == u'created':
            template += u' :pushpin:'

        if values['attrib']:
            attachments.append({
                'title': u'Attributes',
                'text': values['attrib']
            })
        if values.get('changes', False):
            attachments.append({
                'title': u':small_red_triangle: Changes',
                'text': values['changes']
            })

        # For comment and description, strip the {{{, }}} markers. They add
        # nothing of value in Slack, and replacing them with ` or ``` doesn't
        # help as these end up being formatted as blockquotes anyway.
        if values['description']:
            attachments.append({
                'title': u'Description',
                'text': re.sub(r'({{{|}}})', u'', values['description'])
            })
        if values['comment']:
            attachments.append({
                'title': u'Comment:',
                'text': re.sub(r'({{{|}}})', u'', values['comment'])
            })

        message = template % values
        data = {
            "channel": self.channel,
            "username": self.username,
            "text": message.encode('utf-8').strip(),
            "attachments": attachments
        }

        try:
            requests.post(self.webhook, data={"payload": json.dumps(data)})
        except requests.exceptions.RequestException:
            return False

        return True

    def ticket_created(self, ticket):
        values = prepare_ticket_values(ticket, u'created')
        values['author'] = values['reporter']
        values['comment'] = u''

        fields = self.fields.split(',')
        attrib = []
        for field in fields:
            if ticket[field] != u'':
                attrib.append(u'\u2022 %s: %s' % (field, ticket[field]))
        values['attrib'] = u"\n".join(attrib) or u''

        self.notify(u'ticket', values)

    def ticket_changed(self, ticket, comment, author, old_values):
        action = u'changed'
        if 'status' in old_values:
            if 'status' in ticket.values:
                if ticket.values['status'] != old_values['status']:
                    action = ticket.values['status']

        values = prepare_ticket_values(ticket, action)
        values.update({
            'comment': comment or u'',
            'author': author or u'',
            'old_values': old_values
        })
        if 'description' not in old_values.keys():
            values['description'] = u''

        fields = self.fields.split(',')
        changes = []
        attrib = []
        for field in fields:
            if ticket[field] != u'':
                attrib.append(u'\u2022 %s: %s' % (field, ticket[field]))
            if field in old_values.keys():
                changes.append(u'\u2022 %s: %s \u2192 %s'
                               % (field, old_values[field], ticket[field]))
        values['attrib'] = u"\n".join(attrib) or u''
        values['changes'] = u"\n".join(changes) or u''

        self.notify(u'ticket', values)

    def ticket_deleted(self, ticket):
        pass
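# prepare_ticket_values() is used by the listener methods above but is not
# included in the excerpt.  A minimal, assumed sketch of the fields the
# notifier expects (the attribute names come from Trac's ticket model and
# environment; the helper itself is hypothetical):
def prepare_ticket_values(ticket, action):
    values = ticket.values.copy()
    values.update({
        'id': ticket.id,
        'action': action,
        'url': ticket.env.abs_href.ticket(ticket.id),
        'project': ticket.env.project_name,
        'attrib': u'',
        'changes': u'',
    })
    return values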
class TranslatedPagesMacro(Component): """ Macro to show the translated pages list. """ implements(IWikiMacroProvider) # IWikiMacroProvider methods def get_macros(self): """Yield the name of the macro based on the class name.""" yield u'TranslatedPages' def get_macro_description(self, name): return """Macro to show the translated pages list. Simply calling that macro in a page adds a menu linking to all available translations of a page. A language page (usually [wiki:TracLanguages]) must provide the language codes as a table with following entries: {{{ ||<language code>||<language name>||<english name>||<description>|| }}} The description contains the text displayed above language links in that language (usually a variant of 'Other languages'). A table title line starting with {{{||=}}} is not parsed. The Macro accepts arguments as well: * '''revision=<num>''' to specify the version of the base page when last translated, a negative revision indicates that a page needs updating in the status overview table * '''outdated=<text>''' mark the page as outdated with given comment * '''silent''' don't output empty chapter for show options when nothing is shown * '''showoutdated''' to show all pages, where revision does not match base revision * '''showmissing''' to show all pages, where translation is missing * '''showproblems''' to show all pages which have problems * '''showuntranslated''' to show all untranslated pages * '''showstatus''' to show one big status table * '''lang=<code>''' to restrict output of show outdated, status or missing to a specific language * '''label_outdated''' label to display when using the showoutdated option""" def parse_macro(self, parser, name, content): raise NotImplementedError # -- lang_page_name = Option( 'translatedpages', 'languages_page', u'TracLanguages', """Page name of table containing available languages""") page_code = Option('translatedpages', 'template', u'{lang}:{page}', """Page name template of translated pages""") base_lang = Option('translatedpages', 'base_language', u'En', """Base language to be used without prefix/suffix""") langcode_re = Option('translatedpages', 'regexp', u'([A-Z][a-z]{1,2}(?:_[A-Z]{2})?)', """Regular expression to match a language code""") outdated_tx = "<p style=\"background-color:rgb(253,255,221);padding: 10pt; border-color:rgb(128,128,128);border-style: solid; border-width: 1px;\">%s</p>\n" macro_re = re.compile(u"\[\[TranslatedPages(?:\((.+)\))?\]\]") revision_re = re.compile( u"\[\[TranslatedPages(?:\(.*?revision=(-?\d+).*?\))?\]\]") outdated_re = re.compile( u"\[\[TranslatedPages(?:\((?:.*,)?outdated=(.*)\))?\]\]") def __init__(self): self.langpage_re = re.compile(u"^\|\|" + self.langcode_re + u"\|\|(.+?)\|\|(.+?)\|\|(.+?)\|\|$") self.languages_page_version = 0 self._update_languages() self.template_re = re.compile(self.page_code \ .replace('{lang}', r'(?P<lang>%s)' % self.langcode_re) \ .replace('{page}', r'(?P<page>.+?)') + '$') def _parse_languages_list(self, text): langs = {} descr = {} langse = {} for line in text.replace('\r', '').split(u'\n'): regres = self.langpage_re.search(line) if regres == None: if not line.startswith(u'||=') and len(line) > 0: self.env.log.warn( u"Wrong line syntax while parsing languages list: %s" % line) else: code = regres.group(1) name = regres.group(2) engname = regres.group(3) desc = regres.group(4) self.env.log.debug("Adding language %s -> %s [%s] (%s)" \ % (code, name, engname, desc)) langs[code] = name descr[code] = desc langse[code] = engname return (langs, descr, langse) def 
_update_languages(self): languages_page = WikiPage(self.env, self.lang_page_name) if not languages_page.exists: self.env.log.warn(u"Can't find page %s" % self.lang_page_name) self.languages = {} self.languages_page_version = 0 else: if languages_page.version > self.languages_page_version: (self.languages, self.description, self.languagesbase) = \ self._parse_languages_list(languages_page.text) self.languages_page_version = languages_page.version def _get_language_name(self, lang_code): self._update_languages() return self.languages.get(lang_code, lang_code) def _get_translated_page(self, prefix, name, lang_code): if lang_code != self.base_lang: name = self.page_code.replace('{lang}', lang_code) \ .replace('{page}', name) return prefix + name def _get_page_info(self, page_name): m = self.template_re.search(page_name) if m: page, lang = m.group('page'), m.group('lang') prefix = m.start() else: page = page_name lang = self.base_lang prefix = 0 pages = WikiSystem(self.env).get_pages() for testpage in pages: m = self.template_re.search(testpage) if m and page_name == self._get_translated_page( \ testpage[:m.start()], m.group('page'), lang): page = m.group('page') prefix = m.start() break return (page_name[:prefix], page, lang) def _get_translations(self, prefix, base_page_name): res = [] for l in sorted(self.languages.keys()): tr = self._get_translated_page(prefix, base_page_name, l) if WikiSystem(self.env).has_page(tr): res.append(l) return res def _get_outdated(self, lang, label): if label != None: res = u"== %s ==\n" % label elif lang != None: langd = lang if self.languagesbase.has_key(lang): langd = self.languagesbase[lang] res = u"== Outdated pages for %s ==\n" % langd else: res = u"== Outdated pages ==\n" found = 0 for page in sorted(WikiSystem(self.env).get_pages()): pagetext = WikiPage(self.env, page).text regres = self.revision_re.search(pagetext) out = self.outdated_re.search(pagetext) outcode = "" outver = "" prefix, base_page_name, lang_code = self._get_page_info(page) if out != None and out.group(1) != None and (lang == None \ or lang == lang_code or lang_code == self.base_lang): outcode = "{{{%s}}}" % out.group(1).replace("\,", ",") if regres != None and regres.group(1) != None: if lang_code != self.base_lang and (lang == None or lang == lang_code): newver = WikiPage(self.env, base_page_name).version oldver = abs(int(regres.group(1))) if (newver != oldver): outver = "[[wiki:/%s?action=diff&old_version=%s|@%s-@%s]]" \ % (base_page_name, oldver, oldver, newver) if outcode != "" or outver != "": res += "|| [[wiki:/%s]] || %s || %s ||\n" % (page, outver, outcode) found += 1 if found == 0: res += u'none\n' return res def _get_missing(self, lang): res = "" base_pages = [] for page in sorted(WikiSystem(self.env).get_pages()): for line in WikiPage(self.env, page).text.replace('\r', '').split(u'\n'): regres = self.macro_re.search(line) if regres != None: (prefix, base_page_name, lang_code) = self._get_page_info(page) basename = self._get_translated_page(prefix, \ base_page_name, self.base_lang) if not basename in base_pages: base_pages.append(basename) langs = [] if lang != None: langs = [lang] else: langs = self.languages.keys() langs.sort() for l in langs: reslang = "" for base_page in base_pages: (prefix, page, lang_code) = self._get_page_info(base_page) tr = self._get_translated_page(prefix, page, l) if not WikiSystem(self.env).has_page(tr): reslang += " * [[wiki:/%s]]\n" % tr if len(reslang) > 0: langd = l if self.languagesbase.has_key(l): langd = self.languagesbase[l] res += u"== 
Missing pages for %s ==\n%s" % (langd, reslang) if len(res) == 0: res += u'== Missing pages ==\nnone\n' return res def _get_untranslated(self, silent): res = "" for page in sorted(WikiSystem(self.env).get_pages()): if self.macro_re.search(WikiPage(self.env, page).text) == None: res += " * [[wiki:/%s]]\n" % page if len(res) == 0: if (silent): return u" " res = u'none\n' return "== Untranslated pages ==\n" + res def _check_args(self, page, argstr, lang_code): if argstr == None or len(argstr) == 0: if lang_code != self.base_lang: return "||[[wiki:/%s]]|| ||No revision specified for translated page\n" \ % page else: return "" resargs = "" args, kw = parse_args(argstr) show = False for arg in args: if arg == 'showoutdated' or arg == 'showuntranslated' or \ arg == 'showmissing' or arg == 'showstatus' or arg == 'showproblems': show = True elif arg != 'silent': resargs += "||[wiki:/%s]||%s||unknown argument '%s'||\n" % ( page, argstr, arg) for arg in kw.keys(): if arg == 'lang': if not ('showoutdated' in args or 'showmissing' in args or \ 'showstatus' in args): resargs += "||[[wiki:/%s]]||%s||'lang' argument without proper show argument'||\n" \ % (page, argstr) elif not self.languages.has_key(kw[arg]): resargs += "||[[wiki:/%s]]||%s||'lang'='%s' argument uses unknown language||\n" \ % (page, argstr, kw[arg]) elif arg == 'revision': try: int(kw[arg]) #if int(kw[arg]) < 0: # resargs += "||[[wiki:/%s]]||%s||'revision'='%s' is no positive value||\n" \ # % (page, argstr, kw[arg]) except: resargs += "||[[wiki:/%s]]||%s||'revision'='%s' is no integer value||\n" \ % (page, argstr, kw[arg]) if show: resargs += "||[[wiki:/%s]]||%s||'revision'='%s' used with show argument||\n" \ % (page, argstr, kw[arg]) elif lang_code == self.base_lang: resargs += "||[[wiki:/%s]]||%s||Revision specified for base page\n" \ % (page, argstr) elif arg != 'outdated': resargs += "||[[wiki:/%s]]||%s||unknown argument '%s'='%s'||\n" \ % (page, argstr, arg, kw[arg]) if lang_code != self.base_lang and not kw.has_key( u'revision') and not show: resargs += "||[[wiki:/%s]]||%s||No revision specified for translated page\n" \ % (page, argstr) return resargs def _get_problems(self, silent): res = u"" resargs = u"" respages = u"" base_pages = [] for page in sorted(WikiSystem(self.env).get_pages()): for line in WikiPage(self.env, page).text.replace('\r', '').split(u'\n'): regres = self.macro_re.search(line) if regres != None: (prefix, base_page_name, lang_code) = self._get_page_info(page) basename = self._get_translated_page(prefix, \ base_page_name, self.base_lang) if not basename in base_pages: base_pages.append(basename) resargs += self._check_args(page, regres.group(1), lang_code) if self.languages.get(lang_code, None) == None: respages += "||[[wiki:/%s]]||Translated page language code unknown||\n" % page base_pages.sort() for base_page in base_pages: (prefix, page, lang_code) = self._get_page_info(base_page) translations = self._get_translations(prefix, page) basever = 0 if not self.base_lang in translations: respages += "||[[wiki:/%s]]||Base language is missing for translated pages||\n" % base_page else: basever = WikiPage(self.env, base_page).version for translation in translations: transpage = self._get_translated_page(prefix, page, translation) regres = self.macro_re.search( WikiPage(self.env, transpage).text) if regres != None: argstr = regres.group(1) if argstr != None and len(argstr) > 0: args, kw = parse_args(argstr) try: rev = int(kw[u'revision']) if rev != 0 and rev > basever: respages += "||[[wiki:/%s]]||Revision %s is higher 
than base revision %s||\n" \ % (transpage, rev, basever) except: pass else: respages += "||[[wiki:/%s]]||Translated page misses macro 'TranslatedPages'||\n" % transpage if len(resargs): res += u"=== Errors in supplied arguments ===\n||= Page =||= Arguments =||= Issue =||\n" + resargs if len(respages): res += u"=== Errors in page structure ===\n||= Page =||= Issue =||\n" + respages if not len(res): if (silent): return u" " res = u'none\n' return u"== Problem pages ==\n" + res def _get_status(self, lang): res = "" base_pages = [] langs = [] errors = [] for page in sorted(WikiSystem(self.env).get_pages()): for line in WikiPage(self.env, page).text.replace('\r', '').split(u'\n'): regres = self.macro_re.search(line) if regres != None: (prefix, base_page_name, lang_code) = self._get_page_info(page) basename = self._get_translated_page(prefix, \ base_page_name, self.base_lang) if not basename in base_pages: base_pages.append(basename) if len(self._check_args(page, regres.group(1), lang_code)) > 0: errors.append(page) if not lang_code in langs: langs.append(lang_code) if lang != None: langs = [lang] else: langs.sort() res += "\n||= Page =||= " + (" =||= ".join(langs)) + "=||\n" base_pages.sort() for base_page in base_pages: (prefix, page, lang_code) = self._get_page_info(base_page) basever = 0 if WikiSystem(self.env).has_page(base_page): basever = WikiPage(self.env, base_page).version if lang == None: res += "||[[wiki:/%s]]" % base_page for l in langs: color = "green" transpage = self._get_translated_page(prefix, page, l) if transpage in errors: color = "red" elif WikiSystem(self.env).has_page(transpage): regres = self.macro_re.search( WikiPage(self.env, transpage).text) if regres != None: argstr = regres.group(1) if argstr != None and len(argstr) > 0: args, kw = parse_args(argstr) if u'outdated' in kw: color = "yellow" elif l != self.base_lang: try: rev = int(kw[u'revision']) if rev != 0 and rev > basever: color = "red" elif rev != basever: color = "yellow" except: color = "red" else: color = "red" else: color = "grey" if lang != None: res += "||$$$%s$$$[[wiki:/%s|%s]]" % (color, transpage, base_page) else: res += "||$$$%s$$$[[wiki:/%s|%s]]" % (color, transpage, l) res += "||\n" return res def expand_macro(self, formatter, name, args): """ Return a list of translated pages with the native language names. The list of languages supported can be configured by adding new entries to TracLanguages page. Refer to ISO 639-1 for more information. 
""" args, kw = parse_args(args) # first handle special cases show = u"" lang = None silent = u'silent' in args outdated = u"" if u'lang' in kw: lang = kw[u'lang'] if u'outdated' in kw: outdated = self.outdated_tx % kw[u'outdated'] if u'showproblems' in args: show += self._get_problems(silent) if u'showstatus' in args: show += self._get_status(lang) if u'showoutdated' in args: label = None if u'label_outdated' in kw: label = kw[u'label_outdated'] show += self._get_outdated(lang, label) if u'showmissing' in args: show += self._get_missing(lang) if u'showuntranslated' in args: show += self._get_untranslated(silent) if len(show): outshow = StringIO() Formatter(self.env, formatter.context).format(show, outshow) val = outshow.getvalue() val = re.sub('>\$\$\$([a-z]+?)\$\$\$<a class=".*?"', \ ' style="background-color:\\1"><a style="color:#151B8D"', val) # try again more secure in case previous fails due to Wiki engine changes val = re.sub('>\$\$\$([a-z]+?)\$\$\$', \ ' style="background-color:\\1">', val) return val page_name = formatter.context.resource.id prefix, base_page_name, lang_code = self._get_page_info(page_name) lang_link_list = [] for translation in self._get_translations(prefix, base_page_name): if translation != lang_code: page_name = self._get_translated_page(prefix, base_page_name, translation) lang_link_list.append(u" * [[wiki:/%s|%s]]" % (page_name, \ self._get_language_name(translation))) else: lang_link_list.append(u" * '''%s'''" % self._get_language_name(translation)) baselink = "" if lang_code != self.base_lang and u'revision' in kw: basepage = self._get_translated_page(prefix, base_page_name, self.base_lang) newver = WikiPage(self.env, basepage).version oldver = abs(int(kw[u'revision'])) if oldver < newver: baselink = u"\n * [[wiki:/%s?action=diff&old_version=%s|@%s - @%s]]" \ % (basepage, oldver, oldver, newver) if len(lang_link_list) <= 1: return outdated out = StringIO() Formatter(self.env, formatter.context).format(u'\n'.join(lang_link_list) \ +baselink, out) desc = u"Languages" if self.description.has_key(lang_code): desc = self.description[lang_code] return outdated + u""" <div class="wiki-toc trac-nav" style="clear:both"> <h4>%s:</h4> %s </div>""" % (desc, out.getvalue())
class Mimeview(Component): """Generic HTML renderer for data, typically source code.""" required = True renderers = ExtensionPoint(IHTMLPreviewRenderer) annotators = ExtensionPoint(IHTMLPreviewAnnotator) converters = ExtensionPoint(IContentConverter) default_charset = Option('trac', 'default_charset', 'utf-8', """Charset to be used when in doubt.""") tab_width = IntOption('mimeviewer', 'tab_width', 8, """Displayed tab width in file preview.""") max_preview_size = IntOption('mimeviewer', 'max_preview_size', 262144, """Maximum file size for HTML preview.""") mime_map = ListOption( 'mimeviewer', 'mime_map', 'text/x-dylan:dylan, text/x-idl:ice, text/x-ada:ads:adb', doc="""List of additional MIME types and keyword mappings. Mappings are comma-separated, and for each MIME type, there's a colon (":") separated list of associated keywords or file extensions. """) mime_map_patterns = ListOption( 'mimeviewer', 'mime_map_patterns', 'text/plain:README|INSTALL(?!\.rst)|COPYING.*', doc="""List of additional MIME types associated to filename patterns. Mappings are comma-separated, and each mapping consists of a MIME type and a Python regexp used for matching filenames, separated by a colon (":"). (''since 1.0'') """) treat_as_binary = ListOption( 'mimeviewer', 'treat_as_binary', 'application/octet-stream, application/pdf, application/postscript, ' 'application/msword,application/rtf,', doc="""Comma-separated list of MIME types that should be treated as binary data. """) def __init__(self): self._mime_map = None self._mime_map_patterns = None # Public API def get_supported_conversions(self, mimetype): """Return a list of target MIME types as instances of the `namedtuple` `MimeConversion`. Output is ordered from best to worst quality. The `MimeConversion` `namedtuple` has fields: key, name, extension, in_mimetype, out_mimetype, quality, converter. """ fields = ('key', 'name', 'extension', 'in_mimetype', 'out_mimetype', 'quality', 'converter') _MimeConversion = namedtuple('MimeConversion', fields) converters = [] for c in self.converters: for k, n, e, im, om, q in c.get_supported_conversions() or []: if im == mimetype and q > 0: converters.append(_MimeConversion(k, n, e, im, om, q, c)) converters = sorted(converters, key=lambda i: i.quality, reverse=True) return converters def convert_content(self, req, mimetype, content, key, filename=None, url=None, iterable=False): """Convert the given content to the target MIME type represented by `key`, which can be either a MIME type or a key. 
Returns a tuple of (content, output_mime_type, extension).""" if not content: return '', 'text/plain;charset=utf-8', '.txt' # Ensure we have a MIME type for this content full_mimetype = mimetype if not full_mimetype: if hasattr(content, 'read'): content = content.read(self.max_preview_size) full_mimetype = self.get_mimetype(filename, content) if full_mimetype: mimetype = ct_mimetype(full_mimetype) # split off charset else: mimetype = full_mimetype = 'text/plain' # fallback if not binary # Choose best converter candidates = [ c for c in self.get_supported_conversions(mimetype) if key in (c.key, c.out_mimetype) ] if not candidates: raise TracError( _("No available MIME conversions from %(old)s to %(new)s", old=mimetype, new=key)) # First successful conversion wins for conversion in candidates: output = conversion.converter.convert_content( req, mimetype, content, conversion.key) if output: content, content_type = output if iterable: if isinstance(content, basestring): content = (content, ) else: if not isinstance(content, basestring): content = ''.join(content) return content, content_type, conversion.extension raise TracError( _("No available MIME conversions from %(old)s to %(new)s", old=mimetype, new=key)) def get_annotation_types(self): """Generator that returns all available annotation types.""" for annotator in self.annotators: yield annotator.get_annotation_type() def render(self, context, mimetype, content, filename=None, url=None, annotations=None, force_source=False): """Render an XHTML preview of the given `content`. `content` is the same as an `IHTMLPreviewRenderer.render`'s `content` argument. The specified `mimetype` will be used to select the most appropriate `IHTMLPreviewRenderer` implementation available for this MIME type. If not given, the MIME type will be infered from the filename or the content. Return a string containing the XHTML text. When rendering with an `IHTMLPreviewRenderer` fails, a warning is added to the request associated with the context (if any), unless the `disable_warnings` hint is set to `True`. """ if not content: return '' if not isinstance(context, RenderingContext): raise TypeError("RenderingContext expected (since 0.11)") # Ensure we have a MIME type for this content full_mimetype = mimetype if not full_mimetype: if hasattr(content, 'read'): content = content.read(self.max_preview_size) full_mimetype = self.get_mimetype(filename, content) if full_mimetype: mimetype = ct_mimetype(full_mimetype) # split off charset else: mimetype = full_mimetype = 'text/plain' # fallback if not binary # Determine candidate `IHTMLPreviewRenderer`s candidates = [] for renderer in self.renderers: qr = renderer.get_quality_ratio(mimetype) if qr > 0: candidates.append((qr, renderer)) candidates.sort(lambda x, y: cmp(y[0], x[0])) # Wrap file-like object so that it can be read multiple times if hasattr(content, 'read'): content = Content(content, self.max_preview_size) # First candidate which renders successfully wins. # Also, we don't want to expand tabs more than once. 
expanded_content = None for qr, renderer in candidates: if force_source and not getattr(renderer, 'returns_source', False): continue # skip non-source renderers in force_source mode if isinstance(content, Content): content.reset() try: ann_names = ', '.join(annotations) if annotations else \ 'no annotations' self.log.debug('Trying to render HTML preview using %s [%s]', renderer.__class__.__name__, ann_names) # check if we need to perform a tab expansion rendered_content = content if getattr(renderer, 'expand_tabs', False): if expanded_content is None: content = content_to_unicode(self.env, content, full_mimetype) expanded_content = content.expandtabs(self.tab_width) rendered_content = expanded_content result = renderer.render(context, full_mimetype, rendered_content, filename, url) if not result: continue if not (force_source or getattr(renderer, 'returns_source', False)): # Direct rendering of content if isinstance(result, basestring): if not isinstance(result, unicode): result = to_unicode(result) return Markup(to_unicode(result)) elif isinstance(result, Fragment): return result.generate() else: return result # Render content as source code if annotations: marks = context.req.args.get('marks') if context.req \ else None if marks: context.set_hints(marks=marks) return self._render_source(context, result, annotations) else: if isinstance(result, list): result = Markup('\n').join(result) return tag.div(class_='code')(tag.pre(result)).generate() except Exception as e: self.log.warning('HTML preview using %s failed: %s', renderer.__class__.__name__, exception_to_unicode(e, traceback=True)) if context.req and not context.get_hint('disable_warnings'): from trac.web.chrome import add_warning add_warning( context.req, _("HTML preview using %(renderer)s failed (%(err)s)", renderer=renderer.__class__.__name__, err=exception_to_unicode(e))) def _render_source(self, context, stream, annotations): from trac.web.chrome import add_warning annotators, labels, titles = {}, {}, {} for annotator in self.annotators: atype, alabel, atitle = annotator.get_annotation_type() if atype in annotations: labels[atype] = alabel titles[atype] = atitle annotators[atype] = annotator annotations = [a for a in annotations if a in annotators] if isinstance(stream, list): stream = HTMLParser(StringIO(u'\n'.join(stream))) elif isinstance(stream, unicode): text = stream def linesplitter(): for line in text.splitlines(True): yield TEXT, line, (None, -1, -1) stream = linesplitter() annotator_datas = [] for a in annotations: annotator = annotators[a] try: data = (annotator, annotator.get_annotation_data(context)) except TracError as e: self.log.warning("Can't use annotator '%s': %s", a, e) add_warning( context.req, tag.strong( tag_("Can't use %(annotator)s annotator: %(error)s", annotator=tag.em(a), error=tag.pre(e)))) data = None, None annotator_datas.append(data) def _head_row(): return tag.tr([ tag.th(labels[a], class_=a, title=titles[a]) for a in annotations ] + [tag.th(u'\xa0', class_='content')]) def _body_rows(): for idx, line in enumerate(_group_lines(stream)): row = tag.tr() for annotator, data in annotator_datas: if annotator: annotator.annotate_row(context, row, idx + 1, line, data) else: row.append(tag.td()) row.append(tag.td(line)) yield row return tag.table(class_='code')(tag.thead(_head_row()), tag.tbody(_body_rows())) def get_charset(self, content='', mimetype=None): """Infer the character encoding from the `content` or the `mimetype`. `content` is either a `str` or an `unicode` object. 
The charset will be determined using this order: * from the charset information present in the `mimetype` argument * auto-detection of the charset from the `content` * the configured `default_charset` """ if mimetype: ctpos = mimetype.find('charset=') if ctpos >= 0: return mimetype[ctpos + 8:].strip() if isinstance(content, str): utf = detect_unicode(content) if utf is not None: return utf return self.default_charset @property def mime_map(self): # Extend default extension to MIME type mappings with configured ones if not self._mime_map: self._mime_map = MIME_MAP.copy() # augment mime_map from `IHTMLPreviewRenderer`s for renderer in self.renderers: if hasattr(renderer, 'get_extra_mimetypes'): for mimetype, kwds in renderer.get_extra_mimetypes() or []: self._mime_map[mimetype] = mimetype for keyword in kwds: self._mime_map[keyword] = mimetype # augment/override mime_map from trac.ini for mapping in self.config['mimeviewer'].getlist('mime_map'): if ':' in mapping: assocations = mapping.split(':') for keyword in assocations: # Note: [0] kept on purpose self._mime_map[keyword] = assocations[0] return self._mime_map def get_mimetype(self, filename, content=None): """Infer the MIME type from the `filename` or the `content`. `content` is either a `str` or an `unicode` object. Return the detected MIME type, augmented by the charset information (i.e. "<mimetype>; charset=..."), or `None` if detection failed. """ mimetype = get_mimetype(filename, content, self.mime_map, self.mime_map_patterns) charset = None if mimetype: charset = self.get_charset(content, mimetype) if mimetype and charset and not 'charset' in mimetype: mimetype += '; charset=' + charset return mimetype @property def mime_map_patterns(self): if not self._mime_map_patterns: self._mime_map_patterns = {} for mapping in self.config['mimeviewer'] \ .getlist('mime_map_patterns'): if ':' in mapping: mimetype, regexp = mapping.split(':', 1) try: self._mime_map_patterns[mimetype] = re.compile(regexp) except re.error as e: self.log.warning( "mime_map_patterns contains invalid " "regexp '%s' for mimetype '%s' (%s)", regexp, mimetype, exception_to_unicode(e)) return self._mime_map_patterns def is_binary(self, mimetype=None, filename=None, content=None): """Check if a file must be considered as binary.""" if not mimetype and filename: mimetype = self.get_mimetype(filename, content) if mimetype: mimetype = ct_mimetype(mimetype) if mimetype in self.treat_as_binary: return True if content is not None and is_binary(content): return True return False def to_unicode(self, content, mimetype=None, charset=None): """Convert `content` (an encoded `str` object) to an `unicode` object. This calls `trac.util.to_unicode` with the `charset` provided, or the one obtained by `Mimeview.get_charset()`. """ if not charset: charset = self.get_charset(content, mimetype) return to_unicode(content, charset) def configured_modes_mapping(self, renderer): """Return a MIME type to `(mode,quality)` mapping for given `option`""" types, option = {}, '%s_modes' % renderer for mapping in self.config['mimeviewer'].getlist(option): if not mapping: continue try: mimetype, mode, quality = mapping.split(':') types[mimetype] = (mode, int(quality)) except (TypeError, ValueError): self.log.warning( "Invalid mapping '%s' specified in '%s' " "option.", mapping, option) return types def preview_data(self, context, content, length, mimetype, filename, url=None, annotations=None, force_source=False): """Prepares a rendered preview of the given `content`. 
Note: `content` will usually be an object with a `read` method. """ data = { 'raw_href': url, 'size': length, 'max_file_size': self.max_preview_size, 'max_file_size_reached': False, 'rendered': None, } if length >= self.max_preview_size: data['max_file_size_reached'] = True else: result = self.render(context, mimetype, content, filename, url, annotations, force_source=force_source) data['rendered'] = result return data def send_converted(self, req, in_type, content, selector, filename='file'): """Helper method for converting `content` and sending it directly. `selector` can be either a key or a MIME Type.""" from trac.web.chrome import Chrome from trac.web.api import RequestDone iterable = Chrome(self.env).use_chunked_encoding content, output_type, ext = self.convert_content(req, in_type, content, selector, iterable=iterable) if iterable: def encoder(content): for chunk in content: if isinstance(chunk, unicode): chunk = chunk.encode('utf-8') yield chunk content = encoder(content) length = None else: if isinstance(content, unicode): content = content.encode('utf-8') length = len(content) req.send_response(200) req.send_header('Content-Type', output_type) if length is not None: req.send_header('Content-Length', length) if filename: req.send_header( 'Content-Disposition', content_disposition('attachment', '%s.%s' % (filename, ext))) req.end_headers() req.write(content) raise RequestDone
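# Hedged sketch (not part of the Mimeview component above): the charset lookup
# order described in get_charset(), reduced to a standalone helper. The helper
# name and the BOM table are illustrative; Trac's real implementation uses
# detect_unicode() and the configured default_charset option.
def _guess_charset(content, mimetype=None, default='utf-8'):
    # 1. explicit charset parameter in the MIME type,
    #    e.g. "text/plain; charset=iso-8859-1"
    if mimetype and 'charset=' in mimetype:
        return mimetype.split('charset=', 1)[1].split(';')[0].strip()
    # 2. BOM-based auto-detection for raw byte strings
    if isinstance(content, str):
        for bom, name in ((b'\xef\xbb\xbf', 'utf-8'),
                          (b'\xff\xfe', 'utf-16-le'),
                          (b'\xfe\xff', 'utf-16-be')):
            if content.startswith(bom):
                return name
    # 3. fall back to the configured default charset
    return default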
class BatchModifyModule(Component):
    implements(IPermissionRequestor, ITemplateProvider, IRequestFilter,
               ITemplateStreamFilter)

    fields_as_list = ListOption(
        "batchmod", "fields_as_list", default="keywords",
        doc="field names modified as a value list (separated by ',')")
    list_separator_regex = Option(
        "batchmod", "list_separator_regex", default='[,\s]+',
        doc="separator regex used for 'list' fields")
    list_connector_string = Option(
        "batchmod", "list_connector_string", default=' ',
        doc="Connector string for 'list' fields. Defaults to a space.")

    # IPermissionRequestor methods
    def get_permission_actions(self):
        yield 'TICKET_BATCH_MODIFY'

    # ITemplateProvider methods
    def get_htdocs_dirs(self):
        """Return a list of directories with static resources (such as style
        sheets, images, etc.)

        Each item in the list must be a `(prefix, abspath)` tuple. The
        `prefix` part defines the path in the URL that requests to these
        resources are prefixed with. The `abspath` is the absolute path to
        the directory containing the resources on the local file system.
        """
        from pkg_resources import resource_filename
        return [('batchmod', resource_filename(__name__, 'htdocs'))]

    def get_templates_dirs(self):
        from pkg_resources import resource_filename
        return [resource_filename(__name__, 'templates')]

    # IRequestFilter methods
    def pre_process_request(self, req, handler):
        """Look for QueryHandler posts and hijack them"""
        if req.path_info == '/query' and req.method == 'POST' and \
                req.args.get('batchmod_submit') and self._has_permission(req):
            self.log.debug('BatchModifyModule: executing')
            batch_modifier = BatchModifier(self.fields_as_list,
                                           self.list_separator_regex,
                                           self.list_connector_string)
            batch_modifier.process_request(req, self.env, self.log)
            # redirect to original Query
            # TODO: need better way to fake QueryModule...
            req.redirect(req.args.get('query_href'))
        return handler

    def post_process_request(self, req, template, data, content_type):
        """No-op"""
        return (template, data, content_type)

    # ITemplateStreamFilter methods
    def filter_stream(self, req, method, filename, stream, formdata):
        """Adds BatchModify form to the query page"""
        if filename == 'query.html' and self._has_permission(req):
            self.log.debug('BatchModifyPlugin: rendering template')
            return stream | Transformer('//div[@id="help"]'). \
                before(self._generate_form(req, formdata))
        return stream

    def _generate_form(self, req, data):
        batchFormData = dict(data)
        batchFormData['query_href'] = req.session['query_href'] \
            or req.href.query()
        batchFormData['notify_enabled'] = self.config.getbool(
            'notification', 'smtp_enabled', False)
        ticketSystem = TicketSystem(self.env)
        fields = []
        for field in ticketSystem.get_ticket_fields():
            if field['name'] not in ('summary', 'reporter', 'description'):
                fields.append(field)
            if field['name'] == 'owner' \
                    and hasattr(ticketSystem, 'eventually_restrict_owner'):
                ticketSystem.eventually_restrict_owner(field)
        fields.sort(key=lambda f: f['name'])
        batchFormData['fields'] = fields
        add_script(req, 'batchmod/js/batchmod.js')
        add_stylesheet(req, 'batchmod/css/batchmod.css')
        stream = Chrome(self.env).render_template(req, 'batchmod.html',
                                                  batchFormData,
                                                  fragment=True)
        return stream.select('//form[@id="batchmod_form"]')

    # Helper methods
    def _has_permission(self, req):
        return req.perm.has_permission('TICKET_ADMIN') or \
            req.perm.has_permission('TICKET_BATCH_MODIFY')
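# Hedged sketch: how the list_separator_regex and list_connector_string options
# above could be applied when appending a value to one of the fields named in
# fields_as_list (such as keywords). The helper name and the exact merge policy
# are assumptions, not the plugin's actual code.
import re

def _merge_list_field(old_value, new_value,
                      separator_regex=r'[,\s]+', connector=' '):
    items = [v for v in re.split(separator_regex, old_value or '') if v]
    for token in re.split(separator_regex, new_value or ''):
        if token and token not in items:
            items.append(token)
    return connector.join(items)

# _merge_list_field('api, ui', 'ui regression')  ->  'api ui regression'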
class TicketEmailFormatter(Component): implements(IAnnouncementFormatter) ticket_email_subject = Option('announcer', 'ticket_email_subject', "Ticket #${ticket.id}: ${ticket['summary']} " \ "{% if action %}[${action}]{% end %}", """Format string for ticket email subject. This is a mini genshi template that is passed the ticket event and action objects.""") ticket_email_header_fields = ListOption( 'announcer', 'ticket_email_header_fields', 'owner, reporter, milestone, priority, severity', doc="""Comma seperated list of fields to appear in tickets. Use * to include all headers.""") def get_format_transport(self): return "email" def get_format_realms(self, transport): if transport == "email": yield "ticket" return def get_format_styles(self, transport, realm): if transport == "email": if realm == "ticket": yield "text/plain" yield "text/html" def get_format_alternative(self, transport, realm, style): if transport == "email": if realm == "ticket": if style == "text/html": return "text/plain" return None def format_headers(self, transport, realm, style, event): ticket = event.target return dict(realm=realm, ticket=ticket.id, priority=ticket['priority'], severity=ticket['severity']) def format_subject(self, transport, realm, style, event): action = None if transport == "email": if realm == "ticket": if event.changes: if 'status' in event.changes: action = 'Status -> %s' % (event.target['status']) template = NewTextTemplate(self.ticket_email_subject) return to_unicode( template.generate(ticket=event.target, event=event, action=action).render()) def format(self, transport, realm, style, event): if transport == "email": if realm == "ticket": if style == "text/plain": return self._format_plaintext(event) elif style == "text/html": return self._format_html(event) def _format_plaintext(self, event): ticket = event.target short_changes = {} long_changes = {} changed_items = [(field, to_unicode(old_value)) for \ field, old_value in event.changes.items()] for field, old_value in changed_items: new_value = to_unicode(ticket[field]) if ('\n' in new_value) or ('\n' in old_value): long_changes[field.capitalize()] = '\n'.join( lineup(wrap(new_value, cols=67).split('\n'))) else: short_changes[field.capitalize()] = (old_value, new_value) data = dict(ticket=ticket, author=event.author, comment=event.comment, fields=self._header_fields(ticket), category=event.category, ticket_link=self.env.abs_href('ticket', ticket.id), project_name=self.env.project_name, project_desc=self.env.project_description, project_link=self.env.project_url or self.env.abs_href(), has_changes=short_changes or long_changes, long_changes=long_changes, short_changes=short_changes, attachment=event.attachment) chrome = Chrome(self.env) dirs = [] for provider in chrome.template_providers: dirs += provider.get_templates_dirs() templates = TemplateLoader(dirs, variable_lookup='lenient') template = templates.load('ticket_email_plaintext.txt', cls=NewTextTemplate) if template: stream = template.generate(**data) output = stream.render('text') return output def _header_fields(self, ticket): headers = self.ticket_email_header_fields fields = TicketSystem(self.env).get_ticket_fields() if len(headers) and headers[0].strip() != '*': def _filter(i): return i['name'] in headers fields = filter(_filter, fields) return fields def _format_html(self, event): ticket = event.target short_changes = {} long_changes = {} chrome = Chrome(self.env) for field, old_value in event.changes.items(): new_value = ticket[field] if (new_value and '\n' in new_value) or \ 
(old_value and '\n' in old_value): long_changes[field.capitalize()] = HTML( "<pre>\n%s\n</pre>" % ('\n'.join( diff_cleanup( difflib.unified_diff( wrap(old_value, cols=60).split('\n'), wrap(new_value, cols=60).split('\n'), lineterm='', n=3))))) else: short_changes[field.capitalize()] = (old_value, new_value) try: req = Mock(href=Href(self.env.abs_href()), abs_href=self.env.abs_href(), authname=event.author, perm=MockPerm(), chrome=dict(warnings=[], notices=[]), args={}) context = Context.from_request(req, event.realm, event.target.id) formatter = HtmlFormatter(self.env, context, event.comment) temp = formatter.generate(True) except Exception, e: self.log.error(exception_to_unicode(e, traceback=True)) temp = 'Comment in plain text: %s' % event.comment data = dict(ticket=ticket, author=event.author, fields=self._header_fields(ticket), comment=temp, category=event.category, ticket_link=self.env.abs_href('ticket', ticket.id), project_name=self.env.project_name, project_desc=self.env.project_description, project_link=self.env.project_url or self.env.abs_href(), has_changes=short_changes or long_changes, long_changes=long_changes, short_changes=short_changes, attachment=event.attachment, attachment_link=self.env.abs_href('attachment/ticket', ticket.id)) chrome = Chrome(self.env) dirs = [] for provider in chrome.template_providers: dirs += provider.get_templates_dirs() templates = TemplateLoader(dirs, variable_lookup='lenient') template = templates.load('ticket_email_mimic.html', cls=MarkupTemplate) if template: stream = template.generate(**data) output = stream.render() return output
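# Hedged sketch (standalone demo, not formatter code): rendering the
# ticket_email_subject option above with Genshi's NewTextTemplate, using the
# same call sequence as format_subject(). FakeTicket only stands in for a real
# trac.ticket.model.Ticket.
def _subject_demo():
    from genshi.template import NewTextTemplate

    class FakeTicket(dict):
        id = 42

    ticket = FakeTicket(summary='Crash on login')
    tmpl = NewTextTemplate("Ticket #${ticket.id}: ${ticket['summary']} "
                           "{% if action %}[${action}]{% end %}")
    # expected result: "Ticket #42: Crash on login [Status -> closed]"
    return tmpl.generate(ticket=ticket,
                         action='Status -> closed').render()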
class LoginModule(Component): """User authentication manager. This component implements user authentication based on HTTP authentication provided by the web-server, combined with cookies for communicating the login information across the whole site. This mechanism expects that the web-server is setup so that a request to the path '/login' requires authentication (such as Basic or Digest). The login name is then stored in the database and associated with a unique key that gets passed back to the user agent using the 'trac_auth' cookie. This cookie is used to identify the user in subsequent requests to non-protected resources. """ implements(IAuthenticator, INavigationContributor, IRequestHandler) is_valid_default_handler = False check_ip = BoolOption( 'trac', 'check_auth_ip', 'false', """Whether the IP address of the user should be checked for authentication. (''since 0.9'')""") ignore_case = BoolOption( 'trac', 'ignore_auth_case', 'false', """Whether login names should be converted to lower case (''since 0.9'').""") auth_cookie_lifetime = IntOption( 'trac', 'auth_cookie_lifetime', 0, """Lifetime of the authentication cookie, in seconds. This value determines how long the browser will cache authentication information, and therefore, after how much inactivity a user will have to log in again. The value of 0 makes the cookie expire at the end of the browsing session. (''since 0.12'')""") auth_cookie_path = Option( 'trac', 'auth_cookie_path', '', """Path for the authentication cookie. Set this to the common base path of several Trac instances if you want them to share the cookie. (''since 0.12'')""") # IAuthenticator methods def authenticate(self, req): authname = None if req.remote_user: authname = req.remote_user elif 'trac_auth' in req.incookie: authname = self._get_name_for_cookie(req, req.incookie['trac_auth']) if not authname: return None if self.ignore_case: authname = authname.lower() return authname # INavigationContributor methods def get_active_navigation_item(self, req): return 'login' def get_navigation_items(self, req): if req.authname and req.authname != 'anonymous': yield ('metanav', 'login', _('logged in as %(user)s', user=req.authname)) yield ('metanav', 'logout', tag.form(tag.div( tag.button(_('Logout'), name='logout', type='submit')), action=req.href.logout(), method='post', id='logout', class_='trac-logout')) else: yield ('metanav', 'login', tag.a(_('Login'), href=req.href.login())) # IRequestHandler methods def match_request(self, req): return re.match('/(login|logout)/?$', req.path_info) def process_request(self, req): if req.path_info.startswith('/login'): self._do_login(req) elif req.path_info.startswith('/logout'): self._do_logout(req) self._redirect_back(req) # Internal methods def _do_login(self, req): """Log the remote user in. This function expects to be called when the remote user name is available. The user name is inserted into the `auth_cookie` table and a cookie identifying the user on subsequent requests is sent back to the client. If the Authenticator was created with `ignore_case` set to true, then the authentication name passed from the web server in req.remote_user will be converted to lower case before being used. This is to avoid problems on installations authenticating against Windows which is not case sensitive regarding user names and domain names """ if not req.remote_user: # TRANSLATOR: ... refer to the 'installation documentation'. 
(link) inst_doc = tag.a(_('installation documentation'), title=_("Configuring Authentication"), href=req.href.wiki('TracInstall') + "#ConfiguringAuthentication") raise TracError( tag_( "Authentication information not available. " "Please refer to the %(inst_doc)s.", inst_doc=inst_doc)) remote_user = req.remote_user if self.ignore_case: remote_user = remote_user.lower() if req.authname not in ('anonymous', remote_user): raise TracError( _('Already logged in as %(user)s.', user=req.authname)) with self.env.db_transaction as db: # Delete cookies older than 10 days db("DELETE FROM auth_cookie WHERE time < %s", (int(time.time()) - 86400 * 10, )) # Insert a new cookie if we haven't already got one cookie = None trac_auth = req.incookie.get('trac_auth') if trac_auth is not None: name = self._cookie_to_name(req, trac_auth) cookie = trac_auth.value if name == remote_user else None if cookie is None: cookie = hex_entropy() db( """ INSERT INTO auth_cookie (cookie, name, ipnr, time) VALUES (%s, %s, %s, %s) """, (cookie, remote_user, req.remote_addr, int(time.time()))) req.authname = remote_user req.outcookie['trac_auth'] = cookie req.outcookie['trac_auth']['path'] = self.auth_cookie_path \ or req.base_path or '/' if self.env.secure_cookies: req.outcookie['trac_auth']['secure'] = True req.outcookie['trac_auth']['httponly'] = True if self.auth_cookie_lifetime > 0: req.outcookie['trac_auth']['expires'] = self.auth_cookie_lifetime def _do_logout(self, req): """Log the user out. Simply deletes the corresponding record from the auth_cookie table. """ if req.method != 'POST': return if req.authname == 'anonymous': # Not logged in return if 'trac_auth' in req.incookie: self.env.db_transaction("DELETE FROM auth_cookie WHERE cookie=%s", (req.incookie['trac_auth'].value, )) else: self.env.db_transaction("DELETE FROM auth_cookie WHERE name=%s", (req.authname, )) self._expire_cookie(req) custom_redirect = self.config['metanav'].get('logout.redirect') if custom_redirect: if not re.match(r'https?:|/', custom_redirect): custom_redirect = req.href(custom_redirect) req.redirect(custom_redirect) def _expire_cookie(self, req): """Instruct the user agent to drop the auth cookie by setting the "expires" property to a date in the past. """ req.outcookie['trac_auth'] = '' req.outcookie['trac_auth']['path'] = self.auth_cookie_path \ or req.base_path or '/' req.outcookie['trac_auth']['expires'] = -10000 if self.env.secure_cookies: req.outcookie['trac_auth']['secure'] = True req.outcookie['trac_auth']['httponly'] = True def _cookie_to_name(self, req, cookie): # This is separated from _get_name_for_cookie(), because the # latter is overridden in AccountManager. 
if self.check_ip: sql = "SELECT name FROM auth_cookie WHERE cookie=%s AND ipnr=%s" args = (cookie.value, req.remote_addr) else: sql = "SELECT name FROM auth_cookie WHERE cookie=%s" args = (cookie.value, ) for name, in self.env.db_query(sql, args): return name def _get_name_for_cookie(self, req, cookie): name = self._cookie_to_name(req, cookie) if name is None: # The cookie is invalid (or has been purged from the # database), so tell the user agent to drop it as it is # invalid self._expire_cookie(req) return name def _redirect_back(self, req): """Redirect the user back to the URL she came from.""" referer = self._referer(req) if referer: if not referer.startswith(('http://', 'https://')): # Make URL absolute scheme, host = urlparse.urlparse(req.base_url)[:2] referer = urlparse.urlunparse( (scheme, host, referer, None, None, None)) pos = req.base_url.find(':') base_scheme = req.base_url[:pos] base_noscheme = req.base_url[pos:] base_noscheme_norm = base_noscheme.rstrip('/') referer_noscheme = referer[referer.find(':'):] # only redirect to referer if it is from the same site if referer_noscheme == base_noscheme or \ referer_noscheme.startswith(base_noscheme_norm + '/'): # avoid redirect loops if referer_noscheme.rstrip('/') != \ base_noscheme_norm + req.path_info.rstrip('/'): req.redirect(base_scheme + referer_noscheme) req.redirect(req.abs_href()) def _referer(self, req): return req.args.get('referer') or req.get_header('Referer')
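# Hedged sketch (standalone demo, not LoginModule code): the auth_cookie round
# trip described in the class docstring above, using plain sqlite3. The column
# layout matches the INSERT/SELECT statements in _do_login() and
# _cookie_to_name(); the cookie value is plain random hex here instead of
# trac.util.hex_entropy().
def _auth_cookie_demo():
    import binascii
    import os
    import sqlite3
    import time

    db = sqlite3.connect(':memory:')
    db.execute("CREATE TABLE auth_cookie (cookie TEXT, name TEXT, "
               "ipnr TEXT, time INT)")
    # login: associate a fresh cookie value with the authenticated user
    cookie = binascii.hexlify(os.urandom(16))
    db.execute("INSERT INTO auth_cookie (cookie, name, ipnr, time) "
               "VALUES (?, ?, ?, ?)",
               (cookie, 'jdoe', '127.0.0.1', int(time.time())))
    # later request: map the cookie back to a name, as _cookie_to_name() does
    return db.execute("SELECT name FROM auth_cookie WHERE cookie=?",
                      (cookie,)).fetchone()   # -> ('jdoe',)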
def __init__(self, section, name, choices, doc=''): Option.__init__(self, section, name, to_utf8(choices[0]), doc)
class GitConnector(Component): implements(IRepositoryConnector, ISystemInfoProvider, IWikiSyntaxProvider) def __init__(self): self._version = None try: self._version = PyGIT.Storage.git_version(git_bin=self.git_bin) except PyGIT.GitError as e: self.log.error("GitError: %s", e) if self._version: self.log.info("detected GIT version %s", self._version['v_str']) if not self._version['v_compatible']: self.log.error( "GIT version %s installed not compatible" "(need >= %s)", self._version['v_str'], self._version['v_min_str']) # ISystemInfoProvider methods def get_system_info(self): if self._version: yield 'GIT', self._version['v_str'] # IWikiSyntaxProvider methods def _format_sha_link(self, formatter, sha, label): # FIXME: this function needs serious rethinking... reponame = '' context = formatter.context while context: if context.resource.realm in ('source', 'changeset'): reponame = context.resource.parent.id break context = context.parent try: repos = RepositoryManager(self.env).get_repository(reponame) if not repos: raise Exception("Repository '%s' not found" % reponame) sha = repos.normalize_rev(sha) # in case it was abbreviated changeset = repos.get_changeset(sha) return tag.a(label, class_='changeset', title=shorten_line(changeset.message), href=formatter.href.changeset(sha, repos.reponame)) except Exception as e: return tag.a(label, class_='missing changeset', title=to_unicode(e), rel='nofollow') def get_wiki_syntax(self): yield (r'(?:\b|!)r?[0-9a-fA-F]{%d,40}\b' % self.wiki_shortrev_len, lambda fmt, sha, match: self._format_sha_link( fmt, sha.startswith('r') and sha[1:] or sha, sha)) def get_link_resolvers(self): yield ('sha', lambda fmt, _, sha, label, match=None: self. _format_sha_link(fmt, sha, label)) # IRepositoryConnector methods persistent_cache = BoolOption( 'git', 'persistent_cache', 'false', """Enable persistent caching of commit tree.""") cached_repository = BoolOption( 'git', 'cached_repository', 'false', """Wrap `GitRepository` in `CachedRepository`.""") shortrev_len = IntOption( 'git', 'shortrev_len', 7, """The length at which a sha1 should be abbreviated to (must be >= 4 and <= 40). """) wiki_shortrev_len = IntOption( 'git', 'wikishortrev_len', 40, """The minimum length of an hex-string for which auto-detection as sha1 is performed (must be >= 4 and <= 40). """) trac_user_rlookup = BoolOption( 'git', 'trac_user_rlookup', 'false', """Enable reverse mapping of git email addresses to trac user ids. Performance will be reduced if there are many users and the `cached_repository` option is `disabled`. A repository resync is required after changing the value of this option. """) use_committer_id = BoolOption( 'git', 'use_committer_id', 'true', """Use git-committer id instead of git-author id for the changeset ''Author'' field. """) use_committer_time = BoolOption( 'git', 'use_committer_time', 'true', """Use git-committer timestamp instead of git-author timestamp for the changeset ''Timestamp'' field. 
""") git_fs_encoding = Option( 'git', 'git_fs_encoding', 'utf-8', """Define charset encoding of paths within git repositories.""") git_bin = Option('git', 'git_bin', 'git', """Path to the git executable.""") def get_supported_types(self): yield ('git', 8) def get_repository(self, type, dir, params): """GitRepository factory method""" assert type == 'git' if not (4 <= self.shortrev_len <= 40): raise TracError( _("%(option)s must be in the range [4..40]", option="[git] shortrev_len")) if not (4 <= self.wiki_shortrev_len <= 40): raise TracError( _("%(option)s must be in the range [4..40]", option="[git] wikishortrev_len")) if not self._version: raise TracError(_("GIT backend not available")) elif not self._version['v_compatible']: raise TracError( _( "GIT version %(hasver)s installed not " "compatible (need >= %(needsver)s)", hasver=self._version['v_str'], needsver=self._version['v_min_str'])) if self.trac_user_rlookup: def rlookup_uid(email): """Reverse map 'real name <*****@*****.**>' addresses to trac user ids. :return: `None` if lookup failed """ try: _, email = email.rsplit('<', 1) email, _ = email.split('>', 1) email = email.lower() except Exception: return None for _uid, _name, _email in self.env.get_known_users(): try: if email == _email.lower(): return _uid except Exception: continue else: def rlookup_uid(_): return None repos = GitRepository( self.env, dir, params, self.log, persistent_cache=self.persistent_cache, git_bin=self.git_bin, git_fs_encoding=self.git_fs_encoding, shortrev_len=self.shortrev_len, rlookup_uid=rlookup_uid, use_committer_id=self.use_committer_id, use_committer_time=self.use_committer_time, ) if self.cached_repository: repos = GitCachedRepository(self.env, repos, self.log) self.log.debug("enabled CachedRepository for '%s'", dir) else: self.log.debug("disabled CachedRepository for '%s'", dir) return repos
class RepositoryManager(Component):
    """Component registering the supported version control systems.

    It provides easy access to the configured implementation.
    """

    implements(IRequestFilter)

    connectors = ExtensionPoint(IRepositoryConnector)

    repository_type = Option('trac', 'repository_type', 'svn',
        """Repository connector type. (''since 0.10'')""")
    repository_dir = Option('trac', 'repository_dir', '',
        """Path to local repository""")

    def __init__(self):
        self._cache = {}
        self._lock = threading.Lock()
        self._connector = None

    # IRequestFilter methods
    def pre_process_request(self, req, handler):
        from trac.web.chrome import Chrome
        if handler is not Chrome(self.env):
            self.get_repository(req.authname)  # triggers a sync if applicable
        return handler

    def post_process_request(self, req, template, content_type):
        return (template, content_type)

    # Public API methods
    def get_repository(self, authname):
        if not self._connector:
            candidates = []
            for connector in self.connectors:
                for repos_type_, prio in connector.get_supported_types():
                    if self.repository_type != repos_type_:
                        continue
                    heappush(candidates, (-prio, connector))
            if not candidates:
                raise TracError('Unsupported version control system "%s"'
                                % self.repository_type)
            self._connector = heappop(candidates)[1]
        try:
            self._lock.acquire()
            tid = threading._get_ident()
            if tid in self._cache:
                repos = self._cache[tid]
            else:
                rtype, rdir = self.repository_type, self.repository_dir
                repos = self._connector.get_repository(rtype, rdir, authname)
                self._cache[tid] = repos
            return repos
        finally:
            self._lock.release()

    def shutdown(self, tid=None):
        if tid:
            assert tid == threading._get_ident()
            try:
                self._lock.acquire()
                self._cache.pop(tid, None)
            finally:
                self._lock.release()
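# Hedged sketch: the priority-based connector selection performed in
# get_repository() above, using plain (name, type, priority) tuples instead of
# real IRepositoryConnector components.
from heapq import heappop, heappush

def _pick_connector(supported, wanted_type):
    candidates = []
    for name, repos_type, prio in supported:
        if repos_type == wanted_type:
            # negate the priority so the highest-priority entry pops first
            heappush(candidates, (-prio, name))
    return heappop(candidates)[1] if candidates else None

# _pick_connector([('svn-bindings', 'svn', 4), ('svn-cli', 'svn', 2)], 'svn')
# -> 'svn-bindings'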
class RepositoryManager(Component): """Version control system manager.""" implements(IRequestFilter, IResourceManager, IRepositoryProvider) connectors = ExtensionPoint(IRepositoryConnector) providers = ExtensionPoint(IRepositoryProvider) change_listeners = ExtensionPoint(IRepositoryChangeListener) repositories_section = ConfigSection( 'repositories', """One of the alternatives for registering new repositories is to populate the `[repositories]` section of the `trac.ini`. This is especially suited for setting up convenience aliases, short-lived repositories, or during the initial phases of an installation. See [TracRepositoryAdmin#Intrac.ini TracRepositoryAdmin] for details about the format adopted for this section and the rest of that page for the other alternatives. (''since 0.12'')""") repository_type = Option( 'trac', 'repository_type', 'svn', """Default repository connector type. (''since 0.10'') This is also used as the default repository type for repositories defined in [[TracIni#repositories-section repositories]] or using the "Repositories" admin panel. (''since 0.12'') """) repository_dir = Option( 'trac', 'repository_dir', '', """Path to the default repository. This can also be a relative path (''since 0.11''). This option is deprecated, and repositories should be defined in the [TracIni#repositories-section repositories] section, or using the "Repositories" admin panel. (''since 0.12'')""") repository_sync_per_request = ListOption( 'trac', 'repository_sync_per_request', '(default)', doc="""List of repositories that should be synchronized on every page request. Leave this option empty if you have set up post-commit hooks calling `trac-admin $ENV changeset added` on all your repositories (recommended). Otherwise, set it to a comma-separated list of repository names. Note that this will negatively affect performance, and will prevent changeset listeners from receiving events from the repositories specified here. The default is to synchronize the default repository, for backward compatibility. (''since 0.12'')""") def __init__(self): self._cache = {} self._lock = threading.Lock() self._connectors = None self._all_repositories = None # IRequestFilter methods def pre_process_request(self, req, handler): from trac.web.chrome import Chrome, add_warning if handler is not Chrome(self.env): for reponame in self.repository_sync_per_request: start = time.time() if is_default(reponame): reponame = '' try: repo = self.get_repository(reponame) if repo: repo.sync() else: self.log.warning( "Unable to find repository '%s' for " "synchronization", reponame or '(default)') continue except TracError, e: add_warning( req, _( "Can't synchronize with repository \"%(name)s\" " "(%(error)s). Look in the Trac log for more " "information.", name=reponame or '(default)', error=to_unicode(e.message))) self.log.info("Synchronized '%s' repository in %0.2f seconds", reponame or '(default)', time.time() - start) return handler
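# Hedged sketch: the post-commit notification that the
# repository_sync_per_request documentation above recommends instead of
# per-request syncing. The environment path and repository name are
# placeholders; the trac-admin subcommand is the one quoted in that option
# documentation.
def _notify_trac_of_changeset(env_path, reponame, rev):
    import subprocess
    subprocess.check_call(['trac-admin', env_path,
                           'changeset', 'added', reponame, str(rev)])

# e.g. called from a Subversion post-commit or git post-receive hook:
# _notify_trac_of_changeset('/var/trac/myenv', '(default)', 1234)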
class GoogleMapMacro(WikiMacroBase): """ Provides a macro to insert Google Maps(TM) in Wiki pages. == Description == Website: http://trac-hacks.org/wiki/GoogleMapMacro `$Id$` This macro lets the user insert a full dynamic [http://maps.google.com/ Google Map]. Because a lot of javascript is used (by Google) a [http://local.google.com/support/bin/answer.py?answer=16532&topic=1499 Google Map compatible browser] is needed. Newer version of Firefox and MS Internet Explorer are compatible. For javascript-less static maps use the similar [http://trac-hacks.org/wiki/GoogleStaticMapMacro GoogleStaticMapMacro]. Multiple Google Maps on the same wiki page are actively supported. == Dependencies == The recent version (r480) of this macro needs the AdvParseArgsPlugin in revision 4795 or later. == Configuration == A different [http://code.google.com/apis/maps/signup.html Google Map API key] is needed for every web domain which can be get for free from Google. '''Please check if the Google Map API Terms of Use apply for your Trac project.''' They do apply IMHO for non-pay open accessible Trac projects. == Usage == The macro knows the following arguments, which can be used in the normal `key1=value1,key2=value2,...` syntax. If a value includes one or more comma then it must be set in double quotes (`" "`). If a key-less value is given it will be taken as `center` coordinates if it's in the proper format otherwise it's taken as an `address`. Unknown (or misspelled) keys or key-less values except the first are silently ignored. `address`:: Sets the center of the map to the given address. The value must be surrounded by quotes if it includes commas, e.g. `"Street, City, Country"`. If an `address` but no `zoom` value was given an appropriate value will be guessed based on the address accuracy returned by Google. At the moment this isn't very accurate but soon [http://groups.google.com/group/Google-Maps-API/browse_thread/thread/53c4525e8d01e75d Google might improve] this. `center`:: Sets the center of the map to the given coordinates. The format is `{latitude}:{longitude}` or `"{latitude},{longitude}"`. `zoom`:: Sets zoom factor. Allowed values are between 0 (whole world) and 19 (single house). Very high zoom values might not be supported by Google Maps for all parts of the world. `size`:: The size either in the format `{width}x{height}` as numbers in pixel, e.g.: `300x400` means 300px wide and 400px high or in the format `{width}{unit}:{height}{unit}` where unit can be any CSS unit (em, ex, px, in, cm, mm, pt, pc). `types` (optional):: Sets the map types selectable by the user, separated by colons ('`:`'). If not given the standard types of Google Maps are used (Normal, Satellite, Hybrid). The following types are available (values are case-insensitive): * `normal` Normal street-map * `satellite` Satellite picture * `hybrid` Satellite picture with streets as overlay * `physical` Terrain map `type` (optional):: Sets the initial map type. See the `types` argument for the available types. If this argument is not given the first listed type under `types` is used initially. `controls` (optional):: Sets the used map controls. Multiple controls can be given, separated by colon ('`:`'). The value is case-insensitive. If not set the controls `MapType` and `LargeMap` are used. If set but empty no controls are displayed. 
The following controls are available (descriptions taken from the [http://code.google.com/apis/maps/documentation/reference.html#GControlImpl Google Map API page]): * `LargeMap`: Control with buttons to pan in four directions, and zoom in and zoom out. * `SmallMap`: Control with buttons to pan in four directions, and zoom in and zoom out, and a zoom slider. * `SmallZoom`: Control with buttons to zoom in and zoom out. * `Scale`: Control that displays the map scale. * `MapType`: Standard map type control for selecting and switching between supported map types via buttons. * `HierarchicalMapType`: Drop-down map type control for switching between supported map types. * `OverviewMap`: Collapsible overview mini-map in the corner of the main map for reference location and navigation (through dragging). `marker`:: (New) Allows the user to set labeled markers on given location of the map. The format for a marker is `{latitude}:{longitude};{Letter};{TracLink};{Title}` or `"{Address}";{Letter};{TracLink};{Title}`. If the string 'center' is used instead of an address the center of the map is marked. The optional marker letter can be either A-Z or 'o', '.' or empty for a plain marker. An optional [TracLinks TracLink] can be given which may be opened in a new window (see `target`) when clicked on the marker. An optional marker title can be set which will get displayed when the mouse cursor is over the marker. From revision [4801] on multiple markers can be given which replaces the `markers` argument. `markers`:: (Old) Can be used to set multiple markers which are are separated using the '`|`' character. Optional values can be kept empty, e.g.: `"{Address}";;;My Address` would display the address with the title 'My Address'. Trailing semicolons can be skipped. Addresses, links and titles which include either '`,`', '`;`' or '`|`' must be enclosed in double quotes (`" "`). `target`:: If set to '`new`' or '`newwindow`' all hyperlinks of the map markers will be opened in a new window (or tab, depending on the browser settings) or in the same window otherwise. `from`,`to`:: Request driving directions '`from`'->'`to`'. Multiple `to` addresses are allowed. No `address` or `center` need to be given. == Examples == === Using geographic coordinates === Please use a colon, not a comma, as separator for the coordinates. {{{ [[GoogleMap(center=50.0:10.0,zoom=10,size=400x400)]] or [[GoogleMap("50.0:10.0",zoom=10,size=400x400)]] or [[GoogleMap(50.0:10.0,zoom=10,size=400x400)]] }}} === Using an address === Please use semicolons, not commas, as separators in the address. {{{ [[GoogleMap(address="Street, City, Country",zoom=10,size=400x400)]] or [[GoogleMap("Street, City, Country",zoom=10,size=400x400)]] or, if you really want to: [[GoogleMap(Street; City; Country,zoom=10,size=400x400)]] }}} Please note that the address is converted into coordinates by user-side javascript every time the wiki page is loaded. If this fails no map will be shown, only an empty gray rectangle. === Using both === If both address and center coordinates are given, then the result depends on the [#config `geocoding` setting]: `server`:: The address is resolved on the trac server and the coordinates are completely ignored. `client`:: The map is first centered at the given coordinates and then moved to the given address after (and if) it was resolved by the client-side !JavaScript code. 
{{{ [[GoogleMap(center=50.0:10.0,address="Street, City, Country",zoom=10,size=400x400)]] }}} === Select Map Types === To show a map with the standard map types where the satellite map is preselected: {{{ [[GoogleMap("Street, City, Country",zoom=10,size=400x400,type=satellite)]] }}} To only show a satellite map (please note the added '`s`'): {{{ [[GoogleMap("Street, City, Country",zoom=10,size=400x400,types=satellite)]] }}} To show a map with hybrid and satellite map types (in this order) where the satellite map is preselected: {{{ [[GoogleMap("Street, City, Country",zoom=10,size=400x400,types=hybrid:satellite,type=satellite)]] }}} To show a map with hybrid and satellite map types (in this order) where the hybrid map is preselected: {{{ [[GoogleMap("Street, City, Country",zoom=10,size=400x400,types=hybrid:satellite)]] or [[GoogleMap("Street, City, Country",zoom=10,size=400x400,types=hybrid:satellite,type=hybrid)]] }}} === Markers === To create three markers: one at the center of the map (A), one at the next street (B) and one at coordinates 10.243,23.343 (C): {{{ [[GoogleMap("Street, City, Country",zoom=10,size=400x400,markers=center;A|"Next street, City, Country";B|10.243:23.343;C)]] }}} The same with hyperlinked markers: {{{ [[GoogleMap("Street, City, Country",zoom=10,size=400x400,markers=center;A;wiki:MyWikiPage|"Next street, City, Country";B;ticket:1|10.243:23.343;C;http://www.example.com/)]] }}} """ implements(IRequestFilter, ITemplateProvider, IRequestFilter, IEnvironmentSetupParticipant) geocoding = Option( 'googlemap', 'geocoding', 'client', 'Which side is handling the geocoding: either "server" or "client" (default).' ) api_key = Option( 'googlemap', 'api_key', '', 'Google Map API key. Available from http://code.google.com/apis/maps/signup.html .' ) default_zoom = IntOption( 'googlemap', 'default_zoom', '6', 'Default map zoom used if no zoom specified by the user (default: 6)') default_size = Option( 'googlemap', 'default_size', '300x300', 'Default map size (width x height, in pixel without units) used if no size specified by the user (default: 300x300)' ) default_target = Option( 'googlemap', 'default_target', '', 'Default target for hyperlinked markers. Use "_blank" to open target in new window. (Default: "")' ) def __init__(self): self.geocoding_server = self.geocoding.lower() == "server" def _create_db_table(self, db=None, commit=True): """ Create DB table if it not exists. 
""" if self.geocoding_server: self.env.log.debug("Creating DB table (if not already exists).") db = db or self.env.get_db_cnx() cursor = db.cursor() cursor.execute(""" CREATE TABLE IF NOT EXISTS googlemapmacro ( id char(32) Unique, lon decimal(10,6), lat decimal(10,6), acc decimal(2,0) );""") if commit: db.commit() return def environment_created(self): self._create_db_table() return def environment_needs_upgrade(self, db): if not self.geocoding_server: return False cursor = db.cursor() try: cursor.execute("SELECT count(*) FROM googlemapmacro;") cursor.fetchone() except: return True return False def upgrade_environment(self, db): self._create_db_table(db, False) return # ITemplateProvider#get_htdocs_dirs def get_htdocs_dirs(self): from pkg_resources import resource_filename return [('googlemap', resource_filename(__name__, 'htdocs'))] # ITemplateProvider#get_templates_dirs def get_templates_dirs(self): return [] # IRequestFilter#pre_process_request def pre_process_request(self, req, handler): return handler # IRequestFilter#post_process_request def post_process_request(self, req, template, data, content_type): # Add Google Map API key using a link tag: if self.api_key: add_link(req, rel='google-key', href='', title=self.api_key, classname='google-key') add_stylesheet(req, 'googlemap/tracgooglemap.css') add_script(req, 'googlemap/tracgooglemap.js') return (template, data, content_type) def _strip(self, arg): """Strips spaces and a single pair of double quotes as long there are no unescaped double quotes in the middle. """ arg = unicode(arg).strip() if len(arg) < 2: return arg if arg.startswith('"') and arg.endswith('"') \ and not _reDBLQUOTE.match(arg[1:-1]): arg = arg[1:-1] return arg def _format_address(self, address): address = self._strip(address).replace(';', ',') address = _reWHITESPACES.sub(' ', address) address = _reCOMMA.sub(', ', address) return address def _get_coords(self, address): m = md5() m.update(address) hash = m.hexdigest() db = self.env.get_db_cnx() cursor = db.cursor() cursor.execute( "SELECT lon,lat,acc FROM googlemapmacro WHERE id='%s';" % hash) for row in cursor: if len(row) == 3: self.env.log.debug("Reusing coordinates from database") return (str(row[0]), str(row[1]), str(row[2])) response = None url = r'http://maps.google.com/maps/geo?output=csv&q=' + quote_plus( address) try: response = urlopen(url).read() except: raise TracError( "Google Maps could not be contacted to resolve address!") self.env.log.debug("Google geocoding response: '%s'" % response) resp = response.split(',') if len(resp) != 4 or not resp[0] == "200": raise TracError( "Given address '%s' couldn't be resolved by Google Maps!" 
% address) acc, lon, lat = resp[1:4] cursor.execute( "INSERT INTO googlemapmacro (id, lon, lat, acc) VALUES ('%s', %s, %s, %s);" % (hash, lon, lat, acc)) db.commit() self.env.log.debug("Saving coordinates to database") return (lon, lat, acc) def expand_macro(self, formatter, name, content, args=None): content = content.replace('\n', ',') largs, kwargs = parse_args(content, multi=['marker', 'to']) if len(largs) > 0: arg = unicode(largs[0]) if _reCOORDS.match(arg): if not 'center' in kwargs: kwargs['center'] = arg else: if not 'address' in kwargs: kwargs['address'] = arg if 'from' in kwargs and not 'address' in kwargs and not 'center' in kwargs: arg = unicode(kwargs['from']) if _reCOORDS.match(arg): if not 'center' in kwargs: kwargs['center'] = arg else: if not 'address' in kwargs: kwargs['address'] = arg # Check if Google API key is set (if not the Google Map script file # wasn't inserted by `post_process_request` and the map wont load) if not self.api_key: raise TracError( "No Google Maps API key given! Tell your web admin to get one at http://code.google.com/apis/maps/signup.html .\n" ) # Use default values if needed zoom = None size = None try: if 'zoom' in kwargs: zoom = unicode(int(kwargs['zoom'])) else: zoom = unicode(self.default_zoom) except: raise TracError( "Invalid value for zoom given! Please provide an integer from 0 to 19." ) if 'size' in kwargs: size = unicode(kwargs['size']) else: size = unicode(self.default_size) # Set target for hyperlinked markers target = "" if not 'target' in kwargs: kwargs['target'] = self.default_target if kwargs['target'] in ('new', 'newwindow', '_blank'): target = "newwindow" # Get height and width width = None height = None try: if size.find(':') != -1: (width, height) = size.lower().split(':') # Check for correct units: if not width[-2:] in _css_units \ or not height[-2:] in _css_units: raise TracError("Wrong unit(s)!") # The rest must be a number: float(width[:-2]) float(height[:-2]) else: (width, height) = size.lower().split('x') width = str(int(width)) + "px" height = str(int(height)) + "px" except: raise TracError("Invalid value for size given! Please provide " "{width}x{height} in pixels (without unit) or " "{width}{unit}:{height}{unit} in CSS units (%s)." 
\ % ', '.join(_css_units) ) # Correct separator for 'center' argument because comma isn't allowed in # macro arguments center = "" if 'center' in kwargs: center = unicode(kwargs['center']).replace(':', ',').strip(' "\'') if not _reCOORDS.match(center): raise TracError("Invalid center coordinates given!") # Format address address = "" if 'address' in kwargs: address = self._format_address(kwargs['address']) if self.geocoding_server: coord = self._get_coords(address) center = ",".join(coord[0:2]) address = "" if not 'zoom' in kwargs: zoom = _accuracy_to_zoom[int(coord[2])] # Internal formatting functions: def gtyp(stype): return "G_%s_MAP" % str(stype) def gcontrol(control): return "map.addControl(new G%sControl());\n" % str(control) def gmarker(lat, lng, letter='', link='', title=''): if not title: title = link if not letter: letter = '' else: letter = str(letter).upper() if str(letter).startswith('.'): letter = '' else: letter = letter[0] return "SetMarkerByCoords(map,%s,%s,'%s','%s','%s', '%s');\n" \ % (str(lat),str(lng),letter,str(link),str(title),str(target)) def gmarkeraddr(address, letter='', link='', title=''): if not title: title = link if not letter: letter = '' else: letter = str(letter).upper() if str(letter).startswith('.'): letter = '' else: letter = letter[0] return "SetMarkerByAddress(map,'%s','%s','%s','%s','%s',geocoder);\n" \ % (str(address),letter,str(link),str(title),str(target)) # Set initial map type type = 'NORMAL' types = [] types_str = None if 'types' in kwargs: types = unicode(kwargs['types']).upper().split(':') types_str = ','.join(map(gtyp, types)) type = types[0] if 'type' in kwargs: type = unicode(kwargs['type']).upper() if 'types' in kwargs and not type in types: types_str += ',' + type types.insert(0, type) elif not type in _supported_map_types: type = 'NORMAL' # if types aren't set and a type is set which is supported # but not a default type: if not 'types' in kwargs and type in _supported_map_types and not type in _default_map_types: # enable type (and all default types): types = _default_map_types + [type] types_str = ','.join(map(gtyp, types)) if types_str: types_str = '[' + types_str + ']' else: types_str = 'G_DEFAULT_MAP_TYPES' # Produce controls control_str = "" controls = ['LargeMap', 'MapType'] if 'controls' in kwargs: controls = [] for control in unicode(kwargs['controls']).upper().split(':'): if control in _supported_controls: controls.append(_supported_controls[control]) controls_str = ''.join(map(gcontrol, controls)) # Produce markers markers_str = "" if not 'marker' in kwargs: kwargs['marker'] = [] if 'markers' in kwargs: kwargs['marker'].extend( parse_args(unicode(kwargs['markers']), delim='|', listonly=True)) if kwargs['marker']: markers = [] for marker in kwargs['marker']: location, letter, link, title = parse_args(marker, delim=';', listonly=True, minlen=4)[:4] if not title: title = link # Convert wiki to HTML link: link = extract_link(self.env, formatter.context, link) if isinstance(link, Element): link = link.attrib.get('href') else: link = '' location = self._format_address(location) if _reCOORDS.match(location): coord = location.split(':') markers.append( gmarker(coord[0], coord[1], letter, link, title)) else: if self.geocoding_server: coord = [] if location == 'center': if address: coord = self._get_coords(address) else: coord = center.split(',') else: coord = self._get_coords(location) markers.append( gmarker(coord[0], coord[1], letter, link, title)) else: if location == 'center': if address: markers.append( gmarkeraddr(address, 
letter, link, title)) else: coord = center.split(',') markers.append( gmarker(coord[0], coord[1], letter, link, title)) else: markers.append( gmarkeraddr(location, letter, link, title)) markers_str = ''.join(markers) # Get macro count from request object req = formatter.req count = getattr(req, COUNT, 0) id = 'tracgooglemap-%s' % count setattr(req, COUNT, count + 1) # Canvas for this map mapdiv = tag.div("Google Map is loading ... (JavaScript enabled?)", id=id, style="width: %s; height: %s;" % (width, height), class_="tracgooglemap") if 'from' in kwargs and 'to' in kwargs: directions = "from: %s to: %s" % (kwargs['from'], ' to: '.join( list(kwargs['to']))) mapnmore = tag.table(tag.tr( tag.td( tag.div("", class_='tracgooglemap-directions', id='tracgooglemap-directions-%s' % count), style="vertical-align:top;", ), tag.td( mapdiv, style="vertical-align:top;", )), class_='tracgooglemaps') else: directions = "" mapnmore = mapdiv # put everything in a tidy div html = tag.div( [ # Initialization script for this map tag.script(Markup( _javascript_code % { 'id': id, 'center': center, 'zoom': zoom, 'address': address, 'type': type, 'width': width, 'height': height, 'types_str': types_str, 'controls_str': controls_str, 'markers_str': markers_str, 'directions': directions, }), type="text/javascript"), mapnmore ], class_="tracgooglemap-parent") return html
class TicketModule(TicketModuleBase): implements(INavigationContributor, IRequestHandler, ITimelineEventProvider, IContentConverter) default_version = Option('ticket', 'default_version', '', """Default version for newly created tickets.""") default_type = Option( 'ticket', 'default_type', 'defect', """Default type for newly created tickets (''since 0.9'').""") default_priority = Option( 'ticket', 'default_priority', 'major', """Default priority for newly created tickets.""") default_milestone = Option( 'ticket', 'default_milestone', '', """Default milestone for newly created tickets.""") default_component = Option( 'ticket', 'default_component', '', """Default component for newly created tickets""") timeline_details = BoolOption( 'timeline', 'ticket_show_details', 'false', """Enable the display of all ticket changes in the timeline (''since 0.9'').""") # IContentConverter methods def get_supported_conversions(self): yield ('csv', 'Comma-delimited Text', 'csv', 'trac.ticket.Ticket', 'text/csv', 8) yield ('tab', 'Tab-delimited Text', 'tsv', 'trac.ticket.Ticket', 'text/tab-separated-values', 8) yield ('rss', 'RSS Feed', 'xml', 'trac.ticket.Ticket', 'application/rss+xml', 8) def convert_content(self, req, mimetype, ticket, key): if key == 'csv': return self.export_csv(ticket, mimetype='text/csv') elif key == 'tab': return self.export_csv(ticket, sep='\t', mimetype='text/tab-separated-values') elif key == 'rss': return self.export_rss(req, ticket) # INavigationContributor methods def get_active_navigation_item(self, req): return 'tickets' def get_navigation_items(self, req): return [] # IRequestHandler methods def match_request(self, req): match = re.match(r'/ticket/([0-9]+)$', req.path_info) if match: req.args['id'] = match.group(1) return True def process_request(self, req): req.perm.assert_permission('TICKET_VIEW') action = req.args.get('action', 'view') db = self.env.get_db_cnx() id = int(req.args.get('id')) ticket = Ticket(self.env, id, db=db) if req.method == 'POST': if not req.args.has_key('preview'): self._do_save(req, db, ticket) else: # Use user supplied values ticket.populate(req.args) self._validate_ticket(req, ticket) req.hdf['ticket.action'] = action req.hdf['ticket.ts'] = req.args.get('ts') req.hdf['ticket.reassign_owner'] = req.args.get('reassign_owner') \ or req.authname req.hdf['ticket.resolve_resolution'] = req.args.get( 'resolve_resolution') comment = req.args.get('comment') if comment: req.hdf['ticket.comment'] = comment # Wiki format a preview of comment req.hdf['ticket.comment_preview'] = wiki_to_html( comment, self.env, req, db) else: req.hdf['ticket.reassign_owner'] = req.authname # Store a timestamp in order to detect "mid air collisions" req.hdf['ticket.ts'] = ticket.time_changed self._insert_ticket_data(req, db, ticket, get_reporter_id(req, 'author')) mime = Mimeview(self.env) format = req.args.get('format') if format: mime.send_converted(req, 'trac.ticket.Ticket', ticket, format, 'ticket_%d' % ticket.id) # If the ticket is being shown in the context of a query, add # links to help navigate in the query result set if 'query_tickets' in req.session: tickets = req.session['query_tickets'].split() if str(id) in tickets: idx = tickets.index(str(ticket.id)) if idx > 0: add_link(req, 'first', req.href.ticket(tickets[0]), 'Ticket #%s' % tickets[0]) add_link(req, 'prev', req.href.ticket(tickets[idx - 1]), 'Ticket #%s' % tickets[idx - 1]) if idx < len(tickets) - 1: add_link(req, 'next', req.href.ticket(tickets[idx + 1]), 'Ticket #%s' % tickets[idx + 1]) add_link(req, 'last', 
req.href.ticket(tickets[-1]), 'Ticket #%s' % tickets[-1]) add_link(req, 'up', req.session['query_href']) add_stylesheet(req, 'common/css/ticket.css') # Add registered converters for conversion in mime.get_supported_conversions('trac.ticket.Ticket'): conversion_href = req.href.ticket(ticket.id, format=conversion[0]) add_link(req, 'alternate', conversion_href, conversion[1], conversion[3]) return 'ticket.cs', None # ITimelineEventProvider methods def get_timeline_filters(self, req): if req.perm.has_permission('TICKET_VIEW'): yield ('ticket', 'Ticket changes') if self.timeline_details: yield ('ticket_details', 'Ticket details', False) def get_timeline_events(self, req, start, stop, filters): format = req.args.get('format') status_map = { 'new': ('newticket', 'created'), 'reopened': ('newticket', 'reopened'), 'closed': ('closedticket', 'closed'), 'edit': ('editedticket', 'updated') } href = format == 'rss' and req.abs_href or req.href def produce( (id, t, author, type, summary), status, fields, comment, cid): if status == 'edit': if 'ticket_details' in filters: info = '' if len(fields) > 0: info = ', '.join(['<i>%s</i>' % f for f in \ fields.keys()]) + ' changed<br />' else: return None elif 'ticket' in filters: if status == 'closed' and fields.has_key('resolution'): info = fields['resolution'] if info and comment: info = '%s: ' % info else: info = '' else: return None kind, verb = status_map[status] if format == 'rss': title = 'Ticket #%s (%s %s): %s' % \ (id, type.lower(), verb, summary) else: title = Markup('Ticket <em title="%s">#%s</em> (%s) %s by %s', summary, id, type, verb, author) ticket_href = href.ticket(id) if cid: ticket_href += '#comment:' + cid if status == 'new': message = summary else: message = Markup(info) if comment: if format == 'rss': message += wiki_to_html(comment, self.env, req, db, absurls=True) else: message += wiki_to_oneliner(comment, self.env, db, shorten=True) return kind, ticket_href, title, t, author, message # Ticket changes if 'ticket' in filters or 'ticket_details' in filters: db = self.env.get_db_cnx() cursor = db.cursor() cursor.execute("SELECT t.id,tc.time,tc.author,t.type,t.summary, " " tc.field,tc.oldvalue,tc.newvalue " " FROM ticket_change tc " " INNER JOIN ticket t ON t.id = tc.ticket " " AND tc.time>=%s AND tc.time<=%s " "ORDER BY tc.time" % (start, stop)) previous_update = None for id, t, author, type, summary, field, oldvalue, newvalue in cursor: if not previous_update or (id, t, author) != previous_update[:3]: if previous_update: ev = produce(previous_update, status, fields, comment, cid) if ev: yield ev status, fields, comment, cid = 'edit', {}, '', None previous_update = (id, t, author, type, summary) if field == 'comment': comment = newvalue cid = oldvalue and oldvalue.split('.')[-1] elif field == 'status' and newvalue in ('reopened', 'closed'): status = newvalue else: fields[field] = newvalue if previous_update: ev = produce(previous_update, status, fields, comment, cid) if ev: yield ev # New tickets if 'ticket' in filters: cursor.execute( "SELECT id,time,reporter,type,summary" " FROM ticket WHERE time>=%s AND time<=%s", (start, stop)) for row in cursor: yield produce(row, 'new', {}, None, None) # Attachments if 'ticket_details' in filters: def display(id): return Markup('ticket %s', html.EM('#', id)) att = AttachmentModule(self.env) for event in att.get_timeline_events(req, db, 'ticket', format, start, stop, display): yield event
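# Hedged sketch: the first/prev/next/last navigation links computed above from
# the space-separated id list kept in req.session['query_tickets']; the helper
# name and the None placeholders are my own.
def _query_neighbors(ticket_id, query_tickets):
    tickets = query_tickets.split()
    idx = tickets.index(str(ticket_id))
    return {
        'first': tickets[0] if idx > 0 else None,
        'prev': tickets[idx - 1] if idx > 0 else None,
        'next': tickets[idx + 1] if idx < len(tickets) - 1 else None,
        'last': tickets[-1] if idx < len(tickets) - 1 else None,
    }

# _query_neighbors(12, '10 11 12 13')
# -> {'first': '10', 'prev': '11', 'next': '13', 'last': '13'}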
class TracBuildBotWatcher(Component): implements(ITimelineEventProvider, IRequestHandler, ITemplateProvider, INavigationContributor) buildbot_url = Option( 'bbwatcher', 'buildmaster', '127.0.0.1:8010', 'The location of the BuildBot webserver. Do not include the /xmlrpc') BUILDER_REGEX = r'/buildbot/builder(?:/(.+))?$' BUILDER_RE = re.compile(BUILDER_REGEX) # Template Provider def get_htdocs_dirs(self): return [] def get_templates_dirs(self): return [pkg_resources.resource_filename('bbwatcher', 'templates')] # Nav Contributor def get_active_navigation_item(self, req): return 'buildbot' def get_navigation_items(self, req): yield 'mainnav', 'buildbot', tag.a('BuildBot', href=req.href.buildbot()) # Timeline Methods def get_timeline_filters(self, req): yield ('bbwatcher', 'Builds', False) def get_timeline_events(self, req, start, stop, filters): try: master = BuildBotSystem(self.buildbot_url) except Exception as e: print('Error hitting BuildBot', e) return # This was a comprehension: the loop is clearer for build in master.getAllBuildsInInterval(to_timestamp(start), to_timestamp(stop)): # BuildBot builds are reported as # (builder_name, num, end, branch, rev, results, text) print('Reporting build', build) yield ('build', to_datetime(build[2]), '', build) def render_timeline_event(self, context, field, event): builder_name, num, end, branch, rev, results, text = event[3] if field == 'url': return None elif field == 'title': return tag( 'Build ', tag.a('#%s' % num, href=context.href.buildbot('builder/%s/%s' % (builder_name, num))), ' of ', builder_name, ' ', results == 'success' and tag.span('passed', style="color: #080") or tag.span('failed', style="color: #f00")) elif field == 'description': return format_to_oneliner( self.env, context, 'Built from %s' % (rev and 'r%s sources' % rev or 'local changes (see TryBuildUsage)')) # RequestHandler def _handle_builder(self, req): m = self.BUILDER_RE.match(req.path_info) try: builder = m.group(1) or None except Exception as e: builder = None master = BuildBotSystem(self.buildbot_url) if builder is None: data = {'names': master.getAllBuilders()} return 'bbw_allbuilders.html', data, 'text/html' else: class Foo: pass b = Foo() b.name = str(builder) b.current = 'CURRENT-TEXT' b.recent = [] b.slaves = [] data = {'builder': b} try: master = BuildBotSystem(self.buildbot_url) data = {'builder': master.getBuilder(builder)} except Exception as e: print('Error fetching builder stats', e) data['context'] = Context.from_request(req, ('buildbot', builder)) return 'bbw_builder.html', data, 'text/html' def match_request(self, req): return req.path_info.startswith('/buildbot') and 1 or 0 def process_request(self, req): if req.path_info.startswith('/buildbot/builder'): return self._handle_builder(req) return 'bbw_welcome.html', {'url': self.buildbot_url}, 'text/html'
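# Hedged sketch: the layout of the build tuples carried in the timeline events
# above; all values here are made up, only the field order mirrors the
# unpacking done in render_timeline_event().
def _example_build_event():
    build = ('runtests-linux', 124, 1514764800, 'trunk', '4801', 'success', '')
    builder_name, num, end, branch, rev, results, text = build
    # get_timeline_events() wraps it as ('build', to_datetime(end), '', build)
    return build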
class AMBProjectMake(Component): """ AMB Project Manager - Make Project. Provides the administrative pages for creating a new project. """ implements(IAdminPanelProvider) sql_defaults = Option('projectmanager', 'sql_defaults', doc='SQL to be run on every project after creation') groups = ListOption('projectmanager', 'groups', doc='Comma separated list of available project groups') tracadmin_command = Option('projectmanager', 'tracadmin_command', 'trac-admin.exe', doc='Full path to the trac-admin command') svnadmin_command = Option('projectmanager', 'svnadmin_command', 'svnadmin.exe', doc='Full path to the svnadmin command') repos_dir = Option('projectmanager', 'repos_dir', doc='Path to the base of the repositories') environments_dir = Option('projectmanager', 'environments_dir', doc='Path to the base of the environments') ################################## ## IAdminPanelProvider def get_admin_panels(self, req): """ Adds new Administrative pages for Project Properties """ if req.perm.has_permission('TRAC_ADMIN'): yield ('projectmanager', 'Project Management', 'newproject', 'New Project') def render_admin_panel(self, req, cat, page, component): """ Render the project properties Admin page """ req.perm.assert_permission('TRAC_ADMIN') #Check if DB needs upgrading check_upgrade(self.env) if req.method == 'POST': if req.args.has_key('make'): data = self._do_createproject(req) if req.args.has_key('back'): req.redirect(req.href.admin(cat, page)) return else: data = self._render_view(req) add_stylesheet(req, 'tracprojectmanager/css/projectproperties.css') return 'newproject.html', data ################################## def _do_createproject(self, req): """ Update project properties from the Admin form """ data = {} data['project_created'] = True group = req.args['group'] templates_dir = self.env.config.get('inherit', 'templates_dir') inherit_file = self.env.config.get('inherit', 'file') #Try to get custom settings for this project group templates_dir = self.env.config.get('projectmanager', 'groups.%s.templates_dir' % group, templates_dir) inherit_file = self.env.config.get('projectmanager', 'groups.%s.inherit_file' % group, inherit_file) group_dir = self.env.config.get('projectmanager', 'groups.%s.dirname' % group, group) sql_defaults = self.env.config.get('projectmanager', 'groups.%s.sql_defaults' % group, self.sql_defaults) assert self.repos_dir, "repos_dir not defined in .ini" assert self.environments_dir, "environments_dir not defined in .ini" if req.args.has_key('makesvn'): cmd = "%s create %s" % (self.svnadmin_command,os.path.join(self.repos_dir, group_dir, req.args['short_name'])) p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE) (stdout, stderr) = p.communicate() if p.returncode != 0: data['svn_error'] = True data['svn_created'] = True data['svn_output'] = stdout data['svn_errors'] = stderr if req.args.has_key('maketrac'): repos_dir = os.path.join(self.repos_dir, group_dir, req.args['short_name']) env_dir = os.path.join(self.environments_dir, group_dir, req.args['short_name']) cmd = '%s "%s" initenv "%s" sqlite:%s svn "%s" --inherit="%s"' % ( self.tracadmin_command, env_dir, req.args['full_name'], os.path.join('db','trac.db'), repos_dir, inherit_file ) #stdin, stdout, stderr = os.popen3(command) p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE) (stdout, stderr) = p.communicate() data['trac_error'] = p.returncode != 0 try: template = os.path.join(env_dir, 'templates', 'site.html') stdout = stdout + 'Removing template: %s' % template os.remove(template) except: 
                pass
            data['trac_created'] = True
            data['trac_output'] = stdout
            data['trac_errors'] = stderr
            try:
                env = open_environment(env_dir, use_cache=False)
                db = env.get_db_cnx()
                cursor = db.cursor()
                output = ""
                output += "\n\nSQL Defaults, executing statements\n"
                output += "==================================\n"
                for statement in sql_defaults.splitlines():
                    output += "Executing: %s\n" % statement
                    cursor.execute(statement)
                db.commit()
                data['trac_output'] += output
                env.shutdown()
            except:
                data['trac_output'] += "\n\nError running SQL Defaults " \
                                       "statements, skipping\n"
        return data

    def _render_view(self, req):
        """ Build template data for the Admin page """
        groups = []
        for group in self.groups:
            description = self.env.config.get(
                'projectmanager', 'groups.%s.description' % group, group)
            groups.append(dict(name=group, label=description))
        return {'groups': groups}
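The `_do_createproject` method above resolves each setting through a two-step fallback: a `groups.<group>.*` key in `[projectmanager]` wins, otherwise the global value (for example from `[inherit]`) is used. A minimal sketch of that lookup factored into a standalone helper follows; the helper name `group_option` and the trac.ini path are illustrative only, not part of the plugin.

from trac.config import Configuration

def group_option(config, group, key, fallback=''):
    """Return [projectmanager] groups.<group>.<key>, else `fallback`."""
    return config.get('projectmanager', 'groups.%s.%s' % (group, key),
                      fallback)

# Usage with the same trac.ini layout the plugin reads (path is hypothetical):
config = Configuration('/path/to/trac.ini')
templates_dir = group_option(config, 'internal', 'templates_dir',
                             config.get('inherit', 'templates_dir'))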
class MilestoneModule(Component): """View and edit individual milestones.""" implements(INavigationContributor, IPermissionRequestor, IRequestHandler, IResourceManager, ISearchSource, ITimelineEventProvider, IWikiSyntaxProvider) realm = 'milestone' stats_provider = ExtensionOption('milestone', 'stats_provider', ITicketGroupStatsProvider, 'DefaultTicketGroupStatsProvider', """Name of the component implementing `ITicketGroupStatsProvider`, which is used to collect statistics on groups of tickets for display in the milestone views.""") default_retarget_to = Option('milestone', 'default_retarget_to', doc="""Default milestone to which tickets are retargeted when closing or deleting a milestone. (''since 1.1.2'')""") default_group_by = Option('milestone', 'default_group_by', 'component', """Default field to use for grouping tickets in the grouped progress bar. (''since 1.2'')""") # INavigationContributor methods def get_active_navigation_item(self, req): return 'roadmap' def get_navigation_items(self, req): return [] # IPermissionRequestor methods def get_permission_actions(self): actions = ['MILESTONE_CREATE', 'MILESTONE_DELETE', 'MILESTONE_MODIFY', 'MILESTONE_VIEW'] return actions + [('MILESTONE_ADMIN', actions)] # ITimelineEventProvider methods def get_timeline_filters(self, req): if 'MILESTONE_VIEW' in req.perm: yield ('milestone', _("Milestones completed")) def get_timeline_events(self, req, start, stop, filters): if 'milestone' in filters: milestone_realm = Resource(self.realm) for name, due, completed, description \ in MilestoneCache(self.env).milestones.itervalues(): if completed and start <= completed <= stop: # TODO: creation and (later) modifications should also be # reported milestone = milestone_realm(id=name) if 'MILESTONE_VIEW' in req.perm(milestone): yield ('milestone', completed, '', # FIXME: author? 
(milestone, description)) # Attachments for event in AttachmentModule(self.env).get_timeline_events( req, milestone_realm, start, stop): yield event def render_timeline_event(self, context, field, event): milestone, description = event[3] if field == 'url': return context.href.milestone(milestone.id) elif field == 'title': return tag_("Milestone %(name)s completed", name=tag.em(milestone.id)) elif field == 'description': child_resource = context.child(resource=milestone) return format_to(self.env, None, child_resource, description) # IRequestHandler methods def match_request(self, req): match = re.match(r'/milestone(?:/(.+))?$', req.path_info) if match: if match.group(1): req.args['id'] = match.group(1) return True def process_request(self, req): milestone_id = req.args.get('id') action = req.args.get('action', 'view') if not milestone_id and action == 'view': req.redirect(req.href.roadmap()) req.perm(self.realm, milestone_id).require('MILESTONE_VIEW') add_link(req, 'up', req.href.roadmap(), _("Roadmap")) try: milestone = Milestone(self.env, milestone_id) except ResourceNotFound: if 'MILESTONE_CREATE' not in req.perm(self.realm, milestone_id): raise milestone = Milestone(self.env) milestone.name = milestone_id action = 'edit' # rather than 'new', so it works for POST/save if req.method == 'POST': if 'cancel' in req.args: if milestone.exists: req.redirect(req.href.milestone(milestone.name)) else: req.redirect(req.href.roadmap()) elif action == 'edit': return self._do_save(req, milestone) elif action == 'delete': self._do_delete(req, milestone) else: raise HTTPBadRequest(_("Invalid request arguments.")) elif action in ('new', 'edit'): return self._render_editor(req, milestone) elif action == 'delete': return self._render_confirm(req, milestone) if not milestone.name: req.redirect(req.href.roadmap()) return self._render_view(req, milestone) # Public methods def get_default_due(self, req): """Returns a `datetime` object representing the default due date in the user's timezone. The default due time is 18:00 in the user's time zone. """ now = datetime_now(req.tz) default_due = datetime(now.year, now.month, now.day, 18) if now.hour > 18: default_due += timedelta(days=1) return to_datetime(default_due, req.tz) def save_milestone(self, req, milestone): # Instead of raising one single error, check all the constraints # and let the user fix them by going back to edit mode and showing # the warnings warnings = [] def warn(msg): add_warning(req, msg) warnings.append(msg) milestone.description = req.args.get('description', '') if 'due' in req.args: duedate = req.args.get('duedate') milestone.due = user_time(req, parse_date, duedate, hint='datetime') \ if duedate else None else: milestone.due = None # -- check completed date if 'completed' in req.args: completed = req.args.get('completeddate', '') completed = user_time(req, parse_date, completed, hint='datetime') if completed else None if completed and completed > datetime_now(utc): warn(_("Completion date may not be in the future")) else: completed = None milestone.completed = completed # -- check the name # If the name has changed, check that the milestone doesn't already # exist # FIXME: the whole .exists business needs to be clarified # (#4130) and should behave like a WikiPage does in # this respect. 
new_name = req.args.get('name') try: new_milestone = Milestone(self.env, new_name) except ResourceNotFound: milestone.name = new_name else: if new_milestone.name != milestone.name: if new_milestone.name: warn(_('Milestone "%(name)s" already exists, please ' 'choose another name.', name=new_milestone.name)) else: warn(_("You must provide a name for the milestone.")) if warnings: return False # -- actually save changes if milestone.exists: milestone.update(author=req.authname) if completed and 'retarget' in req.args: comment = req.args.get('comment', '') retarget_to = req.args.get('target') or None retargeted_tickets = \ milestone.move_tickets(retarget_to, req.authname, comment, exclude_closed=True) add_notice(req, _('The open tickets associated with ' 'milestone "%(name)s" have been retargeted ' 'to milestone "%(retarget)s".', name=milestone.name, retarget=retarget_to)) new_values = {'milestone': retarget_to} comment = comment or \ _("Open tickets retargeted after milestone closed") event = BatchTicketChangeEvent(retargeted_tickets, None, req.authname, comment, new_values, None) try: NotificationSystem(self.env).notify(event) except Exception as e: self.log.error("Failure sending notification on ticket " "batch change: %s", exception_to_unicode(e)) add_warning(req, tag_("The changes have been saved, but " "an error occurred while sending " "notifications: %(message)s", message=to_unicode(e))) add_notice(req, _("Your changes have been saved.")) else: milestone.insert() add_notice(req, _('The milestone "%(name)s" has been added.', name=milestone.name)) return True # Internal methods _default_retarget_to = default_retarget_to @property def default_retarget_to(self): if self._default_retarget_to and \ not any(self._default_retarget_to == m.name for m in Milestone.select(self.env)): self.log.warning('Milestone "%s" does not exist. 
Update the ' '"default_retarget_to" option in the ' '[milestone] section of trac.ini', self._default_retarget_to) return self._default_retarget_to def _do_delete(self, req, milestone): req.perm(milestone.resource).require('MILESTONE_DELETE') retarget_to = req.args.get('target') or None # Don't translate ticket comment (comment:40:ticket:5658) retargeted_tickets = \ milestone.move_tickets(retarget_to, req.authname, "Ticket retargeted after milestone deleted") milestone.delete() add_notice(req, _('The milestone "%(name)s" has been deleted.', name=milestone.name)) if retargeted_tickets: add_notice(req, _('The tickets associated with milestone ' '"%(name)s" have been retargeted to milestone ' '"%(retarget)s".', name=milestone.name, retarget=retarget_to)) new_values = {'milestone': retarget_to} comment = _("Tickets retargeted after milestone deleted") event = BatchTicketChangeEvent(retargeted_tickets, None, req.authname, comment, new_values, None) try: NotificationSystem(self.env).notify(event) except Exception as e: self.log.error("Failure sending notification on ticket batch " "change: %s", exception_to_unicode(e)) add_warning(req, tag_("The changes have been saved, but an " "error occurred while sending " "notifications: %(message)s", message=to_unicode(e))) req.redirect(req.href.roadmap()) def _do_save(self, req, milestone): if milestone.exists: req.perm(milestone.resource).require('MILESTONE_MODIFY') else: req.perm(milestone.resource).require('MILESTONE_CREATE') if self.save_milestone(req, milestone): req.redirect(req.href.milestone(milestone.name)) return self._render_editor(req, milestone) def _render_confirm(self, req, milestone): req.perm(milestone.resource).require('MILESTONE_DELETE') milestones = [m for m in Milestone.select(self.env) if m.name != milestone.name and 'MILESTONE_VIEW' in req.perm(m.resource)] attachments = Attachment.select(self.env, self.realm, milestone.name) data = { 'milestone': milestone, 'milestone_groups': group_milestones(milestones, 'TICKET_ADMIN' in req.perm), 'num_tickets': get_num_tickets_for_milestone(self.env, milestone), 'retarget_to': self.default_retarget_to, 'attachments': list(attachments) } add_stylesheet(req, 'common/css/roadmap.css') return 'milestone_delete.html', data def _render_editor(self, req, milestone): data = { 'milestone': milestone, 'datetime_hint': get_datetime_format_hint(req.lc_time), 'default_due': self.get_default_due(req), 'milestone_groups': [], } if milestone.exists: req.perm(milestone.resource).require('MILESTONE_MODIFY') milestones = [m for m in Milestone.select(self.env) if m.name != milestone.name and 'MILESTONE_VIEW' in req.perm(m.resource)] data['milestone_groups'] = \ group_milestones(milestones, 'TICKET_ADMIN' in req.perm) data['num_open_tickets'] = \ get_num_tickets_for_milestone(self.env, milestone, exclude_closed=True) data['retarget_to'] = self.default_retarget_to else: req.perm(milestone.resource).require('MILESTONE_CREATE') if milestone.name: add_notice(req, _("Milestone %(name)s does not exist. 
You " "can create it here.", name=milestone.name)) chrome = Chrome(self.env) chrome.add_jquery_ui(req) chrome.add_wiki_toolbars(req) add_stylesheet(req, 'common/css/roadmap.css') return 'milestone_edit.html', data def _render_view(self, req, milestone): milestone_groups = [] available_groups = [] default_group_by_available = False ticket_fields = TicketSystem(self.env).get_ticket_fields() # collect fields that can be used for grouping for field in ticket_fields: if field['type'] == 'select' and field['name'] != 'milestone' \ or field['name'] in ('owner', 'reporter'): available_groups.append({'name': field['name'], 'label': field['label']}) if field['name'] == self.default_group_by: default_group_by_available = True # determine the field currently used for grouping by = None if default_group_by_available: by = self.default_group_by elif available_groups: by = available_groups[0]['name'] by = req.args.get('by', by) tickets = get_tickets_for_milestone(self.env, milestone=milestone.name, field=by) tickets = apply_ticket_permissions(self.env, req, tickets) stat = get_ticket_stats(self.stats_provider, tickets) context = web_context(req, milestone.resource) data = { 'context': context, 'milestone': milestone, 'attachments': AttachmentModule(self.env).attachment_data(context), 'available_groups': available_groups, 'grouped_by': by, 'groups': milestone_groups } data.update(milestone_stats_data(self.env, req, stat, milestone.name)) if by: def per_group_stats_data(gstat, group_name): return milestone_stats_data(self.env, req, gstat, milestone.name, by, group_name) milestone_groups.extend( grouped_stats_data(self.env, self.stats_provider, tickets, by, per_group_stats_data)) add_stylesheet(req, 'common/css/roadmap.css') def add_milestone_link(rel, milestone): href = req.href.milestone(milestone.name, by=req.args.get('by')) add_link(req, rel, href, _('Milestone "%(name)s"', name=milestone.name)) milestones = [m for m in Milestone.select(self.env) if 'MILESTONE_VIEW' in req.perm(m.resource)] idx = [i for i, m in enumerate(milestones) if m.name == milestone.name] if idx: idx = idx[0] if idx > 0: add_milestone_link('first', milestones[0]) add_milestone_link('prev', milestones[idx - 1]) if idx < len(milestones) - 1: add_milestone_link('next', milestones[idx + 1]) add_milestone_link('last', milestones[-1]) prevnext_nav(req, _("Previous Milestone"), _("Next Milestone"), _("Back to Roadmap")) return 'milestone_view.html', data # IWikiSyntaxProvider methods def get_wiki_syntax(self): return [] def get_link_resolvers(self): yield ('milestone', self._format_link) def _format_link(self, formatter, ns, name, label): name, query, fragment = formatter.split_link(name) return self._render_link(formatter.context, name, label, query + fragment) def _render_link(self, context, name, label, extra=''): if not (name or extra): return tag() try: milestone = Milestone(self.env, name) except ResourceNotFound: milestone = None # Note: the above should really not be needed, `Milestone.exists` # should simply be false if the milestone doesn't exist in the db # (related to #4130) href = context.href.milestone(name) exists = milestone and milestone.exists if exists: if 'MILESTONE_VIEW' in context.perm(milestone.resource): title = None if hasattr(context, 'req'): if milestone.is_completed: title = _( "Completed %(duration)s ago (%(date)s)", duration=pretty_timedelta(milestone.completed), date=user_time(context.req, format_datetime, milestone.completed)) elif milestone.is_late: title = _("%(duration)s late (%(date)s)", 
duration=pretty_timedelta(milestone.due), date=user_time(context.req, format_datetime, milestone.due)) elif milestone.due: title = _("Due in %(duration)s (%(date)s)", duration=pretty_timedelta(milestone.due), date=user_time(context.req, format_datetime, milestone.due)) else: title = _("No date set") closed = 'closed ' if milestone.is_completed else '' return tag.a(label, class_='%smilestone' % closed, href=href + extra, title=title) elif 'MILESTONE_CREATE' in context.perm(self.realm, name): return tag.a(label, class_='missing milestone', href=href + extra, rel='nofollow') return tag.a(label, class_=classes('milestone', missing=not exists)) # IResourceManager methods def get_resource_realms(self): yield self.realm def get_resource_description(self, resource, format=None, context=None, **kwargs): desc = resource.id if format != 'compact': desc = _("Milestone %(name)s", name=resource.id) if context: return self._render_link(context, resource.id, desc) else: return desc def resource_exists(self, resource): """ >>> from trac.test import EnvironmentStub >>> env = EnvironmentStub() >>> m1 = Milestone(env) >>> m1.name = 'M1' >>> m1.insert() >>> MilestoneModule(env).resource_exists(Resource('milestone', 'M1')) True >>> MilestoneModule(env).resource_exists(Resource('milestone', 'M2')) False """ return resource.id in MilestoneCache(self.env).milestones # ISearchSource methods def get_search_filters(self, req): if 'MILESTONE_VIEW' in req.perm: yield ('milestone', _("Milestones")) def get_search_results(self, req, terms, filters): if 'milestone' not in filters: return term_regexps = search_to_regexps(terms) milestone_realm = Resource(self.realm) for name, due, completed, description \ in MilestoneCache(self.env).milestones.itervalues(): if all(r.search(description) or r.search(name) for r in term_regexps): milestone = milestone_realm(id=name) if 'MILESTONE_VIEW' in req.perm(milestone): dt = (completed if completed else due if due else datetime_now(utc)) yield (get_resource_url(self.env, milestone, req.href), get_resource_name(self.env, milestone), dt, '', shorten_result(description, terms)) # Attachments for result in AttachmentModule(self.env).get_search_results( req, milestone_realm, terms): yield result
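For reference, the value proposed by `get_default_due()` above reduces to a simple rule: 18:00 of the current day, rolled over to the next day only once the local hour is past 18. A minimal sketch with naive datetimes; the real method additionally converts `now` and the result through the request's timezone.

from datetime import datetime, timedelta

def default_due(now):
    """Proposed milestone due date for a given local time."""
    due = datetime(now.year, now.month, now.day, 18)
    if now.hour > 18:  # from 19:00 local time onwards, propose tomorrow
        due += timedelta(days=1)
    return due

# default_due(datetime(2016, 3, 1, 9, 30))  -> datetime(2016, 3, 1, 18, 0)
# default_due(datetime(2016, 3, 1, 20, 0))  -> datetime(2016, 3, 2, 18, 0)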
class TicketBudgetingView(Component): implements(ITemplateProvider, IRequestFilter, ITemplateStreamFilter, ITicketManipulator) # ITicketChangeListener _CONFIG_SECTION = 'budgeting-plugin' # these options won't be saved to trac.ini _types = Option( _CONFIG_SECTION, 'types', 'Implementation|Documentation|Specification|Test', """Types of work, which could be selected in select-box.""") Option( _CONFIG_SECTION, 'retrieve_users', "permission", 'indicates whether users should be retrieved from session or permission table; possible values: permission, session' ) Option( _CONFIG_SECTION, 'exclude_users', "'anonymous','authenticated','tracadmin'", 'list of users, which should be excluded to show in the drop-down list; should be usable as SQL-IN list' ) _type_list = None _name_list = None _name_list_str = None _budgets = None _changed_by_author = None #=============================================================================== # see trac/db_default.py, method get_reports (line 175) #=============================================================================== # BUDGET_REPORT_ALL_MINE_ID = self.BUDGET_REPORT_ALL_ID + 1 BUDGET_REPORTS = [( BUDGET_REPORT_ALL_ID, 'report_title_90', 'report_description_90', u"""SELECT t.id, t.summary, t.milestone AS __group__, '../milestone/' || t.milestone AS __grouplink__, t.owner, t.reporter, t.status, t.type, t.priority, t.component, count(b.ticket) AS Anz, sum(b.cost) AS Aufwand, sum(b.estimation) AS Schaetzung, floor(avg(b.status)) || '%' AS "Status", (CASE t.status WHEN 'closed' THEN 'color: #777; background: #ddd; border-color: #ccc;' ELSE (CASE sum(b.cost) > sum(b.estimation) WHEN true THEN 'font-weight: bold; background: orange;' END) END) AS __style__ from ticket t left join budgeting b ON b.ticket = t.id where t.milestone like (CASE $MILESTONE WHEN '''' THEN ''%'' ELSE $MILESTONE END) and (t.component like (CASE $COMPONENT WHEN '' THEN '%' ELSE $COMPONENT END) or t.component is null) and (t.owner like (CASE $OWNER WHEN '' THEN $USER ELSE $OWNER END) or t.owner is null or b.username like (CASE $OWNER WHEN '' THEN $USER ELSE $OWNER END) ) group by t.id, t.type, t.priority, t.summary, t.owner, t.reporter, t.component, t.status, t.milestone having count(b.ticket) > 0 order by t.milestone desc, t.status, t.id desc""")] #=============================================================================== # SELECT t.id, t.summary, t.milestone AS __group__, t.owner, t.reporter, t.type, t.priority, t.component, # count(b.ticket) AS Anz, sum(b.cost) AS Aufwand, sum(b.estimation) AS Schätzung, floor(avg(b.status)) AS "Status in %%", # (CASE floor(avg(b.status)) = 100 # WHEN true THEN 'font-weight: bold; color: green;' # ELSE CASE sum(b.cost) > sum(b.estimation) # WHEN true THEN 'font-weight: bold; background: orange;' # ELSE '' END END) AS __style__ # from ticket t # left join budgeting b ON b.ticket = t.id # where t.milestone like # (CASE $MILESTONE # WHEN '' THEN '%' # ELSE $MILESTONE END) and # t.component like (CASE $COMPONENT # WHEN '' THEN '%' # ELSE $COMPONENT END) and # (t.owner like (CASE $OWNER # WHEN '' THEN $USER # ELSE $OWNER END) or # b.username like (CASE $OWNER # WHEN '' THEN $USER # ELSE $OWNER END) ) # group by t.id, t.type, t.priority, t.summary, t.owner, t.reporter, t.component, t.milestone # having count(b.ticket) > 0 # order by t.milestone desc, t.id desc #=============================================================================== #Alle Tickets, in der der angemeldete Benutzer beteiligt ist. 
    # (BUDGET_REPORT_ALL_MINE_ID, '[budgeting] All tickets, I am involved',
    #  """All tickets with budget data, where logged user is involved in
    #  budget, or as reporter or owner.""",
    #  """SELECT t.id, t.summary, t.owner, t.reporter, t.type, t.priority,
    #  t.component, count(b.ticket) AS Anz, sum(b.cost) AS Aufwand,
    #  sum(b.estimation) AS Schätzung, avg(b.status) AS Status,
    #  t.milestone AS __group__
    #  from ticket t
    #  left join budgeting b ON b.ticket = t.id
    #  where b.username = $USER or t.owner = $USER or t.reporter = $USER
    #  group by t.id, t.type, t.priority, t.summary, t.owner, t.reporter,
    #  t.component, t.milestone""")

    def __init__(self):
        locale_dir = pkg_resources.resource_filename(__name__, 'locale')
        add_domain(self.env.path, locale_dir)

    def filter_stream(self, req, method, filename, stream, data):
        """ overloaded from ITemplateStreamFilter """
        if filename == 'ticket.html' and data:
            if self._check_init() == False:
                self.create_table()
                self.log.info("table successfully initialized")
            tkt = data['ticket']
            if tkt and tkt.id:
                self._load_budget(tkt.id)
            else:
                self._budgets = {}
            input_html, preview_html = self._get_ticket_html()
            if 'TICKET_MODIFY' in req.perm(tkt.resource):
                visibility = ' style="visibility:hidden"'
                if self._budgets:
                    visibility = ''
                # Load default values for Type, Estimation, Cost and State
                # from trac.ini
                def_type = self.config.get('budgeting-plugin', 'default_type')
                if not def_type:
                    # If no default type is configured, submit -1 ==> the
                    # first element of the type list will be selected
                    def_type = '-1'
                def_est = self.config.get('budgeting-plugin',
                                          'default_estimation')
                if not def_est:
                    def_est = '0.0'
                def_cost = self.config.get('budgeting-plugin', 'default_cost')
                if not def_cost:
                    def_cost = '0.0'
                def_state = self.config.get('budgeting-plugin',
                                            'default_state')
                if not def_state:
                    def_state = '0'
                fieldset_str = self._get_budget_fieldset() % (visibility,
                                                              input_html)
                html = HTML(
                    '<div style="display: none" id="selectTypes">%s</div>'
                    '<div style="display: none" id="selectNames">%s</div>'
                    '<div style="display: none" id="def_name">%s</div>'
                    '<div style="display: none" id="def_type">%s</div>'
                    '<div style="display: none" id="def_est">%s</div>'
                    '<div style="display: none" id="def_cost">%s</div>'
                    '<div style="display: none" id="def_state">%s</div>'
                    '%s' % (self._type_list, self._name_list_str,
                            req.authname, def_type, def_est, def_cost,
                            def_state, fieldset_str))
                stream |= Transformer('.//fieldset [@id="properties"]') \
                    .after(html)
            if preview_html:
                fieldset_str = self._get_budget_preview() % preview_html
                stream |= Transformer(
                    '//div [@id="content"]//div [@id="ticket"]') \
                    .after(HTML(fieldset_str))
        elif filename == 'milestone_view.html':
# print "req.args: %s " % req.args by = 'component' if 'by' in req.args: by = req.args['by'] # print "------------- link to by: %s " % req.href.query(component=by) budget_stats, stats_by = self._get_milestone_html(req, by) stats_by = "<fieldset><legend>Budget</legend><table>%s</table></fieldset>" % stats_by stream |= Transformer('//form[@id="stats"]').append(HTML(stats_by)) stream |= Transformer('//div[@class="info"]').append( HTML(budget_stats)) # print input / preview return stream def _get_budget_fieldset(self): title = _('in hours') fieldset = '<fieldset id="budget">' \ '<legend>' + _('Budget Estimation') + '</legend>' \ '<button type="button" onclick="addBudgetRow()">[+]</button> ' \ '<label>' + _('Add a new row') + '</label>' \ '<span id="hiddenbudgettable"%s>' \ '<table>' \ '<thead id="budgethead">' \ '<tr>' \ '<th>' + _('Person') + '</th>' \ '<th>' + _('Type') + '</th>' \ '<th title="' + title + '">' + _('Estimation') + '</th>' \ '<th title="' + title + '">' + _('Cost') + '</th>' \ '<th>' + _('State') + '</th>' \ '<th>' + _('Comment') + '</th>' \ '</tr>' \ '</thead>' \ '<tbody id="budget_container">%s</tbody>' \ '</table>' \ '</span>' \ '</fieldset>' return fieldset def _get_budget_preview(self): fieldset = '<div id="budgetpreview">' \ '<h2 class="foldable">' + _('Budget Estimation') + '</h2>' \ '<table class="listing">' \ '<thead>' \ '<tr>' \ '<th>' + _('Person') + '</th>' \ '<th>' + _('Type') + '</th>' \ '<th>' + _('Estimation') + '</th>' \ '<th>' + _('Cost') + '</th>' \ '<th>' + _('State') + '</th>' \ '<th>' + _('Comment') + '</th>' \ '</tr>' \ '</thead>' \ '<tbody id="previewContainer">%s' \ '</tbody>' \ '</table>' \ '</div>' return fieldset def pre_process_request(self, req, handler): """ overridden from IRequestFilter""" return handler def post_process_request(self, req, template, data, content_type): """ overridden from IRequestFilter""" if req.path_info.startswith('/newticket') or \ req.path_info.startswith('/ticket'): add_script(req, 'hw/js/budgeting.js') if not data: return template, data, content_type tkt = data['ticket'] if tkt and tkt.id and Ticket.id_is_valid( tkt.id): # ticket is ready for saving if self._changed_by_author: self._save_budget(tkt) self._budgets = None return template, data, content_type def _get_fields(self, req): budget_dict = {} budget_obj = None # searching budgetfields an send them to db for arg in req.args: list = [] list = arg.split(":") if len(list) == 3: row_no = list[1] if budget_dict.has_key(row_no): budget_obj = budget_dict[row_no] else: budget_obj = Budget() budget_dict[row_no] = budget_obj budget_obj.set(list[2], req.args.get(arg)) # New created field, should be insered if list[0] == "GSfield": budget_obj.set_as_insert() elif list[0] == "GSDBfield": budget_obj.set_as_update() elif list[0] == "DELGSDBfield": budget_obj.set_as_delete() return budget_dict def _get_milestone_html(self, req, group_by): html = '' stats_by = '' db = self.env.get_read_db() cursor = db.cursor() ms = req.args['id'] sql = "select sum(b.cost),sum(b.estimation), avg(b.status) from budgeting b, ticket t" \ " where b.ticket=t.id and t.milestone='%s'" % ms try: # print "milestone sql: %s" % sql cursor.execute(sql) for row in cursor: # print "row" # html = self._get_progress_html(row[0], row[1], row[2]) html = '<dl><dt>' + _('Budget in hours') + ':</dt><dd> </dd>' \ '<dt>' + _('Cost') + ': <dd>%.2f</dd></dt>' \ '<dt>' + _('Estimation') + ': <dd>%.2f</dd></dt>' \ '<dt>' + _('Status') + ': <dd>%.1f%%</dd></dt></dl>' html = html % (row[0], row[1], row[2]) html = 
                    self._get_progress_html(row[0], row[1], row[2]) + html
        except Exception as e:
            self.log.error("Error executing SQL Statement \n %s" % e)
            db.rollback()
        if not group_by:
            return html, stats_by
        sql = "select t.%s, sum(b.cost), sum(b.estimation), avg(b.status)" \
              " from budgeting b, ticket t" \
              " where b.ticket=t.id and t.milestone='%s'" \
              " group by t.%s order by t.%s" % (group_by, ms, group_by,
                                                group_by)
        try:
            cursor.execute(sql)
            for row in cursor:
                status_bar = self._get_progress_html(row[1], row[2],
                                                     row[3], 75)
                link = req.href.query({'milestone': ms, group_by: row[0]})
                if group_by == 'component':
                    link = req.href.report(BUDGET_REPORT_ALL_ID,
                                           {'MILESTONE': ms,
                                            'COMPONENT': row[0],
                                            'OWNER': '%'})
                stats_by += '<tr><th scope="row"><a href="%s">' \
                            '%s</a></th>' % (link, row[0])
                stats_by += '<td>%s</td></tr>' % status_bar
        except Exception as e:
            self.log.error("Error executing SQL Statement \n %s" % e)
            db.rollback()
        # filter_stream() above unpacks both the summary block and the
        # per-group rows
        return html, stats_by
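The milestone statistics above interpolate the milestone name straight into the SQL string, which breaks on names containing quotes and invites injection. A sketch of the same per-milestone aggregate as a parameterized query follows; the helper name is illustrative and not part of the plugin.

def milestone_budget_totals(cursor, milestone):
    """Return (cost, estimation, avg_status) for one milestone."""
    cursor.execute(
        "SELECT sum(b.cost), sum(b.estimation), avg(b.status)"
        " FROM budgeting b, ticket t"
        " WHERE b.ticket = t.id AND t.milestone = %s", (milestone,))
    return cursor.fetchone()

The same placeholder style would apply to the grouped query that follows it.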
class TicketSystem(Component): implements(IPermissionRequestor, IWikiSyntaxProvider, IResourceManager, ITicketManipulator) change_listeners = ExtensionPoint(ITicketChangeListener) milestone_change_listeners = ExtensionPoint(IMilestoneChangeListener) realm = 'ticket' ticket_custom_section = ConfigSection( 'ticket-custom', """In this section, you can define additional fields for tickets. See TracTicketsCustomFields for more details.""") action_controllers = OrderedExtensionsOption( 'ticket', 'workflow', ITicketActionController, default='ConfigurableTicketWorkflow', include_missing=False, doc="""Ordered list of workflow controllers to use for ticket actions. """) restrict_owner = BoolOption( 'ticket', 'restrict_owner', 'false', """Make the owner field of tickets use a drop-down menu. Be sure to understand the performance implications before activating this option. See [TracTickets#Assign-toasDrop-DownList Assign-to as Drop-Down List]. Please note that e-mail addresses are '''not''' obfuscated in the resulting drop-down menu, so this option should not be used if e-mail addresses must remain protected. """) default_version = Option('ticket', 'default_version', '', """Default version for newly created tickets.""") default_type = Option('ticket', 'default_type', 'defect', """Default type for newly created tickets.""") default_priority = Option( 'ticket', 'default_priority', 'major', """Default priority for newly created tickets.""") default_milestone = Option( 'ticket', 'default_milestone', '', """Default milestone for newly created tickets.""") default_component = Option( 'ticket', 'default_component', '', """Default component for newly created tickets.""") default_severity = Option( 'ticket', 'default_severity', '', """Default severity for newly created tickets.""") default_summary = Option( 'ticket', 'default_summary', '', """Default summary (title) for newly created tickets.""") default_description = Option( 'ticket', 'default_description', '', """Default description for newly created tickets.""") default_keywords = Option( 'ticket', 'default_keywords', '', """Default keywords for newly created tickets.""") default_owner = Option( 'ticket', 'default_owner', '< default >', """Default owner for newly created tickets. The component owner is used when set to the value `< default >`. """) default_cc = Option('ticket', 'default_cc', '', """Default cc: list for newly created tickets.""") default_resolution = Option( 'ticket', 'default_resolution', 'fixed', """Default resolution for resolving (closing) tickets.""") allowed_empty_fields = ListOption( 'ticket', 'allowed_empty_fields', 'milestone, version', doc="""Comma-separated list of `select` fields that can have an empty value. (//since 1.1.2//)""") max_comment_size = IntOption( 'ticket', 'max_comment_size', 262144, """Maximum allowed comment size in characters.""") max_description_size = IntOption( 'ticket', 'max_description_size', 262144, """Maximum allowed description size in characters.""") max_summary_size = IntOption( 'ticket', 'max_summary_size', 262144, """Maximum allowed summary size in characters. (//since 1.0.2//)""") def __init__(self): self.log.debug('action controllers for ticket workflow: %r', [c.__class__.__name__ for c in self.action_controllers]) # Public API def get_available_actions(self, req, ticket): """Returns a sorted list of available actions""" # The list should not have duplicates. 
actions = {} for controller in self.action_controllers: weighted_actions = controller.get_ticket_actions(req, ticket) or [] for weight, action in weighted_actions: if action in actions: actions[action] = max(actions[action], weight) else: actions[action] = weight all_weighted_actions = [(weight, action) for action, weight in actions.items()] return [x[1] for x in sorted(all_weighted_actions, reverse=True)] def get_all_status(self): """Returns a sorted list of all the states all of the action controllers know about.""" valid_states = set() for controller in self.action_controllers: valid_states.update(controller.get_all_status() or []) return sorted(valid_states) def get_ticket_field_labels(self): """Produce a (name,label) mapping from `get_ticket_fields`.""" labels = {f['name']: f['label'] for f in self.get_ticket_fields()} labels['attachment'] = _("Attachment") return labels def get_ticket_fields(self): """Returns list of fields available for tickets. Each field is a dict with at least the 'name', 'label' (localized) and 'type' keys. It may in addition contain the 'custom' key, the 'optional' and the 'options' keys. When present 'custom' and 'optional' are always `True`. """ fields = copy.deepcopy(self.fields) label = 'label' # workaround gettext extraction bug for f in fields: f[label] = gettext(f[label]) return fields def reset_ticket_fields(self): """Invalidate ticket field cache.""" del self.fields @cached def fields(self): """Return the list of fields available for tickets.""" from trac.ticket import model fields = TicketFieldList() # Basic text fields fields.append({ 'name': 'summary', 'type': 'text', 'label': N_('Summary') }) fields.append({ 'name': 'reporter', 'type': 'text', 'label': N_('Reporter') }) # Owner field, by default text but can be changed dynamically # into a drop-down depending on configuration (restrict_owner=true) fields.append({'name': 'owner', 'type': 'text', 'label': N_('Owner')}) # Description fields.append({ 'name': 'description', 'type': 'textarea', 'format': 'wiki', 'label': N_('Description') }) # Default select and radio fields selects = [('type', N_('Type'), model.Type), ('status', N_('Status'), model.Status), ('priority', N_('Priority'), model.Priority), ('milestone', N_('Milestone'), model.Milestone), ('component', N_('Component'), model.Component), ('version', N_('Version'), model.Version), ('severity', N_('Severity'), model.Severity), ('resolution', N_('Resolution'), model.Resolution)] for name, label, cls in selects: options = [val.name for val in cls.select(self.env)] if not options: # Fields without possible values are treated as if they didn't # exist continue field = { 'name': name, 'type': 'select', 'label': label, 'value': getattr(self, 'default_' + name, ''), 'options': options } if name in ('status', 'resolution'): field['type'] = 'radio' field['optional'] = True elif name in self.allowed_empty_fields: field['optional'] = True fields.append(field) # Advanced text fields fields.append({ 'name': 'keywords', 'type': 'text', 'format': 'list', 'label': N_('Keywords') }) fields.append({ 'name': 'cc', 'type': 'text', 'format': 'list', 'label': N_('Cc') }) # Date/time fields fields.append({ 'name': 'time', 'type': 'time', 'format': 'relative', 'label': N_('Created') }) fields.append({ 'name': 'changetime', 'type': 'time', 'format': 'relative', 'label': N_('Modified') }) for field in self.custom_fields: if field['name'] in [f['name'] for f in fields]: self.log.warning('Duplicate field name "%s" (ignoring)', field['name']) continue if field['name'] in 
self.reserved_field_names: self.log.warning( 'Field name "%s" is a reserved name ' '(ignoring)', field['name']) continue if not re.match('^[a-zA-Z][a-zA-Z0-9_]+$', field['name']): self.log.warning( 'Invalid name for custom field: "%s" ' '(ignoring)', field['name']) continue fields.append(field) return fields reserved_field_names = [ 'report', 'order', 'desc', 'group', 'groupdesc', 'col', 'row', 'format', 'max', 'page', 'verbose', 'comment', 'or', 'id' ] def get_custom_fields(self): return copy.deepcopy(self.custom_fields) @cached def custom_fields(self): """Return the list of custom ticket fields available for tickets.""" fields = TicketFieldList() config = self.ticket_custom_section for name in [ option for option, value in config.options() if '.' not in option ]: field = { 'name': name, 'custom': True, 'type': config.get(name), 'order': config.getint(name + '.order', 0), 'label': config.get(name + '.label') or name.replace("_", " ").strip().capitalize(), 'value': config.get(name + '.value', '') } if field['type'] == 'select' or field['type'] == 'radio': field['options'] = config.getlist(name + '.options', sep='|') if '' in field['options'] or \ field['name'] in self.allowed_empty_fields: field['optional'] = True if '' in field['options']: field['options'].remove('') elif field['type'] == 'checkbox': field['value'] = '1' if as_bool(field['value']) else '0' elif field['type'] == 'text': field['format'] = config.get(name + '.format', 'plain') field['max_size'] = config.getint(name + '.max_size', 0) elif field['type'] == 'textarea': field['format'] = config.get(name + '.format', 'plain') field['max_size'] = config.getint(name + '.max_size', 0) field['height'] = config.getint(name + '.rows') elif field['type'] == 'time': field['format'] = config.get(name + '.format', 'datetime') fields.append(field) fields.sort(key=lambda f: (f['order'], f['name'])) return fields def get_field_synonyms(self): """Return a mapping from field name synonyms to field names. The synonyms are supposed to be more intuitive for custom queries.""" # i18n TODO - translated keys return {'created': 'time', 'modified': 'changetime'} def eventually_restrict_owner(self, field, ticket=None): """Restrict given owner field to be a list of users having the TICKET_MODIFY permission (for the given ticket) """ if self.restrict_owner: field['type'] = 'select' field['options'] = self.get_allowed_owners(ticket) field['optional'] = True def get_allowed_owners(self, ticket=None): """Returns a list of permitted ticket owners (those possessing the TICKET_MODIFY permission). Returns `None` if the option `[ticket]` `restrict_owner` is `False`. If `ticket` is not `None`, fine-grained permission checks are used to determine the allowed owners for the specified resource. :since: 1.0.3 """ if self.restrict_owner: allowed_owners = [] for user in PermissionSystem(self.env) \ .get_users_with_permission('TICKET_MODIFY'): if not ticket or \ 'TICKET_MODIFY' in PermissionCache(self.env, user, ticket.resource): allowed_owners.append(user) allowed_owners.sort() return allowed_owners # ITicketManipulator methods def prepare_ticket(self, req, ticket, fields, actions): pass def validate_ticket(self, req, ticket): # Validate select fields for known values. 
for field in ticket.fields: if 'options' not in field: continue name = field['name'] if name == 'status': continue if name in ticket and name in ticket._old: value = ticket[name] if value: if value not in field['options']: yield name, _('"%(value)s" is not a valid value', value=value) elif not field.get('optional', False): yield name, _("field cannot be empty") # Validate description length. if len(ticket['description'] or '') > self.max_description_size: yield 'description', _( "Must be less than or equal to %(num)s " "characters", num=self.max_description_size) # Validate summary length. if not ticket['summary']: yield 'summary', _("Tickets must contain a summary.") elif len(ticket['summary'] or '') > self.max_summary_size: yield 'summary', _( "Must be less than or equal to %(num)s " "characters", num=self.max_summary_size) # Validate custom field length. for field in ticket.custom_fields: field_attrs = ticket.fields.by_name(field) max_size = field_attrs.get('max_size', 0) if 0 < max_size < len(ticket[field] or ''): label = field_attrs.get('label') yield label or field, _( "Must be less than or equal to " "%(num)s characters", num=max_size) # Validate time field content. for field in ticket.time_fields: value = ticket[field] if field in ticket.custom_fields and \ field in ticket._old and \ not isinstance(value, datetime): field_attrs = ticket.fields.by_name(field) format = field_attrs.get('format') try: ticket[field] = user_time(req, parse_date, value, hint=format) \ if value else None except TracError as e: # Degrade TracError to warning. ticket[field] = value label = field_attrs.get('label') yield label or field, to_unicode(e) def validate_comment(self, req, comment): # Validate comment length if len(comment or '') > self.max_comment_size: yield _("Must be less than or equal to %(num)s characters", num=self.max_comment_size) # IPermissionRequestor methods def get_permission_actions(self): return [ 'TICKET_APPEND', 'TICKET_CREATE', 'TICKET_CHGPROP', 'TICKET_VIEW', 'TICKET_EDIT_CC', 'TICKET_EDIT_DESCRIPTION', 'TICKET_EDIT_COMMENT', ('TICKET_MODIFY', ['TICKET_APPEND', 'TICKET_CHGPROP']), ('TICKET_ADMIN', [ 'TICKET_CREATE', 'TICKET_MODIFY', 'TICKET_VIEW', 'TICKET_EDIT_CC', 'TICKET_EDIT_DESCRIPTION', 'TICKET_EDIT_COMMENT' ]) ] # IWikiSyntaxProvider methods def get_link_resolvers(self): return [('bug', self._format_link), ('issue', self._format_link), ('ticket', self._format_link), ('comment', self._format_comment_link)] def get_wiki_syntax(self): yield ( # matches #... but not &#... (HTML entity) r"!?(?<!&)#" # optional intertrac shorthand #T... 
+ digits r"(?P<it_ticket>%s)%s" % (WikiParser.INTERTRAC_SCHEME, Ranges.RE_STR), lambda x, y, z: self._format_link(x, 'ticket', y[1:], y, z)) def _format_link(self, formatter, ns, target, label, fullmatch=None): intertrac = formatter.shorthand_intertrac_helper( ns, target, label, fullmatch) if intertrac: return intertrac try: link, params, fragment = formatter.split_link(target) r = Ranges(link) if len(r) == 1: num = r.a ticket = formatter.resource(self.realm, num) from trac.ticket.model import Ticket if Ticket.id_is_valid(num) and \ 'TICKET_VIEW' in formatter.perm(ticket): # TODO: attempt to retrieve ticket view directly, # something like: t = Ticket.view(num) for type, summary, status, resolution in \ self.env.db_query(""" SELECT type, summary, status, resolution FROM ticket WHERE id=%s """, (str(num),)): description = self.format_summary( summary, status, resolution, type) title = '#%s: %s' % (num, description) href = formatter.href.ticket(num) + params + fragment return tag.a(label, title=title, href=href, class_='%s ticket' % status) else: ranges = str(r) if params: params = '&' + params[1:] label_wrap = label.replace(',', u',\u200b') ranges_wrap = ranges.replace(',', u', ') return tag.a(label_wrap, title=_("Tickets %(ranges)s", ranges=ranges_wrap), href=formatter.href.query(id=ranges) + params) except ValueError: pass return tag.a(label, class_='missing ticket') def _format_comment_link(self, formatter, ns, target, label): resource = None if ':' in target: elts = target.split(':') if len(elts) == 3: cnum, realm, id = elts if cnum != 'description' and cnum and not cnum[0].isdigit(): realm, id, cnum = elts # support old comment: style id = as_int(id, None) if realm in ('bug', 'issue'): realm = 'ticket' resource = formatter.resource(realm, id) else: resource = formatter.resource cnum = target if resource and resource.id and resource.realm == self.realm and \ cnum and (cnum.isdigit() or cnum == 'description'): href = title = class_ = None if self.resource_exists(resource): from trac.ticket.model import Ticket ticket = Ticket(self.env, resource.id) if cnum != 'description' and not ticket.get_change(cnum): title = _("ticket comment does not exist") class_ = 'missing ticket' elif 'TICKET_VIEW' in formatter.perm(resource): href = formatter.href.ticket(resource.id) + \ "#comment:%s" % cnum if resource.id != formatter.resource.id: summary = self.format_summary(ticket['summary'], ticket['status'], ticket['resolution'], ticket['type']) if cnum == 'description': title = _("Description for #%(id)s: %(summary)s", id=resource.id, summary=summary) else: title = _( "Comment %(cnum)s for #%(id)s: " "%(summary)s", cnum=cnum, id=resource.id, summary=summary) class_ = ticket['status'] + ' ticket' else: title = _("Description") if cnum == 'description' \ else _("Comment %(cnum)s", cnum=cnum) class_ = 'ticket' else: title = _("no permission to view ticket") class_ = 'forbidden ticket' else: title = _("ticket does not exist") class_ = 'missing ticket' return tag.a(label, class_=class_, href=href, title=title) return label # IResourceManager methods def get_resource_realms(self): yield self.realm def get_resource_description(self, resource, format=None, context=None, **kwargs): if format == 'compact': return '#%s' % resource.id elif format == 'summary': from trac.ticket.model import Ticket ticket = Ticket(self.env, resource.id) args = [ ticket[f] for f in ('summary', 'status', 'resolution', 'type') ] return self.format_summary(*args) return _("Ticket #%(shortname)s", shortname=resource.id) def 
format_summary(self, summary, status=None, resolution=None, type=None):
        summary = shorten_line(summary)
        if type:
            summary = type + ': ' + summary
        if status:
            if status == 'closed' and resolution:
                status += ': ' + resolution
            return "%s (%s)" % (summary, status)
        else:
            return summary

    def resource_exists(self, resource):
        """
        >>> from trac.test import EnvironmentStub
        >>> from trac.resource import Resource, resource_exists
        >>> env = EnvironmentStub()
        >>> resource_exists(env, Resource('ticket', 123456))
        False
        >>> from trac.ticket.model import Ticket
        >>> t = Ticket(env)
        >>> int(t.insert())
        1
        >>> resource_exists(env, t.resource)
        True
        """
        try:
            id_ = int(resource.id)
        except (TypeError, ValueError):
            return False
        if self.env.db_query("SELECT id FROM ticket WHERE id=%s", (id_,)):
            if resource.version is None:
                return True
            revcount = self.env.db_query("""
                SELECT count(DISTINCT time) FROM ticket_change WHERE ticket=%s
                """, (id_,))
            return revcount[0][0] >= resource.version
        else:
            return False
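To make the `[ticket-custom]` parsing in `custom_fields` above concrete: a section declaring a single text field, say `effort = text` plus `effort.label = Effort (h)`, should come out roughly as the field definition below. The values are inferred from the defaults in the code, not captured from a running Trac.

# Field produced by TicketSystem.custom_fields for:
#   [ticket-custom]
#   effort = text
#   effort.label = Effort (h)
expected_field = {
    'name': 'effort',
    'custom': True,
    'type': 'text',
    'order': 0,          # .order not set, getint() default
    'label': 'Effort (h)',
    'value': '',         # .value not set
    'format': 'plain',   # text fields default to .format = plain
    'max_size': 0,       # .max_size not set
}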
class BrowserModule(Component): implements(INavigationContributor, IPermissionRequestor, IRequestHandler, IWikiSyntaxProvider, IHTMLPreviewAnnotator, IWikiMacroProvider) property_renderers = ExtensionPoint(IPropertyRenderer) realm = RepositoryManager.source_realm downloadable_paths = ListOption( 'browser', 'downloadable_paths', '/trunk, /branches/*, /tags/*', doc="""List of repository paths that can be downloaded. Leave this option empty if you want to disable all downloads, otherwise set it to a comma-separated list of authorized paths (those paths are glob patterns, i.e. "*" can be used as a wild card). In a multi-repository environment, the path must be qualified with the repository name if the path does not point to the default repository (e.g. /reponame/trunk). Note that a simple prefix matching is performed on the paths, so aliases won't get automatically resolved. """) color_scale = BoolOption('browser', 'color_scale', True, doc="""Enable colorization of the ''age'' column. This uses the same color scale as the source code annotation: blue is older, red is newer. """) NEWEST_COLOR = (255, 136, 136) newest_color = Option( 'browser', 'newest_color', repr(NEWEST_COLOR), doc="""(r,g,b) color triple to use for the color corresponding to the newest color, for the color scale used in ''blame'' or the browser ''age'' column if `color_scale` is enabled. """) OLDEST_COLOR = (136, 136, 255) oldest_color = Option( 'browser', 'oldest_color', repr(OLDEST_COLOR), doc="""(r,g,b) color triple to use for the color corresponding to the oldest color, for the color scale used in ''blame'' or the browser ''age'' column if `color_scale` is enabled. """) intermediate_point = Option( 'browser', 'intermediate_point', '', doc="""If set to a value between 0 and 1 (exclusive), this will be the point chosen to set the `intermediate_color` for interpolating the color value. """) intermediate_color = Option( 'browser', 'intermediate_color', '', doc="""(r,g,b) color triple to use for the color corresponding to the intermediate color, if two linear interpolations are used for the color scale (see `intermediate_point`). If not set, the intermediate color between `oldest_color` and `newest_color` will be used. """) render_unsafe_content = BoolOption( 'browser', 'render_unsafe_content', 'false', """Whether raw files should be rendered in the browser, or only made downloadable. Pretty much any file may be interpreted as HTML by the browser, which allows a malicious user to create a file containing cross-site scripting attacks. For open repositories where anyone can check-in a file, it is recommended to leave this option disabled.""") hidden_properties = ListOption( 'browser', 'hide_properties', 'svk:merge', doc="""Comma-separated list of version control properties to hide from the repository browser. 
""") # public methods def get_custom_colorizer(self): """Returns a converter for values from [0.0, 1.0] to a RGB triple.""" def interpolate(old, new, value): # Provides a linearly interpolated color triple for `value` # which must be a floating point value between 0.0 and 1.0 return tuple([int(b + (a - b) * value) for a, b in zip(new, old)]) def parse_color(rgb, default): # Get three ints out of a `rgb` string or return `default` try: t = tuple([int(v) for v in re.split(r'(\d+)', rgb)[1::2]]) return t if len(t) == 3 else default except ValueError: return default newest_color = parse_color(self.newest_color, self.NEWEST_COLOR) oldest_color = parse_color(self.oldest_color, self.OLDEST_COLOR) try: intermediate = float(self.intermediate_point) except ValueError: intermediate = None if intermediate: intermediate_color = parse_color(self.intermediate_color, None) if not intermediate_color: intermediate_color = tuple([ (a + b) / 2 for a, b in zip(newest_color, oldest_color) ]) def colorizer(value): if value <= intermediate: value = value / intermediate return interpolate(oldest_color, intermediate_color, value) else: value = (value - intermediate) / (1.0 - intermediate) return interpolate(intermediate_color, newest_color, value) else: def colorizer(value): return interpolate(oldest_color, newest_color, value) return colorizer # INavigationContributor methods def get_active_navigation_item(self, req): return 'browser' def get_navigation_items(self, req): rm = RepositoryManager(self.env) if any( repos.is_viewable(req.perm) for repos in rm.get_real_repositories()): yield ('mainnav', 'browser', tag.a(_('Browse Source'), href=req.href.browser())) # IPermissionRequestor methods def get_permission_actions(self): return ['BROWSER_VIEW', 'FILE_VIEW'] # IRequestHandler methods def match_request(self, req): match = re.match(r'/(export|browser|file)(/.*)?$', req.path_info) if match: mode, path = match.groups() if mode == 'export': if path and '/' in path: path_elts = path.split('/', 2) if len(path_elts) != 3: return False path = path_elts[2] req.args['rev'] = path_elts[1] req.args['format'] = 'raw' elif mode == 'file': req.redirect(req.href.browser(path, rev=req.args.get('rev'), format=req.args.get('format')), permanent=True) req.args['path'] = path or '/' return True def process_request(self, req): presel = req.args.get('preselected') if presel and (presel + '/').startswith(req.href.browser() + '/'): req.redirect(presel) path = req.args.get('path', '/') rev = req.args.get('rev', '') if rev.lower() in ('', 'head'): rev = None format = req.args.get('format') order = req.args.get('order', 'name').lower() desc = 'desc' in req.args rm = RepositoryManager(self.env) all_repositories = rm.get_all_repositories() reponame, repos, path = rm.get_repository_by_path(path) # Repository index show_index = not reponame and path == '/' if show_index: if repos and (as_bool(all_repositories[''].get('hidden')) or not repos.is_viewable(req.perm)): repos = None if not repos and reponame: raise ResourceNotFound( _("Repository '%(repo)s' not found", repo=reponame)) if reponame and reponame != repos.reponame: # Redirect alias qs = req.query_string req.redirect( req.href.browser(repos.reponame or None, path) + ('?' 
+ qs if qs else '')) reponame = repos.reponame if repos else None # Find node for the requested path/rev context = web_context(req) node = None changeset = None display_rev = lambda rev: rev if repos: try: if rev: rev = repos.normalize_rev(rev) # If `rev` is `None`, we'll try to reuse `None` consistently, # as a special shortcut to the latest revision. rev_or_latest = rev or repos.youngest_rev node = get_existing_node(req, repos, path, rev_or_latest) except NoSuchChangeset as e: raise ResourceNotFound(e, _('Invalid changeset number')) if node: try: # use changeset instance to retrieve branches and tags changeset = repos.get_changeset(node.rev) except NoSuchChangeset: pass context = context.child( repos.resource.child(self.realm, path, version=rev_or_latest)) display_rev = repos.display_rev # Prepare template data path_links = get_path_links(req.href, reponame, path, rev, order, desc) repo_data = dir_data = file_data = None if show_index: repo_data = self._render_repository_index(context, all_repositories, order, desc) if node: if not node.is_viewable(req.perm): raise PermissionError( 'BROWSER_VIEW' if node.isdir else 'FILE_VIEW', node.resource, self.env) if node.isdir: if format in ('zip', ): # extension point here... self._render_zip(req, context, repos, node, rev) # not reached dir_data = self._render_dir(req, repos, node, rev, order, desc) elif node.isfile: file_data = self._render_file(req, context, repos, node, rev) if not repos and not (repo_data and repo_data['repositories']): # If no viewable repositories, check permission instead of # repos.is_viewable() req.perm.require('BROWSER_VIEW') if show_index: raise ResourceNotFound(_("No viewable repositories")) else: raise ResourceNotFound(_("No node %(path)s", path=path)) quickjump_data = properties_data = None if node and not req.is_xhr: properties_data = self.render_properties('browser', context, node.get_properties()) quickjump_data = list(repos.get_quickjump_entries(rev)) data = { 'context': context, 'reponame': reponame, 'repos': repos, 'repoinfo': all_repositories.get(reponame or ''), 'path': path, 'rev': node and node.rev, 'stickyrev': rev, 'display_rev': display_rev, 'changeset': changeset, 'created_path': node and node.created_path, 'created_rev': node and node.created_rev, 'properties': properties_data, 'path_links': path_links, 'order': order, 'desc': 1 if desc else None, 'repo': repo_data, 'dir': dir_data, 'file': file_data, 'quickjump_entries': quickjump_data, 'wiki_format_messages': self.config['changeset'].getbool('wiki_format_messages'), } if req.is_xhr: # render and return the content only return 'dir_entries.html', data if dir_data or repo_data: add_script(req, 'common/js/expand_dir.js') add_script(req, 'common/js/keyboard_nav.js') # Links for contextual navigation if node: if node.isfile: prev_rev = repos.previous_rev(rev=node.created_rev, path=node.created_path) if prev_rev: href = req.href.browser(reponame, node.created_path, rev=prev_rev) add_link(req, 'prev', href, _('Revision %(num)s', num=display_rev(prev_rev))) if rev is not None: add_link(req, 'up', req.href.browser(reponame, node.created_path)) next_rev = repos.next_rev(rev=node.created_rev, path=node.created_path) if next_rev: href = req.href.browser(reponame, node.created_path, rev=next_rev) add_link(req, 'next', href, _('Revision %(num)s', num=display_rev(next_rev))) prevnext_nav(req, _('Previous Revision'), _('Next Revision'), _('Latest Revision')) else: if path != '/': add_link(req, 'up', path_links[-2]['href'], _('Parent directory')) add_ctxtnav( req, 
tag.a(_('Last Change'), href=req.href.changeset(node.created_rev, reponame, node.created_path))) if node.isfile: annotate = data['file']['annotate'] if annotate: add_ctxtnav(req, _('Normal'), title=_('View file without annotations'), href=req.href.browser(reponame, node.created_path, rev=rev)) if annotate != 'blame': add_ctxtnav(req, _('Blame'), title=_('Annotate each line with the last ' 'changed revision ' '(this can be time consuming...)'), href=req.href.browser(reponame, node.created_path, rev=rev, annotate='blame')) add_ctxtnav(req, _('Revision Log'), href=req.href.log(reponame, path, rev=rev)) path_url = repos.get_path_url(path, rev) if path_url: if path_url.startswith('//'): path_url = req.scheme + ':' + path_url add_ctxtnav(req, _('Repository URL'), href=path_url) add_stylesheet(req, 'common/css/browser.css') return 'browser.html', data # Internal methods def _render_repository_index(self, context, all_repositories, order, desc): # Color scale for the age column timerange = custom_colorizer = None if self.color_scale: custom_colorizer = self.get_custom_colorizer() rm = RepositoryManager(self.env) repositories = [] for reponame, repoinfo in all_repositories.iteritems(): if not reponame or as_bool(repoinfo.get('hidden')): continue try: repos = rm.get_repository(reponame) except TracError as err: entry = (reponame, repoinfo, None, None, exception_to_unicode(err), None) else: if repos: if not repos.is_viewable(context.perm): continue try: youngest = repos.get_changeset(repos.youngest_rev) except NoSuchChangeset: youngest = None if self.color_scale and youngest: if not timerange: timerange = TimeRange(youngest.date) else: timerange.insert(youngest.date) raw_href = self._get_download_href(context.href, repos, None, None) entry = (reponame, repoinfo, repos, youngest, None, raw_href) else: entry = (reponame, repoinfo, None, None, u"\u2013", None) if entry[4] is not None: # Check permission in case of error root = Resource('repository', reponame).child(self.realm, '/') if 'BROWSER_VIEW' not in context.perm(root): continue repositories.append(entry) # Ordering of repositories if order == 'date': def repo_order(args): reponame, repoinfo, repos, youngest, err, href = args return (youngest.date if youngest else to_datetime(0), embedded_numbers(reponame.lower())) elif order == 'author': def repo_order(args): reponame, repoinfo, repos, youngest, err, href = args return (youngest.author.lower() if youngest else '', embedded_numbers(reponame.lower())) else: def repo_order(args): reponame, repoinfo, repos, youngest, err, href = args return embedded_numbers(reponame.lower()) repositories = sorted(repositories, key=repo_order, reverse=desc) return { 'repositories': repositories, 'timerange': timerange, 'colorize_age': custom_colorizer } def _render_dir(self, req, repos, node, rev, order, desc): req.perm(node.resource).require('BROWSER_VIEW') download_href = self._get_download_href # Entries metadata class entry(object): _copy = 'name rev created_rev kind isdir path content_length' \ .split() __slots__ = _copy + ['raw_href'] def __init__(self, node): for f in entry._copy: setattr(self, f, getattr(node, f)) self.raw_href = download_href(req.href, repos, node, rev) entries = [ entry(n) for n in node.get_entries() if n.is_viewable(req.perm) ] changes = get_changes(repos, [i.created_rev for i in entries], self.log) if rev: newest = repos.get_changeset(rev).date else: newest = datetime_now(req.tz) # Color scale for the age column timerange = custom_colorizer = None if self.color_scale: timerange = 
TimeRange(newest) max_s = req.args.get('range_max_secs') min_s = req.args.get('range_min_secs') parent_range = [ timerange.from_seconds(long(s)) for s in [max_s, min_s] if s ] this_range = [c.date for c in changes.values() if c] for dt in this_range + parent_range: timerange.insert(dt) custom_colorizer = self.get_custom_colorizer() # Ordering of entries if order == 'date': def file_order(a): return (changes[a.created_rev].date, embedded_numbers(a.name.lower())) elif order == 'size': def file_order(a): return (a.content_length, embedded_numbers(a.name.lower())) elif order == 'author': def file_order(a): return (changes[a.created_rev].author.lower(), embedded_numbers(a.name.lower())) else: def file_order(a): return embedded_numbers(a.name.lower()) dir_order = 1 if desc else -1 def browse_order(a): return dir_order if a.isdir else 0, file_order(a) entries = sorted(entries, key=browse_order, reverse=desc) # ''Zip Archive'' alternate link zip_href = self._get_download_href(req.href, repos, node, rev) if zip_href: add_link(req, 'alternate', zip_href, _('Zip Archive'), 'application/zip', 'zip') return { 'entries': entries, 'changes': changes, 'timerange': timerange, 'colorize_age': custom_colorizer, 'range_max_secs': (timerange and timerange.to_seconds(timerange.newest)), 'range_min_secs': (timerange and timerange.to_seconds(timerange.oldest)), } def _iter_nodes(self, node): stack = [node] while stack: node = stack.pop() yield node if node.isdir: stack.extend( sorted(node.get_entries(), key=lambda x: x.name, reverse=True)) def _render_zip(self, req, context, repos, root_node, rev=None): if not self.is_path_downloadable(repos, root_node.path): raise TracError(_("Path not available for download")) req.perm(context.resource).require('FILE_VIEW') root_path = root_node.path.rstrip('/') if root_path: archive_name = root_node.name else: archive_name = repos.reponame or 'repository' filename = '%s-%s.zip' % (archive_name, root_node.rev) render_zip(req, filename, repos, root_node, self._iter_nodes) def _render_file(self, req, context, repos, node, rev=None): req.perm(node.resource).require('FILE_VIEW') mimeview = Mimeview(self.env) # MIME type detection content = node.get_processed_content() chunk = content.read(CHUNK_SIZE) mime_type = node.content_type if not mime_type or mime_type == 'application/octet-stream': mime_type = mimeview.get_mimetype(node.name, chunk) or \ mime_type or 'text/plain' # Eventually send the file directly format = req.args.get('format') if format in ('raw', 'txt'): req.send_response(200) req.send_header('Content-Type', 'text/plain' if format == 'txt' else mime_type) req.send_header('Last-Modified', http_date(node.last_modified)) if rev is None: req.send_header('Pragma', 'no-cache') req.send_header('Cache-Control', 'no-cache') req.send_header('Expires', 'Fri, 01 Jan 1999 00:00:00 GMT') if not self.render_unsafe_content: # Force browser to download files instead of rendering # them, since they might contain malicious code enabling # XSS attacks req.send_header('Content-Disposition', 'attachment') req.end_headers() # Note: don't pass an iterable instance to RequestDone, instead # call req.write() with each chunk here to avoid SEGVs (#11805) while chunk: req.write(chunk) chunk = content.read(CHUNK_SIZE) raise RequestDone else: # The changeset corresponding to the last change on `node` # is more interesting than the `rev` changeset. 
changeset = repos.get_changeset(node.created_rev) # add ''Plain Text'' alternate link if needed if not is_binary(chunk) and mime_type != 'text/plain': plain_href = req.href.browser(repos.reponame or None, node.path, rev=rev, format='txt') add_link(req, 'alternate', plain_href, _('Plain Text'), 'text/plain') # add ''Original Format'' alternate link (always) raw_href = req.href.export(rev or repos.youngest_rev, repos.reponame or None, node.path) add_link(req, 'alternate', raw_href, _('Original Format'), mime_type) self.log.debug("Rendering preview of node %s@%s with mime-type %s", node.name, rev, mime_type) content = None # the remainder of that content is not needed add_stylesheet(req, 'common/css/code.css') annotations = ['lineno'] annotate = req.args.get('annotate') if annotate: annotations.insert(0, annotate) preview_data = mimeview.preview_data(context, node.get_processed_content(), node.get_content_length(), mime_type, node.created_path, raw_href, annotations=annotations, force_source=bool(annotate)) return { 'changeset': changeset, 'size': node.content_length, 'preview': preview_data, 'annotate': annotate, } def _get_download_href(self, href, repos, node, rev): """Return the URL for downloading a file, or a directory as a ZIP.""" if node is not None and node.isfile: return href.export(rev or 'HEAD', repos.reponame or None, node.path) path = '' if node is None else node.path.strip('/') if self.is_path_downloadable(repos, path): return href.browser(repos.reponame or None, path, rev=rev or repos.youngest_rev, format='zip') # public methods def is_path_downloadable(self, repos, path): if repos.reponame: path = repos.reponame + '/' + path return any( fnmatchcase(path, dp.strip('/')) for dp in self.downloadable_paths) def render_properties(self, mode, context, props): """Prepare rendering of a collection of properties.""" return filter(None, [ self.render_property(name, mode, context, props) for name in sorted(props) ]) def render_property(self, name, mode, context, props): """Renders a node property to HTML.""" if name in self.hidden_properties: return candidates = [] for renderer in self.property_renderers: quality = renderer.match_property(name, mode) if quality > 0: candidates.append((quality, renderer)) candidates.sort(reverse=True) for (quality, renderer) in candidates: try: rendered = renderer.render_property(name, mode, context, props) if not rendered: return rendered if isinstance(rendered, RenderedProperty): value = rendered.content else: value = rendered rendered = None prop = {'name': name, 'value': value, 'rendered': rendered} return prop except Exception as e: self.log.warning( 'Rendering failed for property %s with ' 'renderer %s: %s', name, renderer.__class__.__name__, exception_to_unicode(e, traceback=True)) # IWikiSyntaxProvider methods def get_wiki_syntax(self): return [] def get_link_resolvers(self): """TracBrowser link resolvers. - `source:` and `browser:` * simple paths (/dir/file) * paths at a given revision (/dir/file@234) * paths with line number marks (/dir/file@234:10,20-30) * paths with line number anchor (/dir/file@234#L100) Marks and anchor can be combined. The revision must be present when specifying line numbers. In the few cases where it would be redundant (e.g. 
for tags), the revision number itself can be omitted: /tags/v10/file@100-110#L99 """ return [('repos', self._format_browser_link), ('export', self._format_export_link), ('source', self._format_browser_link), ('browser', self._format_browser_link)] def _format_export_link(self, formatter, ns, export, label): export, query, fragment = formatter.split_link(export) if ':' in export: rev, path = export.split(':', 1) elif '@' in export: path, rev = export.split('@', 1) else: rev, path = None, export node, raw_href, title = self._get_link_info(path, rev, formatter.href, formatter.perm) if raw_href: return tag.a(label, class_='export', href=raw_href + fragment, title=title) return tag.a(label, class_='missing export') def _format_browser_link(self, formatter, ns, path, label): path, query, fragment = formatter.split_link(path) rev = marks = None match = self.PATH_LINK_RE.match(path) if match: path, rev, marks = match.groups() href = formatter.href src_href = href.browser(path, rev=rev, marks=marks) + query + fragment node, raw_href, title = self._get_link_info(path, rev, formatter.href, formatter.perm) if not node: return tag.a(label, class_='missing source') link = tag.a(label, class_='source', href=src_href) if raw_href: link = tag( link, tag.a( u'\u200b', href=raw_href + fragment, title=title, class_='trac-rawlink' if node.isfile else 'trac-ziplink')) return link PATH_LINK_RE = re.compile( r"([^@#:]*)" # path r"[@:]([^#:]+)?" # rev r"(?::(\d+(?:-\d+)?(?:,\d+(?:-\d+)?)*))?" # marks ) def _get_link_info(self, path, rev, href, perm): rm = RepositoryManager(self.env) node = raw_href = title = None try: reponame, repos, npath = rm.get_repository_by_path(path) node = get_allowed_node(repos, npath, rev, perm) if node is not None: raw_href = self._get_download_href(href, repos, node, rev) title = _("Download") if node.isfile \ else _("Download as Zip archive") except TracError: pass return node, raw_href, title # IHTMLPreviewAnnotator methods def get_annotation_type(self): return 'blame', _('Rev'), _('Revision in which the line changed') def get_annotation_data(self, context): """Cache the annotation data corresponding to each revision.""" return BlameAnnotator(self.env, context) def annotate_row(self, context, row, lineno, line, blame_annotator): blame_annotator.annotate(row, lineno) # IWikiMacroProvider methods def get_macros(self): yield "RepositoryIndex" def get_macro_description(self, name): description = cleandoc_(""" Display the list of available repositories. 
Can be given the following named arguments: ''format'':: Select the rendering format: - ''compact'' produces a comma-separated list of repository prefix names (default) - ''list'' produces a description list of repository prefix names - ''table'' produces a table view, similar to the one visible in the ''Browse View'' page ''glob'':: Do a glob-style filtering on the repository names (defaults to '*') ''order'':: Order repositories by the given column (one of "name", "date" or "author") ''desc'':: When set to 1, order by descending order (''since 0.12'') """) return 'messages', description def expand_macro(self, formatter, name, content): args, kwargs = parse_args(content) format = kwargs.get('format', 'compact') glob = kwargs.get('glob', '*') order = kwargs.get('order') desc = as_bool(kwargs.get('desc', 0)) rm = RepositoryManager(self.env) all_repos = dict(rdata for rdata in rm.get_all_repositories().items() if fnmatchcase(rdata[0], glob)) if format == 'table': repo = self._render_repository_index(formatter.context, all_repos, order, desc) add_stylesheet(formatter.req, 'common/css/browser.css') wiki_format_messages = self.config['changeset'] \ .getbool('wiki_format_messages') data = { 'repo': repo, 'order': order, 'desc': 1 if desc else None, 'reponame': None, 'path': '/', 'stickyrev': None, 'wiki_format_messages': wiki_format_messages } return Chrome(self.env).render_fragment(formatter.context.req, 'repository_index.html', data) def get_repository(reponame): try: return rm.get_repository(reponame) except TracError: return all_repos = [(reponame, get_repository(reponame)) for reponame in all_repos] all_repos = sorted( ((reponame, repos) for reponame, repos in all_repos if repos and not as_bool(repos.params.get('hidden')) and repos.is_viewable(formatter.perm)), reverse=desc) def repolink(reponame, repos): label = reponame or _('(default)') return Markup( tag.a(label, title=_('View repository %(repo)s', repo=label), href=formatter.href.browser(repos.reponame or None))) if format == 'list': return tag.dl([ tag(tag.dt(repolink(reponame, repos)), tag.dd(repos.params.get('description'))) for reponame, repos in all_repos ]) else: # compact return Markup(', ').join( repolink(reponame, repos) for reponame, repos in all_repos)
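# A small standalone sketch (not part of BrowserModule) exercising the
# PATH_LINK_RE pattern defined above, to show how source:/browser: link
# targets are split into path, revision and line marks by
# _format_browser_link(). The sample targets are invented for illustration.
import re

PATH_LINK_RE = re.compile(
    r"([^@#:]*)"                                # path
    r"[@:]([^#:]+)?"                            # rev
    r"(?::(\d+(?:-\d+)?(?:,\d+(?:-\d+)?)*))?"   # marks
)

for target in ('trunk/setup.py@1234', 'trunk/setup.py@1234:10,20-30',
               'trunk/README'):
    match = PATH_LINK_RE.match(target)
    if match:
        path, rev, marks = match.groups()
        print('%s -> path=%r rev=%r marks=%r' % (target, path, rev, marks))
    else:
        # no @rev part: the target is used as a plain path without revision
        print('%s -> plain path, no revision' % target)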
class NotificationSystem(Component): email_sender = ExtensionOption( 'notification', 'email_sender', IEmailSender, 'SmtpEmailSender', """Name of the component implementing `IEmailSender`. This component is used by the notification system to send emails. Trac currently provides `SmtpEmailSender` for connecting to an SMTP server, and `SendmailEmailSender` for running a `sendmail`-compatible executable. """) smtp_enabled = BoolOption('notification', 'smtp_enabled', 'false', """Enable email notification.""") smtp_from = Option( 'notification', 'smtp_from', 'trac@localhost', """Sender address to use in notification emails. At least one of `smtp_from` and `smtp_replyto` must be set, otherwise Trac refuses to send notification mails.""") smtp_from_name = Option('notification', 'smtp_from_name', '', """Sender name to use in notification emails.""") smtp_from_author = BoolOption( 'notification', 'smtp_from_author', 'false', """Use the author of the change as the sender in notification emails (e.g. reporter of a new ticket, author of a comment). If the author hasn't set an email address, `smtp_from` and `smtp_from_name` are used instead. (''since 1.0'')""") smtp_replyto = Option( 'notification', 'smtp_replyto', 'trac@localhost', """Reply-To address to use in notification emails. At least one of `smtp_from` and `smtp_replyto` must be set, otherwise Trac refuses to send notification mails.""") smtp_always_cc_list = ListOption( 'notification', 'smtp_always_cc', '', sep=(',', ' '), doc="""Comma-separated list of email addresses to always send notifications to. Addresses can be seen by all recipients (Cc:).""") smtp_always_bcc_list = ListOption( 'notification', 'smtp_always_bcc', '', sep=(',', ' '), doc="""Comma-separated list of email addresses to always send notifications to. Addresses are not public (Bcc:). """) smtp_default_domain = Option( 'notification', 'smtp_default_domain', '', """Default host/domain to append to addresses that do not specify one. Fully qualified addresses are not modified. The default domain is appended to all username/login for which an email address cannot be found in the user settings.""") ignore_domains_list = ListOption( 'notification', 'ignore_domains', '', doc="""Comma-separated list of domains that should not be considered part of email addresses (for usernames with Kerberos domains).""") admit_domains_list = ListOption( 'notification', 'admit_domains', '', doc="""Comma-separated list of domains that should be considered as valid for email addresses (such as localdomain).""") mime_encoding = Option( 'notification', 'mime_encoding', 'none', """Specifies the MIME encoding scheme for emails. Supported values are: `none`, the default value which uses 7-bit encoding if the text is plain ASCII or 8-bit otherwise. `base64`, which works with any kind of content but may cause some issues with touchy anti-spam/anti-virus engine. `qp` or `quoted-printable`, which works best for european languages (more compact than base64) if 8-bit encoding cannot be used. """) use_public_cc = BoolOption( 'notification', 'use_public_cc', 'false', """Addresses in the To and Cc fields are visible to all recipients. If this option is disabled, recipients are put in the Bcc list. """) use_short_addr = BoolOption( 'notification', 'use_short_addr', 'false', """Permit email address without a host/domain (i.e. username only). The SMTP server should accept those addresses, and either append a FQDN or use local delivery. See also `smtp_default_domain`. Do not use this option with a public SMTP server. 
""") smtp_subject_prefix = Option( 'notification', 'smtp_subject_prefix', '__default__', """Text to prepend to subject line of notification emails. If the setting is not defined, then `[$project_name]` is used as the prefix. If no prefix is desired, then specifying an empty option will disable it. """) message_id_hash = Option( 'notification', 'message_id_hash', 'md5', """Hash algorithm to create unique Message-ID header. ''(since 1.0.13)''""") notification_subscriber_section = ConfigSection( 'notification-subscriber', """The notifications subscriptions are controlled by plugins. All `INotificationSubscriber` components are in charge. These components may allow to be configured via this section in the `trac.ini` file. See TracNotification for more details. Available subscribers: [[SubscriberList]] """) distributors = ExtensionPoint(INotificationDistributor) subscribers = ExtensionPoint(INotificationSubscriber) @property def smtp_always_cc(self): # For backward compatibility, remove in 1.5.1 return self.config.get('notification', 'smtp_always_cc') @property def smtp_always_bcc(self): # For backward compatibility, remove in 1.5.1 return self.config.get('notification', 'smtp_always_bcc') @property def ignore_domains(self): # For backward compatibility, remove in 1.5.1 return self.config.get('notification', 'ignore_domains') @property def admit_domains(self): # For backward compatibility, remove in 1.5.1 return self.config.get('notification', 'admit_domains') @lazy def subscriber_defaults(self): rawsubscriptions = self.notification_subscriber_section.options() return parse_subscriber_config(rawsubscriptions) def default_subscriptions(self, klass): for d in self.subscriber_defaults[klass]: yield (klass, d['distributor'], d['format'], d['priority'], d['adverb']) def get_default_format(self, transport): return self.config.get('notification', 'default_format.' + transport) or 'text/plain' def get_preferred_format(self, sid, authenticated, transport): from trac.notification.prefs import get_preferred_format return get_preferred_format(self.env, sid, authenticated, transport) or \ self.get_default_format(transport) def send_email(self, from_addr, recipients, message): """Send message to recipients via e-mail.""" self.email_sender.send(from_addr, recipients, message) def notify(self, event): """Distribute an event to all subscriptions. :param event: a `NotificationEvent` """ self.distribute_event(event, self.subscriptions(event)) def distribute_event(self, event, subscriptions): """Distribute a event to all subscriptions. :param event: a `NotificationEvent` :param subscriptions: a list of tuples (sid, authenticated, address, transport, format) where either sid or address can be `None` """ packages = {} for sid, authenticated, address, transport, format in subscriptions: package = packages.setdefault(transport, {}) key = (sid, authenticated, address) if key in package: continue package[key] = format or self.get_preferred_format( sid, authenticated, transport) for distributor in self.distributors: for transport in distributor.transports(): if transport in packages: recipients = [ (k[0], k[1], k[2], format) for k, format in packages[transport].iteritems() ] distributor.distribute(transport, recipients, event) def subscriptions(self, event): """Return all subscriptions for a given event. 
:return: a list of (sid, authenticated, address, transport, format) """ subscriptions = [] for subscriber in self.subscribers: if event.category == 'batchmodify': for ticket_event in event.get_ticket_change_events(self.env): subscriptions.extend( x for x in subscriber.matches(ticket_event) if x) else: subscriptions.extend(x for x in subscriber.matches(event) if x) # For each (transport, sid, authenticated) combination check the # subscription with the highest priority: # If it is "always" keep it. If it is "never" drop it. # sort by (transport, sid, authenticated, priority) ordered = sorted(subscriptions, key=itemgetter(1, 2, 3, 6)) previous_combination = None for rule, transport, sid, auth, addr, fmt, prio, adverb in ordered: if (transport, sid, auth) == previous_combination: continue if adverb == 'always': self.log.debug( "Adding (%s [%s]) for 'always' on rule (%s) " "for (%s)", sid, auth, rule, transport) yield (sid, auth, addr, transport, fmt) else: self.log.debug( "Ignoring (%s [%s]) for 'never' on rule (%s) " "for (%s)", sid, auth, rule, transport) # Also keep subscriptions without sid (raw email subscription) if sid: previous_combination = (transport, sid, auth)
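# Self-contained sketch of the precedence rule implemented in subscriptions()
# above: rules are sorted by (transport, sid, authenticated, priority) and,
# for each (transport, sid, authenticated) combination, only the
# highest-priority rule is honored -- kept when its adverb is 'always',
# dropped when it is 'never'. The rule tuples below are invented sample data,
# not output of real INotificationSubscriber components.
from operator import itemgetter

sample_rules = [
    # (rule, transport, sid, authenticated, address, format, priority, adverb)
    ('TicketOwnerSubscriber',    'email', 'alice', 1, None, 'text/plain', 1, 'always'),
    ('CarbonCopySubscriber',     'email', 'alice', 1, None, 'text/html',  2, 'never'),
    ('TicketReporterSubscriber', 'email', 'bob',   1, None, 'text/plain', 1, 'never'),
]

def effective_subscriptions(rules):
    previous = None
    for rule, transport, sid, auth, addr, fmt, prio, adverb in \
            sorted(rules, key=itemgetter(1, 2, 3, 6)):
        if (transport, sid, auth) == previous:
            continue            # a higher-priority rule already decided
        if adverb == 'always':
            yield (sid, auth, addr, transport, fmt)
        if sid:
            previous = (transport, sid, auth)

print(list(effective_subscriptions(sample_rules)))
# -> [('alice', 1, None, 'email', 'text/plain')]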
class TagRequestHandler(TagTemplateProvider): """[main] Implements the /tags handler.""" implements(INavigationContributor, IRequestHandler) cloud_mincount = Option('tags', 'cloud_mincount', 1, doc="""Integer threshold to hide tags with smaller count.""") default_cols = Option('tags', 'default_table_cols', 'id|description|tags', doc="""Select columns and order for table format using a "|"-separated list of column names. Supported columns: realm, id, description, tags """) default_format = Option('tags', 'default_format', 'oldlist', doc="""Set the default format for the handler of the `/tags` domain. || `oldlist` (default value) || The original format with a bulleted-list of "linked-id description (tags)" || || `compact` || bulleted-list of "linked-description" || || `table` || table... (see corresponding column option) || """) exclude_realms = ListOption('tags', 'exclude_realms', [], doc="""Comma-separated list of realms to exclude from tags queries by default, unless specifically included using "realm:realm-name" in a query.""") # INavigationContributor methods def get_active_navigation_item(self, req): if 'TAGS_VIEW' in req.perm: return 'tags' def get_navigation_items(self, req): if 'TAGS_VIEW' in req.perm: label = tag_("Tags") yield ('mainnav', 'tags', builder.a(label, href=req.href.tags(), accesskey='T')) # IRequestHandler methods def match_request(self, req): return req.path_info.startswith('/tags') def process_request(self, req): req.perm.require('TAGS_VIEW') match = re.match(r'/tags/?(.*)', req.path_info) tag_id = match.group(1) and match.group(1) or None query = req.args.get('q', '') # Consider only providers, that are permitted for display. tag_system = TagSystem(self.env) all_realms = tag_system.get_taggable_realms(req.perm) if not (tag_id or query) or [r for r in all_realms if r in req.args] == []: for realm in all_realms: if realm not in self.exclude_realms: req.args[realm] = 'on' checked_realms = [r for r in all_realms if r in req.args] if query: # Add permitted realms from query expression. checked_realms.extend(query_realms(query, all_realms)) realm_args = dict(zip([r for r in checked_realms], ['on' for r in checked_realms])) # Switch between single tag and tag query expression mode. if tag_id and not re.match(r"""(['"]?)(\S+)\1$""", tag_id, re.UNICODE): # Convert complex, invalid tag ID's --> query expression. req.redirect(req.href.tags(realm_args, q=tag_id)) elif query: single_page = re.match(r"""(['"]?)(\S+)\1$""", query, re.UNICODE) if single_page: # Convert simple query --> single tag. req.redirect(req.href.tags(single_page.group(2), realm_args)) data = dict(page_title=_("Tags"), checked_realms=checked_realms) # Populate the TagsQuery form field. data['tag_query'] = tag_id and tag_id or query data['tag_realms'] = list(dict(name=realm, checked=realm in checked_realms) for realm in all_realms) if tag_id: data['tag_page'] = WikiPage(self.env, tag_system.wiki_page_prefix + tag_id) if query or tag_id: macro = 'ListTagged' # TRANSLATOR: The meta-nav link label. add_ctxtnav(req, _("Back to Cloud"), req.href.tags()) args = "%s,format=%s,cols=%s" % \ (tag_id and tag_id or query, self.default_format, self.default_cols) data['mincount'] = 0 else: macro = 'TagCloud' mincount = as_int(req.args.get('mincount', 0), self.cloud_mincount) args = mincount and "mincount=%s" % mincount or None data['mincount'] = mincount # When using the given req the page isn't rendered properly. The call # to expand_macro() leads to Chrome().render_template(req, ...). 
# The function render_template() breaks something in the request handling. # That used to work with Genshi. # # With this mocked req everything is just fine. mock_req = MockRequest(self.env, path_info=req.path_info, authname=req.authname, script_name=req.href()) formatter = Formatter(self.env, web_context(mock_req, Resource('tag'))) self.env.log.debug("%s macro arguments: %s", macro, args and args or '(none)') macros = TagWikiMacros(self.env) try: # Query string without realm throws 'NotImplementedError'. data['tag_body'] = checked_realms and \ macros.expand_macro(formatter, macro, args, realms=checked_realms) \ or '' data['tag_body'] = Markup(to_unicode(data['tag_body'])) except InvalidQuery as e: data['tag_query_error'] = to_unicode(e) data['tag_body'] = macros.expand_macro(formatter, 'TagCloud', '') data['realm_args'] = realm_args add_stylesheet(req, 'tags/css/tractags.css') return 'tag_view.html', data, {'domain': 'tractags'}
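# Sketch of the dispatch rule used in process_request() above: a request
# target matching a single (optionally quoted) word is treated as one tag
# page, anything else as a tag query expression. The sample inputs are
# invented; the regular expression is the one used above.
import re

SINGLE_TAG_RE = re.compile(r"""(['"]?)(\S+)\1$""", re.UNICODE)

for value in ('python', '"python"', 'python or trac', 'realm:wiki python'):
    match = SINGLE_TAG_RE.match(value)
    if match:
        print('%r -> single tag page for %r' % (value, match.group(2)))
    else:
        print('%r -> tag query expression' % value)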
def __init__(self, section, prefix, default=None, doc=''):
    Option.__init__(self, section, prefix, default, doc)
    self.prefix = prefix
    self._options = []
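# Hypothetical sketch only: the fragment above stores a `prefix` and an empty
# `_options` list, which suggests an option that groups all keys in a section
# sharing that prefix. The helper and the sample dict below are invented to
# illustrate one way such a lookup could work; they are not part of the
# original class.
def collect_prefixed(section_options, prefix):
    """Return (suffix, value) pairs for keys starting with `prefix`."""
    return sorted((key[len(prefix):], value)
                  for key, value in section_options.items()
                  if key.startswith(prefix))

sample_section = {'field.one': 'a', 'field.two': 'b', 'other': 'c'}
print(collect_prefixed(sample_section, 'field.'))
# -> [('one', 'a'), ('two', 'b')]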
class Environment(Component, ComponentManager): """Trac environment manager. Trac stores project information in a Trac environment. It consists of a directory structure containing among other things: * a configuration file, * project-specific templates and plugins, * the wiki and ticket attachments files, * the SQLite database file (stores tickets, wiki pages...) in case the database backend is SQLite """ implements(ISystemInfoProvider) required = True system_info_providers = ExtensionPoint(ISystemInfoProvider) setup_participants = ExtensionPoint(IEnvironmentSetupParticipant) components_section = ConfigSection( 'components', """This section is used to enable or disable components provided by plugins, as well as by Trac itself. The component to enable/disable is specified via the name of the option. Whether its enabled is determined by the option value; setting the value to `enabled` or `on` will enable the component, any other value (typically `disabled` or `off`) will disable the component. The option name is either the fully qualified name of the components or the module/package prefix of the component. The former enables/disables a specific component, while the latter enables/disables any component in the specified package/module. Consider the following configuration snippet: {{{ [components] trac.ticket.report.ReportModule = disabled acct_mgr.* = enabled }}} The first option tells Trac to disable the [wiki:TracReports report module]. The second option instructs Trac to enable all components in the `acct_mgr` package. Note that the trailing wildcard is required for module/package matching. To view the list of active components, go to the ''Plugins'' page on ''About Trac'' (requires `CONFIG_VIEW` [wiki:TracPermissions permissions]). See also: TracPlugins """) shared_plugins_dir = PathOption( 'inherit', 'plugins_dir', '', """Path to the //shared plugins directory//. Plugins in that directory are loaded in addition to those in the directory of the environment `plugins`, with this one taking precedence. Non-absolute paths are relative to the Environment `conf` directory. """) base_url = Option( 'trac', 'base_url', '', """Reference URL for the Trac deployment. This is the base URL that will be used when producing documents that will be used outside of the web browsing context, like for example when inserting URLs pointing to Trac resources in notification e-mails.""") base_url_for_redirect = BoolOption( 'trac', 'use_base_url_for_redirect', False, """Optionally use `[trac] base_url` for redirects. In some configurations, usually involving running Trac behind a HTTP proxy, Trac can't automatically reconstruct the URL that is used to access it. You may need to use this option to force Trac to use the `base_url` setting also for redirects. This introduces the obvious limitation that this environment will only be usable when accessible from that URL, as redirects are frequently used. """) secure_cookies = BoolOption( 'trac', 'secure_cookies', False, """Restrict cookies to HTTPS connections. When true, set the `secure` flag on all cookies so that they are only sent to the server on HTTPS connections. Use this if your Trac instance is only accessible through HTTPS. """) anonymous_session_lifetime = IntOption( 'trac', 'anonymous_session_lifetime', '90', """Lifetime of the anonymous session, in days. Set the option to 0 to disable purging old anonymous sessions. 
(''since 1.0.17'')""") project_name = Option('project', 'name', 'My Project', """Name of the project.""") project_description = Option('project', 'descr', 'My example project', """Short description of the project.""") project_url = Option( 'project', 'url', '', """URL of the main project web site, usually the website in which the `base_url` resides. This is used in notification e-mails.""") project_admin = Option( 'project', 'admin', '', """E-Mail address of the project's administrator.""") project_admin_trac_url = Option( 'project', 'admin_trac_url', '.', """Base URL of a Trac instance where errors in this Trac should be reported. This can be an absolute or relative URL, or '.' to reference this Trac instance. An empty value will disable the reporting buttons. """) project_footer = Option( 'project', 'footer', N_('Visit the Trac open source project at<br />' '<a href="https://trac.edgewall.org/">' 'https://trac.edgewall.org/</a>'), """Page footer text (right-aligned).""") project_icon = Option('project', 'icon', 'common/trac.ico', """URL of the icon of the project.""") log_type = ChoiceOption('logging', 'log_type', log.LOG_TYPES + log.LOG_TYPE_ALIASES, """Logging facility to use. Should be one of (`none`, `file`, `stderr`, `syslog`, `winlog`).""", case_sensitive=False) log_file = Option( 'logging', 'log_file', 'trac.log', """If `log_type` is `file`, this should be a path to the log-file. Relative paths are resolved relative to the `log` directory of the environment.""") log_level = ChoiceOption('logging', 'log_level', log.LOG_LEVELS + log.LOG_LEVEL_ALIASES, """Level of verbosity in log. Should be one of (`CRITICAL`, `ERROR`, `WARNING`, `INFO`, `DEBUG`). """, case_sensitive=False) log_format = Option( 'logging', 'log_format', None, """Custom logging format. If nothing is set, the following will be used: `Trac[$(module)s] $(levelname)s: $(message)s` In addition to regular key names supported by the [http://docs.python.org/library/logging.html Python logger library] one could use: - `$(path)s` the path for the current environment - `$(basename)s` the last path component of the current environment - `$(project)s` the project name Note the usage of `$(...)s` instead of `%(...)s` as the latter form would be interpreted by the !ConfigParser itself. Example: `($(thread)d) Trac[$(basename)s:$(module)s] $(levelname)s: $(message)s` """) def __init__(self, path, create=False, options=[]): """Initialize the Trac environment. :param path: the absolute path to the Trac environment :param create: if `True`, the environment is created and populated with default data; otherwise, the environment is expected to already exist. :param options: A list of `(section, name, value)` tuples that define configuration options """ ComponentManager.__init__(self) self.path = os.path.normpath(os.path.normcase(path)) self.log = None self.config = None if create: self.create(options) for setup_participant in self.setup_participants: setup_participant.environment_created() else: self.verify() self.setup_config() def __repr__(self): return '<%s %r>' % (self.__class__.__name__, self.path) @lazy def name(self): """The environment name. :since: 1.2 """ return os.path.basename(self.path) @property def env(self): """Property returning the `Environment` object, which is often required for functions and methods that take a `Component` instance. """ # The cached decorator requires the object have an `env` attribute. 
return self @property def system_info(self): """List of `(name, version)` tuples describing the name and version information of external packages used by Trac and plugins. """ info = [] for provider in self.system_info_providers: info.extend(provider.get_system_info() or []) return sorted(set(info), key=lambda args: (args[0] != 'Trac', args[0].lower())) def get_systeminfo(self): """Return a list of `(name, version)` tuples describing the name and version information of external packages used by Trac and plugins. :since 1.3.1: deprecated and will be removed in 1.5.1. Use system_info property instead. """ return self.system_info # ISystemInfoProvider methods def get_system_info(self): yield 'Trac', self.trac_version yield 'Python', sys.version yield 'setuptools', setuptools.__version__ if pytz is not None: yield 'pytz', pytz.__version__ if hasattr(self, 'webfrontend_version'): yield self.webfrontend, self.webfrontend_version def component_activated(self, component): """Initialize additional member variables for components. Every component activated through the `Environment` object gets three member variables: `env` (the environment object), `config` (the environment configuration) and `log` (a logger object).""" component.env = self component.config = self.config component.log = self.log def _component_name(self, name_or_class): name = name_or_class if not isinstance(name_or_class, basestring): name = name_or_class.__module__ + '.' + name_or_class.__name__ return name.lower() @lazy def _component_rules(self): _rules = {} for name, value in self.components_section.options(): name = name.rstrip('.*').lower() _rules[name] = as_bool(value) return _rules def is_component_enabled(self, cls): """Implemented to only allow activation of components that are not disabled in the configuration. This is called by the `ComponentManager` base class when a component is about to be activated. If this method returns `False`, the component does not get activated. If it returns `None`, the component only gets activated if it is located in the `plugins` directory of the environment. """ component_name = self._component_name(cls) rules = self._component_rules cname = component_name while cname: enabled = rules.get(cname) if enabled is not None: return enabled idx = cname.rfind('.') if idx < 0: break cname = cname[:idx] # By default, all components in the trac package except # in trac.test or trac.tests are enabled return component_name.startswith('trac.') and \ not component_name.startswith('trac.test.') and \ not component_name.startswith('trac.tests.') or None def enable_component(self, cls): """Enable a component or module.""" self._component_rules[self._component_name(cls)] = True super(Environment, self).enable_component(cls) @contextmanager def component_guard(self, component, reraise=False): """Traps any runtime exception raised when working with a component and logs the error. :param component: the component responsible for any error that could happen inside the context :param reraise: if `True`, an error is logged but not suppressed. By default, errors are suppressed. 
""" try: yield except TracError as e: self.log.warning("Component %s failed with %s", component, exception_to_unicode(e)) if reraise: raise except Exception as e: self.log.error("Component %s failed with %s", component, exception_to_unicode(e, traceback=True)) if reraise: raise def verify(self): """Verify that the provided path points to a valid Trac environment directory.""" try: with open(os.path.join(self.path, 'VERSION')) as f: tag = f.readline().rstrip('\n') except Exception as e: raise TracError( _("No Trac environment found at %(path)s\n" "%(e)s", path=self.path, e=exception_to_unicode(e))) if tag != _VERSION: raise TracError( _("Unknown Trac environment type '%(type)s'", type=tag)) @lazy def db_exc(self): """Return an object (typically a module) containing all the backend-specific exception types as attributes, named according to the Python Database API (http://www.python.org/dev/peps/pep-0249/). To catch a database exception, use the following pattern:: try: with env.db_transaction as db: ... except env.db_exc.IntegrityError as e: ... """ return DatabaseManager(self).get_exceptions() @property def db_query(self): """Return a context manager (`~trac.db.api.QueryContextManager`) which can be used to obtain a read-only database connection. Example:: with env.db_query as db: cursor = db.cursor() cursor.execute("SELECT ...") for row in cursor.fetchall(): ... Note that a connection retrieved this way can be "called" directly in order to execute a query:: with env.db_query as db: for row in db("SELECT ..."): ... :warning: after a `with env.db_query as db` block, though the `db` variable is still defined, you shouldn't use it as it might have been closed when exiting the context, if this context was the outermost context (`db_query` or `db_transaction`). If you don't need to manipulate the connection itself, this can even be simplified to:: for row in env.db_query("SELECT ..."): ... """ return QueryContextManager(self) @property def db_transaction(self): """Return a context manager (`~trac.db.api.TransactionContextManager`) which can be used to obtain a writable database connection. Example:: with env.db_transaction as db: cursor = db.cursor() cursor.execute("UPDATE ...") Upon successful exit of the context, the context manager will commit the transaction. In case of nested contexts, only the outermost context performs a commit. However, should an exception happen, any context manager will perform a rollback. You should *not* call `commit()` yourself within such block, as this will force a commit even if that transaction is part of a larger transaction. Like for its read-only counterpart, you can directly execute a DML query on the `db`:: with env.db_transaction as db: db("UPDATE ...") :warning: after a `with env.db_transaction` as db` block, though the `db` variable is still available, you shouldn't use it as it might have been closed when exiting the context, if this context was the outermost context (`db_query` or `db_transaction`). If you don't need to manipulate the connection itself, this can also be simplified to:: env.db_transaction("UPDATE ...") """ return TransactionContextManager(self) def shutdown(self, tid=None): """Close the environment.""" from trac.versioncontrol.api import RepositoryManager RepositoryManager(self).shutdown(tid) DatabaseManager(self).shutdown(tid) if tid is None: log.shutdown(self.log) def create(self, options=[]): """Create the basic directory structure of the environment, initialize the database and populate the configuration file with default values. 
If options contains ('inherit', 'file'), default values will not be loaded; they are expected to be provided by that file or other options. :raises TracError: if the base directory of `path` does not exist. :raises TracError: if `path` exists and is not empty. """ base_dir = os.path.dirname(self.path) if not os.path.exists(base_dir): raise TracError( _( "Base directory '%(env)s' does not exist. Please create it " "and retry.", env=base_dir)) if os.path.exists(self.path) and os.listdir(self.path): raise TracError(_("Directory exists and is not empty.")) # Create the directory structure if not os.path.exists(self.path): os.mkdir(self.path) os.mkdir(self.htdocs_dir) os.mkdir(self.log_dir) os.mkdir(self.plugins_dir) os.mkdir(self.templates_dir) # Create a few files create_file(os.path.join(self.path, 'VERSION'), _VERSION + '\n') create_file( os.path.join(self.path, 'README'), 'This directory contains a Trac environment.\n' 'Visit https://trac.edgewall.org/ for more information.\n') # Setup the default configuration os.mkdir(self.conf_dir) config = Configuration(self.config_file_path) for section, name, value in options: config.set(section, name, value) config.save() self.setup_config() if not any((section, option) == ('inherit', 'file') for section, option, value in options): self.config.set_defaults(self) self.config.save() # Create the sample configuration create_file(self.config_file_path + '.sample') self._update_sample_config() # Create the database DatabaseManager(self).init_db() @lazy def database_version(self): """Returns the current version of the database. :since 1.0.2: """ return DatabaseManager(self) \ .get_database_version('database_version') @lazy def database_initial_version(self): """Returns the version of the database at the time of creation. In practice, for a database created before 0.11, this will return `False` which is "older" than any db version number. :since 1.0.2: """ return DatabaseManager(self) \ .get_database_version('initial_database_version') @lazy def trac_version(self): """Returns the version of Trac. :since: 1.2 """ from trac import core, __version__ return get_pkginfo(core).get('version', __version__) def setup_config(self): """Load the configuration file.""" self.config = Configuration(self.config_file_path, {'envname': self.name}) if not self.config.exists: raise TracError( _("The configuration file is not found at " "%(path)s", path=self.config_file_path)) self.setup_log() plugins_dir = self.shared_plugins_dir load_components(self, plugins_dir and (plugins_dir, )) @lazy def config_file_path(self): """Path of the trac.ini file.""" return os.path.join(self.conf_dir, 'trac.ini') @lazy def log_file_path(self): """Path to the log file.""" if not os.path.isabs(self.log_file): return os.path.join(self.log_dir, self.log_file) return self.log_file def _get_path_to_dir(self, *dirs): path = self.path for dir in dirs: path = os.path.join(path, dir) return os.path.realpath(path) @lazy def attachments_dir(self): """Absolute path to the attachments directory. :since: 1.3.1 """ return self._get_path_to_dir('files', 'attachments') @lazy def conf_dir(self): """Absolute path to the conf directory. :since: 1.0.11 """ return self._get_path_to_dir('conf') @lazy def files_dir(self): """Absolute path to the files directory. :since: 1.3.2 """ return self._get_path_to_dir('files') @lazy def htdocs_dir(self): """Absolute path to the htdocs directory. :since: 1.0.11 """ return self._get_path_to_dir('htdocs') @lazy def log_dir(self): """Absolute path to the log directory. 
:since: 1.0.11 """ return self._get_path_to_dir('log') @lazy def plugins_dir(self): """Absolute path to the plugins directory. :since: 1.0.11 """ return self._get_path_to_dir('plugins') @lazy def templates_dir(self): """Absolute path to the templates directory. :since: 1.0.11 """ return self._get_path_to_dir('templates') def setup_log(self): """Initialize the logging sub-system.""" self.log, log_handler = \ self.create_logger(self.log_type, self.log_file_path, self.log_level, self.log_format) self.log.addHandler(log_handler) self.log.info('-' * 32 + ' environment startup [Trac %s] ' + '-' * 32, self.trac_version) def create_logger(self, log_type, log_file, log_level, log_format): log_id = 'Trac.%s' % hashlib.sha1(self.path).hexdigest() if log_format: log_format = log_format.replace('$(', '%(') \ .replace('%(path)s', self.path) \ .replace('%(basename)s', self.name) \ .replace('%(project)s', self.project_name) return log.logger_handler_factory(log_type, log_file, log_level, log_id, format=log_format) def get_known_users(self, as_dict=False): """Returns information about all known users, i.e. users that have logged in to this Trac environment and possibly set their name and email. By default this function returns an iterator that yields one tuple for every user, of the form (username, name, email), ordered alpha-numerically by username. When `as_dict` is `True` the function returns a dictionary mapping username to a (name, email) tuple. :since 1.2: the `as_dict` parameter is available. """ return self._known_users_dict if as_dict else iter(self._known_users) @cached def _known_users(self): return self.db_query(""" SELECT DISTINCT s.sid, n.value, e.value FROM session AS s LEFT JOIN session_attribute AS n ON (n.sid=s.sid AND n.authenticated=1 AND n.name = 'name') LEFT JOIN session_attribute AS e ON (e.sid=s.sid AND e.authenticated=1 AND e.name = 'email') WHERE s.authenticated=1 ORDER BY s.sid """) @cached def _known_users_dict(self): return {u[0]: (u[1], u[2]) for u in self._known_users} def invalidate_known_users_cache(self): """Clear the known_users cache.""" del self._known_users del self._known_users_dict def backup(self, dest=None): """Create a backup of the database. :param dest: Destination file; if not specified, the backup is stored in a file called db_name.trac_version.bak """ return DatabaseManager(self).backup(dest) def needs_upgrade(self): """Return whether the environment needs to be upgraded.""" for participant in self.setup_participants: try: with self.component_guard(participant, reraise=True): if participant.environment_needs_upgrade(): self.log.warning( "Component %s requires an environment upgrade", participant) return True except Exception as e: raise TracError( _( "Unable to check for upgrade of " "%(module)s.%(name)s: %(err)s", module=participant.__class__.__module__, name=participant.__class__.__name__, err=exception_to_unicode(e))) return False def upgrade(self, backup=False, backup_dest=None): """Upgrade database. 
:param backup: whether or not to backup before upgrading :param backup_dest: name of the backup file :return: whether the upgrade was performed """ upgraders = [] for participant in self.setup_participants: with self.component_guard(participant, reraise=True): if participant.environment_needs_upgrade(): upgraders.append(participant) if not upgraders: return if backup: try: self.backup(backup_dest) except Exception as e: raise BackupError(e) for participant in upgraders: self.log.info("upgrading %s...", participant) with self.component_guard(participant, reraise=True): participant.upgrade_environment() # Database schema may have changed, so close all connections dbm = DatabaseManager(self) if dbm.connection_uri != 'sqlite::memory:': dbm.shutdown() self._update_sample_config() del self.database_version return True @lazy def href(self): """The application root path""" return Href(urlsplit(self.abs_href.base).path) @lazy def abs_href(self): """The application URL""" if not self.base_url: self.log.warning("[trac] base_url option not set in " "configuration, generated links may be incorrect") return Href(self.base_url) def _update_sample_config(self): filename = os.path.join(self.config_file_path + '.sample') if not os.path.isfile(filename): return config = Configuration(filename) config.set_defaults() try: config.save() except EnvironmentError as e: self.log.warning("Couldn't write sample configuration file (%s)%s", e, exception_to_unicode(e, traceback=True)) else: self.log.info( "Wrote sample configuration file with the new " "settings and their default values: %s", filename)
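# Minimal usage sketch for the Environment class above, assuming a Trac
# installation; '/path/to/env' and the option values are placeholders.
from trac.env import Environment

# Create a new environment with a couple of initial trac.ini settings...
env = Environment('/path/to/env', create=True,
                  options=[('project', 'name', 'Demo'),
                           ('trac', 'base_url', 'http://localhost/demo')])
env.shutdown()

# ...or open an existing one and use the documented helpers.
env = Environment('/path/to/env')
for sid, name, email in env.get_known_users():
    env.log.info("known user: %s <%s>", name or sid, email)
if env.needs_upgrade():
    env.upgrade(backup=True)
env.shutdown()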
class WhiteboardModule(Component): implements(INavigationContributor, IRequestHandler, IXMLRPCHandler) scope_element_weight_field = Option('itteco-whiteboard-tickets-config', 'scope_element_weight_field', 'business_value', "The ticket field that would be used for user story weight calculation") work_element_weight_field = Option('itteco-whiteboard-tickets-config', 'work_element_weight_field', 'complexity', "The ticket field that would be used for ticket weight calculation") team_members_provider = ExtensionOption('itteco-whiteboard-config', 'team_members_provider', ITeamMembersProvider, 'ConfigBasedTeamMembersProvider', doc="The component implementing a team member provider interface.") burndown_info_provider = ExtensionOption('itteco-whiteboard-config', 'burndown_info_provider', IBurndownInfoProvider, 'BuildBurndownInfoProvider', doc="The component implementing a burndown info provider interface.") _ticket_type_config = _old_ticket_config = _old_groups = _ticket_groups= None ticket_type_config = property(lambda self: self._get_ticket_config()) ticket_groups = property(lambda self: self._get_ticket_groups()) transitions = property(lambda self: self._get_ticket_transitions()) # INavigationContributor methods def get_active_navigation_item(self, req): return 'whiteboard' def get_navigation_items(self, req): if 'TICKET_VIEW' in req.perm: yield ('mainnav', 'whiteboard', tag.a(_('Whiteboard'), href=req.href.whiteboard(), accesskey=4)) # IRequestHandler methods def match_request(self, req): if req.path_info.startswith('/whiteboard'): path = req.path_info.split('/') path_len = len(path) if path_len>2: req.args['board_type'] = path[2] if path_len>3: req.args['milestone'] = path[3] return True def process_request(self, req): req.perm('ticket').require('TICKET_VIEW') board_type = req.args.get('board_type', 'team_tasks') milestone = req.args.get('milestone') if board_type == 'chart_settings': return self._chart_settings(milestone) else: board_type = _get_req_param(req, 'board_type', 'team_tasks') if board_type != req.args.get('board_type'): #boardtype was not implicitly selected, let's restore previos state req.redirect(req.href.whiteboard(board_type, milestone)) add_stylesheet(req, 'common/css/roadmap.css') add_stylesheet(req, 'itteco/css/common.css') add_jscript( req, [ 'stuff/ui/ui.core.js', 'stuff/ui/ui.draggable.js', 'stuff/ui/ui.droppable.js', 'stuff/ui/ui.resizable.js', 'stuff/ui/plugins/jquery.colorbox.js', 'stuff/plugins/jquery.rpc.js', 'custom_select.js', 'whiteboard2.js' ], IttecoEvnSetup(self.env).debug ) show_closed_milestones = req.args.get('show_closed_milestones', False) scope_item, work_item = self._get_wbitems_config(board_type) structured_milestones = StructuredMilestone.select(self.env, show_closed_milestones) if board_type == 'burndown': structured_milestones, _ignore = self._get_milestones_by_level(structured_milestones, 'Sprint', True) data ={ 'structured_milestones' : structured_milestones, 'current_board_type' : board_type, 'milestone' : milestone, 'milestone_levels': IttecoEvnSetup(self.env).milestone_levels, 'stats_config': self._get_stats_config(), 'show_closed_milestones': show_closed_milestones, 'wbconfig' : { 'rpcurl' : req.href.login("xmlrpc"), 'baseurl' : req.href(), 'workitem' : work_item, 'scopeitem': scope_item, 'groups': self.ticket_groups, 'transitions': self.transitions }, 'team' : self.team_members_provider and self.team_members_provider.get_team_members() or [], 'ticket_types' : work_item['types'] or [] } return 'itteco_whiteboard2.html', data, 'text/html' def 
_chart_settings(self, milestone): burndown_info = self.burndown_info_provider.metrics(milestone) mils =[] def flatten(mil): mils.append(mil) for kid in mil.kids: flatten(kid) flatten(StructuredMilestone(self.env, milestone)) fmt_date = lambda x: format_datetime(x, '%Y-%m-%dT%H:%M:%S') cvs_data = graphs = events = None if burndown_info: metrics, graphs = burndown_info def get_color(tkt_type): tkt_cfg = self.ticket_type_config if tkt_cfg: cfg = tkt_cfg.get(tkt_type) if cfg: return cfg.get('max_color') graphs = [{'name': graph, 'color': get_color(graph)} for graph in graphs] burndown_cvs_data = burnup_cvs_data = [] keys = ['burndown', 'approximation', 'ideal'] milestone_dates= dict([(mil.completed or mil.due, mil) for mil in mils ]) events =[] prev_burndown = metrics[0]['burndown'] prev_burnup = 0 def genitems(metric): yield fmt_date(metric['datetime']) for key in keys: yield str(metric.get(key,'')) for metric in metrics: ts = metric['datetime'] line = ",".join(genitems(metric)) burnup_sum = 0 burnup = metric.get('burnup',[]) for item in burnup: burnup_sum -= item line +=','+str(-1*item) if burnup: line +=','+str(burnup_sum) burndown_cvs_data.append(line) if ts in milestone_dates: mil = milestone_dates[ts] if mil.is_completed: del milestone_dates[ts] burndown = metric['burndown'] events.append({'datetime': fmt_date(mil.completed), 'extended': True, 'text': '"%s" completed\nBurndown delta %d\nBurnup delta %d.' \ % (mil.name, prev_burndown-burndown, prev_burnup-burnup_sum) , 'url': self.env.abs_href('milestone',mil.name)}) burndown_delta =0 prev_burnup = burnup_sum prev_burndown = burndown events.extend([{'datetime': fmt_date(mil.due), 'text': '"%s" is planned to be completed.' % mil.name , 'url': self.env.abs_href('milestone',mil.name)} for mil in milestone_dates.itervalues()]) cvs_data = "<![CDATA["+"\n".join(burndown_cvs_data)+"]]>" data = {'data': cvs_data, 'graphs': graphs, 'events': events} return 'iiteco_chart_settings.xml', data, 'text/xml' def _get_stats_config(self): all_statuses = set(TicketSystem(self.env).get_all_status()) remaining_statuses = set(all_statuses) groups = DefaultTicketGroupStatsProvider(self.env)._get_ticket_groups() catch_all_group = None for group in groups: status_str = group['status'].strip() if status_str == '*': if catch_all_group: raise TracError(_( "'%(group1)s' and '%(group2)s' milestone groups " "both are declared to be \"catch-all\" groups. " "Please check your configuration.", group1=group['name'], group2=catch_all_group['name'])) catch_all_group = group else: group_statuses = set([s.strip() for s in status_str.split(',')]) \ & all_statuses if group_statuses - remaining_statuses: raise TracError(_( "'%(groupname)s' milestone group reused status " "'%(status)s' already taken by other groups. 
" "Please check your configuration.", groupname=group['name'], status=', '.join(group_statuses - remaining_statuses))) else: remaining_statuses -= group_statuses group['statuses'] = group_statuses if catch_all_group: catch_all_group['statuses'] = remaining_statuses return groups # IXMLRPCHandler methods def xmlrpc_namespace(self): return 'whiteboard' def xmlrpc_methods(self): yield (None, ((list,), (list,dict)), self.query) def query(self, req, context={}): """ Returns all tickets that are to be rendered on a whiteboard.""" board = context.get('board', 'team_tasks') if board=='team_tasks': return self.query_tasks(req, context) elif board=='stories': return self.query_stories(req, context) def query_tasks(self, req, context): milestone_name = self._resolve_milestone(context.get('milestone'), context.get('show_sub_mils'), context.get('show_completed')) all_tkt_types = set([ticket_type.name for ticket_type in Type.select(self.env)]) scope_tkt_types = set([t for t in IttecoEvnSetup(self.env).scope_element]) workitem_tkt_types = all_tkt_types - scope_tkt_types \ - set([t for t in IttecoEvnSetup(self.env).excluded_element]) self.env.log.debug('workitem_tkt_types ="%s"' % (workitem_tkt_types,)) roots, ticket_ids = self._get_tickets_graph(req, milestone_name, (scope_tkt_types , workitem_tkt_types)) self.env.log.debug('roots ="%s"' % (roots,)) empty_scope_element = {'summary': 'Not assigned to any story'} not_assigned_work_items, _ignore = self._get_tickets_graph(req, milestone_name, (workitem_tkt_types,)) for ticket in not_assigned_work_items: if ticket['id'] not in ticket_ids: empty_scope_element.setdefault('references', []).append(ticket) roots.append(empty_scope_element) return roots def query_stories(self, req, context): level = context.get('level') all_milestones = StructuredMilestone.select(self.env, True) mils, mils_dict = self._get_milestones_by_level(all_milestones, level, context.get('show_completed')) milestones = [mil.name for mil in mils] +[''] fields = [ 'summary', 'description', 'owner', self.scope_element_weight_field, self.work_element_weight_field ] def milestone_as_dict(milestone): res = dict([(f, milestone.ticket[f]) for f in fields]) res.update( { 'id': milestone.name, 'references': [] } ) return res empty_scope_element = {'id': '', 'summary': 'Backlog (no milestone)','references': []} roots = [empty_scope_element] + [milestone_as_dict(m) for m in mils] milestone_by_name = dict([(m['id'], m) for m in roots]) scope_tkt_types = set([t for t in IttecoEvnSetup(self.env).scope_element]) tickets, ticket_ids = self._get_tickets_graph(req, milestones, (scope_tkt_types,)) self.env.log.debug('roots ="%s"' % (roots,)) for ticket in tickets: root = milestone_by_name.get(ticket['milestone'],empty_scope_element) root['references'].append(ticket) return roots def _get_tickets_graph(self, req, milestones, type_groups): db = self.env.get_db_cnx() mils = isinstance(milestones, basestring) and [milestones] or list(milestones) if '' in mils: mils +=[None] all_requested_fields = \ self._get_ticket_fields( [ 'summary', 'description', 'milestone', 'owner', self.scope_element_weight_field, self.work_element_weight_field ] ) all_ids = [] roots = back_trace = None for types in type_groups: if roots is not None and not back_trace: #we do not have ids for none root of the graph break filters = { 'milestone' : mils, 'type' : types } if back_trace is not None: filters['id']= back_trace.keys() tickets = apply_ticket_permissions( self.env, req, get_tickets_by_filter(db, all_requested_fields, **filters)) 
all_ids.extend([ticket['id'] for ticket in tickets]) if roots is None: roots = list(tickets) if back_trace: for ticket in tickets: referers = back_trace[ticket['id']] if referers: for referer in referers: referer.setdefault('references',[]).append(ticket); back_trace = defaultdict(list) if tickets: tickets_by_id =dict((tkt['id'], tkt) for tkt in tickets) src_ids = tickets_by_id.keys() cursor = db.cursor() cursor.execute("SELECT dest, src FROM tkt_links WHERE dest IN (%s)" % (len(src_ids)*"%s,")[:-1], src_ids) for dest, src in cursor: back_trace[src].append(tickets_by_id[dest]) return roots, all_ids def _get_ticket_fields(self, names, default = None): if not names: return default names = isinstance(names, basestring) and [name.strip() for name in names.split(',')] or names return [field for field in TicketSystem(self.env).get_ticket_fields() \ if field['name'] in names] def _resolve_milestone(self, name, include_kids, show_completed): def _flatten_and_get_names(mil, include_kids, show_completed): names= [] if mil: mil = isinstance(mil, StructuredMilestone) and [mil,] or mil for m in mil: if show_completed or not m.completed: names.append(m.name) if include_kids: names.extend(_flatten_and_get_names(m.kids, include_kids, show_completed)) return names if name=='nearest': db = self.env.get_db_cnx() cursor = db.cursor() cursor.execute( 'SELECT name FROM milestone WHERE due>%s ORDER BY due LIMIT 1', \ (to_timestamp(datetime.now(utc)),)) row = cursor.fetchone() name=row and row[0] or 'none' elif name=='not_completed_milestones': return _flatten_and_get_names(StructuredMilestone.select(self.env, False), \ include_kids, show_completed) if name=='none': return '' try: mil = StructuredMilestone(self.env, name) names = _flatten_and_get_names(mil, include_kids, show_completed) if not names: names = mil.name return names except ResourceNotFound: return '' def _get_milestones_by_level(self, mils_tree, level_name, include_completed): mils =[] mils_dict={} def filter_mils(mil, force_add=False): mils_dict[mil.name] = mil if mil.level['label']==level_name: if not mil.is_completed or include_completed: mils.append(mil) for kid in mil.kids: filter_mils(kid, True) else: for kid in mil.kids: filter_mils(kid, force_add) for mil in mils_tree: filter_mils(mil) return (mils, mils_dict) def _get_wbitems_config(self, board_type): milestoneitem = { 'realm' : 'milestone', 'types' : [milestone_ticket_type], 'weight': self.work_element_weight_field, 'weightlabel' : 'CP' } scopeitem = { 'realm' : 'ticket', 'types' : [t for t in IttecoEvnSetup(self.env).scope_element], 'weight': self.scope_element_weight_field, 'weightlabel' : 'BV' } def read_options(fname): options = [ f['options'] \ for f in TicketSystem(self.env).get_ticket_fields() \ if f['name']==fname ] if options: return options[0] workitem = { 'realm' : 'ticket', 'types' : [t for t in IttecoEvnSetup(self.env).work_element], 'weight': self.work_element_weight_field, 'weightlabel' : 'CP', 'options' : read_options(self.work_element_weight_field) } if board_type=='team_tasks': return (scopeitem, workitem) else: return (milestoneitem, scopeitem) def _get_ticket_transitions(self): groups_config = self.env.config['itteco-whiteboard-groups'] if self._old_groups!=groups_config or self._transitions is None: actions = ConfigurableTicketWorkflow(self.env).actions transitions = [ { 'newstatus': act_info['newstate'], 'action': act_id, 'oldstatuses':act_info['oldstates'] } \ for act_id, act_info in actions.iteritems() if act_id!='_reset' ] self.env.log.debug('transitions="%s"' % 
transitions) self._transitions = transitions return self._transitions def _get_ticket_config(self): ticket_config = self.env.config['itteco-whiteboard-tickets-config'] if self._old_ticket_config!=ticket_config: default_fields = ticket_config.getlist('default_fields') show_workflow = ticket_config.getbool('show_workflow') allowed_tkt_types = [ type.name for type in Type.select(self.env)] _ticket_type_config = {} for option in ticket_config: try: tkt_type, prop = option.split('.',1) if tkt_type and ( tkt_type in allowed_tkt_types or \ tkt_type[0]=='$'): _ticket_type_config.setdefault( tkt_type, { 'fields' : default_fields, 'workflow' : show_workflow } )[prop] = ticket_config.get(option) except ValueError : pass scope_types = IttecoEvnSetup(self.env).scope_element scope_element_field_name = self.scope_element_weight_field work_types = IttecoEvnSetup(self.env).work_element work_element_field_name = self.work_element_weight_field for type in allowed_tkt_types: if type not in _ticket_type_config: _ticket_type_config[type]={'fields':default_fields, 'workflow' : show_workflow} for type in _ticket_type_config.iterkeys(): _ticket_type_config[type]['weight_field_name'] = \ type in scope_types and scope_element_field_name or work_element_field_name _ticket_type_config[type]['fields']=self._get_ticket_fields( _ticket_type_config[type].get('fields'), []) self._ticket_type_config = _ticket_type_config self._old_ticket_config=ticket_config return self._ticket_type_config def _get_ticket_fields(self, names, default = None): if not names: return default names = isinstance(names, basestring) and [name.strip() for name in names.split(',')] or names fields = [field for field in TicketSystem(self.env).get_ticket_fields() \ if field['name'] in names] return sorted(fields, key = lambda field: names.index(field['name'])) def _get_ticket_groups(self): groups_config = self.env.config['itteco-whiteboard-groups'] if self._old_groups!=groups_config: self._transitions = None def get_group_options(group_name): opts ={'name': group_name} for options, accessor in [(('accordion',), groups_config.get), \ (('status',),groups_config.getlist)]: for opt in options: opts[opt] = accessor('group.%s.%s' % (group_name, opt)) return opts self._old_groups=groups_config self._ticket_groups=[get_group_options(gr_name) for gr_name in groups_config.getlist('groups', keep_empty=False)] self.env.log.debug('ticket_groups="%s"' % self._ticket_groups) return self._ticket_groups def get_new_ticket_descriptor(self, types, tkt_id=None): if tkt_id and tkt_id!='new': ticket = Ticket(self.env, tkt_id) if not ticket.exists: raise TracError(_(" Ticket with id '%(ticket)s does not exit", ticket= tkt_id)) types = [ticket['type']] else: ticket = Ticket(self.env) ticket.id = 'new' common_descriptor = {'ticket' : ticket} if types: for type in types: cfg = self.ticket_type_config.get(type, {}) common_descriptor.setdefault('fields',[]). \ extend(cfg.get('fields',[])) common_descriptor['workflow'] = common_descriptor.get('workflow',False) \ or cfg.get('workflow') or False unique_fields = [] found_names = [] for field in common_descriptor['fields']: if field['name'] not in found_names: found_names.append(field['name']) unique_fields.append(field) common_descriptor['fields'] = unique_fields return common_descriptor
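# Illustrative sketch, not plugin code: the whiteboard group handling above
# assigns each workflow status to the first group that claims it and gives
# whatever is left to a catch-all group configured with status '*'.  The
# standalone function below reproduces that idea with plain data structures;
# the group and status names in the usage comment are made-up examples, not
# values read from trac.ini.

def assign_statuses_to_groups(groups, all_statuses):
    """Attach a concrete 'statuses' set to each group; '*' is the catch-all."""
    remaining = set(all_statuses)
    catch_all = None
    for group in groups:
        wanted = set(group.get('status', []))
        if '*' in wanted:
            catch_all = group
            continue
        group['statuses'] = wanted & remaining
        remaining -= wanted
    if catch_all is not None:
        catch_all['statuses'] = remaining
    return groups

# Example (hypothetical board configuration):
#   assign_statuses_to_groups(
#       [{'name': 'todo', 'status': ['new', 'accepted']},
#        {'name': 'in_progress', 'status': ['assigned']},
#        {'name': 'done', 'status': ['*']}],
#       ['new', 'accepted', 'assigned', 'closed'])
#   -> the 'done' group ends up with set(['closed'])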
class PygmentsRenderer(Component): """Syntax highlighting based on Pygments.""" implements(IHTMLPreviewRenderer, IRequestHandler, IRequestFilter, IWikiMacroProvider, ITemplateProvider) default_style = Option( 'mimeviewer', 'pygments_default_style', 'trac', """The default style to use for Pygments syntax highlighting.""") pygments_modes = ListOption( 'mimeviewer', 'pygments_modes', '', doc="""List of additional MIME types known by Pygments. For each, a tuple `mimetype:mode:quality` has to be specified, where `mimetype` is the MIME type, `mode` is the corresponding Pygments mode to be used for the conversion and `quality` is the quality ratio associated to this conversion. That can also be used to override the default quality ratio used by the Pygments render.""") expand_tabs = True returns_source = True QUALITY_RATIO = 7 EXAMPLE = """<!DOCTYPE html> <html lang="en"> <head> <title>Hello, world!</title> <script> $(document).ready(function() { $("h1").fadeIn("slow"); }); </script> </head> <body> <h1>Hello, world!</h1> </body> </html>""" def __init__(self): self.log.debug("Pygments installed? %r", have_pygments) if have_pygments: version = getattr(pygments, '__version__', None) if version: self.log.debug('Pygments Version: %s' % version) self._types = None # IHTMLPreviewRenderer implementation def get_quality_ratio(self, mimetype): # Extend default MIME type to mode mappings with configured ones self._init_types() try: return self._types[mimetype][1] except KeyError: return 0 def render(self, req, mimetype, content, filename=None, rev=None): self._init_types() try: mimetype = mimetype.split(';', 1)[0] language = self._types[mimetype][0] return self._highlight(language, content, True) except (KeyError, ValueError): raise Exception("No Pygments lexer found for mime-type '%s'." 
% mimetype) # IWikiMacroProvider implementation def get_macros(self): self._init_types() return self._languages.keys() def get_macro_description(self, name): self._init_types() return 'Syntax highlighting for %s using Pygments' % self._languages[ name] def render_macro(self, req, name, content): self._init_types() return self._highlight(name, content, False) # IRequestFilter def pre_process_request(self, req, handler): return handler def post_process_request(self, req, template, content_type): if not getattr(req, '_no_pygments_stylesheet', False): add_link( req, 'stylesheet', self.env.href( 'pygments', '%s.css' % req.session.get('pygments_style', self.default_style))) return template, content_type # IRequestHandler implementation def match_request(self, req): if have_pygments: if re.match(r'/pygments/?$', req.path_info): return True match = re.match(r'/pygments/(\w+)\.css$', req.path_info) if match: try: req.args['style'] = get_style_by_name(match.group(1)) except ValueError: return False return True return False def process_request(self, req): # settings panel if not 'style' in req.args: req._no_pygments_stylesheet = True styles = list(get_all_styles()) styles.sort(lambda a, b: cmp(a.lower(), b.lower())) if req.method == 'POST': style = req.args.get('new_style') if style and style in styles: req.session['pygments_style'] = style output = self._highlight('html', self.EXAMPLE, False) req.hdf['output'] = Markup(output) req.hdf['current'] = req.session.get('pygments_style', self.default_style) req.hdf['styles'] = styles req.hdf['pygments_path'] = self.env.href.pygments() return 'pygments_settings.cs', None # provide stylesheet else: style = req.args['style'] parts = style.__module__.split('.') filename = resource_filename('.'.join(parts[:-1]), parts[-1] + '.py') mtime = datetime.utcfromtimestamp(os.path.getmtime(filename)) last_modified = http_date(time.mktime(mtime.timetuple())) if last_modified == req.get_header('If-Modified-Since'): req.send_response(304) req.end_headers() return formatter = HtmlFormatter(style=style) content = u'\n\n'.join([ formatter.get_style_defs('div.code pre'), formatter.get_style_defs('table.code td') ]).encode('utf-8') req.send_response(200) req.send_header('Content-Type', 'text/css; charset=utf-8') req.send_header('Last-Modified', last_modified) req.send_header('Content-Length', len(content)) req.write(content) # ITemplateProvider methods def get_templates_dirs(self): return [resource_filename(__name__, 'templates')] def get_htdocs_dirs(self): return () # Internal methods def _init_types(self): if self._types is None: self._types = {} self._languages = {} if have_pygments: for name, aliases, _, mimetypes in get_all_lexers(): for mimetype in mimetypes: self._types[mimetype] = (aliases[0], self.QUALITY_RATIO) for alias in aliases: self._languages[alias] = name self._types.update( Mimeview(self.env).configured_modes_mapping('pygments')) def _highlight(self, language, content, annotate): formatter = HtmlFormatter(cssclass=not annotate and 'code' or '') html = pygments.highlight(content, get_lexer_by_name(language), formatter).rstrip('\n') if annotate: return html[len('<div><pre>'):-len('</pre></div>')].splitlines() return html
    def __init__(self, section, name, choices, doc=''):
        Option.__init__(self, section, name, _to_utf8(choices[0]), doc)
        self.choices = set(_to_utf8(choice).strip() for choice in choices)
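# Illustrative sketch, not part of the original file: assuming the
# ChoiceOption above is exposed via trac.config (RequestDispatcher below uses
# it for default_date_format), a component can declare an option whose value
# is restricted to a fixed set of choices; the first entry of the tuple
# becomes the default.  The section, option and component names here are
# hypothetical.

from trac.core import Component
from trac.config import ChoiceOption

class ExampleChoiceUser(Component):

    color_scheme = ChoiceOption(
        'example', 'color_scheme', ('light', 'dark', 'auto'),
        """Preferred color scheme; defaults to the first choice, 'light'.""")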
class RequestDispatcher(Component): """Web request dispatcher. This component dispatches incoming requests to registered handlers. Besides, it also takes care of user authentication and request pre- and post-processing. """ required = True implements(ITemplateProvider) authenticators = ExtensionPoint(IAuthenticator) handlers = ExtensionPoint(IRequestHandler) filters = OrderedExtensionsOption( 'trac', 'request_filters', IRequestFilter, doc="""Ordered list of filters to apply to all requests.""") default_handler = ExtensionOption( 'trac', 'default_handler', IRequestHandler, 'WikiModule', """Name of the component that handles requests to the base URL. Options include `TimelineModule`, `RoadmapModule`, `BrowserModule`, `QueryModule`, `ReportModule`, `TicketModule` and `WikiModule`.""") default_timezone = Option('trac', 'default_timezone', '', """The default timezone to use""") default_language = Option( 'trac', 'default_language', '', """The preferred language to use if no user preference has been set. (''since 0.12.1'') """) default_date_format = ChoiceOption( 'trac', 'default_date_format', ('', 'iso8601'), """The date format. Valid options are 'iso8601' for selecting ISO 8601 format, or leave it empty which means the default date format will be inferred from the browser's default language. (''since 1.0'') """) use_xsendfile = BoolOption( 'trac', 'use_xsendfile', 'false', """When true, send a `X-Sendfile` header and no content when sending files from the filesystem, so that the web server handles the content. This requires a web server that knows how to handle such a header, like Apache with `mod_xsendfile` or lighttpd. (''since 1.0'') """) xsendfile_header = Option( 'trac', 'xsendfile_header', 'X-Sendfile', """The header to use if `use_xsendfile` is enabled. If Nginx is used, set `X-Accel-Redirect`. (''since 1.0.6'')""") # Public API def authenticate(self, req): for authenticator in self.authenticators: try: authname = authenticator.authenticate(req) except TracError as e: self.log.error("Can't authenticate using %s: %s", authenticator.__class__.__name__, exception_to_unicode(e, traceback=True)) add_warning( req, _("Authentication error. " "Please contact your administrator.")) break # don't fallback to other authenticators if authname: return authname return 'anonymous' def dispatch(self, req): """Find a registered handler that matches the request and let it process it. In addition, this method initializes the data dictionary passed to the the template and adds the web site chrome. """ self.log.debug('Dispatching %r', req) chrome = Chrome(self.env) try: # Select the component that should handle the request chosen_handler = None for handler in self._request_handlers.values(): if handler.match_request(req): chosen_handler = handler break if not chosen_handler and req.path_info in ('', '/'): chosen_handler = self._get_valid_default_handler(req) # pre-process any incoming request, whether a handler # was found or not self.log.debug("Chosen handler is %s", chosen_handler) chosen_handler = self._pre_process_request(req, chosen_handler) if not chosen_handler: if req.path_info.endswith('/'): # Strip trailing / and redirect target = unicode_quote(req.path_info.rstrip('/')) if req.query_string: target += '?' 
+ req.query_string req.redirect(req.href + target, permanent=True) raise HTTPNotFound('No handler matched request to %s', req.path_info) req.callbacks['chrome'] = partial(chrome.prepare_request, handler=chosen_handler) # Protect against CSRF attacks: we validate the form token # for all POST requests with a content-type corresponding # to form submissions if req.method == 'POST': ctype = req.get_header('Content-Type') if ctype: ctype, options = cgi.parse_header(ctype) if ctype in ('application/x-www-form-urlencoded', 'multipart/form-data') and \ req.args.get('__FORM_TOKEN') != req.form_token: if self.env.secure_cookies and req.scheme == 'http': msg = _('Secure cookies are enabled, you must ' 'use https to submit forms.') else: msg = _('Do you have cookies enabled?') raise HTTPBadRequest( _('Missing or invalid form token.' ' %(msg)s', msg=msg)) # Process the request and render the template resp = chosen_handler.process_request(req) if resp: if len(resp) == 2: # old Clearsilver template and HDF data self.log.error( "Clearsilver template are no longer " "supported (%s)", resp[0]) raise TracError( _("Clearsilver templates are no longer supported, " "please contact your Trac administrator.")) # Genshi template, data, content_type, method = \ self._post_process_request(req, *resp) if 'hdfdump' in req.args: req.perm.require('TRAC_ADMIN') # debugging helper - no need to render first out = io.BytesIO() pprint(data, out) req.send(out.getvalue(), 'text/plain') self.log.debug("Rendering response from handler") output = chrome.render_template( req, template, data, content_type, method=method, iterable=chrome.use_chunked_encoding) req.send(output, content_type or 'text/html') else: self.log.debug("Empty or no response from handler. " "Entering post_process_request.") self._post_process_request(req) except RequestDone: raise except Exception as e: # post-process the request in case of errors err = sys.exc_info() try: self._post_process_request(req) except RequestDone: raise except TracError as e2: self.log.warning( "Exception caught while post-processing" " request: %s", exception_to_unicode(e2)) except Exception as e2: if not (type(e) is type(e2) and e.args == e2.args): self.log.error( "Exception caught while post-processing" " request: %s", exception_to_unicode(e2, traceback=True)) if isinstance(e, PermissionError): raise HTTPForbidden(e) if isinstance(e, ResourceNotFound): raise HTTPNotFound(e) if isinstance(e, NotImplementedError): tb = traceback.extract_tb(err[2])[-1] self.log.warning("%s caught from %s:%d in %s: %s", e.__class__.__name__, tb[0], tb[1], tb[2], to_unicode(e) or "(no message)") raise HTTPInternalError(TracNotImplementedError(e)) if isinstance(e, TracError): raise HTTPInternalError(e) raise err[0], err[1], err[2] # ITemplateProvider methods def get_htdocs_dirs(self): return [] def get_templates_dirs(self): return [pkg_resources.resource_filename('trac.web', 'templates')] # Internal methods def set_default_callbacks(self, req): """Setup request callbacks for lazily-evaluated properties. 
""" req.callbacks.update({ 'authname': self.authenticate, 'chrome': Chrome(self.env).prepare_request, 'form_token': self._get_form_token, 'lc_time': self._get_lc_time, 'locale': self._get_locale, 'perm': self._get_perm, 'session': self._get_session, 'tz': self._get_timezone, 'use_xsendfile': self._get_use_xsendfile, 'xsendfile_header': self._get_xsendfile_header, }) @lazy def _request_handlers(self): return dict( (handler.__class__.__name__, handler) for handler in self.handlers) def _get_valid_default_handler(self, req): # Use default_handler from the Session if it is a valid value. name = req.session.get('default_handler') handler = self._request_handlers.get(name) if handler and not is_valid_default_handler(handler): handler = None if not handler: # Use default_handler from project configuration. handler = self.default_handler if not is_valid_default_handler(handler): raise ConfigurationError( tag_( "%(handler)s is not a valid default handler. Please " "update %(option)s through the %(page)s page or by " "directly editing trac.ini.", handler=tag.code(handler.__class__.__name__), option=tag.code("[trac] default_handler"), page=tag.a(_("Basic Settings"), href=req.href.admin('general/basics')))) return handler def _get_perm(self, req): if isinstance(req.session, FakeSession): return FakePerm() else: return PermissionCache(self.env, req.authname) def _get_session(self, req): try: return Session(self.env, req) except TracError as e: msg = "can't retrieve session: %s" if isinstance(e, TracValueError): self.log.warning(msg, e) else: self.log.error(msg, exception_to_unicode(e)) return FakeSession() def _get_locale(self, req): if has_babel: preferred = req.session.get('language') default = self.default_language negotiated = get_negotiated_locale([preferred, default] + req.languages) self.log.debug("Negotiated locale: %s -> %s", preferred, negotiated) return negotiated def _get_lc_time(self, req): lc_time = req.session.get('lc_time') if not lc_time or lc_time == 'locale' and not has_babel: lc_time = self.default_date_format if lc_time == 'iso8601': return 'iso8601' return req.locale def _get_timezone(self, req): try: return timezone( req.session.get('tz', self.default_timezone or 'missing')) except Exception: return localtz def _get_form_token(self, req): """Used to protect against CSRF. The 'form_token' is strong shared secret stored in a user cookie. By requiring that every POST form to contain this value we're able to protect against CSRF attacks. Since this value is only known by the user and not by an attacker. If the the user does not have a `trac_form_token` cookie a new one is generated. 
""" if 'trac_form_token' in req.incookie: return req.incookie['trac_form_token'].value else: req.outcookie['trac_form_token'] = hex_entropy(24) req.outcookie['trac_form_token']['path'] = req.base_path or '/' if self.env.secure_cookies: req.outcookie['trac_form_token']['secure'] = True req.outcookie['trac_form_token']['httponly'] = True return req.outcookie['trac_form_token'].value def _get_use_xsendfile(self, req): return self.use_xsendfile # RFC7230 3.2 Header Fields _xsendfile_header_re = re.compile(r"[-0-9A-Za-z!#$%&'*+.^_`|~]+\Z") _warn_xsendfile_header = False def _get_xsendfile_header(self, req): header = self.xsendfile_header.strip() if self._xsendfile_header_re.match(header): return to_utf8(header) else: if not self._warn_xsendfile_header: self._warn_xsendfile_header = True self.log.warn("[trac] xsendfile_header is invalid: '%s'", header) return None def _pre_process_request(self, req, chosen_handler): for filter_ in self.filters: chosen_handler = filter_.pre_process_request(req, chosen_handler) return chosen_handler def _post_process_request(self, req, *args): resp = args # `method` is optional in IRequestHandler's response. If not # specified, the default value is appended to response. if len(resp) == 3: resp += (None, ) nbargs = len(resp) for f in reversed(self.filters): # As the arity of `post_process_request` has changed since # Trac 0.10, only filters with same arity gets passed real values. # Errors will call all filters with None arguments, # and results will not be not saved. extra_arg_count = arity(f.post_process_request) - 1 if extra_arg_count == nbargs: resp = f.post_process_request(req, *resp) elif extra_arg_count == nbargs - 1: # IRequestFilters may modify the `method`, but the `method` # is forwarded when not accepted by the IRequestFilter. method = resp[-1] resp = f.post_process_request(req, *resp[:-1]) resp += (method, ) elif nbargs == 0: f.post_process_request(req, *(None, ) * extra_arg_count) return resp
class TagInputAutoComplete(TagTemplateProvider): """[opt] Provides auto-complete functionality for tag input fields. This module is based on KeywordSuggestModule from KeywordSuggestPlugin 0.5dev. """ implements(IRequestFilter) field_opt = Option('tags', 'complete_field', 'keywords', "Ticket field to which a drop-down tag list should be attached.") help_opt = Option('tags', 'ticket_help', None, "If specified, 'keywords' label on ticket view will be turned into a " "link to this URL.") help_new_window_opt = BoolOption('tags', 'ticket_help_newwindow', False, "If true and keywords_help specified, wiki page will open in a new " "window. Default is false.") # Needs to be reimplemented, refs th:#8141. #mustmatch = BoolOption('tags', 'complete_mustmatch', False, # "If true, input fields accept values from the word list only.") match_contains_opt = BoolOption('tags', 'complete_matchcontains', True, "Include partial matches in suggestion list. Default is true.") separator_opt = Option('tags', 'separator', ' ', "Character(s) to use as separators between tags. Default is a " "single whitespace.") sticky_tags_opt = ListOption('tags', 'complete_sticky_tags', '', ',', doc="A list of comma separated values available for input.") def __init__(self): self.tags_enabled = self.env.is_enabled(TagSystem) @property def separator(self): return self.separator_opt.strip('\'') or ' ' # IRequestFilter methods def pre_process_request(self, req, handler): return handler def post_process_request(self, req, template, data, content_type): if template == 'ticket.html' or \ (self.tags_enabled and template == 'wiki_edit.html'): keywords = self._get_keywords(req) if not keywords: self.log.debug("No keywords found. TagInputAutoComplete is " "disabled.") return template, data, content_type def add_autocomplete_script(field, help_href=None): match_from_start = '' if self.match_contains_opt else '"^" +' add_script_data(req, tags={ 'autocomplete_field': field, 'keywords': keywords, 'match_from_start': match_from_start, 'separator': self.separator, 'help_href': help_href, 'help_new_window': self.help_new_window_opt, }) add_script(req, 'tags/js/autocomplete_tags.js') if template == 'ticket.html': help_href = self._get_help_href(req) add_autocomplete_script('field-' + self.field_opt, help_href) elif self.tags_enabled and template == 'wiki_edit.html': add_autocomplete_script('tags') Chrome(self.env).add_jquery_ui(req) return template, data, content_type # Private methods def _get_keywords(self, req): keywords = set(self.sticky_tags_opt) # prevent duplicates if self.tags_enabled: # Use TagsPlugin >= 0.7 performance-enhanced API. tags = TagSystem(self.env).get_all_tags(req) keywords.update(tags.keys()) return sorted(keywords) if keywords else [] def _get_help_href(self, req): if not self.help_opt: return None link = resource_id = None if self.help_opt.startswith('/'): # Assume valid URL to arbitrary resource inside # of the current Trac environment. link = req.href(self.help_opt) if not link and ':' in self.help_opt: realm, resource_id = self.help_opt.split(':', 1) # Validate realm-like prefix against resource realm list, # but exclude 'wiki' to allow deferred page creation. rsys = ResourceSystem(self.env) if realm in set(rsys.get_known_realms()) - set('wiki'): mgr = rsys.get_resource_manager(realm) # Handle optional IResourceManager method gracefully. try: if mgr.resource_exists(Resource(realm, resource_id)): link = mgr.get_resource_url(resource_id, req.href) except AttributeError: # Assume generic resource URL build rule. 
                    link = req.href(realm, resource_id)
        if not link:
            if not resource_id:
                # Assume wiki page name for backwards-compatibility.
                resource_id = self.help_opt
            if '#' in resource_id:
                path, anchor = resource_id.split('#', 1)
                anchor = unicode_quote_plus(anchor, safe="?!~*'()")
                link = '#'.join((req.href.wiki(path), anchor))
            else:
                link = req.href.wiki(resource_id)
        return link
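# Illustrative sketch, standard library only: the anchor handling in
# _get_help_href() above splits a configured value like 'TagsHelp#Using tags'
# into a wiki path and a URL-quoted fragment before joining them back into a
# link.  req.href.wiki() is replaced here by a plain string prefix; the page
# name is a made-up example.

from urllib import quote_plus

def wiki_help_link(resource_id, wiki_base='/wiki'):
    if '#' in resource_id:
        path, anchor = resource_id.split('#', 1)
        anchor = quote_plus(anchor.encode('utf-8'), safe="?!~*'()")
        return '#'.join(('%s/%s' % (wiki_base, path), anchor))
    return '%s/%s' % (wiki_base, resource_id)

# wiki_help_link(u'TagsHelp#Using tags')  ->  '/wiki/TagsHelp#Using+tags'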
class EmailDistributor(Component): implements(IAnnouncementDistributor, IAnnouncementPreferenceProvider) formatters = ExtensionPoint(IAnnouncementFormatter) resolvers = OrderedExtensionsOption('announcer', 'email_address_resolvers', IAnnouncementAddressResolver, 'SpecifiedEmailResolver, '\ 'SessionEmailResolver, DefaultDomainEmailResolver', doc="""Comma seperated list of email resolver components in the order they will be called. If an email address is resolved, the remaining resolvers will no be called.""") smtp_enabled = BoolOption('announcer', 'smtp_enabled', 'false', """Enable SMTP (email) notification.""") smtp_debuglevel = IntOption('announcer', 'smtp_debuglevel', 0, """Debug level to pass to smtp python lib""") smtp_server = Option( 'announcer', 'smtp_server', 'localhost', """SMTP server hostname to use for email notifications.""") smtp_port = IntOption( 'announcer', 'smtp_port', 25, """SMTP server port to use for email notification.""") smtp_user = Option('announcer', 'smtp_user', '', """Username for SMTP server. (''since 0.9'').""") smtp_password = Option('announcer', 'smtp_password', '', """Password for SMTP server. (''since 0.9'').""") smtp_from = Option('announcer', 'smtp_from', 'trac@localhost', """Sender address to use in notification emails.""") smtp_ssl = BoolOption('announcer', 'smtp_ssl', 'false', doc="""Use ssl for smtp connection.""") smtp_from_name = Option('announcer', 'smtp_from_name', '', """Sender name to use in notification emails.""") smtp_replyto = Option( 'announcer', 'smtp_replyto', 'trac@localhost', """Reply-To address to use in notification emails.""") smtp_always_cc = Option( 'announcer', 'smtp_always_cc', '', """Email address(es) to always send notifications to, addresses can be see by all recipients (Cc:).""") smtp_always_bcc = Option( 'announcer', 'smtp_always_bcc', '', """Email address(es) to always send notifications to, addresses do not appear publicly (Bcc:). (''since 0.10'').""") ignore_domains = Option( 'announcer', 'ignore_domains', '', """Comma-separated list of domains that should not be considered part of email addresses (for usernames with Kerberos domains)""") admit_domains = Option( 'announcer', 'admit_domains', '', """Comma-separated list of domains that should be considered as valid for email addresses (such as localdomain)""") mime_encoding = Option( 'announcer', 'mime_encoding', 'base64', """Specifies the MIME encoding scheme for emails. Valid options are 'base64' for Base64 encoding, 'qp' for Quoted-Printable, and 'none' for no encoding. Note that the no encoding means that non-ASCII characters in text are going to cause problems with notifications (''since 0.10'').""") use_public_cc = BoolOption( 'announcer', 'use_public_cc', 'false', """Recipients can see email addresses of other CC'ed recipients. If this option is disabled (the default), recipients are put on BCC (''since 0.10'').""") use_short_addr = BoolOption( 'announcer', 'use_short_addr', 'false', """Permit email address without a host/domain (i.e. username only) The SMTP server should accept those addresses, and either append a FQDN or use local delivery (''since 0.10'').""") use_tls = BoolOption( 'announcer', 'use_tls', 'false', """Use SSL/TLS to send notifications (''since 0.10'').""") set_message_id = BoolOption( 'announcer', 'set_message_id', 'true', """Disable if you would prefer to let the email server handle message-id generation. 
""") smtp_subject_prefix = Option( 'announcer', 'smtp_subject_prefix', '__default__', """Text to prepend to subject line of notification emails. If the setting is not defined, then the [$project_name] prefix. If no prefix is desired, then specifying an empty option will disable it.(''since 0.10.1'').""") smtp_to = Option('announcer', 'smtp_to', None, 'Default To: field') use_threaded_delivery = BoolOption( 'announcer', 'use_threaded_delivery', 'false', """If true, the actual delivery of the message will occur in a separate thread. Enabling this will improve responsiveness for requests that end up with an announcement being sent over email. It requires building Python with threading support enabled-- which is usually the case. To test, start Python and type 'import threading' to see if it raises an error.""") default_email_format = Option( 'announcer', 'default_email_format', 'text/plain', doc="""The default mime type of the email notifications. This can be overriden on a per user basis through the announcer preferences panel.""") def __init__(self): self.delivery_queue = None self._init_pref_encoding() def get_delivery_queue(self): if not self.delivery_queue: self.delivery_queue = Queue.Queue() thread = DeliveryThread(self.delivery_queue, self._transmit) thread.start() return self.delivery_queue # IAnnouncementDistributor def get_distribution_transport(self): return "email" def formats(self, transport, realm): "Find valid formats for transport and realm" formats = {} for f in self.formatters: if f.get_format_transport() == transport: if realm in f.get_format_realms(transport): styles = f.get_format_styles(transport, realm) for style in styles: formats[style] = f self.log.debug( "EmailDistributor has found the following formats capable " "of handling '%s' of '%s': %s" % (transport, realm, ', '.join(formats.keys()))) if not formats: self.log.error("EmailDistributor is unable to continue " \ "without supporting formatters.") return formats def distribute(self, transport, recipients, event): if not self.smtp_enabled or \ transport != self.get_distribution_transport(): self.log.debug("EmailDistributer smtp_enabled set to false") return fmtdict = self.formats(transport, event.realm) if not fmtdict: self.log.error("EmailDistributer No formats found for %s %s" % (transport, event.realm)) return msgdict = {} for name, authed, addr in recipients: fmt = name and \ self._get_preferred_format(event.realm, name, authed) or \ self._get_default_format() if fmt not in fmtdict: self.log.debug(("EmailDistributer format %s not available" + "for %s %s, looking for an alternative") % (fmt, transport, event.realm)) # If the fmt is not available for this realm, then try to find # an alternative oldfmt = fmt fmt = None for f in fmtdict.values(): fmt = f.get_format_alternative(transport, event.realm, oldfmt) if fmt: break if not fmt: self.log.error( "EmailDistributer was unable to find a formatter " + "for format %s" % k) continue rslvr = None if name and not addr: # figure out what the addr should be if it's not defined for rslvr in self.resolvers: addr = rslvr.get_address_for_name(name, authed) if addr: break if addr: self.log.debug("EmailDistributor found the " \ "address '%s' for '%s (%s)' via: %s"%( addr, name, authed and \ 'authenticated' or 'not authenticated', rslvr.__class__.__name__)) # ok, we found an addr, add the message msgdict.setdefault(fmt, set()).add((name, authed, addr)) else: self.log.debug("EmailDistributor was unable to find an " \ "address for: %s (%s)"%(name, authed and \ 'authenticated' or 
'not authenticated')) for k, v in msgdict.items(): if not v or not fmtdict.get(k): continue self.log.debug("EmailDistributor is sending event as '%s' to: %s" % (fmt, ', '.join(x[2] for x in v))) self._do_send(transport, event, k, v, fmtdict[k]) def _get_default_format(self): return self.default_email_format def _get_preferred_format(self, realm, sid, authenticated): if authenticated is None: authenticated = 0 db = self.env.get_db_cnx() cursor = db.cursor() cursor.execute( """ SELECT value FROM session_attribute WHERE sid=%s AND authenticated=%s AND name=%s """, (sid, int(authenticated), 'announcer_email_format_%s' % realm)) result = cursor.fetchone() if result: chosen = result[0] self.log.debug("EmailDistributor determined the preferred format" \ " for '%s (%s)' is: %s"%(sid, authenticated and \ 'authenticated' or 'not authenticated', chosen)) return chosen else: return self._get_default_format() def _init_pref_encoding(self): from email.Charset import Charset, QP, BASE64 self._charset = Charset() self._charset.input_charset = 'utf-8' pref = self.mime_encoding.lower() if pref == 'base64': self._charset.header_encoding = BASE64 self._charset.body_encoding = BASE64 self._charset.output_charset = 'utf-8' self._charset.input_codec = 'utf-8' self._charset.output_codec = 'utf-8' elif pref in ['qp', 'quoted-printable']: self._charset.header_encoding = QP self._charset.body_encoding = QP self._charset.output_charset = 'utf-8' self._charset.input_codec = 'utf-8' self._charset.output_codec = 'utf-8' elif pref == 'none': self._charset.header_encoding = None self._charset.body_encoding = None self._charset.input_codec = None self._charset.output_charset = 'ascii' else: raise TracError(_('Invalid email encoding setting: %s' % pref)) def _message_id(self, event, event_id, modtime): """Generate a predictable, but sufficiently unique message ID.""" s = '%s.%s.%d' % (self.env.project_url, event_id, modtime) dig = md5(s).hexdigest() host = self.smtp_from[self.smtp_from.find('@') + 1:] msgid = '<%03d.%s@%s>' % (len(s), dig, host) return msgid def _event_id(self, event): """FIXME: badly needs improvement Hacked bullshit. 
""" if hasattr(event.target, 'id'): return "%08d" % event.target.id elif hasattr(event.target, 'name'): return event.target.name else: return str(event.target) def _do_send(self, transport, event, format, recipients, formatter): output = formatter.format(transport, event.realm, format, event) subject = formatter.format_subject(transport, event.realm, format, event) alternate_format = formatter.get_format_alternative( transport, event.realm, format) if alternate_format: alternate_output = formatter.format(transport, event.realm, alternate_format, event) else: alternate_output = None rootMessage = MIMEMultipart("related") rootMessage.set_charset(self._charset) proj_name = self.env.project_name trac_version = get_pkginfo(trac.core).get('version', trac.__version__) announcer_version = get_pkginfo(announcerplugin).get( 'version', 'Undefined') rootMessage['X-Mailer'] = 'AnnouncerPlugin v%s on Trac ' \ 'v%s'%(announcer_version, trac_version) rootMessage['X-Trac-Version'] = trac_version rootMessage['X-Announcer-Version'] = announcer_version rootMessage['X-Trac-Project'] = proj_name rootMessage['X-Trac-Announcement-Realm'] = event.realm event_id = self._event_id(event) rootMessage['X-Trac-Announcement-ID'] = event_id if self.set_message_id: msgid = self._message_id(event, event_id, 0) if event.category is not 'created': rootMessage['In-Reply-To'] = msgid rootMessage['References'] = msgid msgid = self._message_id(event, event_id, time.time()) rootMessage['Message-ID'] = msgid rootMessage['Precedence'] = 'bulk' rootMessage['Auto-Submitted'] = 'auto-generated' provided_headers = formatter.format_headers(transport, event.realm, format, event) for key in provided_headers: rootMessage['X-Announcement-%s'%key.capitalize()] = \ to_unicode(provided_headers[key]) rootMessage['Date'] = formatdate() # sanity check if not self._charset.body_encoding: try: dummy = output.encode('ascii') except UnicodeDecodeError: raise TracError(_("Ticket contains non-ASCII chars. " \ "Please change encoding setting")) prefix = self.smtp_subject_prefix if prefix == '__default__': prefix = '[%s]' % self.env.project_name if event.category is not 'created': prefix = 'Re: %s' % prefix if prefix: subject = "%s %s" % (prefix, subject) rootMessage['Subject'] = Header(subject, self._charset) from_header = '"%s" <%s>' % (Header(self.smtp_from_name or proj_name, self._charset), self.smtp_from) rootMessage['From'] = from_header if self.smtp_always_bcc: rootMessage['Bcc'] = self.smtp_always_bcc if self.smtp_to: rootMessage['To'] = '"%s"' % (self.smtp_to) if self.use_public_cc: rootMessage['Cc'] = ', '.join([x[2] for x in recipients if x]) rootMessage['Reply-To'] = self.smtp_replyto rootMessage.preamble = 'This is a multi-part message in MIME format.' 
if alternate_output: parentMessage = MIMEMultipart('alternative') rootMessage.attach(parentMessage) else: parentMessage = rootMessage if alternate_output: alt_msg_format = 'html' in alternate_format and 'html' or 'plain' msgText = MIMEText(alternate_output, alt_msg_format) parentMessage.attach(msgText) msg_format = 'html' in format and 'html' or 'plain' msgText = MIMEText(output, msg_format) del msgText['Content-Transfer-Encoding'] msgText.set_charset(self._charset) parentMessage.attach(msgText) start = time.time() package = (from_header, [x[2] for x in recipients if x], rootMessage.as_string()) if self.use_threaded_delivery: self.get_delivery_queue().put(package) else: self._transmit(*package) stop = time.time() self.log.debug("EmailDistributor took %s seconds to send."\ %(round(stop-start,2))) def _transmit(self, smtpfrom, addresses, message): # use defaults to make sure connect() is called in the constructor if self.smtp_ssl: smtp = smtplib.SMTP_SSL(host=self.smtp_server, port=self.smtp_port) else: smtp = smtplib.SMTP(host=self.smtp_server, port=self.smtp_port) if self.smtp_debuglevel: smtp.set_debuglevel(self.smtp_debuglevel) if self.use_tls: smtp.ehlo() if not smtp.esmtp_features.has_key('starttls'): raise TracError(_("TLS enabled but server does not support " \ "TLS")) smtp.starttls() smtp.ehlo() if self.smtp_user: smtp.login(self.smtp_user, self.smtp_password) smtp.sendmail(smtpfrom, addresses, message) smtp.quit() # IAnnouncementDistributor def get_announcement_preference_boxes(self, req): yield "email", "E-Mail Format" def render_announcement_preference_box(self, req, panel): transport = self.get_distribution_transport() supported_realms = {} for formatter in self.formatters: if formatter.get_format_transport() == transport: for realm in formatter.get_format_realms(transport): if realm not in supported_realms: supported_realms[realm] = set() supported_realms[realm].update( formatter.get_format_styles(transport, realm)) if req.method == "POST": for realm in supported_realms: opt = req.args.get('email_format_%s' % realm, False) if opt: req.session['announcer_email_format_%s' % realm] = opt prefs = {} for realm in supported_realms: prefs[realm] = req.session.get('announcer_email_format_%s' % realm, None) or self._get_default_format() data = dict( realms=supported_realms, preferences=prefs, ) return "prefs_announcer_email.html", data
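# Rough sketch of the threaded delivery used by get_delivery_queue().  The
# real DeliveryThread is defined elsewhere in the plugin; this stand-in only
# illustrates the pattern: a daemon thread pulls (sender, recipients, message)
# packages off a Queue and hands them to a transmit callable such as
# EmailDistributor._transmit.

import Queue
import threading

class SimpleDeliveryThread(threading.Thread):

    def __init__(self, queue, sender):
        threading.Thread.__init__(self)
        self.setDaemon(True)
        self._queue = queue
        self._sender = sender

    def run(self):
        while True:
            smtpfrom, addresses, message = self._queue.get()
            try:
                self._sender(smtpfrom, addresses, message)
            except Exception:
                # a real implementation would log the failure instead
                pass

# usage sketch:
#   queue = Queue.Queue()
#   SimpleDeliveryThread(queue, transmit).start()
#   queue.put((from_addr, [to_addr], raw_message_string))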