class TagWikiMacros(TagTemplateProvider): """[opt] Provides macros, that utilize the tag system in wiki markup.""" implements(IWikiMacroProvider) caseless_sort = BoolOption( 'tags', 'cloud_caseless_sort', default=False, doc="Whether the tag cloud should be sorted case-sensitive.") default_cols = Option( 'tags', 'listtagged_default_table_cols', 'id|description|tags', doc="Select columns and column order for table format.\n\n" "See `ListTagged` description (WikiMacros) for supported values.") default_format = Option( 'tags', 'listtagged_default_format', 'oldlist', doc="Set default format for the handler of the `/tags` domain.\n\n" "See `ListTagged` description (WikiMacros) for supported values.") exclude_realms = ListOption( 'tags', 'listtagged_exclude_realms', [], doc="Comma-separated list of realms to exclude from tags queries " "by default, unless specifically included using 'realm:<realm>' " "in a query.") items_per_page = Option( 'tags', 'listtagged_items_per_page', 100, doc="Number of tagged resources displayed per page of tag query " "results requested by `ListTagged` macros and from `/tags`.") items_per_page = as_int(items_per_page, 100) supported_cols = frozenset(['realm', 'id', 'description', 'tags']) def __init__(self): # TRANSLATOR: Keep macro doc style formatting here, please. self.doc_cloud = N_("""Display a tag cloud. Show a tag cloud for all tags on resources matching query. Usage: {{{ [[TagCloud(<query>[,caseless_sort=<bool>][,mincount=<n>])]] }}} caseless_sort:: Whether the tag cloud should be sorted case-sensitive. mincount:: Optional integer threshold to hide tags with smaller count. See tags documentation for the query syntax. """) self.doc_listtagged = N_("""List tagged resources. Usage: {{{ [[ListTagged(<query>[,exclude=<list>],[[format=<format>],cols=<columns>])]] }}} format:: result list presentation; supported values: || `compact` || comma-separated inline list of "linked-description" || || `oldlist` (default) || " * linked-id description (tags)" list || || `table` || table... (see corresponding column option too) || || `short` or other value || bulleted list of "linked-description" || cols:: columns for 'table' format using a "|"-separated list of column names (order matters); supported columns: realm, id, description, tags exclude:: exclude tagged resources that match a name in the colon-separated list of resource ids, accepts shell-style patterns See tags documentation for the query syntax. """) # IWikiMacroProvider def get_macros(self): yield 'ListTagged' yield 'TagCloud' def get_macro_description(self, name): if name == 'ListTagged': return gettext(self.doc_listtagged) if name == 'TagCloud': return gettext(self.doc_cloud) def expand_macro(self, formatter, name, content, realms=[]): """Evaluate macro call and render results. Calls from web-UI come with pre-processed realm selection. """ env = self.env req = formatter.req tag_system = TagSystem(env) all_realms = tag_system.get_taggable_realms() if not all_realms: # Tag providers are required, no result without at least one. return '' args, kw = parse_args(content) query = args and args[0].strip() or None if not realms: # Check macro arguments for realms (typical wiki macro call). realms = 'realm' in kw and kw['realm'].split('|') or [] if query: # Add realms from query expression. realms.extend(query_realms(query, all_realms)) # Remove redundant realm selection for performance. 
if set(realms) == all_realms: query = re.sub('(^|\W)realm:\S+(\W|$)', ' ', query).strip() if name == 'TagCloud': # Set implicit 'all tagged realms' as default. if not realms: realms = all_realms if query: all_tags = Counter() # Require per resource query including view permission checks. for resource, tags in tag_system.query(req, query): all_tags.update(tags) else: # Allow faster per tag query, side steps permission checks. all_tags = tag_system.get_all_tags(req, realms=realms) mincount = 'mincount' in kw and kw['mincount'] or None return self.render_cloud(req, all_tags, caseless_sort=self.caseless_sort, mincount=mincount, realms=realms) elif name == 'ListTagged': if content and _OBSOLETE_ARGS_RE.search(content): data = {'warning': 'obsolete_args'} else: data = {'warning': None} context = formatter.context # Use TagsQuery arguments (most likely wiki macro calls). cols = 'cols' in kw and kw['cols'] or self.default_cols format = 'format' in kw and kw['format'] or self.default_format if not realms: # Apply ListTagged defaults to macro call w/o realm. realms = list(set(all_realms) - set(self.exclude_realms)) if not realms: return '' query = '(%s) (%s)' % (query or '', ' or '.join( ['realm:%s' % (r) for r in realms])) query_result = tag_system.query(req, query) excludes = [ exc.strip() for exc in kw.get('exclude', '').split(':') if exc.strip() ] if excludes and query_result: filtered_result = [(resource, tags) for resource, tags in query_result if not any( fnmatchcase(resource.id, exc) for exc in excludes)] query_result = filtered_result if not query_result: return '' def _link(resource): if resource.realm == 'tag': # Keep realm selection in tag links. return builder.a(resource.id, href=self.get_href(req, realms, tag=resource)) elif resource.realm == 'ticket': # Return resource link including ticket status dependend # class to allow for common Trac ticket link style. ticket = Ticket(env, resource.id) return builder.a('#%s' % ticket.id, class_=ticket['status'], href=formatter.href.ticket(ticket.id), title=shorten_line(ticket['summary'])) return render_resource_link(env, context, resource, 'compact') if format == 'table': cols = [ col for col in cols.split('|') if col in self.supported_cols ] # Use available translations from Trac core. try: labels = TicketSystem(env).get_ticket_field_labels() labels['id'] = _('Id') except AttributeError: # Trac 0.11 neither has the attribute nor uses i18n. labels = {'id': 'Id', 'description': 'Description'} labels['realm'] = _('Realm') labels['tags'] = _('Tags') headers = [{'label': labels.get(col)} for col in cols] data.update({'cols': cols, 'headers': headers}) try: results = sorted( query_result, key=lambda r: embedded_numbers(to_unicode(r[0].id))) except (InvalidQuery, InvalidTagRealm), e: return system_message(_("ListTagged macro error"), e) results = self._paginate(req, results, realms) rows = [] for resource, tags in results: desc = tag_system.describe_tagged_resource(req, resource) tags = sorted(tags) wiki_desc = format_to_oneliner(env, context, desc) if tags: rendered_tags = [ _link(Resource('tag', tag)) for tag in tags ] if 'oldlist' == format: resource_link = _link(resource) else: resource_link = builder.a(wiki_desc, href=get_resource_url( env, resource, context.href)) if 'table' == format: cells = [] for col in cols: if col == 'id': cells.append(_link(resource)) # Don't duplicate links to resource in both. 
elif col == 'description' and 'id' in cols: cells.append(wiki_desc) elif col == 'description': cells.append(resource_link) elif col == 'realm': cells.append(resource.realm) elif col == 'tags': cells.append( builder([(tag, ' ') for tag in rendered_tags])) rows.append({'cells': cells}) continue rows.append({ 'desc': wiki_desc, 'rendered_tags': None, 'resource_link': _link(resource) }) data.update({ 'format': format, 'paginator': results, 'results': rows, 'tags_url': req.href('tags') }) # Work around a bug in trac/templates/layout.html, that causes a # TypeError for the wiki macro call, if we use add_link() alone. add_stylesheet(req, 'common/css/search.css') return Chrome(env).render_template(req, 'listtagged_results.html', data, 'text/html', True)
class Download(Component):

    implements(INavigationContributor, IRequestHandler, IAdminPanelProvider,
               ITemplateProvider, IPermissionRequestor)

    path = PathOption('download', 'path', '../download',
                      doc="Path where to store uploaded downloads.")

    ext = ListOption('download', 'ext', 'zip,gz,bz2,rar',
        doc="""List of file extensions allowed to upload.

        Set to 'all' to specify that any file extension is allowed.
        """)

    max_size = IntOption('download', 'max_size', 268697600,
        """Maximum allowed file size (in bytes) for downloads.

        Default is 256 MB.
        """)

    # INavigationContributor methods

    def get_active_navigation_item(self, req):
        return 'download'

    def get_navigation_items(self, req):
        if 'DOWNLOAD_VIEW' in req.perm('download'):
            yield ('mainnav', 'download',
                   html.a('Download', href=req.href.download()))

    # IRequestHandler methods

    def match_request(self, req):
        return req.path_info.find('/download') == 0

    def process_request(self, req):
        data = {}
        self.do_action(req)
        cursor = self.env.db_query(
            "SELECT id, file, description FROM download ORDER BY id")
        data['downloads'] = [(row[0], row[1], row[2]) for row in cursor]
        return 'download_list.html', data, {}

    # IAdminPanelProvider methods

    def get_admin_panels(self, req):
        if 'DOWNLOAD_ADMIN' in req.perm('download'):
            yield 'download', 'Download', 'settings', 'Settings'

    def render_admin_panel(self, req, cat, page, version):
        # Here comes the page content, handling, etc.
        data = {}
        self.do_action(req)
        cursor = self.env.db_query(
            "SELECT id, file, description, size, time, author "
            "FROM download ORDER BY id")
        data['downloads'] = [(row[0], row[1], row[2]) for row in cursor]
        return 'download_admin.html', data, {}

    # ITemplateProvider methods

    def get_htdocs_dirs(self):
        """Return the absolute path of a directory containing additional
        static resources (such as images, style sheets, etc).
        """
        from pkg_resources import resource_filename
        return [resource_filename(__name__, 'htdocs')]

    def get_templates_dirs(self):
        """Return the absolute path of the directory containing the provided
        ClearSilver/Genshi templates.
        """
        from pkg_resources import resource_filename
        return [resource_filename(__name__, 'templates')]

    # IPermissionRequestor methods

    def get_permission_actions(self):
        view = 'DOWNLOAD_VIEW'
        add = ('DOWNLOAD_ADD', ['DOWNLOAD_VIEW'])
        admin = ('DOWNLOAD_ADMIN', ['DOWNLOAD_VIEW', 'DOWNLOAD_ADD'])
        return [view, add, admin]

    # Internal methods

    def get_download_id_by_time(self, time):
        # Parameterized query avoids interpolating the timestamp directly
        # into the SQL string.
        cursor = self.env.db_query(
            "SELECT id, file, description, size, time, author "
            "FROM download WHERE time=%s", (time,))
        for row in cursor:
            return row[0]
        return {}

    def get_file_from_req(self, req):
        file = req.args['file']

        # Test if file is uploaded.
        if not hasattr(file, 'filename') or not file.filename:
            raise TracError("No file uploaded.")

        # Get file size.
        if hasattr(file.file, 'fileno'):
            size = os.fstat(file.file.fileno())[6]
        else:
            # Seek to end of file to get its size.
            file.file.seek(0, 2)
            size = file.file.tell()
            file.file.seek(0)
        if size == 0:
            raise TracError("Can't upload empty file.")

        # Try to normalize the filename to unicode NFC if we can.
        # Files uploaded from OS X might be in NFD.
        self.log.debug("input filename: %s", file.filename)
        filename = unicodedata.normalize('NFC',
                                         to_unicode(file.filename, 'utf-8'))
        filename = filename.replace('\\', '/').replace(':', '/')
        filename = os.path.basename(filename)
        self.log.debug("output filename: %s", filename)

        return file.file, filename, size

    def add_download(self, download, file):
        # Check for maximum file size.
        if 0 <= self.max_size < download['size']:
            raise TracError("Maximum file size: %s bytes" % self.max_size,
                            "Upload failed")

        # Add new download to DB.
        sql = "INSERT INTO download (file,description,size,time,author) " \
              " VALUES(%s,%s,%s,%s,%s)"
        args = (download['file'], download['description'], download['size'],
                download['time'], download['author'])
        self.env.db_transaction(sql, args)
        self.log.debug("FileUpload SQL: %s", sql)

        # Get inserted download by time to get its ID.
        id = self.get_download_id_by_time(download['time'])
        self.log.debug("FileUpload id: %s", id)

        # Prepare file paths.
        path = os.path.normpath(os.path.join(self.path, to_unicode(id)))
        filepath = os.path.normpath(os.path.join(path, download['file']))
        self.log.debug("FileUpload path: %s", path)
        self.log.debug("FileUpload filepath: %s", filepath)

        # Store uploaded file.
        try:
            os.mkdir(path.encode('utf-8'))
            with open(filepath.encode('utf-8'), 'wb+') as fileobj:
                file.seek(0)
                shutil.copyfileobj(file, fileobj)
        except Exception as error:
            self.log.debug(error)
            try:
                os.remove(filepath.encode('utf-8'))
            except:
                pass
            try:
                os.rmdir(path.encode('utf-8'))
            except:
                pass
            raise TracError("Error storing file %s. Does the directory "
                            "specified in the path option of the [download] "
                            "section of trac.ini exist?" % download['file'])

    def do_action(self, req):
        if req.method == "POST":
            submit = req.args.get('submit').strip()
            if submit == 'Add':
                # Get form values.
                file, filename, file_size = self.get_file_from_req(req)
                download = {
                    'file': filename,
                    'description': req.args.get('description'),
                    'size': file_size,
                    'time': to_timestamp(datetime.datetime.now(utc)),
                    'count': 0,
                    'author': req.authname
                }
                self.log.debug("FileUpload filename: %s", download['file'])
                self.log.debug("FileUpload description: %s",
                               download['description'])
                self.log.debug("FileUpload size: %s", download['size'])
                self.log.debug("FileUpload time: %s", download['time'])
                self.log.debug("FileUpload author: %s", download['author'])

                # Upload file to DB and file storage.
                self.add_download(download, file)
                file.close()
                add_notice(req, 'Download has been added.')
            elif submit == 'Remove':
                ids = req.args.getlist('sels')
                if ids is not None and len(ids) > 0:
                    for id in ids:
                        self.env.db_transaction(
                            "DELETE FROM download WHERE id=%s", (int(id),))
                    add_notice(req, 'Download has been deleted.')
        else:
            # Get download.
            download_id = int(req.args.get('sel') or 0)
            if download_id > 0:
                cursor = self.env.db_query(
                    "SELECT file, description FROM download WHERE id=%s",
                    (download_id,))
                if len(cursor) > 0:
                    fn = cursor[0][0]
                    description = cursor[0][1]
                else:
                    raise TracError("File not found.")

                # Get download file path.
                filename = os.path.basename(fn)
                filepath = os.path.join(self.path, to_unicode(download_id),
                                        filename)
                filepath = os.path.normpath(filepath)

                # Increase downloads count.
                self.env.db_transaction(
                    "UPDATE download SET count=count+1 WHERE id=%s",
                    (download_id,))

                # Guess mime type.
                with open(filepath.encode('utf-8'), 'r') as fileobj:
                    file_data = fileobj.read(1000)
                mimeview = Mimeview(self.env)
                mime_type = mimeview.get_mimetype(filepath, file_data)
                if not mime_type:
                    mime_type = 'application/octet-stream'
                if 'charset=' not in mime_type:
                    charset = mimeview.get_charset(file_data, mime_type)
                    mime_type = mime_type + '; charset=' + charset

                # Return uploaded file to request.
                req.send_header(
                    'Content-Disposition',
                    'attachment;filename="%s"' % os.path.normpath(fn))
                req.send_header('Content-Description', description)
                req.send_file(filepath.encode('utf-8'), mime_type)
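# Hedged usage sketch (not part of the plugin): the Download component above
# reads its settings from the [download] section of trac.ini.  A minimal
# sketch of setting those options programmatically; the environment path is a
# placeholder and the values are examples only.
def _example_configure_download(env_path='/path/to/env'):
    from trac.env import Environment
    env = Environment(env_path)  # hypothetical environment location
    env.config.set('download', 'path', '../download')
    env.config.set('download', 'ext', 'zip,gz,bz2,rar')
    env.config.set('download', 'max_size', '268697600')  # ~256 MB
    env.config.save()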
class CsetPropertyRenderer(Component):

    implements(IPropertyRenderer)

    # relied upon by GitChangeset
    def match_property(self, name, mode):
        # default renderer has priority 1
        return (name in ('Parents', 'Children', 'Branches',
                         'git-committer', 'git-author')
                and mode == 'revprop') and 4 or 0

    def render_property(self, name, mode, context, props):

        def sha_link(sha, label=None):
            # sha is assumed to be a non-abbreviated 40-chars sha id
            try:
                reponame = context.resource.parent.id
                repos = RepositoryManager(self.env).get_repository(reponame)
                cset = repos.get_changeset(sha)
                if label is None:
                    label = repos.display_rev(sha)
                return tag.a(label, class_='changeset',
                             title=shorten_line(cset.message),
                             href=context.href.changeset(sha, repos.reponame))
            except Exception as e:
                return tag.a(sha, class_='missing changeset',
                             title=to_unicode(e), rel='nofollow')

        if name == 'Branches':
            branches = props[name]
            # simple non-merge commit
            return tag(*intersperse(', ', (sha_link(rev, label)
                                           for label, rev in branches)))

        elif name in ('Parents', 'Children'):
            revs = props[name]  # list of commit ids

            if name == 'Parents' and len(revs) > 1:
                # we got a merge...
                current_sha = context.resource.id
                reponame = context.resource.parent.id

                parent_links = intersperse(', ', (
                    (sha_link(rev),
                     ' (',
                     tag.a(_("diff"),
                           title=_("Diff against this parent (show the "
                                   "changes merged from the other parents)"),
                           href=context.href.changeset(current_sha, reponame,
                                                       old=rev)),
                     ')')
                    for rev in revs))

                return tag(
                    list(parent_links),
                    tag.br(),
                    tag.span(Markup(_("Note: this is a <strong>merge"
                                      "</strong> changeset, the "
                                      "changes displayed below "
                                      "correspond to the merge "
                                      "itself.")),
                             class_='hint'),
                    tag.br(),
                    tag.span(Markup(_("Use the <code>(diff)</code> "
                                      "links above to see all the "
                                      "changes relative to each "
                                      "parent.")),
                             class_='hint'))

            # simple non-merge commit
            return tag(*intersperse(', ', map(sha_link, revs)))

        elif name in ('git-committer', 'git-author'):
            user_, time_ = props[name]
            _str = "%s (%s)" % (
                Chrome(self.env).format_author(context.req, user_),
                format_datetime(time_, tzinfo=context.req.tz))
            return unicode(_str)

        raise TracError(_("Internal error"))
class SuccessfulAuthenticator2(Component):
    implements(IAuthenticator)

    def authenticate(self, req):
        return 'user2'
class ExcelTicketModule(Component): implements(IContentConverter) def get_supported_conversions(self): format = get_excel_format(self.env) mimetype = get_excel_mimetype(format) yield ('excel', _("Excel"), format, 'trac.ticket.Query', mimetype, 8) yield ('excel-history', _("Excel including history"), format, 'trac.ticket.Query', mimetype, 8) yield ('excel-history', _("Excel including history"), format, 'trac.ticket.Ticket', mimetype, 8) def convert_content(self, req, mimetype, content, key): if key == 'excel': return self._convert_query(req, content) if key == 'excel-history': kwargs = {} if isinstance(content, Ticket): content = Query.from_string(self.env, 'id=%d' % content.id) kwargs['sheet_query'] = False kwargs['sheet_history'] = True else: kwargs['sheet_query'] = True kwargs['sheet_history'] = True return self._convert_query(req, content, **kwargs) def _convert_query(self, req, query, sheet_query=True, sheet_history=False): book = get_workbook_writer(self.env, req) # no paginator query.max = 0 query.has_more_pages = False query.offset = 0 db = _get_db(self.env) # extract all fields except custom fields custom_fields = [f['name'] for f in query.fields if f.get('custom')] cols = ['id'] cols.extend(f['name'] for f in query.fields if f['name'] not in custom_fields) cols.extend(name for name in ('time', 'changetime') if name not in cols) query.cols = cols # prevent "SELECT COUNT(*)" query saved_count_prop = query._count try: query._count = types.MethodType(lambda self, sql, args, db=None: 0, query, query.__class__) if 'db' in inspect.getargspec(query.execute)[0]: tickets = query.execute(req, db) else: tickets = query.execute(req) query.num_items = len(tickets) finally: query._count = saved_count_prop # add custom fields to avoid error to join many tables self._fill_custom_fields(tickets, query.fields, custom_fields, db) context = Context.from_request(req, 'query', absurls=True) cols.extend([name for name in custom_fields if name not in cols]) data = query.template_data(context, tickets) if sheet_query: self._create_sheet_query(req, context, data, book) if sheet_history: self._create_sheet_history(req, context, data, book) return book.dumps(), book.mimetype def _fill_custom_fields(self, tickets, fields, custom_fields, db): if not tickets or not custom_fields: return fields = dict((f['name'], f) for f in fields) tickets = dict((int(ticket['id']), ticket) for ticket in tickets) query = "SELECT ticket,name,value " \ "FROM ticket_custom WHERE %s ORDER BY ticket" % \ _tkt_id_conditions('ticket', tickets) cursor = db.cursor() cursor.execute(query) for id, name, value in cursor: if id not in tickets: continue f = fields.get(name) if f and f['type'] == 'checkbox': try: value = bool(int(value)) except (TypeError, ValueError): value = False tickets[id][name] = value def _create_sheet_query(self, req, context, data, book): def write_headers(writer, query): writer.write_row([ (u'%s (%s)' % (dgettext('messages', 'Custom Query'), dngettext('messages', '%(num)s match', '%(num)s matches', query.num_items)), 'header', -1, -1) ]) query = data['query'] groups = data['groups'] fields = data['fields'] headers = data['headers'] sheet_count = 1 sheet_name = dgettext("messages", "Custom Query") writer = book.create_sheet(sheet_name) write_headers(writer, query) for groupname, results in groups: results = [ result for result in results if 'TICKET_VIEW' in req.perm( context('ticket', result['id']).resource) ] if not results: continue if writer.row_idx + len(results) + 3 > writer.MAX_ROWS: sheet_count += 1 writer = 
book.create_sheet('%s (%d)' % (sheet_name, sheet_count)) write_headers(writer, query) if groupname: writer.move_row() cell = fields[query.group]['label'] + ' ' if query.group in ('owner', 'reporter'): cell += Chrome(self.env).format_author(req, groupname) else: cell += groupname cell += ' (%s)' % dngettext('messages', '%(num)s match', '%(num)s matches', len(results)) writer.write_row([(cell, 'header2', -1, -1)]) writer.write_row((header['label'], 'thead', None, None) for idx, header in enumerate(headers)) for result in results: ticket_context = context('ticket', result['id']) cells = [] for idx, header in enumerate(headers): name = header['name'] value, style, width, line = self._get_cell_data( name, result.get(name), req, ticket_context, writer) cells.append((value, style, width, line)) writer.write_row(cells) writer.set_col_widths() def _create_sheet_history(self, req, context, data, book): def write_headers(writer, headers): writer.write_row((header['label'], 'thead', None, None) for idx, header in enumerate(headers)) groups = data['groups'] headers = [ header for header in data['headers'] if header['name'] not in ('id', 'time', 'changetime') ] headers[0:0] = [ { 'name': 'id', 'label': dgettext("messages", "Ticket") }, { 'name': 'time', 'label': dgettext("messages", "Time") }, { 'name': 'author', 'label': dgettext("messages", "Author") }, { 'name': 'comment', 'label': dgettext("messages", "Comment") }, ] sheet_name = dgettext("messages", "Change History") sheet_count = 1 writer = book.create_sheet(sheet_name) write_headers(writer, headers) tkt_ids = [ result['id'] for result in chain(*[results for groupname, results in groups]) ] tickets = BulkFetchTicket.select(self.env, tkt_ids) mod = TicketModule(self.env) for result in chain(*[results for groupname, results in groups]): id = result['id'] ticket = tickets[id] ticket_context = context('ticket', id) if 'TICKET_VIEW' not in req.perm(ticket_context.resource): continue values = ticket.values.copy() changes = [] for change in mod.grouped_changelog_entries(ticket, None): if change['permanent']: changes.append(change) for change in reversed(changes): change['values'] = values values = values.copy() for name, field in change['fields'].iteritems(): if name in values: values[name] = field['old'] changes[0:0] = [{ 'date': ticket.time_created, 'fields': {}, 'values': values, 'cnum': None, 'comment': '', 'author': ticket['reporter'] }] if writer.row_idx + len(changes) >= writer.MAX_ROWS: sheet_count += 1 writer = book.create_sheet('%s (%d)' % (sheet_name, sheet_count)) write_headers(writer, headers) for change in changes: cells = [] for idx, header in enumerate(headers): name = header['name'] if name == 'id': value = id elif name == 'time': value = change.get('date', '') elif name == 'comment': value = change.get('comment', '') elif name == 'author': value = change.get('author', '') value = Chrome(self.env).format_author(req, value) else: value = change['values'].get(name, '') value, style, width, line = \ self._get_cell_data(name, value, req, ticket_context, writer) if name in change['fields']: style = '%s:change' % style cells.append((value, style, width, line)) writer.write_row(cells) writer.set_col_widths() def _get_cell_data(self, name, value, req, context, writer): if name == 'id': url = self.env.abs_href.ticket(value) value = '#%d' % value width = len(value) return value, 'id', width, 1 if isinstance(value, datetime): return value, '[datetime]', None, None if value and name in ('reporter', 'owner'): value = 
Chrome(self.env).format_author(req, value) return value, name, None, None if name == 'cc': value = Chrome(self.env).format_emails(context, value) return value, name, None, None if name == 'milestone': if value: url = self.env.abs_href.milestone(value) width, line = writer.get_metrics(value) return value, name, width, line else: return '', name, None, None return value, name, None, None
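# Hedged usage sketch (not part of the plugin): the converters registered by
# ExcelTicketModule above are exposed through Trac's Mimeview API.  The
# environment path is a placeholder; the exact shape of the returned entries
# depends on the Trac version.
def _example_list_excel_conversions(env_path='/path/to/env'):
    from trac.env import Environment
    from trac.mimeview.api import Mimeview
    env = Environment(env_path)  # hypothetical environment location
    # Each entry describes one registered conversion for 'trac.ticket.Query'
    # objects (key, label, file extension, MIME types, quality).
    for conversion in Mimeview(env).get_supported_conversions(
            'trac.ticket.Query'):
        print(conversion)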
class DefaultHandler(Component):
    implements(IRequestHandler)

    def match_request(self, req):
        return True

    def process_request(self, req):
        raise req.exc_class("Raised in process_request")
class UnsuccessfulAuthenticator(Component):
    implements(IAuthenticator)

    def authenticate(self, req):
        return None
class SystemInfoProvider2(Component):
    implements(ISystemInfoProvider)

    def get_system_info(self):
        yield 'pkg1', 1.0
class DjangoPasswordStore(Component):
    """Manages user accounts stored in Django's database (User models).

    To use this implementation add the following configuration section to
    trac.ini:
    {{{
    [account-manager]
    password_store = DjangoPasswordStore
    django_settings_module = myproject.settings
    django_require_group = Trac
    }}}

    django_require_group is optional; it specifies which Django group a user
    must belong to in order to be able to log in.
    """
    implements(IPasswordStore)

    settings_module = Option('account-manager', 'django_settings_module', '',
                             doc=_("Name of Django settings module"))
    require_group = Option('account-manager', 'django_require_group', '',
                           doc=_("Name of required Django group"))

    def has_user(self, user):
        # TODO
        raise NotImplementedError
        #return user in self.get_users()

    def get_users(self):
        """Returns list of available users"""
        # TODO
        self.log.debug('acct_mgr: getting user list...')
        raise NotImplementedError
        #return []

    def set_password(self, user, password, old_password=None):
        """Sets user password"""
        self.log.debug('acct_mgr: setting password...')
        duser = self._get_user(user=user, password=old_password)
        if duser:
            duser.set_password(password)
            duser.save()
            return True
        return False

    def delete_user(self, user):
        """Deletes specified user from Django's userdb"""
        self.log.debug('acct_mgr: deleting user...')
        raise NotImplementedError
        #duser = self._get_user(user=user)
        #if duser:
        #    duser.delete()
        #    return True
        #return False

    def _get_user(self, user, password=None):
        """Gets specified user from Django's userdb.

        If the setting django_require_group is defined, the user MUST be in
        that group. If password is specified, it is checked as well.

        Returns a User object if the user is found (optionally: AND belongs
        to the specified group) (optionally: AND the password is correct).

        Returns None if the user is not found OR an error occurs.

        Returns False if the user is found, but the password is incorrect OR
        the user doesn't belong to the required group.
        """
        db.reset_queries()
        try:
            try:
                duser = User.objects.get(
                    Q(is_active=True) &
                    (Q(**{username_field: user}) | Q(email=user)))
                group = str(self.require_group)
                if group != "":
                    if duser.groups.filter(name=group).count() == 0:
                        return False
                if password and duser.check_password(password):
                    return duser
                elif password is None:
                    return duser
                else:
                    return False
            except User.DoesNotExist:
                return None
        finally:
            db.connection.close()
        return None

    def check_password(self, user, password):
        """Checks user password against Django's userdb"""
        self.log.debug('acct_mgr: checking password...')
        duser = self._get_user(user=user, password=password)
        if duser:
            self.log.debug('acct_mgr: user %s authenticated' % user)
            return True
        else:
            self.log.debug('acct_mgr: user %s NOT authenticated' % user)
            return False
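# Hedged usage sketch (not part of the store): resolving the component from an
# environment and checking a password through the IPasswordStore interface.
# The environment path and credentials are placeholders.
def _example_check_django_password(env_path='/path/to/env'):
    from trac.env import Environment
    env = Environment(env_path)  # hypothetical environment location
    store = DjangoPasswordStore(env)  # Trac returns the per-environment singleton
    return store.check_password('alice', 's3cret')  # True if credentials match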
class SQLiteConnector(Component):
    """Database connector for SQLite.

    Database URLs should be of the form:
    {{{
    sqlite:path/to/trac.db
    }}}
    """
    implements(IDatabaseConnector, ISystemInfoProvider)

    required = False

    extensions = ListOption('sqlite', 'extensions',
        doc="""Paths to sqlite extensions, relative to Trac environment's
        directory or absolute. (''since 0.12'')""")

    memory_cnx = None

    def __init__(self):
        self.error = None

    # ISystemInfoProvider methods

    def get_system_info(self):
        if self.required:
            yield 'SQLite', sqlite_version_string
            yield 'pysqlite', pysqlite_version_string

    # IDatabaseConnector methods

    def get_supported_schemes(self):
        if sqlite_version < min_sqlite_version:
            self.error = _("SQLite version is %(version)s. Minimum required "
                           "version is %(min_version)s.",
                           version=sqlite_version_string,
                           min_version='%d.%d.%d' % min_sqlite_version)
        elif pysqlite_version < min_pysqlite_version:
            self.error = _("Need at least PySqlite %(version)s or higher",
                           version='%d.%d.%d' % min_pysqlite_version)
        elif (2, 5, 2) <= pysqlite_version < (2, 5, 5):
            self.error = _("PySqlite 2.5.2 - 2.5.4 break Trac, please use "
                           "2.5.5 or higher")
        yield 'sqlite', -1 if self.error else 1

    def get_connection(self, path, log=None, params={}):
        self.required = True
        params['extensions'] = self._extensions
        if path == ':memory:':
            if not self.memory_cnx:
                self.memory_cnx = SQLiteConnection(path, log, params)
            return self.memory_cnx
        else:
            return SQLiteConnection(path, log, params)

    def get_exceptions(self):
        return sqlite

    def init_db(self, path, schema=None, log=None, params={}):
        if path != ':memory:':
            # make the directory to hold the database
            if os.path.exists(path):
                raise TracError(_("Database already exists at %(path)s",
                                  path=path))
            dir = os.path.dirname(path)
            if not os.path.exists(dir):
                os.makedirs(dir)
            if isinstance(path, unicode):  # needed with 2.4.0
                path = path.encode('utf-8')
            # this direct connect will create the database if needed
            cnx = sqlite.connect(path,
                                 timeout=int(params.get('timeout', 10000)))
        else:
            cnx = self.get_connection(path, log, params)
        cursor = cnx.cursor()
        _set_journal_mode(cursor, params.get('journal_mode'))
        if schema is None:
            from trac.db_default import schema
        for table in schema:
            for stmt in self.to_sql(table):
                cursor.execute(stmt)
        cnx.commit()

    def destroy_db(self, path, log=None, params={}):
        if path != ':memory:':
            if not os.path.isabs(path):
                path = os.path.join(self.env.path, path)
            try:
                os.remove(path)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

    def to_sql(self, table):
        return _to_sql(table)

    def alter_column_types(self, table, columns):
        """Yield SQL statements altering the type of one or more columns of
        a table.

        Type changes are specified as a `columns` dict mapping column names
        to `(from, to)` SQL type tuples.
        """
        for name, (from_, to) in sorted(columns.iteritems()):
            if _type_map.get(to, to) != _type_map.get(from_, from_):
                raise NotImplementedError("Conversion from %s to %s is not "
                                          "implemented" % (from_, to))
        return ()

    def backup(self, dest_file):
        """Simple SQLite-specific backup of the database.

        @param dest_file: Destination file basename
        """
        import shutil
        db_str = self.config.get('trac', 'database')
        try:
            db_str = db_str[:db_str.index('?')]
        except ValueError:
            pass
        db_name = os.path.join(self.env.path, db_str[7:])
        shutil.copy(db_name, dest_file)
        if not os.path.exists(dest_file):
            raise TracError(_("No destination file created"))
        return dest_file

    @lazy
    def _extensions(self):
        _extensions = []
        for extpath in self.extensions:
            if not os.path.isabs(extpath):
                extpath = os.path.join(self.env.path, extpath)
            _extensions.append(extpath)
        return _extensions
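# Hedged usage sketch (not part of the connector): connections produced by
# SQLiteConnector are normally used indirectly, through the environment's
# query API.  The environment path is a placeholder.
def _example_read_system_table(env_path='/path/to/env'):
    from trac.env import Environment
    env = Environment(env_path)  # hypothetical environment location
    # db_query goes through the configured IDatabaseConnector; e.g. a
    # 'sqlite:db/trac.db' database URL ends up in get_connection() above.
    return list(env.db_query("SELECT name, value FROM system"))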
class HtmlNotificationModule(Component): implements(ITemplateProvider) def get_htdocs_dirs(self): return () def get_templates_dirs(self): from pkg_resources import resource_filename return [resource_filename(__name__, 'templates')] def substitute_message(self, message): try: chrome = Chrome(self.env) req = self._create_request(chrome) try: make_activable(lambda: req.locale, self.env.path) return self._substitute_message(chrome, req, message) finally: deactivate() except: self.log.warn('Caught exception while substituting message', exc_info=True) return message def _create_request(self, chrome): req = Request( { 'REQUEST_METHOD': 'GET', 'trac.base_url': self.env.abs_href(), }, lambda *args, **kwargs: None) req.arg_list = () req.args = {} req.authname = 'anonymous' req.session = FakeSession({'dateinfo': 'absolute'}) req.perm = PermissionCache(self.env, req.authname) req.href = req.abs_href req.callbacks.update({ 'chrome': chrome.prepare_request, 'tz': self._get_tz, 'locale': self._get_locale, 'lc_time': lambda req: 'iso8601', }) return req def _get_tz(self, req): tzname = self.config.get('trac', 'default_timezone') return get_timezone(tzname) or localtz def _get_locale(self, req): lang = self.config.get('trac', 'default_language') return _parse_locale(lang) def _substitute_message(self, chrome, req, message): parsed = email.message_from_string(message) link = parsed.get('X-Trac-Ticket-URL') if not link: return message match = _TICKET_URI_RE.search(link) if not match: return message tktid = match.group('tktid') cnum = match.group('cnum') if cnum is not None: cnum = int(cnum) db = _get_db(self.env) try: ticket = Ticket(self.env, tktid) except ResourceNotFound: return message container = MIMEMultipart('alternative') for header, value in parsed.items(): lower = header.lower() if lower in ('content-type', 'content-transfer-encoding'): continue if lower != 'mime-version': container[header] = value del parsed[header] container.attach(parsed) html = self._create_html_body(chrome, req, ticket, cnum, link) part = MIMEText(html.encode('utf-8'), 'html') self._set_charset(part) container.attach(part) return container.as_string() def _create_html_body(self, chrome, req, ticket, cnum, link): tktmod = TicketModule(self.env) attmod = AttachmentModule(self.env) data = tktmod._prepare_data(req, ticket) tktmod._insert_ticket_data(req, ticket, data, req.authname, {}) data['ticket']['link'] = link changes = data.get('changes') if cnum is None: changes = [] else: changes = [ change for change in (changes or []) if change.get('cnum') == cnum ] data['changes'] = changes context = Context.from_request(req, ticket.resource, absurls=True) data.update({ 'can_append': False, 'show_editor': False, 'start_time': ticket['changetime'], 'context': context, 'alist': attmod.attachment_data(context), 'styles': self._get_styles(chrome), 'link': tag.a(link, href=link), 'tag_': tag_, }) rendered = chrome.render_template(req, 'htmlnotification_ticket.html', data, fragment=True) return unicode(rendered) def _get_styles(self, chrome): for provider in chrome.template_providers: for prefix, dir in provider.get_htdocs_dirs(): if prefix != 'common': continue url_re = re.compile(r'\burl\([^\]]*\)') buf = ['#content > hr { display: none }'] for name in ('trac.css', 'ticket.css'): f = open(os.path.join(dir, 'css', name)) try: lines = f.read().splitlines() finally: f.close() buf.extend( url_re.sub('none', to_unicode(line)) for line in lines if not line.startswith('@import')) return ('/*<![CDATA[*/\n' + '\n'.join(buf).replace(']]>', 
']]]]><![CDATA[>') + '\n/*]]>*/') return '' def _set_charset(self, mime): from email.Charset import Charset, QP, BASE64, SHORTEST mime_encoding = self.config.get('notification', 'mime_encoding').lower() charset = Charset() charset.input_charset = 'utf-8' charset.output_charset = 'utf-8' charset.input_codec = 'utf-8' charset.output_codec = 'utf-8' if mime_encoding == 'base64': charset.header_encoding = BASE64 charset.body_encoding = BASE64 elif mime_encoding in ('qp', 'quoted-printable'): charset.header_encoding = QP charset.body_encoding = QP elif mime_encoding == 'none': charset.header_encoding = SHORTEST charset.body_encoding = None del mime['Content-Transfer-Encoding'] mime.set_charset(charset)
class ProjectDownloadsWiki(Component): implements(IWikiMacroProvider, IWikiSyntaxProvider) macros = {'FilesDownloadsCount': """Display the count of files downloads. Also the downloads which have been deleted are counted in.""", 'FilesDownloadsNarrow': """Display a narrow list of files downloads. Default is to not show anything when there are no downloads. Optional params: - no_hide: If no_hide is given, shows a text "No files downloads" instead. - title: h2 title for the downloads. If not given, default title is shown. - only_featured: Show only featured downloads Example usages: {{{ [[FilesDownloadsNarrow]] [[FilesDownloadsNarrow(no_hide=True)]] [[FilesDownloadsNarrow(title=My title, only_featured=True)]] }}} """, 'FilesDownloads': """Display a list of files downloads. Default is to not show anything when there are no downloads. Optional params: - no_hide: If no_hide is given, shows a text "No files downloads" instead. - title: h2 title for the downloads. If not given, default title is shown. - only_featured: Show only featured downloads Example usages: {{{ [[FilesDownloads]] [[FilesDownloads(no_hide=True)]] [[FilesDownloads(title=My title, only_featured=True)]] }}} """} override_download_links = Option('multiproject-files', 'override_download_link', default='True', doc="""Whether or not to override download links, if the DownloadsGlue component is not enabled. Can be useful to set to False, if other than multiproject components are using that.""") def get_macros(self): for macro in self.macros: yield macro def get_macro_description(self, name): return self.macros.get(name) def expand_macro(self, formatter, name, content, args=None): # Parse optional arguments if args is None: args = parse_args(content) if len(args) > 1: args = args[1] files_core = FilesCoreComponent(self.env) node_factory, download_config = files_core.files_node_factory_and_config(formatter.req) if 'FILES_DOWNLOADS_VIEW' not in formatter.req.perm: return '' if name == 'FilesDownloadsCount': count = ProjectDownloadEntry.total_download_count(node_factory.project_id) return html.span(count, class_="files_downloads_count") elif name == 'FilesDownloads' or name == 'FilesDownloadsNarrow': is_narrow = True if name == 'FilesDownloads': is_narrow = False no_hide = False if args.has_key('no_hide') and args['no_hide'].lower() == 'true': no_hide = True only_featured = False if args.has_key('only_featured') and args['only_featured'].lower() == 'true': only_featured = True title = _('Featured downloads') if only_featured else _('Downloads') try: title = _(args['title']) except KeyError: title = _('Featured downloads') if only_featured else _('Downloads') except ValueError as e: title = _('Invalid title: %(reason)s', reason=str(e)) download_entries = ProjectDownloadEntry.get_all_download_entries(node_factory.project_id, only_featured=only_featured) downloads = [] user_store = None user_by_id = {} if not is_narrow: user_store = get_userstore() for download_entry in download_entries: if not is_narrow and not user_by_id.has_key(download_entry.uploader_id): user = user_store.getUserWhereId(download_entry.uploader_id) user_by_id[download_entry.uploader_id] = user # This doesn't check whether the node really exists node = MappedFileNode.from_download_entry(download_entry, node_factory) downloads.append(node) add_stylesheet(formatter.req, 'multiproject/css/files.css') add_script(formatter.req, 'multiproject/js/files.js') return Chrome(self.env).render_template(formatter.req, 'multiproject_files_wiki.html', {'downloads' : downloads, 
'downloads_dir': download_config.downloads_dir, 'is_narrow': is_narrow, 'no_hide': no_hide, 'only_featured': only_featured, 'user_by_id': user_by_id, 'title': title, 'format_filename': format_filename}, 'text/html', True) # IWikiSyntaxProvider def get_link_resolvers(self): # We allow this to be configurated, so that the "download:example.txt" links can work by # other means also. if not self.env.is_component_enabled(DOWNLOADS_GLUE_COMPONENT) and self.override_download_links: yield ('download', self.file_link) yield ('filesdownload', self.file_link) yield ('file', self.file_link) def get_wiki_syntax(self): return [] def file_link(self, formatter, ns, target, label): req = formatter.req if ns != 'file' and ns != 'download' and ns != 'filesdownload': return files_core = FilesCoreComponent(self.env) node_factory, download_config = files_core.files_node_factory_and_config(req) try: if ns == 'file': node = MappedFileNode.from_path(target, node_factory, True) else: node = MappedFileNode.from_download_path(target, node_factory, True) missing_perm = None if node.is_download(): if 'FILES_DOWNLOADS_VIEW' not in req.perm: missing_perm = 'FILES_DOWNLOADS_VIEW' elif 'FILES_VIEW' not in req.perm: missing_perm = 'FILES_VIEW' if missing_perm: return html.a(label, href='#', title = _('Missing %(permission)s permission', permission=missing_perm), class_ = 'missing') if node.exists(): if node.is_file(): if node.is_download(): if not node.download().is_available(): return html.a(label, href='#', title = _('Download information not available for %(path)s', path=node.relative_path), class_ = 'missing') else: return html.a(label, href=node.get_url(req), title = _('Download %(name)s (%(size)s)',name=node.filename, size= pretty_size(node.size))) else: return html.a(label, href=node.get_url(req), title = _('File %(name)s',name=node.filename)) elif node.is_dir(): return html.a(label, href=node.get_url(req), title = _('Folder %(name)s',name=node.filename)) else: return html.a(label, href='#', title = _('Not existing file: %(path)s', path=node.relative_path), class_ = 'missing') except TracError: # File doesn't exist return html.a(label, href='#', title=_('Invalid target for %(ns)s: %(path)s',ns=ns, path=target), class_='missing')
class PortfolioPage(Component):

    implements(IAdminPanelProvider, ITemplateProvider)

    def __init__(self):
        self.env.log.debug('Initial')

    # IAdminPanelProvider methods

    def get_admin_panels(self, req):
        if req.perm.has_permission('TRAC_ADMIN'):
            yield ('general', 'General', 'portfolio', 'Portfolio')

    def render_admin_panel(self, req, cat, page, version, user='******'):
        # Here comes the page content, handling, etc.
        data = {}
        if req.method == "POST":
            submit = req.args.get('submit').strip()
            if submit == 'Add':
                name = req.args.get('name').strip()
                description = req.args.get('description').strip()
                # Parameterized query; avoids interpolating form input
                # directly into the SQL string.
                self.env.db_transaction(
                    "INSERT INTO portfolios (name, description, createtime, user) "
                    "VALUES (%s, %s, now(), %s)",
                    (name, description, user))
                add_notice(req, 'Portfolio has been added.')
            elif submit == 'Remove':
                sels = req.args.getlist('sels')
                if sels is not None and len(sels) > 0:
                    for sel in sels:
                        self.env.db_transaction(
                            "DELETE FROM portfolios WHERE id=%s",
                            (int(sel),))
                    add_notice(req, 'Portfolio has been deleted.')
            elif submit == 'Save':
                sel = req.args.get('sel').strip()
                name = req.args.get('name').strip()
                description = req.args.get('description').strip()
                self.env.db_transaction(
                    "UPDATE portfolios SET name=%s, description=%s, "
                    "createtime=now(), user=%s WHERE id=%s",
                    (name, description, user, int(sel)))
                add_notice(req, 'Portfolio has been saved.')
        else:
            sel = req.args.get('sel')
            if sel is not None:
                cursor = self.env.db_query(
                    "SELECT id, name, description, createtime, user "
                    "FROM portfolios WHERE id=%s", (int(sel),))
                if len(cursor) > 0:
                    data['view'] = 'detail'
                    data['sel'] = sel
                    data['name'] = cursor[0][1]
                    data['description'] = cursor[0][2]

        cursor = self.env.db_query(
            "SELECT id, name, description, createtime, user "
            "FROM portfolios ORDER BY name")
        data['portfolios'] = [(row[0], row[1], row[2], row[3], row[4])
                              for row in cursor]
        return ('admin_portfolio.html', data, None)

    # ITemplateProvider methods

    def get_htdocs_dirs(self):
        """Return the absolute path of a directory containing additional
        static resources (such as images, style sheets, etc).
        """
        from pkg_resources import resource_filename
        return [resource_filename(__name__, 'htdocs')]

    def get_templates_dirs(self):
        """Return the absolute path of the directory containing the provided
        ClearSilver/Genshi templates.
        """
        from pkg_resources import resource_filename
        return [resource_filename(__name__, 'templates')]
class TradePlugin(Component):

    implements(INavigationContributor, IRequestHandler)

    # INavigationContributor methods

    def get_active_navigation_item(self, req):
        if 'TICKET_CREATE' in req.perm:
            return 'trades'

    def get_navigation_items(self, req):
        if 'TICKET_CREATE' in req.perm:
            yield ('mainnav', 'trades',
                   html.a('Trades', href=req.href.trades()))

    # IRequestHandler methods

    def match_request(self, req):
        return req.path_info.find('/trades') == 0

    def process_request(self, req, user='******'):
        if req.path_info.find('/trades/list') == 0:
            data = {}
            cursor = self.env.db_query(
                "SELECT id, portfolio, buysell, quantity, exchange, symbol, "
                "cash, currency, tradedate, tradeid FROM trades ORDER BY id")
            data['trades'] = [(row[0], row[1], row[2], row[3], row[4],
                               row[5], row[6], row[7], row[8], row[9])
                              for row in cursor]
            return ('list.html', data, None)
        else:
            data = {}
            cursor = self.env.db_query(
                "SELECT value FROM parameters "
                "WHERE type='tolerance' and metric='quantity' LIMIT 1")
            data['tolerance'] = [(row[0]) for row in cursor]
            cursor = self.env.db_query(
                "SELECT metric, value FROM codes "
                "WHERE type='buysell' ORDER BY metric")
            data['buysellList'] = [(row[0], row[1]) for row in cursor]
            cursor = self.env.db_query(
                "SELECT metric, value FROM codes "
                "WHERE type='currency' ORDER BY metric")
            data['currencyList'] = [(row[0], row[1]) for row in cursor]
            cursor = self.env.db_query(
                "SELECT metric, value FROM codes "
                "WHERE type='exchange' ORDER BY metric")
            data['exchangeList'] = [(row[0], row[1]) for row in cursor]
            cursor = self.env.db_query(
                "SELECT name FROM portfolios ORDER BY name")
            data['portfolioList'] = [(row[0]) for row in cursor]
            cursor = self.env.db_query(
                "SELECT exchange, symbol, name FROM components "
                "ORDER BY exchange, symbol")
            data['symbolList'] = [(row[0], row[1], row[2]) for row in cursor]
            data['tradedate'] = datetime.now().strftime('%Y-%m-%d')

            if req.method == 'POST':
                portfolio = req.args.get('portfolio').strip()
                buysell = req.args.get('buysell').strip()
                quantity = req.args.get('quantity').strip()
                exchange = req.args.get('exchange').strip()
                cash = req.args.get('cash').strip()
                currency = req.args.get('currency').strip()
                temp = req.args.get('symbol').strip()
                tradedate = req.args.get('tradedate').strip()

                # Resolve the entered symbol or name against the components
                # list fetched above.
                symbol = ''
                for row in cursor:
                    if row[0] == exchange:
                        if row[1] == temp:
                            symbol = temp
                            break
                        elif row[2] == temp:
                            symbol = row[1]
                            break

                if symbol == '':
                    data['portfolio'] = portfolio
                    data['buysell'] = buysell
                    data['quantity'] = quantity
                    data['exchange'] = exchange
                    data['currency'] = currency
                    data['cash'] = cash
                    data['symbol'] = temp
                    data['tradedate'] = tradedate
                    add_warning(req, 'Please enter valid symbol or name.')
                else:
                    sql = "SELECT IFNULL(avg(close),0)*(1+(SELECT avg(value) " \
                          "FROM parameters WHERE type='tolerance' and metric='cash')) " \
                          " FROM prices WHERE exchange=%s and symbol=%s " \
                          "and datediff(now(), date) < 10"
                    args = (exchange, symbol)
                    cursor = self.env.db_query(sql, args)
                    data['price'] = [(row[0]) for row in cursor]
                    if float(data['price'][0]) <= 0:
                        data['portfolio'] = portfolio
                        data['buysell'] = buysell
                        data['quantity'] = quantity
                        data['exchange'] = exchange
                        data['currency'] = currency
                        data['cash'] = cash
                        data['symbol'] = temp
                        data['tradedate'] = tradedate
                        add_warning(req,
                                    'The security is no longer tradeable.')
                    elif float(cash) / int(quantity) > float(data['price'][0]):
                        data['portfolio'] = portfolio
                        data['buysell'] = buysell
                        data['quantity'] = quantity
                        data['exchange'] = exchange
                        data['currency'] = currency
                        data['cash'] = cash
                        data['symbol'] = temp
                        data['tradedate'] = tradedate
                        add_warning(req,
                                    'The cash/quantity exceeds the price '
                                    'tolerance of the last 10 days.')
                    else:
                        sql = "INSERT INTO trades (portfolio,buysell,quantity," \
                              "exchange,symbol,cash,currency,tradedate,tradeid) " \
                              " VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
                        args = (portfolio, buysell, quantity, exchange, symbol,
                                cash, currency, tradedate, user)
                        self.env.db_transaction(sql, args)
                        add_notice(req, 'Your trade has been saved.')
            return ('trades.html', data, None)
class DefaultHandler(Component):
    implements(IRequestHandler)

    def match_request(self, req):
        return True

    def process_request(self, req):
        pass
class AuthzSourcePolicy(Component): """Permission policy for `source:` and `changeset:` resources using a Subversion authz file. `FILE_VIEW` and `BROWSER_VIEW` permissions are granted as specified in the authz file. `CHANGESET_VIEW` permission is granted for changesets where `FILE_VIEW` is granted on at least one modified file, as well as for empty changesets. """ implements(IPermissionPolicy) authz_file = PathOption('svn', 'authz_file', '', """The path to the Subversion [%(svnbook)s authorization (authz) file]. To enable authz permission checking, the `AuthzSourcePolicy` permission policy must be added to `[trac] permission_policies`. Non-absolute paths are relative to the Environment `conf` directory. """, doc_args={ 'svnbook': 'http://svnbook.red-bean.com/en/1.7/' 'svn.serverconfig.pathbasedauthz.html' }) authz_module_name = Option( 'svn', 'authz_module_name', '', """The module prefix used in the `authz_file` for the default repository. If left empty, the global section is used. """) _handled_perms = frozenset([(None, 'BROWSER_VIEW'), (None, 'CHANGESET_VIEW'), (None, 'FILE_VIEW'), (None, 'LOG_VIEW'), ('source', 'BROWSER_VIEW'), ('source', 'FILE_VIEW'), ('source', 'LOG_VIEW'), ('changeset', 'CHANGESET_VIEW')]) def __init__(self): self._mtime = 0 self._authz = {} self._users = set() # IPermissionPolicy methods def check_permission(self, action, username, resource, perm): realm = resource.realm if resource else None if (realm, action) in self._handled_perms: authz, users = self._get_authz_info() if authz is None: return False if username == 'anonymous': usernames = '$anonymous', '*' else: usernames = username, '$authenticated', '*' if resource is None: return True if users & set(usernames) else None rm = RepositoryManager(self.env) try: repos = rm.get_repository(resource.parent.id) except TracError: return True # Allow error to be displayed in the repo index if repos is None: return True modules = [resource.parent.id or self.authz_module_name] if modules[0]: modules.append('') def check_path_0(spath): sections = [ authz.get(module, {}).get(spath) for module in modules ] sections = [section for section in sections if section] denied = False for user in usernames: for section in sections: if user in section: if section[user]: return True denied = True # Don't check section without module name # because the section with module name defines # the user's permissions. break if denied: # All users has no readable permission. 
return False def check_path(path): path = '/' + pathjoin(repos.scope, path) if path != '/': path += '/' # Allow access to parent directories of allowed resources for spath in set( sum((authz.get(module, {}).keys() for module in modules), [])): if spath.startswith(path): result = check_path_0(spath) if result is True: return True # Walk from resource up parent directories for spath in parent_iter(path): result = check_path_0(spath) if result is not None: return result if realm == 'source': return check_path(resource.id) elif realm == 'changeset': changes = list(repos.get_changeset(resource.id).get_changes()) if not changes or any( check_path(change[0]) for change in changes): return True def _get_authz_info(self): if not self.authz_file: self.log.error("The [svn] authz_file configuration option in " "trac.ini is empty or not defined") raise ConfigurationError() try: mtime = os.path.getmtime(self.authz_file) except OSError as e: self.log.error( "Error accessing svn authz permission policy " "file: %s", exception_to_unicode(e)) raise ConfigurationError() if mtime != self._mtime: self._mtime = mtime rm = RepositoryManager(self.env) modules = set(repos.reponame for repos in rm.get_real_repositories()) if '' in modules and self.authz_module_name: modules.add(self.authz_module_name) modules.add('') self.log.info("Parsing authz file: %s", self.authz_file) try: self._authz = parse(self.authz_file, modules) except ParsingError as e: self.log.error( "Error parsing svn authz permission policy " "file: %s", exception_to_unicode(e)) raise ConfigurationError() else: self._users = { user for paths in self._authz.itervalues() for path in paths.itervalues() for user, result in path.iteritems() if result } return self._authz, self._users
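# Hedged usage sketch (not part of the policy): once AuthzSourcePolicy is
# listed in [trac] permission_policies, fine-grained source checks go through
# the normal permission cache.  The environment path, username, repository
# name and file path below are placeholders.
def _example_check_source_permission(env_path='/path/to/env'):
    from trac.env import Environment
    from trac.perm import PermissionCache
    from trac.resource import Resource
    env = Environment(env_path)  # hypothetical environment location
    source = Resource('source', 'trunk/README',
                      parent=Resource('repository', ''))
    perm = PermissionCache(env, 'alice')
    # True only if the authz file grants read access on that path.
    return 'FILE_VIEW' in perm(source)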
class RequestFilter(Component):
    implements(IRequestFilter)

    def pre_process_request(self, req, handler):
        raise TracError("Raised in pre_process_request")

    def post_process_request(self, req, template, data, metadata):
        return template, data, metadata
class EmailDistributor(Component): """Distributes notification events as emails.""" implements(INotificationDistributor) formatters = ExtensionPoint(INotificationFormatter) decorators = ExtensionPoint(IEmailDecorator) resolvers = OrderedExtensionsOption( 'notification', 'email_address_resolvers', IEmailAddressResolver, 'SessionEmailResolver', include_missing=False, doc="""Comma separated list of email resolver components in the order they will be called. If an email address is resolved, the remaining resolvers will not be called. """) default_format = Option( 'notification', 'default_format.email', 'text/plain', doc="Default format to distribute email notifications.") def __init__(self): self._charset = create_charset( self.config.get('notification', 'mime_encoding')) # INotificationDistributor methods def transports(self): yield 'email' def distribute(self, transport, recipients, event): if transport != 'email': return if not self.config.getbool('notification', 'smtp_enabled'): self.log.debug("%s skipped because smtp_enabled set to false", self.__class__.__name__) return formats = {} for f in self.formatters: for style, realm in f.get_supported_styles(transport): if realm == event.realm: formats[style] = f if not formats: self.log.error("%s No formats found for %s %s", self.__class__.__name__, transport, event.realm) return self.log.debug( "%s has found the following formats capable of " "handling '%s' of '%s': %s", self.__class__.__name__, transport, event.realm, ', '.join(formats.keys())) notify_sys = NotificationSystem(self.env) always_cc = set(notify_sys.smtp_always_cc_list) use_public_cc = notify_sys.use_public_cc addresses = {} for sid, authed, addr, fmt in recipients: if fmt not in formats: self.log.debug("%s format %s not available for %s %s", self.__class__.__name__, fmt, transport, event.realm) continue if sid and not addr: for resolver in self.resolvers: addr = resolver.get_address_for_session(sid, authed) if addr: status = 'authenticated' if authed else \ 'not authenticated' self.log.debug( "%s found the address '%s' for '%s " "(%s)' via %s", self.__class__.__name__, addr, sid, status, resolver.__class__.__name__) break if addr: addresses.setdefault(fmt, set()).add(addr) if use_public_cc or sid and sid in always_cc: always_cc.add(addr) else: status = 'authenticated' if authed else 'not authenticated' self.log.debug( "%s was unable to find an address for: %s " "(%s)", self.__class__.__name__, sid, status) outputs = {} failed = [] for fmt, formatter in formats.iteritems(): if fmt not in addresses and fmt != 'text/plain': continue try: outputs[fmt] = formatter.format(transport, fmt, event) except Exception as e: self.log.warning( '%s caught exception while ' 'formatting %s to %s for %s: %s%s', self.__class__.__name__, event.realm, fmt, transport, formatter.__class__, exception_to_unicode(e, traceback=True)) failed.append(fmt) # Fallback to text/plain when formatter is broken if failed and 'text/plain' in outputs: for fmt in failed: addresses.setdefault('text/plain', set()) \ .update(addresses.pop(fmt, ())) for fmt, addrs in addresses.iteritems(): self.log.debug("%s is sending event as '%s' to: %s", self.__class__.__name__, fmt, ', '.join(addrs)) message = self._create_message(fmt, outputs) if message: addrs = set(addrs) cc_addrs = sorted(addrs & always_cc) bcc_addrs = sorted(addrs - always_cc) self._do_send(transport, event, message, cc_addrs, bcc_addrs) else: self.log.warning("%s cannot send event '%s' as '%s': %s", self.__class__.__name__, event.realm, fmt, ', '.join(addrs)) 
def _create_message(self, format, outputs): if format not in outputs: return None message = create_mime_multipart('related') maintype, subtype = format.split('/') preferred = create_mime_text(outputs[format], subtype, self._charset) if format != 'text/plain' and 'text/plain' in outputs: alternative = create_mime_multipart('alternative') alternative.attach( create_mime_text(outputs['text/plain'], 'plain', self._charset)) alternative.attach(preferred) preferred = alternative message.attach(preferred) return message def _do_send(self, transport, event, message, cc_addrs, bcc_addrs): config = self.config['notification'] smtp_from = config.get('smtp_from') smtp_from_name = config.get('smtp_from_name') or self.env.project_name smtp_reply_to = config.get('smtp_replyto') headers = dict() headers['X-Mailer'] = 'Trac %s, by Edgewall Software'\ % self.env.trac_version headers['X-Trac-Version'] = self.env.trac_version headers['X-Trac-Project'] = self.env.project_name headers['X-URL'] = self.env.project_url headers['X-Trac-Realm'] = event.realm headers['Precedence'] = 'bulk' headers['Auto-Submitted'] = 'auto-generated' if isinstance(event.target, (list, tuple)): targetid = ','.join(map(get_target_id, event.target)) else: targetid = get_target_id(event.target) rootid = create_message_id(self.env, targetid, smtp_from, None, more=event.realm) if event.category == 'created': headers['Message-ID'] = rootid else: headers['Message-ID'] = create_message_id(self.env, targetid, smtp_from, event.time, more=event.realm) headers['In-Reply-To'] = rootid headers['References'] = rootid headers['Date'] = formatdate() headers['From'] = (smtp_from_name, smtp_from) \ if smtp_from_name else smtp_from headers['To'] = 'undisclosed-recipients: ;' if cc_addrs: headers['Cc'] = ', '.join(cc_addrs) if bcc_addrs: headers['Bcc'] = ', '.join(bcc_addrs) headers['Reply-To'] = smtp_reply_to for k, v in headers.iteritems(): set_header(message, k, v, self._charset) for decorator in self.decorators: decorator.decorate_message(event, message, self._charset) from_name, from_addr = parseaddr(str(message['From'])) to_addrs = set() for name in ('To', 'Cc', 'Bcc'): values = map(str, message.get_all(name, ())) to_addrs.update(addr for name, addr in getaddresses(values) if addr) del message['Bcc'] NotificationSystem(self.env).send_email(from_addr, list(to_addrs), message.as_string())
class RequestFilter4Arg(Component): """Pass-through request filter using the four-argument `post_process_request` signature.""" implements(IRequestFilter) def pre_process_request(self, req, handler): return handler def post_process_request(self, req, template, data, metadata): return template, data, metadata
class SmtpEmailSender(Component): """E-mail sender connecting to an SMTP server.""" implements(IEmailSender) smtp_server = Option( 'notification', 'smtp_server', 'localhost', """SMTP server hostname to use for email notifications.""") smtp_port = IntOption( 'notification', 'smtp_port', 25, """SMTP server port to use for email notification.""") smtp_user = Option('notification', 'smtp_user', '', """Username for authenticating with SMTP server.""") smtp_password = Option( 'notification', 'smtp_password', '', """Password for authenticating with SMTP server.""") use_tls = BoolOption('notification', 'use_tls', 'false', """Use SSL/TLS to send notifications over SMTP.""") def send(self, from_addr, recipients, message): global local_hostname # Ensure the message complies with RFC2822: use CRLF line endings message = fix_eol(message, CRLF) self.log.info("Sending notification through SMTP at %s:%d to %s", self.smtp_server, self.smtp_port, recipients) try: server = smtplib.SMTP(self.smtp_server, self.smtp_port, local_hostname) local_hostname = server.local_hostname except smtplib.socket.error as e: raise ConfigurationError( tag_( "SMTP server connection error (%(error)s). Please " "modify %(option1)s or %(option2)s in your " "configuration.", error=to_unicode(e), option1=tag.code("[notification] smtp_server"), option2=tag.code("[notification] smtp_port"))) # server.set_debuglevel(True) if self.use_tls: server.ehlo() if 'starttls' not in server.esmtp_features: raise TracError( _("TLS enabled but server does not support" " TLS")) server.starttls() server.ehlo() if self.smtp_user: server.login(self.smtp_user.encode('utf-8'), self.smtp_password.encode('utf-8')) start = time_now() server.sendmail(from_addr, recipients, message) t = time_now() - start if t > 5: self.log.warning( "Slow mail submission (%.2f s), " "check your mail setup", t) if self.use_tls: # avoid false failure detection when the server closes # the SMTP connection with TLS enabled import socket try: server.quit() except socket.sslerror: pass else: server.quit()
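# Hedged sketch of the SMTP conversation performed by SmtpEmailSender.send()
# above, reduced to plain smtplib.  Host, port, credentials and the message are
# placeholders; Trac's configuration lookups, logging, timing and the
# TLS-related quit() workaround are omitted.
import smtplib

def sketch_smtp_send(from_addr, recipients, message, host='localhost', port=25,
                     user=None, password=None, use_tls=False):
    server = smtplib.SMTP(host, port)
    try:
        if use_tls:
            server.ehlo()
            if 'starttls' not in server.esmtp_features:
                raise RuntimeError("TLS enabled but server does not support TLS")
            server.starttls()
            server.ehlo()
        if user:
            server.login(user, password)
        server.sendmail(from_addr, recipients, message)
    finally:
        server.quit()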
class RaisingAuthenticator(Component): """Authenticator that unconditionally raises a `TracError` on every authentication attempt.""" implements(IAuthenticator) def authenticate(self, req): raise TracError("Bad attempt")
class ProjectAdmin(Component): """Adds a project number to tickets. Users with the `CONTROLLER_ADMIN` (or `TRAC_ADMIN`) permission can edit the list of default project numbers in the admin panel. Editing the project number manually: - when creating a ticket, manually typed project numbers will be '''skipped'''! """ implements(IAdminPanelProvider, ITemplateProvider, ITicketChangeListener, IPermissionRequestor, IRequestFilter) # ITemplateStreamFilter , IRequestHandler, ITicketManipulator changed_fields = ['type', 'milestone'] #=========================================================================== # TODO: maybe using class CustomField from settings.py #=========================================================================== def _init_config(self): section = 'ticket-custom' fld_name = 'projectplugin' if self.config and not self.config.get(section, fld_name): self.log.debug("adding custom-ticket %s" % fld_name) self.config.set(section, fld_name, 'text') self.config.set(section, fld_name + '.label', 'Project') self.config.set(section, fld_name + '.order', '40') self.config.set(section, fld_name + '.value', '') self.config.save() self.log.info("custom-ticket fields added to trac.ini") # ITemplateProvider methods def get_templates_dirs(self): from pkg_resources import resource_filename #@UnresolvedImport return [resource_filename(__name__, 'templates')] def get_htdocs_dirs(self): from pkg_resources import resource_filename #@UnresolvedImport return [('pp_htdocs', resource_filename(__name__, 'htdocs'))] # IPermissionRequestor def get_permission_actions(self): yield ("CONTROLLER_ADMIN") def checkPermissions(self, req): for permission in ("TRAC_ADMIN", "CONTROLLER_ADMIN"): if permission in PermissionSystem(self.env).get_user_permissions( req.authname): return True return False #=============================================================================== # ITicketChangeListener #=============================================================================== def ticket_created(self, ticket): self._save_project_no(ticket, self.changed_fields) return def ticket_changed(self, ticket, comment, author, old_values): if 'projectplugin' in old_values: return flds = [] for cf in self.changed_fields: if cf in old_values: flds.append(cf) self._save_project_no(ticket, flds) return def ticket_deleted(self, ticket): """Called when a ticket is deleted.""" return def pre_process_request(self, req, handler): return handler # for ClearSilver templates def post_process_request(self, req, template, data, content_type): if template == 'ticket.html': self._init_config() if not self.checkPermissions(req): add_stylesheet(req, 'pp_htdocs/prj_style.css') return template, data, content_type # overridden from IAdminPanelProvider def get_admin_panels(self, req): """Return a list of available admin panels. The items returned by this function must be tuples of the form `(category, category_label, page, page_label)`. """ # TODO: add new permission !!! if req.perm.has_permission('TRAC_ADMIN') or req.perm.has_permission( "CONTROLLER_ADMIN"): yield ('ticket', _('Ticket System'), 'projectplugin', _('Project')) # overridden from IAdminPanelProvider def render_admin_panel(self, req, category, page, path_info): """Process a request for an admin panel. This function should return a tuple of the form `(template, data)`, where `template` is the name of the template to use and `data` is the data to be passed to the template.
""" errors = [] if req.args and 'projectname' in req.args: errors = self._save({ 'projectname': req.args['projectname'], 'milestone': req.args['milestone'], 'type': req.args['type'] }) return self._print_view(errors) def _save_project_no(self, ticket, changed_fields): """Save project number to corresponding ticket. """ projects, max_order = self._get_projects() project_no = self.config.get('project-plugin', 'default-project') for prj in projects: if prj['field'] in changed_fields: val = ticket.get_value_or_default(prj['field']) if val == prj['value']: project_no = prj['project'] break try: result = self.env.db_query("SELECT value FROM ticket_custom" " WHERE ticket=%s" " AND name='projectplugin'", (ticket.id,)) sql_type = "inserted" for row in result: if row[0] and row[0] == project_no: self.log.info( 'project no %s already committed for ticket %s' % ( project_no, ticket.id, )) return [] else: sql_type = "updated" if sql_type == "inserted": self.env.db_transaction( "INSERT INTO ticket_custom (ticket, name, value)" " VALUES(%s, 'projectplugin', %s)", (ticket.id, project_no)) else: self.env.db_transaction("UPDATE ticket_custom " " SET value = %s" " WHERE ticket = %s" " AND name='projectplugin'", (project_no, ticket.id)) self.log.info('%s project no %s for ticket %s' % ( sql_type, project_no, ticket.id, )) return [] except Exception, e: self.log.error("Error executing SQL statement: %s", e) return ['projectplugin', e]
class ObjectLinking(Component): implements(ITemplateStreamFilter, IRequestHandler, ITemplateProvider, ITicketChangeListener, IRequestFilter) def pre_process_request(self, req, handler): if re.match(r'/newticket', req.path_info) and 'linkinfo' in req.args: self.link_info = req.args['linkinfo'] else: self.link_info = None return handler def post_process_request(self, req, template, data, content_type): return template, data, content_type # ITicketChangeListener def ticket_created(self, ticket): self.add_link_to_ticket(ticket) def ticket_changed(self, ticket, comment, author, old_values): pass def ticket_deleted(self, ticket): pass # IRequestHandler methods def match_request(self, req): return re.match(r'/link', req.path_info) def process_request(self, req): path_info = req.path_info.split('/')[2] controller = self.get_controller(path_info) return controller(req) # ITemplateProvider methods def get_htdocs_dirs(self): return [('objectlinking', pkg_resources.resource_filename('objectlinking', 'htdocs'))] def get_templates_dirs(self): return [pkg_resources.resource_filename('objectlinking', 'templates')] def filter_stream(self, req, method, filename, stream, original_data): transformer = self.get_transformer_for(req, method, filename) if transformer is None: return stream return stream | transformer.get_stream(req, method, filename, stream, original_data) def get_controller(self, path_info): controller = None if path_info == 'create': controller = CreateLinkController(self.env) if path_info == 'delete': controller = DeleteLinkController(self.env) if path_info == 'search': controller = SearchObjectsController(self.env) return controller def get_transformer_for(self, req, method, template_name): filter = None if re.match(r'/ticket', req.path_info) and template_name == 'ticket.html': filter = TicketLinksTransformer(self.env) if re.match(r'/newticket', req.path_info) and template_name == 'ticket.html': filter = AddTicketLinkInfoToFormTransformer(self.env) return filter def add_link_to_ticket(self, ticket): if self.link_info: target_type, target_id, type = [ x.strip() for x in self.link_info.split(":") ] add_link = CreateLinkController(self.env) add_link.do_the_work('ticket', ticket.id, target_type, target_id, type, None) self.link_info = None
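# The 'linkinfo' request argument consumed by add_link_to_ticket() above is a
# colon-separated triple of target type, target id and link type.  A tiny
# standalone illustration; the sample value is made up.
def parse_link_info(link_info):
    target_type, target_id, link_type = [x.strip() for x in link_info.split(':')]
    return target_type, target_id, link_type

if __name__ == '__main__':
    print(parse_link_info('ticket: 42 :blocks'))  # ('ticket', '42', 'blocks')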
class CommitTicketUpdater(Component): """Update tickets based on commit messages. This component hooks into changeset notifications and searches commit messages for text in the form of: {{{ command #1 command #1, #2 command #1 & #2 command #1 and #2 }}} Instead of the short-hand syntax "#1", "ticket:1" can be used as well, e.g.: {{{ command ticket:1 command ticket:1, ticket:2 command ticket:1 & ticket:2 command ticket:1 and ticket:2 }}} Using the long-form syntax allows a comment to be included in the reference, e.g.: {{{ command ticket:1#comment:1 command ticket:1#comment:description }}} In addition, the ':' character can be omitted and issue or bug can be used instead of ticket. You can have more than one command in a message. The following commands are supported. There is more than one spelling for each command, to make this as user-friendly as possible. close, closed, closes, fix, fixed, fixes:: The specified tickets are closed, and the commit message is added to them as a comment. references, refs, addresses, re, see:: The specified tickets are left in their current status, and the commit message is added to them as a comment. A fairly complicated example of what you can do is with a commit message of: Changed blah and foo to do this or that. Fixes #10 and #12, and refs #12. This will close #10 and #12, and add a note to #12. """ implements(IRepositoryChangeListener) envelope = Option('ticket', 'commit_ticket_update_envelope', '', """Require commands to be enclosed in an envelope. Must be empty or contain two characters. For example, if set to `[]`, then commands must be in the form of `[closes #4]`.""") commands_close = Option('ticket', 'commit_ticket_update_commands.close', 'close closed closes fix fixed fixes', """Commands that close tickets, as a space-separated list.""") commands_refs = Option('ticket', 'commit_ticket_update_commands.refs', 'addresses re references refs see', """Commands that add a reference, as a space-separated list. If set to the special value `<ALL>`, all tickets referenced by the message will get a reference to the changeset.""") check_perms = BoolOption('ticket', 'commit_ticket_update_check_perms', 'true', """Check that the committer has permission to perform the requested operations on the referenced tickets. This requires that the user names be the same for Trac and repository operations.""") notify = BoolOption('ticket', 'commit_ticket_update_notify', 'true', """Send ticket change notification when updating a ticket.""") ticket_prefix = '(?:#|(?:ticket|issue|bug)[: ]?)' ticket_reference = ticket_prefix + \ '[0-9]+(?:#comment:([0-9]+|description))?' 
ticket_command = (r'(?P<action>[A-Za-z]*)\s*.?\s*' r'(?P<ticket>%s(?:(?:[, &]*|[ ]?and[ ]?)%s)*)' % (ticket_reference, ticket_reference)) @property def command_re(self): begin, end = (re.escape(self.envelope[0:1]), re.escape(self.envelope[1:2])) return re.compile(begin + self.ticket_command + end) ticket_re = re.compile(ticket_prefix + '([0-9]+)') _last_cset_id = None # IRepositoryChangeListener methods def changeset_added(self, repos, changeset): if self._is_duplicate(changeset): return tickets = self._parse_message(changeset.message) comment = self.make_ticket_comment(repos, changeset) self._update_tickets(tickets, changeset, comment, datetime.now(utc)) def changeset_modified(self, repos, changeset, old_changeset): if self._is_duplicate(changeset): return tickets = self._parse_message(changeset.message) old_tickets = {} if old_changeset is not None: old_tickets = self._parse_message(old_changeset.message) tickets = dict(each for each in tickets.iteritems() if each[0] not in old_tickets) comment = self.make_ticket_comment(repos, changeset) self._update_tickets(tickets, changeset, comment, datetime.now(utc)) def _is_duplicate(self, changeset): # Avoid duplicate changes with multiple scoped repositories cset_id = (changeset.rev, changeset.message, changeset.author, changeset.date) if cset_id != self._last_cset_id: self._last_cset_id = cset_id return False return True def _parse_message(self, message): """Parse the commit message and return the ticket references.""" cmd_groups = self.command_re.finditer(message) functions = self._get_functions() tickets = {} for m in cmd_groups: cmd, tkts = m.group('action', 'ticket') func = functions.get(cmd.lower()) if not func and self.commands_refs.strip() == '<ALL>': func = self.cmd_refs if func: for tkt_id in self.ticket_re.findall(tkts): tickets.setdefault(int(tkt_id), []).append(func) return tickets def make_ticket_comment(self, repos, changeset): """Create the ticket comment from the changeset data.""" rev = changeset.rev revstring = str(rev) drev = str(repos.display_rev(rev)) if repos.reponame: revstring += '/' + repos.reponame drev += '/' + repos.reponame return """\ In [changeset:"%s" %s]: {{{ #!CommitTicketReference repository="%s" revision="%s" %s }}}""" % (revstring, drev, repos.reponame, rev, changeset.message.strip()) def _update_tickets(self, tickets, changeset, comment, date): """Update the tickets with the given comment.""" authname = self._authname(changeset) perm = PermissionCache(self.env, authname) for tkt_id, cmds in tickets.iteritems(): try: self.log.debug("Updating ticket #%d", tkt_id) save = False with self.env.db_transaction: ticket = Ticket(self.env, tkt_id) ticket_perm = perm(ticket.resource) for cmd in cmds: if cmd(ticket, changeset, ticket_perm) is not False: save = True if save: ticket.save_changes(authname, comment, date) if save: self._notify(ticket, date, changeset.author, comment) except Exception as e: self.log.error("Unexpected error while processing ticket " "#%s: %s", tkt_id, exception_to_unicode(e)) def _notify(self, ticket, date, author, comment): """Send a ticket update notification.""" if not self.notify: return event = TicketChangeEvent('changed', ticket, date, author, comment) try: NotificationSystem(self.env).notify(event) except Exception as e: self.log.error("Failure sending notification on change to " "ticket #%s: %s", ticket.id, exception_to_unicode(e)) def _get_functions(self): """Create a mapping from commands to command functions.""" functions = {} for each in dir(self): if not each.startswith('cmd_'): 
continue func = getattr(self, each) for cmd in getattr(self, 'commands_' + each[4:], '').split(): functions[cmd] = func return functions def _authname(self, changeset): """Returns the author of the changeset, normalizing the casing if [trac] ignore_auth_case is true.""" return changeset.author.lower() \ if self.env.config.getbool('trac', 'ignore_auth_case') \ else changeset.author # Command-specific behavior # The ticket isn't updated if all extracted commands return False. def cmd_close(self, ticket, changeset, perm): authname = self._authname(changeset) if self.check_perms and 'TICKET_MODIFY' not in perm: self.log.info("%s doesn't have TICKET_MODIFY permission for #%d", authname, ticket.id) return False ticket['status'] = 'closed' ticket['resolution'] = 'fixed' if not ticket['owner']: ticket['owner'] = authname def cmd_refs(self, ticket, changeset, perm): if self.check_perms and 'TICKET_APPEND' not in perm: self.log.info("%s doesn't have TICKET_APPEND permission for #%d", self._authname(changeset), ticket.id) return False
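# Standalone demonstration of the commit-message parsing implemented by
# CommitTicketUpdater above, assuming an empty commit_ticket_update_envelope.
# The regular expressions are copied from the class; only the driver loop and
# the sample message are new.
import re

ticket_prefix = '(?:#|(?:ticket|issue|bug)[: ]?)'
ticket_reference = ticket_prefix + '[0-9]+(?:#comment:([0-9]+|description))?'
ticket_command = (r'(?P<action>[A-Za-z]*)\s*.?\s*'
                  r'(?P<ticket>%s(?:(?:[, &]*|[ ]?and[ ]?)%s)*)'
                  % (ticket_reference, ticket_reference))
command_re = re.compile(ticket_command)
ticket_re = re.compile(ticket_prefix + '([0-9]+)')

if __name__ == '__main__':
    message = ("Changed blah and foo to do this or that. "
               "Fixes #10 and #12, and refs #12.")
    for m in command_re.finditer(message):
        cmd, tkts = m.group('action', 'ticket')
        # e.g. action 'fixes' with tickets ['10', '12'], then 'refs' with ['12']
        print(cmd.lower(), ticket_re.findall(tkts))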
class ExcelReportModule(Component): implements(IRequestFilter) _PATH_INFO_MATCH = re.compile(r'/report/[0-9]+').match def pre_process_request(self, req, handler): if self._PATH_INFO_MATCH(req.path_info) \ and req.args.get('format') in ('xlsx', 'xls') \ and handler.__class__.__name__ == 'ReportModule': req.args['max'] = 0 return handler def post_process_request(self, req, template, data, content_type): if template == 'report_view.html' and req.args.get('id'): format = req.args.getfirst('format') if format in ('xlsx', 'xls'): resource = Resource('report', req.args['id']) data['context'] = Context.from_request(req, resource, absurls=True) self._convert_report(format, req, data) elif not format: self._add_alternate_links(req) return template, data, content_type def _convert_report(self, format, req, data): book = get_workbook_writer(self.env, req) writer = book.create_sheet(dgettext('messages', 'Report')) writer.write_row([ ('%s (%s)' % (data['title'], dngettext('messages', '%(num)s match', '%(num)s matches', data['numrows'])), 'header', -1, -1) ]) for value_for_group, row_group in data['row_groups']: writer.move_row() if value_for_group and len(row_group): writer.write_row([ ('%s (%s)' % (value_for_group, dngettext('messages', '%(num)s match', '%(num)s matches', len(row_group))), 'header2', -1, -1) ]) for header_group in data['header_groups']: writer.write_row([(header['title'], 'thead', None, None) for header in header_group if not header['hidden']]) for row in row_group: for cell_group in row['cell_groups']: cells = [] for cell in cell_group: cell_header = cell['header'] if cell_header['hidden']: continue col = cell_header['col'].strip('_').lower() value, style, width, line = \ self._get_cell_data(req, col, cell, row, writer) cells.append((value, style, width, line)) writer.write_row(cells) writer.set_col_widths() content = book.dumps() req.send_response(200) req.send_header('Content-Type', book.mimetype) req.send_header('Content-Length', len(content)) req.send_header('Content-Disposition', 'filename=report_%s.%s' % (req.args['id'], format)) req.end_headers() req.write(content) raise RequestDone def _get_cell_data(self, req, col, cell, row, writer): value = cell['value'] if col == 'report': url = self.env.abs_href.report(value) width, line = writer.get_metrics(value) return value, col, width, line if col in ('ticket', 'id'): id_value = cell['value'] value = '#%s' % id_value url = get_resource_url(self.env, row['resource'], self.env.abs_href) width = len(value) return id_value, 'id', width, 1 if col == 'milestone': url = self.env.abs_href.milestone(value) width, line = writer.get_metrics(value) return value, col, width, line if col == 'time': if isinstance(value, basestring) and value.isdigit(): value = from_utimestamp(long(value)) return value, '[time]', None, None elif col in ('date', 'created', 'modified'): if isinstance(value, basestring) and value.isdigit(): value = from_utimestamp(long(value)) return value, '[date]', None, None elif col == 'datetime': if isinstance(value, basestring) and value.isdigit(): value = from_utimestamp(long(value)) return value, '[datetime]', None, None width, line = writer.get_metrics(value) return value, col, width, line def _add_alternate_links(self, req): params = {} for arg in req.args.keys(): if not arg.isupper(): continue params[arg] = req.args.get(arg) if 'USER' not in params: params['USER'] = req.authname if 'sort' in req.args: params['sort'] = req.args['sort'] if 'asc' in req.args: params['asc'] = req.args['asc'] href = '' if params: href = '&' + 
unicode_urlencode(params) format = get_excel_format(self.env) mimetype = get_excel_mimetype(format) add_link(req, 'alternate', '?format=' + format + href, _("Excel"), mimetype)
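# _get_cell_data() above converts digit-string cell values with
# from_utimestamp(), i.e. they are microseconds since the Unix epoch.  A
# stdlib-only equivalent for illustration (the sample value is made up).
from datetime import datetime, timedelta

def sketch_from_utimestamp(us):
    """Convert microseconds since 1970-01-01 00:00:00 UTC to a naive datetime."""
    return datetime(1970, 1, 1) + timedelta(microseconds=int(us))

if __name__ == '__main__':
    print(sketch_from_utimestamp('86400000000'))  # 1970-01-02 00:00:00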
class SessionAdmin(Component): """trac-admin command provider for session management""" implements(IAdminCommandProvider) request_handlers = ExtensionPoint(IRequestHandler) def get_admin_commands(self): hints = { 'datetime': get_datetime_format_hint(get_console_locale(self.env)), 'iso8601': get_datetime_format_hint('iso8601'), } yield ('session list', '[sid[:0|1]] [...]', """List the name and email for the given sids Specifying the sid 'anonymous' lists all unauthenticated sessions, and 'authenticated' all authenticated sessions. '*' lists all sessions, and is the default if no sids are given. An sid suffix ':0' operates on an unauthenticated session with the given sid, and a suffix ':1' on an authenticated session (the default).""", self._complete_list, self._do_list) yield ('session add', '<sid[:0|1]> [name] [email]', """Create a session for the given sid Populates the name and email attributes for the given session. Adding a suffix ':0' to the sid makes the session unauthenticated, and a suffix ':1' makes it authenticated (the default if no suffix is specified).""", None, self._do_add) yield ('session set', '<name|email|default_handler> ' '<sid[:0|1]> <value>', """Set the name or email attribute of the given sid An sid suffix ':0' operates on an unauthenticated session with the given sid, and a suffix ':1' on an authenticated session (the default).""", self._complete_set, self._do_set) yield ('session delete', '<sid[:0|1]> [...]', """Delete the session of the specified sid An sid suffix ':0' operates on an unauthenticated session with the given sid, and a suffix ':1' on an authenticated session (the default). Specifying the sid 'anonymous' will delete all anonymous sessions.""", self._complete_delete, self._do_delete) yield ('session purge', '<age>', """Purge anonymous sessions older than given age or date Age may be specified as a relative time like "90 days ago", or as a date in the "%(datetime)s" or "%(iso8601)s" (ISO 8601) format.""" % hints, None, self._do_purge) @lazy def _valid_default_handlers(self): return sorted(handler.__class__.__name__ for handler in self.request_handlers if is_valid_default_handler(handler)) def _split_sid(self, sid): if sid.endswith(':0'): return sid[:-2], 0 elif sid.endswith(':1'): return sid[:-2], 1 else: return sid, 1 def _get_sids(self): rows = self.env.db_query("SELECT sid, authenticated FROM session") return ['%s:%d' % (sid, auth) for sid, auth in rows] def _get_list(self, sids): all_anon = 'anonymous' in sids or '*' in sids all_auth = 'authenticated' in sids or '*' in sids sids = {self._split_sid(sid) for sid in sids if sid not in ('anonymous', 'authenticated', '*')} rows = self.env.db_query(""" SELECT DISTINCT s.sid, s.authenticated, s.last_visit, n.value, e.value, h.value FROM session AS s LEFT JOIN session_attribute AS n ON (n.sid=s.sid AND n.authenticated=s.authenticated AND n.name='name') LEFT JOIN session_attribute AS e ON (e.sid=s.sid AND e.authenticated=s.authenticated AND e.name='email') LEFT JOIN session_attribute AS h ON (h.sid=s.sid AND h.authenticated=s.authenticated AND h.name='default_handler') ORDER BY s.sid, s.authenticated """) for sid, authenticated, last_visit, name, email, handler in rows: if all_anon and not authenticated or all_auth and authenticated \ or (sid, authenticated) in sids: yield (sid, authenticated, format_date(to_datetime(last_visit), console_date_format), name, email, handler) def _complete_list(self, args): all_sids = self._get_sids() + ['*', 'anonymous', 'authenticated'] return set(all_sids) - set(args) def 
_complete_set(self, args): if len(args) == 1: return ['name', 'email'] elif len(args) == 2: return self._get_sids() def _complete_delete(self, args): all_sids = self._get_sids() + ['anonymous'] return set(all_sids) - set(args) def _do_list(self, *sids): if not sids: sids = ['*'] headers = (_("SID"), _("Auth"), _("Last Visit"), _("Name"), _("Email"), _("Default Handler")) print_table(self._get_list(sids), headers) def _do_add(self, sid, name=None, email=None): sid, authenticated = self._split_sid(sid) with self.env.db_transaction as db: try: db("INSERT INTO session VALUES (%s, %s, %s)", (sid, authenticated, int(time_now()))) except Exception: raise AdminCommandError(_("Session '%(sid)s' already exists", sid=sid)) if name: db("INSERT INTO session_attribute VALUES (%s,%s,'name',%s)", (sid, authenticated, name)) if email: db("INSERT INTO session_attribute VALUES (%s,%s,'email',%s)", (sid, authenticated, email)) self.env.invalidate_known_users_cache() def _do_set(self, attr, sid, val): if attr not in ('name', 'email', 'default_handler'): raise AdminCommandError(_("Invalid attribute '%(attr)s'", attr=attr)) if attr == 'default_handler': if val and val not in self._valid_default_handlers: raise AdminCommandError(_("Invalid default_handler '%(val)s'", val=val)) sid, authenticated = self._split_sid(sid) with self.env.db_transaction as db: if not db("""SELECT sid FROM session WHERE sid=%s AND authenticated=%s""", (sid, authenticated)): raise AdminCommandError(_("Session '%(sid)s' not found", sid=sid)) db(""" DELETE FROM session_attribute WHERE sid=%s AND authenticated=%s AND name=%s """, (sid, authenticated, attr)) if val: db("INSERT INTO session_attribute VALUES (%s, %s, %s, %s)", (sid, authenticated, attr, val)) self.env.invalidate_known_users_cache() def _do_delete(self, *sids): with self.env.db_transaction as db: for sid in sids: sid, authenticated = self._split_sid(sid) if sid == 'anonymous': db("DELETE FROM session WHERE authenticated=0") db("DELETE FROM session_attribute WHERE authenticated=0") else: db(""" DELETE FROM session WHERE sid=%s AND authenticated=%s """, (sid, authenticated)) db(""" DELETE FROM session_attribute WHERE sid=%s AND authenticated=%s """, (sid, authenticated)) self.env.invalidate_known_users_cache() def _do_purge(self, age): when = parse_date(age, hint='datetime', locale=get_console_locale(self.env)) with self.env.db_transaction as db: ts = to_timestamp(when) db(""" DELETE FROM session WHERE authenticated=0 AND last_visit<%s """, (ts,)) db(""" DELETE FROM session_attribute WHERE authenticated=0 AND NOT EXISTS (SELECT * FROM session AS s WHERE s.sid=session_attribute.sid AND s.authenticated=0) """)
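# Illustration of the '<sid>[:0|1]' convention handled by _split_sid() above:
# a ':0' suffix selects the unauthenticated session, ':1' (or no suffix) the
# authenticated one.  The helper is copied verbatim; the sample sids are made
# up, and the trac-admin call in the comment is only an example.
def split_sid(sid):
    if sid.endswith(':0'):
        return sid[:-2], 0
    elif sid.endswith(':1'):
        return sid[:-2], 1
    else:
        return sid, 1

if __name__ == '__main__':
    # e.g. `trac-admin $ENV session set email jdoe:0 jdoe@example.org`
    print(split_sid('jdoe'))    # ('jdoe', 1) - authenticated by default
    print(split_sid('jdoe:0'))  # ('jdoe', 0) - anonymous session
    print(split_sid('jdoe:1'))  # ('jdoe', 1)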
class GitConnector(Component): implements(IRepositoryConnector, ISystemInfoProvider, IWikiSyntaxProvider) def __init__(self): self._version = None try: self._version = PyGIT.Storage.git_version(git_bin=self.git_bin) except PyGIT.GitError as e: self.log.error("GitError: %s", e) if self._version: self.log.info("detected GIT version %s", self._version['v_str']) if not self._version['v_compatible']: self.log.error( "GIT version %s installed not compatible" "(need >= %s)", self._version['v_str'], self._version['v_min_str']) # ISystemInfoProvider methods def get_system_info(self): if self._version: yield 'GIT', self._version['v_str'] # IWikiSyntaxProvider methods def _format_sha_link(self, formatter, sha, label): # FIXME: this function needs serious rethinking... reponame = '' context = formatter.context while context: if context.resource.realm in ('source', 'changeset'): reponame = context.resource.parent.id break context = context.parent try: repos = RepositoryManager(self.env).get_repository(reponame) if not repos: raise Exception("Repository '%s' not found" % reponame) sha = repos.normalize_rev(sha) # in case it was abbreviated changeset = repos.get_changeset(sha) return tag.a(label, class_='changeset', title=shorten_line(changeset.message), href=formatter.href.changeset(sha, repos.reponame)) except Exception as e: return tag.a(label, class_='missing changeset', title=to_unicode(e), rel='nofollow') def get_wiki_syntax(self): yield (r'(?:\b|!)r?[0-9a-fA-F]{%d,40}\b' % self.wiki_shortrev_len, lambda fmt, sha, match: self._format_sha_link( fmt, sha.startswith('r') and sha[1:] or sha, sha)) def get_link_resolvers(self): yield ('sha', lambda fmt, _, sha, label, match=None: self. _format_sha_link(fmt, sha, label)) # IRepositoryConnector methods persistent_cache = BoolOption( 'git', 'persistent_cache', 'false', """Enable persistent caching of commit tree.""") cached_repository = BoolOption( 'git', 'cached_repository', 'false', """Wrap `GitRepository` in `CachedRepository`.""") shortrev_len = IntOption( 'git', 'shortrev_len', 7, """The length at which a sha1 is abbreviated (must be >= 4 and <= 40). """) wiki_shortrev_len = IntOption( 'git', 'wikishortrev_len', 40, """The minimum length at which a hex-string in wiki content is formatted as a changeset TracLink (must be >= 4 and <= 40). """) trac_user_rlookup = BoolOption( 'git', 'trac_user_rlookup', 'false', """Enable reverse mapping of git email addresses to trac user ids. Performance will be reduced if there are many users and the `cached_repository` option is `disabled`. A repository resync is required after changing the value of this option. """) use_committer_id = BoolOption( 'git', 'use_committer_id', 'true', """Use git-committer id instead of git-author id for the changeset ''Author'' field. """) use_committer_time = BoolOption( 'git', 'use_committer_time', 'true', """Use git-committer timestamp instead of git-author timestamp for the changeset ''Timestamp'' field. 
""") git_fs_encoding = Option( 'git', 'git_fs_encoding', 'utf-8', """Define charset encoding of paths within git repositories.""") git_bin = Option('git', 'git_bin', 'git', """Path to the git executable.""") def get_supported_types(self): yield ('git', 8) def get_repository(self, type, dir, params): """GitRepository factory method""" assert type == 'git' if not (4 <= self.shortrev_len <= 40): raise TracError( _("%(option)s must be in the range [4..40]", option="[git] shortrev_len")) if not (4 <= self.wiki_shortrev_len <= 40): raise TracError( _("%(option)s must be in the range [4..40]", option="[git] wikishortrev_len")) if not self._version: raise TracError(_("GIT backend not available")) elif not self._version['v_compatible']: raise TracError( _( "GIT version %(hasver)s installed not " "compatible (need >= %(needsver)s)", hasver=self._version['v_str'], needsver=self._version['v_min_str'])) if self.trac_user_rlookup: def rlookup_uid(email): """Reverse map 'real name <*****@*****.**>' addresses to trac user ids. :return: `None` if lookup failed """ try: _, email = email.rsplit('<', 1) email, _ = email.split('>', 1) email = email.lower() except Exception: return None for _uid, _name, _email in self.env.get_known_users(): try: if email == _email.lower(): return _uid except Exception: continue else: def rlookup_uid(_): return None repos = GitRepository( self.env, dir, params, self.log, persistent_cache=self.persistent_cache, git_bin=self.git_bin, git_fs_encoding=self.git_fs_encoding, shortrev_len=self.shortrev_len, rlookup_uid=rlookup_uid, use_committer_id=self.use_committer_id, use_committer_time=self.use_committer_time, ) if self.cached_repository: repos = GitCachedRepository(self.env, repos, self.log) self.log.debug("enabled CachedRepository for '%s'", dir) else: self.log.debug("disabled CachedRepository for '%s'", dir) return repos
class WikiCalendarMacros(Component): """Provides macros to display wiki page navigation in a calendar view.""" implements(IWikiMacroProvider, ITemplateProvider) # Common [wikicalendar] section internal_css = BoolOption( 'wikicalendar', 'internal_css', False, """Whether CSS should be embedded into the HTML. This is meant as fallback, if linking the external style sheet file fails.""") ticket_due = Option('wikicalendar', 'ticket.due_field', 'due_close', doc="""Custom due date field name to evaluate for displaying tickets by date.""") ticket_due_fmt = Option('wikicalendar', 'ticket.due_field.format', '%y-%m-%d', doc="""Custom due date value format, that is any expression supported by strftime or 'ts' identifier for POSIX microsecond time stamps as supported in Trac since 1.1.1.""") # Old [wikiticketcalendar] section due_field_name = Option('wikiticketcalendar', 'ticket.due_field.name', 'due_close', doc="""Custom due date field name to evaluate for displaying tickets by date. (''depreciated - see wikicalendar section'')""") due_field_fmt = Option('wikiticketcalendar', 'ticket.due_field.format', '%y-%m-%d', doc="""Custom due date value format, that is any expression supported by strftime or 'ts' identifier for POSIX microsecond time stamps as supported in later Trac versions. (''depreciated - see wikicalendar section'')""") htdocs_path = resource_filename(__name__, 'htdocs') def __init__(self): # Bind 'wikicalendar' catalog to the specified locale directory. locale_dir = resource_filename(__name__, 'locale') add_domain(self.env.path, locale_dir) # Parse 'wikicalendar' configuration section for test instructions. # Valid options are written as check.<item>.<test name>, where item # is optional. The value must be a SQL query with arguments depending # on the item it applies to. self.checks = {} conf_section = self.config['wikicalendar'] for key, sql in conf_section.options(): if key.startswith('check.'): check_type = key.split('.') if len(check_type) in range(2, 4): self.checks[check_type[-1]] = {'test': sql} if len(check_type) == 3: # We've got test type information too. self.checks[check_type[-1]]['type'] = check_type[1] # Options in 'wikicalendar' configuration section take precedence over # those in old 'wikiticketcalendar' section. cfg = self.config if 'wikicalendar' in cfg.sections(): # Rewrite option name for easier plugin upgrade. if cfg.has_option('wikicalendar', 'ticket.due_field.name'): self.env.log.debug("Old 'wikiticketcalendar' option found.") cfg.set('wikicalendar', 'ticket.due_field', cfg.get('wikicalendar', 'ticket.due_field.name')) cfg.remove('wikicalendar', 'ticket.due_field.name') cfg.save() self.env.log.debug('Updated to new option: ticket.due_field') self.tkt_due_field = self.ticket_due self.tkt_due_format = self.ticket_due_fmt else: self.tkt_due_field = self.due_field_name self.tkt_due_format = self.due_field_fmt # ITemplateProvider methods def get_htdocs_dirs(self): """Returns additional path, where stylesheets are placed.""" return [('wikicalendar', self.htdocs_path)] def get_templates_dirs(self): """Returns additional path, where templates are placed.""" return [] # IWikiMacroProvider methods def get_macros(self): """Returns list of provided macro names.""" yield 'WikiCalendar' yield 'WikiTicketCalendar' def get_macro_description(self, name): """Returns documentation for provided macros.""" # TRANSLATOR: Keep Trac style WikiFormatting here, please. 
cal_doc = cleandoc_( """Inserts a small calendar, where each day links to a wiki page, whose name matches the format set by `wiki`. The current day is highlighted, and days with a due Milestone are marked in bold. Usage: {{{ [[WikiCalendar([year, month, nav, wiki, base=<page.name>)]] }}} Arguments (all optional, but positional - order matters): 1. `year` (4-digit year), defaults to `*` (current year) 1. `month` (2-digit month), defaults to `*` (current month) 1. `nav` (boolean) - show previous/next navigation, defaults to `true` 1. `wiki` (valid strftime expression) - page name format, defaults to `%Y-%m-%d` Keyword-only argument: * `base` (page name string) - create new pages from that template in PageTemplates, defaults to `''` (empty string) Examples: {{{ [[WikiCalendar(2006,07)]] [[WikiCalendar(2006,07,false)]] [[WikiCalendar(*,*,true,Meeting-%Y-%m-%d)]] [[WikiCalendar(2006,07,false,Meeting-%Y-%m-%d)]] [[WikiCalendar(*,*,true,Meeting-%Y-%m-%d,base=MeetingNotes)]] }}}""") tcal_doc = cleandoc_( """Display Milestones and Tickets in a calendar view. Days include links to: * all milestones, that are due on that day * all tickets, that are due on that day * all tickets created on that day (configurable) * one or more wiki pages with name matching the configured format preparing links for creating new wiki pages from a template too Usage: {{{ [[WikiTicketCalendar(year, month, nav, wiki, cdate, base, query, short, width)]] }}} Arguments (all optional, but positional - order matters): 1. `year` (4-digit year), defaults to `*` (current year) 1. `month` (2-digit month), defaults to `*` (current month) 1. `nav` (boolean) - show previous/next navigation, defaults to `true` 1. `wiki` (valid strftime expression) - page name format, defaults to `%Y-%m-%d` 1. `cdate` (boolean) - show tickets created on that day too, defaults to `true` 1. `base` (page name string) - create new pages from that template in PageTemplates, defaults to `''` (empty string) 1. `query` (valid TracQuery) - including expressions grouped by AND (OR since 0.12) for general ticket selection, defaults to `id!=0` 1. `short` (integer) - total ticket count per day, that will have ticket list display condensed to just ticket numbers, defaults to `0` (never condense ticket list 1. `width` (valid CSS size), prefixed `+` forces more, defaults to `100%;` Examples: {{{ [[WikiTicketCalendar(2006,07)]] [[WikiTicketCalendar(2006,07,false)]] [[WikiTicketCalendar(*,*,true,Meeting-%Y-%m-%d)]] [[WikiTicketCalendar(2006,07,false,Meeting-%Y-%m-%d)]] [[WikiTicketCalendar(2006,07,true,*,true)]] [[WikiTicketCalendar(2006,07,true,Meeting-%Y-%m-%d,true,Meeting)]] }}} Equivalent keyword arguments are available for all but the first two arguments. Examples: {{{ [[WikiTicketCalendar(wiki=Talk-%Y-%m-%d,base=Talk)]] same as [[WikiTicketCalendar(*,*,true,Talk-%Y-%m-%d,true,Talk)]] [[WikiTicketCalendar(wiki=Meeting-%Y-%m-%d,query=type=task)]] [[WikiTicketCalendar(wiki=Meeting_%Y/%m/%d,short=6)]] }}} Mixed use of both, simple and keyword arguments is possible, while order of simple arguments (see above) still applies and keyword arguments in-between do not count for positional argument mapping. 
Example: {{{ [[WikiTicketCalendar(wiki=Meeting_%Y/%m/%d,*,*,true,width=+75%;)]] }}} Keyword-only argument: * `subpages` (list of page names separated by '|') - replace wiki page link per day with one link per sub-page labeled by first character of sub-page name, defaults to an empty list Example: {{{ [[WikiTicketCalendar(wiki=Meetings_%Y/%m/%d, subpages=Morning|Afternoon)]] }}}""") if name == 'WikiCalendar': if macro_doc_compat: # Optionally translated doc for Trac < 1.0. return gettext(cal_doc) return ('wikicalendar', cal_doc) elif name == 'WikiTicketCalendar': if macro_doc_compat: return gettext(tcal_doc) return ('wikicalendar', tcal_doc) def expand_macro(self, formatter, name, arguments): """Returns macro content.""" env = self.env req = formatter.req tz = req.tz # Parse arguments from macro invocation. args, kwargs = parse_args(arguments, strict=False) # Enable week number display regardless of argument position. week_pref = 'w' in args and args.pop(args.index('w')) week_pref = week_pref and week_pref or kwargs.get('w') week_start = None week_num_start = None # Parse per-instance week calculation rules, if available. if week_pref: if ':' not in week_pref: # Treat undelimitted setting as week start. week_pref += ':' w_start, wn_start = week_pref.split(':') try: week_start = int(w_start) except ValueError: week_start = None else: week_start = week_start > -1 and week_start < 7 and \ week_start or None try: week_num_start = int(wn_start) except ValueError: week_num_start = None else: week_num_start = week_num_start in (1, 4, 7) and \ week_num_start or None # Respect user's locale, if available. try: locale = Locale.parse(str(req.locale)) except (AttributeError, UnknownLocaleError): # Attribute 'req.locale' vailable since Trac 0.12. locale = None if has_babel: if locale: if not locale.territory: # Search first locale, which has the same `language` and # territory in preferred languages. for l in req.languages: l = l.replace('-', '_').lower() if l.startswith(locale.language.lower() + '_'): try: l = Locale.parse(l) if l.territory: locale = l break # first one rules except UnknownLocaleError: pass if not locale.territory and locale.language in LOCALE_ALIASES: locale = Locale.parse(LOCALE_ALIASES[locale.language]) else: # Default fallback. locale = Locale('en', 'US') env.log.debug('Locale setting for wiki calendar: %s' % locale.get_display_name('en')) if not week_start: if week_pref and week_pref.lower().startswith('iso'): week_start = 0 week_num_start = 4 elif has_babel: week_start = locale.first_week_day else: import calendar week_start = calendar.firstweekday() # ISO calendar will remain as default. if not week_num_start: if week_start == 6: week_num_start = 1 else: week_num_start = 4 env.log.debug('Effective settings: first_week_day=%s, ' '1st_week_of_year_rule=%s' % (week_start, week_num_start)) # Find year and month of interest. year = req.args.get('year') # Not clicked on any previous/next button, next look for macro args. if not year and len(args) >= 1 and args[0] != "*": year = args[0] year = year and year.isnumeric() and int(year) or None month = req.args.get('month') # Not clicked on any previous/next button, next look for macro args. if not month and len(args) >= 2 and args[1] != "*": month = args[1] month = month and month.isnumeric() and int(month) or None now = datetime.now(tz) # Force offset from start-of-day to avoid a false 'today' marker, # but use it only on request of different month/year. 
now.replace(second=1) today = None if (month and month != now.month) or (year and year != now.year): today = now.replace(year=year, month=month, day=1) # Use current month and year, if nothing else has been requested. if not today: today = now.replace(hour=0, minute=0, second=0, microsecond=0) showbuttons = True if len(args) >= 3 or kwargs.has_key('nav'): try: showbuttons = kwargs['nav'] in _TRUE_VALUES except KeyError: showbuttons = args[2] in _TRUE_VALUES wiki_page_format = "%Y-%m-%d" if len(args) >= 4 and args[3] != "*" or kwargs.has_key('wiki'): try: wiki_page_format = str(kwargs['wiki']) except KeyError: wiki_page_format = str(args[3]) # Support relative paths in macro arguments for wiki page links. wiki_page_format = resolve_relative_name(wiki_page_format, formatter.resource.id) list_condense = 0 show_t_open_dates = True wiki_subpages = [] # Read optional check plan. check = [] if kwargs.has_key('check'): check = kwargs['check'].split('.') if name == 'WikiTicketCalendar': if len(args) >= 5 or kwargs.has_key('cdate'): try: show_t_open_dates = kwargs['cdate'] in _TRUE_VALUES except KeyError: show_t_open_dates = args[4] in _TRUE_VALUES # TracQuery support for ticket selection query_args = "id!=0" if len(args) >= 7 or kwargs.has_key('query'): # prefer query arguments provided by kwargs try: query_args = kwargs['query'] except KeyError: query_args = args[6] # compress long ticket lists if len(args) >= 8 or kwargs.has_key('short'): # prefer query arguments provided by kwargs try: list_condense = int(kwargs['short']) except KeyError: list_condense = int(args[7]) # control calendar display width cal_width = "100%;" if len(args) >= 9 or kwargs.has_key('width'): # prefer query arguments provided by kwargs try: cal_width = kwargs['width'] except KeyError: cal_width = args[8] # multiple wiki (sub)pages per day if kwargs.has_key('subpages'): wiki_subpages = kwargs['subpages'].split('|') # Prepare datetime objects for previous/next navigation link creation. prev_year = month_offset(today, -12) prev_quarter = month_offset(today, -3) prev_month = month_offset(today, -1) next_month = month_offset(today, 1) next_quarter = month_offset(today, 3) next_year = month_offset(today, 12) # Find first and last calendar day, probably in last/next month, # using datetime objects exactly at start-of-day here. # Note: Calendar days are numbered 0 (Mo) - 6 (Su). first_day_month = today.replace(day=1, second=0) first_day = first_day_month - timedelta( week_index(first_day_month, week_start)) last_day_month = next_month.replace(day=1) - timedelta(1) if ((last_day_month - first_day).days + 1) % 7 > 0: last_day = last_day_month + timedelta(7 - ( (last_day_month - first_day).days + 1) % 7) else: last_day = last_day_month # Find relevant tickets. if name == 'WikiTicketCalendar': daystr = (uts and '..' or ':').join([ format_datetime(first_day, locale=locale), format_datetime(last_day, locale=locale) ]) provider = WikiCalendarTicketProvider(env) query_args = query_args and query_args + '&' or '' tkt_due = provider.harvest( req, query_args + '='.join([self.tkt_due_field, daystr])) if show_t_open_dates: tkt_new = provider.harvest( req, query_args + '='.join(['created', daystr])) # Finally building the output now. # Begin with caption and optional navigation links. buff = tag.tr() if showbuttons is True: # Create calendar navigation buttons. 
nx = 'next' pv = 'prev' nav_pv_y = _nav_link(req, '<<', pv, prev_year, locale) nav_pv_q = _nav_link(req, ' «', pv, prev_quarter, locale) nav_pv_m = _nav_link(req, ' <', pv, prev_month, locale) nav_nx_m = _nav_link(req, '> ', nx, next_month, locale) nav_nx_q = _nav_link(req, '» ', nx, next_quarter, locale) nav_nx_y = _nav_link(req, '>>', nx, next_year, locale) # Add buttons for going to previous months and year. buff(nav_pv_y, nav_pv_q, nav_pv_m) # The caption will always be there. if has_babel: heading = tag.td(format_datetime(today, 'MMMM y', locale=locale)) else: heading = tag.td(format_date(today, '%B %Y')) buff = buff(heading(class_='y')) if showbuttons is True: # Add buttons for going to next months and year. buff(nav_nx_m, nav_nx_q, nav_nx_y) buff = tag.caption(tag.table(tag.tbody(buff))) buff = tag.table(buff) if name == 'WikiTicketCalendar': if cal_width.startswith('+') is True: width = ":".join(['min-width', cal_width]) buff(class_='wikitcalendar', style=width) else: buff(class_='wikitcalendar') if name == 'WikiCalendar': buff(class_='wiki-calendar') heading = tag.tr() heading(align='center') if week_pref: # Add an empty cell matching the week number column below. heading(tag.th()) day_names = [(idx, day_name) for idx, day_name in get_day_names( 'abbreviated', 'format', locale).iteritems()] # Read day names after shifting into correct position. for idx, name_ in day_names[week_start:7] + day_names[0:week_start]: col = tag.th(name_) if has_babel: weekend = idx >= locale.weekend_start and \ idx <= locale.weekend_end else: weekend = idx > 4 col(class_=('workday', 'weekend')[weekend], scope='col') heading(col) heading = buff(tag.thead(heading)) # Building main calendar table body buff = tag.tbody() day = first_day while day.date() <= last_day.date(): # Insert a new row for every week. if (day - first_day).days % 7 == 0: line = tag.tr() line(align='right') if week_pref: cell = tag.td( week_num(env, day, week_start, week_num_start)) line(cell(class_='week')) if not (day < first_day_month or day > last_day_month): wiki = format_date(day, wiki_page_format) if day == today: a_class = 'day today' td_class = 'today' else: a_class = 'day' td_class = 'day' if uts: day_ts = to_utimestamp(day) day_ts_eod = day_ts + 86399999999 else: day_ts = to_timestamp(day) day_ts_eod = day_ts + 86399 # Check for milestone(s) on that day. #db = env.get_read_db() #cursor = db.cursor() #cursor.execute(""" # SELECT name # FROM milestone # WHERE due >= %s and due <= %s #""", (day_ts, day_ts_eod)) cursor = self.env.db_query( """ SELECT name FROM milestone WHERE due >= %s and due <= %s """, (day_ts, day_ts_eod)) milestones = tag() for row in cursor: if not a_class.endswith('milestone'): a_class += ' milestone' milestone = to_unicode(row[0]) url = env.href.milestone(milestone) milestone = '* ' + milestone milestones = tag( milestones, tag.div(tag.a(milestone, href=url), class_='milestone')) label = tag.span(day.day) label(class_='day') # Generate wiki page links with name specified in # 'wiki_page_format', and check their existence. 
if len(wiki_subpages) > 0: pages = tag(label, Markup('<br />')) for page in wiki_subpages: label = tag(' ', page[0]) page = '/'.join([wiki, page]) pages( self._wiki_link(req, args, kwargs, page, label, 'subpage', check)) else: pages = self._wiki_link(req, args, kwargs, wiki, label, a_class, check) cell = tag.td(pages) cell(class_=td_class, valign='top') if name == 'WikiCalendar': line(cell) else: if milestones: cell(milestones) else: cell(tag.br()) match = [] match_od = [] ticket_heap = tag('') ticket_list = tag.div('') ticket_list(align='left', class_='condense') # Get tickets with due date set to day. for t in tkt_due: due = t.get(self.tkt_due_field) if due is None or due in ('', '--'): continue else: if self.tkt_due_format == 'ts': if not isinstance(due, datetime): continue if uts: due_ts = to_utimestamp(due) else: due_ts = to_timestamp(due) if due_ts < day_ts or due_ts > day_ts_eod: continue else: # Beware: Format might even be unicode string, # but str is required by the function. duedate = format_date(day, str(self.tkt_due_format)) if not due == duedate: continue tkt_id = t.get('id') ticket, short = _ticket_links(env, formatter, t) ticket_heap(ticket) if not tkt_id in match: if len(match) == 0: ticket_list(short) else: ticket_list(', ', short) match.append(tkt_id) # Optionally, get tickets created on day too. if show_t_open_dates: ticket_od_list = tag.div('') ticket_od_list(align='left', class_='opendate_condense') for t in tkt_new: if uts: ticket_ts = to_utimestamp(t.get('time')) else: ticket_ts = to_timestamp(t.get('time')) if ticket_ts < day_ts or ticket_ts > day_ts_eod: continue a_class = 'opendate_' tkt_id = t.get('id') ticket, short = _ticket_links( env, formatter, t, a_class) ticket_heap(ticket) if not tkt_id in match: if len(match_od) == 0: ticket_od_list(short) else: ticket_od_list(', ', short) match_od.append(tkt_id) matches = len(match) + len(match_od) if list_condense > 0 and matches >= list_condense: if len(match_od) > 0: if len(match) > 0: ticket_list(', ') ticket_list = tag(ticket_list, ticket_od_list) line(cell(ticket_list)) else: line(cell(ticket_heap)) else: if name == 'WikiCalendar': wiki = format_date(day, wiki_page_format) a_class = 'day adjacent_month' pages = self._wiki_link(req, args, kwargs, wiki, day.day, a_class) cell = tag.td(pages, class_='day adjacent_month') line(cell) else: cell = tag.td('', class_='day adjacent_month') line(cell) # Append completed week rows. if (day - first_day).days % 7 == 6: buff(line) day += timedelta(1) buff = tag.div(heading(buff)) if name == 'WikiTicketCalendar': if cal_width.startswith('+') is True: width = ":".join(['width', cal_width]) buff(class_='wikitcalendar', style=width) else: buff(class_='wikitcalendar') if name == 'WikiCalendar': buff(class_='wiki-calendar') # Add common CSS stylesheet. if self.internal_css and not req.args.get('wikicalendar'): # Put definitions directly into the output. f = open('/'.join([self.htdocs_path, 'wikicalendar.css']), 'Ur') css = tag.style(Markup('<!--\n'), '\n'.join(f.readlines()), Markup('-->\n'))(type="text/css") f.close() # Add hint to prevent multiple inclusions. req.args['wikicalendar'] = True return tag(css, buff) elif not req.args.get('wikicalendar'): add_stylesheet(req, 'wikicalendar/wikicalendar.css') return buff def _do_check(self, test, item): """Execute configurable tests per calendar item.""" # DEVEL: Fail condition not implemented yet, will need additional # configuration too. 
if test in self.checks.keys(): sql = self.checks[test].get('test') if sql: #db = self.env.get_read_db() #cursor = db.cursor() #cursor.execute(sql, (item,)) cursor = self.env.db_query(sql, (item, )) row = cursor.fetchone() if row is not None: return 1 def _wiki_link(self, req, args, kwargs, wiki, label, a_class, check=None): """Build links to wiki pages.""" check_sign = None url = self.env.href.wiki(wiki) if WikiSystem(self.env).has_page(wiki.lstrip('/')): a_class += " page" title = _("Go to page %s") % wiki if check and check[0] == 'link': chrome_path = '/'.join([req.base_path, 'chrome']) ok_img = 'wikicalendar/check_ok.png' ok = tag.image(src='/'.join([chrome_path, ok_img]), alt='ok', title='ok') nok_img = 'wikicalendar/check_nok.png' nok = tag.image(src='/'.join([chrome_path, nok_img]), alt='X', title='X') unk_img = 'wikicalendar/check_unknown.png' unk = tag.image(src='/'.join([chrome_path, unk_img]), alt='?', title='?') result = self._do_check(check[1], wiki) check_sign = result and (result == 1 and ok or nok) or unk else: # The default (empty page) is used, if template name is invalid. url += "?action=edit" # Add page template to create new wiki pages, if specified. template = None if len(args) >= 6 or kwargs.has_key('base'): try: template = kwargs['base'] except KeyError: template = args[5] if template: url += "&template=" + template title = _("Create page %s") % wiki link = tag.a(tag(label), href=url) link(class_=a_class, title_=title) return tag(link, check_sign)
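# The first_day/last_day arithmetic in expand_macro() above pads the displayed
# month out to full weeks.  For comparison, a compact stdlib sketch using
# calendar.Calendar; week_start uses the same 0=Monday .. 6=Sunday numbering
# as the macro, and the sample year/month are arbitrary.
import calendar

def month_grid(year, month, week_start=0):
    """Return a list of weeks, each a list of seven datetime.date objects,
    padded with days from the adjacent months."""
    cal = calendar.Calendar(firstweekday=week_start)
    return cal.monthdatescalendar(year, month)

if __name__ == '__main__':
    weeks = month_grid(2006, 7)
    print(weeks[0][0], weeks[-1][-1])  # first and last day shown in the grid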
class AccountManager(Component):
    """The AccountManager component handles all user account management
    methods provided by the IPasswordStore interface.

    The methods are handled by the underlying password storage
    implementations set in trac.ini with the
    "account-manager.password_store" option.

    "account-manager.password_store" may be an ordered list of password
    stores, and if so, each password store is queried in turn.
    """

    implements(IAccountChangeListener, IPermissionRequestor)

    _password_store = OrderedExtensionsOption(
        'account-manager', 'password_store', IPasswordStore,
        include_missing=False,
        doc=N_("Ordered list of password stores, queried in turn."))
    _password_format = Option(
        'account-manager', 'password_format',
        doc="Legacy option, deprecated since acct_mgr-0.1.2")
    _register_check = OrderedExtensionsOption(
        'account-manager', 'register_check', IAccountRegistrationInspector,
        default="""BasicCheck, EmailCheck, BotTrapCheck, RegExpCheck,
                UsernamePermCheck""",
        include_missing=False,
        doc="""Ordered list of IAccountRegistrationInspector's to use for
        registration checks.""")
    stores = ExtensionPoint(IPasswordStore)
    change_listeners = ExtensionPoint(IAccountChangeListener)
    allow_delete_account = BoolOption(
        'account-manager', 'allow_delete_account', True,
        doc="Allow users to delete their own account.")
    force_passwd_change = BoolOption(
        'account-manager', 'force_passwd_change', True,
        doc="Force the user to change the password when it has been reset.")
    persistent_sessions = BoolOption(
        'account-manager', 'persistent_sessions', False,
        doc="""Allow the user to be remembered across sessions without
            needing to re-authenticate.

            That is, the user checks a \"Remember Me\" checkbox and will be
            remembered on the next visit to the site.""")
    refresh_passwd = BoolOption(
        'account-manager', 'refresh_passwd', False,
        doc="""Re-set passwords on successful authentication.

            This is most useful for moving users to a new password store or
            for enforcing a new store configuration (e.g. a changed hash
            type), but should be disabled/unset otherwise.""")
    verify_email = BoolOption(
        'account-manager', 'verify_email', True,
        doc="Verify the email address of Trac users.")
    username_char_blacklist = Option(
        'account-manager', 'username_char_blacklist', ':[]',
        doc="""Always exclude some special characters from usernames.

            This is enforced upon new user registration.""")

    def __init__(self):
        # Bind the 'acct_mgr' catalog to the specified locale directory.
        locale_dir = resource_filename(__name__, 'locale')
        add_domain(self.env.path, locale_dir)

    # Public API

    def get_users(self):
        """Get usernames from all active stores.

        Because multiple stores may be active concurrently, and some stores
        do not even guarantee uniqueness within themselves, duplicate
        usernames should be expected.
        """
        users = []
        for store in self._password_store:
            users.extend(store.get_users())
        return users

    def has_user(self, user):
        exists = False
        user = self.handle_username_casing(user)
        for store in self._password_store:
            if store.has_user(user):
                exists = True
                break
        return exists

    def set_password(self, user, password, old_password=None):
        user = self.handle_username_casing(user)
        store = self.find_user_store(user)
        if store and not hasattr(store, 'set_password'):
            raise TracError(
                _("""The authentication backend for user %s does not support
                setting the password.
                """ % user))
        elif not store:
            store = self.get_supporting_store('set_password')
        if store:
            if store.set_password(user, password, old_password):
                self._notify('created', user, password)
            else:
                self._notify('password_changed', user, password)
        else:
            raise TracError(
                _("""None of the IPasswordStore components listed in the
                trac.ini supports setting the password or creating users.
                """))

    def check_password(self, user, password):
        valid = False
        user = self.handle_username_casing(user)
        for store in self._password_store:
            valid = store.check_password(user, password)
            if valid:
                if valid == True and self.refresh_passwd == True and \
                        self.get_supporting_store('set_password'):
                    self._maybe_update_hash(user, password)
                break
        return valid

    def delete_user(self, user):
        user = self.handle_username_casing(user)
        # Delete from the password store.
        store = self.find_user_store(user)
        del_method = getattr(store, 'delete_user', None)
        if callable(del_method):
            del_method(user)
        # Delete session attributes, session and any custom permissions
        # set for the user.
        delete_user(self.env, user)
        self._notify('deleted', user)

    def supports(self, operation):
        try:
            stores = self.password_store
        except AttributeError:
            return False
        else:
            if self.get_supporting_store(operation):
                return True
            else:
                return False

    def password_store(self):
        try:
            return self._password_store
        except AttributeError:
            # Fall back on the old "password_format" option.
            fmt = self._password_format
            for store in self.stores:
                config_key = getattr(store, 'config_key', None)
                if config_key is None:
                    continue
                if config_key() == fmt:
                    return [store]
            # If "password_format" is not set, re-raise the AttributeError.
            raise

    password_store = property(password_store)

    def get_supporting_store(self, operation):
        """Returns the IPasswordStore that implements the specified
        operation.

        None is returned if no supporting store can be found.
        """
        supports = False
        for store in self.password_store:
            if hasattr(store, operation):
                supports = True
                break
        store = supports and store or None
        return store

    def get_all_supporting_stores(self, operation):
        """Returns a list of stores that implement the specified
        operation.
        """
        stores = []
        for store in self.password_store:
            if hasattr(store, operation):
                stores.append(store)
        return stores

    def find_user_store(self, user):
        """Locates the store that contains the specified user.

        If the user isn't found in any IPasswordStore in the chain, None is
        returned.
        """
        user_stores = []
        for store in self._password_store:
            userlist = store.get_users()
            user_stores.append((store, userlist))
        user = self.handle_username_casing(user)
        for store, userlist in user_stores:
            if user in userlist:
                return store
        return None

    def handle_username_casing(self, user):
        """Enforce lowercase usernames if required.

        This complies with Trac's own behavior when case-insensitive user
        authentication (`ignore_auth_case`) is enabled.
        """
        ignore_auth_case = self.config.getbool('trac', 'ignore_auth_case')
        return ignore_auth_case and user.lower() or user

    def validate_registration(self, req):
        """Run configured registration checks and prime the account on
        success.
        """
        for inspector in self._register_check:
            inspector.validate_registration(req)
        username = self.handle_username_casing(
            req.args.get('username').strip())
        name = req.args.get('name').strip()
        email = req.args.get('email', '').strip()
        # Create the user in the configured (primary) password store.
        self.set_password(username, req.args.get('password'))
        # The outcome of a successful account creation request is a primed
        # authenticated session that the new user can be referred to later.
        prime_auth_session(self.env, username)
        # Save attributes for the user with reference to that session ID.
        for attribute in ('name', 'email'):
            value = req.args.get(attribute)
            if not value:
                continue
            set_user_attribute(self.env, username, attribute, value)

    def _maybe_update_hash(self, user, password):
        if not get_user_attribute(self.env, 1, user, 'password_refreshed', 1):
            self.log.debug("Refresh password for user: %s" % user)
            store = self.find_user_store(user)
            pwstore = self.get_supporting_store('set_password')
            if pwstore.set_password(user, password) == True:
                # Account re-created according to current settings.
                if store and not (store.delete_user(user) == True):
                    self.log.warn("Failed to remove old entry for user: %s"
                                  % user)
            set_user_attribute(self.env, user, 'password_refreshed', 1)

    def _notify(self, mod, *args):
        mod = '_'.join(['user', mod])
        for listener in self.change_listeners:
            getattr(listener, mod)(*args)

    # IAccountChangeListener methods

    def user_created(self, user, password):
        self.log.info("Created new user: %s" % user)

    def user_password_changed(self, user, password):
        self.log.info("Updated password for user: %s" % user)

    def user_deleted(self, user):
        self.log.info("Deleted user: %s" % user)

    def user_password_reset(self, user, email, password):
        self.log.info("Password reset for user: %s, %s" % (user, email))

    def user_email_verification_requested(self, user, token):
        self.log.info("Email verification requested for user: %s" % user)

    # IPermissionRequestor methods

    def get_permission_actions(self):
        action = ['ACCTMGR_CONFIG_ADMIN', 'ACCTMGR_USER_ADMIN', 'EMAIL_VIEW',
                  'USER_VIEW']
        actions = [('ACCTMGR_ADMIN', action), action[0],
                   (action[1], action[2:]), action[3]]
        return actions
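
# --- Illustrative usage sketch (not part of AccountManagerPlugin) -----------
# A minimal sketch of how another Trac component might drive the public
# AccountManager API above. The component name `AccountGardener`, the demo
# credentials, and the trac.ini excerpt are assumptions made for
# illustration; only the AccountManager methods shown above are relied on.
#
# Example trac.ini excerpt (store names are placeholders, pick the
# IPasswordStore implementations enabled in your environment):
#
#   [account-manager]
#   password_store = SessionStore, HtPasswdStore
#   refresh_passwd = true
#   persistent_sessions = false
from trac.core import Component

from acct_mgr.api import AccountManager  # assumed import path


class AccountGardener(Component):
    """Hypothetical helper that provisions a demo account on demand."""

    def ensure_demo_account(self):
        acct_mgr = AccountManager(self.env)
        # Respect `ignore_auth_case` the same way AccountManager does.
        username = acct_mgr.handle_username_casing('DemoUser')
        if not acct_mgr.has_user(username):
            # Creates the user in the first store supporting 'set_password'
            # and notifies IAccountChangeListener implementations.
            acct_mgr.set_password(username, 'demo-password')
        # check_password() walks the ordered password_store chain and may
        # refresh the stored hash when `refresh_passwd` is enabled.
        return acct_mgr.check_password(username, 'demo-password')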
class ZulipPlugin(Component):
    implements(ITicketChangeListener)

    def ticket_created(self, ticket: Any) -> None:
        """Called when a ticket is created."""
        content = "{} created {} in component **{}**, priority **{}**:\n".format(
            ticket.values.get("reporter"),
            markdown_ticket_url(ticket),
            ticket.values.get("component"),
            ticket.values.get("priority"),
        )
        # Include the full subject if it will be truncated
        if len(ticket.values.get("summary")) > 60:
            content += "**{}**\n".format(ticket.values.get("summary"))
        if ticket.values.get("description") != "":
            content += "{}".format(
                markdown_block(ticket.values.get("description")))
        send_update(ticket, content)

    def ticket_changed(self, ticket: Any, comment: str, author: str,
                       old_values: Dict[str, Any]) -> None:
        """Called when a ticket is modified.

        `old_values` is a dictionary containing the previous values of the
        fields that have changed.
        """
        if not (set(old_values.keys()).intersection(
                    set(config.TRAC_NOTIFY_FIELDS))
                or (comment and "comment" in set(config.TRAC_NOTIFY_FIELDS))):
            return

        content = f"{author} updated {markdown_ticket_url(ticket)}"
        if comment:
            content += f" with comment: {markdown_block(comment)}\n\n"
        else:
            content += ":\n\n"
        field_changes = []
        for key, value in old_values.items():
            if key == "description":
                content += "- Changed {} from {}\n\nto {}".format(
                    key,
                    markdown_block(value),
                    markdown_block(ticket.values.get(key)),
                )
            elif old_values.get(key) == "":
                field_changes.append(f"{key}: => **{ticket.values.get(key)}**")
            elif ticket.values.get(key) == "":
                field_changes.append(f'{key}: **{old_values.get(key)}** => ""')
            else:
                field_changes.append(
                    f"{key}: **{old_values.get(key)}** => **{ticket.values.get(key)}**"
                )
        content += ", ".join(field_changes)
        send_update(ticket, content)

    def ticket_deleted(self, ticket: Any) -> None:
        """Called when a ticket is deleted."""
        content = "{} was deleted.".format(
            markdown_ticket_url(ticket, heading="Ticket"))
        send_update(ticket, content)
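
# --- Illustrative sketch of the helpers used above (assumptions) ------------
# ZulipPlugin relies on `config`, `markdown_ticket_url`, `markdown_block`
# and `send_update`, which are defined elsewhere in the integration. The
# sketch below shows one plausible shape for them using the `zulip` Python
# bindings; the configuration attribute names (TRAC_BASE_TICKET_URL,
# STREAM_FOR_NOTIFICATIONS, ZULIP_*) are assumptions and may not match the
# real config module.
from typing import Any

import zulip

import config  # assumed module holding the integration settings

client = zulip.Client(
    email=config.ZULIP_EMAIL,
    api_key=config.ZULIP_API_KEY,
    site=config.ZULIP_SITE,
)


def markdown_ticket_url(ticket: Any, heading: str = "ticket") -> str:
    # Link the ticket to its page in the Trac web UI.
    return f"[{heading} #{ticket.id}]({config.TRAC_BASE_TICKET_URL}/{ticket.id})"


def markdown_block(text: str) -> str:
    # Render multi-line Trac text as a Markdown quote block.
    return "\n\n>" + "\n> ".join(text.split("\n")) + "\n"


def send_update(ticket: Any, content: str) -> None:
    # One topic per ticket keeps related notifications threaded in Zulip.
    topic = f"#{ticket.id}: {ticket.values.get('summary')}"[:60]
    client.send_message({
        "type": "stream",
        "to": config.STREAM_FOR_NOTIFICATIONS,
        "subject": topic,
        "content": content,
    })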