def expand_macro(self, formatter, name, content):
    """Render a TitleIndex-style listing of wiki pages.

    Macro arguments (parsed from *content*):
      - first positional arg: page-name prefix filter
      - format:  'group' enables hierarchical grouping
      - min:     minimum group size (clamped to >= 2)
      - depth:   maximum hierarchy depth (-1 = unlimited)
    """
    args, kw = util.parse_args(content)
    prefix = args and args[0] or None
    format = kw.get('format', '')
    minsize = max(int(kw.get('min', 2)), 2)  # grouping threshold, never below 2
    depth = int(kw.get('depth', -1))         # -1 means unlimited depth
    start = prefix and prefix.count('/') or 0
    wiki = formatter.wiki
    # Restrict to pages the requesting user is allowed to view.
    pages = sorted([page for page in wiki.get_pages(prefix) \
                    if 'WIKI_VIEW' in formatter.perm('wiki', page)])
    # Map each base page name to the list of its language variants.
    pagelangs = {}
    for page in pages:
        # NOTE(review): this rebinds the macro's 'name' parameter — confirm
        # that is intentional and 'name' is not needed afterwards.
        name, lang = util.split_lang(page, '')
        langs = pagelangs.get(name, [])
        if lang not in langs:
            langs.append(lang)
        pagelangs[name] = langs
    pages = sorted(pagelangs.keys())
    # collection of default pages
    upages, spages = self.split_pages(pages)

    def format_page_name(page, split=False):
        # Prefer the newer API; fall back to a local regex split.
        try:
            # for trac 0.11
            return wiki.format_page_name(page, split=split)
        except:
            # for trac 0.10
            if split:
                return self.PAGE_SPLIT_RE.sub(r"\1 \2", page)
            return page

    def split(page):
        # In 'group' mode a page name is split into its grouping keys.
        if format != 'group':
            return [format_page_name(page)]
        else:
            return self.SPLIT_RE.split(format_page_name(page, split=True))

    # Group by Wiki word and/or Wiki hierarchy
    upages, spages = [[(split(page), page) for page in pages
                       if depth < 0 or depth >= page.count('/') - start]
                      for pages in (upages, spages)]

    def split_in_groups(group):
        """Return list of pagename or (key, sublist) elements"""
        groups = []
        for key, subgrp in groupby(group, lambda (k,p): k and k[0] or ''):
            # Strip the character just grouped on from each key.
            subgrp = [(k[1:],p) for k,p in subgrp]
            if key and len(subgrp) >= minsize:
                sublist = split_in_groups(sorted(subgrp))
                if len(sublist) == 1:
                    # Single child: fold its key into ours instead of nesting.
                    elt = (key+sublist[0][0], sublist[0][1])
                else:
                    elt = (key, sublist)
                groups.append(elt)
            else:
                # Too few members to form a group: emit page names directly.
                for elt in subgrp:
                    groups.append(elt[1])
        return groups
def _get_candidate_subjects(self, not_in_list = []): candidates = [] users = [user for user in self._get_all_users() if user not in not_in_list] candidates += sorted(users) candidates += sorted([group.__str__() for group in self.authz.get_groups() if group.__str__() not in not_in_list]) #self.env.log.debug("Candidates:") #for c in candidates: # self.env.log.debug(" %s" % c) return candidates
def _get_candidate_subjects(self, not_in_list = []): candidates = [] if '*' not in not_in_list: candidates.append('*') users = [user for user in self.account_manager.get_users() if user not in not_in_list] candidates += sorted(users) candidates += sorted([group.__str__() for group in self.authz.get_groups() if group.__str__() not in not_in_list]) self.env.log.debug("Candidates:") for c in candidates: self.env.log.debug(" %s" % c) return candidates
def process_request(self, req):
    """Populate the HDF with per-ticket-type field visibility and render
    the conditional-fields javascript template."""
    mode = req.args.get('mode', 'view')
    all_fields = []
    standard_fields = set()
    # Standard (non-custom) fields are tracked separately so they can be
    # force-included below when include_std is on.
    for f in TicketSystem(self.env).get_ticket_fields():
        all_fields.append(f['name'])
        if not f.get('custom'):
            standard_fields.add(f['name'])
    # Move 'owner' to just before 'cc' (or to the end if there is no 'cc')
    # so the rendered order matches the ticket form layout.
    if 'owner' in all_fields:
        curr_idx = all_fields.index('owner')
        if 'cc' in all_fields:
            insert_idx = all_fields.index('cc')
        else:
            insert_idx = len(all_fields)
        if curr_idx < insert_idx:
            all_fields.insert(insert_idx, all_fields[curr_idx])
            del all_fields[curr_idx]
    for t in self.types:
        # Visible fields for this type: configured + (optionally) standard
        # + always-forced fields.
        fields = set(getattr(self, t+'_fields'))
        if self.include_std:
            fields.update(standard_fields)
        fields.update(self.forced_fields)
        req.hdf['condfields.types.%s'%t] = dict([(f, f in fields)
                                                 for f in all_fields])
    req.hdf['condfields.mode'] = mode
    req.hdf['condfields.all_fields'] = list(all_fields)
    # Fields the user may toggle: everything except the forced ones,
    # kept in display order.
    req.hdf['condfields.ok_fields'] = sorted(
        set(all_fields) - self.forced_fields,
        key=lambda x: all_fields.index(x))
    return 'condfields.cs', 'text/javascript'
def render_macro(self, req, name, content):
    """Render a two-column table listing every known InterWiki prefix."""
    from trac.util import sorted
    from trac.util.html import html as _
    interwikis = []
    for key in sorted(self.keys()):
        prefix, url, title = self[key]
        entry = {
            'prefix': prefix,
            'url': url,
            'title': title,
            'rc_url': self._expand_or_append(url, ['RecentChanges']),
            'description': title == prefix and url or title,
        }
        interwikis.append(entry)
    header = _.TR(_.TH(_.EM("Prefix")), _.TH(_.EM("Site")))
    rows = [_.TR(_.TD(_.A(w['prefix'], href=w['rc_url'])),
                 _.TD(_.A(w['description'], href=w['url'])))
            for w in interwikis]
    return _.TABLE(header, rows, class_="wiki interwiki")
def expand_macro(self, formatter, name, content):
    """Render a table of all [intertrac] prefixes with links."""
    # Collect the [intertrac] config into {prefix: {attr: value}} dicts;
    # plain "prefix = target" entries are stored as string aliases.
    intertracs = {}
    for key, value in self.config.options('intertrac'):
        idx = key.rfind('.')  # rsplit only in 2.4
        if idx > 0:  # 0 itself doesn't help much: .xxx = ...
            prefix, attribute = key[:idx], key[idx+1:]
            intertrac = intertracs.setdefault(prefix, {})
            intertrac[attribute] = value
        else:
            intertracs[key] = value  # alias

    def generate_prefix(prefix):
        # Yield one table row per prefix: either an alias note or a pair
        # of links (timeline + site) when a URL is configured.
        intertrac = intertracs[prefix]
        if isinstance(intertrac, basestring):
            yield tag.tr(tag.td(tag.b(prefix)),
                         tag.td('Alias for ', tag.b(intertrac)))
        else:
            url = intertrac.get('url', '')
            if url:
                title = intertrac.get('title', url)
                yield tag.tr(tag.td(tag.a(tag.b(prefix),
                                          href=url + '/timeline')),
                             tag.td(tag.a(title, href=url)))

    return tag.table(class_="wiki intertrac")(
        tag.tr(tag.th(tag.em('Prefix')), tag.th(tag.em('Trac Site'))),
        [generate_prefix(p) for p in sorted(intertracs.keys())])
def _do_config(self, req):
    """Handle the account-manager password-store admin page (ClearSilver
    variant): apply POSTed settings, then list all stores and options."""
    if req.method == 'POST':
        selected_class = req.args.get('selected')
        self.config.set('account-manager', 'password_store', selected_class)
        selected = self.account_manager.password_store
        # Copy each submitted option of the selected store into the config.
        for attr, option in _getoptions(selected):
            newvalue = req.args.get('%s.%s' % (selected_class, attr))
            if newvalue is not None:
                self.config.set(option.section, option.name, newvalue)
        self.config.save()
    # Re-read the selected store; it may be unset/invalid after the change.
    try:
        selected = self.account_manager.password_store
    except AttributeError:
        selected = None
    sections = [
        {'name': store.__class__.__name__,
         'classname': store.__class__.__name__,
         'selected': store is selected,
         'options': [
             {'label': attr,
              'name': '%s.%s' % (store.__class__.__name__, attr),
              # Read the descriptor value bound to this store instance.
              'value': option.__get__(store, store),
              }
             for attr, option in _getoptions(store)
         ],
         }
        for store in self.account_manager.stores
    ]
    sections = sorted(sections, key=lambda i: i['name'])
    req.hdf['sections'] = sections
    return 'admin_accountsconfig.cs', None
def save(self):
    """Write the configuration options to the primary file."""
    if not self.filename:
        return

    # Only save options that differ from the defaults
    sections = []
    for section in self.sections():
        options = []
        for option in self[section]:
            # has_option(...) and get(...) yields the value when present,
            # or False when the option is absent from that parser.
            default = self.site_parser.has_option(section, option) and \
                      self.site_parser.get(section, option)
            current = self.parser.has_option(section, option) and \
                      self.parser.get(section, option)
            if current is not False and current != default:
                options.append((option, current))
        if options:
            sections.append((section, sorted(options)))

    fileobj = file(self.filename, 'w')
    try:
        print >> fileobj, '# -*- coding: utf-8 -*-'
        print >> fileobj
        for section, options in sections:
            print >> fileobj, '[%s]' % section
            for key, val in options:
                if key in self[section].overridden:
                    print >> fileobj, '# %s = <set in global trac.ini>' % key
                else:
                    # Continuation lines in ini values are indented by one
                    # space after normalizing line endings.
                    val = val.replace(CRLF, '\n').replace('\n', '\n ')
                    print>>fileobj, '%s = %s' % \
                                    (key, to_unicode(val).encode('utf-8'))
            print >> fileobj
    finally:
        fileobj.close()
def render_admin_panel(self, req, cat, page, path_info):
    """Admin panel for one ticket field: list, add, remove and toggle
    hidden values stored in the ``hidevals`` table."""
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    # Look up the ticket field whose name matches this admin sub-page.
    field = dict([(field['name'], field) for field in
                  TicketSystem(self.env).get_ticket_fields()])[page]
    cursor.execute('SELECT sid, value FROM hidevals WHERE field = %s',
                   (field['name'],))
    values = cursor.fetchall()
    # Filtering is enabled unless the field is in the dont_filter list.
    enabled = field['name'] not in HideValsSystem(self.env).dont_filter
    if req.method == 'POST':
        if req.args.get('add'):
            group = req.args['group']
            value = req.args['value']
            if (group, value) not in values:  # avoid duplicate rows
                cursor.execute('INSERT INTO hidevals (sid, field, value) VALUES (%s, %s, %s)',
                               (group, field['name'], value))
                db.commit()
        elif req.args.get('remove'):
            sel = req.args.getlist('sel')
            for val in sel:
                # Checkbox values are encoded as "group#value".
                group, value = val.split('#', 1)
                cursor.execute('DELETE FROM hidevals WHERE sid = %s AND field = %s AND value = %s',
                               (group, field['name'], value))
            db.commit()
        elif req.args.get('toggle'):
            # Flip this field's membership in the dont_filter option.
            new_val = HideValsSystem(self.env).dont_filter[:]
            if enabled:
                new_val.append(field['name'])
            else:
                new_val.remove(field['name'])
            self.config.set('hidevals', 'dont_filter',
                            ', '.join(sorted(new_val)))
            self.config.save()
        req.redirect(req.href.admin(cat, page))
    data = {'field' : field,
            'values' : [{'group': g, 'value': v} for g, v in values],
            'enabled' : enabled}
    return 'admin_hidevals.html', data
def save(self):
    """Write the configuration options to the primary file."""
    if not self.filename:
        return

    # Only save options that differ from the defaults
    sections = []
    for section in self.sections():
        options = []
        for option in self[section]:
            # has_option(...) and get(...) yields the value when present,
            # or False when the option is absent from that parser.
            default = self.site_parser.has_option(section, option) and \
                      self.site_parser.get(section, option)
            current = self.parser.has_option(section, option) and \
                      self.parser.get(section, option)
            if current is not False and current != default:
                options.append((option, current))
        if options:
            sections.append((section, sorted(options)))

    fileobj = file(self.filename, 'w')
    try:
        print>>fileobj, '# -*- coding: utf-8 -*-'
        print>>fileobj
        for section, options in sections:
            print>>fileobj, '[%s]' % section
            for key, val in options:
                if key in self[section].overridden:
                    print>>fileobj, '# %s = <set in global trac.ini>' % key
                else:
                    # Continuation lines in ini values are indented by one
                    # space after normalizing line endings.
                    val = val.replace(CRLF, '\n').replace('\n', '\n ')
                    print>>fileobj, '%s = %s' % \
                                    (key, to_unicode(val).encode('utf-8'))
            print>>fileobj
    finally:
        fileobj.close()
def render_macro(self, req, name, filter):
    """Render documentation for all trac.ini sections matching *filter*,
    one heading plus option table per section."""
    from trac.config import Option
    from trac.wiki.formatter import wiki_to_html, wiki_to_oneliner
    filter = filter or ""
    sections = set([section for section, option in Option.registry.keys()
                    if section.startswith(filter)])
    blocks = []
    for section in sorted(sections):
        rows = [html.TR(html.TD(html.TT(option.name)),
                        html.TD(wiki_to_oneliner(option.__doc__, self.env)))
                for option in Option.registry.values()
                if option.section == section]
        heading = html.H2("[%s]" % section, id="%s-section" % section)
        blocks.append((heading, html.TABLE(class_="wiki")(html.TBODY(rows))))
    return html.DIV(class_="tracini")(blocks)
def process_request(self, req):
    """Fill the HDF with per-type field visibility and render the
    conditional-fields javascript template."""
    mode = req.args.get('mode', 'view')
    all_fields = []
    standard_fields = set()
    for field in TicketSystem(self.env).get_ticket_fields():
        fname = field['name']
        all_fields.append(fname)
        if not field.get('custom'):
            standard_fields.add(fname)
    # Relocate 'owner' so it renders just before 'cc' (or last if no 'cc').
    if 'owner' in all_fields:
        owner_pos = all_fields.index('owner')
        target = all_fields.index('cc') if 'cc' in all_fields else len(all_fields)
        if owner_pos < target:
            all_fields.insert(target, all_fields[owner_pos])
            del all_fields[owner_pos]
    for ticket_type in self.types:
        visible = set(getattr(self, ticket_type + '_fields'))
        if self.include_std:
            visible.update(standard_fields)
        visible.update(self.forced_fields)
        req.hdf['condfields.types.%s' % ticket_type] = \
            dict((fname, fname in visible) for fname in all_fields)
    req.hdf['condfields.mode'] = mode
    req.hdf['condfields.all_fields'] = list(all_fields)
    req.hdf['condfields.ok_fields'] = sorted(
        set(all_fields) - self.forced_fields,
        key=all_fields.index)
    return 'condfields.cs', 'text/javascript'
def _do_config(self, req):
    """Handle the account-manager password-store admin page: persist a
    POSTed store selection and its options, then list all stores."""
    if req.method == 'POST':
        selected_class = req.args.get('selected')
        self.config.set('account-manager', 'password_store', selected_class)
        selected = self.account_manager.password_store
        # Copy each submitted option of the selected store into the config.
        for attr, option in _getoptions(selected):
            newvalue = req.args.get('%s.%s' % (selected_class, attr))
            if newvalue is not None:
                self.config.set(option.section, option.name, newvalue)
        self.config.save()
    # Re-read the selected store; it may be unset/invalid after the change.
    try:
        selected = self.account_manager.password_store
    except AttributeError:
        selected = None
    sections = [{
        'name': store.__class__.__name__,
        'classname': store.__class__.__name__,
        'selected': store is selected,
        'options': [{
            'label': attr,
            'name': '%s.%s' % (store.__class__.__name__, attr),
            # Read the descriptor value bound to this store instance.
            'value': option.__get__(store, store),
        } for attr, option in _getoptions(store)],
    } for store in self.account_manager.stores]
    sections = sorted(sections, key=lambda i: i['name'])
    req.hdf['sections'] = sections
    return 'admin_accountsconfig.cs', None
def _edit_path(self, req, cat, page, path_info):
    """
    Populates the editpath.* parts of the hdf
    @return the value of editgroup.url or None
    """
    data = {}
    # Strip everything up to and including the first '/' and decode the
    # remainder as a filesystem path.
    editpath = url2pathname(path_info[path_info.index('/')+1:len(path_info)])
    paths = [(p.get_repo(), p.get_path()) for p in self.authz.get_paths()]
    validpath = self._get_valid_path(paths, editpath)
    if validpath:
        data['editpath_name'] = editpath
        data['editpath_url'] = pathname2url(editpath)
        pathmembers = self.authz.find_path(validpath[1], validpath[0])
        editpath_members = []
        for member in pathmembers:
            # 'checked' strings feed straight into checkbox attributes.
            read = write = ""
            if member.is_read():
                read = "checked"
            if member.is_write():
                write = "checked"
            editpath_members.append({'subject' : member.get_member().__str__(),
                                     'read' : read,
                                     'write' : write})
        # Case-insensitive sort by subject name for stable display order.
        data['editpath_members'] = sorted(
            editpath_members,
            key=lambda member : member['subject'].lower())
        # Populate member candidates
        not_in_list = [m.get_member().__str__() for m in pathmembers]
        candidates = self._get_candidate_subjects(not_in_list)
        if candidates != []:
            data['editpath_candidates'] = candidates
        return data['editpath_url'], data
    return None, {}
def render_macro(self, req, name, content):
    """Render a table of all [intertrac] prefixes (HTML-builder variant,
    French UI strings)."""
    # Collect the [intertrac] config into {prefix: {attr: value}} dicts;
    # plain "prefix = target" entries are stored as string aliases.
    intertracs = {}
    for key, value in self.config.options('intertrac'):
        idx = key.rfind('.')  # rsplit only in 2.4
        if idx > 0:  # 0 itself doesn't help much: .xxx = ...
            prefix, attribute = key[:idx], key[idx+1:]
            intertrac = intertracs.setdefault(prefix, {})
            intertrac[attribute] = value
        else:
            intertracs[key] = value  # alias

    def generate_prefix(prefix):
        # Yield one table row per prefix: either an alias note or a pair
        # of links (timeline + site) when a URL is configured.
        intertrac = intertracs[prefix]
        if isinstance(intertrac, basestring):
            yield html.TR(html.TD(html.B(prefix)),
                          html.TD('Alias pour ', html.B(intertrac)))
        else:
            url = intertrac.get('url', '')
            if url:
                title = intertrac.get('title', url)
                yield html.TR(html.TD(html.A(html.B(prefix),
                                             href=url + '/timeline')),
                              html.TD(html.A(title, href=url)))

    return html.TABLE(class_="wiki intertrac")(
        html.TR(html.TH(html.EM('Prefix')), html.TH(html.EM('Site Trac'))),
        [generate_prefix(p) for p in sorted(intertracs.keys())])
def render_macro(self, req, name, content):
    """Render an unordered list of wiki pages, optionally restricted to
    the prefix given as macro *content*."""
    prefix = content or None
    wiki = WikiSystem(self.env)
    items = []
    for page in sorted(wiki.get_pages(prefix)):
        link = html.A(wiki.format_page_name(page), href=req.href.wiki(page))
        items.append(html.LI(link))
    return html.UL(items)
def write_groups(self, groups):
    """Write *groups* back to the htgroup file.

    :param groups: mapping of group name -> list of member names;
                   empty groups are skipped.

    The file is only rewritten when it already exists; a missing file
    is silently left alone.
    """
    group_file_name = self.get_group_filename()
    if os.path.exists(group_file_name):
        # 'open' instead of the deprecated py2-only 'file' builtin.
        group_file = open(group_file_name, 'w')
        try:
            for group_name in sorted(groups):
                members = groups[group_name]
                if members:  # skip empty groups
                    group_file.write(
                        '%s: %s\n' % (group_name,
                                      ' '.join(sorted(members))))
        finally:
            group_file.close()
        self.env.log.debug('htgroup-editor wrote %r' % (group_file_name, ))
def write_groups(self, groups):
    """Persist the group mapping to the htgroup file (only if the file
    already exists); empty groups are not written."""
    group_file_name = self.get_group_filename()
    # Rewrite only an already-existing group file.
    if os.path.exists(group_file_name):
        group_file = file(group_file_name, 'w')
        try:
            for group_name in sorted(groups):
                members = groups[group_name]
                if not members:
                    continue  # nothing to write for an empty group
                line = '%s: %s\n' % (group_name, ' '.join(sorted(members)))
                group_file.write(line)
        finally:
            group_file.close()
        self.env.log.debug('htgroup-editor wrote %r' % (group_file_name,))
def _render_directory(self, req, repos, node, rev=None): req.perm.assert_permission('BROWSER_VIEW') # Entries metadata info = [] for entry in node.get_entries(): info.append({ 'name': entry.name, 'fullpath': entry.path, 'is_dir': entry.isdir, 'content_length': entry.content_length, 'size': pretty_size(entry.content_length), 'rev': entry.rev, 'log_href': req.href.log(entry.path, rev=rev), 'browser_href': req.href.browser(entry.path, rev=rev) }) changes = get_changes(self.env, repos, [i['rev'] for i in info]) # Ordering of entries order = req.args.get('order', 'name').lower() desc = req.args.has_key('desc') if order == 'date': def file_order(a): return changes[a['rev']]['date_seconds'] elif order == 'size': def file_order(a): return (a['content_length'], embedded_numbers(a['name'].lower())) else: def file_order(a): return embedded_numbers(a['name'].lower()) dir_order = desc and 1 or -1 def browse_order(a): return a['is_dir'] and dir_order or 0, file_order(a) info = sorted(info, key=browse_order, reverse=desc) switch_ordering_hrefs = {} for col in ('name', 'size', 'date'): switch_ordering_hrefs[col] = req.href.browser( node.path, rev=rev, order=col, desc=(col == order and not desc and 1 or None)) # ''Zip Archive'' alternate link patterns = self.downloadable_paths if node.path and patterns and \ filter(None, [fnmatchcase(node.path, p) for p in patterns]): zip_href = req.href.changeset(rev or repos.youngest_rev, node.path, old=rev, old_path='/', format='zip') add_link(req, 'alternate', zip_href, 'Zip Archive', 'application/zip', 'zip') req.hdf['browser'] = {'order': order, 'desc': desc and 1 or 0, 'items': info, 'changes': changes, 'order_href': switch_ordering_hrefs}
def _do_htgroup(self, req): """Provide a list of groups, a current group name and a list of users in the current group. """ # get the groups from the database file groups = self.get_groups() # prepare the selection and user list group_name = req.args.get('group') if not group_name or group_name not in groups: # no group name given, try to use a default if groups: #not empty group_name = sorted(groups.keys())[0] else: group_name = None # process forms/commands if req.method == 'POST': if req.args.get('add'): new_group = req.args.get('new_group') new_user = req.args.get('new_user') groups.setdefault(new_group, []).append(new_user) self.write_groups(groups) req.hdf['message'] = u'added %s to %s' % (new_user, new_group) elif req.args.get('remove') and req.args.get('group'): sel = req.args.get('sel') sel = isinstance(sel, list) and sel or [sel] for user in sel: groups[req.args.get('group')].remove(user) self.write_groups(groups) req.hdf['message'] = u'removed %s from %s' % ( sel, req.args.get('group')) # listst and other info req.hdf['groups'] = sorted(groups.keys()) listing_enabled = isinstance(group_name, basestring) and group_name in groups if listing_enabled: req.hdf['group'] = group_name req.hdf['members'] = sorted(groups[group_name]) req.hdf['listing_enabled'] = listing_enabled req.hdf['selection_enabled'] = len(groups.keys()) > 1 return 'htgroup_editor.cs', None
def _do_htgroup(self, req): """Provide a list of groups, a current group name and a list of users in the current group. """ # get the groups from the database file groups = self.get_groups() # prepare the selection and user list group_name = req.args.get('group') if not group_name or group_name not in groups: # no group name given, try to use a default if groups: #not empty group_name = sorted(groups.keys())[0] else: group_name = None # process forms/commands if req.method == 'POST': if req.args.get('add'): new_group = req.args.get('new_group') new_user = req.args.get('new_user') groups.setdefault(new_group, []).append(new_user) self.write_groups(groups) req.hdf['message'] = u'added %s to %s' % (new_user, new_group) elif req.args.get('remove') and req.args.get('group'): sel = req.args.get('sel') sel = isinstance(sel, list) and sel or [sel] for user in sel: groups[req.args.get('group')].remove(user) self.write_groups(groups) req.hdf['message'] = u'removed %s from %s' % (sel, req.args.get('group')) # listst and other info req.hdf['groups'] = sorted(groups.keys()) listing_enabled = isinstance(group_name, basestring) and group_name in groups if listing_enabled: req.hdf['group'] = group_name req.hdf['members'] = sorted(groups[group_name]) req.hdf['listing_enabled'] = listing_enabled req.hdf['selection_enabled'] = len(groups.keys()) > 1 return 'htgroup_editor.cs', None
def get_supported_conversions(self, mimetype):
    """Return a list of target MIME types in same form as
    `IContentConverter.get_supported_conversions()`, but with the
    converter component appended. Output is ordered from best to worst
    quality."""
    matches = []
    for component in self.converters:
        for key, name, ext, in_mime, out_mime, quality in \
                component.get_supported_conversions():
            # Keep only positive-quality conversions from this mimetype.
            if in_mime == mimetype and quality > 0:
                matches.append((key, name, ext, in_mime, out_mime,
                                quality, component))
    # Highest quality first; quality is second-to-last in each tuple.
    matches.sort(key=lambda entry: entry[-2], reverse=True)
    return matches
def render_one(page, langs):
    # Build "PageName (lang1, lang2, ...)": the main page link followed
    # by one link per language variant.  Uses 'wiki', 'formatter' and
    # 'tag' from the enclosing scope.
    result = [tag.a(wiki.format_page_name(page),
                    href=formatter.href.wiki(page))]
    if langs:
        for lang in sorted(langs):
            result.append(', ')
            # Language variants live at "PageName.lang".
            p = '%s.%s' % (page, lang)
            result.append(tag.a(lang or 'default',
                                style='color:#833',
                                href=formatter.href.wiki(p)))
        # Replace the first separator with the opening parenthesis.
        result[1] = ' ('
        result.append(')')
    return result
def test_conversions(self):
    """The ticket module must advertise csv/tab/rss conversions for
    tickets, in the order get_supported_conversions() produces."""
    conversions = self.mimeview.get_supported_conversions(
        'trac.ticket.Ticket')
    # Sort by the trailing converter component (i[-1]) to mirror the
    # ordering applied by the implementation under test.
    expected = sorted(
        [('csv', 'Comma-delimited Text', 'csv', 'trac.ticket.Ticket',
          'text/csv', 8, self.ticket_module),
         ('tab', 'Tab-delimited Text', 'tsv', 'trac.ticket.Ticket',
          'text/tab-separated-values', 8, self.ticket_module),
         ('rss', 'RSS Feed', 'xml', 'trac.ticket.Ticket',
          'application/rss+xml', 8, self.ticket_module)],
        key=lambda i: i[-1], reverse=True)
    self.assertEqual(expected, conversions)
def test_conversions(self):
    """get_supported_conversions() must report csv/tab/rss for tickets."""
    conversions = self.mimeview.get_supported_conversions(
        'trac.ticket.Ticket')
    rows = [
        ('csv', 'Comma-delimited Text', 'csv',
         'trac.ticket.Ticket', 'text/csv', 8, self.ticket_module),
        ('tab', 'Tab-delimited Text', 'tsv',
         'trac.ticket.Ticket', 'text/tab-separated-values', 8,
         self.ticket_module),
        ('rss', 'RSS Feed', 'xml',
         'trac.ticket.Ticket', 'application/rss+xml', 8,
         self.ticket_module),
    ]
    expected = sorted(rows, key=lambda row: row[-1], reverse=True)
    self.assertEqual(expected, conversions)
def split_in_groups(group):
    """Return list of pagename or (key, sublist) elements"""
    # NOTE(review): unlike the sibling implementation elsewhere in this
    # file, no trailing ``return groups`` is visible here — confirm this
    # is not a truncation; as written the function returns None.
    groups = []
    # 'group' items are (key-char-list, pagename) pairs; group on the
    # first remaining key character ('' when exhausted).
    for key, subgrp in groupby(group, lambda (k,p): k and k[0] or ''):
        # Strip the character just grouped on from each key.
        subgrp = [(k[1:],p) for k,p in subgrp]
        if key and len(subgrp) >= minsize:
            sublist = split_in_groups(sorted(subgrp))
            if len(sublist) == 1:
                # Single child: fold its key into ours instead of nesting.
                elt = (key+sublist[0][0], sublist[0][1])
            else:
                elt = (key, sublist)
            groups.append(elt)
        else:
            # Too few members to form a group: emit page names directly.
            for elt in subgrp:
                groups.append(elt[1])
def select(cls, env, db=None):
    """Fetch every version record, ordered newest first; versions
    without a timestamp sort as newest via the sys.maxint sentinel."""
    db = db or env.get_db_cnx()
    cursor = db.cursor()
    cursor.execute("SELECT name,time,description FROM version")
    versions = []
    for name, time, description in cursor:
        v = cls(env)
        v.name = name
        v.time = time and int(time) or None
        v.description = description or ''
        versions.append(v)
    return sorted(versions,
                  key=lambda v: (v.time or sys.maxint,
                                 embedded_numbers(v.name)),
                  reverse=True)
def get_info( self, timestamp=None ):
    """Compute requirement entropy metrics (optionally at *timestamp*)
    and publish them to the HDF for rendering."""
    model = Requirement(self.env)
    met = RequirementMetric(model)
    results = met.entropy(timestamp)
    if results is None:
        self.no_results()
    else:
        # entropy() returns a 6-tuple; unpack by position.
        total_entropy = results[0]
        avg_entropy = results[1]
        components_entropies = results[2]
        req_entropies = results[3]
        fp_entropies = results[4]
        object_entropies = results[5]
        # Ease division below
        if total_entropy == 0:
            total_entropy = 1;
        reqs = []
        for req, entropy in req_entropies.iteritems():
            # req is a (component, fp-id, object-id) key; resolve names.
            my_fp = Fp(self.env, id=req[1])['name']
            my_obj = Object(self.env, id=req[2])['name']
            reqs.append({'name': '<'+req[0]+' '+my_fp+' '+my_obj+'>',
                         'link': self.req.href.requirement(req[0]+
                                 '-'+my_fp+'-'+my_obj),
                         'entropy': '%.3f' % entropy})
        components = []
        for comp, entropy in components_entropies.iteritems():
            components.append({'name': comp,
                               'percent': '%.1f' % (100 * entropy /
                                                    total_entropy)})
        self.req.hdf['graph_path'] = self.req.href.requirements()+'/graph/'
        # py2 comparator sorts: requirements by descending entropy,
        # components alphabetically.
        self.req.hdf['reqs'] = sorted(reqs,
                                      lambda x,y: cmp(float(y['entropy']),
                                                      float(x['entropy'])))
        self.req.hdf['components'] = sorted(components,
                                            lambda x,y: cmp(x['name'],
                                                            y['name']))
def render_macro(self, req, name, content):
    """List every InterWiki prefix in a two-column table."""
    from trac.util import sorted
    from trac.util.html import html as _
    entries = []
    for key in sorted(self.keys()):
        prefix, url, title = self[key]
        entries.append({'prefix': prefix,
                        'url': url,
                        'title': title,
                        'rc_url': self._expand_or_append(url,
                                                         ['RecentChanges']),
                        'description': title == prefix and url or title})
    body = [_.TR(_.TD(_.A(item['prefix'], href=item['rc_url'])),
                 _.TD(_.A(item['description'], href=item['url'])))
            for item in entries]
    return _.TABLE(_.TR(_.TH(_.EM("Prefix")), _.TH(_.EM("Site"))),
                   body, class_="wiki interwiki")
def process_admin_request(self, req, cat, page, path_info):
    """Admin page for editing one trac.ini section (*page*): apply
    POSTed option values, then list all options with their docs."""
    assert req.perm.has_permission('TRAC_ADMIN')
    if page not in set([s for s, _ in Option.registry]):
        raise TracError("Invalid section %s" % page)
    # Apply changes
    if req.method == 'POST':
        options = [option.name for (section, _), option
                   in Option.registry.iteritems() if section == page]
        modified = False
        # Only write options that actually changed.
        for option, value in req.args.iteritems():
            if option in options:
                if self.env.config.get(page, option) != value:
                    self.env.config.set(page, option, value)
                    modified = True
        if modified:
            self.env.log.debug("Updating trac.ini")
            self.env.config.save()
        req.redirect(self.env.href.admin(cat, page))
    add_stylesheet(req, 'iniadmin/css/iniadmin.css')
    options = sorted([option for (section, _), option
                      in Option.registry.iteritems() if section == page],
                     key=lambda a: a.name)
    hdf_options = []
    for option in options:
        doc = wiki_to_html(inspect.getdoc(option), self.env, req)
        value = self.env.config.get(page, option.name)
        # We assume the classes all end in "Option"
        type = option.__class__.__name__.lower()[:-6] or 'text'
        hdf_option = {'name': option.name, 'default': option.default,
                      'doc': Markup(doc), 'value': value, 'type': type}
        if type == 'extension':
            # NOTE(review): this rebinds the outer 'options' list that is
            # still driving the loop — confirm this is intentional.
            options = []
            for impl in option.xtnpt.extensions(self):
                options.append(impl.__class__.__name__)
            options.sort()
            hdf_option['options'] = options
        hdf_options.append(hdf_option)
    req.hdf['iniadmin.section'] = page
    req.hdf['iniadmin.options'] = hdf_options
    return 'iniadmin.cs', None
def _do_config(self, req):
    """Handle the account-manager store-ordering admin page (Genshi
    variant): apply POSTed order/options, then list all stores."""
    stores = StoreOrder(stores=self.account_manager.stores,
                        list=self.account_manager.password_store)
    if req.method == 'POST':
        _setorder(req, stores)
        self.config.set('account-manager', 'password_store',
                        ','.join(stores.get_enabled_store_names()))
        for store in stores.get_all_stores():
            for attr, option in _getoptions(store):
                newvalue = req.args.get('%s.%s' % (store.__class__.__name__,
                                                   attr))
                self.log.debug("%s.%s: %s" % (store.__class__.__name__,
                                              attr, newvalue))
                if newvalue is not None:
                    self.config.set(option.section, option.name, newvalue)
        # NOTE(review): placement of this save() relative to the loops is
        # ambiguous in the archived source — confirm against upstream.
        self.config.save()
        self.config.set('account-manager', 'force_passwd_change',
                        req.args.get('force_passwd_change'))
        self.config.set('account-manager', 'persistent_sessions',
                        req.args.get('persistent_sessions'))
        self.config.save()
    sections = []
    for store in self.account_manager.stores:
        options = []
        for attr, option in _getoptions(store):
            opt_val = option.__get__(store, store)
            # Components display as their class name rather than repr.
            opt_val = isinstance(opt_val, Component) and \
                      opt_val.__class__.__name__ or opt_val
            options.append(
                {'label': attr,
                 'name': '%s.%s' % (store.__class__.__name__, attr),
                 'value': opt_val,
                 })
            continue  # redundant; kept as in original
        sections.append(
            {'name': store.__class__.__name__,
             'classname': store.__class__.__name__,
             'order': stores[store],
             'options' : options,
             })
        continue  # redundant; kept as in original
    sections = sorted(sections, key=lambda i: i['name'])
    numstores = range(0, stores.numstores() + 1)
    data = {'sections': sections,
            'numstores': numstores,
            'force_passwd_change': self.account_manager.force_passwd_change,
            'persistent_sessions': self.account_manager.persistent_sessions}
    return 'admin_accountsconfig.html', data
def render_macro(self, req, name, filter):
    """Document all registered trac.ini options, grouped per section,
    restricted to sections starting with *filter*."""
    from trac.config import Option
    from trac.wiki.formatter import wiki_to_html, wiki_to_oneliner
    filter = filter or ''
    matching = set([s for s, o in Option.registry.keys()
                    if s.startswith(filter)])

    def section_table(section):
        # One table row per option belonging to this section.
        rows = [html.TR(html.TD(html.TT(opt.name)),
                        html.TD(wiki_to_oneliner(opt.__doc__, self.env)))
                for opt in Option.registry.values()
                if opt.section == section]
        return html.TABLE(class_='wiki')(html.TBODY(rows))

    return html.DIV(class_='tracini')(
        [(html.H2('[%s]' % s, id='%s-section' % s), section_table(s))
         for s in sorted(matching)])
def _render_config_panel(self, req, cat, page):
    """Build the template data for the spam-filter configuration panel."""
    req.perm.assert_permission('SPAM_CONFIG')
    filtersys = FilterSystem(self.env)
    strategies = [{'name': strat.__class__.__name__,
                   'karma_points': strat.karma_points,
                   'karma_help': strat.__class__.karma_points.__doc__}
                  for strat in filtersys.strategies]
    strategies.sort(key=lambda info: info['name'])
    return {'strategies': strategies,
            'min_karma': filtersys.min_karma,
            'logging_enabled': filtersys.logging_enabled,
            'purge_age': filtersys.purge_age}
def expand_macro(self, formatter, name, content):
    """Render the InterWiki prefix table using genshi ``tag`` builders."""
    from trac.util import sorted
    rows = []
    for key in sorted(self.keys()):
        prefix, url, title = self[key]
        rows.append({'prefix': prefix,
                     'url': url,
                     'title': title,
                     'rc_url': self._expand_or_append(url,
                                                      ['RecentChanges']),
                     'description': title == prefix and url or title})
    header = tag.tr(tag.th(tag.em("Prefix")), tag.th(tag.em("Site")))
    body = [tag.tr(tag.td(tag.a(item['prefix'], href=item['rc_url'])),
                   tag.td(tag.a(item['description'], href=item['url'])))
            for item in rows]
    return tag.table(header, body, class_="wiki interwiki")
def select(cls, env, include_completed=True, db=None):
    """Return all milestones, optionally excluding completed ones,
    ordered by completion date, then due date, then name."""
    db = db or env.get_db_cnx()
    query = "SELECT name,due,completed,description FROM milestone "
    if not include_completed:
        query += "WHERE COALESCE(completed,0)=0 "
    cursor = db.cursor()
    cursor.execute(query)
    milestones = []
    for row in cursor:
        m = Milestone(env)
        m._from_database(row)
        milestones.append(m)
    # Open-ended dates sort last via the utcmax sentinel.
    return sorted(milestones,
                  key=lambda m: (m.completed or utcmax,
                                 m.due or utcmax,
                                 embedded_numbers(m.name)))
def select(cls, env, db=None):
    """Return all blog parts, ordered newest first.

    :param env: environment used to construct each instance
    :param db: optional database connection; fetched from *env* if absent
    """
    if not db:
        db = env.get_db_cnx()
    cursor = db.cursor()
    cursor.execute("SELECT name,time,description,summery,header,body,argnum "
                   "FROM blogpart")
    blogparts = []
    for name, time, description, summery, header, body, argnum in cursor:
        blogpart = cls(env)
        blogpart.name = blogpart._old_name = name
        blogpart.time = time and datetime.fromtimestamp(int(time), utc) or None
        blogpart.description = description or ''
        # 'summery' mirrors the (misspelled) database column name.
        blogpart.summery = summery
        blogpart.header = header
        # BUG FIX: was 'blogpart.blody = body', which left .body unset.
        blogpart.body = body
        # Keep the old misspelled attribute for any legacy callers.
        blogpart.blody = body
        blogpart.argnum = argnum
        blogparts.append(blogpart)

    def blogpart_order(v):
        # Undated parts sort as newest via the utcmax sentinel.
        return (v.time or utcmax, embedded_numbers(v.name))

    return sorted(blogparts, key=blogpart_order, reverse=True)
def split_pages(self, pages):
    """Split *pages* into (user_pages, system_pages).

    Master names (language suffix stripped) that are not default wiki
    pages count as user pages; pages explicitly declared as system pages
    are moved back to the (sorted) system list.
    """
    system_pages, exc = _exclude(self._wiki_default_pages,
                                 self._explicit_user_pages)
    # Collect master page names; language variants are not stored in
    # system_pages, so comparing the stripped name is sufficient.
    user_pages = []
    for page in pages:
        base, lang = util.split_lang(page)
        if base not in system_pages and base not in user_pages:
            user_pages.append(base)
    # Removed the dead 'lang_map' local: it was assigned but never
    # populated or read.
    # process explicit system pages
    user_pages, exc = _exclude(user_pages, self._explicit_system_pages)
    system_pages += exc
    system_pages = sorted(system_pages)
    return user_pages, system_pages
def _get_labels_or_branches(self, paths):
    """Retrieve known branches or labels."""
    # *paths* is an option key; its value lists path patterns.  A pattern
    # ending in '*' yields every directory child of the parent folder,
    # otherwise the node at the exact path is yielded.
    for path in self.options.get(paths, []):
        self.log.debug('_get_labels_or_branches %s' % path)
        if path.endswith('*'):
            folder = posixpath.dirname(path)
            try:
                entries = [n for n in self.get_node(folder).get_entries()]
                # Natural-ish ordering via embedded_numbers on the path.
                for node in sorted(entries,
                                   key=lambda n:
                                       embedded_numbers(n.path.lower())):
                    if node.kind == Node.DIRECTORY:
                        yield node
            except:
                # no right (TODO: should use a specific Exception here)
                pass
        else:
            try:
                yield self.get_node(path)
            except:
                # no right
                pass
class Acronyms(Component):
    """
    Automatically generates HTML acronyms from definitions in tables
    in a Wiki page (AcronymDefinitions by default).
    """
    implements(IWikiSyntaxProvider, IWikiChangeListener)

    # acronym -> (description, url, selector-url), all escaped
    acronyms = {}
    # regex source matching every known acronym, longest first
    compiled_acronyms = None
    valid_acronym = re.compile(r'^\S+$', re.UNICODE)
    acronym_page = property(lambda self: self.env.config.get(
        'acronym', 'page', 'AcronymDefinitions'))

    def __init__(self):
        self._update_acronyms()

    def _update_acronyms(self):
        """Re-read the definitions table from the acronym wiki page."""
        self.env.log.debug('Updating acronym database')
        page = WikiPage(self.env, self.acronym_page)
        self.acronyms = {}
        if not page.exists:
            return
        for line in page.text.splitlines():
            line = line.rstrip()
            # Only plain table rows count; line[3] != "'" skips rows whose
            # first cell is bold/italic markup (e.g. the header row).
            if line.startswith('||') and line.endswith(
                    '||') and line[3] != "'":
                try:
                    # Cells: acronym || description || url || selector-url;
                    # missing trailing cells default to ''.
                    a, d, u, s = (
                        [i.strip() for i in line.strip('||').split('||')]
                        + ['', ''])[0:4]
                    assert self.valid_acronym.match(
                        a), "Invalid acronym %s" % a
                    self.acronyms[a] = (escape(d), escape(u), escape(s))
                except Exception, e:
                    self.env.log.warning("Invalid acronym line: %s (%s)",
                                         line, e)
        # Longest acronyms first so the regex prefers the longest match.
        keys = reversed(sorted(self.acronyms.keys(), key=lambda a: len(a)))
        self.compiled_acronyms = \
            r'''\b(?P<acronym>%s)(?P<acronymselector>\w*)\b''' % '|'.join(keys)
        # XXX Very ugly, but only "reliable" way?
        # Force the wiki parser to rebuild its rules with our new pattern.
        from trac.wiki.parser import WikiParser
        WikiParser(self.env)._compiled_rules = None
def _get_labels_or_branches(self, paths):
    """Retrieve known branches or labels.

    `paths` names a config option listing repository paths; a trailing
    '*' expands to the directories under the parent folder.  Nodes the
    caller cannot read are skipped.
    """
    for path in self.options.get(paths, []):
        self.log.debug('_get_labels_or_branches %s' % path)
        if path.endswith('*'):
            folder = posixpath.dirname(path)
            try:
                entries = [n for n in self.get_node(folder).get_entries()]
                for node in sorted(
                        entries,
                        key=lambda n: embedded_numbers(n.path.lower())):
                    if node.kind == Node.DIRECTORY:
                        yield node
            except Exception:
                # no right (TODO: should use a specific Exception here);
                # narrowed from a bare `except:` which also swallowed
                # SystemExit/KeyboardInterrupt.
                pass
        else:
            try:
                yield self.get_node(path)
            except Exception:
                # no right
                pass
def select(cls, env, db=None, parent=None):
    """Fetch product versions from the database.

    If `parent` is given, only versions belonging to that parent
    product are returned.  Returns instances sorted newest-first by
    time, then by natural name order.
    """
    db = db or env.get_db_cnx()
    cursor = db.cursor()
    base_sql = ("SELECT name,parent,time,description "
                "FROM multiproduct_product_version")
    if parent:
        cursor.execute(base_sql + " WHERE parent=%s", (parent,))
    else:
        cursor.execute(base_sql)
    versions = []
    for name, parent, time, description in cursor:
        version = cls(env)
        version.name = version._old_name = name
        version.parent = version._old_parent = parent
        # Stored as a unix timestamp; NULL/0 means "no time set".
        version.time = (datetime.fromtimestamp(int(time), utc)
                        if time else None)
        version.description = description or ''
        versions.append(version)

    # Versions without a time sort as "far future" (utcmax).
    return sorted(versions,
                  key=lambda v: (v.time or utcmax,
                                 embedded_numbers(v.name)),
                  reverse=True)
class KeywordReplace(Component): """ Replce wiki keywords from a table in a Wiki page. (KeywordReplace by default). """ implements(IWikiSyntaxProvider, IWikiChangeListener) replace = {} compiled_replace = None valid_replace = re.compile(r'^\S+$', re.UNICODE) replace_page = property( lambda self: self.env.config.get('replace', 'page', 'KeywordReplace')) def __init__(self): self._update_replace() def _update_replace(self): self.env.log.debug('Updating replace database') page = WikiPage(self.env, self.replace_page) self.replace = {} if not page.exists: return for line in page.text.splitlines(): self.env.log.warning(line) line = line.rstrip() if line.startswith('||') and line.endswith( '||') and line[3] != "'": try: a, d = ([i.strip() for i in line.strip('||').split('||')] + ['', ''])[0:2] assert self.valid_replace.match( a), "Invalid replaces %s" % a self.replace[a] = (escape(d)) except Exception, d: self.env.log.warning("Invalid replaces line: %s", line) keys = reversed(sorted(self.replace.keys(), key=lambda a: len(a))) self.compiled_replace = \ r'''\b(?P<replaces>%s)\b''' % '|'.join(keys) # XXX Very ugly, but only "reliable" way? from trac.wiki.parser import WikiParser WikiParser(self.env)._compiled_rules = None
def select(cls, env, include_completed=True, db=None):
    """Fetch milestones from the database.

    When `include_completed` is false, milestones with a completion
    time are filtered out.  Returns instances ordered: open before
    completed, earlier due dates first, then natural name order.
    """
    db = db or env.get_db_cnx()
    sql = "SELECT name,due,completed,description FROM milestone "
    if not include_completed:
        sql += "WHERE COALESCE(completed,0)=0 "
    cursor = db.cursor()
    cursor.execute(sql)
    milestones = []
    for name, due, completed, description in cursor:
        m = Milestone(env)
        m.name = m._old_name = name
        m.due = int(due) if due else 0
        m.completed = int(completed) if completed else 0
        m.description = description or ''
        milestones.append(m)

    # Missing completed/due timestamps sort last via sys.maxint.
    return sorted(milestones,
                  key=lambda m: (m.completed or sys.maxint,
                                 m.due or sys.maxint,
                                 embedded_numbers(m.name)))
def _edit_group(self, req, cat, page, path_info):
    """Populate the editgroup.* parts of the hdf.

    `path_info` carries the URL-encoded group name after its first '/'.

    @return (editgroup_url, data) for a known group, else (None, {}).
    """
    data = {}
    # Everything after the first '/' is the encoded group name.
    editgroup = url2pathname(path_info[path_info.index('/') + 1:])
    group = self.authz.find_group(editgroup)
    if group is not None:
        data['editgroup_name'] = editgroup
        data['editgroup_url'] = pathname2url(editgroup)
        # Materialize member names once instead of iterating the group
        # object twice (once for display, once for the exclusion list).
        members = [str(m) for m in group]
        data['editgroup_members'] = sorted(
            members, key=lambda member: member.lower())
        # Populate member candidates: everyone not already a member,
        # and not the group itself (prevents self-membership).
        not_in_list = members + ["@%s" % editgroup]
        candidates = self._get_candidate_subjects(not_in_list)
        if candidates:
            data['editgroup_candidates'] = candidates
        return data['editgroup_url'], data
    self.env.log.debug("SvnAuthzAdminPlugin: Group %s not found."
                       % editgroup)
    return None, {}
if colIndex != None: k = 'report.headers.%d.asc' % (colIndex - hiddenCols) asc = req.args.get('asc', None) if asc: asc = int(asc) # string '0' or '1' to int/boolean else: asc = 1 req.hdf[k] = asc def sortkey(row): val = row[colIndex] if isinstance(val, basestring): val = val.lower() return val rows = sorted(rows, key=sortkey, reverse=(not asc)) # Get the email addresses of all known users email_map = {} for username, name, email in self.env.get_known_users(): if email: email_map[username] = email # Convert the rows and cells to HDF-format row_idx = 0 for row in rows: col_idx = 0 numrows = len(row) for cell in row: cell = unicode(cell) column = cols[col_idx]
def sections(self):
    """Return a sorted list of section names.

    Merges the section names of the site-wide parser and the local
    parser, de-duplicated.
    """
    names = set(self.site_parser.sections())
    names.update(self.parser.sections())
    return sorted(names)