def _ticket_links(env, formatter, t, a_class=""):
    """Build links to tickets.

    `t` is a dict-like ticket record with `id`, `status`, `summary`,
    `owner`, `description` and `href` keys.  Returns a `(ticket,
    ticket_short)` pair of markup fragments: a full div with a tooltip
    preview of the description, and a compact span variant.
    """
    tkt_id = str(t.get("id"))
    status = t.get("status")
    summary = to_unicode(t.get("summary"))
    owner = to_unicode(t.get("owner"))
    description = to_unicode(t.get("description"))
    url = t.get("href")
    # CSS class reflects the ticket state so themes can style them apart.
    if status == "closed":
        a_class = a_class + "closed"
    else:
        a_class = a_class + "open"
    # Reduce content for tooltips: render the wiki description to HTML,
    # then strip tags and shorten it to a single line.
    markup = format_to_html(env, formatter.context, description)
    extractor = TextExtractor()
    extractor.feed(markup)
    tip = tag.span(shorten_line(extractor.getvalue()))
    ticket = tag.a("#" + tkt_id, href=url)
    ticket(tip, class_="tip", target="_blank")
    ticket = tag.div(ticket, class_=a_class, align="left")
    # Fix stripping of regular leading space in IE.
    blank = " "
    ticket(Markup(blank), summary, " (", owner, ")")
    summary = tag(summary, " (", owner, ")")
    ticket_short = tag.span(tag.a("#" + tkt_id, href=url, target="_blank",
                                  title_=summary),
                            class_=a_class)
    return ticket, ticket_short
def __init__(self, path, params, log):
    """Open the Subversion repository rooted at (or above) `path`.

    :param path: repository path, ideally a `unicode` object
    :param params: extra parameters (unused in this constructor —
        presumably consumed by the superclass or callers; verify)
    :param log: logger instance
    :raises TracError: if `path` is not inside a Subversion repository
        or the repository cannot be opened
    """
    self.log = log
    self.pool = Pool()
    # Remove any trailing slash or else subversion might abort
    if isinstance(path, unicode):
        path_utf8 = path.encode('utf-8')
    else:
        # note that this should usually not happen (unicode arg expected)
        path_utf8 = to_unicode(path).encode('utf-8')
    # Subversion APIs want canonical, forward-slash, UTF-8 paths.
    path_utf8 = core.svn_path_canonicalize(
        os.path.normpath(path_utf8).replace('\\', '/'))
    self.path = path_utf8.decode('utf-8')
    root_path_utf8 = repos.svn_repos_find_root_path(path_utf8, self.pool())
    if root_path_utf8 is None:
        raise TracError(_("%(path)s does not appear to be a Subversion "
                          "repository.", path=to_unicode(path_utf8)))
    try:
        self.repos = repos.svn_repos_open(root_path_utf8, self.pool())
    except core.SubversionException, e:
        raise TracError(_("Couldn't open Subversion repository %(path)s: "
                          "%(svn_error)s", path=to_unicode(path_utf8),
                          svn_error=exception_to_unicode(e)))
def handle_commit(commit, env):
    """Post-receive hook helper: update Trac tickets referenced by `commit`.

    Parses the commit message for command/ticket patterns (e.g.
    "closes #12"), appends the message as a ticket comment, optionally
    closes the ticket, and sends notification e-mails.  Errors on one
    ticket are reported to stderr but do not stop the others.
    """
    from trac.ticket.notification import TicketNotifyEmail
    from trac.ticket import Ticket
    from trac.util.text import to_unicode
    from trac.util.datefmt import utc
    # Full commit message (header + body) and the author e-mail address.
    msg = to_unicode(call_git('rev-list',
                              ['-n', '1', commit, '--pretty=medium']).rstrip())
    eml = to_unicode(call_git('rev-list',
                              ['-n', '1', commit,
                               '--pretty=format:%ae']).splitlines()[1])
    now = datetime.now(utc)
    tickets = {}
    # Only the message body (after the first blank line) is scanned.
    for cmd, tkts in command_re.findall(msg.split('\n\n', 1)[1]):
        action = COMMANDS.get(cmd.lower())
        if action:
            for tkt_id in ticket_re.findall(tkts):
                tickets.setdefault(tkt_id, []).append(action)
    for tkt_id, actions in tickets.iteritems():
        try:
            db = env.get_db_cnx()
            ticket = Ticket(env, int(tkt_id), db)
            if 'close' in actions:
                ticket['status'] = 'closed'
                ticket['resolution'] = 'fixed'
            # trac 1.0: `db` parameter is no longer needed and will be removed in 1.1.1
            # trac 1.0: `cnum` parameter is deprecated
            ticket.save_changes(eml, msg, now)
            db.commit()
            tn = TicketNotifyEmail(env)
            tn.notify(ticket, newticket=0, modtime=now)
        except Exception, e:
            print >>sys.stderr, 'Unexpected error while processing ticket ID %s: %s' % (tkt_id, e)
def get_lines_from_file(filename, lineno, context=0):
    """Return `context` number of lines before and after the specified
    `lineno` from the file identified by `filename`.

    Returns a `(lines_before, line, lines_after)` tuple; lines are
    decoded using the file's PEP 263 coding cookie when present.
    Returns `((), None, ())` when the file does not exist.
    """
    if os.path.isfile(filename):
        fileobj = open(filename, "U")
        try:
            lines = fileobj.readlines()
            lbound = max(0, lineno - context)
            ubound = lineno + 1 + context
            charset = None
            # PEP 263: a coding cookie may appear on line 1 or 2.
            # BUG FIX: iterate lines[:2] instead of (lines[0], lines[1]),
            # which raised IndexError for files with fewer than two lines.
            rep = re.compile(r"coding[=:]\s*([-\w.]+)")
            for linestr in lines[:2]:
                match = rep.search(linestr)
                if match:
                    charset = match.group(1)
                    break
            before = [to_unicode(l.rstrip("\n"), charset)
                      for l in lines[lbound:lineno]]
            line = to_unicode(lines[lineno].rstrip("\n"), charset)
            after = [to_unicode(l.rstrip("\n"), charset)
                     for l in lines[lineno + 1:ubound]]
            return before, line, after
        finally:
            fileobj.close()
    return (), None, ()
def process(self, m): self.updated = True op, argstr = m.groups() op = op or self.default_op self.formatter.env.log.debug('Converting TracForms op: ' + str(op)) kw = {} args = tuple(self.getargs(argstr, kw)) fn = self.env.get('op:' + op.lower()) if fn is None: fn = getattr(self, 'op_' + op.lower(), None) if fn is None: raise FormTooManyValuesError(str(op)) else: try: if op[:5] == 'wikiop_': self.formatter.env.log.debug( 'TracForms wiki value: ' + self.wiki(str(fn(*args)))) return self.wiki(str(fn(*args))) else: self.formatter.env.log.debug( 'TracForms value: ' + to_unicode(fn(*args, **kw))) return to_unicode(fn(*args, **kw)) except FormError, e: return '<PRE>' + str(e) + '</PRE>' except Exception, e: return '<PRE>' + traceback.format_exc() + '</PRE>'
def process_request(self, req):
    """Render the "About" page, gating each data section on CONFIG_VIEW.

    Returns the `(template, data, content_type)` triple expected by
    Trac request handlers.
    """
    data = {'systeminfo': None, 'plugins': None, 'config': None}
    if 'CONFIG_VIEW' in req.perm('config', 'systeminfo'):
        # Collect system information
        data['systeminfo'] = self.env.get_systeminfo()
    if 'CONFIG_VIEW' in req.perm('config', 'plugins'):
        # Collect plugin information
        data['plugins'] = get_plugin_info(self.env)
    if 'CONFIG_VIEW' in req.perm('config', 'ini'):
        # Collect config information
        defaults = self.config.defaults(self.compmgr)
        sections = []
        for section in self.config.sections(self.compmgr):
            options = []
            default_options = defaults.get(section, {})
            for name, value in self.config.options(section, self.compmgr):
                default = default_options.get(name) or ''
                options.append({
                    'name': name, 'value': value,
                    # flag options that differ from their documented default
                    'modified': to_unicode(value) != to_unicode(default)
                })
            options.sort(key=lambda o: o['name'])
            sections.append({'name': section, 'options': options})
        sections.sort(key=lambda s: s['name'])
        data['config'] = sections
    return 'about.html', data, None
def test_send_mail(self):
    """Exercise send_as_email with every combination of byte-string and
    unicode subjects/bodies, attaching all htdocs files as resources,
    and assert the captured SMTP message is UTF-8 encoded with the
    expected subject and attachment name."""
    dir = resource_filename(__name__, os.path.join('..', 'htdocs'))
    files = [os.path.join(dir, f) for f in os.listdir(dir)]
    resources = []
    parent = Resource('repository', '')
    for f in files:
        res = Resource('source', f, parent=parent)
        resources.append(res)
    # Both str and unicode variants of non-ASCII text, plus plain ASCII.
    subjects = ('Re: åäö', u'Re: åäö', 'Re: ascii', )
    bodies = ('Here you gö (Här får du)', u'Here you gö (Här får du)',
              'Ascii body', )
    for subject in subjects:
        subject = to_unicode(subject)
        for body in bodies:
            body = to_unicode(body)
            mail = self.sharesys.send_as_email(
                "anonymous",
                (u'Pöntus Enmärk', '*****@*****.**'),
                [(u'Pontus Enmark', '*****@*****.**'),
                 (u'Pöntus Enmärk', '*****@*****.**')],
                subject, body, *resources)
            headers, sent_body = parse_smtp_message(self.server.get_message())
            # Third body line carries the charset declaration.
            assert 'utf-8' in sent_body.split('\n')[2]
            assert subject == headers['Subject'], headers
            assert os.path.basename(files[0]) in sent_body
def __init__(self, path, authz, log, options={}):
    """Open the Subversion repository rooted at (or above) `path`.

    :param path: repository path, ideally a `unicode` object
    :param authz: authorization handler (unused in this constructor —
        presumably consumed elsewhere; verify)
    :param log: logger instance
    :param options: extra options dict; NOTE(review): mutable default
        argument — harmless only as long as it is never mutated
    :raises TracError: if `path` is not inside a Subversion repository
        or the repository cannot be opened
    """
    self.log = log
    self.options = options
    self.pool = Pool()
    # Remove any trailing slash or else subversion might abort
    if isinstance(path, unicode):
        self.path = path
        path_utf8 = path.encode("utf-8")
    else:
        # note that this should usually not happen (unicode arg expected)
        self.path = to_unicode(path)
        path_utf8 = self.path.encode("utf-8")
    # Subversion APIs want forward-slash, UTF-8 paths.
    path_utf8 = os.path.normpath(path_utf8).replace("\\", "/")
    root_path_utf8 = repos.svn_repos_find_root_path(path_utf8, self.pool())
    if root_path_utf8 is None:
        raise TracError(_("%(path)s does not appear to be a Subversion "
                          "repository.", path=to_unicode(path_utf8)))
    try:
        self.repos = repos.svn_repos_open(root_path_utf8, self.pool())
    except core.SubversionException, e:
        raise TracError(
            _(
                "Couldn't open Subversion repository %(path)s: "
                "%(svn_error)s",
                path=to_unicode(path_utf8),
                svn_error=exception_to_unicode(e),
            )
        )
def send(self, torcpts, ccrcpts): header = {} # Add item specific e-mail header fields. if self.message: # Get this messge ID. header['Message-ID'] = self.get_message_id(self.forum['id'], self.topic['id'], self.message['id']) header['X-Trac-Message-ID'] = to_unicode(self.message['id']) header['X-Trac-Discussion-URL'] = self.message['link'] # Get replied message ID. reply_id = self.get_message_id(self.forum['id'], self.topic['id'], self.message['replyto']) header['In-Reply-To'] = reply_id header['References'] = reply_id else: # Get this message ID. header['Message-ID'] = self.get_message_id(self.forum['id'], self.topic['id'], 0) header['X-Trac-Topic-ID'] = to_unicode(self.topic['id']) header['X-Trac-Discussion-URL'] = self.topic['link'] # Send e-mail. NotifyEmail.send(self, torcpts, ccrcpts, header)
def _save(self, timestamp, value, update=False, db=None):
    """Saves a remaining time value to the database. The update parameter
    decides if the value should be updated (True) or inserted (False)"""
    params = {
        Key.TABLE: BURNDOWN_TABLE,
        Key.TASK_ID: self.task.id,
        Key.DATE: timestamp,
        Key.REMAINING_TIME: value,
    }
    # NOTE(review): SQL is assembled with %-interpolation rather than
    # bind parameters.  The values come from internal task state, but a
    # parameterized query would be safer — confirm the db API's
    # placeholder style before changing.
    if update:
        sql_query = "UPDATE %(table)s SET remaining_time=%(remaining_time)d " \
                    "WHERE task_id=%(task_id)d AND date=%(date)f" % params
    else:
        sql_query = "INSERT INTO %(table)s (task_id, date, remaining_time) " \
                    "VALUES (%(task_id)s, %(date)s, %(remaining_time)s)" % params
    # handle_ta tells us whether this call owns the transaction.
    db, handle_ta = get_db_for_write(self.env, db)
    try:
        cursor = db.cursor()
        cursor.execute(sql_query)
        if handle_ta:
            db.commit()
        debug(self, "DB Committed, saved remaining time (%s) for task %d" % \
              (params[Key.REMAINING_TIME], self.task.id))
    except Exception, e:
        error(self, to_unicode(e))
        if handle_ta:
            db.rollback()
        raise TracError("Error while saving remaining time: %s" % \
                        to_unicode(e))
def add_value(prefix, value):
    """Recursively store `value` under the dotted key `prefix`.

    Scalars are set directly (escaped when `do_escape` is on);
    dicts recurse with `prefix.key`, iterables with `prefix.index`.
    `None` values are skipped entirely.
    """
    if value is None:
        return
    # Booleans are stored as 0/1; test before other types since
    # bool is also an int.
    if value in (True, False):
        set_str(prefix, int(value))
    elif isinstance(value, (Markup, Fragment)):
        set_unicode(prefix, unicode(value))
    elif isinstance(value, str):
        if do_escape:
            # Assume UTF-8 here, for backward compatibility reasons
            set_unicode(prefix, escape(to_unicode(value)))
        else:
            set_str(prefix, value)
    elif isinstance(value, unicode):
        if do_escape:
            set_unicode(prefix, escape(value))
        else:
            set_unicode(prefix, value)
    elif isinstance(value, dict):
        for k in value.keys():
            add_value('%s.%s' % (prefix, to_unicode(k)), value[k])
    else:
        if hasattr(value, '__iter__') or \
                isinstance(value, (list, tuple)):
            for idx, item in enumerate(value):
                add_value('%s.%d' % (prefix, idx), item)
        else:
            # Fallback: let set_str stringify anything else.
            set_str(prefix, value)
def get_macro_descr():
    """Yield `(description, names)` pairs for all wiki macro providers.

    Macros sharing the same description are grouped together.  When
    `content` is '*' only a one-line description is rendered; otherwise
    the full HTML description is produced.
    """
    for macro_provider in formatter.wiki.macro_providers:
        names = list(macro_provider.get_macros() or [])
        # Honour the optional name prefix filter.
        if name_filter and not any(name.startswith(name_filter)
                                   for name in names):
            continue
        try:
            name_descriptions = [
                (name, macro_provider.get_macro_description(name))
                for name in names]
        except Exception, e:
            yield system_message(
                _("Error: Can't get description for macro %(name)s",
                  name=names[0]), e), names
        else:
            # Group macros that share an identical description.
            for descr, pairs in groupby(name_descriptions,
                                        key=lambda p: p[1]):
                if descr:
                    # A (domain, message) tuple means a translatable
                    # description; translate via dgettext.
                    if isinstance(descr, (tuple, list)):
                        descr = dgettext(descr[0], to_unicode(descr[1])) \
                                if descr[1] else ''
                    else:
                        descr = to_unicode(descr) or ''
                    if content == '*':
                        descr = format_to_oneliner(
                            self.env, formatter.context, descr,
                            shorten=True)
                    else:
                        descr = format_to_html(
                            self.env, formatter.context, descr)
                yield descr, [name for name, descr in pairs]
def __process_div_projects_milestones(self, milestones, div_milestones_array, req):
    """Group milestone divs by project and wrap each group in a
    project fieldset; returns the combined markup as a Genshi HTML
    stream.

    `div_milestones_array` holds pre-rendered milestone HTML snippets;
    each is matched to its project by searching for '<em>name</em>'.
    NOTE(review): markup is assembled by raw string concatenation —
    project names/descriptions are interpolated unescaped; confirm
    they are trusted input.
    """
    project = self._map_milestones_to_projects(milestones)
    hide = smp_settings(req, 'roadmap', 'hide')
    # Project descriptions are shown unless explicitly hidden.
    show_proj_descr = False
    if hide is None or 'projectdescription' not in hide:
        show_proj_descr = True
    div_projects_milestones = ''
    for a in sorted(project.keys()):
        if(a == "--None Project--"):
            div_project = '<br><div id="project"><fieldset><legend><h2>No Project</h2></legend>'
        else:
            project_info = self.__SmpModel.get_project_info(a)
            div_project = '<br><div id="project"><fieldset><legend><b>Project </b> <em style="font-size: 12pt; color: black;">%s</em></legend>' % a
            if project_info and show_proj_descr:
                div_project = div_project + '<div class="description" xml:space="preserve">'
                if project_info[2]:
                    div_project = div_project + '%s<br/><br/>' % project_info[2]
                div_project = div_project + '%s</div>' % wiki_to_html(project_info[3], self.env, req)
        # Collect the pre-rendered milestone divs for this project.
        div_milestone = ''
        if len(project[a]) > 0:
            for b in project[a]:
                mi = '<em>%s</em>' % b
                for i in range(len(div_milestones_array)):
                    if(div_milestones_array[i].find(mi) > 0):
                        div_milestone = div_milestone + div_milestones_array[i]
        div_project = div_project + to_unicode(div_milestone) + '</fieldset></div>'
        div_projects_milestones = to_unicode(div_projects_milestones + div_project)
    stream_div_projects_milestones = HTML(div_projects_milestones)
    return stream_div_projects_milestones
def send_rpc_error(self, req, e):
    """Send an XML-RPC fault message back to the caller.

    Known exception types are mapped to well-defined fault codes;
    anything else is logged with a traceback and reported as a
    generic fault.
    """
    rpcreq = req.rpc
    fault = None
    if isinstance(e, ProtocolException):
        # Protocol errors already carry a prepared Fault.
        fault = e._exc
    elif isinstance(e, ServiceException):
        # Unwrap to the underlying exception and fall through to the
        # generic handler below.
        e = e._exc
    elif isinstance(e, MethodNotFound):
        fault = xmlrpclib.Fault(-32601, to_unicode(e))
    elif isinstance(e, PermissionError):
        fault = xmlrpclib.Fault(403, to_unicode(e))
    elif isinstance(e, ResourceNotFound):
        fault = xmlrpclib.Fault(404, to_unicode(e))
    if fault is not None:
        self._send_response(req, xmlrpclib.dumps(fault), rpcreq['mimetype'])
    else:
        # Unexpected error: log the full traceback and wrap it in a
        # fault using the exception's own code when available.
        self.log.error(e)
        import traceback
        from tracrpc.util import StringIO
        out = StringIO()
        traceback.print_exc(file=out)
        self.log.error(out.getvalue())
        err_code = hasattr(e, 'code') and e.code or 1
        method = rpcreq.get('method')
        self._send_response(req, xmlrpclib.dumps(
            xmlrpclib.Fault(err_code,
                            "'%s' while executing '%s()'" % (str(e), method))))
def pre_process_request(self, req, handler):
    """Synchronize repositories flagged `sync_per_request` before the
    request is handled; always returns `handler` unchanged.

    Sync failures never abort the request: they surface as warnings on
    the page and as log entries.  Chrome requests (static resources)
    are skipped entirely.
    """
    from trac.web.chrome import Chrome, add_warning
    if handler is not Chrome(self.env):
        for repo_info in self.get_all_repositories().values():
            if not as_bool(repo_info.get('sync_per_request')):
                continue
            start = time.time()
            repo_name = repo_info['name'] or '(default)'
            try:
                repo = self.get_repository(repo_info['name'])
                repo.sync()
            except TracError as e:
                # Expected failure class: warn the user only.
                add_warning(req,
                    _("Can't synchronize with repository \"%(name)s\" "
                      "(%(error)s). Look in the Trac log for more "
                      "information.", name=repo_name,
                      error=to_unicode(e)))
            except Exception as e:
                # Unexpected failure: warn the user and log details.
                add_warning(req,
                    _("Failed to sync with repository \"%(name)s\": "
                      "%(error)s; repository information may be out of "
                      "date. Look in the Trac log for more information "
                      "including mitigation strategies.",
                      name=repo_name, error=to_unicode(e)))
                self.log.error(
                    "Failed to sync with repository \"%s\"; You may be "
                    "able to reduce the impact of this issue by "
                    "configuring the sync_per_request option; see "
                    "http://trac.edgewall.org/wiki/TracRepositoryAdmin"
                    "#ExplicitSync for more detail: %s", repo_name,
                    exception_to_unicode(e, traceback=True))
            # NOTE(review): logged even when sync failed — the elapsed
            # time then covers the failed attempt; confirm intended.
            self.log.info("Synchronized '%s' repository in %0.2f seconds",
                          repo_name, time.time() - start)
    return handler
def _write(self, lines, product=None):
    r"""Override superclass method by writing configuration values to
    the database rather than ini file in the filesystem.

    `lines` is a sequence of ini-format text lines; they are parsed
    and stored as ProductSetting rows for `product` (defaulting to
    `self.default_product`), replacing any existing settings for that
    product.
    """
    if product is None:
        product = self.default_product
    product = to_unicode(product)
    # Round-trip the lines through ConfigParser to get sections/options.
    fp = StringIO(('\n'.join(lines + [''])).encode('utf-8'))
    parser = ConfigParser()
    parser.readfp(fp, 'bh-product-test')
    with self.env.db_transaction as db:
        # Delete existing setting for target product , if any
        for setting in ProductSetting.select(self.env, db,
                                             {'product': product}):
            setting.delete()
        # Insert new options
        for section in parser.sections():
            option_key = dict(
                section=to_unicode(section),
                product=to_unicode(product)
            )
            for option, value in parser.items(section):
                option_key.update(dict(option=to_unicode(option)))
                setting = ProductSetting(self.env)
                setting._data.update(option_key)
                setting._data['value'] = to_unicode(value)
                setting.insert()
def onecmd(self, line):
    """`line` may be a `str` or an `unicode` object.

    Executes one trac-admin command, converting known error types to
    printed messages and an exit status (0 success, 2 failure).  In
    interactive mode nothing is returned so the command loop keeps
    running; otherwise the status code is returned for the shell.
    """
    try:
        if isinstance(line, str):
            # Decode byte input: terminal encoding when interactive,
            # locale encoding for sys.argv-supplied commands.
            if self.interactive:
                encoding = sys.stdin.encoding
            else:
                encoding = getpreferredencoding()  # sys.argv
            line = to_unicode(line, encoding)
        if self.interactive:
            # Preserve literal backslashes typed at the prompt.
            line = line.replace('\\', '\\\\')
        rv = cmd.Cmd.onecmd(self, line) or 0
    except SystemExit:
        raise
    except AdminCommandError as e:
        printerr(_("Error: %(msg)s", msg=to_unicode(e)))
        if e.show_usage:
            print()
            self.do_help(e.cmd or self.arg_tokenize(line)[0])
        rv = 2
    except TracError as e:
        printerr(exception_to_unicode(e))
        rv = 2
    except Exception as e:
        printerr(exception_to_unicode(e))
        rv = 2
        if self.env_check():
            self.env.log.error("Exception in trac-admin command: %s",
                               exception_to_unicode(e, traceback=True))
    if not self.interactive:
        return rv
def insert(self, db=None):
    """Insert a new build log into the database.

    Also writes the buffered log messages and their levels to a pair
    of UTF-8 files named after the new row id.  When no `db` is passed
    this method owns the transaction and commits it.
    """
    if not db:
        db = self.env.get_db_cnx()
        handle_ta = True
    else:
        handle_ta = False
    assert self.build and self.step
    cursor = db.cursor()
    cursor.execute(
        "INSERT INTO bitten_log (build,step,generator,orderno) "
        "VALUES (%s,%s,%s,%s)",
        (self.build, self.step, self.generator, self.orderno),
    )
    id = db.get_last_id(cursor, "bitten_log")
    log_file = "%s.log" % (id,)
    cursor.execute("UPDATE bitten_log SET filename=%s WHERE id=%s",
                   (log_file, id))
    if self.messages:
        log_file_name = self.get_log_file(log_file)
        level_file_name = log_file_name + self.LEVELS_SUFFIX
        # BUG FIX: the original never closed either file handle, leaking
        # descriptors and risking unflushed data; close deterministically.
        log_fileobj = codecs.open(log_file_name, "wb", "UTF-8")
        try:
            log_fileobj.writelines(
                [to_unicode(msg[1] + "\n") for msg in self.messages])
        finally:
            log_fileobj.close()
        level_fileobj = codecs.open(level_file_name, "wb", "UTF-8")
        try:
            level_fileobj.writelines(
                [to_unicode(msg[0] + "\n") for msg in self.messages])
        finally:
            level_fileobj.close()
    if handle_ta:
        db.commit()
    self.id = id
def send(self, to_recipients, cc_recipients): header = {} # Add item specific e-mail header fields. if self.message: # ID of the message. header["Message-ID"] = self.get_message_email_id(self.message["id"]) header["X-Trac-Message-ID"] = to_unicode(self.message["id"]) header["X-Trac-Discussion-URL"] = self.message["link"] # ID of replied message. if self.message["replyto"] != -1: reply_id = self.get_message_email_id(self.message["replyto"]) else: reply_id = self.get_topic_email_id(self.message["topic"]) header["In-Reply-To"] = reply_id header["References"] = reply_id elif self.topic: # ID of the message. header["Message-ID"] = self.get_topic_email_id(self.topic["id"]) header["X-Trac-Topic-ID"] = to_unicode(self.topic["id"]) header["X-Trac-Discussion-URL"] = self.topic["link"] elif self.forum: # ID of the message. header["Message-ID"] = self.get_forum_email_id(self.forum["id"]) header["X-Trac-Forum-ID"] = to_unicode(self.forum["id"]) header["X-Trac-Discussion-URL"] = self.forum["link"] else: # Should not happen. raise TracError("DiscussionPlugin internal error.") # Send e-mail. self.template = Chrome(self.env).load_template(self.template_name, method="text") self.env.log.debug("to_recipients: %s cc_recipients: %s" % (to_recipients, cc_recipients)) NotifyEmail.send(self, to_recipients, cc_recipients, header)
def _do_import(self, filename=None):
    """Import permissions from a CSV file (or stdin when `filename` is
    None).

    Each row is `<user>, <action>, [action], [...]`; only actions the
    user does not already hold are granted.  CSV or I/O problems are
    re-raised as AdminCommandError.
    """
    permsys = PermissionSystem(self.env)
    try:
        with file_or_std(filename, 'rb') as f:
            encoding = stream_encoding(f)
            # stdin uses plain '\n' regardless of platform.
            linesep = os.linesep if filename else '\n'
            reader = csv.reader(f, lineterminator=linesep)
            for row in reader:
                if len(row) < 2:
                    raise AdminCommandError(
                        _("Invalid row %(line)d. Expected <user>, "
                          "<action>, [action], [...]",
                          line=reader.line_num))
                user = to_unicode(row[0], encoding)
                actions = [to_unicode(action, encoding)
                           for action in row[1:]]
                # All-uppercase names are permission tokens, not users.
                if user.isupper():
                    raise AdminCommandError(
                        _("Invalid user %(user)s on line %(line)d: All "
                          "upper-cased tokens are reserved for permission "
                          "names.", user=user, line=reader.line_num))
                old_actions = self.get_user_perms(user)
                for action in set(actions) - set(old_actions):
                    permsys.grant_permission(user, action)
    except csv.Error as e:
        raise AdminCommandError(
            _("Cannot import from %(filename)s line %(line)d: %(error)s ",
              filename=path_to_unicode(filename or 'stdin'),
              line=reader.line_num, error=e))
    except IOError as e:
        raise AdminCommandError(
            _("Cannot import from %(filename)s: %(error)s",
              filename=path_to_unicode(filename or 'stdin'),
              error=e.strerror))
def get(self, key, default=''):
    """Return the value of the specified option.

    Valid default input is a string. Returns a string.

    Resolution order: per-instance cache, ProductSetting rows for this
    product/section, parent configurations, then the registered
    Option's declared default.  The resolved value is cached.
    """
    key = self.optionxform(key)
    cached = self._cache.get(key, _use_default)
    if cached is not _use_default:
        return cached
    name_str = self.name
    key_str = to_unicode(key)
    settings = ProductSetting.select(self.env,
                                     where={'product': self.product,
                                            'section': name_str,
                                            'option': key_str})
    if len(settings) > 0:
        value = settings[0].value
    else:
        # Walk parent configurations; the for/else falls through to the
        # registry default only when no parent provided a value.
        for parent in self.config.parents:
            value = parent[self.name].get(key, _use_default)
            if value is not _use_default:
                break
        else:
            if default is not _use_default:
                option = Option.registry.get((self.name, key))
                value = option.default if option else _use_default
            else:
                value = _use_default
    if value is _use_default:
        return default
    # Normalize falsy values to an empty unicode string.
    if not value:
        value = u''
    elif isinstance(value, basestring):
        value = to_unicode(value)
    self._cache[key] = value
    return value
def _normalize_xml_output(self, result):
    """ Normalizes and converts output (traversing it):
    1. None => ''
    2. datetime => xmlrpclib.DateTime
    3. Binary => xmlrpclib.Binary
    4. genshi.builder.Fragment|genshi.core.Markup => unicode

    Dicts are normalized in place value-by-value; lists/tuples recurse.
    """
    new_result = []
    for res in result:
        if isinstance(res, datetime.datetime):
            new_result.append(to_xmlrpc_datetime(res))
        elif isinstance(res, Binary):
            # Re-brand the Trac Binary as an xmlrpclib.Binary so the
            # marshaller serializes it natively.
            res.__class__ = xmlrpclib.Binary
            new_result.append(res)
        elif res is None or res is empty:
            new_result.append('')
        elif isinstance(res, (genshi.builder.Fragment,
                              genshi.core.Markup)):
            new_result.append(to_unicode(res))
        elif babel and isinstance(res, babel.support.LazyProxy):
            new_result.append(to_unicode(res))
        elif isinstance(res, dict):
            # Normalize each value in place (single-element unpack).
            for key, val in res.items():
                res[key], = self._normalize_xml_output([val])
            new_result.append(res)
        elif isinstance(res, list) or isinstance(res, tuple):
            new_result.append(self._normalize_xml_output(res))
        else:
            new_result.append(res)
    return new_result
def handle_commit(commit, env):
    """Pre-receive hook helper: validate that `commit` references open
    tickets.

    Aborts the push (non-zero exit) when the commit message carries no
    recognized command, references no ticket, or targets a ticket whose
    status is not in ACCEPTED_STATUSSES.
    """
    from trac.ticket import Ticket
    from trac.ticket.web_ui import TicketModule
    from trac.util.text import to_unicode
    from trac.util.datefmt import utc
    # Full commit message and the author e-mail address.
    msg = to_unicode(call_git('rev-list',
                              ['-n', '1', commit, '--pretty=medium']).rstrip())
    eml = to_unicode(call_git('rev-list',
                              ['-n', '1', commit,
                               '--pretty=format:%ae']).splitlines()[1])
    tickets = {}
    # Only the message body (after the first blank line) is scanned.
    comtkts = command_re.findall(msg.split('\n\n', 1)[1])
    if not comtkts:
        print "no 'refs' or 'closes' in commitmessage for commit %s, aborting push" % commit
        sys.exit(1)
    for cmd, tkts in comtkts:
        action = COMMANDS.get(cmd.lower())
        if action:
            for tkt_id in ticket_re.findall(tkts):
                tickets.setdefault(tkt_id, []).append(action)
        else:
            # no action specified, bad commit message!
            print "no 'refs' or 'closes' in commitmessage for commit %s, aborting push" % commit
            sys.exit(1)
    for tkt_id, actions in tickets.iteritems():
        try:
            db = env.get_db_cnx()
            ticket = Ticket(env, int(tkt_id), db)
            if not ticket['status'] in ACCEPTED_STATUSSES:
                print "commiting to non-open ticket in commit %s, aborting push" % commit
                sys.exit(2)
        except Exception, e:
            print 'Unexpected error while processing commit %s :' % commit
            print 'ticket ID %s: %s' % (tkt_id, e)
            sys.exit(3)
def exception_to_unicode(e, traceback=""):
    """Format exception `e` as `'ClassName: message'`.

    When `traceback` is truthy, the most recent traceback (minus its
    last two lines) is prepended to the message.
    """
    msg = '%s: %s' % (e.__class__.__name__, to_unicode(e))
    if not traceback:
        return msg
    from trac.util import get_last_traceback
    tb_lines = get_last_traceback().split('\n')[:-2]
    return '\n%s\n%s' % (to_unicode('\n'.join(tb_lines)), msg)
def validate_blog_post(self, req, postname, version, fields):
    """Run spam filtering on a blog post submission.

    Previews and BLOG_ADMIN users bypass filtering.  Only fields whose
    content actually changed versus the previous version are tested.
    Always returns an empty issue list; FilterSystem.test raises on
    rejection.
    """
    if 'blog-preview' in req.args:
        return []
    blog_res = Resource('blog', postname, version)
    if req.perm(blog_res).has_permission('BLOG_ADMIN'):
        return []
    # Diff against the previous version, if one exists.
    if version > 1:
        bp = BlogPost(self.env, postname, version)
        last_post_fields = bp._fetch_fields(version=version - 1)
    else:
        last_post_fields = {}
    field_names = set(fields).union(last_post_fields)
    changes = []
    for field in field_names:
        old = to_unicode(last_post_fields.get(field, ''))
        new = to_unicode(fields.get(field, ''))
        if new and old != new:
            changes.append((old, new))
    author = fields.get('author', '')
    if arity(FilterSystem.test) == 4:
        # 0.11 compatible method signature
        FilterSystem(self.env).test(req, author, changes)
    else:
        # 0.12+ compatible that adds an 'ip' argument
        FilterSystem(self.env).test(req, author, changes,
                                    req.remote_addr)
    return []
def parse_rpc_request(self, req, content_type):
    """ Parse XML-RPC requests.

    Reads Content-Length bytes from the request body and unmarshals
    them with xmlrpclib; any parse failure is reported to the caller
    as XML-RPC fault -32700 (parse error).

    NOTE(review): on success `args`/`method` are discarded and the
    function implicitly returns None — this looks truncated; the
    parsed request presumably should be returned.  Verify against the
    complete file.
    """
    try:
        args, method = xmlrpclib.loads(
            req.read(int(req.get_header("Content-Length"))))
    except Exception, e:
        self.log.debug("RPC(xml) parse error: %s", to_unicode(e))
        raise ProtocolException(xmlrpclib.Fault(-32700, to_unicode(e)))
def get_changes(self):
    """Yield `(path, kind, action, base_path, base_rev)` tuples for all
    files touched by this changeset, across every parent commit.

    Duplicate paths (possible with merge commits) are emitted only
    once.
    """
    paths_seen = set()
    for parent in self.props.get('parent', [None]):
        for mode1, mode2, obj1, obj2, action, path1, path2 in \
                self.git.diff_tree(parent, self.rev, find_renames=True):
            # For renames path2 is the new name; otherwise path1.
            path = path2 or path1
            p_path, p_rev = path1, parent
            # Git mode 04xxxx marks a tree (directory) entry.
            kind = Node.FILE
            if mode2.startswith('04') or mode1.startswith('04'):
                kind = Node.DIRECTORY
            action = GitChangeset.action_map[action[0]]
            if action == Changeset.ADD:
                # Added nodes have no base path/revision.
                p_path = ''
                p_rev = None
            # CachedRepository expects unique (rev, path, change_type) key
            # this is only an issue in case of merges where files required editing
            if path in paths_seen:
                continue
            paths_seen.add(path)
            yield (to_unicode(path), kind, action, to_unicode(p_path),
                   p_rev)
def _decode_content(cls, content):
    """Revert the encoding done by `_encode_content` and return an
    unicode string"""
    try:
        # Normal path: content was stored base64-encoded.
        return to_unicode(content.decode('base64'))
    except (UnicodeEncodeError, binascii.Error):
        # cope with legacy content (stored before base64 encoding)
        return to_unicode(content)
def _combine_changes(self, changes, sep='\n\n'):
    """Join the textual content of each `(old, new)` change pair.

    When a change has prior content, only the lines added relative to
    it are kept; otherwise the whole new content is used.  The pieces
    are concatenated with `sep`.
    """
    parts = []
    for old, new in changes:
        text = to_unicode(new)
        if old:
            text = self._get_added_lines(to_unicode(old), text)
        parts.append(text)
    return sep.join(parts)
def _gen_ticket_entry(self, t, a_class=''):
    """Build the full and compact markup fragments for one ticket.

    Returns `(ticket, ticket_short)`: a div with a sanitized tooltip
    preview of the description plus an hours badge, and a short span
    variant.  CSS classes encode open/closed state and priority.
    """
    id = str(t.get('id'))
    status = t.get('status')
    priority = t.get('priority')
    hours = t.get(self.hours_field_name)
    summary = to_unicode(t.get('summary'))
    # Tooltip source is capped at 1024 chars of the description.
    owner = to_unicode(t.get('owner'))
    description = to_unicode(t.get('description')[:1024])
    url = t.get('href')
    if status == 'closed':
        a_class = a_class + 'closed'
    else:
        a_class = a_class + 'open'
    a_class += " ticket priority-" + priority
    markup = format_to_html(self.env, self.ref.context, description)
    # Escape, if requested
    if self.sanitize is True:
        try:
            description = HTMLParser(StringIO(markup)
                                     ).parse() | HTMLSanitizer()
        except ParseError:
            description = escape(markup)
    else:
        description = markup
    # Replace tags that destruct tooltips too much
    desc = self.end_RE.sub(']', Markup(description))
    desc = self.del_RE.sub('', desc)
    # need 2nd run after purging newline in table cells in 1st run
    desc = self.del_RE.sub('', desc)
    desc = self.item_RE.sub('X', desc)
    desc = self.tab_RE.sub('[|||]', desc)
    description = self.open_RE.sub('[', desc)
    tip = tag.span(Markup(description))
    ticket = '#' + id
    ticket = tag.a(ticket, href=url)
    ticket(tip, class_='tip', target='_blank')
    ticket = tag.div(ticket)
    ticket(class_=a_class, align='left', **{"data-ticketid": id})
    # fix stripping of regular leading space in IE
    blank = ' '
    ticket(Markup(blank), summary, ' (', owner, ')')
    ticket(tag.span(str(hours) + "h", class_="hours"))
    summary = tag(summary, ' (', owner, ')')
    ticket_short = '#' + id
    ticket_short = tag.a(ticket_short, href=url)
    ticket_short(target='_blank', title_=summary)
    ticket_short = tag.span(ticket_short)
    ticket_short(class_=a_class)
    return ticket, ticket_short
req.hdf['title'] = e.reason or 'Error' req.hdf['error'] = { 'title': e.reason or 'Error', 'type': 'TracError', 'message': e.message } try: req.send_error(sys.exc_info(), status=e.code) except RequestDone: return [] except Exception, e: env.log.exception(e) if req.hdf: req.hdf['title'] = to_unicode(e) or 'Error' req.hdf['error'] = { 'title': to_unicode(e) or 'Error', 'type': 'internal', 'traceback': get_last_traceback() } try: req.send_error(sys.exc_info(), status=500) except RequestDone: return [] def send_project_index(environ, start_response, parent_dir=None, env_paths=None):
def __init__(self, config, name):
    """Bind this section object to its parent `config` under the
    unicode-normalized section `name`, starting with empty override
    and value caches."""
    self.name = to_unicode(name)
    self.config = config
    self._cache = {}
    self.overridden = {}
def __init__(self, git_dir, log, git_bin='git', git_fs_encoding=None,
             rev_cache=None):
    """Initialize PyGit.Storage instance

    `git_dir`: path to .git folder; this setting is not affected by
            the `git_fs_encoding` setting

    `log`: logger instance

    `git_bin`: path to executable; this setting is not affected by
            the `git_fs_encoding` setting

    `git_fs_encoding`: encoding used for paths stored in git
            repository; if `None`, no implicit decoding/encoding
            to/from unicode objects is performed, and bytestrings are
            returned instead

    :raises GitError: when `git_dir` is not a git repository or its
            HEAD file is unreadable
    """
    self.logger = log
    self.commit_encoding = None
    # caches
    self.__rev_cache = rev_cache or self.RevCache.empty()
    self.__rev_cache_refresh = True
    self.__rev_cache_lock = Lock()
    # cache the last 200 commit messages
    self.__commit_msg_cache = SizedDict(200)
    self.__commit_msg_lock = Lock()
    self.__cat_file_pipe = None
    self.__cat_file_pipe_lock = Lock()
    if git_fs_encoding is not None:
        # validate encoding name
        codecs.lookup(git_fs_encoding)
        # setup conversion functions
        self._fs_to_unicode = lambda s: s.decode(git_fs_encoding)
        self._fs_from_unicode = lambda s: s.encode(git_fs_encoding)
    else:
        # pass bytestrings as-is w/o any conversion
        self._fs_to_unicode = self._fs_from_unicode = lambda s: s
    # simple sanity checking: accept either a bare repo dir or a
    # worktree containing a .git subdirectory
    __git_file_path = partial(os.path.join, git_dir)
    control_files = ['HEAD', 'objects', 'refs']
    control_files_exist = \
        lambda p: all(map(os.path.exists, map(p, control_files)))
    if not control_files_exist(__git_file_path):
        __git_file_path = partial(os.path.join, git_dir, '.git')
        if os.path.exists(__git_file_path()) and \
                control_files_exist(__git_file_path):
            git_dir = __git_file_path()
        else:
            self.logger.error("GIT control files missing in '%s'"
                              % git_dir)
            raise GitError("GIT control files not found, maybe wrong "
                           "directory?")
    # at least, check that the HEAD file is readable
    head_file = os.path.join(git_dir, 'HEAD')
    try:
        with open(head_file, 'rb'):
            pass
    except IOError as e:
        raise GitError(
            "Make sure the Git repository '%s' is readable: %s"
            % (git_dir, to_unicode(e)))
    self.repo = GitCore(git_dir, git_bin=git_bin, log=log)
    self.repo_path = git_dir
    self.logger.debug("PyGIT.Storage instance for '%s' is constructed",
                      git_dir)
def do_initenv(self, line):
    """Create and initialize a new Trac environment at `self.envname`.

    `line` carries the command arguments: optional --inherit=/--config=
    flags, then either nothing (interactive prompt), `<project> <db>`,
    or `<project> <db> <repos_type> <repos_dir>`.  Returns 2 on any
    failure; exits the process if environment creation itself fails.
    """
    def initenv_error(msg):
        # Uniform error banner for every failure path below.
        printerr(_("Initenv for '%(env)s' failed.", env=self.envname),
                 "\n%s" % msg)
    if self.env_check():
        initenv_error(_("Does an environment already exist?"))
        return 2
    if os.path.exists(self.envname) and os.listdir(self.envname):
        initenv_error(_("Directory exists and is not empty."))
        return 2
    if not os.path.exists(os.path.dirname(self.envname)):
        initenv_error(
            _(
                "Base directory '%(env)s' does not exist. Please "
                "create it manually and retry.",
                env=os.path.dirname(self.envname)))
        return 2
    arg = self.arg_tokenize(line)
    inherit_paths = []
    config_file_path = None
    # Pop --inherit=/--config= flags out of the positional arguments.
    i = 0
    while i < len(arg):
        item = arg[i]
        if item.startswith('--inherit='):
            inherit_paths.append(arg.pop(i)[10:])
        elif item.startswith('--config='):
            config_file_path = arg.pop(i)[9:]
        else:
            i += 1
    config = None
    if config_file_path:
        if not os.path.exists(config_file_path):
            initenv_error(
                _(
                    "The file specified in the --config argument "
                    "does not exist: %(path)s.",
                    path=config_file_path))
            return 2
        try:
            config = Configuration(config_file_path)
        except TracError as e:
            initenv_error(e)
            return 2
    arg = arg or ['']  # Reset to usual empty in case we popped the only one
    project_name = None
    db_str = None
    repository_type = None
    repository_dir = None
    if len(arg) == 1 and not arg[0]:
        # No positional args: prompt interactively.
        project_name, db_str = self.get_initenv_args()
    elif len(arg) == 2:
        project_name, db_str = arg
    elif len(arg) == 4:
        project_name, db_str, repository_type, repository_dir = arg
    else:
        initenv_error('Wrong number of arguments: %d' % len(arg))
        return 2
    try:
        printout(_("Creating and Initializing Project"))
        options = []
        if config:
            # Seed the new environment with the --config file contents.
            for section in config.sections(defaults=False):
                options.extend(
                    (section, option, value)
                    for option, value in config.options(section))
        options.extend([
            ('project', 'name', project_name),
            ('trac', 'database', db_str),
        ])
        def add_nav_order_options(section, default):
            # Assign sequential .order values for navigation entries.
            for i, name in enumerate(default, 1):
                options.append((section, name + '.order', float(i)))
        add_nav_order_options('mainnav', default_mainnav_order)
        add_nav_order_options('metanav', default_metanav_order)
        if repository_dir:
            options.extend([
                ('repositories', '.type', repository_type),
                ('repositories', '.dir', repository_dir),
            ])
        if inherit_paths:
            options.append(
                ('inherit', 'file', ",\n      ".join(inherit_paths)))
        try:
            self.__env = Environment(self.envname, create=True,
                                     options=options)
        except Exception as e:
            initenv_error(_('Failed to create environment.'))
            printerr(e)
            traceback.print_exc()
            sys.exit(1)
        # Add a few default wiki pages
        printout(_(" Installing default wiki pages"))
        pages_dir = pkg_resources.resource_filename(
            'trac.wiki', 'default-pages')
        WikiAdmin(self.__env).load_pages(pages_dir)
        if repository_dir:
            try:
                repos = RepositoryManager(self.__env).get_repository('')
                if repos:
                    printout(_(" Indexing default repository"))
                    repos.sync(self._resync_feedback)
            except TracError as e:
                # Indexing failure is non-fatal; warn and continue.
                printerr(_("""
---------------------------------------------------------------------
Warning: couldn't index the default repository.

This can happen for a variety of reasons: wrong repository type,
no appropriate third party library for this repository type,
no actual repository at the specified repository path...

You can nevertheless start using your Trac environment, but
you'll need to check again your trac.ini file and the [trac]
repository_type and repository_path settings.
"""))
    except Exception as e:
        initenv_error(to_unicode(e))
        traceback.print_exc()
        return 2
    printout(_("""
---------------------------------------------------------------------
Project environment for '%(project_name)s' created.

You may now configure the environment by editing the file:

  %(config_path)s

If you'd like to take this new project environment for a test drive,
try running the Trac standalone web server `tracd`:

  tracd --port 8000 %(project_path)s

Then point your browser to http://localhost:8000/%(project_dir)s.
There you can also browse the documentation for your installed
version of Trac, including information on further setup (such as
deploying Trac to a real web server).

The latest documentation can also always be found on the project
website:

  http://trac.edgewall.org/

Congratulations!
""", project_name=project_name, project_path=self.envname,
               project_dir=os.path.basename(self.envname),
               config_path=os.path.join(self.envname, 'conf',
                                        'trac.ini')))
def _format_link(self, formatter, ns, match, label, fullmatch=None):
    """Render a TracLinks `log:` link (namespaces `log`, `log1`, `log2`).

    :param formatter: wiki formatter providing context, perms and `href`
    :param ns: the matched namespace (`log1` comes from the regex-based
               syntax with named groups; `log`/`log2` from plain links)
    :param match: the raw link target text
    :param label: the link label to display
    :param fullmatch: regex match object (used only for `log1`)
    """
    if ns == 'log1':
        groups = fullmatch.groupdict()
        it_log = groups.get('it_log')
        revs = groups.get('log_revs')
        path = groups.get('log_path') or '/'
        target = '%s%s@%s' % (it_log, path, revs)
        # prepending it_log is needed, as the helper expects it there
        intertrac = formatter.shorthand_intertrac_helper(
            'log', target, label, fullmatch)
        if intertrac:
            return intertrac
        path, query, fragment = formatter.split_link(path)
    else:
        assert ns in ('log', 'log2')
        if ns == 'log':
            match, query, fragment = formatter.split_link(match)
        else:
            query = fragment = ''
            # log2 syntax is "rev/path"; swap to "path/rev" order
            match = ''.join(reversed(match.split('/', 1)))
        path = match
        revs = ''
        if self.LOG_LINK_RE.match(match):
            # For each separator present, its index; `False` if absent.
            # The smallest real index marks where the rev spec starts.
            indexes = [sep in match and match.index(sep) for sep in ':@']
            idx = min([i for i in indexes if i is not False])
            path, revs = match[:idx], match[idx + 1:]

    rm = RepositoryManager(self.env)
    try:
        reponame, repos, path = rm.get_repository_by_path(path)
        if not reponame:
            # Fall back to the default repository for this context.
            reponame = rm.get_default_repository(formatter.context)
            if reponame is not None:
                repos = rm.get_repository(reponame)

        if repos:
            revranges = None
            # A rev spec containing ':', '-' or ',' is a range list.
            if any(c for c in ':-,' if c in revs):
                revranges = self._normalize_ranges(repos, path, revs)
                revs = None
            if 'LOG_VIEW' in formatter.perm:
                if revranges:
                    href = formatter.href.log(repos.reponame or None,
                                              path or '/',
                                              revs=str(revranges))
                else:
                    try:
                        rev = repos.normalize_rev(revs)
                    except NoSuchChangeset:
                        rev = None
                    href = formatter.href.log(repos.reponame or None,
                                              path or '/', rev=rev)
                if query and (revranges or revs):
                    query = '&' + query[1:]
                return tag.a(label, class_='source',
                             href=href + query + fragment)
            errmsg = _("No permission to view change log")
        elif reponame:
            errmsg = _("Repository '%(repo)s' not found", repo=reponame)
        else:
            errmsg = _("No default repository defined")
    except TracError, e:
        errmsg = to_unicode(e)
    # NOTE(review): errmsg is assigned but the error-rendering return
    # statement appears to lie outside this chunk — confirm upstream.
def items(self, section, raw=False, vars=None):
    """Return all (name, value) pairs of `section`, decoded to unicode."""
    raw_pairs = ConfigParser.items(self, to_utf8(section), raw, vars)
    return [(to_unicode(name), to_unicode(value))
            for name, value in raw_pairs]
if self.showErrors: textlines.extend(errors) textlines.append(line) textlines.extend(srciter) # Determine our destination context and load the current state. self.context = tuple([realm, resource_id, self.subcontext is not None and \ self.subcontext or '']) state = self.macro.get_tracform_state(self.context) self.formatter.env.log.debug( 'TracForms state = ' + (state is not None and state or '')) for name, value in json.loads(state or '{}').iteritems(): self.env[name] = value self.formatter.env.log.debug( name + ' = ' + to_unicode(value)) if self.subcontext is not None: self.env[self.subcontext + ':' + name] = value self.sorted_env = None (self.form_id, self.form_realm, self.form_resource_id, self.form_subcontext, self.form_updater, self.form_updated_on, self.form_keep_history, self.form_track_fields) = \ self.macro.get_tracform_meta(self.context) self.form_id = self.form_id is not None and int(self.form_id) or None # Wiki-ize the text, this will allow other macros to execute after # which we can do our own replacements within whatever formatted # junk is left over. text = self.wiki('\n'.join(textlines)) # Keep replacing tf: sections until there are no more
def test_from_exception_using_str_args(self):
    """An exception built from UTF-8 bytes round-trips to unicode."""
    message = u'Das Ger\xe4t oder die Ressource ist belegt'
    try:
        raise ValueError(message.encode('utf-8'))
    except ValueError as e:
        self.assertEqual(message, to_unicode(e))
def test_from_exception_using_unicode_args(self):
    """An exception carrying a unicode message converts verbatim."""
    value = u'\uB144'
    try:
        raise ValueError('%s is not a number.' % value)
    except ValueError as e:
        self.assertEqual(u'\uB144 is not a number.', to_unicode(e))
def render_admin_panel(self, req, cat, page, version):
    """Handle the downloads admin panel.

    POST with submit=Add stores a new uploaded file; submit=Remove
    deletes the selected downloads.  Otherwise, if `sel` identifies a
    download, the stored file is streamed back to the browser; in every
    remaining case the list of downloads is rendered.

    :return: `(template, data, content_type)` triple for Trac.
    """
    data = {}
    if req.method == "POST":
        submit = req.args.get('submit').strip()
        if submit == 'Add':
            # Extract the uploaded file from the request.
            file, filename, file_size = self.get_file_from_req(req)
            download = {
                'file': filename,
                'description': req.args.get('description'),
                'size': file_size,
                'time': to_timestamp(datetime.datetime.now(utc)),
                'count': 0,
                'author': req.authname
            }
            # Use lazy %s formatting: the previous
            # `debug("msg:", value)` form raised "not all arguments
            # converted" for the non-string size/time values.
            self.log.debug("FileUpload filename: %s", download['file'])
            self.log.debug("FileUpload description: %s",
                           download['description'])
            self.log.debug("FileUpload size: %s", download['size'])
            self.log.debug("FileUpload time: %s", download['time'])
            self.log.debug("FileUpload author: %s", download['author'])
            # Upload file to DB and file storage.
            add_download(download, file)
            file.close()
            add_notice(req, 'Download has been added.')
        elif submit == 'Remove':
            ids = req.args.getlist('sels')
            if ids:
                for id in ids:
                    # Parameterized query; int() rejects bad input.
                    self.env.db_transaction(
                        "DELETE FROM download WHERE id=%s", (int(id),))
                add_notice(req, 'Download has been deleted.')
    else:
        # `sel` arrives as a string: normalize to int so the `> 0`
        # comparison is meaningful (on Python 2, str > int is always
        # true) and so it cannot inject into SQL below.
        try:
            download_id = int(req.args.get('sel') or 0)
        except (TypeError, ValueError):
            download_id = 0
        if download_id > 0:
            cursor = self.env.db_query(
                "SELECT file, description FROM download WHERE id=%s",
                (download_id,))
            if len(cursor) > 0:
                fn = cursor[0][0]
                description = cursor[0][1]
            else:
                raise TracError("File not found.")
            # Get download file path (per-download subdirectory).
            filename = os.path.basename(fn)
            filepath = os.path.join(self.path, to_unicode(download_id),
                                    filename)
            filepath = os.path.normpath(filepath)
            # Increase downloads count.
            self.env.db_transaction(
                "UPDATE download SET count=count+1 WHERE id=%s",
                (download_id,))
            # Guess mime type from the first 1000 bytes of content.
            with open(filepath.encode('utf-8'), 'r') as fileobj:
                file_data = fileobj.read(1000)
            mimeview = Mimeview(self.env)
            mime_type = mimeview.get_mimetype(filepath, file_data)
            if not mime_type:
                mime_type = 'application/octet-stream'
            if 'charset=' not in mime_type:
                charset = mimeview.get_charset(file_data, mime_type)
                mime_type = mime_type + '; charset=' + charset
            # Return uploaded file to request.
            req.send_header(
                'Content-Disposition',
                'attachment;filename="%s"' % os.path.normpath(fn))
            req.send_header('Content-Description', description)
            req.send_file(filepath.encode('utf-8'), mime_type)
    cursor = self.env.db_query(
        "SELECT id, file, description, size, time, author "
        "FROM download ORDER BY id")
    data['downloads'] = [(row[0], row[1], row[2]) for row in cursor]
    return ('admin_download.html', data, None)
def handle_ref(old, new, ref, env):
    """Handle all the new commits to the ref.

    :param old: previous SHA-1 of the ref ('0'*40 for a new ref)
    :param new: new SHA-1 of the ref
    :param ref: full refname being updated
    :param env: Trac environment used for DB access and ticket posting
    """
    from trac.util.text import to_unicode
    if VERBOSE:
        print ref
    # Regex the ticket number out of the refname
    match = ticket_from_ref_re.search(ref)
    tkt_id_from_ref = DEFAULT_POST_RECEIVE_TKT_ID
    if match:
        tkt_id_from_ref = int(match.group(1))
    if VERBOSE:
        print "Parsed ticket from refname: %s" % tkt_id_from_ref

    # Get the list of hashs for commits in the changeset.
    # A brand-new ref (old is all zeros) lists everything reachable
    # from `new`; otherwise only commits not reachable from `old`.
    args = (old == '0' * 40) and [new] or [new, '^' + old]
    pending_commits = call_git('rev-list', args).splitlines()
    if VERBOSE:
        print "pending commits: %s" % pending_commits
    if not pending_commits:
        return

    # Get the subset of pending commits that are already seen.
    db = env.get_db_cnx()
    cursor = db.cursor()
    try:
        cursor.execute('SELECT sha1 FROM git_seen WHERE sha1 IN (%s)' %
                       ', '.join(['%s'] * len(pending_commits)),
                       pending_commits)
        seen_commits = map(itemgetter(0), cursor.fetchall())
    except psycopg2.ProgrammingError:
        # almost definitely due to git_seen missing
        cursor.close()
        db.close()
        # get a new cursor
        db = env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute('CREATE TABLE git_seen (sha1 VARCHAR(40));')
        seen_commits = []

    ticket_msgs = defaultdict(list)
    # Iterate over commits, starting with earliest
    for commit in reversed(pending_commits):
        # If the commit was seen already, we do not repost it.
        if commit in seen_commits and not REPOST_SEEN:
            continue
        remember_commit(commit, db, cursor)
        # Get message from commit
        msg = get_commit_message(commit, env)
        # First check for explicit "Refs #999"-style ticket refs.
        matches = ticket_from_explicit_refs_re.findall(msg)
        for ticket_id in matches:
            ticket_msgs[ticket_id].append(to_unicode(msg))
        # If a merge commit, try to identify origin ticket.
        match = ticket_from_msg_re.search(msg)
        if match:
            source_tkt_id = int(match.group(1))
            target_tkt_id = match.group(2)
            ticket_msgs[source_tkt_id].append(to_unicode(msg))
            if target_tkt_id:
                ticket_msgs[int(target_tkt_id)].append(to_unicode(msg))
        else:
            # Otherwise, we comment on the ticket corresponding to the ref
            ticket_msgs[tkt_id_from_ref].append(to_unicode(msg))

    # the wire (hook) hears all
    author = "the wire"
    try:
        if POST_COMMENT:
            for tkt_id, commit_msgs in ticket_msgs.items():
                print "Posting to ticket #%s" % tkt_id
                post_to_ticket('\n----\n'.join(commit_msgs),
                               author, tkt_id, env)
    except Exception, e:
        msg = 'Unexpected error processing commit %s: %s' % (commit[:7], e)
        print >>sys.stderr, msg
        db.rollback()
def send_project_index(environ, start_response, parent_dir=None,
                       env_paths=None):
    """Render the project index page listing all available environments.

    :param environ: WSGI environ dict; honors `trac.env_index_template`
                    (custom template path) and `trac.template_vars`
                    (comma-separated `key=value` pairs made available to
                    the template).
    :param start_response: WSGI start_response callable
    """
    req = Request(environ, start_response)

    loadpaths = [pkg_resources.resource_filename('trac', 'templates')]
    if req.environ.get('trac.env_index_template'):
        env_index_template = req.environ['trac.env_index_template']
        tmpl_path, template = os.path.split(env_index_template)
        loadpaths.insert(0, tmpl_path)
    else:
        template = 'index.html'

    data = {'trac': {'version': TRAC_VERSION,
                     'time': user_time(req, format_datetime)},
            'req': req}
    if req.environ.get('trac.template_vars'):
        for pair in req.environ['trac.template_vars'].split(','):
            # Split on the first '=' only, so values may contain '='.
            key, val = pair.split('=', 1)
            data[key] = val
    try:
        href = Href(req.base_path)
        projects = []
        for env_name, env_path in get_environments(environ).items():
            try:
                env = open_environment(
                    env_path, use_cache=not environ['wsgi.run_once'])
                proj = {'env': env,
                        'name': env.project_name,
                        'description': env.project_description,
                        'href': href(env_name)}
            except Exception as e:
                # Show the failure inline instead of breaking the index.
                proj = {'name': env_name, 'description': to_unicode(e)}
            projects.append(proj)
        # Case-insensitive sort by project name (key= instead of the
        # Python-2-only cmp-based form; same resulting order).
        projects.sort(key=lambda project: project['name'].lower())
        data['projects'] = projects

        loader = TemplateLoader(loadpaths, variable_lookup='lenient',
                                default_encoding='utf-8')
        tmpl = loader.load(template)
        stream = tmpl.generate(**data)
        if template.endswith('.xml'):
            output = stream.render('xml')
            req.send(output, 'text/xml')
        else:
            output = stream.render('xhtml', doctype=DocType.XHTML_STRICT,
                                   encoding='utf-8')
            req.send(output, 'text/html')
    except RequestDone:
        pass
def get_topic_cc_recipients(self, cursor, id):
    """Return the (hard-coded) list of CC recipients for a topic."""
    return [to_unicode("*****@*****.**")]
def get_list(self, realm, wl, req, fields=None):
    """Build the watchlist display data for all watched tickets.

    :param realm: resource realm (ticket provider)
    :param wl: watchlist component giving watched resources and options
    :param req: current request (authname, permissions, timezone, ...)
    :param fields: iterable of field names to include; defaults to
                   `self.default_fields['ticket']`
    :return: `(ticketlist, extradict)` — one rendering dict per watched
             ticket plus aggregate min/max time information
    """
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    context = Context.from_request(req)
    locale = getattr(req, 'locale', LC_TIME)
    ticketlist = []
    extradict = {}

    if not fields:
        fields = set(self.default_fields['ticket'])
    else:
        fields = set(fields)

    # Running min/max accumulators for the requested time columns.
    if 'changetime' in fields:
        max_changetime = datetime(1970, 1, 1, tzinfo=utc)
        min_changetime = datetime.now(utc)
    if 'time' in fields:
        max_time = datetime(1970, 1, 1, tzinfo=utc)
        min_time = datetime.now(utc)

    for sid, last_visit in wl.get_watched_resources(
            'ticket', req.authname):
        ticketdict = {}
        try:
            ticket = Ticket(self.env, sid, db)
            exists = ticket.exists
        except:
            exists = False

        if not exists:
            # Watched ticket no longer exists: emit placeholder values.
            ticketdict['deleted'] = True
            if 'id' in fields:
                ticketdict['id'] = sid
                ticketdict['ID'] = '#' + sid
            if 'author' in fields:
                ticketdict['author'] = '?'
            if 'changetime' in fields:
                ticketdict['changedsincelastvisit'] = 1
                ticketdict['changetime'] = '?'
                ticketdict['ichangetime'] = 0
            if 'time' in fields:
                ticketdict['time'] = '?'
                ticketdict['itime'] = 0
            if 'comment' in fields:
                ticketdict['comment'] = tag.strong(t_("deleted"),
                                                   class_='deleted')
            if 'notify' in fields:
                ticketdict['notify'] = wl.is_notify(req, 'ticket', sid)
            if 'description' in fields:
                ticketdict['description'] = ''
            if 'owner' in fields:
                ticketdict['owner'] = ''
            if 'reporter' in fields:
                ticketdict['reporter'] = ''
            ticketlist.append(ticketdict)
            continue

        # Obfuscate e-mail addresses unless the user may see them.
        render_elt = lambda x: x
        if not (Chrome(self.env).show_email_addresses or \
                'EMAIL_VIEW' in req.perm(ticket.resource)):
            render_elt = obfuscate_email_address

        # Copy all requested fields from ticket
        if fields:
            for f in fields:
                ticketdict[f] = ticket.values.get(f, u'')
        else:
            ticketdict = ticket.values.copy()

        changetime = ticket.time_changed
        if wl.options['attachment_changes']:
            # Attachments may be newer than the last field change.
            for attachment in Attachment.select(self.env, 'ticket',
                                                sid, db):
                if attachment.date > changetime:
                    changetime = attachment.date
        if 'attachment' in fields:
            attachments = []
            for attachment in Attachment.select(self.env, 'ticket',
                                                sid, db):
                wikitext = u'[attachment:"' + u':'.join([
                    attachment.filename, 'ticket', sid
                ]) + u'" ' + attachment.filename + u']'
                attachments.extend([
                    tag(', '),
                    format_to_oneliner(self.env, context, wikitext,
                                       shorten=False)
                ])
            if attachments:
                # Drop the leading separator (list was built with a
                # separator before each entry).
                attachments.reverse()
                attachments.pop()
            ticketdict['attachment'] = moreless(attachments, 5)

        # Changes are special. Comment, commentnum and last author are
        # included in them.
        if 'changes' in fields or 'author' in fields or \
                'comment' in fields or 'commentnum' in fields:
            changes = []
            # If there are now changes the reporter is the last author
            author = ticket.values['reporter']
            commentnum = u"0"
            comment = u""
            want_changes = 'changes' in fields
            for date, cauthor, field, oldvalue, newvalue, permanent in \
                    ticket.get_changelog(changetime, db):
                author = cauthor
                if field == 'comment':
                    if 'commentnum' in fields:
                        ticketdict['commentnum'] = to_unicode(oldvalue)
                    if 'comment' in fields:
                        comment = to_unicode(newvalue)
                        comment = moreless(comment, 200)
                        ticketdict['comment'] = comment
                    if not want_changes:
                        break
                else:
                    if want_changes:
                        label = self.fields['ticket'].get(field, u'')
                        if label:
                            changes.extend([
                                tag(
                                    tag.strong(label), ' ',
                                    render_property_diff(
                                        self.env, req, ticket, field,
                                        oldvalue, newvalue)),
                                tag('; ')
                            ])
            if want_changes:
                # Remove the last tag('; '):
                if changes:
                    changes.pop()
                changes = moreless(changes, 5)
                ticketdict['changes'] = tag(changes)

        if 'id' in fields:
            ticketdict['id'] = sid
            ticketdict['ID'] = format_to_oneliner(self.env, context,
                                                  '#' + sid, shorten=True)
        if 'cc' in fields:
            if render_elt == obfuscate_email_address:
                ticketdict['cc'] = ', '.join(
                    [render_elt(c)
                     for c in ticketdict['cc'].split(', ')])
        if 'author' in fields:
            ticketdict['author'] = render_elt(author)
        if 'changetime' in fields:
            ichangetime = to_timestamp(changetime)
            ticketdict.update(
                changetime=format_datetime(changetime, locale=locale,
                                           tzinfo=req.tz),
                ichangetime=ichangetime,
                changedsincelastvisit=(last_visit < ichangetime
                                       and 1 or 0),
                changetime_delta=pretty_timedelta(changetime),
                changetime_link=req.href.timeline(
                    precision='seconds',
                    from_=trac_format_datetime(changetime, 'iso8601',
                                               tzinfo=req.tz)))
            if changetime > max_changetime:
                max_changetime = changetime
            if changetime < min_changetime:
                min_changetime = changetime
        if 'time' in fields:
            time = ticket.time_created
            ticketdict.update(
                time=format_datetime(time, locale=locale,
                                     tzinfo=req.tz),
                itime=to_timestamp(time),
                time_delta=pretty_timedelta(time),
                time_link=req.href.timeline(
                    precision='seconds',
                    from_=trac_format_datetime(
                        time, 'iso8601', tzinfo=req.tz)))
            if time > max_time:
                max_time = time
            if time < min_time:
                min_time = time
        if 'description' in fields:
            description = ticket.values['description']
            description = moreless(description, 200)
            ticketdict['description'] = description
        if 'notify' in fields:
            ticketdict['notify'] = wl.is_notify(req, 'ticket', sid)
        if 'owner' in fields:
            ticketdict['owner'] = render_elt(ticket.values['owner'])
        if 'reporter' in fields:
            ticketdict['reporter'] = render_elt(
                ticket.values['reporter'])
        if 'tags' in fields and self.tagsystem:
            tags = []
            for t in self.tagsystem.get_tags(req,
                                             Resource('ticket', sid)):
                tags.extend(
                    [tag.a(t, href=req.href('tags', q=t)), tag(', ')])
            if tags:
                tags.pop()
            ticketdict['tags'] = moreless(tags, 10)
        ticketlist.append(ticketdict)

    if 'changetime' in fields:
        extradict['max_changetime'] = format_datetime(max_changetime,
                                                      locale=locale,
                                                      tzinfo=req.tz)
        extradict['min_changetime'] = format_datetime(min_changetime,
                                                      locale=locale,
                                                      tzinfo=req.tz)
    if 'time' in fields:
        extradict['max_time'] = format_datetime(max_time, locale=locale,
                                                tzinfo=req.tz)
        extradict['min_time'] = format_datetime(min_time, locale=locale,
                                                tzinfo=req.tz)

    return ticketlist, extradict
def get(self, section, option, raw=False, vars=None):
    """Fetch `option` from `section`, decoding the value to unicode."""
    raw_value = ConfigParser.get(self, to_utf8(section),
                                 to_utf8(option), raw, vars)
    return to_unicode(raw_value)
args.extend(['-f', dest_file, db_name]) environ = os.environ.copy() if 'password' in db_prop: environ['PGPASSWORD'] = str(db_prop['password']) try: p = Popen(args, env=environ, stderr=PIPE, close_fds=close_fds) except OSError, e: raise TracError( _("Unable to run %(path)s: %(msg)s", path=self.pg_dump_path, msg=exception_to_unicode(e))) errmsg = p.communicate()[1] if p.returncode != 0: raise TracError( _("pg_dump failed: %(msg)s", msg=to_unicode(errmsg.strip()))) if not os.path.exists(dest_file): raise TracError(_("No destination file created")) return dest_file class PostgreSQLConnection(ConnectionWrapper): """Connection wrapper for PostgreSQL.""" poolable = True def __init__(self, path, log=None, user=None, password=None,
def _getdoc(option_or_section):
    """Return the translated docstring of an option or section, if any."""
    doc = to_unicode(option_or_section.__doc__)
    if not doc:
        return doc
    return dgettext(option_or_section.doc_domain, doc,
                    **(option_or_section.doc_args or {}))
def _to_unicode(self, iterable):
    """Yield each line of `iterable` decoded with the configured charset."""
    charset = self.charset
    for raw_line in iterable:
        yield to_unicode(raw_line, charset)
def save_milestone(self, req, milestone):
    """Validate and persist milestone changes from the request.

    :param req: current request carrying the form arguments
    :param milestone: the `Milestone` model being edited (or created)
    :return: True if saved; False if validation warnings were issued
    """
    # Instead of raising one single error, check all the constraints
    # and let the user fix them by going back to edit mode and showing
    # the warnings
    warnings = []

    def warn(msg):
        add_warning(req, msg)
        warnings.append(msg)

    milestone.description = req.args.get('description', '')

    if 'due' in req.args:
        duedate = req.args.get('duedate')
        milestone.due = user_time(req, parse_date, duedate,
                                  hint='datetime') \
                        if duedate else None
    else:
        milestone.due = None

    # -- check completed date
    if 'completed' in req.args:
        completed = req.args.get('completeddate', '')
        completed = user_time(req, parse_date, completed,
                              hint='datetime') if completed else None
        if completed and completed > datetime_now(utc):
            warn(_("Completion date may not be in the future"))
    else:
        completed = None
    milestone.completed = completed

    # -- check the name
    # If the name has changed, check that the milestone doesn't already
    # exist
    # FIXME: the whole .exists business needs to be clarified
    #        (#4130) and should behave like a WikiPage does in
    #        this respect.
    new_name = req.args.get('name')
    try:
        new_milestone = Milestone(self.env, new_name)
    except ResourceNotFound:
        # Name is free: accept the rename.
        milestone.name = new_name
    else:
        if new_milestone.name != milestone.name:
            if new_milestone.name:
                warn(_('Milestone "%(name)s" already exists, please '
                       'choose another name.', name=new_milestone.name))
            else:
                warn(_("You must provide a name for the milestone."))

    if warnings:
        return False

    # -- actually save changes
    if milestone.exists:
        milestone.update(author=req.authname)
        if completed and 'retarget' in req.args:
            # Move the still-open tickets to the chosen target milestone
            # and notify about the batch change.
            comment = req.args.get('comment', '')
            retarget_to = req.args.get('target') or None
            retargeted_tickets = \
                milestone.move_tickets(retarget_to, req.authname,
                                       comment, exclude_closed=True)
            add_notice(req, _('The open tickets associated with '
                              'milestone "%(name)s" have been retargeted '
                              'to milestone "%(retarget)s".',
                              name=milestone.name,
                              retarget=retarget_to))
            new_values = {'milestone': retarget_to}
            comment = comment or \
                      _("Open tickets retargeted after milestone closed")
            event = BatchTicketChangeEvent(retargeted_tickets, None,
                                           req.authname, comment,
                                           new_values, None)
            try:
                NotificationSystem(self.env).notify(event)
            except Exception as e:
                # Saving succeeded; only notification failed.
                self.log.error("Failure sending notification on ticket "
                               "batch change: %s",
                               exception_to_unicode(e))
                add_warning(req, tag_("The changes have been saved, but "
                                      "an error occurred while sending "
                                      "notifications: %(message)s",
                                      message=to_unicode(e)))
        add_notice(req, _("Your changes have been saved."))
    else:
        milestone.insert()
        add_notice(req, _('The milestone "%(name)s" has been added.',
                          name=milestone.name))

    return True
raise except: # post-process the request in case of errors err = sys.exc_info() try: self._post_process_request(req) except RequestDone: raise except Exception, e: self.log.error( "Exception caught while post-processing" " request: %s", exception_to_unicode(e, traceback=True)) raise err[0], err[1], err[2] except PermissionError, e: raise HTTPForbidden(to_unicode(e)) except ResourceNotFound, e: raise HTTPNotFound(e) except TracError, e: raise HTTPInternalError(e) # Internal methods def _get_perm(self, req): if isinstance(req.session, FakeSession): return FakePerm() else: return PermissionCache(self.env, self.authenticate(req)) def _get_session(self, req): try:
def _fetch_milestone(self, ticket): if ticket['milestone']: try: return model.Milestone(self.env, ticket['milestone']) except ResourceNotFound, e: self.log.warning("In %s, %s", self._op_name, to_unicode(e))
def getdoc(option_or_section):
    """Return the translated docstring of a config option or section."""
    raw_doc = to_unicode(option_or_section.__doc__)
    if raw_doc:
        return dgettext(option_or_section.doc_domain, raw_doc)
    return raw_doc
def _render_view(self, req, id):
    """Retrieve the report results and pre-process them for rendering.

    :param req: current request (format, paging, sorting arguments)
    :param id: the report number
    :return: `(template, data, content_type)` triple, or redirects when
             the report is actually a saved custom query
    """
    r = Report(self.env, id)
    title, description, sql = r.title, r.description, r.query

    # If this is a saved custom query, redirect to the query module
    #
    # A saved query is either an URL query (?... or query:?...),
    # or a query language expression (query:...).
    #
    # It may eventually contain newlines, for increased clarity.
    #
    query = ''.join(line.strip() for line in sql.splitlines())
    if query and (query[0] == '?' or query.startswith('query:?')):
        query = query if query[0] == '?' else query[6:]
        report_id = 'report=%s' % id
        if 'report=' in query:
            if report_id not in query:
                err = _('When specified, the report number should be '
                        '"%(num)s".', num=id)
                req.redirect(req.href.report(id, action='edit',
                                             error=err))
        else:
            if query[-1] != '?':
                query += '&'
            query += report_id
        req.redirect(req.href.query() + quote_query_string(query))
    elif query.startswith('query:'):
        from trac.ticket.query import Query, QuerySyntaxError
        try:
            query = Query.from_string(self.env, query[6:], report=id)
        except QuerySyntaxError as e:
            req.redirect(req.href.report(id, action='edit',
                                         error=to_unicode(e)))
        else:
            req.redirect(query.get_href(req.href))

    format = req.args.get('format')
    if format == 'sql':
        self._send_sql(req, id, title, description, sql)

    title = '{%i} %s' % (id, title)

    report_resource = Resource(self.realm, id)
    req.perm(report_resource).require('REPORT_VIEW')
    context = web_context(req, report_resource)

    # Paging setup: per-format default page sizes.
    page = req.args.getint('page', 1)
    default_max = {'rss': self.items_per_page_rss,
                   'csv': 0, 'tab': 0}.get(format, self.items_per_page)
    max = req.args.getint('max')
    limit = as_int(max, default_max, min=0)  # explict max takes precedence
    offset = (page - 1) * limit

    sort_col = req.args.get('sort', '')
    asc = req.args.getint('asc', 0, min=0, max=1)
    args = {}

    def report_href(**kwargs):
        """Generate links to this report preserving user variables,
        and sorting and paging variables.
        """
        params = args.copy()
        if sort_col:
            params['sort'] = sort_col
        if page != 1:
            params['page'] = page
        if max != default_max:
            params['max'] = max
        params.update(kwargs)
        params['asc'] = 1 if params.get('asc', asc) else None
        return req.href.report(id, params)

    data = {'action': 'view',
            'report': {'id': id, 'resource': report_resource},
            'context': context, 'title': title,
            'description': description, 'max': limit, 'args': args,
            'show_args_form': False, 'message': None, 'paginator': None,
            'report_href': report_href}

    try:
        args = self.get_var_args(req)
        sql = self.get_default_var_args(args, sql)
    except ValueError as e:
        data['message'] = _("Report failed: %(error)s", error=e)
        return 'report_view.html', data, None
    data.update({'args': args, 'title': sub_vars(title, args),
                 'description': sub_vars(description or '', args)})

    try:
        res = self.execute_paginated_report(req, id, sql, args, limit,
                                            offset)
    except TracError as e:
        data['message'] = _("Report failed: %(error)s", error=e)
    else:
        # A 2-tuple result signals an execution failure: (exc, sql).
        if len(res) == 2:
            e, sql = res
            data['message'] = \
                tag_("Report execution failed: %(error)s %(sql)s",
                     error=tag.pre(exception_to_unicode(e)),
                     sql=tag(tag.hr(),
                             tag.pre(sql, style="white-space: pre")))
    if data['message']:
        return 'report_view.html', data, None

    cols, results, num_items, missing_args, limit_offset = res
    need_paginator = limit > 0 and limit_offset
    need_reorder = limit_offset is None
    results = [list(row) for row in results]
    numrows = len(results)

    paginator = None
    if need_paginator:
        paginator = Paginator(results, page - 1, limit, num_items)
        data['paginator'] = paginator
        if paginator.has_next_page:
            add_link(req, 'next', report_href(page=page + 1),
                     _('Next Page'))
        if paginator.has_previous_page:
            add_link(req, 'prev', report_href(page=page - 1),
                     _('Previous Page'))

        pagedata = []
        shown_pages = paginator.get_shown_pages(21)
        for p in shown_pages:
            pagedata.append([report_href(page=p), None, str(p),
                             _('Page %(num)d', num=p)])
        fields = ['href', 'class', 'string', 'title']
        paginator.shown_pages = [dict(zip(fields, p)) for p in pagedata]
        paginator.current_page = {'href': None, 'class': 'current',
                                  'string': str(paginator.page + 1),
                                  'title': None}
        numrows = paginator.num_items

    # Place retrieved columns in groups, according to naming conventions
    #  * _col_ means fullrow, i.e. a group with one header
    #  * col_ means finish the current group and start a new one
    field_labels = TicketSystem(self.env).get_ticket_field_labels()
    header_groups = [[]]
    for idx, col in enumerate(cols):
        if col in field_labels:
            title = field_labels[col]
        else:
            title = col.strip('_').capitalize()
        header = {
            'col': col,
            'title': title,
            'hidden': False,
            'asc': None,
        }

        if col == sort_col:
            if asc:
                data['asc'] = asc
            data['sort'] = sort_col
            header['asc'] = bool(asc)
            if not paginator and need_reorder:
                # this dict will have enum values for sorting
                # and will be used in sortkey(), if non-empty:
                sort_values = {}
                if sort_col in ('status', 'resolution', 'priority',
                                'severity'):
                    # must fetch sort values for that columns
                    # instead of comparing them as strings
                    with self.env.db_query as db:
                        for name, value in db(
                                "SELECT name, %s FROM enum WHERE type=%%s"
                                % db.cast('value', 'int'),
                                (sort_col,)):
                            sort_values[name] = value

                def sortkey(row):
                    val = row[idx]
                    # check if we have sort_values, then use them as keys.
                    if sort_values:
                        return sort_values.get(val)
                    # otherwise, continue with string comparison:
                    if isinstance(val, basestring):
                        val = val.lower()
                    return val
                results = sorted(results, key=sortkey, reverse=not asc)

        header_group = header_groups[-1]

        if col.startswith('__') and col.endswith('__'):  # __col__
            header['hidden'] = True
        elif col[0] == '_' and col[-1] == '_':  # _col_
            header_group = []
            header_groups.append(header_group)
            header_groups.append([])
        elif col[0] == '_':  # _col
            header['hidden'] = True
        elif col[-1] == '_':  # col_
            header_groups.append([])
        header_group.append(header)

    # Structure the rows and cells:
    #  - group rows according to __group__ value, if defined
    #  - group cells the same way headers are grouped
    chrome = Chrome(self.env)
    row_groups = []
    authorized_results = []
    prev_group_value = None
    for row_idx, result in enumerate(results):
        col_idx = 0
        cell_groups = []
        row = {'cell_groups': cell_groups}
        realm = TicketSystem.realm
        parent_realm = ''
        parent_id = ''
        email_cells = []
        for header_group in header_groups:
            cell_group = []
            for header in header_group:
                value = cell_value(result[col_idx])
                cell = {'value': value, 'header': header,
                        'index': col_idx}
                col = header['col']
                col_idx += 1
                # Detect and create new group
                if col == '__group__' and value != prev_group_value:
                    prev_group_value = value
                    # Brute force handling of email in group by header
                    row_groups.append(
                        (value and chrome.format_author(req, value), []))
                # Other row properties
                row['__idx__'] = row_idx
                if col in self._html_cols:
                    row[col] = value
                if col in ('report', 'ticket', 'id', '_id'):
                    row['id'] = value
                # Special casing based on column name
                col = col.strip('_')
                if col in ('reporter', 'cc', 'owner'):
                    email_cells.append(cell)
                elif col == 'realm':
                    realm = value
                elif col == 'parent_realm':
                    parent_realm = value
                elif col == 'parent_id':
                    parent_id = value
                cell_group.append(cell)
            cell_groups.append(cell_group)
        if parent_realm:
            resource = Resource(realm, row.get('id'),
                                parent=Resource(parent_realm,
                                                parent_id))
        else:
            resource = Resource(realm, row.get('id'))
        # FIXME: for now, we still need to hardcode the realm in the
        # action
        if resource.realm.upper() + '_VIEW' not in req.perm(resource):
            continue
        authorized_results.append(result)
        if email_cells:
            for cell in email_cells:
                emails = chrome.format_emails(context.child(resource),
                                              cell['value'])
                result[cell['index']] = cell['value'] = emails
        row['resource'] = resource
        if row_groups:
            row_group = row_groups[-1][1]
        else:
            row_group = []
            row_groups = [(None, row_group)]
        row_group.append(row)

    data.update({'header_groups': header_groups,
                 'row_groups': row_groups,
                 'numrows': numrows})

    if format == 'rss':
        data['context'] = web_context(req, report_resource,
                                      absurls=True)
        return 'report.rss', data, 'application/rss+xml'
    elif format == 'csv':
        filename = 'report_%s.csv' % id if id else 'report.csv'
        self._send_csv(req, cols, authorized_results,
                       mimetype='text/csv', filename=filename)
    elif format == 'tab':
        filename = 'report_%s.tsv' % id if id else 'report.tsv'
        self._send_csv(req, cols, authorized_results, '\t',
                       mimetype='text/tab-separated-values',
                       filename=filename)
    else:
        p = page if max is not None else None
        add_link(req, 'alternate',
                 auth_link(req, report_href(format='rss', page=None)),
                 _('RSS Feed'), 'application/rss+xml', 'rss')
        add_link(req, 'alternate', report_href(format='csv', page=p),
                 _('Comma-delimited Text'), 'text/plain')
        add_link(req, 'alternate', report_href(format='tab', page=p),
                 _('Tab-delimited Text'), 'text/plain')
        if 'REPORT_SQL_VIEW' in req.perm(self.realm, id):
            add_link(req, 'alternate',
                     req.href.report(id=id, format='sql'),
                     _('SQL Query'), 'text/plain')

        # reuse the session vars of the query module so that
        # the query navigation links on the ticket can be used to
        # navigate report results as well
        try:
            req.session['query_tickets'] = \
                ' '.join(str(int(row['id']))
                         for rg in row_groups for row in rg[1])
            req.session['query_href'] = \
                req.session['query_href'] = report_href()
            # Kludge: we have to clear the other query session
            # variables, but only if the above succeeded
            for var in ('query_constraints', 'query_time'):
                if var in req.session:
                    del req.session[var]
        except (ValueError, KeyError):
            pass
        if set(data['args']) - {'USER'}:
            data['show_args_form'] = True
            # Add values of all select-type ticket fields for
            # autocomplete.
            fields = TicketSystem(self.env).get_ticket_fields()
            arg_values = {}
            for arg in set(data['args']) - {'USER'}:
                attrs = fields.by_name(arg.lower())
                if attrs and 'options' in attrs:
                    arg_values[attrs['name']] = attrs['options']
            if arg_values:
                add_script_data(req, arg_values=arg_values)
                Chrome(self.env).add_jquery_ui(req)
        if missing_args:
            add_warning(req, _(
                'The following arguments are missing: %(args)s',
                args=", ".join(missing_args)))

    return 'report_view.html', data, None
def get_last_traceback():
    """Retrieve the last traceback as an `unicode` string."""
    import traceback
    buf = io.BytesIO()
    traceback.print_exc(file=buf)
    return to_unicode(buf.getvalue())
def get_pkginfo(dist):
    """Get a dictionary containing package information for a package

    `dist` can be either a Distribution instance or, as a shortcut,
    directly the module instance, if one can safely infer a Distribution
    instance from it.

    Always returns a dictionary but it will be empty if no Distribution
    instance can be created for the given module.
    """
    import email
    import email.errors
    import types
    from trac.util.translation import _

    def parse_pkginfo(dist, name):
        # Metadata files are RFC 822-style; parse them with the email
        # package after forcing the payload to a byte string.
        return email.message_from_string(to_utf8(dist.get_metadata(name)))

    if isinstance(dist, types.ModuleType):
        def has_resource(dist, module, resource_name):
            """Check whether `dist` actually provides `resource_name`,
            probing each metadata source a distribution may carry."""
            if dist.location.endswith('.egg'):  # installed by easy_install
                return dist.has_resource(resource_name)
            if dist.has_metadata('installed-files.txt'):  # installed by pip
                # installed-files.txt paths are relative to the metadata dir
                resource_name = os.path.normpath('../' + resource_name)
                return any(resource_name == os.path.normpath(name)
                           for name
                           in dist.get_metadata_lines('installed-files.txt'))
            if dist.has_metadata('SOURCES.txt'):
                resource_name = os.path.normpath(resource_name)
                return any(resource_name == os.path.normpath(name)
                           for name in dist.get_metadata_lines('SOURCES.txt'))
            if dist.has_metadata('RECORD'):  # *.dist-info/RECORD
                reader = csv.reader(io.BytesIO(dist.get_metadata('RECORD')))
                return any(resource_name == row[0] for row in reader)
            if dist.has_metadata('PKG-INFO'):
                try:
                    pkginfo = parse_pkginfo(dist, 'PKG-INFO')
                    provides = pkginfo.get_all('Provides', ())
                    names = module.__name__.split('.')
                    # Accept if any dotted prefix of the module name is
                    # listed in the distribution's Provides entries.
                    if any('.'.join(names[:n + 1]) in provides
                           for n in xrange(len(names))):
                        return True
                # Fixed: use the canonical `email.errors` module instead of
                # the deprecated py2 `email.Errors` alias (removed in py3),
                # consistent with the handler at the bottom of this function.
                except (IOError, email.errors.MessageError):
                    pass
            toplevel = resource_name.split('/')[0]
            if dist.has_metadata('top_level.txt'):
                return toplevel in dist.get_metadata_lines('top_level.txt')
            # Last resort: match the project key against the top-level name.
            return dist.key == toplevel.lower()

        module = dist
        module_path = get_module_path(module)
        resource_name = module.__name__.replace('.', '/')
        if os.path.basename(module.__file__) in ('__init__.py', '__init__.pyc',
                                                 '__init__.pyo'):
            resource_name += '/__init__.py'
        else:
            resource_name += '.py'
        for dist in find_distributions(module_path, only=True):
            if os.path.isfile(module_path) or \
                    has_resource(dist, module, resource_name):
                break
        else:
            # No matching distribution found for the module.
            return {}

    attrs = ('author', 'author-email', 'maintainer', 'maintainer-email',
             'license', 'home-page', 'summary', 'name', 'description',
             'version')
    info = {}

    def normalize(attr):
        # Metadata headers use dashes; dictionary keys use underscores.
        return attr.lower().replace('-', '_')

    metadata = 'METADATA' if dist.has_metadata('METADATA') else 'PKG-INFO'
    try:
        pkginfo = parse_pkginfo(dist, metadata)
        for attr in [key for key in attrs if key in pkginfo]:
            info[normalize(attr)] = pkginfo[attr]
    except IOError as e:
        err = _("Failed to read %(metadata)s file for %(dist)s: %(err)s",
                metadata=metadata, dist=dist, err=to_unicode(e))
        # Surface the read failure in every field rather than raising.
        for attr in attrs:
            info[normalize(attr)] = err
    except email.errors.MessageError as e:
        err = _("Failed to parse %(metadata)s file for %(dist)s: %(err)s",
                metadata=metadata, dist=dist, err=to_unicode(e))
        for attr in attrs:
            info[normalize(attr)] = err
    return info
def _to_utf8(basestr):
    """Coerce any string-ish value to a UTF-8 encoded byte string."""
    text = to_unicode(basestr)
    return text.encode('utf-8')
def test_explicit_charset_with_replace(self):
    """An invalid UTF-8 byte decoded with an explicit charset comes back
    as a `unicode` string (replacement-style handling of the bad byte)."""
    decoded = to_unicode('\xc3', 'utf-8')
    self.assertIsInstance(decoded, unicode)
    self.assertEqual(u'\xc3', decoded)
class RequestDispatcher(Component):
    """Component responsible for dispatching requests to registered
    handlers."""

    # Components implementing IAuthenticator; queried in order by
    # `authenticate` below.
    authenticators = ExtensionPoint(IAuthenticator)

    # Components implementing IRequestHandler; candidates for processing
    # a request in `dispatch` below.
    handlers = ExtensionPoint(IRequestHandler)

    filters = OrderedExtensionsOption('trac', 'request_filters',
                                      IRequestFilter,
        doc="""Ordered list of filters to apply to all requests
        (''since 0.10'').""")

    default_handler = ExtensionOption('trac', 'default_handler',
                                      IRequestHandler, 'WikiModule',
        """Name of the component that handles requests to the base URL.

        Options include `TimelineModule`, `RoadmapModule`, `BrowserModule`,
        `QueryModule`, `ReportModule` and `NewticketModule` (''since 0.9'').""")

    # Public API

    def authenticate(self, req):
        """Return the authenticated user name for `req`.

        Asks each registered authenticator in turn; the first non-empty
        authname wins.  The for/else falls back to 'anonymous' when no
        authenticator recognized the request.
        """
        for authenticator in self.authenticators:
            authname = authenticator.authenticate(req)
            if authname:
                return authname
        else:
            return 'anonymous'

    def dispatch(self, req):
        """Find a registered handler that matches the request and let it
        process it.

        In addition, this method initializes the HDF data set and adds the
        web site chrome.
        """
        # FIXME: For backwards compatibility, should be removed in 0.11
        self.env.href = req.href
        # FIXME in 0.11: self.env.abs_href = Href(self.env.base_url)
        self.env.abs_href = req.abs_href

        # Select the component that should handle the request
        chosen_handler = None
        early_error = None
        req.authname = 'anonymous'
        req.perm = NoPermissionCache()
        try:
            # The base URL goes to the configured default handler; any
            # other path is offered to each handler in turn.
            if not req.path_info or req.path_info == '/':
                chosen_handler = self.default_handler
            else:
                for handler in self.handlers:
                    if handler.match_request(req):
                        chosen_handler = handler
                        break

            # Attach user information to the request early, so that
            # the IRequestFilter can see it while preprocessing
            if not getattr(chosen_handler, 'anonymous_request', False):
                try:
                    req.authname = self.authenticate(req)
                    req.perm = PermissionCache(self.env, req.authname)
                    req.session = Session(self.env, req)
                    req.form_token = self._get_form_token(req)
                except:
                    # Authentication failed: degrade to anonymous but
                    # remember the error so it can be re-raised after
                    # post-processing below.
                    req.authname = 'anonymous'
                    req.perm = NoPermissionCache()
                    early_error = sys.exc_info()

            chosen_handler = self._pre_process_request(req, chosen_handler)
        except:
            early_error = sys.exc_info()

        if not chosen_handler and not early_error:
            early_error = (HTTPNotFound('No handler matched request to %s',
                                        req.path_info), None, None)

        # Prepare HDF for the clearsilver template
        try:
            # Handlers may opt out of templating via `use_template = False`.
            use_template = getattr(chosen_handler, 'use_template', True)
            req.hdf = None
            if use_template:
                chrome = Chrome(self.env)
                req.hdf = HDFWrapper(loadpaths=chrome.get_all_templates_dirs())
                populate_hdf(req.hdf, self.env, req)
                chrome.populate_hdf(req, chosen_handler)
        except:
            req.hdf = None  # revert to sending plaintext error
            if not early_error:
                raise

        if early_error:
            # Still give filters a chance to clean up, then re-raise the
            # original error with its traceback (py2 three-arg raise).
            try:
                self._post_process_request(req)
            except Exception, e:
                self.log.exception(e)
            raise early_error[0], early_error[1], early_error[2]

        # Process the request and render the template
        try:
            try:
                # Protect against CSRF attacks: we validate the form token
                # for all POST requests with a content-type corresponding
                # to form submissions
                if req.method == 'POST':
                    ctype = req.get_header('Content-Type')
                    if ctype:
                        ctype, options = cgi.parse_header(ctype)
                    if ctype in ('application/x-www-form-urlencoded',
                                 'multipart/form-data') and \
                            req.args.get('__FORM_TOKEN') != req.form_token:
                        raise HTTPBadRequest('Missing or invalid form token. '
                                             'Do you have cookies enabled?')

                resp = chosen_handler.process_request(req)
                if resp:
                    template, content_type = self._post_process_request(
                        req, *resp)
                    # Give the session a chance to persist changes
                    if req.session:
                        req.session.save()
                    req.display(template, content_type or 'text/html')
                else:
                    self._post_process_request(req)
            except RequestDone:
                # Normal end of request processing; propagate as-is.
                raise
            except:
                # Run post-processing even on failure, then re-raise the
                # original exception with its traceback preserved.
                err = sys.exc_info()
                try:
                    self._post_process_request(req)
                except Exception, e:
                    self.log.exception(e)
                raise err[0], err[1], err[2]
        except PermissionError, e:
            # Map permission failures to an HTTP 403 response.
            raise HTTPForbidden(to_unicode(e))
def _do_actions(self, context, actions):
    """Execute each named action for the current request.

    `actions` is a sequence of action identifiers (e.g. 'get-file',
    'downloads-list'); each branch below checks the required permission
    first and then performs the action, mostly by filling `self.data`
    for the template or mutating the download store.
    """
    # NOTE(review): `context.req.args.has_key(...)` (used throughout) is
    # the deprecated py2 dict API; `'desc' in context.req.args` would be
    # the modern equivalent.
    for action in actions:
        if action == 'get-file':
            context.req.perm.require('DOWNLOADS_VIEW')

            # Get request arguments.
            download_id = context.req.args.get('id') or 0
            download_file = context.req.args.get('file')

            # Get download by id when given, otherwise by file name.
            if download_id:
                download = self.get_download(context, download_id)
            else:
                download = self.get_download_by_file(context,
                  download_file)

            # Check if requested download exists.
            if not download:
                raise TracError('File not found.')

            # Check resource based permission.
            context.req.perm.require('DOWNLOADS_VIEW',
              Resource('downloads', download['id']))

            # Get download file path.
            path = os.path.normpath(os.path.join(self.path,
              to_unicode(download['id']), download['file']))
            self.log.debug('path: %s' % (path,))

            # Increase downloads count.
            new_download = {'count': download['count'] + 1}

            # Edit download.
            self.edit_download(context, download['id'], new_download)

            # Notify change listeners.
            for listener in self.change_listeners:
                listener.download_changed(context, new_download, download)

            # Commit DB before file send.
            db = self.env.get_db_cnx()
            db.commit()

            # Guess mime type from the first 1000 bytes of the file.
            # NOTE(review): `file` shadows the builtin and is not closed
            # if read() raises; a `with` block would be safer.
            file = open(path.encode('utf-8'), "r")
            file_data = file.read(1000)
            file.close()
            mimeview = Mimeview(self.env)
            mime_type = mimeview.get_mimetype(path, file_data)
            if not mime_type:
                mime_type = 'application/octet-stream'
            if 'charset=' not in mime_type:
                charset = mimeview.get_charset(file_data, mime_type)
                mime_type = mime_type + '; charset=' + charset

            # Return uploaded file to request.
            context.req.send_header('Content-Disposition',
              'attachment;filename="%s"' % (os.path.normpath(
              download['file'])))
            context.req.send_header('Content-Description',
              download['description'])
            context.req.send_file(path.encode('utf-8'), mime_type)

        elif action == 'downloads-list':
            context.req.perm.require('DOWNLOADS_VIEW')

            self.log.debug('visible_fields: %s' % (self.visible_fields,))

            # Get form values: sort column and direction, with configured
            # defaults.
            order = context.req.args.get('order') or self.download_sort
            if context.req.args.has_key('desc'):
                desc = context.req.args.get('desc') == '1'
            else:
                desc = self.download_sort_direction == 'desc'

            self.data['order'] = order
            self.data['desc'] = desc
            self.data['has_tags'] = self.env.is_component_enabled(
              'tractags.api.TagEngine')
            self.data['visible_fields'] = self.visible_fields
            self.data['title'] = self.title
            self.data['description'] = self.get_description(context)
            self.data['downloads'] = self.get_downloads(context, order,
              desc)
            # NOTE(review): re-assigns 'visible_fields' with a shallow
            # copy, overwriting the assignment a few lines above.
            self.data['visible_fields'] = [visible_field for visible_field
              in self.visible_fields]

            # Component, versions, etc. are needed only for new download
            # add form.
            if context.req.perm.has_permission('DOWNLOADS_ADD'):
                self.data['components'] = self.get_components(context)
                self.data['versions'] = self.get_versions(context)
                self.data['architectures'] = self.get_architectures(
                  context)
                self.data['platforms'] = self.get_platforms(context)
                self.data['types'] = self.get_types(context)

        elif action == 'admin-downloads-list':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get form values
            order = context.req.args.get('order') or self.download_sort
            if context.req.args.has_key('desc'):
                desc = context.req.args.get('desc') == '1'
            else:
                desc = self.download_sort_direction == 'desc'
            download_id = int(context.req.args.get('download') or 0)

            self.data['order'] = order
            self.data['desc'] = desc
            self.data['has_tags'] = self.env.is_component_enabled(
              'tractags.api.TagEngine')
            self.data['download'] = self.get_download(context, download_id)
            self.data['downloads'] = self.get_downloads(context, order,
              desc)
            self.data['components'] = self.get_components(context)
            self.data['versions'] = self.get_versions(context)
            self.data['architectures'] = self.get_architectures(context)
            self.data['platforms'] = self.get_platforms(context)
            self.data['types'] = self.get_types(context)

        elif action == 'description-edit':
            # Permission check only; the edit form is rendered elsewhere.
            context.req.perm.require('DOWNLOADS_ADMIN')

        elif action == 'description-post-edit':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get form values.
            description = context.req.args.get('description')

            # Set new description.
            self.edit_description(context, description)

        elif action == 'downloads-post-add':
            context.req.perm.require('DOWNLOADS_ADD')

            # Get form values.
            file, filename, file_size = self._get_file_from_req(context)
            download = {'file': filename,
                        'description': context.req.args.get('description'),
                        'size': file_size,
                        'time': to_timestamp(datetime.now(utc)),
                        'count': 0,
                        'author': context.req.authname,
                        'tags': context.req.args.get('tags'),
                        'component': context.req.args.get('component'),
                        'version': context.req.args.get('version'),
                        'architecture': context.req.args.get(
                          'architecture'),
                        'platform': context.req.args.get('platform'),
                        'type': context.req.args.get('type')}

            # Upload file to DB and file storage.
            self._add_download(context, download, file)

            # Close input file.
            file.close()

        elif action == 'downloads-post-edit':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get form values.
            download_id = context.req.args.get('id')
            old_download = self.get_download(context, download_id)
            download = {'description': context.req.args.get('description'),
                        'tags': context.req.args.get('tags'),
                        'component': context.req.args.get('component'),
                        'version': context.req.args.get('version'),
                        'architecture': context.req.args.get(
                          'architecture'),
                        'platform': context.req.args.get('platform'),
                        'type': context.req.args.get('type')}

            # Edit Download.
            self.edit_download(context, download_id, download)

            # Notify change listeners.
            for listener in self.change_listeners:
                listener.download_changed(context, download, old_download)

        elif action == 'downloads-delete':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get selected downloads; a single checkbox arrives as a bare
            # string, so normalize to a list.
            selection = context.req.args.get('selection')
            if isinstance(selection, (str, unicode)):
                selection = [selection]

            # Delete download.
            if selection:
                for download_id in selection:
                    download = self.get_download(context, download_id)
                    self.log.debug('download: %s' % (download,))
                    self._delete_download(context, download)

        elif action == 'admin-architectures-list':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get form values
            order = context.req.args.get('order') or self.architecture_sort
            if context.req.args.has_key('desc'):
                desc = context.req.args.get('desc') == '1'
            else:
                desc = self.architecture_sort_direction == 'desc'
            architecture_id = int(context.req.args.get('architecture')
              or 0)

            # Display architectures.
            self.data['order'] = order
            self.data['desc'] = desc
            self.data['architecture'] = self.get_architecture(context,
              architecture_id)
            self.data['architectures'] = self.get_architectures(context,
              order, desc)

        elif action == 'architectures-post-add':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get form values.
            architecture = {'name': context.req.args.get('name'),
                            'description': context.req.args.get(
                              'description')}

            # Add architecture.
            self.add_architecture(context, architecture)

        elif action == 'architectures-post-edit':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get form values.
            architecture_id = context.req.args.get('id')
            architecture = {'name': context.req.args.get('name'),
                            'description': context.req.args.get(
                              'description')}

            # Edit architecture.
            self.edit_architecture(context, architecture_id, architecture)

        elif action == 'architectures-delete':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get selected architectures.
            selection = context.req.args.get('selection')
            if isinstance(selection, (str, unicode)):
                selection = [selection]

            # Delete architectures.
            if selection:
                for architecture_id in selection:
                    self.delete_architecture(context, architecture_id)

        elif action == 'admin-platforms-list':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get form values.
            order = context.req.args.get('order') or self.platform_sort
            if context.req.args.has_key('desc'):
                desc = context.req.args.get('desc') == '1'
            else:
                desc = self.platform_sort_direction == 'desc'
            platform_id = int(context.req.args.get('platform') or 0)

            # Display platforms.
            self.data['order'] = order
            self.data['desc'] = desc
            self.data['platform'] = self.get_platform(context, platform_id)
            self.data['platforms'] = self.get_platforms(context, order,
              desc)

        elif action == 'platforms-post-add':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get form values.
            platform = {'name': context.req.args.get('name'),
                        'description': context.req.args.get('description')}

            # Add platform.
            self.add_platform(context, platform)

        elif action == 'platforms-post-edit':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get form values.
            platform_id = context.req.args.get('id')
            platform = {'name': context.req.args.get('name'),
                        'description': context.req.args.get('description')}

            # Edit platform.
            self.edit_platform(context, platform_id, platform)

        elif action == 'platforms-delete':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get selected platforms.
            selection = context.req.args.get('selection')
            if isinstance(selection, (str, unicode)):
                selection = [selection]

            # Delete platforms.
            if selection:
                for platform_id in selection:
                    self.delete_platform(context, platform_id)

        elif action == 'admin-types-list':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get form values
            order = context.req.args.get('order') or self.type_sort
            if context.req.args.has_key('desc'):
                desc = context.req.args.get('desc') == '1'
            else:
                desc = self.type_sort_direction == 'desc'
            # NOTE(review): variable is named platform_id but holds a
            # type id here (copy-paste from the platforms branch).
            platform_id = int(context.req.args.get('type') or 0)

            # Display types.
            self.data['order'] = order
            self.data['desc'] = desc
            self.data['type'] = self.get_type(context, platform_id)
            self.data['types'] = self.get_types(context, order, desc)

        elif action == 'types-post-add':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get form values.
            type = {'name': context.req.args.get('name'),
                    'description': context.req.args.get('description')}

            # Add type.
            self.add_type(context, type)

        elif action == 'types-post-edit':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get form values.
            type_id = context.req.args.get('id')
            type = {'name': context.req.args.get('name'),
                    'description': context.req.args.get('description')}

            # Edit type.
            self.edit_type(context, type_id, type)

        elif action == 'types-delete':
            context.req.perm.require('DOWNLOADS_ADMIN')

            # Get selected types.
            selection = context.req.args.get('selection')
            if isinstance(selection, (str, unicode)):
                selection = [selection]

            # Delete types.
            if selection:
                for type_id in selection:
                    self.delete_type(context, type_id)
def test_implicit_charset(self):
    """Without an explicit charset argument, UTF-8 input is detected and
    decoded: the two-byte input sequence yields one `unicode` character."""
    decoded = to_unicode('\xc3\xa7')
    self.assertIsInstance(decoded, unicode)
    self.assertEqual(u'\xe7', decoded)