def _provider_failure(self, exc, req, ep, current_filters, all_filters):
    """Raise a TracError exception explaining the failure of a provider.

    At the same time, the message will contain a link to the timeline
    without the filters corresponding to the guilty event provider `ep`.
    """
    self.log.error('Timeline event provider failed: %s',
                   exception_to_unicode(exc, traceback=True))

    # Map filter name -> human-readable label for the guilty provider.
    kind_labels = {}
    for f in ep.get_timeline_filters(req) or []:
        kind_labels[f[0]] = f[1]
    provider_filters = set(kind_labels)
    selected = set(current_filters)

    # Build a fallback selection that excludes the provider's filters;
    # if nothing from the current selection survives, offer everything else.
    remaining = selected - provider_filters
    if not remaining:
        remaining = set(all_filters) - provider_filters

    query = [(name, req.args.get(name))
             for name in ('from', 'format', 'max', 'daysback')]
    query.extend((name, 'on') for name in remaining)
    timeline_href = req.href.timeline(query)

    # TRANSLATOR: ...want to see the 'other kinds of events' from... (link)
    other_events = tag.a(_('other kinds of events'), href=timeline_href)
    failed_kinds = ', '.join('"%s"' % kind_labels[name]
                             for name in selected & provider_filters)
    raise TracError(tag(
        tag.p(tag_("Event provider %(name)s failed for filters "
                   "%(kinds)s: ",
                   name=tag.tt(ep.__class__.__name__),
                   kinds=failed_kinds),
              tag.b(exception_to_unicode(exc)), class_='message'),
        tag.p(tag_("You may want to see the %(other_events)s from the "
                   "Timeline or notify your Trac administrator about the "
                   "error (detailed information was written to the log).",
                   other_events=other_events))))
def _get_authz_info(self):
    """Return the parsed authz rules and the set of users they mention.

    Re-parses the authz file only when its mtime changes; on access or
    parse errors the cached state is reset to `(None, set())`.
    """
    try:
        mtime = os.path.getmtime(self.authz_file)
    except OSError as e:
        # Only complain if we previously had a working authz file.
        if self._authz is not None:
            self.log.error('Error accessing authz file: %s',
                           exception_to_unicode(e))
        self._mtime = mtime = 0
        self._authz = None
        self._users = set()
    if mtime != self._mtime:
        self._mtime = mtime
        rm = RepositoryManager(self.env)
        modules = set(repos.reponame
                      for repos in rm.get_real_repositories())
        if '' in modules and self.authz_module_name:
            modules.add(self.authz_module_name)
        modules.add('')
        self.log.info('Parsing authz file: %s', self.authz_file)
        try:
            self._authz = parse(read_file(self.authz_file), modules)
            # Fixed: use .values()/.items() instead of the Python-2-only
            # .itervalues()/.iteritems(), matching the `except ... as`
            # syntax already used in this method.
            self._users = set(user
                              for paths in self._authz.values()
                              for path in paths.values()
                              for user, result in path.items()
                              if result)
        except Exception as e:
            self._authz = None
            self._users = set()
            self.log.error('Error parsing authz file: %s',
                           exception_to_unicode(e))
    return self._authz, self._users
def onecmd(self, line):
    """`line` may be a `str` or an `unicode` object"""
    try:
        if isinstance(line, str):
            # Interactive input comes from stdin; command-line input
            # uses the locale's preferred encoding.
            if self.interactive:
                source_encoding = sys.stdin.encoding
            else:
                source_encoding = getpreferredencoding()  # sys.argv
            line = to_unicode(line, source_encoding)
        if self.interactive:
            # Preserve literal backslashes typed at the prompt.
            line = line.replace('\\', '\\\\')
        status = cmd.Cmd.onecmd(self, line) or 0
    except SystemExit:
        raise
    except AdminCommandError as admin_err:
        printerr(_("Error: %(msg)s", msg=to_unicode(admin_err)))
        if admin_err.show_usage:
            print()
            self.do_help(admin_err.cmd or self.arg_tokenize(line)[0])
        status = 2
    except TracError as trac_err:
        printerr(exception_to_unicode(trac_err))
        status = 2
    except Exception as unexpected:
        printerr(exception_to_unicode(unexpected))
        status = 2
        if self.env_check():
            self.env.log.error("Exception in trac-admin command: %s",
                               exception_to_unicode(unexpected,
                                                    traceback=True))
    # Interactive sessions ignore the return value (implicitly None).
    if not self.interactive:
        return status
def _do_authz_raw(self, req):
    """Validate and save raw authz file content submitted via POST.

    Raises TracError when the authz file is not readable/writable,
    when the submitted content does not parse, or when writing fails.
    """
    # get default authz file from trac.ini
    authz_file = self.config.get('trac', 'authz_file')
    # test if authz file exists and is writable
    if not os.access(authz_file, os.W_OK | os.R_OK):
        raise TracError("Can't access authz file %s" % authz_file)
    # evaluate forms
    if req.method == 'POST':
        current = req.args.get('current').strip().replace('\r', '')
        # encode to utf-8
        current = current.encode('utf-8')
        # parse and validate authz file with a config parser
        from ConfigParser import ConfigParser
        from StringIO import StringIO
        cp = ConfigParser()
        try:
            cp.readfp(StringIO(current))
        except Exception as e:
            raise TracError("Invalid Syntax: %s" % exception_to_unicode(e))
        # write to disk
        try:
            # Fixed: the original did `current = fp.write(current)`, which
            # rebound `current` to None; also use a context manager so the
            # file is closed even if the write fails.
            with open(authz_file, 'wb') as fp:
                fp.write(current)
        except Exception as e:
            raise TracError("Can't write authz file: %s"
                            % exception_to_unicode(e))
def _dispatch_request(req, env, env_error):
    """Dispatch `req`, rendering any `HTTPException` as an error page.

    Returns the accumulated response fragments (empty on error).
    """
    resp = []
    # fixup env.abs_href if `[trac] base_url` was not specified
    if env and not env.abs_href.base:
        env._abs_href = req.abs_href
    try:
        if not env and env_error:
            raise HTTPInternalError(env_error)
        try:
            dispatcher = RequestDispatcher(env)
            dispatcher.dispatch(req)
        except RequestDone:
            pass
        resp = req._response or []
    except HTTPException as e:
        # This part is a bit more complex than it should be.
        # See trac/web/api.py for the definition of HTTPException subclasses.
        if env:
            env.log.warn(exception_to_unicode(e))
        try:
            # We try to get localized error messages here,
            # but we should ignore secondary errors
            title = _('Error')
            if e.reason:
                if title.lower() in e.reason.lower():
                    title = e.reason
                else:
                    title = _('Error: %(message)s', message=e.reason)
        except Exception:
            # Fixed: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            title = 'Error'
        # The message is based on the e.detail, which can be an Exception
        # object, but not a TracError one: when creating HTTPException,
        # a TracError.message is directly assigned to e.detail
        if isinstance(e.detail, Exception):  # not a TracError
            message = exception_to_unicode(e.detail)
        elif isinstance(e.detail, Fragment):  # markup coming from a TracError
            message = e.detail
        else:
            message = to_unicode(e.detail)
        data = {'title': title, 'type': 'TracError', 'message': message,
                'frames': [], 'traceback': None}
        if e.code == 403 and req.authname == 'anonymous':
            # TRANSLATOR: ... not logged in, you may want to 'do so' now (link)
            do_so = tag.a(_("do so"), href=req.href.login())
            req.chrome['notices'].append(
                tag_("You are currently not logged in. You may want to "
                     "%(do_so)s now.", do_so=do_so))
        try:
            req.send_error(sys.exc_info(), status=e.code, env=env, data=data)
        except RequestDone:
            pass
    # Fixed: `resp` was computed but never returned in the snippet.
    return resp
def replace(self, old_uid, new_uid):
    """Delete rows already owned by `new_uid` so `old_uid` can take over.

    Returns an error mapping on database failure, None on success.
    """
    try:
        self.env.db_transaction("""
            DELETE FROM %s WHERE %s=%%s
            """ % (self.table, self.column), (new_uid,))
    except _get_db_exc(self.env) as e:  # fixed: py2-only `except X, e`
        result = exception_to_unicode(e)
        msg = 'failed: %s' % exception_to_unicode(e, traceback=True)
        self.log.debug(self.msg(old_uid, new_uid, self.table, self.column,
                                result=msg))
        return dict(error={(self.table, self.column, None): result})
def _log_error(item, e):
    """Log why plugin entry point *item* was skipped, at a severity
    appropriate to the setuptools error *e* (uses `env` from the
    enclosing scope)."""
    summary = exception_to_unicode(e)
    if isinstance(e, DistributionNotFound):
        # A missing requirement is routine, so keep it at debug level.
        env.log.debug('Skipping "%s": ("%s" not found)', item, summary)
    elif isinstance(e, VersionConflict):
        env.log.error('Skipping "%s": (version conflict "%s")',
                      item, summary)
    elif isinstance(e, UnknownExtra):
        env.log.error('Skipping "%s": (unknown extra "%s")',
                      item, summary)
    else:
        # Unexpected failure: include the full traceback.
        env.log.error('Skipping "%s": %s', item,
                      exception_to_unicode(e, traceback=True))
def do_upgrade(env, version, cursor):
    """Move attachments from the `attachments` directory into `files`,
    hashing the filenames in the process."""
    path = env.path
    old_dir = os.path.join(path, 'attachments')
    if not os.path.exists(old_dir):
        return
    old_stat = os.stat(old_dir)
    new_dir = os.path.join(path, 'files', 'attachments')
    if not os.path.exists(new_dir):
        os.makedirs(new_dir)

    cursor.execute("""
        SELECT type, id, filename FROM attachment ORDER BY type, id
        """)
    for row in cursor:
        move_attachment_file(env, *row)

    # Try to preserve permissions and ownerships of the attachments
    # directory for $ENV/files
    # (renamed loop variables: `dir`/`files` shadowed builtins)
    for dirpath, dirnames, filenames in os.walk(os.path.join(path, 'files')):
        try:
            if hasattr(os, 'chmod'):
                os.chmod(dirpath, old_stat.st_mode)
            if hasattr(os, 'chflags') and hasattr(old_stat, 'st_flags'):
                os.chflags(dirpath, old_stat.st_flags)
            if hasattr(os, 'chown'):
                os.chown(dirpath, old_stat.st_uid, old_stat.st_gid)
        except OSError:
            # Best effort only; not all platforms support these calls.
            pass

    # Remove empty directory hierarchy
    try:
        for dirpath, dirnames, filenames in os.walk(old_dir, topdown=False):
            os.rmdir(dirpath)
    except OSError as e:  # fixed: py2-only `except OSError, e`
        env.log.warning("Can't delete old attachments directory %s: %s",
                        old_dir, exception_to_unicode(e))
        # TRANSLATOR: Wrap message to 80 columns
        printerr(_("""\
The upgrade of attachments was successful, but the old attachments
directory:

  %(src_dir)s

couldn't be removed, possibly due to the presence of files that weren't
referenced in the database. The error was:

  %(exception)s

This error can be ignored, but for keeping your environment clean you
should backup any remaining files in that directory and remove it manually.
""", src_dir=old_dir, exception=exception_to_unicode(e)))
def process_request(self, req):
    """Handle a GitLab merge-request webhook: validate headers and
    token, sync the downstream branch and create/update the ticket."""
    # First check for the expected X-Gitlab-Event header
    event = req.get_header('X-Gitlab-Event')
    # Fixed: original tested `not event and event.lower() == ...`, which
    # can never be true (and `event.lower()` would raise when the header
    # is missing), so bad events were never rejected.
    if not event or event.lower() != 'merge request hook':
        self.log.warn('GitLab webhook request event missing or '
                      'not handled: {}'.format(event))
        req.send_response(422)
        req.end_headers()
        return
    token = req.get_header('X-Gitlab-Token')
    if not self._verify_token(token):
        self.log.warn('GitLab webhook request security token missing '
                      'or not valid')
        req.send_response(401)
        req.end_headers()
        return
    try:
        hook_data = json.load(req)
    except Exception as exc:
        self.log.warn(
            'Gitlab webhook failed to parse the JSON request '
            'data: {}'.format(exc))
        return req.send_no_content()
    self.log.debug('GitLab webhook received event payload:\n'
                   + pformat(hook_data))
    if hook_data['object_attributes']['state'] == 'closed':
        # Do not update tickets/branches for closed merged requests
        return req.send_no_content()
    try:
        synced_branch = self._sync_branch(hook_data)
    except Exception as exc:
        self.log.warn(
            'Gitlab webhook failed to sync the downstream '
            'branch: {}'.format(exception_to_unicode(exc, True)))
        synced_branch = False
    try:
        self._create_or_update_ticket(hook_data, synced_branch)
    except Exception as exc:
        self.log.warn(
            'Gitlab webhook failed to create or update the '
            'ticket for this merge request: {}'.format(
                exception_to_unicode(exc, True)))
    req.send_no_content()
def _dispatch_request(req, env, env_error):
    """Dispatch `req`, rendering any `HTTPException` as an error page.

    Returns the accumulated response fragments (empty on error).
    """
    resp = []
    # fixup env.abs_href if `[trac] base_url` was not specified
    if env and not env.abs_href.base:
        env._abs_href = req.abs_href
    try:
        if not env and env_error:
            raise HTTPInternalError(env_error)
        try:
            dispatcher = RequestDispatcher(env)
            dispatcher.dispatch(req)
        except RequestDone:
            pass
        resp = req._response or []
    except HTTPException as e:  # fixed: py2-only `except X, e`
        # This part is a bit more complex than it should be.
        # See trac/web/api.py for the definition of HTTPException subclasses.
        if env:
            env.log.warn(exception_to_unicode(e))
        title = 'Error'
        if e.reason:
            if 'error' in e.reason.lower():
                title = e.reason
            else:
                title = 'Error: %s' % e.reason
        # The message is based on the e.detail, which can be an Exception
        # object, but not a TracError one: when creating HTTPException,
        # a TracError.message is directly assigned to e.detail
        if isinstance(e.detail, Exception):  # not a TracError
            message = exception_to_unicode(e.detail)
        elif isinstance(e.detail, Fragment):  # markup coming from a TracError
            message = e.detail
        else:
            message = to_unicode(e.detail)
        data = {'title': title, 'type': 'TracError', 'message': message,
                'frames': [], 'traceback': None}
        if e.code == 403 and req.authname == 'anonymous':
            req.chrome['notices'].append(Markup(
                _('You are currently not logged in. You may want to '
                  '<a href="%(href)s">do so</a> now.',
                  href=req.href.login())))
        try:
            req.send_error(sys.exc_info(), status=e.code, env=env, data=data)
        except RequestDone:
            pass
    # Fixed: `resp` was computed but never returned in the snippet.
    return resp
def render_property(self, name, mode, context, props):
    """Renders a node property to HTML.

    Tries the matching renderers in decreasing quality order; a renderer
    that raises is logged and the next candidate is tried.
    """
    if name in self.hidden_properties:
        return
    candidates = []
    for renderer in self.property_renderers:
        quality = renderer.match_property(name, mode)
        if quality > 0:
            candidates.append((quality, renderer))
    candidates.sort(reverse=True)
    for (quality, renderer) in candidates:
        try:
            rendered = renderer.render_property(name, mode, context, props)
            if not rendered:
                return rendered
            if isinstance(rendered, RenderedProperty):
                value = rendered.content
            else:
                value = rendered
                # Plain values carry no extra rendering info.
                rendered = None
            prop = {'name': name, 'value': value, 'rendered': rendered}
            return prop
        except Exception as e:  # fixed: py2-only `except Exception, e`
            self.log.warning('Rendering failed for property %s with '
                             'renderer %s: %s', name,
                             renderer.__class__.__name__,
                             exception_to_unicode(e, traceback=True))
def pre_process_request(self, req, handler):
    """Synchronize repositories configured with `sync_per_request`
    before the request is handled; returns `handler` unchanged.

    Failures are surfaced as request warnings and logged, never fatal.
    """
    from trac.web.chrome import Chrome, add_warning
    if handler is not Chrome(self.env):
        for repo_info in self.get_all_repositories().values():
            if not as_bool(repo_info.get('sync_per_request')):
                continue
            start = time.time()
            repo_name = repo_info['name'] or '(default)'
            try:
                repo = self.get_repository(repo_info['name'])
                repo.sync()
            except TracError as e:
                add_warning(req,
                    _("Can't synchronize with repository \"%(name)s\" "
                      "(%(error)s). Look in the Trac log for more "
                      "information.", name=repo_name,
                      error=to_unicode(e)))
            except Exception as e:
                add_warning(req,
                    _("Failed to sync with repository \"%(name)s\": "
                      "%(error)s; repository information may be out of "
                      "date. Look in the Trac log for more information "
                      "including mitigation strategies.",
                      name=repo_name, error=to_unicode(e)))
                self.log.error(
                    "Failed to sync with repository \"%s\"; You may be "
                    "able to reduce the impact of this issue by "
                    "configuring the sync_per_request option; see "
                    "http://trac.edgewall.org/wiki/TracRepositoryAdmin"
                    "#ExplicitSync for more detail: %s", repo_name,
                    exception_to_unicode(e, traceback=True))
            else:
                # Fixed: only report success when sync() actually
                # succeeded, instead of logging it unconditionally.
                self.log.info("Synchronized '%s' repository in "
                              "%0.2f seconds",
                              repo_name, time.time() - start)
    return handler
def _parse_arg_list(self):
    """Parse the supplied request parameters into a list of
    `(name, value)` tuples.
    """
    fp = self.environ['wsgi.input']

    # Avoid letting cgi.FieldStorage consume the input stream when the
    # request does not contain form data
    ctype = self.get_header('Content-Type')
    if ctype:
        ctype, options = cgi.parse_header(ctype)
    if ctype not in ('application/x-www-form-urlencoded',
                     'multipart/form-data'):
        fp = StringIO('')

    # Python 2.6 introduced a backwards incompatible change for
    # FieldStorage where QUERY_STRING is no longer ignored for POST
    # requests. We'll keep the pre 2.6 behaviour for now...
    if self.method == 'POST':
        qs_on_post = self.environ.pop('QUERY_STRING', '')
    fs = _FieldStorage(fp, environ=self.environ, keep_blank_values=True)
    if self.method == 'POST':
        self.environ['QUERY_STRING'] = qs_on_post

    args = []
    for value in fs.list or ():
        try:
            name = unicode(value.name, 'utf-8')
            if not value.filename:
                value = unicode(value.value, 'utf-8')
        except UnicodeDecodeError as e:  # fixed: py2-only `except X, e`
            raise HTTPBadRequest(
                _("Invalid encoding in form data: %(msg)s",
                  msg=exception_to_unicode(e)))
        args.append((name, value))
    # Fixed: the parsed list was built but never returned.
    return args
def __init__(self, env, path, params, log,
             persistent_cache=False,
             git_bin='git', git_fs_encoding='utf-8',
             shortrev_len=7, rlookup_uid=lambda _: None,
             use_committer_id=False,
             use_committer_time=False,
             ):
    """Open the Git repository at *path*.

    Raises InvalidRepository when *path* cannot be opened by PyGIT.
    """
    self.env = env
    self.params = params
    self.gitrepo = path
    self.logger = log
    self.persistent_cache = persistent_cache
    # Clamp the abbreviated-hash length into the usable SHA-1 range.
    self.shortrev_len = max(4, min(shortrev_len, 40))
    self.rlookup_uid = rlookup_uid
    self.use_committer_id = use_committer_id
    self.use_committer_time = use_committer_time

    try:
        storage = PyGIT.StorageFactory(path, log, not persistent_cache,
                                       git_bin=git_bin,
                                       git_fs_encoding=git_fs_encoding)
        self._git = storage.getInstance()
    except PyGIT.GitError as err:
        log.error(exception_to_unicode(err))
        raise InvalidRepository(
            _("%(path)s does not appear to be a Git repository.",
              path=path))

    Repository.__init__(self, 'git:' + path, self.params, log)
    self._cached_git_id = str(self.id)
def render_admin_panel(self, req, cat, page, version):
    """Handle the plugin's admin panel: persist posted options and
    redirect back to the panel."""
    self.log.debug("cat: %s page: %s", cat, page)
    req.perm.require('TRAC_ADMIN')
    options = ('api_base_url', 'api_token', 'room_id',
               'only_owner_changed', 'notify_symbol',
               'api_token_field_name')
    self.log.debug("method: %s", req.method)
    if req.method == 'POST':
        for option in options:
            self.config.set(SECTION_NAME, option, req.args.get(option))
        try:
            self.config.save()
            self.log.debug('config saved.')
            add_notice(req, 'Your changes have been saved.')
        except Exception as e:  # fixed: py2-only `except Exception, e`
            self.log.error("Error writing to trac.ini: %s",
                           exception_to_unicode(e))
            add_warning(req, 'Error writing to trac.ini.')
        req.redirect(req.href.admin(cat, page))
def _close_mr(self, ticket_id, proj_id, mr_id, resolution):
    """Close GitLab merge request *mr_id* via the API and leave a
    comment pointing back at the closed Trac ticket."""
    if not self.gitlab_api_token:
        self.log.warn(
            "GitLab API token not configured; GitLab webhook can't "
            "update the downstream merge request")
        return
    self.log.debug('Trying to close merge request {} since ticket {} '
                   'was closed.'.format(mr_id, ticket_id))
    api_url = '{}/api/v4/projects/{}/merge_requests/{}'.format(
        self.gitlab_url.rstrip('/'), proj_id, mr_id)
    auth_headers = {'Private-Token': self.gitlab_api_token}
    try:
        response = requests.put(api_url, data={'state_event': 'close'},
                                headers=auth_headers, timeout=10)
    except Exception as exc:
        self.log.error(
            'Error updating merge request: {}'.format(
                exception_to_unicode(exc, True)))
        return
    note = ("Downstream ticket [Trac#{}]({}) was closed as {}, so I "
            "closed this merge request. If you feel this was in error "
            "feel free to reopen.".format(
                ticket_id, self.env.abs_href.ticket(ticket_id),
                resolution))
    self._post_comment_to_mr(proj_id, mr_id, note)
    self.log.info('Successfully closed merge request {}'.format(mr_id))
def safe_wiki_to_html(context, text):
    """Render wiki *text* to HTML, falling back to a preformatted block
    when rendering fails (uses `self` from the enclosing scope)."""
    try:
        return format_to_html(self.env, context, text)
    except Exception as e:  # fixed: py2-only `except Exception, e`
        self.log.error('Unable to render component documentation: %s',
                       exception_to_unicode(e, traceback=True))
        return tag.pre(text)
def _do_delete(self, req, milestone):
    """Delete *milestone*, retargeting its tickets and notifying users.

    Notification failures are reported as warnings, never fatal.
    """
    req.perm(milestone.resource).require('MILESTONE_DELETE')

    retarget_to = req.args.get('target') or None
    # Don't translate ticket comment (comment:40:ticket:5658)
    retargeted_tickets = \
        milestone.move_tickets(retarget_to, req.authname,
                               "Ticket retargeted after milestone deleted")
    milestone.delete(author=req.authname)
    add_notice(req, _('The milestone "%(name)s" has been deleted.',
                      name=milestone.name))
    if retargeted_tickets:
        add_notice(req, _('The tickets associated with milestone '
                          '"%(name)s" have been retargeted to milestone '
                          '"%(retarget)s".', name=milestone.name,
                          retarget=retarget_to))
        new_values = {'milestone': retarget_to}
        comment = _("Tickets retargeted after milestone deleted")
        tn = BatchTicketNotifyEmail(self.env)
        try:
            tn.notify(retargeted_tickets, new_values, comment, None,
                      req.authname)
        except Exception as e:  # fixed: py2-only `except Exception, e`
            self.log.error("Failure sending notification on ticket batch "
                           "change: %s", exception_to_unicode(e))
            add_warning(req, tag_("The changes have been saved, but an "
                                  "error occurred while sending "
                                  "notifications: %(message)s",
                                  message=to_unicode(e)))
def daemonize(pidfile=None, progname=None, stdin='/dev/null',
              stdout='/dev/null', stderr='/dev/null', umask=022):
    """Fork a daemon process."""
    # NOTE(review): only the pid-file validation is visible here; the
    # snippet appears truncated before the actual fork/redirection logic.
    if pidfile:
        # Check whether the pid file already exists and refers to a
        # still-running process.
        pidfile = os.path.abspath(pidfile)
        if os.path.exists(pidfile):
            with open(pidfile) as fileobj:
                try:
                    pid = int(fileobj.read())
                except ValueError:
                    sys.exit('Invalid pid in file %s\nPlease remove it to '
                             'proceed' % pidfile)
            try:
                # signal the process to see if it is still running
                os.kill(pid, 0)
                if not progname:
                    progname = os.path.basename(sys.argv[0])
                sys.exit('%s is already running with pid %s'
                         % (progname, pid))
            except OSError, e:
                # ESRCH means no such process: the pid file is stale and
                # we may proceed; anything else is a real error.
                if e.errno != errno.ESRCH:
                    raise
        # The pid file must be writable
        try:
            fileobj = open(pidfile, 'a+')
            fileobj.close()
        except IOError, e:
            from trac.util.text import exception_to_unicode
            sys.exit('Error writing to pid file: %s'
                     % exception_to_unicode(e))
def _get_session(self, req):
    """Return the request's `Session`, or a `FakeSession` when the real
    one cannot be retrieved."""
    try:
        return Session(self.env, req)
    except TracError as e:  # fixed: py2-only `except TracError, e`
        self.log.error("can't retrieve session: %s",
                       exception_to_unicode(e))
        return FakeSession()
def process_request(self, req):
    """Handle the email-verification page: require a login, and allow
    re-sending the verification notification."""
    if not req.session.authenticated:
        chrome.add_warning(req, tag_(
            "Please log in to finish email verification procedure."))
        req.redirect(req.href.login())
    if 'email_verification_token' not in req.session:
        chrome.add_notice(req, _("Your email is already verified."))
    elif req.method == 'POST' and 'resend' in req.args:
        try:
            AccountManager(self.env)._notify(
                'email_verification_requested',
                req.authname,
                req.session['email_verification_token']
            )
        except NotificationError as e:  # fixed: py2-only `except X, e`
            chrome.add_warning(req, _("Error raised while sending a "
                                      "change notification.")
                                    + _("You should "
                                        "report that issue to a Trac "
                                        "admin."))
            self.log.error('Unable to send verification notification: %s',
                           exception_to_unicode(e, traceback=True))
        else:
            # Fixed: the email address was passed as an extra positional
            # argument to add_notice() and thus never interpolated into
            # the message.
            chrome.add_notice(req, _("A notification email has been "
                                     "resent to <%s>.")
                                   % req.session.get('email'))
def __init__(self, path, params, log,
             persistent_cache=False,
             git_bin='git', git_fs_encoding='utf-8',
             shortrev_len=7, rlookup_uid=lambda _: None,
             use_committer_id=False,
             use_committer_time=False,
             ):
    """Open the Git repository at *path*; raises TracError when it
    cannot be opened by PyGIT."""
    self.logger = log
    self.gitrepo = path
    self.params = params
    # Clamp the abbreviated-hash length into the usable SHA-1 range.
    self.shortrev_len = max(4, min(shortrev_len, 40))
    self.rlookup_uid = rlookup_uid
    self.use_committer_time = use_committer_time
    self.use_committer_id = use_committer_id
    try:
        self.git = PyGIT.StorageFactory(path, log, not persistent_cache,
                                        git_bin=git_bin,
                                        git_fs_encoding=git_fs_encoding) \
                        .getInstance()
    except PyGIT.GitError as e:  # fixed: py2-only `except X, e`
        log.error(exception_to_unicode(e))
        raise TracError("%s does not appear to be a Git "
                        "repository." % path)
def __init__(self, path, authz, log, options={}):
    """Open the Subversion repository at *path* with *authz* rules.

    NOTE(review): the mutable default `options={}` is shared across
    calls; kept for interface compatibility — safe only while callers
    never mutate it.
    """
    self.log = log
    self.options = options
    self.pool = Pool()
    # Remove any trailing slash or else subversion might abort
    if isinstance(path, unicode):
        self.path = path
        path_utf8 = path.encode("utf-8")
    else:
        # note that this should usually not happen (unicode arg expected)
        self.path = to_unicode(path)
        path_utf8 = self.path.encode("utf-8")
    path_utf8 = os.path.normpath(path_utf8).replace("\\", "/")
    root_path_utf8 = repos.svn_repos_find_root_path(path_utf8,
                                                    self.pool())
    if root_path_utf8 is None:
        raise TracError(_("%(path)s does not appear to be a Subversion "
                          "repository.", path=to_unicode(path_utf8)))
    try:
        self.repos = repos.svn_repos_open(root_path_utf8, self.pool())
    except core.SubversionException as e:  # fixed: py2-only `except X, e`
        raise TracError(
            _(
                "Couldn't open Subversion repository %(path)s: "
                "%(svn_error)s",
                path=to_unicode(path_utf8),
                svn_error=exception_to_unicode(e),
            )
        )
def add_tickets(self, project, customerrequest, tickets, reporter,
                notify=False):
    """Create one Trac ticket per entry of *tickets* in every trac
    environment attached to *project*, optionally sending notifications."""
    from trac.ticket.notification import TicketNotifyEmail
    from trac.util.text import exception_to_unicode
    from penelope.core.models.dashboard import User
    settings = get_current_registry().settings
    tracenvs = settings.get('penelope.trac.envs')
    request = get_current_request()
    for trac in project.tracs:
        for t in tickets:
            owner = DBSession.query(User).get(t['owner'])
            ticket = {'summary': t['summary'],
                      'description': t['description'],
                      'customerrequest': customerrequest.id,
                      'reporter': reporter.email,
                      'type': 'task',
                      'priority': 'major',
                      'milestone': 'Backlog',
                      'owner': owner.email,
                      'status': 'new'}
            tracenv = Environment('%s/%s' % (tracenvs, trac.trac_name))
            tracenv.abs_href.base = trac.api_uri
            # Fixed: the loop variable `t` was rebound to the Ticket
            # object, shadowing the source dict.
            new_ticket = Ticket(tracenv)
            new_ticket.populate(ticket)
            new_ticket.insert()
            if notify:
                try:
                    tn = TicketNotifyEmail(tracenv)
                    tn.notify(new_ticket, newticket=True)
                except Exception as e:  # fixed: py2-only `except X, e`
                    request.add_message(
                        'Failure sending notification on creation '
                        'of a ticket #%s: %s'
                        % (new_ticket.id, exception_to_unicode(e)),
                        'error')
def _provider_failure(self, exc, req, ep, current_filters, all_filters):
    """Raise a TracError exception explaining the failure of a provider.

    At the same time, the message will contain a link to the timeline
    without the filters corresponding to the guilty event provider `ep`.
    """
    ep_name, exc_name = [i.__class__.__name__ for i in (ep, exc)]
    self.log.error("Timeline event provider failed: %s",
                   exception_to_unicode(exc, traceback=True))

    # Fixed: query the provider once instead of twice, and guard against
    # a None return (which would have crashed the list comprehensions).
    provider_filters = ep.get_timeline_filters(req) or []
    guilty_filters = [f[0] for f in provider_filters]
    guilty_kinds = [f[1] for f in provider_filters]
    other_filters = [f for f in current_filters
                     if f not in guilty_filters]
    if not other_filters:
        other_filters = [f for f in all_filters
                         if f not in guilty_filters]
    args = [(a, req.args.get(a))
            for a in ("from", "format", "max", "daysback")]
    href = req.href.timeline(args + [(f, "on") for f in other_filters])
    raise TracError(
        tag(
            tag.p(
                ", ".join(guilty_kinds),
                " event provider (", tag.tt(ep_name), ") failed:",
                tag.br(), exc_name, ": ", to_unicode(exc),
                class_="message",
            ),
            tag.p("You may want to see the other kind of events from the ",
                  tag.a("Timeline", href=href)),
        )
    )
def backup(self, dest_file):
    """Build the pg_dump argument list for backing up the configured
    PostgreSQL database (snippet covers argument assembly only)."""
    from subprocess import Popen, PIPE
    db_url = self.env.config.get('trac', 'database')
    scheme, db_prop = _parse_db_str(db_url)
    db_params = db_prop.setdefault('params', {})
    db_name = os.path.basename(db_prop['path'])

    args = [self.pg_dump_path, '-C', '--inserts', '-x', '-Z', '8']
    if 'user' in db_prop:
        args.extend(['-U', db_prop['user']])
    if 'host' in db_params:
        host = db_params['host']
    else:
        host = db_prop.get('host')
    if host:
        args.extend(['-h', host])
        # A host containing '/' is a unix-socket directory: no port.
        if '/' not in host:
            args.extend(['-p', str(db_prop.get('port', '5432'))])

    if 'schema' in db_params:
        try:
            p = Popen([self.pg_dump_path, '--version'], stdout=PIPE,
                      close_fds=close_fds)
        except OSError as e:  # fixed: py2-only `except OSError, e`
            raise TracError(_("Unable to run %(path)s: %(msg)s",
                              path=self.pg_dump_path,
                              msg=exception_to_unicode(e)))
        # Need quote for -n (--schema) option in PostgreSQL 8.2+
        version = p.communicate()[0]
        if re.search(r' 8\.[01]\.', version):
            args.extend(['-n', db_params['schema']])
        else:
            args.extend(['-n', '"%s"' % db_params['schema']])
def expand_macro(self, formatter, name, content):
    """Execute `content` as a SQL statement against the Trac database.

    NOTE(review): `content` comes straight from wiki text and is
    executed unmodified — deliberate for a SQL-query macro, but it
    assumes the macro is restricted to trusted users; confirm the
    permission model before deploying.
    NOTE(review): nothing is returned on success — the snippet may be
    truncated before the result-rendering code.
    """
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    try:
        cursor.execute(content)
    except Exception, e:
        return system_message(_("Invalid SQL"), exception_to_unicode(e))
def post_process_request(self, req, template, data, content_type):
    """Kick off the email-verification procedure for authenticated
    users whose email address changed; returns the template triple."""
    if template is None or not req.session.authenticated:
        # Don't start the email verification procedure on anonymous users.
        return template, data, content_type

    email = req.session.get('email')
    # Only send verification if the user entered an email address.
    if self.verify_email and self.email_enabled is True and email and \
            email != req.session.get('email_verification_sent_to') and \
            'ACCTMGR_ADMIN' not in req.perm:
        req.session['email_verification_token'] = self._gen_token()
        req.session['email_verification_sent_to'] = email
        try:
            AccountManager(self.env)._notify(
                'email_verification_requested',
                req.authname,
                req.session['email_verification_token']
            )
        except NotificationError as e:  # fixed: py2-only `except X, e`
            chrome.add_warning(req, _(
                "Error raised while sending a change notification."
            ) + _("You should report that issue to a Trac admin."))
            self.log.error('Unable to send registration notification: %s',
                           exception_to_unicode(e, traceback=True))
        else:
            # TRANSLATOR: An email has been sent to <%(email)s>
            # with a token to ... (the link label for following message)
            link = tag.a(_("verify your new email address"),
                         href=req.href.verify_email())
            # TRANSLATOR: ... verify your new email address
            chrome.add_notice(req, tag_(
                "An email has been sent to <%(email)s> with a token to "
                "%(link)s.", email=tag(email), link=link))
    # Fixed: post_process_request must always return the triple; the
    # fall-through path previously returned None.
    return template, data, content_type
def backup(self, dest_file):
    """Launch pg_dump to back up the configured PostgreSQL database to
    `dest_file` + '.gz' (snippet covers process launch only)."""
    from subprocess import Popen, PIPE
    db_url = self.env.config.get('trac', 'database')
    scheme, db_prop = _parse_db_str(db_url)
    db_params = db_prop.setdefault('params', {})
    db_name = os.path.basename(db_prop['path'])

    args = [self.pg_dump_path, '-C', '--inserts', '-x', '-Z', '8']
    if 'user' in db_prop:
        args.extend(['-U', db_prop['user']])
    if 'host' in db_params:
        host = db_params['host']
    else:
        host = db_prop.get('host')
    if host:
        args.extend(['-h', host])
        # A host containing '/' is a unix-socket directory: no port.
        if '/' not in host:
            args.extend(['-p', str(db_prop.get('port', '5432'))])
    if 'schema' in db_params:
        args.extend(['-n', '"%s"' % db_params['schema']])

    dest_file += ".gz"
    args.extend(['-f', dest_file, db_name])

    environ = os.environ.copy()
    if 'password' in db_prop:
        environ['PGPASSWORD'] = str(db_prop['password'])
    try:
        p = Popen(args, env=environ, stderr=PIPE, close_fds=close_fds)
    except OSError as e:  # fixed: py2-only `except OSError, e`
        raise TracError(_("Unable to run %(path)s: %(msg)s",
                          path=self.pg_dump_path,
                          msg=exception_to_unicode(e)))
def __init__(self, path, params, log):
    """Open the Subversion repository at *path*; raises TracError when
    it is not a repository or cannot be opened."""
    self.log = log
    self.pool = Pool()
    # Remove any trailing slash or else subversion might abort
    if isinstance(path, unicode):
        path_utf8 = path.encode('utf-8')
    else:
        # note that this should usually not happen (unicode arg expected)
        path_utf8 = to_unicode(path).encode('utf-8')
    path_utf8 = core.svn_path_canonicalize(
        os.path.normpath(path_utf8).replace('\\', '/'))
    self.path = path_utf8.decode('utf-8')
    root_path_utf8 = repos.svn_repos_find_root_path(path_utf8,
                                                    self.pool())
    if root_path_utf8 is None:
        raise TracError(_("%(path)s does not appear to be a Subversion "
                          "repository.", path=to_unicode(path_utf8)))
    try:
        self.repos = repos.svn_repos_open(root_path_utf8, self.pool())
    except core.SubversionException as e:  # fixed: py2-only `except X, e`
        raise TracError(_("Couldn't open Subversion repository %(path)s: "
                          "%(svn_error)s", path=to_unicode(path_utf8),
                          svn_error=exception_to_unicode(e)))
def changeset_modified(self, repos, changeset, old_changeset):
    """Compile the configured ticket pattern; raises TracError with a
    diagnostic when `icucodetools.ticket_pattern` is not a valid regex."""
    try:
        self.ticket_match = re.compile(self.ticket_pattern)
    except Exception as e:  # fixed: py2-only `except Exception, e`
        found = self.env.config.get('icucodetools', 'ticket_pattern',
                                    'NoneFound')
        raise TracError(
            'Could not compile icucodetools.ticket_pattern=/%s/ but /%s/: %s'
            % (self.ticket_pattern, found,
               exception_to_unicode(e, traceback=True)))
def _render_view(self, req, id):
    """Retrieve the report results and pre-process them for rendering.

    Returns a ``(template, data, content_type)`` tuple, or redirects to
    the query module when the report is a saved custom query.  CSV/TAB
    output is sent directly via `_send_csv` (which does not return).
    """
    r = Report(self.env, id)
    title, description, sql = r.title, r.description, r.query

    # If this is a saved custom query, redirect to the query module
    #
    # A saved query is either an URL query (?... or query:?...),
    # or a query language expression (query:...).
    #
    # It may eventually contain newlines, for increased clarity.
    #
    query = ''.join(line.strip() for line in sql.splitlines())
    if query and (query[0] == '?' or query.startswith('query:?')):
        query = query if query[0] == '?' else query[6:]
        report_id = 'report=%s' % id
        if 'report=' in query:
            if report_id not in query:
                err = _('When specified, the report number should be '
                        '"%(num)s".', num=id)
                req.redirect(req.href.report(id, action='edit', error=err))
        else:
            if query[-1] != '?':
                query += '&'
            query += report_id
        req.redirect(req.href.query() + quote_query_string(query))
    elif query.startswith('query:'):
        from trac.ticket.query import Query, QuerySyntaxError
        try:
            query = Query.from_string(self.env, query[6:], report=id)
        except QuerySyntaxError as e:
            req.redirect(req.href.report(id, action='edit',
                                         error=to_unicode(e)))
        else:
            req.redirect(query.get_href(req.href))

    format = req.args.get('format')
    if format == 'sql':
        self._send_sql(req, id, title, description, sql)

    title = '{%i} %s' % (id, title)

    report_resource = Resource(self.realm, id)
    req.perm(report_resource).require('REPORT_VIEW')
    context = web_context(req, report_resource)

    # Paging parameters: the default page size depends on the output format.
    page = req.args.getint('page', 1)
    default_max = {'rss': self.items_per_page_rss,
                   'csv': 0, 'tab': 0}.get(format, self.items_per_page)
    max = req.args.getint('max')
    limit = as_int(max, default_max, min=0)  # explicit max takes precedence
    offset = (page - 1) * limit

    sort_col = req.args.get('sort', '')
    asc = req.args.getint('asc', 0, min=0, max=1)
    args = {}

    def report_href(**kwargs):
        """Generate links to this report preserving user variables,
        and sorting and paging variables.
        """
        params = args.copy()
        if sort_col:
            params['sort'] = sort_col
        if page != 1:
            params['page'] = page
        if max != default_max:
            params['max'] = max
        params.update(kwargs)
        params['asc'] = 1 if params.get('asc', asc) else None
        return req.href.report(id, params)

    data = {'action': 'view',
            'report': {'id': id, 'resource': report_resource},
            'context': context, 'title': title, 'description': description,
            'max': limit, 'args': args, 'show_args_form': False,
            'message': None, 'paginator': None,
            'report_href': report_href}

    # Substitute dynamic variables ($VAR) in title/description/SQL.
    try:
        args = self.get_var_args(req)
        sql = self.get_default_var_args(args, sql)
    except ValueError as e:
        data['message'] = _("Report failed: %(error)s", error=e)
        return 'report_view.html', data, None
    data.update({'args': args, 'title': sub_vars(title, args),
                 'description': sub_vars(description or '', args)})

    try:
        res = self.execute_paginated_report(req, id, sql, args, limit,
                                            offset)
    except TracError as e:
        data['message'] = _("Report failed: %(error)s", error=e)
    else:
        # A 2-tuple result means the SQL execution failed; it carries
        # the exception and the SQL that was attempted.
        if len(res) == 2:
            e, sql = res
            data['message'] = \
                tag_("Report execution failed: %(error)s %(sql)s",
                     error=tag.pre(exception_to_unicode(e)),
                     sql=tag(tag.hr(),
                             tag.pre(sql, style="white-space: pre")))
    if data['message']:
        return 'report_view.html', data, None

    cols, results, num_items, missing_args, limit_offset = res
    need_paginator = limit > 0 and limit_offset
    need_reorder = limit_offset is None
    results = [list(row) for row in results]
    numrows = len(results)

    paginator = None
    if need_paginator:
        paginator = Paginator(results, page - 1, limit, num_items)
        data['paginator'] = paginator
        if paginator.has_next_page:
            add_link(req, 'next', report_href(page=page + 1),
                     _('Next Page'))
        if paginator.has_previous_page:
            add_link(req, 'prev', report_href(page=page - 1),
                     _('Previous Page'))

        pagedata = []
        shown_pages = paginator.get_shown_pages(21)
        for p in shown_pages:
            pagedata.append([report_href(page=p), None, str(p),
                             _('Page %(num)d', num=p)])
        fields = ['href', 'class', 'string', 'title']
        paginator.shown_pages = [dict(zip(fields, p)) for p in pagedata]
        paginator.current_page = {'href': None, 'class': 'current',
                                  'string': str(paginator.page + 1),
                                  'title': None}
        numrows = paginator.num_items

    # Place retrieved columns in groups, according to naming conventions
    #  * _col_ means fullrow, i.e. a group with one header
    #  * col_ means finish the current group and start a new one
    field_labels = TicketSystem(self.env).get_ticket_field_labels()
    header_groups = [[]]
    for idx, col in enumerate(cols):
        if col in field_labels:
            title = field_labels[col]
        else:
            title = col.strip('_').capitalize()
        header = {
            'col': col,
            'title': title,
            'hidden': False,
            'asc': None,
        }

        if col == sort_col:
            if asc:
                data['asc'] = asc
            data['sort'] = sort_col
            header['asc'] = bool(asc)
            # When the DB did not apply LIMIT/OFFSET, the rows must be
            # re-sorted in Python.
            if not paginator and need_reorder:
                # this dict will have enum values for sorting
                # and will be used in sortkey(), if non-empty:
                sort_values = {}
                if sort_col in ('status', 'resolution', 'priority',
                                'severity'):
                    # must fetch sort values for that columns
                    # instead of comparing them as strings
                    with self.env.db_query as db:
                        for name, value in db(
                                "SELECT name, %s FROM enum WHERE type=%%s"
                                % db.cast('value', 'int'), (sort_col,)):
                            sort_values[name] = value

                def sortkey(row):
                    val = row[idx]
                    # check if we have sort_values, then use them as keys.
                    if sort_values:
                        return sort_values.get(val)
                    # otherwise, continue with string comparison:
                    if isinstance(val, basestring):
                        val = val.lower()
                    return val
                results = sorted(results, key=sortkey, reverse=not asc)

        header_group = header_groups[-1]

        if col.startswith('__') and col.endswith('__'):  # __col__
            header['hidden'] = True
        elif col[0] == '_' and col[-1] == '_':  # _col_
            header_group = []
            header_groups.append(header_group)
            header_groups.append([])
        elif col[0] == '_':  # _col
            header['hidden'] = True
        elif col[-1] == '_':  # col_
            header_groups.append([])
        header_group.append(header)

    # Structure the rows and cells:
    #  - group rows according to __group__ value, if defined
    #  - group cells the same way headers are grouped
    chrome = Chrome(self.env)
    row_groups = []
    authorized_results = []
    prev_group_value = None
    for row_idx, result in enumerate(results):
        col_idx = 0
        cell_groups = []
        row = {'cell_groups': cell_groups}
        realm = TicketSystem.realm
        parent_realm = ''
        parent_id = ''
        email_cells = []
        for header_group in header_groups:
            cell_group = []
            for header in header_group:
                value = cell_value(result[col_idx])
                cell = {'value': value, 'header': header,
                        'index': col_idx}
                col = header['col']
                col_idx += 1
                # Detect and create new group
                if col == '__group__' and value != prev_group_value:
                    prev_group_value = value
                    # Brute force handling of email in group by header
                    row_groups.append(
                        (value and chrome.format_author(req, value), []))
                # Other row properties
                row['__idx__'] = row_idx
                if col in self._html_cols:
                    row[col] = value
                if col in ('report', 'ticket', 'id', '_id'):
                    row['id'] = value
                # Special casing based on column name
                col = col.strip('_')
                if col in ('reporter', 'cc', 'owner'):
                    email_cells.append(cell)
                elif col == 'realm':
                    realm = value
                elif col == 'parent_realm':
                    parent_realm = value
                elif col == 'parent_id':
                    parent_id = value
                cell_group.append(cell)
            cell_groups.append(cell_group)
        if parent_realm:
            resource = Resource(realm, row.get('id'),
                                parent=Resource(parent_realm, parent_id))
        else:
            resource = Resource(realm, row.get('id'))
        # FIXME: for now, we still need to hardcode the realm in the action
        if resource.realm.upper() + '_VIEW' not in req.perm(resource):
            continue
        authorized_results.append(result)
        if email_cells:
            for cell in email_cells:
                emails = chrome.format_emails(context.child(resource),
                                              cell['value'])
                result[cell['index']] = cell['value'] = emails
        row['resource'] = resource
        if row_groups:
            row_group = row_groups[-1][1]
        else:
            row_group = []
            row_groups = [(None, row_group)]
        row_group.append(row)

    data.update({'header_groups': header_groups,
                 'row_groups': row_groups,
                 'numrows': numrows})

    if format == 'rss':
        data['context'] = web_context(req, report_resource, absurls=True)
        return 'report.rss', data, 'application/rss+xml'
    elif format == 'csv':
        filename = 'report_%s.csv' % id if id else 'report.csv'
        self._send_csv(req, cols, authorized_results,
                       mimetype='text/csv', filename=filename)
    elif format == 'tab':
        filename = 'report_%s.tsv' % id if id else 'report.tsv'
        self._send_csv(req, cols, authorized_results, '\t',
                       mimetype='text/tab-separated-values',
                       filename=filename)
    else:
        p = page if max is not None else None
        add_link(req, 'alternate',
                 auth_link(req, report_href(format='rss', page=None)),
                 _('RSS Feed'), 'application/rss+xml', 'rss')
        add_link(req, 'alternate', report_href(format='csv', page=p),
                 _('Comma-delimited Text'), 'text/plain')
        add_link(req, 'alternate', report_href(format='tab', page=p),
                 _('Tab-delimited Text'), 'text/plain')
        if 'REPORT_SQL_VIEW' in req.perm(self.realm, id):
            add_link(req, 'alternate',
                     req.href.report(id=id, format='sql'),
                     _('SQL Query'), 'text/plain')

        # reuse the session vars of the query module so that
        # the query navigation links on the ticket can be used to
        # navigate report results as well
        try:
            req.session['query_tickets'] = \
                ' '.join(str(int(row['id']))
                         for rg in row_groups for row in rg[1])
            # NOTE(review): duplicated assignment below — harmless but
            # looks accidental; verify before simplifying.
            req.session['query_href'] = \
                req.session['query_href'] = report_href()
            # Kludge: we have to clear the other query session
            # variables, but only if the above succeeded
            for var in ('query_constraints', 'query_time'):
                if var in req.session:
                    del req.session[var]
        except (ValueError, KeyError):
            pass
        if set(data['args']) - {'USER'}:
            data['show_args_form'] = True
        # Add values of all select-type ticket fields for autocomplete.
        fields = TicketSystem(self.env).get_ticket_fields()
        arg_values = {}
        for arg in set(data['args']) - {'USER'}:
            attrs = fields.by_name(arg.lower())
            if attrs and 'options' in attrs:
                arg_values[attrs['name']] = attrs['options']
        if arg_values:
            add_script_data(req, arg_values=arg_values)
            Chrome(self.env).add_jquery_ui(req)
        if missing_args:
            add_warning(req, _(
                'The following arguments are missing: %(args)s',
                args=", ".join(missing_args)))

    return 'report_view.html', data, None
def distribute(self, transport, recipients, event):
    """Format `event` and send it by email to each of `recipients`.

    `recipients` is an iterable of ``(sid, auth, addr, fmt)`` tuples.
    Only the ``'email'`` transport is handled; anything else is ignored.
    """
    if transport != 'email':
        return
    if not self.config.getbool('notification', 'smtp_enabled'):
        self.log.debug("%s skipped because smtp_enabled set to false",
                       self.__class__.__name__)
        return

    # Map each supported MIME style to the formatter that produces it
    # for this event's realm.
    formats = {}
    for f in self.formatters:
        for style, realm in f.get_supported_styles(transport):
            if realm == event.realm:
                formats[style] = f
    if not formats:
        self.log.error("%s No formats found for %s %s",
                       self.__class__.__name__, transport, event.realm)
        return
    self.log.debug("%s has found the following formats capable of "
                   "handling '%s' of '%s': %s", self.__class__.__name__,
                   transport, event.realm, ', '.join(formats))

    matcher = RecipientMatcher(self.env)
    notify_sys = NotificationSystem(self.env)
    always_cc = set(notify_sys.smtp_always_cc_list)
    # addresses: fmt -> set of resolved email addresses
    addresses = {}
    for sid, auth, addr, fmt in recipients:
        if fmt not in formats:
            self.log.debug("%s format %s not available for %s %s",
                           self.__class__.__name__, fmt, transport,
                           event.realm)
            continue

        # Resolve a missing address from the session, then fall back to
        # the session id itself for authenticated users.
        if sid and not addr:
            for resolver in self.resolvers:
                addr = resolver.get_address_for_session(sid, auth) or None
                if addr:
                    self.log.debug(
                        "%s found the address '%s' for '%s [%s]' via %s",
                        self.__class__.__name__, addr, sid, auth,
                        resolver.__class__.__name__)
                    break
        if sid and auth and not addr:
            addr = sid
        # Qualify a bare local-part with the configured default domain.
        if notify_sys.smtp_default_domain and \
                not notify_sys.use_short_addr and \
                addr and matcher.nodomaddr_re.match(addr):
            addr = '%s@%s' % (addr, notify_sys.smtp_default_domain)

        if not addr:
            self.log.debug("%s was unable to find an address for "
                           "'%s [%s]'", self.__class__.__name__, sid,
                           auth)
        elif matcher.is_email(addr) or \
                notify_sys.use_short_addr and \
                matcher.nodomaddr_re.match(addr):
            addresses.setdefault(fmt, set()).add(addr)
            # Replace a session id in always_cc by its resolved address.
            if sid and auth and sid in always_cc:
                always_cc.discard(sid)
                always_cc.add(addr)
            elif notify_sys.use_public_cc:
                always_cc.add(addr)
        else:
            self.log.debug(
                "%s was unable to use an address '%s' for '%s "
                "[%s]'", self.__class__.__name__, addr, sid, auth)

    # Format the event once per needed style; remember broken styles.
    outputs = {}
    failed = []
    for fmt, formatter in formats.iteritems():
        if fmt not in addresses and fmt != 'text/plain':
            continue
        try:
            outputs[fmt] = formatter.format(transport, fmt, event)
        except Exception as e:
            self.log.warning('%s caught exception while '
                             'formatting %s to %s for %s: %s%s',
                             self.__class__.__name__, event.realm, fmt,
                             transport, formatter.__class__,
                             exception_to_unicode(e, traceback=True))
            failed.append(fmt)

    # Fallback to text/plain when formatter is broken
    if failed and 'text/plain' in outputs:
        for fmt in failed:
            addresses.setdefault('text/plain', set()) \
                     .update(addresses.pop(fmt, ()))

    for fmt, addrs in addresses.iteritems():
        self.log.debug("%s is sending event as '%s' to: %s",
                       self.__class__.__name__, fmt, ', '.join(addrs))
        message = self._create_message(fmt, outputs)
        if message:
            addrs = set(addrs)
            # always_cc members go in Cc, everyone else in Bcc.
            cc_addrs = sorted(addrs & always_cc)
            bcc_addrs = sorted(addrs - always_cc)
            self._do_send(transport, event, message, cc_addrs, bcc_addrs)
        else:
            self.log.warning("%s cannot send event '%s' as '%s': %s",
                             self.__class__.__name__, event.realm, fmt,
                             ', '.join(addrs))
def dispatch(self, req):
    """Find a registered handler that matches the request and let
    it process it.

    In addition, this method initializes the data dictionary passed to
    the the template and adds the web site chrome.
    """
    self.log.debug('Dispatching %r', req)
    chrome = Chrome(self.env)

    try:
        # Select the component that should handle the request
        chosen_handler = None
        for handler in self._request_handlers.values():
            if handler.match_request(req):
                chosen_handler = handler
                break
        if not chosen_handler and req.path_info in ('', '/'):
            chosen_handler = self._get_valid_default_handler(req)
        # pre-process any incoming request, whether a handler
        # was found or not
        self.log.debug("Chosen handler is %s", chosen_handler)
        chosen_handler = self._pre_process_request(req, chosen_handler)
        if not chosen_handler:
            if req.path_info.endswith('/'):
                # Strip trailing / and redirect
                target = unicode_quote(req.path_info.rstrip('/'))
                if req.query_string:
                    target += '?' + req.query_string
                req.redirect(req.href + target, permanent=True)
            raise HTTPNotFound('No handler matched request to %s',
                               req.path_info)

        req.callbacks['chrome'] = partial(chrome.prepare_request,
                                          handler=chosen_handler)

        # Protect against CSRF attacks: we validate the form token
        # for all POST requests with a content-type corresponding
        # to form submissions
        if req.method == 'POST':
            ctype = req.get_header('Content-Type')
            if ctype:
                ctype, options = cgi.parse_header(ctype)
            if ctype in ('application/x-www-form-urlencoded',
                         'multipart/form-data') and \
                    req.args.get('__FORM_TOKEN') != req.form_token:
                if self.env.secure_cookies and req.scheme == 'http':
                    msg = _('Secure cookies are enabled, you must '
                            'use https to submit forms.')
                else:
                    msg = _('Do you have cookies enabled?')
                raise HTTPBadRequest(_('Missing or invalid form token.'
                                       ' %(msg)s', msg=msg))

        # Process the request and render the template
        resp = chosen_handler.process_request(req)
        if resp:
            resp = self._post_process_request(req, *resp)
            template, data, metadata, method = resp
            if 'hdfdump' in req.args:
                req.perm.require('TRAC_ADMIN')
                # debugging helper - no need to render first
                out = io.BytesIO()
                pprint({'template': template,
                        'metadata': metadata,
                        'data': data}, out)
                req.send(out.getvalue(), 'text/plain')
            self.log.debug("Rendering response with template %s",
                           template)
            iterable = chrome.use_chunked_encoding
            if isinstance(metadata, dict):
                iterable = metadata.setdefault('iterable', iterable)
                content_type = metadata.get('content_type')
            else:
                content_type = metadata
            output = chrome.render_template(req, template, data, metadata,
                                            iterable=iterable,
                                            method=method)
            # TODO (1.5.1) remove iterable and method parameters
            req.send(output, content_type or 'text/html')
        else:
            self.log.debug("Empty or no response from handler. "
                           "Entering post_process_request.")
            self._post_process_request(req)
    except RequestDone:
        # Normal way for handlers to terminate the request; never treat
        # it as an error.
        raise
    except Exception as e:
        # post-process the request in case of errors
        err = sys.exc_info()
        try:
            self._post_process_request(req)
        except RequestDone:
            raise
        except TracError as e2:
            self.log.warning("Exception caught while post-processing"
                             " request: %s", exception_to_unicode(e2))
        except Exception as e2:
            # Only log the secondary exception if it differs from the
            # original one, to avoid duplicate log entries.
            if not (type(e) is type(e2) and e.args == e2.args):
                self.log.error("Exception caught while post-processing"
                               " request: %s",
                               exception_to_unicode(e2, traceback=True))
        # Translate well-known Trac exceptions into HTTP errors.
        if isinstance(e, PermissionError):
            raise HTTPForbidden(e)
        if isinstance(e, ResourceNotFound):
            raise HTTPNotFound(e)
        if isinstance(e, NotImplementedError):
            tb = traceback.extract_tb(err[2])[-1]
            self.log.warning("%s caught from %s:%d in %s: %s",
                             e.__class__.__name__, tb[0], tb[1], tb[2],
                             to_unicode(e) or "(no message)")
            raise HTTPInternalServerError(TracNotImplementedError(e))
        if isinstance(e, TracError):
            raise HTTPInternalServerError(e)
        # Re-raise with the original traceback (Python 2 syntax).
        raise err[0], err[1], err[2]
def execute_paginated_report(self, req, id, sql, args, limit=0, offset=0):
    """Execute the report SQL with optional sorting and pagination.

    :param req: `Request` object.
    :param id: Integer id of the report.
    :param sql: SQL query that generates the report.
    :param args: SQL query arguments.
    :param limit: Maximum number of results to return (optional).
    :param offset: Offset to start of results (optional).

    Returns ``(cols, rows, num_items, missing_args, limit_offset)`` on
    success, or a 2-tuple ``(exception, sql)`` when execution failed.
    """
    sql, args, missing_args = self.sql_sub_vars(sql, args)
    if not sql:
        raise TracError(_("Report {%(num)s} has no SQL query.", num=id))
    self.log.debug('Report {%d} with SQL "%s"', id, sql)
    self.log.debug('Request args: %r', req.args)

    rows = None
    num_items = 0
    order_by = []
    limit_offset = None
    # base_sql: the report query with the rewrite placeholders neutralized,
    # used for counting rows and probing column names.
    base_sql = sql.replace(SORT_COLUMN, '1').replace(LIMIT_OFFSET, '')

    with self.env.db_query as db:
        cursor = db.cursor()
        if id == self.REPORT_LIST_ID or limit == 0:
            sql = base_sql
        else:
            # The number of tickets is obtained
            count_sql = 'SELECT COUNT(*) FROM (\n%s\n) AS tab' % base_sql
            self.log.debug("Report {%d} SQL (count): %s", id, count_sql)
            try:
                cursor.execute(count_sql, args)
            except Exception as e:
                self.log.warning('Exception caught while executing '
                                 'Report {%d}: %r, args %r%s', id,
                                 count_sql, args,
                                 exception_to_unicode(e, traceback=True))
                return e, count_sql
            num_items = cursor.fetchone()[0]

            # The column names are obtained
            colnames_sql = 'SELECT * FROM (\n%s\n) AS tab LIMIT 1' \
                           % base_sql
            self.log.debug("Report {%d} SQL (col names): %s", id,
                           colnames_sql)
            try:
                cursor.execute(colnames_sql, args)
            except Exception as e:
                # NOTE(review): the format string seems to have fewer
                # placeholders than arguments here — confirm against
                # the first warning above before changing.
                self.log.warning('Exception caught while executing '
                                 'Report {%d}: args %r%s', id,
                                 colnames_sql, args,
                                 exception_to_unicode(e, traceback=True))
                return e, colnames_sql
            cols = get_column_names(cursor)

            # The ORDER BY columns are inserted
            sort_col = req.args.get('sort', '')
            asc = req.args.getint('asc', 0, min=0, max=1)
            self.log.debug("%r %s (%s)", cols, sort_col,
                           '^' if asc else 'v')
            order_cols = []
            if sort_col and sort_col not in cols:
                raise TracError(_('Query parameter "sort=%(sort_col)s" '
                                  ' is invalid', sort_col=sort_col))
            skel = None
            if '__group__' in cols:
                order_cols.append('__group__')
            if sort_col:
                sort_col = '%s %s' % (db.quote(sort_col),
                                      'ASC' if asc else 'DESC')

            if SORT_COLUMN in sql:
                # Method 1: insert sort_col at specified position
                sql = sql.replace(SORT_COLUMN, sort_col or '1')
            elif sort_col:
                # Method 2: automagically insert sort_col (and __group__
                # before it, if __group__ was specified) as first criteria
                if '__group__' in cols:
                    order_by.append('__group__ ASC')
                order_by.append(sort_col)
                # is there already an ORDER BY in the original sql?
                skel = sql_skeleton(sql)
                before, after = split_sql(sql, _order_by_re, skel)
                if after:  # there were some other criteria, keep them
                    order_by.append(after)
                sql = ' '.join([before, 'ORDER BY', ', '.join(order_by)])

            # Add LIMIT/OFFSET if pagination needed
            limit_offset = ''
            if num_items > limit:
                limit_offset = ' '.join(['LIMIT', str(limit),
                                         'OFFSET', str(offset)])
            if LIMIT_OFFSET in sql:
                # Method 1: insert LIMIT/OFFSET at specified position
                sql = sql.replace(LIMIT_OFFSET, limit_offset)
            else:
                # Method 2: limit/offset is added unless already present
                skel = skel or sql_skeleton(sql)
                if 'LIMIT' not in skel.upper():
                    sql = ' '.join([sql, limit_offset])
            self.log.debug("Report {%d} SQL (order + limit): %s", id, sql)
        try:
            cursor.execute(sql, args)
        except Exception as e:
            self.log.warning('Exception caught while executing Report '
                             '{%d}: %r, args %r%s', id, sql, args,
                             exception_to_unicode(e, traceback=True))
            if order_by or limit_offset:
                add_notice(req, _("Hint: if the report failed due to"
                                  " automatic modification of the ORDER"
                                  " BY clause or the addition of"
                                  " LIMIT/OFFSET, please look up"
                                  " %(sort_column)s and %(limit_offset)s"
                                  " in TracReports to see how to gain"
                                  " complete control over report"
                                  " rewriting.",
                                  sort_column=SORT_COLUMN,
                                  limit_offset=LIMIT_OFFSET))
            return e, sql
        rows = cursor.fetchall() or []
        cols = get_column_names(cursor)
    return cols, rows, num_items, missing_args, limit_offset
        # NOTE(review): this chunk is an orphaned fragment of an older
        # version of _render_view — it begins mid-expression (the tail of
        # the `data` dict literal) and its enclosing `def` is not visible
        # here.  Kept as-is; confirm whether it is dead code that can be
        # removed.
        'paginator': None,
        'report_href': report_href,
    }

    with self.env.db_query as db:
        try:
            # Older signature: takes the db connection explicitly and
            # returns a 4-tuple (no limit_offset).
            cols, results, num_items, missing_args = \
                self.execute_paginated_report(req, db, id, sql, args,
                                              limit, offset)
            results = [list(row) for row in results]
            numrows = len(results)
        except Exception, e:
            data['message'] = tag_(
                'Report execution failed: %(error)s',
                error=tag.pre(exception_to_unicode(e, traceback=True)))
            return 'report_view.html', data, None

    paginator = None
    if limit > 0:
        paginator = Paginator(results, page - 1, limit, num_items)
        data['paginator'] = paginator
        if paginator.has_next_page:
            add_link(req, 'next', report_href(page=page + 1),
                     _('Next Page'))
        if paginator.has_previous_page:
            add_link(req, 'prev', report_href(page=page - 1),
                     _('Previous Page'))

        pagedata = []
        shown_pages = paginator.get_shown_pages(21)
def notify(self, event, reponame, revs):
    """Notify repositories and change listeners about repository events.

    The supported events are the names of the methods defined in the
    `IRepositoryChangeListener` interface.

    :return: a list of error messages; ``[...]`` with a single message
             when no matching repository is found.
    """
    self.log.debug("Event %s on repository '%s' for changesets %r",
                   event, reponame or '(default)', revs)

    # Notify a repository by name, and all repositories with the same
    # base, or all repositories by base or by repository dir
    repos = self.get_repository(reponame)
    repositories = []
    if repos:
        base = repos.get_base()
    else:
        dir = os.path.abspath(reponame)
        repositories = self.get_repositories_by_dir(dir)
        if repositories:
            base = None
        else:
            base = reponame
    if base:
        repositories = [r for r in self.get_real_repositories()
                        if r.get_base() == base]
    if not repositories:
        self.log.warn("Found no repositories matching '%s' base.",
                      base or reponame)
        return [_("Repository '%(repo)s' not found",
                  repo=reponame or _("(default)"))]

    errors = []
    for repos in sorted(repositories, key=lambda r: r.reponame):
        reponame = repos.reponame or '(default)'
        if reponame in self.repository_sync_per_request:
            self.log.warn("Repository '%s' should be removed from [trac] "
                          "repository_sync_per_request for explicit "
                          "synchronization", reponame)
        repos.sync()
        for rev in revs:
            args = []
            if event == 'changeset_modified':
                # Capture the old changeset before re-syncing it so the
                # listeners receive both versions.
                try:
                    old_changeset = repos.sync_changeset(rev)
                except NoSuchChangeset as e:  # was `except X, e:` — use `as`
                    errors.append(exception_to_unicode(e))
                    self.log.warn(
                        "No changeset '%s' found in repository '%s'. "
                        "Skipping subscribers for event %s",
                        rev, reponame, event)
                    continue
                else:
                    args.append(old_changeset)
            try:
                changeset = repos.get_changeset(rev)
            except NoSuchChangeset:
                # The changeset may simply not be synced yet; try once more
                # after an explicit sync.
                try:
                    repos.sync_changeset(rev)
                    changeset = repos.get_changeset(rev)
                except NoSuchChangeset as e:
                    errors.append(exception_to_unicode(e))
                    self.log.warn(
                        "No changeset '%s' found in repository '%s'. "
                        "Skipping subscribers for event %s",
                        rev, reponame, event)
                    continue
            self.log.debug("Event %s on repository '%s' for revision '%s'",
                           event, reponame, rev)
            for listener in self.change_listeners:
                getattr(listener, event)(repos, changeset, *args)
    # Return collected errors, consistent with the "not found" early
    # return above which also yields a list of messages.
    return errors
def send_internal_error(env, req, exc_info):
    """Render the "Internal Error" page for an unhandled exception.

    Builds traceback/plugin diagnostics (only shown to TRAC_ADMIN users)
    and a pre-filled bug-report description, then sends a 500 response.
    """
    if env:
        env.log.error("[%s] Internal Server Error: %r, referrer %r%s",
                      req.remote_addr, req,
                      req.environ.get('HTTP_REFERER'),
                      exception_to_unicode(exc_info[1], traceback=True))
    message = exception_to_unicode(exc_info[1])
    traceback = get_last_traceback()

    frames, plugins, faulty_plugins, interface_custom = [], [], [], []
    th = 'http://trac-hacks.org'
    has_admin = False
    try:
        has_admin = 'TRAC_ADMIN' in req.perm
    except Exception:
        # Permission check itself may fail during error handling;
        # fall back to the non-admin view.
        pass

    tracker = default_tracker
    tracker_args = {}
    # Detailed diagnostics are only collected for admins, and skipped on
    # MemoryError to avoid making things worse.
    if has_admin and not isinstance(exc_info[1], MemoryError):
        # Collect frame and plugin information
        frames = get_frame_info(exc_info[2])
        if env:
            plugins = [p for p in get_plugin_info(env)
                       if any(c['enabled']
                              for m in p['modules'].itervalues()
                              for c in m['components'].itervalues())]
            match_plugins_to_frames(plugins, frames)

        # Identify the tracker where the bug should be reported
        faulty_plugins = [p for p in plugins if 'frame_idx' in p]
        faulty_plugins.sort(key=lambda p: p['frame_idx'])
        if faulty_plugins:
            info = faulty_plugins[0]['info']
            home_page = info.get('home_page', '')
            if 'trac' in info:
                tracker = info['trac']
            elif urlparse(home_page).netloc == urlparse(th).netloc:
                tracker = th
                plugin_name = info.get('home_page', '').rstrip('/') \
                                  .split('/')[-1]
                tracker_args = {'component': plugin_name}
        interface_custom = Chrome(env).get_interface_customization_files()

    def get_description(_):
        # Build the wiki-formatted bug report body; `_` is either the
        # real translation function or an identity formatter, so this
        # can be rendered both localized and in English.
        if env and has_admin:
            sys_info = "".join("|| '''`%s`''' || `%s` ||\n"
                               % (k, (v.replace('\n', '` [[br]] `')
                                      if v else _('N/A')))
                               for k, v in env.system_info)
            sys_info += "|| '''`jQuery`''' || `#JQUERY#` ||\n" \
                        "|| '''`jQuery UI`''' || `#JQUERYUI#` ||\n" \
                        "|| '''`jQuery Timepicker`''' || `#JQUERYTP#` ||\n"
            enabled_plugins = "".join("|| '''`%s`''' || `%s` ||\n"
                                      % (p['name'],
                                         p['version'] or _('N/A'))
                                      for p in plugins)
            files = Chrome(env).get_interface_customization_files().items()
            interface_files = "".join(
                "|| **%s** || %s ||\n"
                % (k, ", ".join("`%s`" % f for f in v))
                for k, v in sorted(files))
        else:
            sys_info = _("''System information not available''\n")
            enabled_plugins = _("''Plugin information not available''\n")
            interface_files = _("''Interface customization information not "
                                "available''\n")
        return _("""\
==== How to Reproduce ====

While doing a %(method)s operation on `%(path_info)s`, Trac issued an internal error.

''(please provide additional details here)''

Request parameters:
{{{
%(req_args)s
}}}

User agent: `#USER_AGENT#`

==== System Information ====
%(sys_info)s
==== Enabled Plugins ====
%(enabled_plugins)s
==== Interface Customization ====
%(interface_customization)s
==== Python Traceback ====
{{{
%(traceback)s}}}""",
                 method=req.method, path_info=req.path_info,
                 req_args=pformat(req.args), sys_info=sys_info,
                 enabled_plugins=enabled_plugins,
                 interface_customization=interface_files,
                 traceback=to_unicode(traceback))

    # Generate the description once in English, once in the current locale
    description_en = get_description(lambda s, **kw: safefmt(s, kw))
    try:
        description = get_description(_)
    except Exception:
        description = description_en

    data = {'title': 'Internal Error',
            'type': 'internal', 'message': message,
            'traceback': traceback, 'frames': frames,
            'shorten_line': shorten_line, 'repr': safe_repr,
            'plugins': plugins, 'faulty_plugins': faulty_plugins,
            'interface': interface_custom,
            'tracker': tracker, 'tracker_args': tracker_args,
            'description': description, 'description_en': description_en}

    Chrome(env).add_jquery_ui(req)
    try:
        req.send_error(exc_info, status=500, env=env, data=data)
    except RequestDone:
        pass
def resync(self, log, db, repos, ourYoungest, theirYoungest):
    """Prepare a resync between revisions `ourYoungest` and
    `theirYoungest`, compiling the configured ticket pattern.

    Stores the compiled regex in `self.ticket_match`.

    :raises TracError: if `self.ticket_pattern` is not a valid regular
                       expression.
    """
    # Lazy %-style logging args, consistent with the rest of the file.
    self.log.info('resync: ourYoungest=%d theirYoungest=%d',
                  ourYoungest, theirYoungest)
    if ourYoungest < 0:
        # start at rev 1
        ourYoungest = 1
    try:
        self.ticket_match = re.compile(self.ticket_pattern)
    except Exception as e:  # was `except Exception, e:` — use `as`
        found = self.env.config.get('icucodetools', 'ticket_pattern',
                                    'NoneFound')
        raise TracError('Could not compile icucodetools.ticket_pattern=/%s/ but /%s/: %s'
                        % (self.ticket_pattern, found,
                           exception_to_unicode(e, traceback=True)))
class EnvironmentAdmin(Component):
    """trac-admin command provider for environment administration."""

    implements(IAdminCommandProvider)

    # IAdminCommandProvider methods

    def get_admin_commands(self):
        """Yield the (command, args, help, complete, execute) tuples."""
        yield ('deploy', '<directory>',
               'Extract static resources from Trac and all plugins',
               None, self._do_deploy)
        yield ('hotcopy', '<backupdir>',
               'Make a hot backup copy of an environment',
               None, self._do_hotcopy)
        yield ('upgrade', '',
               'Upgrade database to current version',
               None, self._do_upgrade)

    def _do_deploy(self, dest):
        """Copy static resources and generate CGI/FCGI/WSGI scripts
        into `dest`.
        """
        target = os.path.normpath(dest)
        chrome_target = os.path.join(target, 'htdocs')
        script_target = os.path.join(target, 'cgi-bin')

        # Copy static content
        makedirs(target, overwrite=True)
        makedirs(chrome_target, overwrite=True)
        from trac.web.chrome import Chrome
        printout(_("Copying resources from:"))
        for provider in Chrome(self.env).template_providers:
            paths = list(provider.get_htdocs_dirs() or [])
            if not len(paths):
                continue
            printout(' %s.%s' % (provider.__module__,
                                 provider.__class__.__name__))
            for key, root in paths:
                source = os.path.normpath(root)
                printout(' ', source)
                if os.path.exists(source):
                    dest = os.path.join(chrome_target, key)
                    copytree(source, dest, overwrite=True)

        # Create and copy scripts
        makedirs(script_target, overwrite=True)
        printout(_("Creating scripts."))
        data = {'env': self.env, 'executable': sys.executable}
        for script in ('cgi', 'fcgi', 'wsgi'):
            dest = os.path.join(script_target, 'trac.' + script)
            template = Chrome(self.env).load_template(
                'deploy_trac.' + script, 'text')
            stream = template.generate(**data)
            # `with open(...)` instead of the deprecated `file()` +
            # try/finally; behavior is identical.
            with open(dest, 'w') as out:
                stream.render('text', out=out)

    def _do_hotcopy(self, dest):
        """Copy the environment to `dest` while holding a database lock.

        :raises TracError: if `dest` already exists.
        """
        if os.path.exists(dest):
            raise TracError(_("hotcopy can't overwrite existing "
                              "'%(dest)s'", dest=dest))
        import shutil

        # Bogus statement to lock the database while copying files
        cnx = self.env.get_db_cnx()
        cursor = cnx.cursor()
        cursor.execute("UPDATE system SET name=NULL WHERE name IS NULL")

        try:
            printout(_('Hotcopying %(src)s to %(dst)s ...',
                       src=self.env.path, dst=dest))
            db_str = self.env.config.get('trac', 'database')
            prefix, db_path = db_str.split(':', 1)
            if prefix == 'sqlite':
                # don't copy the journal (also, this would fail on Windows)
                db = os.path.join(self.env.path, os.path.normpath(db_path))
                skip = [db + '-journal', db + '-stmtjrnl']
            else:
                skip = []
            try:
                copytree(self.env.path, dest, symlinks=1, skip=skip)
                retval = 0
            except shutil.Error as e:  # was `except X, e:` — use `as`
                retval = 1
                printerr(_('The following errors happened while copying '
                           'the environment:'))
                for (src, dst, err) in e.args[0]:
                    if src in err:
                        printerr(' %s' % err)
                    else:
                        printerr(" %s: '%s'" % (err, src))
        finally:
            # Unlock database
            cnx.rollback()

        printout(_("Hotcopy done."))
        return retval

    def _do_upgrade(self, no_backup=None):
        """Upgrade the environment database, backing it up first unless
        '-b'/'--no-backup' is given.
        """
        if no_backup not in (None, '-b', '--no-backup'):
            raise AdminCommandError(_("Invalid arguments"),
                                    show_usage=True)

        if not self.env.needs_upgrade():
            printout(_("Database is up to date, no upgrade necessary."))
            return

        try:
            self.env.upgrade(backup=no_backup is None)
        except TracError as e:
            raise TracError(_("Backup failed: %(msg)s.\nUse '--no-backup' to "
                              "upgrade without doing a backup.",
                              msg=unicode(e)))

        # Remove wiki-macros if it is empty and warn if it isn't
        wiki_macros = os.path.join(self.env.path, 'wiki-macros')
        try:
            entries = os.listdir(wiki_macros)
        except OSError:
            # Directory doesn't exist — nothing to clean up.
            pass
        else:
            if entries:
                printerr(_("Warning: the wiki-macros directory in the "
                           "environment is non-empty, but Trac\n"
                           "doesn't load plugins from there anymore. "
                           "Please remove it by hand."))
            else:
                try:
                    os.rmdir(wiki_macros)
                except OSError as e:
                    printerr(_("Error while removing wiki-macros: %(err)s\n"
                               "Trac doesn't load plugins from wiki-macros "
                               "anymore. Please remove it by hand.",
                               err=exception_to_unicode(e)))
def _render_repository_index(self, context, all_repositories, order, desc):
    """Build the data dict for the repository index view.

    :param order: one of 'date', 'author' or anything else (name order)
    :param desc: reverse the ordering when truthy
    :return: dict with 'repositories' (list of 6-tuples), 'timerange'
             and 'colorize_age' keys.
    """
    # Color scale for the age column
    timerange = custom_colorizer = None
    if self.color_scale:
        custom_colorizer = self.get_custom_colorizer()

    rm = RepositoryManager(self.env)
    repositories = []
    for reponame, repoinfo in all_repositories.iteritems():
        if not reponame or as_bool(repoinfo.get('hidden')):
            continue
        try:
            repos = rm.get_repository(reponame)
        except TracError as err:
            entry = (reponame, repoinfo, None, None,
                     exception_to_unicode(err), None)
        else:
            if repos:
                if not repos.is_viewable(context.perm):
                    continue
                try:
                    youngest = repos.get_changeset(repos.youngest_rev)
                except NoSuchChangeset:
                    youngest = None
                if self.color_scale and youngest:
                    if not timerange:
                        timerange = TimeRange(youngest.date)
                    else:
                        timerange.insert(youngest.date)
                raw_href = self._get_download_href(context.href, repos,
                                                   None, None)
                entry = (reponame, repoinfo, repos, youngest, None,
                         raw_href)
            else:
                entry = (reponame, repoinfo, None, None, u"\u2013", None)
        if entry[4] is not None:  # Check permission in case of error
            root = Resource('repository', reponame).child(self.realm, '/')
            if 'BROWSER_VIEW' not in context.perm(root):
                continue
        repositories.append(entry)

    # Ordering of repositories.
    # Unpack the entry tuple in the function body instead of using
    # Python-2-only tuple parameters (removed by PEP 3113); behavior is
    # identical.
    if order == 'date':
        def repo_order(entry):
            reponame, repoinfo, repos, youngest, err, href = entry
            return (youngest.date if youngest else to_datetime(0),
                    embedded_numbers(reponame.lower()))
    elif order == 'author':
        def repo_order(entry):
            reponame, repoinfo, repos, youngest, err, href = entry
            return (youngest.author.lower() if youngest else '',
                    embedded_numbers(reponame.lower()))
    else:
        def repo_order(entry):
            reponame, repoinfo, repos, youngest, err, href = entry
            return embedded_numbers(reponame.lower())

    repositories = sorted(repositories, key=repo_order, reverse=desc)

    return {'repositories': repositories,
            'timerange': timerange, 'colorize_age': custom_colorizer}
def save_milestone(self, req, milestone):
    """Validate and persist milestone changes submitted in `req`.

    Returns `True` on success.  On constraint violations, warnings are
    queued on the request and `False` is returned so the caller can
    redisplay the edit form.
    """
    # Instead of raising one single error, check all the constraints and
    # let the user fix them by going back to edit mode showing the warnings
    warnings = []

    def warn(msg):
        add_warning(req, msg)
        warnings.append(msg)

    milestone.description = req.args.get('description', '')

    if 'due' in req.args:
        duedate = req.args.get('duedate')
        milestone.due = user_time(req, parse_date, duedate,
                                  hint='datetime') \
                        if duedate else None
    else:
        milestone.due = None

    # -- check completed date
    if 'completed' in req.args:
        completed = req.args.get('completeddate', '')
        completed = user_time(req, parse_date, completed,
                              hint='datetime') if completed else None
        if completed and completed > datetime.now(utc):
            warn(_('Completion date may not be in the future'))
    else:
        completed = None
    milestone.completed = completed

    # -- check the name
    # If the name has changed, check that the milestone doesn't already
    # exist
    # FIXME: the whole .exists business needs to be clarified
    #        (#4130) and should behave like a WikiPage does in
    #        this respect.
    new_name = req.args.get('name')
    try:
        new_milestone = Milestone(self.env, new_name)
    except ResourceNotFound:
        milestone.name = new_name
    else:
        if new_milestone.name != milestone.name:
            if new_milestone.name:
                warn(_('Milestone "%(name)s" already exists, please '
                       'choose another name.', name=new_milestone.name))
            else:
                warn(_("You must provide a name for the milestone."))

    if warnings:
        return False

    # -- actually save changes
    if milestone.exists:
        milestone.update(author=req.authname)
        # Optionally move still-open tickets off the completed milestone
        if completed and 'retarget' in req.args:
            comment = req.args.get('comment', '')
            retarget_to = req.args.get('target') or None
            retargeted_tickets = \
                milestone.move_tickets(retarget_to, req.authname,
                                       comment, exclude_closed=True)
            add_notice(req, _('The open tickets associated with '
                              'milestone "%(name)s" have been retargeted '
                              'to milestone "%(retarget)s".',
                              name=milestone.name, retarget=retarget_to))
            new_values = {'milestone': retarget_to}
            comment = comment or \
                      _("Open tickets retargeted after milestone closed")
            event = BatchTicketChangeEvent(retargeted_tickets, None,
                                           req.authname, comment,
                                           new_values, None)
            try:
                NotificationSystem(self.env).notify(event)
            except Exception as e:
                # Notification failure must not abort the save itself
                self.log.error("Failure sending notification on ticket "
                               "batch change: %s", exception_to_unicode(e))
                add_warning(req, tag_("The changes have been saved, but "
                                      "an error occurred while sending "
                                      "notifications: %(message)s",
                                      message=to_unicode(e)))
        add_notice(req, _("Your changes have been saved."))
    else:
        milestone.insert()
        add_notice(req, _('The milestone "%(name)s" has been added.',
                          name=milestone.name))
    return True
def _move(self, new_realm=None, new_id=None, new_filename=None,
          new_parent_must_exist=False):
    """Move the attachment, changing one or more of its parent realm,
    parent id and filename.

    :since: 1.3.2
    """
    if not self.filename:
        raise TracError(_("Cannot rename non-existent attachment"))

    # Default each unspecified target component to the current value
    if new_realm is None:
        new_realm = self.parent_realm
    new_id = self.parent_id if new_id is None else unicode(new_id)
    if new_filename is None:
        new_filename = self.filename
    if (new_realm, new_id, new_filename) == \
            (self.parent_realm, self.parent_id, self.filename):
        raise TracError(_("Attachment not modified"))
    new_path = self._get_path(self.env.attachments_dir, new_realm,
                              new_id, new_filename)
    new_title = '%s:%s: %s' % (new_realm, new_id, new_filename)

    # The target parent must exist (or at least its realm must be known)
    if new_parent_must_exist:
        new_parent_resource = Resource(new_realm, new_id)
        if not resource_exists(self.env, new_parent_resource):
            raise ResourceNotFound(
                _("%(target)s doesn't exist, can't move attachment",
                  target=get_resource_name(self.env,
                                           new_parent_resource)))
    elif new_realm not in ResourceSystem(self.env).get_known_realms():
        raise ResourceNotFound(
            _("%(target)s doesn't exist, can't move attachment",
              target=new_realm))

    if not self._is_valid_path(new_path):
        raise TracError(_('Cannot move attachment "%(att)s" as "%(title)s" '
                          'is invalid', att=self.filename,
                          title=new_title))
    if os.path.exists(new_path):
        raise TracError(_('Cannot move attachment "%(att)s" to "%(title)s" '
                          'as it already exists', att=self.filename,
                          title=new_title))

    # Update the database row and rename the file inside one transaction,
    # so a failed rename rolls the row change back
    with self.env.db_transaction as db:
        db("""UPDATE attachment SET type=%s, id=%s, filename=%s
              WHERE type=%s AND id=%s AND filename=%s
              """, (new_realm, new_id, new_filename, self.parent_realm,
                    self.parent_id, self.filename))
        dirname = os.path.dirname(new_path)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        path = self.path
        if os.path.isfile(path):
            try:
                os.rename(path, new_path)
            except OSError as e:
                self.env.log.error("Failed to move attachment file %s: %s",
                                   path,
                                   exception_to_unicode(e, traceback=True))
                raise TracError(_('Could not move attachment "%(title)s"',
                                  title=self.title))

    # Only update instance state after the transaction committed
    old_realm = self.parent_realm
    old_id = self.parent_id
    old_filename = self.filename
    self.parent_realm = new_realm
    self.parent_id = new_id
    self.filename = new_filename
    self.env.log.info("Attachment moved: %s", self.title)

    reparented = old_realm != new_realm or old_id != new_id
    for listener in AttachmentModule(self.env).change_listeners:
        if hasattr(listener, 'attachment_moved'):
            listener.attachment_moved(self, old_realm, old_id,
                                      old_filename)
        if reparented and hasattr(listener, 'attachment_reparented'):
            listener.attachment_reparented(self, old_realm, old_id)
if e.code == 403 and req.authname == 'anonymous': req.chrome['notices'].append( Markup( _( 'You are currently not logged in. You may want to ' '<a href="%(href)s">do so</a> now.', href=req.href.login()))) try: req.send_error(sys.exc_info(), status=e.code, env=env, data=data) except RequestDone: pass except Exception, e: if env: env.log.error("Internal Server Error: %s", exception_to_unicode(e, traceback=True)) exc_info = sys.exc_info() try: message = "%s: %s" % (e.__class__.__name__, to_unicode(e)) traceback = get_last_traceback() frames = [] has_admin = False try: has_admin = 'TRAC_ADMIN' in req.perm except Exception, e: pass if has_admin and not isinstance(e, MemoryError): tb = exc_info[2] while tb:
def replace(self, old_uid, new_uid):
    """Replace `old_uid` by `new_uid` in all user-bearing ticket columns
    (owner, reporter, cc, and ticket_change author/oldvalue/newvalue).

    Returns a dict mapping `(table, column, constraint)` to the number of
    changed rows, or `dict(error=...)` describing the first failure.
    """
    results = {}
    with self.env.db_transaction as db:
        # 'owner' and 'reporter' columns are handled by the base class
        self.column = 'owner'
        result = super(TicketUserIdChanger, self).\
                 replace(old_uid, new_uid)
        if 'error' in result:
            return result
        results.update(result)

        self.column = 'reporter'
        result = super(TicketUserIdChanger, self).\
                 replace(old_uid, new_uid)
        if 'error' in result:
            return result
        results.update(result)

        # Replace user ID in Cc ticket column.
        result = 0
        for row in db("""
                SELECT id,cc FROM ticket WHERE cc %s
                """ % db.like(),
                ('%' + db.like_escape(old_uid) + '%',)):
            cc = _get_cc_list(row[1])
            # Rewrite every occurrence of the old uid within the Cc list
            for i in [i for i, r in enumerate(cc) if r == old_uid]:
                cc[i] = new_uid
            try:
                db("UPDATE ticket SET cc=%s WHERE id=%s",
                   (', '.join(cc), int(row[0])))
                result += 1
            except _get_db_exc(self.env), e:
                result = exception_to_unicode(e)
                msg = 'failed: %s' \
                      % exception_to_unicode(e, traceback=True)
                self.log.debug(
                    self.msg(old_uid, new_uid, self.table, 'cc',
                             result=msg))
                return dict(error={(self.table, 'cc', None): result})
        self.log.debug(
            self.msg(old_uid, new_uid, self.table, 'cc',
                     result='%s time(s)' % result))
        results.update({(self.table, 'cc', None): result})

        table = 'ticket_change'
        self.column = 'author'
        self.table = table
        result = super(TicketUserIdChanger, self).replace(old_uid,
                                                          new_uid)
        if 'error' in result:
            return result
        results.update(result)

        # Human-readable tag used only in the result/log keys
        constraint = "field='owner'|'reporter'"
        for column in ('oldvalue', 'newvalue'):
            for count, in db("""
                    SELECT COUNT(*) FROM %s
                    WHERE %s=%%s
                    AND (field='owner' OR field='reporter')
                    """ % (table, column), (old_uid,)):
                result = int(count)
            if result:
                try:
                    db("""
                        UPDATE %s SET %s=%%s
                        WHERE %s=%%s
                        AND (field='owner' OR field='reporter')
                        """ % (table, column, column),
                       (new_uid, old_uid))
                except _get_db_exc(self.env), e:
                    result = exception_to_unicode(e)
                    msg = 'failed: %s' % \
                          exception_to_unicode(e, traceback=True)
                    self.log.debug(
                        self.msg(old_uid, new_uid, table, column,
                                 constraint, result=msg))
                    return dict(
                        error={(self.table, column, constraint): result})
                self.log.debug(
                    self.msg(old_uid, new_uid, table, column, constraint,
                             result='%s time(s)' % result))
                results.update({(table, column, constraint): result})
def _render_admin_panel(self, req, cat, page, path_info):
    """Render the admin panel for one ticket enum type (priority, ...).

    With `path_info` set, shows or saves the detail view of a single
    enum value; otherwise handles the list view (add, remove, reorder,
    set/clear default).
    """
    label = [gettext(each) for each in self._label]
    data = {'label_singular': label[0], 'label_plural': label[1],
            'type': self._type}

    # Detail view?
    if path_info:
        enum = self._enum_cls(self.env, path_info)
        if req.method == 'POST':
            if req.args.get('save'):
                enum.name = name = req.args.get('name')
                try:
                    enum.update()
                except self.env.db_exc.IntegrityError:
                    raise TracError(_('%(type)s value "%(name)s" already '
                                      'exists', type=label[0], name=name))
                add_notice(req, _("Your changes have been saved."))
                req.redirect(req.href.admin(cat, page))
            elif req.args.get('cancel'):
                req.redirect(req.href.admin(cat, page))
        data.update({'view': 'detail', 'enum': enum})
    else:
        default = self.config.get('ticket', 'default_%s' % self._type)
        if req.method == 'POST':
            # Add enum
            if req.args.get('add') and req.args.get('name'):
                name = req.args.get('name')
                try:
                    # ResourceNotFound means the name is still free
                    enum = self._enum_cls(self.env, name=name)
                except ResourceNotFound:
                    enum = self._enum_cls(self.env)
                    enum.name = name
                    enum.insert()
                    add_notice(req, _('The %(field)s value "%(name)s" '
                                      'has been added.',
                                      field=label[0], name=name))
                    req.redirect(req.href.admin(cat, page))
                else:
                    if enum.name is None:
                        raise TracError(_("Invalid %(type)s value.",
                                          type=label[0]))
                    raise TracError(_('%(type)s value "%(name)s" already '
                                      'exists', type=label[0], name=name))

            # Remove enums
            elif req.args.get('remove'):
                sel = req.args.getlist('sel')
                if not sel:
                    raise TracError(_("No %s selected") % self._type)
                with self.env.db_transaction:
                    for name in sel:
                        self._enum_cls(self.env, name).delete()
                        # Clear the default if it was just removed
                        if name == default:
                            self.config.set('ticket',
                                            'default_%s' % self._type, '')
                            self.config.save()
                add_notice(req, _("The selected %(field)s values have "
                                  "been removed.", field=label[0]))
                req.redirect(req.href.admin(cat, page))

            # Apply changes
            elif req.args.get('apply'):
                changed = False

                # Set default value
                name = req.args.get('default')
                if name and name != default:
                    self.log.info("Setting default %s to %s",
                                  self._type, name)
                    self.config.set('ticket',
                                    'default_%s' % self._type, name)
                    try:
                        self.config.save()
                        changed = True
                    except Exception as e:
                        self.log.error("Error writing to trac.ini: %s",
                                       exception_to_unicode(e))
                        add_warning(req,
                                    _("Error writing to trac.ini, make "
                                      "sure it is writable by the web "
                                      "server. The default value has not "
                                      "been saved."))

                # Change enum values: map old order number -> new one,
                # taken from the 'value_<n>' form fields
                order = {str(int(key[6:])): str(req.args.getint(key))
                         for key in req.args
                         if key.startswith('value_')}
                values = {val: True for val in order.values()}
                if len(order) != len(values):
                    raise TracError(_("Order numbers must be unique"))
                with self.env.db_transaction:
                    for enum in self._enum_cls.select(self.env):
                        new_value = order[enum.value]
                        if new_value != enum.value:
                            enum.value = new_value
                            enum.update()
                            changed = True

                if changed:
                    add_notice(req, _("Your changes have been saved."))
                req.redirect(req.href.admin(cat, page))

            # Clear default
            elif req.args.get('clear'):
                self.log.info("Clearing default %s", self._type)
                self.config.set('ticket', 'default_%s' % self._type, '')
                self._save_config(req)
                req.redirect(req.href.admin(cat, page))

        data.update(dict(enums=list(self._enum_cls.select(self.env)),
                         default=default, view='list'))

    Chrome(self.env).add_jquery_ui(req)
    add_script(req, 'common/js/admin_enums.js')
    return 'admin_enums.html', data
def render(self, context, mimetype, content, filename=None, url=None,
           annotations=None, force_source=False):
    """Render an XHTML preview of the given `content`.

    `content` is the same as an `IHTMLPreviewRenderer.render`'s
    `content` argument.

    The specified `mimetype` will be used to select the most appropriate
    `IHTMLPreviewRenderer` implementation available for this MIME type.
    If not given, the MIME type will be infered from the filename or the
    content.

    Return a string containing the XHTML text.

    When rendering with an `IHTMLPreviewRenderer` fails, a warning is
    added to the request associated with the context (if any), unless
    the `disable_warnings` hint is set to `True`.
    """
    if not content:
        return ''
    if not isinstance(context, RenderingContext):
        raise TypeError("RenderingContext expected (since 0.11)")

    # Ensure we have a MIME type for this content
    full_mimetype = mimetype
    if not full_mimetype:
        if hasattr(content, 'read'):
            content = content.read(self.max_preview_size)
        full_mimetype = self.get_mimetype(filename, content)
    if full_mimetype:
        mimetype = ct_mimetype(full_mimetype)  # split off charset
    else:
        mimetype = full_mimetype = 'text/plain'  # fallback if not binary

    # Determine candidate `IHTMLPreviewRenderer`s, best quality first
    candidates = []
    for renderer in self.renderers:
        qr = renderer.get_quality_ratio(mimetype)
        if qr > 0:
            candidates.append((qr, renderer))
    candidates.sort(lambda x, y: cmp(y[0], x[0]))

    # Wrap file-like object so that it can be read multiple times
    if hasattr(content, 'read'):
        content = Content(content, self.max_preview_size)

    # First candidate which renders successfully wins.
    # Also, we don't want to expand tabs more than once.
    expanded_content = None
    for qr, renderer in candidates:
        if force_source and not getattr(renderer, 'returns_source',
                                        False):
            continue  # skip non-source renderers in force_source mode
        if isinstance(content, Content):
            content.reset()
        try:
            ann_names = ', '.join(annotations) if annotations else \
                        'no annotations'
            self.log.debug('Trying to render HTML preview using %s [%s]',
                           renderer.__class__.__name__, ann_names)

            # check if we need to perform a tab expansion
            rendered_content = content
            if getattr(renderer, 'expand_tabs', False):
                if expanded_content is None:
                    content = content_to_unicode(self.env, content,
                                                 full_mimetype)
                    expanded_content = content.expandtabs(self.tab_width)
                rendered_content = expanded_content

            result = renderer.render(context, full_mimetype,
                                     rendered_content, filename, url)
            if not result:
                continue

            if not (force_source or getattr(renderer, 'returns_source',
                                            False)):
                # Direct rendering of content
                if isinstance(result, basestring):
                    if not isinstance(result, unicode):
                        result = to_unicode(result)
                    return Markup(to_unicode(result))
                elif isinstance(result, Fragment):
                    return result.generate()
                else:
                    return result

            # Render content as source code
            if annotations:
                m = context.req.args.get('marks') if context.req \
                    else None
                return self._render_source(context, result, annotations,
                                           m and Ranges(m))
            else:
                if isinstance(result, list):
                    result = Markup('\n').join(result)
                return tag.div(class_='code')(tag.pre(result)).generate()
        except Exception, e:
            # A broken renderer must not kill the preview: log, warn the
            # user, then fall through to the next candidate
            self.log.warning('HTML preview using %s failed: %s',
                             renderer.__class__.__name__,
                             exception_to_unicode(e, traceback=True))
            if context.req and not context.get_hint('disable_warnings'):
                from trac.web.chrome import add_warning
                add_warning(context.req,
                            _("HTML preview using %(renderer)s failed "
                              "(%(err)s)",
                              renderer=renderer.__class__.__name__,
                              err=exception_to_unicode(e)))
class PostgreSQLConnector(Component):
    """Database connector for PostgreSQL.

    Database URLs should be of the form:
    {{{
    postgres://user[:password]@host[:port]/database[?schema=my_schema]
    }}}
    """
    implements(IDatabaseConnector)

    # Path of the pg_dump executable used by backup()
    pg_dump_path = Option('trac', 'pg_dump_path', 'pg_dump',
        """Location of pg_dump for Postgres database backups""")

    def __init__(self):
        self._version = None
        self.error = None

    def get_supported_schemes(self):
        # Negative priority advertises the scheme as known-but-broken
        if not has_psycopg:
            self.error = _("Cannot load Python bindings for PostgreSQL")
        yield ('postgres', -1 if self.error else 1)

    def get_connection(self, path, log=None, user=None, password=None,
                       host=None, port=None, params={}):
        """Create a `PostgreSQLConnection`, registering the psycopg2
        version in the system info on first use."""
        cnx = PostgreSQLConnection(path, log, user, password, host, port,
                                   params)
        if not self._version:
            self._version = get_pkginfo(psycopg).get('version',
                                                     psycopg.__version__)
            self.env.systeminfo.append(('psycopg2', self._version))
            self.required = True
        return cnx

    def get_exceptions(self):
        # psycopg exposes the DB-API exception classes as attributes
        return psycopg

    def init_db(self, path, schema=None, log=None, user=None,
                password=None, host=None, port=None, params={}):
        """Create (if needed) the schema and all tables of a new
        database."""
        cnx = self.get_connection(path, log, user, password, host, port,
                                  params)
        cursor = cnx.cursor()
        if cnx.schema:
            cursor.execute('CREATE SCHEMA "%s"' % cnx.schema)
            cursor.execute('SET search_path TO %s', (cnx.schema,))
        if schema is None:
            from trac.db_default import schema
        for table in schema:
            for stmt in self.to_sql(table):
                cursor.execute(stmt)
        cnx.commit()

    def to_sql(self, table):
        """Yield the CREATE TABLE / CREATE INDEX statements for
        `table`."""
        sql = ['CREATE TABLE "%s" (' % table.name]
        coldefs = []
        for column in table.columns:
            ctype = column.type
            ctype = _type_map.get(ctype, ctype)
            if column.auto_increment:
                ctype = 'SERIAL'
            # Single-column key becomes an inline PRIMARY KEY ...
            if len(table.key) == 1 and column.name in table.key:
                ctype += ' PRIMARY KEY'
            coldefs.append(' "%s" %s' % (column.name, ctype))
        # ... a composite key becomes a named table constraint
        if len(table.key) > 1:
            coldefs.append(' CONSTRAINT "%s_pk" PRIMARY KEY ("%s")'
                           % (table.name, '","'.join(table.key)))
        sql.append(',\n'.join(coldefs) + '\n)')
        yield '\n'.join(sql)
        for index in table.indices:
            unique = 'UNIQUE' if index.unique else ''
            yield 'CREATE %s INDEX "%s_%s_idx" ON "%s" ("%s")' % \
                  (unique, table.name, '_'.join(index.columns),
                   table.name, '","'.join(index.columns))

    def alter_column_types(self, table, columns):
        """Yield SQL statements altering the type of one or more columns
        of a table.

        Type changes are specified as a `columns` dict mapping column
        names to `(from, to)` SQL type tuples.
        """
        alterations = []
        for name, (from_, to) in sorted(columns.iteritems()):
            to = _type_map.get(to, to)
            if to != _type_map.get(from_, from_):
                alterations.append((name, to))
        if alterations:
            yield "ALTER TABLE %s %s" % (table,
                ', '.join("ALTER COLUMN %s TYPE %s" % each
                          for each in alterations))

    def backup(self, dest_file):
        """Dump the database to `dest_file` + '.gz' using pg_dump."""
        from subprocess import Popen, PIPE
        db_url = self.env.config.get('trac', 'database')
        scheme, db_prop = _parse_db_str(db_url)
        db_params = db_prop.setdefault('params', {})
        db_name = os.path.basename(db_prop['path'])

        args = [self.pg_dump_path, '-C', '--inserts', '-x', '-Z', '8']
        if 'user' in db_prop:
            args.extend(['-U', db_prop['user']])
        if 'host' in db_params:
            host = db_params['host']
        else:
            host = db_prop.get('host')
        if host:
            args.extend(['-h', host])
            # A host containing '/' is a Unix-socket directory: no port
            if '/' not in host:
                args.extend(['-p', str(db_prop.get('port', '5432'))])

        if 'schema' in db_params:
            try:
                p = Popen([self.pg_dump_path, '--version'], stdout=PIPE,
                          close_fds=close_fds)
            except OSError, e:
                raise TracError(_("Unable to run %(path)s: %(msg)s",
                                  path=self.pg_dump_path,
                                  msg=exception_to_unicode(e)))
            # Need quote for -n (--schema) option in PostgreSQL 8.2+
            version = p.communicate()[0]
            if re.search(r' 8\.[01]\.', version):
                args.extend(['-n', db_params['schema']])
            else:
                args.extend(['-n', '"%s"' % db_params['schema']])

        dest_file += ".gz"
        args.extend(['-f', dest_file, db_name])

        environ = os.environ.copy()
        # Pass the password via the environment, not the command line
        if 'password' in db_prop:
            environ['PGPASSWORD'] = str(db_prop['password'])
        try:
            p = Popen(args, env=environ, stderr=PIPE, close_fds=close_fds)
        except OSError, e:
            raise TracError(_("Unable to run %(path)s: %(msg)s",
                              path=self.pg_dump_path,
                              msg=exception_to_unicode(e)))
import re import unittest try: # Note: if trac/mimeview/tests is in sys.path, then the absolute # import will try to load this pygments.py file again, which is bad. import os, sys dir = os.path.dirname(__file__) if dir in sys.path: sys.path.remove(dir) import pygments have_pygments = True except ImportError, e: from trac.util.text import exception_to_unicode print exception_to_unicode(e, True) have_pygments = False from trac.mimeview.api import LineNumberAnnotator, Mimeview if have_pygments: from trac.mimeview.pygments import PygmentsRenderer from trac.test import EnvironmentStub, MockRequest from trac.util import get_pkginfo from trac.web.chrome import Chrome, web_context from trac.wiki.formatter import format_to_html if have_pygments: pygments_version = parse_version(get_pkginfo(pygments).get('version')) class PygmentsRendererTestCase(unittest.TestCase):
def distribute(self, transport, recipients, event):
    """Format and send the notification `event` by email to
    `recipients`, a list of `(sid, authed, address, format)` tuples."""
    if transport != 'email':
        return
    if not self.config.getbool('notification', 'smtp_enabled'):
        self.log.debug("EmailDistributor smtp_enabled set to false")
        return

    # Map each supported MIME style to the formatter providing it
    formats = {}
    for f in self.formatters:
        for style, realm in f.get_supported_styles(transport):
            if realm == event.realm:
                formats[style] = f
    if not formats:
        self.log.error("EmailDistributor No formats found for %s %s",
                       transport, event.realm)
        return
    self.log.debug("EmailDistributor has found the following formats "
                   "capable of handling '%s' of '%s': %s", transport,
                   event.realm, ', '.join(formats.keys()))

    notify_sys = NotificationSystem(self.env)
    always_cc = set(notify_sys.smtp_always_cc_list)
    use_public_cc = notify_sys.use_public_cc
    addresses = {}
    for sid, authed, addr, fmt in recipients:
        if fmt not in formats:
            self.log.debug("EmailDistributor format %s not available "
                           "for %s %s", fmt, transport, event.realm)
            continue

        # Resolve a session id into an email address when none was given
        if sid and not addr:
            for resolver in self.resolvers:
                addr = resolver.get_address_for_session(sid, authed)
                if addr:
                    status = 'authenticated' if authed else \
                             'not authenticated'
                    self.log.debug("EmailDistributor found the address "
                                   "'%s' for '%s (%s)' via %s", addr,
                                   sid, status,
                                   resolver.__class__.__name__)
                    break

        if addr:
            addresses.setdefault(fmt, set()).add(addr)
            if use_public_cc or sid and sid in always_cc:
                always_cc.add(addr)
        else:
            status = 'authenticated' if authed else 'not authenticated'
            self.log.debug("EmailDistributor was unable to find an "
                           "address for: %s (%s)", sid, status)

    outputs = {}
    failed = []
    for fmt, formatter in formats.iteritems():
        # text/plain is always rendered: it is the fallback format
        if fmt not in addresses and fmt != 'text/plain':
            continue
        try:
            outputs[fmt] = formatter.format(transport, fmt, event)
        except Exception as e:
            self.log.warn('EmailDistributor caught exception while '
                          'formatting %s to %s for %s: %s%s',
                          event.realm, fmt, transport,
                          formatter.__class__,
                          exception_to_unicode(e, traceback=True))
            failed.append(fmt)

    # Fallback to text/plain when formatter is broken
    if failed and 'text/plain' in outputs:
        for fmt in failed:
            addresses.setdefault('text/plain', set()) \
                     .update(addresses.pop(fmt, ()))

    for fmt, addrs in addresses.iteritems():
        self.log.debug("EmailDistributor is sending event as '%s' to: "
                       "%s", fmt, ', '.join(addrs))
        message = self._create_message(fmt, outputs)
        if message:
            addrs = set(addrs)
            # Addresses on the always-cc list go in Cc, the rest in Bcc
            cc_addrs = sorted(addrs & always_cc)
            bcc_addrs = sorted(addrs - always_cc)
            self._do_send(transport, event, message, cc_addrs, bcc_addrs)
        else:
            self.log.warn("EmailDistributor cannot send event '%s' as "
                          "'%s': %s", event.realm, fmt, ', '.join(addrs))
def test_without_traceback(self):
    """exception_to_unicode() without a traceback renders the exception
    as "<Type>: <message>"."""
    rendered = None
    try:
        raise ValueError('test')
    except ValueError as exc:
        rendered = exception_to_unicode(exc)
    self.assertEqual('ValueError: test', rendered)
def _save_ticket_changes(self, req, selected_tickets, new_values, comment,
                         action):
    """Save changes to tickets.

    Validates the shared comment and each updated ticket with the
    registered manipulators; on any validation failure, warnings are
    queued on `req` and nothing is saved.  Otherwise all tickets are
    updated in one transaction, action side effects are applied and a
    batch-change notification is sent.
    """
    valid = True
    for manipulator in self.ticket_manipulators:
        if hasattr(manipulator, 'validate_comment'):
            for message in manipulator.validate_comment(req, comment):
                valid = False
                add_warning(req, tag_("The ticket comment is invalid: "
                                      "%(message)s", message=message))

    tickets = []
    for id_ in selected_tickets:
        t = Ticket(self.env, id_)
        values = self._get_updated_ticket_values(req, t, new_values)
        for ctlr in self._get_action_controllers(req, t, action):
            values.update(ctlr.get_ticket_changes(req, t, action))
        t.populate(values)
        for manipulator in self.ticket_manipulators:
            for field, message in manipulator.validate_ticket(req, t):
                valid = False
                if field:
                    add_warning(req, tag_("The ticket field %(field)s is "
                                          "invalid: %(message)s",
                                          field=tag.strong(field),
                                          message=message))
                else:
                    add_warning(req, message)
        tickets.append(t)
    if not valid:
        return

    when = datetime_now(utc)
    with self.env.db_transaction:
        for t in tickets:
            t.save_changes(req.authname, comment, when=when)
            for ctlr in self._get_action_controllers(req, t, action):
                ctlr.apply_action_side_effects(req, t, action)

    event = BatchTicketChangeEvent(selected_tickets, when, req.authname,
                                   comment, new_values, action)
    try:
        NotificationSystem(self.env).notify(event)
    except Exception as e:
        # Fix: the original split this string as "ticket batch" +
        # "change: %s", logging "batchchange" without a space.
        self.log.error("Failure sending notification on ticket batch "
                       "change: %s", exception_to_unicode(e))
        add_warning(req, tag_("The changes have been saved, but an error "
                              "occurred while sending notifications: "
                              "%(message)s", message=to_unicode(e)))
def render_admin_panel(self, req, cat, page, path_info):
    """Render and process the "Logging" admin panel.

    GET shows the current logging configuration.  POST validates the
    submitted type/file/level, test-creates a logger to prove the
    configuration works, saves any changes to trac.ini and redirects
    back to the panel.
    """
    log_type = self.env.log_type
    log_level = self.env.log_level
    log_file = self.env.log_file
    log_dir = self.env.log_dir

    log_types = [
        dict(name='none', label=_("None"),
             selected=log_type == 'none', disabled=False),
        dict(name='stderr', label=_("Console"),
             selected=log_type == 'stderr', disabled=False),
        dict(name='file', label=_("File"),
             selected=log_type == 'file', disabled=False),
        dict(name='syslog', label=_("Syslog"),
             selected=log_type in ('unix', 'syslog'),
             disabled=os.name != 'posix'),
        dict(name='eventlog', label=_("Windows event log"),
             selected=log_type in ('winlog', 'eventlog', 'nteventlog'),
             disabled=os.name != 'nt'),
    ]

    if req.method == 'POST':
        changed = False

        new_type = req.args.get('log_type')
        if new_type not in [t['name'] for t in log_types]:
            raise TracError(_("Unknown log type %(type)s", type=new_type),
                            _("Invalid log type"))

        new_file = req.args.get('log_file', log_file)
        if not new_file:
            raise TracError(_("You must specify a log file"),
                            _("Missing field"))

        new_level = req.args.get('log_level', log_level)
        if new_level not in _valid_log_levels:
            raise TracError(_("Unknown log level %(level)s",
                              level=new_level),
                            _("Invalid log level"))

        # Create logger to be sure the configuration is valid.
        new_file_path = new_file
        if not os.path.isabs(new_file_path):
            new_file_path = os.path.join(self.env.log_dir, new_file)
        try:
            logger, handler = \
                self.env.create_logger(new_type, new_file_path, new_level,
                                       self.env.log_format)
        except Exception as e:
            add_warning(req,
                        tag_("Changes not saved. Logger configuration "
                             "error: %(error)s. Inspect the log for more "
                             "information.",
                             error=tag.code(exception_to_unicode(e))))
            self.log.error("Logger configuration error: %s",
                           exception_to_unicode(e, traceback=True))
        else:
            handler.close()
            if new_type != log_type:
                self.config.set('logging', 'log_type', new_type)
                changed = True
                log_type = new_type
            if new_level != log_level:
                self.config.set('logging', 'log_level', new_level)
                changed = True
                log_level = new_level
            if new_file != log_file:
                self.config.set('logging', 'log_file', new_file)
                changed = True
                log_file = new_file
            if changed:
                # Fix: the original statement ended with a stray comma,
                # wrapping the call in a discarded 1-tuple.
                _save_config(self.config, req, self.log)
            req.redirect(req.href.admin(cat, page))

    # Order log levels by priority value, with aliases excluded.
    all_levels = sorted(log.LOG_LEVEL_MAP, key=log.LOG_LEVEL_MAP.get,
                        reverse=True)
    log_levels = [level for level in all_levels
                  if level in log.LOG_LEVELS]
    log_level = LOG_LEVEL_ALIASES_MAP.get(log_level, log_level)

    data = {
        'type': log_type, 'types': log_types,
        'level': log_level, 'levels': log_levels,
        'file': log_file, 'dir': log_dir
    }
    return 'admin_logging.html', {'log': data}
def send_internal_error(env, req, exc_info):
    """Send an "internal server error" page describing `exc_info`.

    For TRAC_ADMIN users (except on MemoryError, where introspection is
    unsafe) the page also includes traceback frames, plugin information
    and a pre-filled bug report targeted at the most likely faulty
    plugin's tracker.
    """
    if env:
        env.log.error("Internal Server Error: %s",
                      exception_to_unicode(exc_info[1], traceback=True))
    message = exception_to_unicode(exc_info[1])
    traceback = get_last_traceback()

    frames, plugins, faulty_plugins = [], [], []
    th = 'http://trac-hacks.org'
    has_admin = False
    try:
        has_admin = 'TRAC_ADMIN' in req.perm
    except Exception:
        # Permission system itself may be broken here; assume no admin
        pass

    tracker = default_tracker
    if has_admin and not isinstance(exc_info[1], MemoryError):
        # Collect frame and plugin information
        frames = get_frame_info(exc_info[2])
        if env:
            plugins = [p for p in get_plugin_info(env)
                       if any(c['enabled']
                              for m in p['modules'].itervalues()
                              for c in m['components'].itervalues())]
            match_plugins_to_frames(plugins, frames)

        # Identify the tracker where the bug should be reported
        faulty_plugins = [p for p in plugins if 'frame_idx' in p]
        faulty_plugins.sort(key=lambda p: p['frame_idx'])
        if faulty_plugins:
            info = faulty_plugins[0]['info']
            if 'trac' in info:
                tracker = info['trac']
            elif info.get('home_page', '').startswith(th):
                tracker = th

    def get_description(_):
        # Build the wiki-formatted bug report body using the given
        # translation function `_`
        if env and has_admin:
            sys_info = "".join("|| '''`%s`''' || `%s` ||\n"
                               % (k, v.replace('\n', '` [[br]] `'))
                               for k, v in env.get_systeminfo())
            sys_info += "|| '''`jQuery`''' || `#JQUERY#` ||\n"
            enabled_plugins = \
                "".join("|| '''`%s`''' || `%s` ||\n"
                        % (p['name'], p['version'] or _('N/A'))
                        for p in plugins)
        else:
            sys_info = _("''System information not available''\n")
            enabled_plugins = _("''Plugin information not available''\n")
        return _("""\
==== How to Reproduce ====

While doing a %(method)s operation on `%(path_info)s`, Trac issued an
internal error.

''(please provide additional details here)''

Request parameters:
{{{
%(req_args)s
}}}

User agent: `#USER_AGENT#`

==== System Information ====
%(sys_info)s
==== Enabled Plugins ====
%(enabled_plugins)s
==== Python Traceback ====
{{{
%(traceback)s}}}""",
                 method=req.method, path_info=req.path_info,
                 req_args=pformat(req.args), sys_info=sys_info,
                 enabled_plugins=enabled_plugins,
                 traceback=to_unicode(traceback))

    # Generate the description once in English, once in the current locale
    description_en = get_description(lambda s, **kw: safefmt(s, kw))
    try:
        description = get_description(_)
    except Exception:
        # Fall back to English if the locale catalog is broken
        description = description_en

    data = {'title': 'Internal Error',
            'type': 'internal', 'message': message,
            'traceback': traceback, 'frames': frames,
            'shorten_line': shorten_line, 'repr': safe_repr,
            'plugins': plugins, 'faulty_plugins': faulty_plugins,
            'tracker': tracker,
            'description': description,
            'description_en': description_en}

    try:
        req.send_error(exc_info, status=500, env=env, data=data)
    except RequestDone:
        pass
except Exception, e: env_error = e run_once = environ['wsgi.run_once'] req = None if env_error is None: try: req = bootstrap.create_request(env, environ, start_response) \ if env is not None else Request(environ, start_response) except Exception, e: log = environ.get('wsgi.errors') if log: log.write("[FAIL] [Trac] Entry point '%s' " "Method 'create_request' Reason %s" % (bootstrap_ep, repr(exception_to_unicode(e)))) if req is None: req = RequestWithSession(environ, start_response) translation.make_activable(lambda: req.locale, env.path if env else None) try: return _dispatch_request(req, env, env_error) finally: translation.deactivate() if env and not run_once: env.shutdown(threading._get_ident()) # Now it's a good time to do some clean-ups # # Note: enable the '##' lines as soon as there's a suspicion # of memory leak due to uncollectable objects (typically # objects with a __del__ method caught in a cycle) #
def dispatch(self, req): """Find a registered handler that matches the request and let it process it. In addition, this method initializes the HDF data set and adds the web site chrome. """ self.log.debug('Dispatching %r', req) chrome = Chrome(self.env) # Setup request callbacks for lazily-evaluated properties req.callbacks.update({ 'authname': self.authenticate, 'chrome': chrome.prepare_request, 'hdf': self._get_hdf, 'perm': self._get_perm, 'session': self._get_session, 'locale': self._get_locale, 'tz': self._get_timezone, 'form_token': self._get_form_token }) try: try: # Select the component that should handle the request chosen_handler = None try: for handler in self.handlers: if handler.match_request(req): chosen_handler = handler break if not chosen_handler: if not req.path_info or req.path_info == '/': chosen_handler = self.default_handler # pre-process any incoming request, whether a handler # was found or not chosen_handler = self._pre_process_request( req, chosen_handler) except TracError, e: raise HTTPInternalError(e) if not chosen_handler: if req.path_info.endswith('/'): # Strip trailing / and redirect target = req.path_info.rstrip('/').encode('utf-8') if req.query_string: target += '?' 
+ req.query_string req.redirect(req.href + target, permanent=True) raise HTTPNotFound('No handler matched request to %s', req.path_info) req.callbacks['chrome'] = partial(chrome.prepare_request, handler=chosen_handler) # Protect against CSRF attacks: we validate the form token # for all POST requests with a content-type corresponding # to form submissions if req.method == 'POST': ctype = req.get_header('Content-Type') if ctype: ctype, options = cgi.parse_header(ctype) if ctype in ('application/x-www-form-urlencoded', 'multipart/form-data') and \ req.args.get('__FORM_TOKEN') != req.form_token: if self.env.secure_cookies and req.scheme == 'http': msg = _('Secure cookies are enabled, you must ' 'use https to submit forms.') else: msg = _('Do you have cookies enabled?') raise HTTPBadRequest( _('Missing or invalid form token.' ' %(msg)s', msg=msg)) # Process the request and render the template resp = chosen_handler.process_request(req) if resp: if len(resp) == 2: # Clearsilver chrome.populate_hdf(req) template, content_type = \ self._post_process_request(req, *resp) # Give the session a chance to persist changes req.session.save() req.display(template, content_type or 'text/html') else: # Genshi template, data, content_type = \ self._post_process_request(req, *resp) if 'hdfdump' in req.args: req.perm.require('TRAC_ADMIN') # debugging helper - no need to render first out = StringIO() pprint(data, out) req.send(out.getvalue(), 'text/plain') else: output = chrome.render_template( req, template, data, content_type) # Give the session a chance to persist changes req.session.save() req.send(output, content_type or 'text/html') else: self._post_process_request(req) except RequestDone: raise except: # post-process the request in case of errors err = sys.exc_info() try: self._post_process_request(req) except RequestDone: raise except Exception, e: self.log.error( "Exception caught while post-processing" " request: %s", exception_to_unicode(e, traceback=True)) raise err[0], 
err[1], err[2]
def get_cnx(self, connector, kwargs, timeout=None):
    """Get a pooled database connection for the calling thread.

    Returns a `PooledConnection` wrapper.  Raises `TimeoutError` when no
    connection could be obtained, and re-raises a `TracError` that
    occurred while (re)establishing a deferred connection.
    """
    cnx = None
    log = kwargs.get('log')
    key = unicode(kwargs)
    start = time_now()
    tid = get_thread_id()
    # Get a Connection, either directly or a deferred one
    with self._available:
        # First choice: Return the same cnx already used by the thread
        if (tid, key) in self._active:
            cnx, num = self._active[(tid, key)]
            num += 1
        else:
            if self._waiters == 0:
                cnx = self._take_cnx(connector, kwargs, key, tid)
            if not cnx:
                # Queue behind other waiters until a cnx is released
                self._waiters += 1
                self._available.wait()
                self._waiters -= 1
                cnx = self._take_cnx(connector, kwargs, key, tid)
            num = 1
        if cnx:
            self._active[(tid, key)] = (cnx, num)

    # A tuple placeholder ('ping'/'close'/'create', cnx) means the real
    # connection still has to be validated or (re)created
    deferred = num == 1 and isinstance(cnx, tuple)
    exc_info = (None, None, None)
    if deferred:
        # Potentially lengthy operations must be done without lock held
        op, cnx = cnx
        try:
            if op == 'ping':
                cnx.ping()
            elif op == 'close':
                cnx.close()
            if op in ('close', 'create'):
                cnx = connector.get_connection(**kwargs)
        except TracError:
            exc_info = sys.exc_info()
            cnx = None
        except Exception:
            exc_info = sys.exc_info()
            if log:
                log.error('Exception caught on %s', op, exc_info=True)
            cnx = None

    if cnx and not isinstance(cnx, tuple):
        if deferred:
            # replace placeholder with real Connection
            with self._available:
                self._active[(tid, key)] = (cnx, num)
        return PooledConnection(self, cnx, key, tid, log)

    if deferred:
        # cnx couldn't be reused, clear placeholder
        with self._available:
            del self._active[(tid, key)]
        if op == 'ping':  # retry
            return self.get_cnx(connector, kwargs)

    # if we didn't get a cnx after wait(), something's fishy...
    if isinstance(exc_info[1], TracError):
        # Python 2 three-expression raise: preserve original traceback
        raise exc_info[0], exc_info[1], exc_info[2]
    timeout = time_now() - start
    errmsg = _("Unable to get database connection within %(time)d "
               "seconds.", time=timeout)
    if exc_info[1]:
        errmsg += " (%s)" % exception_to_unicode(exc_info[1])
    raise TimeoutError(errmsg)