def get_repository(self, type, dir, params):
    """GitRepository factory method"""
    assert type == "git"

    # Bail out early when the git backend could not be initialized at
    # all, or when the detected git binary is too old.
    if not self._version:
        raise TracError("GIT backend not available")
    elif not self._version['v_compatible']:
        raise TracError("GIT version %s installed not compatible (need >= %s)"
                        % (self._version['v_str'],
                           self._version['v_min_str']))

    repository = GitRepository(dir, params, self.log,
                               persistent_cache=self._persistent_cache,
                               git_bin=self._git_bin,
                               shortrev_len=self._shortrev_len)

    # Optionally wrap the repository in Trac's changeset cache.
    if not self._cached_repository:
        self.log.info("disabled CachedRepository for '%s'" % dir)
        return repository

    self.log.info("enabled CachedRepository for '%s'" % dir)
    return CachedRepository2(self.env, repository, self.log)
def get_repository(self, type, dir, params):
    """GitRepository factory method

    Validates the configured shortrev lengths and the availability /
    version of the git backend, then builds a GitRepository, optionally
    wrapped in a GitCachedRepository.
    """
    assert type == "git"

    # Validate configured abbreviated-sha lengths.
    # BUG FIX: both error messages said "withing" instead of "within".
    if not (4 <= self._shortrev_len <= 40):
        raise TracError("shortrev_len must be within [4..40]")
    if not (4 <= self._wiki_shortrev_len <= 40):
        raise TracError("wiki_shortrev_len must be within [4..40]")

    if not self._version:
        raise TracError("GIT backend not available")
    elif not self._version['v_compatible']:
        raise TracError("GIT version %s installed not compatible (need >= %s)"
                        % (self._version['v_str'],
                           self._version['v_min_str']))

    if self._trac_user_rlookup:
        def rlookup_uid(email):
            """Reverse map 'real name <*****@*****.**>' addresses to
            trac user ids; returns None if lookup failed.
            """
            try:
                _, email = email.rsplit('<', 1)
                email, _ = email.split('>', 1)
                email = email.lower()
            except Exception:
                return None

            for _uid, _name, _email in self.env.get_known_users():
                try:
                    if email == _email.lower():
                        return _uid
                except Exception:
                    continue
    else:
        def rlookup_uid(_):
            return None

    repos = GitRepository(dir, params, self.log,
                          persistent_cache=self._persistent_cache,
                          git_bin=self._git_bin,
                          git_fs_encoding=self._git_fs_encoding,
                          shortrev_len=self._shortrev_len,
                          rlookup_uid=rlookup_uid,
                          use_committer_id=self._use_committer_id,
                          use_committer_time=self._use_committer_time,
                          )

    if self._cached_repository:
        repos = GitCachedRepository(self.env, repos, self.log)
        self.log.debug("enabled CachedRepository for '%s'" % dir)
    else:
        self.log.debug("disabled CachedRepository for '%s'" % dir)

    return repos
def _get_file_from_req(self, req):
    """Return ``(fileobj, filename)`` for the uploaded 'image' field.

    :raises TracError: when no file was uploaded or the file is empty.
    """
    upload = req.args['image']

    # A missing or empty ``filename`` attribute means nothing was uploaded.
    if not getattr(upload, 'filename', None):
        raise TracError('No file uploaded.')

    # Real files expose a descriptor we can fstat(); in-memory uploads
    # only carry a ``len`` attribute.
    if hasattr(upload.file, 'fileno'):
        size = os.fstat(upload.file.fileno())[6]
    else:
        size = upload.file.len
    if size == 0:
        raise TracError('Can\'t upload empty file.')

    return upload.file, upload.filename
def _get_file_from_req(self, req):
    """Return ``(fileobj, filename)`` for the uploaded 'image' field.

    The basename of the client-supplied filename is normalized to
    unicode so it can be used safely downstream.

    :raises TracError: when no file was uploaded or the file is empty.
    """
    image = req.args['image']

    # Test if file is uploaded.
    if not hasattr(image, 'filename') or not image.filename:
        raise TracError('No file uploaded.')

    # Real files expose a descriptor we can fstat(); in-memory uploads
    # only carry a ``len`` attribute.
    if hasattr(image.file, 'fileno'):
        size = os.fstat(image.file.fileno())[6]
    else:
        size = image.file.len
    if size == 0:
        raise TracError('Can\'t upload empty file.')

    # Strip any client-supplied directory components (some browsers send
    # full paths).  BUG FIX: only decode when the name is still a byte
    # string -- depending on the request wrapper it may already be
    # unicode, and unicode.decode() would fail on non-ASCII names.
    filename = os.path.basename(image.filename)
    if isinstance(filename, str):
        filename = filename.decode('utf-8')
    self.log.debug(filename)
    return image.file, filename
def get_changes(self, old_path, old_rev, new_path, new_rev):
    """Generate ``(old_node, new_node, kind, change)`` tuples for the
    differences between two revisions of the same path.

    :raises TracError: if old_path != new_path (renames unsupported), or
        if git reports an unknown diff-tree action.
    """
    if old_path != new_path:
        raise TracError("not supported in git_fs")

    for chg in self.git.diff_tree(old_rev, new_rev,
                                  self.normalize_path(new_path)):
        (mode1, mode2, obj1, obj2, action, path) = chg

        # git reports directories with a 04xxxx mode on either side
        kind = Node.FILE
        if mode2.startswith('04') or mode1.startswith('04'):
            kind = Node.DIRECTORY

        if action == 'A':
            change = Changeset.ADD
        elif action == 'M':
            change = Changeset.EDIT
        elif action == 'D':
            change = Changeset.DELETE
        else:
            # BUG FIX: the original did ``raise "OhOh"`` -- raising a
            # plain string is a TypeError on Python >= 2.6.  Raise a
            # proper, descriptive exception instead.
            raise TracError("unsupported diff-tree action '%s'" % action)

        old_node = None
        new_node = None

        if change != Changeset.ADD:
            old_node = self.get_node(path, old_rev)
        if change != Changeset.DELETE:
            new_node = self.get_node(path, new_rev)

        yield (old_node, new_node, kind, change)
def __init__( self, path, params, log, persistent_cache=False, git_bin='git', git_fs_encoding='utf-8', shortrev_len=7, rlookup_uid=lambda _: None, use_committer_id=False, use_committer_time=False, ): self.logger = log self.gitrepo = path self.params = params self.shortrev_len = max(4, min(shortrev_len, 40)) self.rlookup_uid = rlookup_uid self.use_committer_time = use_committer_time self.use_committer_id = use_committer_id try: self.git = PyGIT.StorageFactory(path, log, not persistent_cache, git_bin=git_bin, git_fs_encoding=git_fs_encoding) \ .getInstance() except PyGIT.GitError, e: raise TracError("%s does not appear to be a Git " "repository." % path)
def get_builders(self, req):
    """Return a list of dicts describing every builder known to the
    buildbot master, including its last build when one exists.

    :raises TracError: if the buildbot master cannot be reached.
    """
    server = None
    try:
        server = self.get_server()
        builders = server.getAllBuilders()
    except Exception:
        # BUG FIX: was a bare ``except:``, which also swallows
        # SystemExit / KeyboardInterrupt.
        raise TracError("Can't get access to buildbot at " +
                        self.get_xmlrpc_url())

    ret = []
    for builder in builders:
        lastbuilds = server.getLastBuilds(builder, 1)
        if len(lastbuilds) > 0:
            lastnumber = lastbuilds[0][1]
            build = {
                'name': builder,
                'status': server.getStatus(builder),
                'url': req.href.buildbot(builder),
                'lastbuild': lastnumber,
                'lastbuildurl': self.get_build_url(builder, lastnumber)
            }
        else:
            # builder has never run: report it as "missing"
            build = {
                'name': builder,
                'status': "missing",
                'url': req.href.buildbot(builder),
                'lastbuild': None,
                'lastbuildurl': None
            }
        ret.append(build)
    return ret
def __init__(self, git, path, rev, log, ls_tree_info=None):
    """Look up *path* at *rev* in *git* and initialize the node.

    :param ls_tree_info: optional pre-fetched ``ls_tree`` entry for this
        path, avoiding a second git invocation.
    :raises NoSuchNode: if the path does not exist at the revision.
    :raises TracError: on an unexpected git object kind.
    """
    self.log = log
    self.git = git
    self.fs_sha = None  # points to either tree or blobs
    self.fs_perm = None
    self.fs_size = None
    rev = rev and str(rev) or 'HEAD'

    kind = Node.DIRECTORY
    p = path.strip('/')
    if p:  # ie. not the root-tree
        if not ls_tree_info:
            ls_tree_info = git.ls_tree(rev, p) or None
        if ls_tree_info:
            # a direct path lookup yields exactly one entry
            [ls_tree_info] = ls_tree_info

        if not ls_tree_info:
            raise NoSuchNode(path, rev)

        (self.fs_perm, k, self.fs_sha, self.fs_size, fn) = ls_tree_info

        # fix-up to the last commit-rev that touched this node
        rev = self.git.last_change(rev, p)

        if k == 'tree':
            pass
        elif k == 'blob':
            kind = Node.FILE
        else:
            raise TracError("internal error (got unexpected object kind '%s')" % k)

    self.created_path = path
    self.created_rev = rev

    Node.__init__(self, path, rev, kind)
def get_changes(self, old_path, old_rev, new_path, new_rev,
                ignore_ancestry=0):
    """Generate ``(old_node, new_node, kind, change)`` tuples for the
    differences between two revisions of the same path.

    Renames/copies and *ignore_ancestry* are not handled yet.

    :raises TracError: if old_path != new_path.
    """
    # TODO: handle renames/copies, ignore_ancestry
    if old_path != new_path:
        raise TracError(_("Not supported in git_fs"))

    # Keep one historian per side open for the whole iteration, so the
    # get_node() calls below can reuse them for last-change lookups.
    with self.git.get_historian(old_rev,
                                old_path.strip('/')) as old_historian:
        with self.git.get_historian(new_rev,
                                    new_path.strip('/')) as new_historian:
            for chg in self.git.diff_tree(old_rev, new_rev,
                                          self.normalize_path(new_path)):
                mode1, mode2, obj1, obj2, action, path, path2 = chg

                # git reports directories with a 04xxxx mode on either side
                kind = Node.FILE
                if mode2.startswith('04') or mode1.startswith('04'):
                    kind = Node.DIRECTORY

                change = GitChangeset.action_map[action]

                old_node = None
                new_node = None

                if change != Changeset.ADD:
                    old_node = self.get_node(path, old_rev, old_historian)
                if change != Changeset.DELETE:
                    new_node = self.get_node(path, new_rev, new_historian)

                yield old_node, new_node, kind, change
def __init__(self, name, expose, disp, url):
    """Create a link definition and validate its parts.

    :param name: link prefix (must match ``name_re``)
    :param expose: whether the link is shown in the UI
    :param disp: display template; at most one '%s' placeholder
    :param url: URL template; at most one '%s' placeholder
    :raises TracError: on any invalid part.
    """
    # Idiom cleanup: ``not name`` already covers the empty string, and
    # ``len(url) == 0`` was redundant with ``not url``.
    if not name:
        raise TracError('Link name is required')
    if not url:
        raise TracError("'URL' is required")
    self.name = name
    self.disp = disp or name + ':%s'
    self.url = url
    self.expose = expose
    # validate 'name'
    if not self.name_re.search(name):
        raise TracError("Invalid link name: '%s'" % name)
    # validate 'disp': splitting on '%s' yields at most 2 parts when
    # there is at most one placeholder
    if len(self.disp.split('%s')) > 2:
        raise TracError("'Display' is allowed for at most one parameter")
    # validate 'URL': URL may contain %xx escapes, so only '%s' counts
    if len(self.url.split('%s')) > 2:
        raise TracError("'URL' is allowed for at most one parameter")
def get_existing_node(env, repos, path, rev): try: return repos.get_node(path, rev) except TracError, e: raise TracError(e.message + '<br><p>You can <a href="%s">search</a> ' 'in the repository history to see if that path ' 'existed but was later removed.</p>' % escape(env.href.log(path, rev=rev, mode='path_history')))
def fetch_page(cursor, page):
    """Return the latest wiki text for *page*.

    :raises TracError: if the page does not exist.
    """
    cursor.execute(
        "SELECT text FROM wiki WHERE name=%s ORDER BY version DESC LIMIT 1",
        (page, ))
    text = cursor.fetchone()
    if not text:
        # BUG FIX: the substitution tuple was (page, BASE_URL, page),
        # which swapped the base URL and the page name in the generated
        # href; the link must be '<BASE_URL>/wiki/<page>'.
        raise TracError(
            "No such template page <a class='missing' href='%s/wiki/%s'>%s</a>"
            % (BASE_URL, page, page))
    return text[0]
def __git_path(self):
    """return path as expected by PyGIT"""
    stripped = self.path.strip('/')
    if self.isfile:
        assert stripped
        return stripped
    if self.isdir:
        # directories get a trailing slash; the root tree stays ''
        if stripped:
            return stripped + '/'
        return stripped
    raise TracError(_("Internal error"))
def __init__(self, repos, path, rev, log, ls_tree_info=None,
             historian=None):
    """Look up *path* at *rev* in *repos* and initialize the node.

    :param ls_tree_info: optional pre-fetched ``ls_tree`` entry for this
        path, avoiding an extra git invocation.
    :param historian: optional PyGIT historian passed through to
        ``last_change`` to speed up lookups.
    :raises NoSuchNode: if the path does not exist at the revision.
    :raises TracError: on an unexpected git object kind.
    """
    self.log = log
    self.repos = repos
    self.fs_sha = None  # points to either tree or blobs
    self.fs_perm = None
    self.fs_size = None
    if rev:
        rev = repos.normalize_rev(to_unicode(rev))
    else:
        rev = repos.youngest_rev
    created_rev = rev

    kind = Node.DIRECTORY
    p = path.strip('/')
    if p:  # ie. not the root-tree
        if not rev:
            raise NoSuchNode(path, rev)
        if not ls_tree_info:
            ls_tree_info = repos.git.ls_tree(rev, p) or None
        if ls_tree_info:
            # a direct path lookup yields exactly one entry
            [ls_tree_info] = ls_tree_info

        if not ls_tree_info:
            raise NoSuchNode(path, rev)

        self.fs_perm, k, self.fs_sha, self.fs_size, fname = ls_tree_info

        # fix-up to the last commit-rev that touched this node
        created_rev = repos.git.last_change(rev, p, historian)

        if k == 'tree':
            pass
        elif k == 'commit':
            # FIXME: this is a workaround for missing git submodule
            # support in the plugin
            pass
        elif k == 'blob':
            kind = Node.FILE
        else:
            raise TracError(
                _(
                    "Internal error (got unexpected object "
                    "kind '%(kind)s')", kind=k))

    self.created_path = path
    self.created_rev = created_rev

    Node.__init__(self, repos, path, rev, kind)
def render_property(self, name, mode, context, props):
    """Render a changeset property: sha links for 'Parents'/'Children',
    formatted author + timestamp for 'git-committer'/'git-author'."""

    def sha_link(sha):
        return self._format_sha_link(context, 'sha', sha, sha)

    if name in ('Parents', 'Children'):
        revs = props[name]
        # comma-separated links; the last one carries no trailing comma
        leading = [tag(sha_link(rev), ', ') for rev in revs[:-1]]
        return tag(leading, sha_link(revs[-1]))

    if name in ('git-committer', 'git-author'):
        user_, time_ = props[name]
        formatted = "%s (%s)" % (
            Chrome(self.env).format_author(context.req, user_),
            format_datetime(time_, tzinfo=context.req.tz))
        return unicode(formatted)

    raise TracError("internal error")
def render_property(self, name, mode, context, props):
    """Render a 'git-*' changeset property as a RenderedProperty.

    :raises TracError: for property names this renderer cannot handle.
    """
    assert(name.startswith('git-'))

    content = None
    if name in ('git-Parents', 'git-Children'):
        revs = props[name]
        # comma-separated sha links; the last one without a comma
        content = tag([tag(self._format_sha_link(context, rev, rev), ', ')
                       for rev in revs[:-1]],
                      self._format_sha_link(context, revs[-1], revs[-1]))

    if name in ('git-committer', 'git-author'):
        user_, time_ = props[name]
        content = to_unicode(user_) + " / " + \
            time_.strftime('%Y-%m-%dT%H:%M:%SZ%z')

    # BUG FIX: compare against None instead of relying on truthiness,
    # so a rendered-but-falsy value (e.g. an empty markup fragment)
    # is not misreported as an unrenderable property.
    if content is not None:
        return RenderedProperty(name=name[4:] + ':',
                                name_attributes=[("class", "property")],
                                content=content)
    else:
        raise TracError("Can't render property '%s'" % name)
def get_last_builds(self, builder):
    """Return up to the 5 most recent builds of *builder*, newest first.

    :raises TracError: if the buildbot master cannot be reached.
    """
    try:
        server = self.get_server()
        builds = server.getLastBuilds(builder, 5)
    except Exception:
        # BUG FIX: was a bare ``except:``, which also swallows
        # SystemExit / KeyboardInterrupt.  (The dead ``server = None``
        # / ``builds = None`` pre-initializers were removed too.)
        raise TracError("Can't get builder %s on url %s" %
                        (builder, self.get_xmlrpc_url()))

    # last build first
    builds.reverse()

    ret = []
    for build in builds:
        ret.append({
            'status': build[5],
            'number': build[1],
            'url': self.get_build_url(builder, build[1])
        })
    return ret
def get_builders(self, req):
    """Return a list of dicts describing every builder known to the
    buildbot master, including its last build when one exists.

    :raises TracError: if the buildbot master cannot be reached.
    """
    try:
        server = self.get_server()
        builders = server.getAllBuilders()
    except Exception:
        # was a bare ``except:``; narrowed to Exception
        raise TracError("Can't get access to buildbot at " +
                        self.get_xmlrpc_url())

    ret = []
    for builder in builders:
        lastbuilds = server.getLastBuilds(builder, 1)
        if lastbuilds:
            lastbuild = lastbuilds[0]
            build = {
                'name': builder,
                'status': server.getStatus(builder),
                'url': req.href.buildbot(builder),
                'lastbuild': lastbuild[1],
                'lastbuildurl': self.get_build_url(builder, lastbuild[1])
            }
        else:
            # BUG FIX: ``getLastBuilds(builder, 1)[0]`` raised
            # IndexError for builders that have never run; report them
            # as "missing" instead (consistent with the other variant
            # of this method in the codebase).
            build = {
                'name': builder,
                'status': "missing",
                'url': req.href.buildbot(builder),
                'lastbuild': None,
                'lastbuildurl': None
            }
        ret.append(build)
    return ret
def _parse_macro_content(self, content, req):
    """Parse the macro argument string into query kwargs.

    :return: ``(stats_provider, kwargs, preview)`` where *preview* is
        True when a dummy meter should be rendered (no current ticket).
    :raises TracError: if a named stats provider does not exist.
    """
    args, kwargs = parse_args(content, strict=False)

    # pin the query options the meter depends on, overriding user input
    kwargs['max'] = 0
    kwargs['order'] = 'id'
    kwargs['col'] = 'id'

    # special case for values equal to 'self': replace with current ticket
    # number, if available
    preview = False
    for key in kwargs.keys():
        if kwargs[key] == 'self':
            current_ticket = self._this_ticket(req)
            if current_ticket:
                kwargs[key] = current_ticket
            else:
                # id=0 basically causes a dummy preview of the meter
                # to be rendered
                preview = True
                # NOTE: kwargs is rebound wholesale here, discarding all
                # other arguments (including the pinned options above)
                kwargs = {'id': 0}
            break

    try:
        spkw = kwargs.pop('stats_provider')
        xtnpt = ExtensionPoint(ITicketGroupStatsProvider)
        found = False
        for impl in xtnpt.extensions(self):
            if impl.__class__.__name__ == spkw:
                found = True
                stats_provider = impl
                break
        if not found:
            # NOTE: TracError is not caught by the KeyError handler
            # below, so this propagates to the caller as intended
            raise TracError("Supplied stats provider does not exist!")
    except KeyError:
        # if the `stats_provider` keyword argument is not provided,
        # propagate the stats provider defined in the config file
        stats_provider = self._sp

    return stats_provider, kwargs, preview
def load(self): self._links = {} exposes = {} for name in self._get_config('expose').split(','): name = name.strip() if name == '': continue exposes[name] = True for name in self._get_config('names', '').split(','): name = name.strip() if name == '': continue disp = self._get_config(name + self._disp_suffix, name) url = self._get_config(name + self._url_suffix) if not url: raise TracError("No URL defined for '%s'" % name) expose = exposes.has_key(name) try: self._internal_add(LinkInfo(name, expose, disp, url)) except TracError, e: self.log.debug('LinkInfo Error: ' + str(e))
def __init__(self, repos, path, rev, log, ls_tree_info=None):
    """Look up *path* at *rev* in *repos* and initialize the node.

    :param ls_tree_info: optional pre-fetched ``ls_tree`` entry for this
        path, avoiding a second git invocation.
    :raises NoSuchNode: if the path does not exist at the revision.
    :raises TracError: on an unexpected git object kind.
    """
    self.log = log
    self.repos = repos
    self.fs_sha = None  # points to either tree or blobs
    self.fs_perm = None
    self.fs_size = None
    rev = rev and str(rev) or 'HEAD'

    kind = Node.DIRECTORY
    p = path.strip('/')
    if p:  # ie. not the root-tree
        if not ls_tree_info:
            ls_tree_info = repos.git.ls_tree(rev, p) or None
        if ls_tree_info:
            # a direct path lookup yields exactly one entry
            [ls_tree_info] = ls_tree_info

        if not ls_tree_info:
            raise NoSuchNode(path, rev)

        self.fs_perm, k, self.fs_sha, self.fs_size, _ = ls_tree_info

        # fix-up to the last commit-rev that touched this node
        rev = repos.git.last_change(rev, p)

        if k == 'tree':
            pass
        elif k == 'commit':
            pass  # FIXME: this is a workaround for missing git submodule support in the plugin
        elif k == 'blob':
            kind = Node.FILE
        else:
            raise TracError("Internal error (got unexpected object kind '%s')" % k)

    self.created_path = path
    self.created_rev = rev

    Node.__init__(self, repos, path, rev, kind)
def _change_link(self, name, expose, disp, url):
    """Add, update, or delete (when *expose* is None) a named link and
    persist the result to the configuration.

    :raises TracError: if *name* is empty, or via LinkInfo validation.
    """
    # validate
    if not name:
        raise TracError('name must be specified.')
    # Idiom fix: identity comparison with None (``is None``), not ``==``.
    if expose is None:
        # delete: drop the link and clean up its config entries
        info = self._links.pop(name)
        self._remove_config(info.name + '_disp')
        self._remove_config(info.name + '_url')
    else:
        # add/update: LinkInfo performs the error checking
        info = LinkInfo(name, expose, disp, url)
        self._links[name] = info
        self._set_config(info.name + '_disp', info.disp)
        self._set_config(info.name + '_url', info.url)
    # update 'names' and 'expose' (renamed the local so it no longer
    # shadows the ``expose`` parameter)
    names = [n for n in self._links]
    exposed = [n for n in names if self._links[n].expose]
    self._set_config('names', ', '.join(names))
    self._set_config('expose', ', '.join(exposed))
    self.config.save()
    self.log.debug('config is updated.')
def change_sid(self, new_sid):
    """Rename the current anonymous session to *new_sid*.

    :raises TracError: if an anonymous session named *new_sid* exists.
    """
    assert self.req.authname == 'anonymous', \
        'Cannot change ID of authenticated session'
    assert new_sid, 'Session ID cannot be empty'
    if new_sid == self.sid:
        return

    db = self.env.get_db_cnx()
    cursor = db.cursor()

    # refuse to take over an existing anonymous session id
    cursor.execute("SELECT sid FROM session WHERE sid=%s "
                   "AND authenticated=0", (new_sid, ))
    if cursor.fetchone():
        raise TracError("Session '%s' already exists.<br />"
                        "Please choose a different session id." % new_sid,
                        "Error renaming session")

    self.env.log.debug('Changing session ID %s to %s' % (self.sid, new_sid))
    cursor.execute("UPDATE session SET sid=%s WHERE sid=%s "
                   "AND authenticated=0", (new_sid, self.sid))
    db.commit()

    self.sid = new_sid
    self.bake_cookie()
def get_path_history(self, path, rev=None, limit=None):
    """Path history is not supported by this backend.

    :raises TracError: always.
    """
    raise TracError(_("Unsupported \"Show only adds and deletes\""))
def render_property(self, name, mode, context, props):
    """Render a git changeset property for the changeset view.

    Handles 'Branches', 'Parents', 'Children' (as sha links, with extra
    per-parent diff links for merges) and 'git-committer'/'git-author'
    (as formatted author + timestamp).

    :raises TracError: for any other property name.
    """

    def sha_link(sha, label=None):
        # sha is assumed to be a non-abbreviated 40-chars sha id
        try:
            reponame = context.resource.parent.id
            repos = self.env.get_repository(reponame)
            cset = repos.get_changeset(sha)
            if label is None:
                label = repos.display_rev(sha)
            return tag.a(label, class_='changeset',
                         title=shorten_line(cset.message),
                         href=context.href.changeset(sha, repos.reponame))
        except Exception as e:
            # unresolvable sha: render a dead link, error in the tooltip
            return tag.a(sha, class_='missing changeset',
                         title=to_unicode(e), rel='nofollow')

    if name == 'Branches':
        branches = props[name]
        # simple non-merge commit
        return tag(*intersperse(', ', (sha_link(rev, label)
                                       for label, rev in branches)))
    elif name in ('Parents', 'Children'):
        revs = props[name]  # list of commit ids
        if name == 'Parents' and len(revs) > 1:
            # we got a merge...
            current_sha = context.resource.id
            reponame = context.resource.parent.id
            parent_links = intersperse(', ', \
                ((sha_link(rev), ' (',
                  tag.a(_("diff"),
                        title=_("Diff against this parent (show the "
                                "changes merged from the other parents)"),
                        href=context.href.changeset(current_sha, reponame,
                                                    old=rev)),
                  ')') for rev in revs))
            return tag(list(parent_links), tag.br(),
                       tag.span(Markup(_("Note: this is a <strong>merge"
                                         "</strong> changeset, the "
                                         "changes displayed below "
                                         "correspond to the merge "
                                         "itself.")),
                                class_='hint'), tag.br(),
                       tag.span(Markup(_("Use the <code>(diff)</code> "
                                         "links above to see all the "
                                         "changes relative to each "
                                         "parent.")),
                                class_='hint'))
        # simple non-merge commit
        return tag(*intersperse(', ', map(sha_link, revs)))
    elif name in ('git-committer', 'git-author'):
        user_, time_ = props[name]
        _str = "%s (%s)" % (Chrome(self.env).format_author(
            context.req, user_),
            format_datetime(time_, tzinfo=context.req.tz))
        return unicode(_str)

    raise TracError(_("Internal error"))
def get_repository(self, type, dir, params):
    """GitRepository factory method"""
    assert type == 'git'

    # validate the configured abbreviated-sha lengths
    if not (4 <= self.shortrev_len <= 40):
        raise TracError(_("%(option)s must be in the range [4..40]",
                          option="[git] shortrev_len"))
    if not (4 <= self.wiki_shortrev_len <= 40):
        raise TracError(_("%(option)s must be in the range [4..40]",
                          option="[git] wikishortrev_len"))

    # bail out when the git backend is unavailable or too old
    if not self._version:
        raise TracError(_("GIT backend not available"))
    elif not self._version['v_compatible']:
        raise TracError(_("GIT version %(hasver)s installed not "
                          "compatible (need >= %(needsver)s)",
                          hasver=self._version['v_str'],
                          needsver=self._version['v_min_str']))

    if self.trac_user_rlookup:
        def rlookup_uid(email):
            """Reverse map 'real name <*****@*****.**>' addresses to trac
            user ids.

            :return: `None` if lookup failed
            """
            try:
                _, email = email.rsplit('<', 1)
                email, _ = email.split('>', 1)
                email = email.lower()
            except Exception:
                return None

            for _uid, _name, _email in self.env.get_known_users():
                try:
                    if email == _email.lower():
                        return _uid
                except Exception:
                    continue
    else:
        # lookup disabled by configuration: always fail
        def rlookup_uid(_):
            return None

    repos = GitRepository(self.env, dir, params, self.log,
                          persistent_cache=self.persistent_cache,
                          git_bin=self.git_bin,
                          git_fs_encoding=self.git_fs_encoding,
                          shortrev_len=self.shortrev_len,
                          rlookup_uid=rlookup_uid,
                          use_committer_id=self.use_committer_id,
                          use_committer_time=self.use_committer_time,
                          )
    # optionally wrap the repository in Trac's changeset cache
    if self.cached_repository:
        repos = GitCachedRepository(self.env, repos, self.log)
        self.log.debug("enabled CachedRepository for '%s'", dir)
    else:
        self.log.debug("disabled CachedRepository for '%s'", dir)
    return repos
class CsetPropertyRenderer(Component):
    """Renders git-specific changeset properties (parents, children,
    branches, committer/author) in the changeset view."""

    implements(IPropertyRenderer)  # relied upon by GitChangeset

    def match_property(self, name, mode):
        # default renderer has priority 1; claim these revprops with 4
        return (name in ('Parents',
                         'Children',
                         'Branches',
                         'git-committer',
                         'git-author',
                         ) and mode == 'revprop') and 4 or 0

    def render_property(self, name, mode, context, props):
        """Render one matched property.

        :raises TracError: for any property name not handled below.
        """

        def sha_link(sha, label=None):
            # sha is assumed to be a non-abbreviated 40-chars sha id
            try:
                reponame = context.resource.parent.id
                repos = self.env.get_repository(reponame)
                cset = repos.get_changeset(sha)
                if label is None:
                    label = repos.display_rev(sha)
                return tag.a(label, class_='changeset',
                             title=shorten_line(cset.message),
                             href=context.href.changeset(sha,
                                                         repos.reponame))
            except Exception, e:
                # unresolvable sha: dead link, error in the tooltip
                return tag.a(sha, class_='missing changeset',
                             title=to_unicode(e), rel='nofollow')

        if name == 'Branches':
            branches = props[name]
            # simple non-merge commit
            return tag(*intersperse(', ', (sha_link(rev, label)
                                           for label, rev in branches)))

        elif name in ('Parents', 'Children'):
            revs = props[name]  # list of commit ids

            if name == 'Parents' and len(revs) > 1:
                # we got a merge...
                current_sha = context.resource.id
                reponame = context.resource.parent.id
                parent_links = intersperse(', ', \
                    ((sha_link(rev), ' (',
                      tag.a('diff',
                            title="Diff against this parent (show the " \
                                  "changes merged from the other parents)",
                            href=context.href.changeset(current_sha,
                                                        reponame,
                                                        old=rev)),
                      ')') for rev in revs))
                return tag(list(parent_links), tag.br(),
                           tag.span(tag("Note: this is a ",
                                        tag.strong("merge"), " changeset, "
                                        "the changes displayed below "
                                        "correspond to the merge itself."),
                                    class_='hint'), tag.br(),
                           tag.span(tag("Use the ", tag.tt("(diff)"),
                                        " links above to see all the changes "
                                        "relative to each parent."),
                                    class_='hint'))

            # simple non-merge commit
            return tag(*intersperse(', ', map(sha_link, revs)))

        elif name in ('git-committer', 'git-author'):
            user_, time_ = props[name]
            _str = "%s (%s)" % (Chrome(self.env).format_author(
                context.req, user_),
                format_datetime(time_, tzinfo=context.req.tz))
            return unicode(_str)

        raise TracError("Internal error")
from trac.config import BoolOption, IntOption, PathOption, Option from trac.web.chrome import Chrome # for some reason CachedRepository doesn't pass-through short_rev()s class CachedRepository2(CachedRepository): def short_rev(self, path): return self.repos.short_rev(path) from genshi.builder import tag from genshi.core import Markup, escape from datetime import datetime import time, sys if not sys.version_info[:2] >= (2,5): raise TracError("python >= 2.5 dependancy not met") import PyGIT def _last_iterable(iterable): "helper for detecting last iteration in for-loop" i = iter(iterable) v = i.next() for nextv in i: yield False, v v = nextv yield True, v # helper def _parse_user_time(s): """parse author/committer attribute lines and return
import sys

from trac.config import BoolOption, IntOption, PathOption, Option
from trac.core import *
from trac.util import TracError, shorten_line
from trac.util.datefmt import FixedOffset, to_timestamp
from trac.util.text import to_unicode
from trac.versioncontrol.api import (Changeset, IRepositoryConnector, Node,
                                     NoSuchChangeset, NoSuchNode, Repository)
from trac.versioncontrol.cache import CachedRepository
from trac.versioncontrol.web_ui import IPropertyRenderer, RenderedProperty
from trac.wiki import IWikiSyntaxProvider

import PyGIT

# BUG FIX: ``sys`` was used in this version check without ever being
# imported, which made the module fail with a NameError at import time.
if not sys.version_info[:2] >= (2, 5):
    raise TracError("This plugin requires Python >= 2.5")


# for some reason CachedRepository doesn't pass-through short_rev()s
class CachedRepository2(CachedRepository):

    def short_rev(self, path):
        return self.repos.short_rev(path)

    def normalize_rev(self, rev):
        """Return the full sha for *rev*, or the youngest revision when
        *rev* is empty.

        :raises NoSuchChangeset: if *rev* cannot be resolved.
        """
        if not rev:
            return self.repos.get_youngest_rev()
        normrev = self.repos.git.verifyrev(rev)
        if normrev is None:
            raise NoSuchChangeset(rev)
        return normrev
def process_request(self, req):
    """Handle a /search request: collect filters, run the query against
    all search sources, and populate the HDF for the search template.

    :return: ``('search.cs', None)`` template tuple.
    :raises TracError: when the query is too short.
    """
    req.perm.assert_permission('SEARCH_VIEW')

    available_filters = []
    for source in self.search_sources:
        available_filters += source.get_search_filters(req)

    # filters explicitly checked in the request, else all of them.
    # (``has_key`` replaced by ``in``; loop variable renamed so it no
    # longer shadows the ``filter`` builtin.)
    filters = [f[0] for f in available_filters if f[0] in req.args]
    if not filters:
        filters = [f[0] for f in available_filters]
    req.hdf['search.filters'] = [
        {'name': f[0],
         'label': f[1],
         'active': f[0] in filters}
        for f in available_filters]

    req.hdf['title'] = 'Search'
    query = req.args.get('q')
    if query:
        page = int(req.args.get('page', '1'))
        redir = self.quickjump(query)
        if redir:
            req.redirect(redir)
        elif query.startswith('!'):
            # leading '!' suppresses quickjump; strip it for searching
            query = query[1:]

        # Refuse queries that obviously would result in a huge result set
        if len(query) < 3 and len(query.split()) == 1:
            raise TracError('Search query too short. '
                            'Query must be at least 3 characters long.',
                            'Search Error')

        results = []
        for source in self.search_sources:
            results += list(source.get_search_results(req, query, filters))
        # most recent first
        results.sort(lambda x, y: cmp(y[2], x[2]))

        page_size = self.RESULTS_PER_PAGE
        n = len(results)
        # BUG FIX: ceiling division.  The old ``n / page_size + 1``
        # produced an extra, empty page whenever n was an exact
        # multiple of the page size.
        n_pages = (n + page_size - 1) // page_size or 1
        results = results[(page - 1) * page_size:page * page_size]

        req.hdf['title'] = 'Search Results'
        req.hdf['search.q'] = req.args.get('q').replace('"', '&#34;')
        req.hdf['search.page'] = page
        req.hdf['search.n_hits'] = n
        req.hdf['search.n_pages'] = n_pages
        req.hdf['search.page_size'] = page_size
        if page < n_pages:
            next_href = self.env.href.search(
                zip(filters, ['on'] * len(filters)), q=query, page=page + 1)
            add_link(req, 'next', next_href, 'Next Page')
        if page > 1:
            prev_href = self.env.href.search(
                zip(filters, ['on'] * len(filters)), q=query, page=page - 1)
            add_link(req, 'prev', prev_href, 'Previous Page')
        req.hdf['search.page_href'] = escape(
            self.env.href.search(zip(filters, ['on'] * len(filters)),
                                 q=query))
        req.hdf['search.result'] = [
            {'href': escape(result[0]),
             'title': result[1],
             'date': format_datetime(result[2]),
             'author': escape(result[3]),
             'excerpt': result[4]}
            for result in results]

    add_stylesheet(req, 'common/css/search.css')
    return 'search.cs', None