Example #1
class GitwebProjectsRepositoryProvider(Component):
    implements(IRepositoryProvider)

    projects_list = PathOption('git',
                               'projects_list',
                               doc='Path to a gitweb-formatted projects.list')
    projects_base = PathOption('git',
                               'projects_base',
                               doc='Path to the base of your git projects')
    projects_url = Option(
        'git',
        'projects_url',
        doc='Template for project URLs. %s will be replaced with the repo name'
    )

    def get_repositories(self):
        if not self.projects_list:
            return

        for line in open(self.projects_list):
            line = line.strip()
            name = line
            if name.endswith('.git'):
                name = name[:-4]
            repo = {
                'dir': os.path.join(self.projects_base, line),
                'type': 'git',
            }
            description_path = os.path.join(repo['dir'], 'description')
            if os.path.exists(description_path):
                repo['description'] = open(description_path).read().strip()
            if self.projects_url:
                repo['url'] = self.projects_url % name
            yield name, repo
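For illustration, a minimal standalone sketch of the same projects.list handling. The line, base directory and URL template below are assumed placeholder values, not taken from any real configuration:

import os

# Hypothetical values standing in for the plugin's configured options.
line = 'myrepo.git'
projects_base = '/var/lib/git'
projects_url = 'https://git.example.org/%s'

name = line[:-4] if line.endswith('.git') else line
repo = {
    'dir': os.path.join(projects_base, line),
    'type': 'git',
    'url': projects_url % name,
}
print(name)
print(repo)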
Example #2
class GitwebProjectsRepositoryProvider(Component):

    implements(IRepositoryProvider)

    projects_list = PathOption(
        'gitweb-repositories',
        'projects_list',
        doc="""Path to a gitweb-formatted projects.list""")

    projects_base = PathOption('gitweb-repositories',
                               'projects_base',
                               doc="""Path to the base of your git projects""")

    projects_url = Option(
        'gitweb-repositories',
        'projects_url',
        doc="""Template for project URLs. `%s` will be replaced with the repo
        name""")

    sync_per_request = ListOption('gitweb-repositories',
                                  'sync_per_request',
                                  '',
                                  doc="""Repositories to sync on every request
        (not recommended).""")

    def get_repositories(self):
        """Retrieve repositories specified in a `projects_list` file."""
        if not self.projects_list:
            return

        if not os.path.exists(self.projects_list):
            self.log.warning(
                "The [git] projects_list file was not found at "
                "'%s'", self.projects_list)
            return

        with open(self.projects_list, 'r') as fp:
            for line in fp:
                entries = line.strip().split()
                if entries:
                    name = entries[0]
                    reponame = name[:-4] if name.endswith('.git') else name
                    info = {
                        'dir': os.path.join(self.projects_base, name),
                        'sync_per_request': reponame in self.sync_per_request,
                        'type': 'git',
                    }
                    description_path = \
                        os.path.join(info['dir'], 'description')
                    if os.path.exists(description_path):
                        with open(description_path, 'r') as fd:
                            info['description'] = fd.read().strip()
                    if self.projects_url:
                        info['url'] = self.projects_url % reponame
                    yield reponame, info
Example #3
class ServiceProvider(Component):
    implements(IGraphDatabaseProvider)

    resource_uri = Option('neo4j', 'resource_uri', doc="""
""")
    options = Options()
    classpath = options.add(
        ListOption('neo4j', 'classpath', sep=os.pathsep, doc="""
"""))
    ext_dirs = options.add(
        ListOption('neo4j', 'ext_dirs', sep=os.pathsep, doc="""
"""))
    start_server = options.add(
        BoolOption('neo4j', 'start_server', doc="""
"""))
    server_path = options.add(
        PathOption('neo4j', 'server_path', doc="""
"""))
    username = options.add(Option('neo4j', 'username', doc="""
"""))
    password = options.add(Option('neo4j', 'password', doc="""
"""))
    jvm = options.add(PathOption('neo4j', 'jvm', doc="""
"""))

    def start(self, resource_uri, params):
        if resource_uri is None:
            resource_uri = self.resource_uri
            if not resource_uri:
                resource_uri = os.path.join(self.env.path, 'neodb')
        if resource_uri.startswith('file://'):
            resource_uri = 'file://' + normalize_path(
                self.env.path, resource_uri[7:])
        elif '://' not in resource_uri:
            resource_uri = normalize_path(self.env.path, resource_uri)
        for option, filter in self.options:
            if option not in params:
                value = getattr(self, option)
                if value is not None:
                    params[option] = filter(value)
        return resource_uri, neo4j.GraphDatabase(resource_uri, **params)

    instances = {}

    def instance(self, resource_uri, params):
        if resource_uri not in self.instances:
            key = resource_uri
            resource_uri, neo = self.start(resource_uri, params)
            neo = TracNeo(self.env, neo)
            if resource_uri != key:
                self.instances[resource_uri] = neo
            self.instances[key] = neo
        return self.instances[resource_uri]
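As a side note, the resource_uri normalization in start() can be exercised on its own. The sketch below uses a rough stand-in for the normalize_path helper, which is assumed here to resolve a relative path against the environment directory; the environment path is hypothetical:

import os

def normalize_path(base, path):
    # Stand-in for the helper used above: resolve `path` against `base`.
    return os.path.normpath(os.path.join(base, path))

env_path = '/var/trac/myenv'  # hypothetical environment path
for uri in ('file://neodb', 'neodb', 'http://localhost:7474/db/data'):
    if uri.startswith('file://'):
        uri = 'file://' + normalize_path(env_path, uri[7:])
    elif '://' not in uri:
        uri = normalize_path(env_path, uri)
    print(uri)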
Example #4
class GitConnector(Component):
    implements(IRepositoryConnector, IWikiSyntaxProvider, IPropertyRenderer)

    _persistent_cache = BoolOption('git', 'persistent_cache', 'false',
                       "Enable persistent caching of commit tree")

    _cached_repository = BoolOption('git', 'cached_repository', 'false',
                    "Wrap `GitRepository` in `CachedRepository`")

    _shortrev_len = IntOption('git', 'shortrev_len', 7,
                  "Length to which revision SHA sums should be abbreviated"
                  " (must be >= 4 and <= 40)")

    _git_bin = PathOption('git', 'git_bin', '/usr/bin/git',
                  "Path to git executable (relative to trac project folder!)")

    def __init__(self):
        self._version = None

        try:
            self._version = PyGIT.Storage.git_version(git_bin=self._git_bin)
        except PyGIT.GitError as e:
            self.log.error("GitError: %s", e)

        if self._version:
            self.log.info("detected GIT version %s", self._version['v_str'])
            self.env.systeminfo.append(('GIT', self._version['v_str']))
            if not self._version['v_compatible']:
                self.log.error("GIT version %s installed not compatible "
                               "(need >= %s)" , self._version['v_str'],
                               self._version['v_min_str'])
Example #5
class ScreenshotsInit(Component):
    """
       Init component initialises database and environment for screenshots
       plugin.
    """
    implements(IEnvironmentSetupParticipant)

    # Configuration options.
    path = PathOption('screenshots',
                      'path',
                      '../screenshots',
                      doc='Path where to store uploaded screenshots.')

    # IEnvironmentSetupParticipant methods
    def environment_created(self):
        pass

    def environment_needs_upgrade(self, db):
        cursor = db.cursor()

        # Is database up to date?
        return (self._get_db_version(cursor) != last_db_version) or not \
          os.path.isdir(self.path)

    def upgrade_environment(self, db):
        cursor = db.cursor()

        # Create screenshots directory if not exists.
        if not os.path.isdir(self.path):
            os.mkdir(os.path.normpath(self.path))

        # Get current database schema version
        db_version = self._get_db_version(cursor)

        # Is this clean installation?
        if db_version == 0:
            # Perform single upgrade.
            module = __import__('tracscreenshots.db.db%s' % (last_db_version),
                                globals(), locals(), ['do_upgrade'])
            module.do_upgrade(self.env, cursor, False)
        else:
            # Perform incremental upgrades
            for i in range(db_version + 1, last_db_version + 1):
                script_name = 'db%i' % i
                module = __import__('tracscreenshots.db.%s' % (script_name),
                                    globals(), locals(), ['do_upgrade'])
                module.do_upgrade(self.env, cursor, True)

    def _get_db_version(self, cursor):
        try:
            sql = ("""SELECT value
                      FROM system
                      WHERE name='screenshots_version'""")
            self.log.debug(sql)
            cursor.execute(sql)
            for row in cursor:
                return int(row[0])
            return 0
        except:
            return 0
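The `__import__(name, globals(), locals(), ['do_upgrade'])` calls above rely on a non-empty fromlist to get the leaf module back instead of the top-level package. A small sketch of that behaviour, demonstrated with a standard-library module so it runs anywhere:

import importlib

# __import__ with no fromlist returns the top-level package ...
top = __import__('os.path')
print(top.__name__)                       # 'os'

# ... while a non-empty fromlist (or importlib) returns the leaf module.
leaf = __import__('os.path', globals(), locals(), ['join'])
print(leaf.__name__)                      # 'posixpath' or 'ntpath'
print(importlib.import_module('os.path').__name__)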
Example #6
class GitwebProjectsRepositoryProvider(Component):

    implements(IRepositoryProvider)

    projects_list = PathOption(
        'git',
        'projects_list',
        doc="""Path to a gitweb-formatted projects.list""")

    projects_base = PathOption('git',
                               'projects_base',
                               doc="""Path to the base of your git projects""")

    projects_url = Option(
        'git',
        'projects_url',
        doc="""Template for project URLs. %s will be replaced with the repo
        name""")

    def get_repositories(self):
        if not self.projects_list:
            return

        if not os.path.exists(self.projects_list):
            self.log.warn(
                "The [git] projects_list file was not found at "
                "'%s'", self.projects_list)
            return

        with open(self.projects_list, 'r') as fp:
            for line in fp:
                entries = line.strip().split()
                if entries:
                    reponame = entries[0]
                    repo = {
                        'dir': os.path.join(self.projects_base, reponame),
                        'type': 'git',
                    }
                    description_path = \
                        os.path.join(repo['dir'], 'description')
                    if os.path.exists(description_path):
                        with open(description_path, 'r') as fd:
                            repo['description'] = fd.read().strip()
                    name = reponame[:-4] if reponame.endswith('.git') else reponame
                    if self.projects_url:
                        repo['url'] = self.projects_url % name
                    yield name, repo
Example #7
class SubversionConnector(Component):
    """Add support for creating and managing SVN repositories."""

    implements(IAdministrativeRepositoryConnector)

    svn_authz_file = PathOption('repository-manager', 'svn_authz_file',
                                'svn.authz',
                                doc="""The path where the svn auhz file for
                                       repository access control via e.g.
                                       Apache should be created for managed
                                       repositories. If not set, no file will
                                       be written.
                                       """)
    pre_commit_hook = Option('repository-manager', 'svn_pre_commit',
                             doc="""Path to an executable that should be run
                                    before a change is committed to any SVN
                                    repository managed via this plugin.
                                    """)
    post_commit_hook = Option('repository-manager', 'svn_post_commit',
                              doc="""Path to an executable that should be run
                                     after a change was committed to any SVN
                                     repository managed via this plugin.
                                     """)

    def get_supported_types(self):
        yield ('svn', 0)

    def can_fork(self, type):
        return False

    def can_delete_changesets(self, type):
        return False

    def can_ban_changesets(self, type):
        return False

    def create(self, repo):
        try:
            characters = string.ascii_lowercase + string.digits
            layout = ''.join(random.choice(characters) for x in range(20))
            layout = os.path.join('/tmp', layout)
            os.makedirs(os.path.join(layout, 'trunk'))
            os.makedirs(os.path.join(layout, 'branches'))
            os.makedirs(os.path.join(layout, 'tags'))
            config = { 'fs-type': 'fsfs' }
            svn_repos_create(repo['dir'], '', '', None, config)
            client = pysvn.Client()
            client.set_default_username(repo['owner'])
            client.import_(layout, 'file://' + repo['dir'],
                           'Initial repository layout')
            shutil.rmtree(layout)
            if self.pre_commit_hook:
                os.symlink(os.path.join(self.env.path, self.pre_commit_hook),
                           os.path.join(repo['dir'], 'hooks/pre-commit'))
            if self.post_commit_hook:
                os.symlink(os.path.join(self.env.path, self.post_commit_hook),
                           os.path.join(repo['dir'], 'hooks/post-commit'))
        except Exception as e:
            raise TracError(_("Failed to initialize repository: ") + str(e))
Example #8
class SimplecaptchaPlugin(Component):
    implements(IRegistrationConfirmation)

    dict_file = PathOption('simplecaptcha', 'dictionary_file',
                           default="http://java.sun.com/docs/books/tutorial/collections/interfaces/examples/dictionary.txt")

    def __init__(self):
        self.keys = {}

    def pre_registration(self, req):
        """ Returns the HTML to be added to the registration form """
        msg = "Please enter the text below to prove you're not a machine."
        key = random.Random().randint(0, sys.maxint)
        word = self._random_word()
        self.keys[key] = (word, time.time())
        
        content = "<div><p>%s</p>%s" % (msg, self._get_captcha(word))
        content += "<label>Enter Word: <input type='text' name='simplecaptcha_captcha' class='textwidget' size='20'></label>"
        content += "<input type='hidden' name='simplecaptcha_key' value='%s' /></div>" % key
        
        return content

    def verify_registration(self, req):
        """Returns an error message if confirmation fails, or None on success
        """
        # keys older than 10min get deleted
        timeout = 600
        [self.keys.pop(k) for k, (word, timestamp) in self.keys.items() 
         if time.time()-timestamp >= timeout]

        key = int(req.args['simplecaptcha_key'])
        if key not in self.keys:  # timeout
            return req.redirect(req.href.base + "/register")
        correct_answer, timestamp = self.keys.pop(key)
        if req.args['simplecaptcha_captcha'].lower() != correct_answer:
            return "Sorry, the word you entered was incorrect. Please try again."

        return None

    def _random_word(self):
        """Returns a random word for use with the captcha"""
        min_len = 5

        if 'captcha_dict' not in globals():
            if self.dict_file.startswith("http://"):
                f = urllib.urlopen(self.dict_file)
            else:
                f = open(self.dict_file, "r")
            _dict = f.read()
            f.close()    
            _dict = _dict.lower().split()
            _dict = [word for word in _dict if word.isalpha() and len(word) > min_len]
            globals()['captcha_dict'] = _dict

        return random.Random().choice(captcha_dict)

    def _get_captcha(self, passphrase):
        """Returns HTML content of captcha as string"""
        return skimpyAPI.Pre(passphrase).data()
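The expiry purge at the top of verify_registration() uses a list comprehension purely for its side effects. The same pruning can be written as an explicit rebuild, sketched here with hypothetical entries:

import time

timeout = 600
keys = {
    1: ('apple', time.time() - 700),   # stale entry, older than timeout
    2: ('pear', time.time()),          # fresh entry
}

# Keep only entries younger than the timeout.
now = time.time()
keys = dict((k, v) for k, v in keys.items() if now - v[1] < timeout)
print(sorted(keys))                    # [2]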
Example #9
class AuthzPolicy(Component):
    """Permission policy using an authz-like configuration file.

    Refer to SVN documentation for syntax of the authz file. Groups are
    supported.

    As the fine-grained permissions brought by this permission policy are
    often used in complement of the other permission policies (like the
    `DefaultPermissionPolicy`), there's no need to redefine all the
    permissions here. Only additional rights or restrictions should be added.

    === Installation ===
    Enabling this policy requires listing it in `trac.ini`::

      {{{
      [trac]
      permission_policies = AuthzPolicy, DefaultPermissionPolicy

      [authz_policy]
      authz_file = conf/authzpolicy.conf
      }}}

    This means that the `AuthzPolicy` permissions will be checked first, and
    only if no rule is found will the `DefaultPermissionPolicy` be used.


    === Configuration ===
    The `authzpolicy.conf` file is a `.ini` style configuration file.

     - Each section of the config is a glob pattern used to match against a
       Trac resource descriptor. These descriptors are in the form::

         {{{
         <realm>:<id>@<version>[/<realm>:<id>@<version> ...]
         }}}

       Resources are ordered left to right, from parent to child. If any
       component is inapplicable, `*` is substituted. If the version pattern is
       not specified explicitly, all versions (`@*`) are added implicitly.

       Example: Match the WikiStart page::

         {{{
         [wiki:*]
         [wiki:WikiStart*]
         [wiki:WikiStart@*]
         [wiki:WikiStart]
         }}}

       Example: Match the attachment
       ``wiki:WikiStart@117/attachment/FOO.JPG@*`` on WikiStart::

         {{{
         [wiki:*]
         [wiki:WikiStart*]
         [wiki:WikiStart@*]
         [wiki:WikiStart@*/attachment/*]
         [wiki:WikiStart@117/attachment/FOO.JPG]
         }}}

     - Sections are checked against the current Trac resource '''IN ORDER''' of
       appearance in the configuration file. '''ORDER IS CRITICAL'''.

     - Once a section matches, the current username is matched, '''IN ORDER''',
       against the keys of the section. If a key is prefixed with a `@`, it is
       treated as a group. If a key is prefixed with a `!`, the permission is
       denied rather than granted. The username will match any of 'anonymous',
       'authenticated', <username> or '*', using normal Trac permission rules.

    Example configuration::

      {{{
      [groups]
      administrators = athomas

      [*/attachment:*]
      * = WIKI_VIEW, TICKET_VIEW

      [wiki:WikiStart@*]
      @administrators = WIKI_ADMIN
      anonymous = WIKI_VIEW
      * = WIKI_VIEW

      # Deny access to page templates
      [wiki:PageTemplates/*]
      * =

      # Match everything else
      [*]
      @administrators = TRAC_ADMIN
      anonymous = BROWSER_VIEW, CHANGESET_VIEW, FILE_VIEW, LOG_VIEW,
          MILESTONE_VIEW, POLL_VIEW, REPORT_SQL_VIEW, REPORT_VIEW,
          ROADMAP_VIEW, SEARCH_VIEW, TICKET_CREATE, TICKET_MODIFY,
          TICKET_VIEW, TIMELINE_VIEW,
          WIKI_CREATE, WIKI_MODIFY, WIKI_VIEW
      # Give authenticated users some extra permissions
      authenticated = REPO_SEARCH, XML_RPC
      }}}

    """
    implements(IPermissionPolicy)

    authz_file = PathOption('authz_policy', 'authz_file', '',
                            "Location of authz policy configuration file. "
                            "Non-absolute paths are relative to the "
                            "Environment `conf` directory.")

    def __init__(self):
        self.authz = None
        self.authz_mtime = None
        self.groups_by_user = {}

    # IPermissionPolicy methods

    def check_permission(self, action, username, resource, perm):
        if not self.authz_mtime or \
                os.path.getmtime(self.authz_file) != self.authz_mtime:
            self.parse_authz()
        resource_key = self.normalise_resource(resource)
        self.log.debug('Checking %s on %s', action, resource_key)
        permissions = self.authz_permissions(resource_key, username)
        if permissions is None:
            return None                 # no match, can't decide
        elif permissions == []:
            return False                # all actions are denied

        # FIXME: expand all permissions once for all
        ps = PermissionSystem(self.env)
        for deny, perms in groupby(permissions,
                                   key=lambda p: p.startswith('!')):
            if deny and action in ps.expand_actions(p[1:] for p in perms):
                return False            # action is explicitly denied
            elif action in ps.expand_actions(perms):
                return True             # action is explicitly granted

        return None                     # no match for action, can't decide

    # Internal methods

    def parse_authz(self):
        self.log.debug("Parsing authz security policy %s",
                       self.authz_file)

        if not self.authz_file:
            self.log.error("The `[authz_policy] authz_file` configuration "
                           "option in trac.ini is empty or not defined.")
            raise ConfigurationError()
        try:
            self.authz_mtime = os.path.getmtime(self.authz_file)
        except OSError as e:
            self.log.error("Error parsing authz permission policy file: %s",
                           exception_to_unicode(e))
            raise ConfigurationError()

        self.authz = UnicodeConfigParser(ignorecase_option=False)
        try:
            self.authz.read(self.authz_file)
        except ParsingError as e:
            self.log.error("Error parsing authz permission policy file: %s",
                           exception_to_unicode(e))
            raise ConfigurationError()
        groups = {}
        if self.authz.has_section('groups'):
            for group, users in self.authz.items('groups'):
                groups[group] = to_list(users)

        self.groups_by_user = {}

        def add_items(group, items):
            for item in items:
                if item.startswith('@'):
                    add_items(group, groups[item[1:]])
                else:
                    self.groups_by_user.setdefault(item, set()).add(group)

        for group, users in groups.iteritems():
            add_items('@' + group, users)

        all_actions = set(PermissionSystem(self.env).get_actions())
        authz_basename = os.path.basename(self.authz_file)
        for section in self.authz.sections():
            if section == 'groups':
                continue
            for _, actions in self.authz.items(section):
                for action in to_list(actions):
                    if action.startswith('!'):
                        action = action[1:]
                    if action not in all_actions:
                        self.log.warning("The action %s in the [%s] section "
                                         "of %s is not a valid action.",
                                         action, section, authz_basename)

    def normalise_resource(self, resource):
        def to_descriptor(resource):
            id = resource.id
            return '%s:%s@%s' % (resource.realm or '*',
                                 id if id is not None else '*',
                                 resource.version or '*')

        def flatten(resource):
            if not resource:
                return ['*:*@*']
            descriptor = to_descriptor(resource)
            if not resource.realm and resource.id is None:
                return [descriptor]
            # XXX Due to the mixed functionality in resource we can end up with
            # ticket, ticket:1, ticket:1@10. This code naively collapses all
            # subsets of the parent resource into one. eg. ticket:1@10
            parent = resource.parent
            while parent and resource.realm == parent.realm:
                parent = parent.parent
            if parent:
                return flatten(parent) + [descriptor]
            else:
                return [descriptor]

        return '/'.join(flatten(resource))

    def authz_permissions(self, resource_key, username):
        # TODO: Handle permission negation in sections. eg. "if in this
        # ticket, remove TICKET_MODIFY"
        if username and username != 'anonymous':
            valid_users = ['*', 'authenticated', 'anonymous', username]
        else:
            valid_users = ['*', 'anonymous']
        for resource_section in [a for a in self.authz.sections()
                                   if a != 'groups']:
            resource_glob = resource_section
            if '@' not in resource_glob:
                resource_glob += '@*'

            if fnmatchcase(resource_key, resource_glob):
                for who, permissions in self.authz.items(resource_section):
                    permissions = to_list(permissions)
                    if who in valid_users or \
                            who in self.groups_by_user.get(username, []):
                        self.log.debug("%s matched section %s for user %s",
                                       resource_key, resource_glob, username)
                        if isinstance(permissions, basestring):
                            return [permissions]
                        else:
                            return permissions
        return None
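To make the glob matching described in the AuthzPolicy docstring concrete, here is a minimal sketch of the section-matching step performed in authz_permissions(), using fnmatchcase with a hypothetical resource descriptor and section names:

from fnmatch import fnmatchcase

# Hypothetical descriptor for the WikiStart page, version unspecified.
resource_key = 'wiki:WikiStart@*'

# Candidate section globs, checked in file order; '@*' is appended when a
# section has no version part, mirroring authz_permissions() above.
for section in ('wiki:PageTemplates/*', 'wiki:WikiStart', '*'):
    glob = section if '@' in section else section + '@*'
    print('%s -> %s' % (glob, fnmatchcase(resource_key, glob)))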
Example #10
class AuthzSourcePolicy(Component):
    """Permission policy for `source:` and `changeset:` resources using a
    Subversion authz file.

    `FILE_VIEW` and `BROWSER_VIEW` permissions are granted as specified in the
    authz file.

    `CHANGESET_VIEW` permission is granted for changesets where `FILE_VIEW` is
    granted on at least one modified file, as well as for empty changesets.
    """

    implements(IPermissionPolicy)

    authz_file = PathOption('svn',
                            'authz_file',
                            '',
                            """The path to the Subversion
        [%(svnbook)s authorization (authz) file].
        To enable authz permission checking, the `AuthzSourcePolicy`
        permission policy must be added to `[trac] permission_policies`.
        Non-absolute paths are relative to the Environment `conf`
        directory.
        """,
                            doc_args={
                                'svnbook':
                                'http://svnbook.red-bean.com/en/1.7/'
                                'svn.serverconfig.pathbasedauthz.html'
                            })

    authz_module_name = Option(
        'svn', 'authz_module_name', '',
        """The module prefix used in the `authz_file` for the default
        repository. If left empty, the global section is used.
        """)

    _handled_perms = frozenset([(None, 'BROWSER_VIEW'),
                                (None, 'CHANGESET_VIEW'), (None, 'FILE_VIEW'),
                                (None, 'LOG_VIEW'), ('source', 'BROWSER_VIEW'),
                                ('source', 'FILE_VIEW'),
                                ('source', 'LOG_VIEW'),
                                ('changeset', 'CHANGESET_VIEW')])

    def __init__(self):
        self._mtime = 0
        self._authz = {}
        self._users = set()

    # IPermissionPolicy methods

    def check_permission(self, action, username, resource, perm):
        realm = resource.realm if resource else None
        if (realm, action) in self._handled_perms:
            authz, users = self._get_authz_info()
            if authz is None:
                return False

            if username == 'anonymous':
                usernames = '$anonymous', '*'
            else:
                usernames = username, '$authenticated', '*'
            if resource is None:
                return True if users & set(usernames) else None

            rm = RepositoryManager(self.env)
            try:
                repos = rm.get_repository(resource.parent.id)
            except TracError:
                return True  # Allow error to be displayed in the repo index
            if repos is None:
                return True
            modules = [resource.parent.id or self.authz_module_name]
            if modules[0]:
                modules.append('')

            def check_path_0(spath):
                sections = [
                    authz.get(module, {}).get(spath) for module in modules
                ]
                sections = [section for section in sections if section]
                denied = False
                for user in usernames:
                    for section in sections:
                        if user in section:
                            if section[user]:
                                return True
                            denied = True
                            # Don't check section without module name
                            # because the section with module name defines
                            # the user's permissions.
                            break
                if denied:  # All matching users were denied read access.
                    return False

            def check_path(path):
                path = '/' + pathjoin(repos.scope, path)
                if path != '/':
                    path += '/'

                # Allow access to parent directories of allowed resources
                for spath in set(
                        sum((list(authz.get(module, {}))
                             for module in modules), [])):
                    if spath.startswith(path):
                        result = check_path_0(spath)
                        if result is True:
                            return True

                # Walk from resource up parent directories
                for spath in parent_iter(path):
                    result = check_path_0(spath)
                    if result is not None:
                        return result

            if realm == 'source':
                return check_path(resource.id)

            elif realm == 'changeset':
                changes = list(repos.get_changeset(resource.id).get_changes())
                if not changes or any(
                        check_path(change[0]) for change in changes):
                    return True

    def _get_authz_info(self):
        if not self.authz_file:
            self.log.error("The [svn] authz_file configuration option in "
                           "trac.ini is empty or not defined")
            raise ConfigurationError()
        try:
            mtime = os.path.getmtime(self.authz_file)
        except OSError as e:
            self.log.error(
                "Error accessing svn authz permission policy "
                "file: %s", exception_to_unicode(e))
            raise ConfigurationError()
        if mtime != self._mtime:
            self._mtime = mtime
            rm = RepositoryManager(self.env)
            modules = set(repos.reponame
                          for repos in rm.get_real_repositories())
            if '' in modules and self.authz_module_name:
                modules.add(self.authz_module_name)
            modules.add('')
            self.log.info("Parsing authz file: %s", self.authz_file)
            try:
                self._authz = parse(self.authz_file, modules)
            except ParsingError as e:
                self.log.error(
                    "Error parsing svn authz permission policy "
                    "file: %s", exception_to_unicode(e))
                raise ConfigurationError()
            else:
                self._users = {
                    user
                    for paths in self._authz.itervalues()
                    for path in paths.itervalues()
                    for user, result in path.iteritems() if result
                }
        return self._authz, self._users
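check_path() above walks from the resource path up through its parent directories via parent_iter. Trac's actual helper may behave differently; the following is only an illustrative stand-in for how such a walk could look:

def parent_iter(path):
    # Yield the path itself, then each parent directory, ending at '/'.
    while True:
        yield path
        if path == '/':
            return
        path = path[:path.rindex('/', 0, -1) + 1]

print(list(parent_iter('/trunk/src/')))   # ['/trunk/src/', '/trunk/', '/']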
Example #11
File: env.py Project: t2y/trac
class Environment(Component, ComponentManager):
    """Trac environment manager.

    Trac stores project information in a Trac environment. It consists
    of a directory structure containing among other things:

    * a configuration file,
    * project-specific templates and plugins,
    * the wiki and ticket attachments files,
    * the SQLite database file (stores tickets, wiki pages...)
      in case the database backend is sqlite

    """

    implements(ISystemInfoProvider)

    required = True

    system_info_providers = ExtensionPoint(ISystemInfoProvider)
    setup_participants = ExtensionPoint(IEnvironmentSetupParticipant)

    components_section = ConfigSection('components',
        """This section is used to enable or disable components
        provided by plugins, as well as by Trac itself. The component
        to enable/disable is specified via the name of the
        option. Whether its enabled is determined by the option value;
        setting the value to `enabled` or `on` will enable the
        component, any other value (typically `disabled` or `off`)
        will disable the component.

        The option name is either the fully qualified name of the
        components or the module/package prefix of the component. The
        former enables/disables a specific component, while the latter
        enables/disables any component in the specified
        package/module.

        Consider the following configuration snippet:
        {{{
        [components]
        trac.ticket.report.ReportModule = disabled
        webadmin.* = enabled
        }}}

        The first option tells Trac to disable the
        [wiki:TracReports report module].
        The second option instructs Trac to enable all components in
        the `webadmin` package. Note that the trailing wildcard is
        required for module/package matching.

        To view the list of active components, go to the ''Plugins''
        page on ''About Trac'' (requires `CONFIG_VIEW`
        [wiki:TracPermissions permissions]).

        See also: TracPlugins
        """)

    shared_plugins_dir = PathOption('inherit', 'plugins_dir', '',
        """Path to the //shared plugins directory//.

        Plugins in that directory are loaded in addition to those in
        the directory of the environment `plugins`, with this one
        taking precedence.

        (''since 0.11'')""")

    base_url = Option('trac', 'base_url', '',
        """Reference URL for the Trac deployment.

        This is the base URL that will be used when producing
        documents that will be used outside of the web browsing
        context, like for example when inserting URLs pointing to Trac
        resources in notification e-mails.""")

    base_url_for_redirect = BoolOption('trac', 'use_base_url_for_redirect',
                                        False,
        """Optionally use `[trac] base_url` for redirects.

        In some configurations, usually involving running Trac behind
        a HTTP proxy, Trac can't automatically reconstruct the URL
        that is used to access it. You may need to use this option to
        force Trac to use the `base_url` setting also for
        redirects. This introduces the obvious limitation that this
        environment will only be usable when accessible from that URL,
        as redirects are frequently used. (''since 0.10.5'')""")

    secure_cookies = BoolOption('trac', 'secure_cookies', False,
        """Restrict cookies to HTTPS connections.

        When true, set the `secure` flag on all cookies so that they
        are only sent to the server on HTTPS connections. Use this if
        your Trac instance is only accessible through HTTPS. (''since
        0.11.2'')""")

    project_name = Option('project', 'name', 'My Project',
        """Name of the project.""")

    project_description = Option('project', 'descr', 'My example project',
        """Short description of the project.""")

    project_url = Option('project', 'url', '',
        """URL of the main project web site, usually the website in
        which the `base_url` resides. This is used in notification
        e-mails.""")

    project_admin = Option('project', 'admin', '',
        """E-Mail address of the project's administrator.""")

    project_admin_trac_url = Option('project', 'admin_trac_url', '.',
        """Base URL of a Trac instance where errors in this Trac
        should be reported.

        This can be an absolute or relative URL, or '.' to reference
        this Trac instance. An empty value will disable the reporting
        buttons.  (''since 0.11.3'')""")

    project_footer = Option('project', 'footer',
                            N_('Visit the Trac open source project at<br />'
                               '<a href="http://trac.edgewall.org/">'
                               'http://trac.edgewall.org/</a>'),
        """Page footer text (right-aligned).""")

    project_icon = Option('project', 'icon', 'common/trac.ico',
        """URL of the icon of the project.""")

    log_type = Option('logging', 'log_type', 'none',
        """Logging facility to use.

        Should be one of (`none`, `file`, `stderr`, `syslog`, `winlog`).""")

    log_file = Option('logging', 'log_file', 'trac.log',
        """If `log_type` is `file`, this should be a path to the
        log-file.  Relative paths are resolved relative to the `log`
        directory of the environment.""")

    log_level = Option('logging', 'log_level', 'DEBUG',
        """Level of verbosity in log.

        Should be one of (`CRITICAL`, `ERROR`, `WARN`, `INFO`, `DEBUG`).""")

    log_format = Option('logging', 'log_format', None,
        """Custom logging format.

        If nothing is set, the following will be used:

        Trac[$(module)s] $(levelname)s: $(message)s

        In addition to regular key names supported by the Python
        logger library (see
        http://docs.python.org/library/logging.html), one could use:

        - $(path)s     the path for the current environment
        - $(basename)s the last path component of the current environment
        - $(project)s  the project name

        Note the usage of `$(...)s` instead of `%(...)s` as the latter form
        would be interpreted by the ConfigParser itself.

        Example:
        `($(thread)d) Trac[$(basename)s:$(module)s] $(levelname)s: $(message)s`

        (''since 0.10.5'')""")

    def __init__(self, path, create=False, options=[]):
        """Initialize the Trac environment.

        :param path:   the absolute path to the Trac environment
        :param create: if `True`, the environment is created and
                       populated with default data; otherwise, the
                       environment is expected to already exist.
        :param options: A list of `(section, name, value)` tuples that
                        define configuration options
        """
        ComponentManager.__init__(self)

        self.path = path
        self.log = None
        self.config = None
        # System info should be provided through ISystemInfoProvider rather
        # than appending to systeminfo, which may become private in a future
        # release.
        self.systeminfo = []

        if create:
            self.create(options)
        else:
            self.verify()
            self.setup_config()

        if create:
            for setup_participant in self.setup_participants:
                setup_participant.environment_created()

    def get_systeminfo(self):
        """Return a list of `(name, version)` tuples describing the name
        and version information of external packages used by Trac and plugins.
        """
        info = self.systeminfo[:]
        for provider in self.system_info_providers:
            info.extend(provider.get_system_info() or [])
        info.sort(key=lambda (name, version): (name != 'Trac', name.lower()))
        return info

    def get_configinfo(self):
        """Returns a list of dictionaries containing the `name` and `options`
        of each configuration section. The value of `options` is a list of
        dictionaries containing the `name`, `value` and `modified` state of
        each configuration option. The `modified` value is True if the value
        differs from its default.

        :since: version 1.1.2
        """
        defaults = self.config.defaults(self.compmgr)
        sections = []
        for section in self.config.sections(self.compmgr):
            options = []
            default_options = defaults.get(section, {})
            for name, value in self.config.options(section, self.compmgr):
                default = default_options.get(name) or ''
                options.append({
                    'name': name, 'value': value,
                    'modified': unicode(value) != unicode(default)
                })
            options.sort(key=lambda o: o['name'])
            sections.append({'name': section, 'options': options})
        sections.sort(key=lambda s: s['name'])
        return sections

    # ISystemInfoProvider methods

    def get_system_info(self):
        from trac import core, __version__ as VERSION
        yield 'Trac', get_pkginfo(core).get('version', VERSION)
        yield 'Python', sys.version
        yield 'setuptools', setuptools.__version__
        from trac.util.datefmt import pytz
        if pytz is not None:
            yield 'pytz', pytz.__version__
        if hasattr(self, 'webfrontend_version'):
            yield self.webfrontend, self.webfrontend_version

    def component_activated(self, component):
        """Initialize additional member variables for components.

        Every component activated through the `Environment` object
        gets three member variables: `env` (the environment object),
        `config` (the environment configuration) and `log` (a logger
        object)."""
        component.env = self
        component.config = self.config
        component.log = self.log

    def _component_name(self, name_or_class):
        name = name_or_class
        if not isinstance(name_or_class, basestring):
            name = name_or_class.__module__ + '.' + name_or_class.__name__
        return name.lower()

    @lazy
    def _component_rules(self):
        _rules = {}
        for name, value in self.components_section.options():
            if name.endswith('.*'):
                name = name[:-2]
            _rules[name.lower()] = value.lower() in ('enabled', 'on')
        return _rules

    def is_component_enabled(self, cls):
        """Implemented to only allow activation of components that are
        not disabled in the configuration.

        This is called by the `ComponentManager` base class when a
        component is about to be activated. If this method returns
        `False`, the component does not get activated. If it returns
        `None`, the component only gets activated if it is located in
        the `plugins` directory of the environment.
        """
        component_name = self._component_name(cls)

        # Disable the pre-0.11 WebAdmin plugin
        # Please note that there's no recommendation to uninstall the
        # plugin because doing so would obviously break the backwards
        # compatibility that the new integration administration
        # interface tries to provide for old WebAdmin extensions
        if component_name.startswith('webadmin.'):
            self.log.info("The legacy TracWebAdmin plugin has been "
                          "automatically disabled, and the integrated "
                          "administration interface will be used "
                          "instead.")
            return False

        rules = self._component_rules
        cname = component_name
        while cname:
            enabled = rules.get(cname)
            if enabled is not None:
                return enabled
            idx = cname.rfind('.')
            if idx < 0:
                break
            cname = cname[:idx]

        # By default, all components in the trac package except
        # trac.test are enabled
        return component_name.startswith('trac.') and \
               not component_name.startswith('trac.test.') or None

    def enable_component(self, cls):
        """Enable a component or module."""
        self._component_rules[self._component_name(cls)] = True

    def verify(self):
        """Verify that the provided path points to a valid Trac environment
        directory."""
        try:
            tag = read_file(os.path.join(self.path, 'VERSION')).splitlines()[0]
            if tag != _VERSION:
                raise Exception("Unknown Trac environment type '%s'" % tag)
        except Exception as e:
            raise TracError("No Trac environment found at %s\n%s"
                            % (self.path, e))

    def get_db_cnx(self):
        """Return a database connection from the connection pool

        :deprecated: Use :meth:`db_transaction` or :meth:`db_query` instead

        `db_transaction` for obtaining the `db` database connection
        which can be used for performing any query
        (SELECT/INSERT/UPDATE/DELETE)::

           with env.db_transaction as db:
               ...

        Note that within the block, you don't need to (and shouldn't)
        call ``commit()`` yourself, the context manager will take care
        of it (if it's the outermost such context manager on the
        stack).


        `db_query` for obtaining a `db` database connection which can
        be used for performing SELECT queries only::

           with env.db_query as db:
               ...
        """
        return DatabaseManager(self).get_connection()

    @lazy
    def db_exc(self):
        """Return an object (typically a module) containing all the
        backend-specific exception types as attributes, named
        according to the Python Database API
        (http://www.python.org/dev/peps/pep-0249/).

        To catch a database exception, use the following pattern::

            try:
                with env.db_transaction as db:
                    ...
            except env.db_exc.IntegrityError as e:
                ...
        """
        return DatabaseManager(self).get_exceptions()

    def with_transaction(self, db=None):
        """Decorator for transaction functions :deprecated:"""
        return with_transaction(self, db)

    def get_read_db(self):
        """Return a database connection for read purposes :deprecated:

        See `trac.db.api.get_read_db` for detailed documentation."""
        return DatabaseManager(self).get_connection(readonly=True)

    @property
    def db_query(self):
        """Return a context manager
        (`~trac.db.api.QueryContextManager`) which can be used to
        obtain a read-only database connection.

        Example::

            with env.db_query as db:
                cursor = db.cursor()
                cursor.execute("SELECT ...")
                for row in cursor.fetchall():
                    ...

        Note that a connection retrieved this way can be "called"
        directly in order to execute a query::

            with env.db_query as db:
                for row in db("SELECT ..."):
                    ...

        :warning: after a `with env.db_query as db` block, though the
          `db` variable is still defined, you shouldn't use it as it
          might have been closed when exiting the context, if this
          context was the outermost context (`db_query` or
          `db_transaction`).

        If you don't need to manipulate the connection itself, this
        can even be simplified to::

            for row in env.db_query("SELECT ..."):
                ...

        """
        return QueryContextManager(self)

    @property
    def db_transaction(self):
        """Return a context manager
        (`~trac.db.api.TransactionContextManager`) which can be used
        to obtain a writable database connection.

        Example::

            with env.db_transaction as db:
                cursor = db.cursor()
                cursor.execute("UPDATE ...")

        Upon successful exit of the context, the context manager will
        commit the transaction. In case of nested contexts, only the
        outermost context performs a commit. However, should an
        exception happen, any context manager will perform a rollback.
        You should *not* call `commit()` yourself within such block,
        as this will force a commit even if that transaction is part
        of a larger transaction.

        Like for its read-only counterpart, you can directly execute a
        DML query on the `db`::

            with env.db_transaction as db:
                db("UPDATE ...")

        :warning: after a `with env.db_transaction as db` block,
          though the `db` variable is still available, you shouldn't
          use it as it might have been closed when exiting the
          context, if this context was the outermost context
          (`db_query` or `db_transaction`).

        If you don't need to manipulate the connection itself, this
        can also be simplified to::

            env.db_transaction("UPDATE ...")

        """
        return TransactionContextManager(self)

    def shutdown(self, tid=None):
        """Close the environment."""
        RepositoryManager(self).shutdown(tid)
        DatabaseManager(self).shutdown(tid)
        if tid is None:
            self.log.removeHandler(self._log_handler)
            self._log_handler.flush()
            self._log_handler.close()
            del self._log_handler

    def get_repository(self, reponame=None, authname=None):
        """Return the version control repository with the given name,
        or the default repository if `None`.

        The standard way of retrieving repositories is to use the
        methods of `RepositoryManager`. This method is retained here
        for backward compatibility.

        :param reponame: the name of the repository
        :param authname: the user name for authorization (not used
                         anymore, left here for compatibility with
                         0.11)
        """
        return RepositoryManager(self).get_repository(reponame)

    def create(self, options=[]):
        """Create the basic directory structure of the environment,
        initialize the database and populate the configuration file
        with default values.

        If options contains ('inherit', 'file'), default values will
        not be loaded; they are expected to be provided by that file
        or other options.
        """
        # Create the directory structure
        if not os.path.exists(self.path):
            os.mkdir(self.path)
        os.mkdir(self.get_log_dir())
        os.mkdir(self.get_htdocs_dir())
        os.mkdir(os.path.join(self.path, 'plugins'))

        # Create a few files
        create_file(os.path.join(self.path, 'VERSION'), _VERSION + '\n')
        create_file(os.path.join(self.path, 'README'),
                    'This directory contains a Trac environment.\n'
                    'Visit http://trac.edgewall.org/ for more information.\n')

        # Setup the default configuration
        os.mkdir(os.path.join(self.path, 'conf'))
        create_file(os.path.join(self.path, 'conf', 'trac.ini.sample'))
        config = Configuration(os.path.join(self.path, 'conf', 'trac.ini'))
        for section, name, value in options:
            config.set(section, name, value)
        config.save()
        self.setup_config()
        if not any((section, option) == ('inherit', 'file')
                   for section, option, value in options):
            self.config.set_defaults(self)
            self.config.save()

        # Create the database
        DatabaseManager(self).init_db()

    def get_version(self, initial=False):
        """Return the current version of the database.  If the
        optional argument `initial` is set to `True`, the version of
        the database used at the time of creation will be returned.

        In practice, for database created before 0.11, this will
        return `False` which is "older" than any db version number.

        :since: 0.11
        """
        rows = self.db_query("""
                SELECT value FROM system WHERE name='%sdatabase_version'
                """ % ('initial_' if initial else ''))
        return int(rows[0][0]) if rows else False

    def setup_config(self):
        """Load the configuration file."""
        self.config = Configuration(os.path.join(self.path, 'conf',
                                                 'trac.ini'),
                                    {'envname': os.path.basename(self.path)})
        self.setup_log()
        from trac.loader import load_components
        plugins_dir = self.shared_plugins_dir
        load_components(self, plugins_dir and (plugins_dir,))

    def get_templates_dir(self):
        """Return absolute path to the templates directory."""
        return os.path.join(self.path, 'templates')

    def get_htdocs_dir(self):
        """Return absolute path to the htdocs directory."""
        return os.path.join(self.path, 'htdocs')

    def get_log_dir(self):
        """Return absolute path to the log directory."""
        return os.path.join(self.path, 'log')

    def setup_log(self):
        """Initialize the logging sub-system."""
        from trac.log import logger_handler_factory
        logtype = self.log_type
        logfile = self.log_file
        if logtype == 'file' and not os.path.isabs(logfile):
            logfile = os.path.join(self.get_log_dir(), logfile)
        format = self.log_format
        logid = 'Trac.%s' % sha1(self.path).hexdigest()
        if format:
            format = format.replace('$(', '%(') \
                     .replace('%(path)s', self.path) \
                     .replace('%(basename)s', os.path.basename(self.path)) \
                     .replace('%(project)s', self.project_name)
        self.log, self._log_handler = logger_handler_factory(
            logtype, logfile, self.log_level, logid, format=format)
        from trac import core, __version__ as VERSION
        self.log.info('-' * 32 + ' environment startup [Trac %s] ' + '-' * 32,
                      get_pkginfo(core).get('version', VERSION))

    def get_known_users(self):
        """Generator that yields information about all known users,
        i.e. users that have logged in to this Trac environment and
        possibly set their name and email.

        This function generates one tuple for every user, of the form
        (username, name, email) ordered alpha-numerically by username.
        """
        for username, name, email in self.db_query("""
                SELECT DISTINCT s.sid, n.value, e.value
                FROM session AS s
                 LEFT JOIN session_attribute AS n ON (n.sid=s.sid
                  and n.authenticated=1 AND n.name = 'name')
                 LEFT JOIN session_attribute AS e ON (e.sid=s.sid
                  AND e.authenticated=1 AND e.name = 'email')
                WHERE s.authenticated=1 ORDER BY s.sid
                """):
            yield username, name, email

    def backup(self, dest=None):
        """Create a backup of the database.

        :param dest: Destination file; if not specified, the backup is
                     stored in a file called db_name.trac_version.bak
        """
        return DatabaseManager(self).backup(dest)

    def needs_upgrade(self):
        """Return whether the environment needs to be upgraded."""
        for participant in self.setup_participants:
            args = ()
            with self.db_query as db:
                if arity(participant.environment_needs_upgrade) == 1:
                    args = (db,)
                if participant.environment_needs_upgrade(*args):
                    self.log.warn("Component %s requires environment upgrade",
                                  participant)
                    return True
        return False

    def upgrade(self, backup=False, backup_dest=None):
        """Upgrade database.

        :param backup: whether or not to backup before upgrading
        :param backup_dest: name of the backup file
        :return: whether the upgrade was performed
        """
        upgraders = []
        for participant in self.setup_participants:
            args = ()
            with self.db_query as db:
                if arity(participant.environment_needs_upgrade) == 1:
                    args = (db,)
                if participant.environment_needs_upgrade(*args):
                    upgraders.append(participant)
        if not upgraders:
            return

        if backup:
            try:
                self.backup(backup_dest)
            except Exception as e:
                raise BackupError(e)

        for participant in upgraders:
            self.log.info("%s.%s upgrading...", participant.__module__,
                          participant.__class__.__name__)
            args = ()
            with self.db_transaction as db:
                if arity(participant.upgrade_environment) == 1:
                    args = (db,)
                participant.upgrade_environment(*args)
            # Database schema may have changed, so close all connections
            DatabaseManager(self).shutdown()
        return True
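    # A hedged sketch of a setup participant as driven by needs_upgrade() and
    # upgrade() above; the import path is the usual Trac one and is assumed
    # here. With zero-argument methods the arity check passes no `db` handle:
    #
    #     from trac.core import Component, implements
    #     from trac.env import IEnvironmentSetupParticipant
    #
    #     class MySchemaUpgrader(Component):
    #         implements(IEnvironmentSetupParticipant)
    #
    #         def environment_created(self):
    #             pass
    #
    #         def environment_needs_upgrade(self):
    #             return False  # return True to be picked up by upgrade()
    #
    #         def upgrade_environment(self):
    #             pass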

    @lazy
    def href(self):
        """The application root path"""
        return Href(urlsplit(self.abs_href.base).path)

    @lazy
    def abs_href(self):
        """The application URL"""
        if not self.base_url:
            self.log.warn("base_url option not set in configuration, "
                          "generated links may be incorrect")
            _abs_href = Href('')
        else:
            _abs_href = Href(self.base_url)
        return _abs_href
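    # Worked example (configuration assumed): with base_url set to
    # 'https://example.org/trac/proj', abs_href('wiki', 'Start') yields
    # 'https://example.org/trac/proj/wiki/Start', while href('wiki', 'Start')
    # keeps only the path part and yields '/trac/proj/wiki/Start'. Without
    # base_url, abs_href falls back to Href('') and links come out relative.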
Exemple #12
0
class FilesCoreComponent(Component):
    """
    Helper component for the file configurations etc.
    """

    default_downloads_directory = Option('multiproject-files', 'default_downloads_directory',
        default='downloads',
        doc='Default name of the downloads directory created for each project.')

    sys_dav_root = PathOption('multiproject-files', 'sys_dav_root',
        default='/var/www/trac/webdav',
        doc='Path to the root directory of the webdav directory. '
            'For example, "/path/to/multiproject/root/webdav". ')

    url_dav_path = Option('multiproject-files', 'url_dav_path',
        default='dav',
        doc='Relative URL path to the DAV directory, appended after the '
            '[multiproject] url_projects_path option. For example, "dav".')

    downloads_dir_customizable = BoolOption('multiproject-files', 'downloads_dir_customizable',
        default='True',
        doc="Whether or not projects can configure their downloads directory, "
            "or whether to use the MultiProject-wide configurations. "
            "Setting this to False improves performance. ")

    def base_url(self, context_name='files'):
        env_name = self.env.project_identifier
        if context_name == 'files':
            return '/files'
        elif context_name == 'webdav':
            return '/'.join([self.config.get('multiproject',
                'url_projects_path', ''), self.url_dav_path, env_name])
        else:
            raise InvalidOperationError("Invalid context name: %s" % context_name)

    def files_download_config(self, context_name='files', req=None):
        """
        :param str context_name: either 'files' or 'webdav'
        :return: FilesDownloadConfig
        """
        ctx = None
        ctx_key = 'files.download_config.' + context_name
        if req:
            ctx = get_context(req)
            try:
                return ctx[ctx_key]
            except KeyError:
                pass
        env_name = self.env.project_identifier
        base_url = self.base_url(context_name=context_name)
        download_config = FilesDownloadConfig(env_name, base_url=base_url)
        if ctx:
            ctx[ctx_key] = download_config
        return download_config

    def files_node_factory_and_config(self, req=None, context_name='files'):
        """
        :param req: Request object
        :param str context_name: either 'files' or 'webdav'
        :return:
        """
        ctx = None
        ctx_key = 'files.node_factory.' + context_name
        if req:
            ctx = get_context(req)
            try:
                return ctx[ctx_key], self.files_download_config(context_name, req=req)
            except KeyError:
                pass
        download_config = self.files_download_config(context_name=context_name, req=req)
        project = Project.get(self.env)
        project_id = project.id
        node_factory = MappedFileNode(project_id,
            download_config.base_path, download_config.base_url, download_config.downloads_dir)
        if ctx:
            ctx[ctx_key] = node_factory
        return node_factory, download_config
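    # A minimal usage sketch (assumed caller, not part of the original class):
    # a request handler holding this component fetches the factory once; a
    # second call with the same `req` returns the instance cached in the
    # request context instead of rebuilding it:
    #
    #     files_core = FilesCoreComponent(self.env)
    #     node_factory, download_config = \
    #         files_core.files_node_factory_and_config(req, 'files')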

    def order_files_and_dirs(self, req, is_download, dirs=None, files=None):
        order_by = req.args.get('files_order_by', 'filename')
        order_reversed = req.args.get('files_order_in', 'asc') == 'desc'

        dir_key = None
        if order_by == 'filename':
            key = lambda node: node.filename
        elif order_by == 'count' and is_download:
            key = lambda node: (node.download().count, node.filename)
            dir_key = lambda node: node.filename
        elif order_by == 'featured' and is_download:
            key = lambda node: (node.download().is_featured() and 1 or 0, node.filename)
            dir_key = lambda node: node.filename
        elif order_by == 'modified':
            if is_download:
                key = lambda node: (node.download().created, node.filename)
                dir_key = lambda node: (node.time_changed, node.filename)
            else:
                key = lambda node: (node.time_changed, node.filename)
        elif order_by == 'size':
            key = lambda node: (node.size, node.filename)
            dir_key = lambda node: node.filename
        else:
            key = lambda node: node.filename
        if files:
            files.sort(key=key, reverse=order_reversed)
        if dirs:
            if not dir_key:
                dirs.sort(key=key, reverse=order_reversed)
            else:
                dirs.sort(key=dir_key, reverse=order_reversed)
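    # Worked example (request arguments assumed): with
    # req.args = {'files_order_by': 'size', 'files_order_in': 'desc'} the files
    # are sorted by (size, filename) in reverse order, while directories fall
    # back to sorting by filename, also reversed.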
Exemple #13
0
class DownloadsApi(Component):

    # Download change listeners.
    change_listeners = ExtensionPoint(IDownloadChangeListener)

    # Configuration options.
    title = Option('downloads',
                   'title',
                   'Downloads',
                   doc='Main navigation bar button title.')
    path = PathOption('downloads',
                      'path',
                      '../downloads',
                      doc='Path where to store uploaded downloads.')
    ext = ListOption('downloads', 'ext', 'zip,gz,bz2,rar',
      doc='List of file extensions allowed to upload. Set to "all" to '
          'specify that any file extension is allowed.')
    max_size = IntOption(
        'downloads', 'max_size', 268697600,
        'Maximum allowed file size (in bytes) for downloads. Default is 256 MB.'
    )
    visible_fields = ListOption(
        'downloads',
        'visible_fields',
        'id,file,description,size,time,count,author,tags,component,version,'
        'architecture,platform,type',
        doc='List of downloads table fields that'
        ' should be visible to users on Downloads section.')
    download_sort = Option(
        'downloads', 'download_sort', 'time', 'Column by'
        ' which downloads list will be sorted. Possible values are: id, file,'
        ' description, size, time, count, author, tags, component, version,'
        ' architecture, platform, type. Default value is: time.')
    download_sort_direction = Option(
        'downloads', 'download_sort_direction', 'desc',
        'Direction of downloads list sorting. Possible values are: asc,'
        ' desc. Default value is: desc.')
    architecture_sort = Option(
        'downloads', 'architecture_sort', 'name',
        'Column by which architectures list will be sorted. Possible values are:'
        ' id, name, description. Default value is: name.')
    architecture_sort_direction = Option(
        'downloads', 'architecture_sort_direction', 'asc',
        'Direction of architectures list'
        ' sorting. Possible values are: asc, desc. Default value is: asc.')
    platform_sort = Option(
        'downloads', 'platform_sort', 'name', 'Column by'
        ' which platforms list will be sorted. Possible values are: id, name,'
        ' description. Default value is: name.')
    platform_sort_direction = Option(
        'downloads', 'platform_sort_direction', 'asc',
        'Direction of platforms list sorting. Possible values are: asc,'
        ' desc. Default value is: asc.')
    type_sort = Option(
        'downloads', 'type_sort', 'name', 'Column by which types'
        ' list will be sorted. Possible values are: id, name, description.'
        ' Default value is: name.')
    type_sort_direction = Option(
        'downloads', 'type_sort_direction', 'asc',
        'Direction of types list sorting. Possible values are: asc, desc. Default'
        ' value is: asc.')
    unique_filename = BoolOption(
        'downloads',
        'unique_filename',
        False,
        doc='If enabled checks if uploaded file has unique name.')

    # Get list functions.

    def _get_items(self,
                   context,
                   table,
                   columns,
                   where='',
                   values=(),
                   order_by='',
                   desc=False):
        sql = 'SELECT ' + ', '.join(columns) + ' FROM ' + table + (
            where and (' WHERE ' + where)
            or '') + (order_by and (' ORDER BY ' + order_by +
                                    (' ASC', ' DESC')[bool(desc)]) or '')
        self.log.debug(sql % values)
        context.cursor.execute(sql, values)
        items = []
        for row in context.cursor:
            row = dict(zip(columns, row))
            items.append(row)
        return items
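    # Worked example (values assumed for illustration): a call such as
    # _get_items(context, 'platform', ('id', 'name'), order_by='name',
    # desc=True) builds and executes
    # "SELECT id, name FROM platform ORDER BY name DESC" and returns a list of
    # {'id': ..., 'name': ...} dicts, one per row.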

    def get_versions(self, context, order_by='name', desc=False):
        # Get versions from table.
        versions = self._get_items(context,
                                   'version', ('name', 'description'),
                                   order_by=order_by,
                                   desc=desc)

        # Add IDs to versions according to selected sorting.
        id = 0
        for version in versions:
            id = id + 1
            version['id'] = id
        return versions

    def get_components(self, context, order_by='', desc=False):
        # Get components from table.
        components = self._get_items(context,
                                     'component', ('name', 'description'),
                                     order_by=order_by,
                                     desc=desc)

        # Add IDs to components according to selected sorting.
        id = 0
        for component in components:
            id = id + 1
            component['id'] = id
        return components

    def get_downloads(self, context, order_by='id', desc=False):
        # Get downloads from table.
        downloads = self._get_items(
            context,
            'download', ('id', 'file', 'description', 'size', 'time', 'count',
                         'author', 'tags', 'component', 'version',
                         'architecture', 'platform', 'type'),
            order_by=order_by,
            desc=desc)

        # Replace field IDs with appropriate objects.
        for download in downloads:
            download['architecture'] = self.get_architecture(
                context, download['architecture'])
            download['platform'] = self.get_platform(context,
                                                     download['platform'])
            download['type'] = self.get_type(context, download['type'])
        return downloads

    def get_new_downloads(self,
                          context,
                          start,
                          stop,
                          order_by='time',
                          desc=False):
        return self._get_items(context,
                               'download',
                               ('id', 'file', 'description', 'size', 'time',
                                'count', 'author', 'tags', 'component',
                                'version', 'architecture', 'platform', 'type'),
                               'time BETWEEN %s AND'
                               ' %s', (start, stop),
                               order_by=order_by,
                               desc=desc)

    def get_architectures(self, context, order_by='id', desc=False):
        return self._get_items(context,
                               'architecture', ('id', 'name', 'description'),
                               order_by=order_by,
                               desc=desc)

    def get_platforms(self, context, order_by='id', desc=False):
        return self._get_items(context,
                               'platform', ('id', 'name', 'description'),
                               order_by=order_by,
                               desc=desc)

    def get_types(self, context, order_by='id', desc=False):
        return self._get_items(context,
                               'download_type', ('id', 'name', 'description'),
                               order_by=order_by,
                               desc=desc)

    # Get one item functions.

    def _get_item(self, context, table, columns, where='', values=()):
        sql = 'SELECT ' + ', '.join(columns) + ' FROM ' + table + (
            where and (' WHERE ' + where) or '')
        self.log.debug(sql % values)
        context.cursor.execute(sql, values)
        for row in context.cursor:
            row = dict(zip(columns, row))
            return row
        return None

    def get_download(self, context, id):
        return self._get_item(context, 'download',
                              ('id', 'file', 'description', 'size', 'time',
                               'count', 'author', 'tags', 'component',
                               'version', 'architecture', 'platform', 'type'),
                              'id = %s', (id, ))

    def get_download_by_time(self, context, time):
        return self._get_item(context, 'download',
                              ('id', 'file', 'description', 'size', 'time',
                               'count', 'author', 'tags', 'component',
                               'version', 'architecture', 'platform', 'type'),
                              'time = %s', (time, ))

    def get_download_by_file(self, context, file):
        return self._get_item(context, 'download',
                              ('id', 'file', 'description', 'size', 'time',
                               'count', 'author', 'tags', 'component',
                               'version', 'architecture', 'platform', 'type'),
                              'file = %s', (file, ))

    def get_architecture(self, context, id):
        architecture = self._get_item(context, 'architecture',
                                      ('id', 'name', 'description'), 'id = %s',
                                      (id, ))
        if not architecture:
            architecture = {'id': 0, 'name': '', 'description': ''}
        return architecture

    def get_architecture_by_name(self, context, name):
        architecture = self._get_item(context, 'architecture',
                                      ('id', 'name', 'description'),
                                      'name = %s', (name, ))
        if not architecture:
            architecture = {'id': 0, 'name': '', 'description': ''}
        return architecture

    def get_platform(self, context, id):
        platform = self._get_item(context, 'platform',
                                  ('id', 'name', 'description'), 'id = %s',
                                  (id, ))
        if not platform:
            platform = {'id': 0, 'name': '', 'description': ''}
        return platform

    def get_platform_by_name(self, context, name):
        platform = self._get_item(context, 'platform',
                                  ('id', 'name', 'description'), 'name = %s',
                                  (name, ))
        if not platform:
            platform = {'id': 0, 'name': '', 'description': ''}
        return platform

    def get_type(self, context, id):
        type = self._get_item(context, 'download_type',
                              ('id', 'name', 'description'), 'id = %s', (id, ))
        if not type:
            type = {'id': 0, 'name': '', 'description': ''}
        return type

    def get_type_by_name(self, context, name):
        type = self._get_item(context, 'download_type',
                              ('id', 'name', 'description'), 'name = %s',
                              (name, ))
        if not type:
            type = {'id': 0, 'name': '', 'description': ''}
        return type

    def get_description(self, context):
        sql = "SELECT value FROM system WHERE name = 'downloads_description'"
        self.log.debug(sql)
        context.cursor.execute(sql)
        for row in context.cursor:
            return row[0]

    # Add item functions.

    def _add_item(self, context, table, item):
        fields = item.keys()
        values = item.values()
        sql = "INSERT INTO %s (" % (table,) + ", ".join(fields) + ") VALUES (" \
          + ", ".join(["%s" for I in xrange(len(fields))]) + ")"
        self.log.debug(sql % tuple(values))
        context.cursor.execute(sql, tuple(values))
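    # Worked example (values assumed for illustration): _add_item(context,
    # 'architecture', {'name': 'x86_64', 'description': ''}) produces
    # "INSERT INTO architecture (name, description) VALUES (%s, %s)" with the
    # dict values bound as parameters; the column order follows the dict's
    # iteration order.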

    def add_download(self, context, download):
        self._add_item(context, 'download', download)

    def add_architecture(self, context, architecture):
        self._add_item(context, 'architecture', architecture)

    def add_platform(self, context, platform):
        self._add_item(context, 'platform', platform)

    def add_type(self, context, type):
        self._add_item(context, 'download_type', type)

    # Edit item functions.

    def _edit_item(self, context, table, id, item):
        fields = item.keys()
        values = item.values()
        sql = "UPDATE %s SET " % (table, ) + ", ".join(
            [("%s = %%s" % (field)) for field in fields]) + " WHERE id = %s"
        self.log.debug(sql % tuple(values + [id]))
        context.cursor.execute(sql, tuple(values + [id]))
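    # Worked example (values assumed for illustration): _edit_item(context,
    # 'platform', 5, {'name': 'Linux'}) produces
    # "UPDATE platform SET name = %s WHERE id = %s" and binds the parameters
    # ('Linux', 5).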

    def edit_download(self, context, id, download):
        self._edit_item(context, 'download', id, download)

    def edit_architecture(self, context, id, architecture):
        self._edit_item(context, 'architecture', id, architecture)

    def edit_platform(self, context, id, platform):
        self._edit_item(context, 'platform', id, platform)

    def edit_type(self, context, id, type):
        self._edit_item(context, 'download_type', id, type)

    def edit_description(self, context, description):
        sql = "UPDATE system SET value = %s WHERE name = 'downloads_description'"
        self.log.debug(sql % (description, ))
        context.cursor.execute(sql, (description, ))

    # Delete item functions.

    def _delete_item(self, context, table, id):
        sql = "DELETE FROM " + table + " WHERE id = %s"
        self.log.debug(sql % (id, ))
        context.cursor.execute(sql, (id, ))

    def _delete_item_ref(self, context, table, column, id):
        sql = "UPDATE " + table + " SET " + column + " = NULL WHERE " + column + " = %s"
        self.log.debug(sql % (id, ))
        context.cursor.execute(sql, (id, ))

    def delete_download(self, context, id):
        self._delete_item(context, 'download', id)

    def delete_architecture(self, context, id):
        self._delete_item(context, 'architecture', id)
        self._delete_item_ref(context, 'download', 'architecture', id)

    def delete_platform(self, context, id):
        self._delete_item(context, 'platform', id)
        self._delete_item_ref(context, 'download', 'platform', id)

    def delete_type(self, context, id):
        self._delete_item(context, 'download_type', id)
        self._delete_item_ref(context, 'download', 'type', id)

    # Misc database access functions.

    def _get_attribute(self, context, table, column, where='', values=()):
        sql = 'SELECT ' + column + ' FROM ' + table + (where and
                                                       (' WHERE ' + where)
                                                       or '')
        self.log.debug(sql % values)
        context.cursor.execute(sql, values)
        for row in context.cursor:
            return row[0]
        return None

    def get_download_id_from_file(self, context, file):
        return self._get_attribute(context, 'download', 'id', 'file = %s',
                                   (file, ))

    def get_number_of_downloads(self, context, download_ids=None):
        sql = 'SELECT SUM(count) FROM download' + (
            download_ids and (' WHERE id in (' + ', '.join(
                [to_unicode(download_id)
                 for download_id in download_ids]) + ')') or '')
        self.log.debug(sql)
        context.cursor.execute(sql)
        for row in context.cursor:
            return row[0]
        return None

    # Process request functions.

    def process_downloads(self, context):
        # Clear data for next request.
        self.data = {}

        # Get database access.
        db = self.env.get_db_cnx()
        context.cursor = db.cursor()

        # Get request mode
        modes = self._get_modes(context)
        self.log.debug('modes: %s' % modes)

        # Perform mode actions
        self._do_actions(context, modes)

        # Fill up the template data.
        self.data['authname'] = context.req.authname
        self.data['time'] = format_datetime(datetime.now(utc))
        self.data['realm'] = context.resource.realm

        # Add CSS styles
        add_stylesheet(context.req, 'common/css/wiki.css')
        add_stylesheet(context.req, 'downloads/css/downloads.css')
        add_stylesheet(context.req, 'downloads/css/admin.css')

        # Add JavaScripts
        add_script(context.req, 'common/js/trac.js')
        add_script(context.req, 'common/js/wikitoolbar.js')

        # Commit database changes and return template and data.
        db.commit()
        self.env.log.debug('data: %s' % (self.data, ))
        return modes[-1] + '.html', {'downloads': self.data}

    # Internal functions.

    def _get_modes(self, context):
        # Get request arguments.
        page = context.req.args.get('page')
        action = context.req.args.get('action')
        self.log.debug('context: %s page: %s action: %s' %
                       (context, page, action))

        # Determine mode.
        if context.resource.realm == 'downloads-admin':
            if page == 'downloads':
                if action == 'post-add':
                    return ['downloads-post-add', 'admin-downloads-list']
                elif action == 'post-edit':
                    return ['downloads-post-edit', 'admin-downloads-list']
                elif action == 'delete':
                    return ['downloads-delete', 'admin-downloads-list']
                else:
                    return ['admin-downloads-list']
            elif page == 'architectures':
                if action == 'post-add':
                    return [
                        'architectures-post-add', 'admin-architectures-list'
                    ]
                elif action == 'post-edit':
                    return [
                        'architectures-post-edit', 'admin-architectures-list'
                    ]
                elif action == 'delete':
                    return ['architectures-delete', 'admin-architectures-list']
                else:
                    return ['admin-architectures-list']
            elif page == 'platforms':
                if action == 'post-add':
                    return ['platforms-post-add', 'admin-platforms-list']
                elif action == 'post-edit':
                    return ['platforms-post-edit', 'admin-platforms-list']
                elif action == 'delete':
                    return ['platforms-delete', 'admin-platforms-list']
                else:
                    return ['admin-platforms-list']
            elif page == 'types':
                if action == 'post-add':
                    return ['types-post-add', 'admin-types-list']
                elif action == 'post-edit':
                    return ['types-post-edit', 'admin-types-list']
                elif action == 'delete':
                    return ['types-delete', 'admin-types-list']
                else:
                    return ['admin-types-list']
        elif context.resource.realm == 'downloads-core':
            if action == 'get-file':
                return ['get-file']
        elif context.resource.realm == 'downloads-downloads':
            if action == 'post-add':
                return ['downloads-post-add', 'downloads-list']
            elif action == 'edit':
                return ['description-edit', 'downloads-list']
            elif action == 'post-edit':
                return ['description-post-edit', 'downloads-list']
            else:
                return ['downloads-list']
        else:
            pass
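    # Worked example (request assumed): a request against the
    # 'downloads-downloads' realm with action='post-add' yields
    # ['downloads-post-add', 'downloads-list']; _do_actions() then runs both
    # modes in order and process_downloads() renders the template named after
    # the last mode, i.e. 'downloads-list.html'.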

    def _do_actions(self, context, actions):
        for action in actions:
            if action == 'get-file':
                context.req.perm.require('DOWNLOADS_VIEW')

                # Get request arguments.
                download_id = context.req.args.get('id') or 0
                download_file = context.req.args.get('file')

                # Get download.
                if download_id:
                    download = self.get_download(context, download_id)
                else:
                    download = self.get_download_by_file(
                        context, download_file)

                # Check if requested download exists.
                if not download:
                    raise TracError('File not found.')

                # Check resource based permission.
                context.req.perm.require('DOWNLOADS_VIEW',
                                         Resource('downloads', download['id']))

                # Get download file path.
                path = os.path.normpath(
                    os.path.join(self.path, to_unicode(download['id']),
                                 download['file']))
                self.log.debug('path: %s' % (path, ))

                # Increase downloads count.
                new_download = {'count': download['count'] + 1}

                # Edit download.
                self.edit_download(context, download['id'], new_download)

                # Notify change listeners.
                for listener in self.change_listeners:
                    listener.download_changed(context, new_download, download)

                # Commit DB before file send.
                db = self.env.get_db_cnx()
                db.commit()

                # Guess mime type.
                file = open(path.encode('utf-8'), "rb")
                file_data = file.read(1000)
                file.close()
                mimeview = Mimeview(self.env)
                mime_type = mimeview.get_mimetype(path, file_data)
                if not mime_type:
                    mime_type = 'application/octet-stream'
                if 'charset=' not in mime_type:
                    charset = mimeview.get_charset(file_data, mime_type)
                    mime_type = mime_type + '; charset=' + charset

                # Return uploaded file to request.
                context.req.send_header(
                    'Content-Disposition', 'attachment;filename="%s"' %
                    (os.path.normpath(download['file'])))
                context.req.send_header('Content-Description',
                                        download['description'])
                context.req.send_file(path.encode('utf-8'), mime_type)

            elif action == 'downloads-list':
                context.req.perm.require('DOWNLOADS_VIEW')

                self.log.debug('visible_fields: %s' % (self.visible_fields, ))

                # Get form values.
                order = context.req.args.get('order') or self.download_sort
                if context.req.args.has_key('desc'):
                    desc = context.req.args.get('desc') == '1'
                else:
                    desc = self.download_sort_direction == 'desc'

                self.data['order'] = order
                self.data['desc'] = desc
                self.data['has_tags'] = self.env.is_component_enabled(
                    'tractags.api.TagEngine')
                self.data['visible_fields'] = self.visible_fields
                self.data['title'] = self.title
                self.data['description'] = self.get_description(context)
                self.data['downloads'] = self.get_downloads(
                    context, order, desc)
                self.data['visible_fields'] = [
                    visible_field for visible_field in self.visible_fields
                ]

                # Component, versions, etc. are needed only for new download
                # add form.
                if context.req.perm.has_permission('DOWNLOADS_ADD'):
                    self.data['components'] = self.get_components(context)
                    self.data['versions'] = self.get_versions(context)
                    self.data['architectures'] = self.get_architectures(
                        context)
                    self.data['platforms'] = self.get_platforms(context)
                    self.data['types'] = self.get_types(context)

            elif action == 'admin-downloads-list':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get form values
                order = context.req.args.get('order') or self.download_sort
                if context.req.args.has_key('desc'):
                    desc = context.req.args.get('desc') == '1'
                else:
                    desc = self.download_sort_direction == 'desc'
                download_id = int(context.req.args.get('download') or 0)

                self.data['order'] = order
                self.data['desc'] = desc
                self.data['has_tags'] = self.env.is_component_enabled(
                    'tractags.api.TagEngine')
                self.data['download'] = self.get_download(context, download_id)
                self.data['downloads'] = self.get_downloads(
                    context, order, desc)
                self.data['components'] = self.get_components(context)
                self.data['versions'] = self.get_versions(context)
                self.data['architectures'] = self.get_architectures(context)
                self.data['platforms'] = self.get_platforms(context)
                self.data['types'] = self.get_types(context)

            elif action == 'description-edit':
                context.req.perm.require('DOWNLOADS_ADMIN')

            elif action == 'description-post-edit':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get form values.
                description = context.req.args.get('description')

                # Set new description.
                self.edit_description(context, description)

            elif action == 'downloads-post-add':
                context.req.perm.require('DOWNLOADS_ADD')

                # Get form values.
                file, filename, file_size = self._get_file_from_req(context)
                download = {
                    'file': filename,
                    'description': context.req.args.get('description'),
                    'size': file_size,
                    'time': to_timestamp(datetime.now(utc)),
                    'count': 0,
                    'author': context.req.authname,
                    'tags': context.req.args.get('tags'),
                    'component': context.req.args.get('component'),
                    'version': context.req.args.get('version'),
                    'architecture': context.req.args.get('architecture'),
                    'platform': context.req.args.get('platform'),
                    'type': context.req.args.get('type')
                }

                # Upload file to DB and file storage.
                self._add_download(context, download, file)

                # Close input file.
                file.close()

            elif action == 'downloads-post-edit':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get form values.
                download_id = context.req.args.get('id')
                old_download = self.get_download(context, download_id)
                download = {
                    'description': context.req.args.get('description'),
                    'tags': context.req.args.get('tags'),
                    'component': context.req.args.get('component'),
                    'version': context.req.args.get('version'),
                    'architecture': context.req.args.get('architecture'),
                    'platform': context.req.args.get('platform'),
                    'type': context.req.args.get('type')
                }

                # Edit Download.
                self.edit_download(context, download_id, download)

                # Notify change listeners.
                for listener in self.change_listeners:
                    listener.download_changed(context, download, old_download)

            elif action == 'downloads-delete':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get selected downloads.
                selection = context.req.args.get('selection')
                if isinstance(selection, (str, unicode)):
                    selection = [selection]

                # Delete download.
                if selection:
                    for download_id in selection:
                        download = self.get_download(context, download_id)
                        self.log.debug('download: %s' % (download, ))
                        self._delete_download(context, download)

            elif action == 'admin-architectures-list':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get form values
                order = context.req.args.get('order') or self.architecture_sort
                if context.req.args.has_key('desc'):
                    desc = context.req.args.get('desc') == '1'
                else:
                    desc = self.architecture_sort_direction == 'desc'
                architecture_id = int(
                    context.req.args.get('architecture') or 0)

                # Display architectures.
                self.data['order'] = order
                self.data['desc'] = desc
                self.data['architecture'] = self.get_architecture(
                    context, architecture_id)
                self.data['architectures'] = self.get_architectures(
                    context, order, desc)

            elif action == 'architectures-post-add':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get form values.
                architecture = {
                    'name': context.req.args.get('name'),
                    'description': context.req.args.get('description')
                }

                # Add architecture.
                self.add_architecture(context, architecture)

            elif action == 'architectures-post-edit':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get form values.
                architecture_id = context.req.args.get('id')
                architecture = {
                    'name': context.req.args.get('name'),
                    'description': context.req.args.get('description')
                }

                # Edit architecture.
                self.edit_architecture(context, architecture_id, architecture)

            elif action == 'architectures-delete':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get selected architectures.
                selection = context.req.args.get('selection')
                if isinstance(selection, (str, unicode)):
                    selection = [selection]

                # Delete architectures.
                if selection:
                    for architecture_id in selection:
                        self.delete_architecture(context, architecture_id)

            elif action == 'admin-platforms-list':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get form values.
                order = context.req.args.get('order') or self.platform_sort
                if context.req.args.has_key('desc'):
                    desc = context.req.args.get('desc') == '1'
                else:
                    desc = self.platform_sort_direction == 'desc'
                platform_id = int(context.req.args.get('platform') or 0)

                # Display platforms.
                self.data['order'] = order
                self.data['desc'] = desc
                self.data['platform'] = self.get_platform(context, platform_id)
                self.data['platforms'] = self.get_platforms(
                    context, order, desc)

            elif action == 'platforms-post-add':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get form values.
                platform = {
                    'name': context.req.args.get('name'),
                    'description': context.req.args.get('description')
                }

                # Add platform.
                self.add_platform(context, platform)

            elif action == 'platforms-post-edit':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get form values.
                platform_id = context.req.args.get('id')
                platform = {
                    'name': context.req.args.get('name'),
                    'description': context.req.args.get('description')
                }

                # Edit platform.
                self.edit_platform(context, platform_id, platform)

            elif action == 'platforms-delete':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get selected platforms.
                selection = context.req.args.get('selection')
                if isinstance(selection, (str, unicode)):
                    selection = [selection]

                # Delete platforms.
                if selection:
                    for platform_id in selection:
                        self.delete_platform(context, platform_id)

            elif action == 'admin-types-list':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get form values
                order = context.req.args.get('order') or self.type_sort
                if context.req.args.has_key('desc'):
                    desc = context.req.args.get('desc') == '1'
                else:
                    desc = self.type_sort_direction == 'desc'
                type_id = int(context.req.args.get('type') or 0)

                # Display types.
                self.data['order'] = order
                self.data['desc'] = desc
                self.data['type'] = self.get_type(context, type_id)
                self.data['types'] = self.get_types(context, order, desc)

            elif action == 'types-post-add':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get form values.
                type = {
                    'name': context.req.args.get('name'),
                    'description': context.req.args.get('description')
                }

                # Add type.
                self.add_type(context, type)

            elif action == 'types-post-edit':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get form values.
                type_id = context.req.args.get('id')
                type = {
                    'name': context.req.args.get('name'),
                    'description': context.req.args.get('description')
                }

                # Edit type.
                self.edit_type(context, type_id, type)

            elif action == 'types-delete':
                context.req.perm.require('DOWNLOADS_ADMIN')

                # Get selected types.
                selection = context.req.args.get('selection')
                if isinstance(selection, (str, unicode)):
                    selection = [selection]

                # Delete types.
                if selection:
                    for type_id in selection:
                        self.delete_type(context, type_id)

    """ Full implementation of download addition. It creates DB entry for
    download <download> and stores download file <file> to file system. """

    def _add_download(self, context, download, file):
        # Check for file name uniqueness.
        if self.unique_filename:
            if self.get_download_by_file(context, download['file']):
                raise TracError('A file with the same name has already been'
                                ' uploaded and unique file names are enabled.')

        # Check correct file type.
        name, ext = os.path.splitext(download['file'])
        self.log.debug('file ext: %s allowed ext: %s' % (ext, self.ext))
        if not (ext[1:].lower() in self.ext) and not ('all' in self.ext):
            raise TracError('Unsupported file type.')

        # Check for maximum file size.
        if self.max_size >= 0 and download['size'] > self.max_size:
            raise TracError('Maximum file size: %s bytes' % (self.max_size),
                            'Upload failed')

        # Add new download to DB.
        self.add_download(context, download)

        # Get inserted download by time to get its ID.
        download = self.get_download_by_time(context, download['time'])

        # Prepare file paths.
        path = os.path.normpath(
            os.path.join(self.path, to_unicode(download['id'])))
        filepath = os.path.normpath(os.path.join(path, download['file']))

        self.log.debug('path: %s' % ((path, )))
        self.log.debug('filepath: %s' % ((filepath, )))

        # Store uploaded file.
        try:
            os.mkdir(path.encode('utf-8'))
            out_file = open(filepath.encode('utf-8'), "wb+")
            file.seek(0)
            shutil.copyfileobj(file, out_file)
            out_file.close()
        except Exception, error:
            self.delete_download(context, download['id'])
            self.log.debug(error)
            try:
                os.remove(filepath.encode('utf-8'))
            except:
                pass
            try:
                os.rmdir(path.encode('utf-8'))
            except:
                pass
            raise TracError('Error storing file %s! Does the directory'
                            ' specified in the path option of the [downloads]'
                            ' section of trac.ini exist?' % (download['file'],))

        # Notify change listeners.
        for listener in self.change_listeners:
            listener.download_created(context, download)
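
# A hedged sketch (not part of the original example) of a listener for the
# IDownloadChangeListener extension point declared in DownloadsApi above. The
# interface's import path is assumed; only the two callbacks exercised by
# _do_actions() and _add_download() are implemented, with signatures taken
# from those calls.
from trac.core import Component, implements
from tracdownloads.api import IDownloadChangeListener  # assumed module path

class LoggingDownloadListener(Component):
    implements(IDownloadChangeListener)

    def download_created(self, context, download):
        # Called after a new download row and its file have been stored.
        self.log.info('download created: %s', download.get('file'))

    def download_changed(self, context, download, old_download):
        # Called with the changed field values and the previous row.
        self.log.info('download changed: %s', old_download.get('file'))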
Exemple #14
0
class CrashDumpSubmit(Component):
    """Upload/Submit new crash dumps"""

    implements(IRequestHandler, IRequestFilter, INavigationContributor, ITemplateProvider)

    dumpdata_dir = PathOption('crashdump', 'dumpdata_dir', default='../dumpdata',
                      doc='Path to the crash dump data directory relative to the environment conf directory.')

    default_priority = Option('crashdump', 'default_priority', default='major',
                      doc='Default priority for submitted crash reports.')

    default_milestone = Option('crashdump', 'default_milestone', '< default >',
        """Default milestone for submitted crash reports.""")

    default_version = Option('crashdump', 'default_version', '< default >',
        """Default version for submitted crash reports.""")

    default_component = Option('crashdump', 'default_component', '< default >',
        """Default component for submitted crash reports.""")

    default_severity = Option('crashdump', 'default_severity', '',
        """Default severity for submitted crash reports.""")

    default_summary = Option('crashdump', 'default_summary', '',
        """Default summary (title) for submitted crash reports.""")

    default_description = Option('crashdump', 'default_description', '',
        """Default description for submitted crash reports.""")

    default_keywords = Option('crashdump', 'default_keywords', '',
        """Default keywords for submitted crash reports.""")

    default_reporter = Option('crashdump', 'default_reporter', '< default >',
        """Default reporter for submitted crash reports.""")

    default_owner = Option('crashdump', 'default_owner', '< default >',
        """Default owner for submitted crash reports.""")

    default_ticket_type = Option('crashdump', 'ticket_type', 'defect',
        """Default ticket type for linked tickets.""")

    ignored_modules = Option('crashdump', 'ignore_modules', 'libc, kernel32, ntdll, user32, gdi32',
        """List of modules to ignore for component matching.""")

    replace_usernames = Option('crashdump', 'replace_usernames', '',
        """List of username replacements applied when a new crash is uploaded (format username=myrealname; multiple values separated by comma).""")

    max_upload_size = IntOption('crashdump', 'max_upload_size', default=16 * 1024 * 1024,
                      doc="""Maximum allowed upload size. If set to zero the upload limit is disabled and all uploads will be accepted.""")

    upload_disabled = BoolOption('crashdump', 'upload_disabled', 'false',
                      doc="""Disable upload. No further crashdumps can be submitted.""")

    disable_manual_upload = BoolOption('crashdump', 'manual_upload_disabled', 'false',
                      doc="""Disable manual upload function. Crashes can only be uploaded automatically via the crash handler.""")

    # INavigationContributor methods
    def get_active_navigation_item(self, req):
        self.log.debug('get_active_navigation_item %s' % req.path_info)
        if not self.disable_manual_upload:
            val = re.search('/crash_upload$', req.path_info)
            if val and val.start() == 0:
                return 'upload_crash'

    def get_navigation_items(self, req):
        if not self.disable_manual_upload:
            yield ('mainnav', 'upload_crash',
                   tag.a('Upload Crash', href=req.href('crash_upload')))

    # IRequestHandler methods
    def match_request(self, req):
        if req.method == 'POST' and (req.path_info == '/crashdump/submit' or req.path_info == '/submit'):
            self.log.debug('match_request: %s %s', req.method, req.path_info)
            return True
        elif req.method == 'GET' and (req.path_info == '/crashdump/submit/crashlist' or req.path_info == '/submit/crashlist'):
            self.log.debug('match_request: %s %s', req.method, req.path_info)
            return True
        elif req.method == 'GET' and (req.path_info == '/crashdump/submit/capabilities' or req.path_info == '/submit/capabilities'):
            self.log.debug('match_request: %s %s', req.method, req.path_info)
            return True
        elif req.method == 'GET' and (req.path_info == '/crashdump/list' or req.path_info == '/crashlist'):
            self.log.debug('match_request: %s %s', req.method, req.path_info)
            return True
        elif req.method == 'GET' and (req.path_info == '/crashdump/capabilities' or req.path_info == '/capabilities'):
            self.log.debug('match_request: %s %s', req.method, req.path_info)
            return True
        elif (req.method == 'GET' or req.method == 'POST') and (req.path_info == '/crashdump/crash_upload' or req.path_info == '/crash_upload'):
            return True
        else:
            self.log.debug('match_request: %s %s', req.method, req.path_info)
            return False

    def _error_response(self, req, status, body=None, content_type='text/plain', headers=None):

        self.log.debug('_error_response: %s %s -> %i: %s', req.method, req.path_info, status, body)
        if isinstance(body, unicode):
            body = body.encode('utf-8')

        req.send_response(status)
        req._outheaders = []
        req.send_header('Cache-Control', 'must-revalidate')
        req.send_header('Expires', 'Fri, 01 Jan 1999 00:00:00 GMT')
        req.send_header('Content-Type', content_type + ';charset=utf-8')
        req.send_header('Content-Length', len(body))
        if headers:
            for k,v in headers.items():
                req.send_header(k, v)
        req._send_cookie_headers()

        if req.method != 'HEAD':
            req.write(body)
        raise RequestDone

    def _success_response(self, req, body=None, content_type='text/plain', status=200, headers=None):

        if isinstance(body, unicode):
            body = body.encode('utf-8')

        req.send_response(status)
        req.send_header('Cache-Control', 'must-revalidate')
        req.send_header('Expires', 'Fri, 01 Jan 1999 00:00:00 GMT')
        req.send_header('Content-Type', content_type + ';charset=utf-8')
        req.send_header('Content-Length', len(body))
        if headers:
            for k,v in headers.items():
                req.send_header(k, v)
        req.end_headers()

        if req.method != 'HEAD':
            req.write(body)
        raise RequestDone

    def _manual_upload_result(self, req, error=None):
        data = {}
        action = 'upload'
        params = None
        submit_href = req.href + '/submit'
        data.update({'action': action,
                    'params': params,
                    'submit_href': submit_href,
                    'upload_error': error,
                    })
        if crashdump_use_jinja2:
            metadata = {'content_type': 'text/html'}
        else:
            add_script(req, 'common/js/folding.js')
            metadata = None
        add_script(req, 'crashdump/crashdump.js')
        add_stylesheet(req, 'crashdump/crashdump.css')
        return 'upload.html', data, metadata

    def _find_first_component_from_list(self, possible_components):
        ret = None
        for compname in possible_components:
            try:
                component = TicketComponent(self.env, compname)
                ret = component.name
                break
            except ResourceNotFound:
                # No such component exists
                pass
        return ret

    def _find_first_milestone_from_list(self, possible_milestones):
        #print('_find_first_milestone_from_list %s' % str(possible_milestones))
        ret = None
        for ms_name in possible_milestones:
            try:
                milestone = Milestone(self.env, ms_name)
                ret = milestone.name
                break
            except ResourceNotFound:
                # No such milestone exists
                pass
        return ret

    def _find_first_version_from_list(self, possible_versions):
        #print('_find_first_version_from_list %s' % str(possible_versions))
        ret = None
        for v_name in possible_versions:
            try:
                ver = Version(self.env, v_name)
                ret = ver.name
                break
            except ResourceNotFound:
                # No such version exists
                pass
        return ret

    def _find_component_from_involved_modules(self, module_list, buildpostfix):
        possible_components = []
        for m in module_list:
            module_base = os.path.basename(m) if '/' in m else m
            module_name, module_ext = os.path.splitext(module_base)
            if buildpostfix and module_name.endswith(buildpostfix):
                module_name = module_name[:-len(buildpostfix)]
            if '-' in module_name:
                (prefix, name) = module_name.split('-', 1)
                name_is_version = True
                for c in name:
                    if (c >= '0' and c <= '9') or c == '.':
                        pass
                    else:
                        name_is_version = False

                if name_is_version:
                    # name is a version number, so check the prefix instead and
                    # skip the full module name, which would otherwise have to
                    # match the version number as well.
                    if prefix not in self.ignored_modules:
                        possible_components.append(prefix)
                else:
                    # add the entire module name
                    if module_name not in self.ignored_modules:
                        possible_components.append(module_name)
                    # ... and the shorten name (without prefix) to the list
                    if name not in self.ignored_modules:
                        possible_components.append(name)
            else:
                if module_name not in self.ignored_modules:
                    possible_components.append(module_name)
        return self._find_first_component_from_list(possible_components)
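    # Worked example (module name assumed): for a crash that involved
    # '/usr/lib/libfoo-1.2.3.so' the basename reduces to 'libfoo-1.2.3'; the
    # suffix '1.2.3' is recognised as a pure version number, so only the prefix
    # 'libfoo' is added as a candidate (unless listed in ignore_modules) and
    # _find_first_component_from_list() picks it if a Trac component with that
    # name exists.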

    def _find_component_for_application(self, applicationname):
        if applicationname is None:
            return None

        possible_components = [applicationname]
        if '-' in applicationname:
            (prefix, name) = applicationname.split('-', 1)
            possible_components.append(name)

        return self._find_first_component_from_list(possible_components)

    def _find_milestone(self, productversion, producttargetversion):
        if producttargetversion is None:
            return None
        possible_versions = []
        v_elems = producttargetversion.split('.')
        while len(v_elems) < 4:
            v_elems.append('0')

        for i in range(4, 0, -1):
            possible_versions.append('v' + '.'.join(v_elems[0:i]))
            possible_versions.append('.'.join(v_elems[0:i]))
        return self._find_first_milestone_from_list(possible_versions)
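    # Worked example (target version assumed): for producttargetversion '3.2'
    # the padded elements give the candidates 'v3.2.0.0', '3.2.0.0', 'v3.2.0',
    # '3.2.0', 'v3.2', '3.2', 'v3', '3', tried in that order; the first name
    # that exists as a Trac milestone is returned.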

    def _find_version(self, productversion, producttargetversion):
        if productversion is None:
            return None
        possible_versions = []
        v_elems = productversion.split('.')
        while len(v_elems) < 4:
            v_elems.append('0')

        for i in range(4, 2, -1):
            possible_versions.append('v' + '.'.join(v_elems[0:i]))
            possible_versions.append('.'.join(v_elems[0:i]))
            if v_elems[i - 1] != '0':
                v_elems[i - 1] = '0'
            possible_versions.append('v' + '.'.join(v_elems[0:i]))
            possible_versions.append('.'.join(v_elems[0:i]))
        return self._find_first_version_from_list(possible_versions)

    def _apply_username_replacements(self, username):
        if username is None:
            return None
        self.log.debug('CrashDumpSubmit _apply_username_replacements in=\'%s\'' % username)
        ret = username
        ret_lower = username.lower()
        for pattern in self.replace_usernames.split(','):
            pattern = pattern.strip()
            self.log.debug('CrashDumpSubmit _apply_username_replacements pattern=\'%s\'' % pattern)
            if '=' in pattern:
                (find, replace) = pattern.split('=', 1)
                find = find.strip().lower()
                replace = replace.strip()
                self.log.debug('CrashDumpSubmit _apply_username_replacements find=\'%s\' -> replace=\'%s\'' % (find, replace))
                if ret_lower == find:
                    ret = replace
                    ret_lower = replace.lower()
        self.log.debug('CrashDumpSubmit _apply_username_replacements out=\'%s\'' % ret)
        return ret
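    # Worked example (configuration assumed): with
    # replace_usernames = 'jdoe=John Doe, msmith=Mary Smith' a crash reported
    # by 'JDoe' is matched case-insensitively against 'jdoe' and stored with
    # the reporter name 'John Doe'; usernames without a matching pattern pass
    # through unchanged.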

    def pre_process_request(self, req, handler):
        if req.path_info != '/crashdump/submit' and req.path_info != '/submit' and \
            req.path_info != '/crashdump/crash_upload' and req.path_info != '/crash_upload':
            return handler

        self.log.debug('CrashDumpSubmit pre_process_request: %s %s %s', req.method, req.path_info, handler)
        if req.method == "POST":
            user_agent = req.get_header('User-Agent')
            if user_agent is not None and '/' in user_agent:
                user_agent, agent_ver = user_agent.split('/', 1)
            if user_agent == 'terra3d-crashuploader':
                # copy the request's form token into the args to pass the CSRF test
                req.args['__FORM_TOKEN'] = req.form_token

            manual_upload = req.args.as_int('manual_upload', 0)
            # for testing
            if manual_upload:
                # copy the request's form token into the args to pass the CSRF test
                req.args['__FORM_TOKEN'] = req.form_token

        return handler

    def post_process_request(self, req, template, data, content_type, method=None):
        return template, data, content_type, method

    def process_request(self, req):
        self.log.debug('CrashDumpSubmit process_request: %s %s', req.method, req.path_info)
        if req.path_info in ('/crashdump/submit', '/submit'):
            self.log.debug('CrashDumpSubmit process_request_submit: %s %s', req.method, req.path_info)
            return self.process_request_submit(req)
        elif req.path_info in ('/crashdump/crash_upload', '/crash_upload'):
            return self.process_request_crash_upload(req)
        elif req.path_info in ('/crashdump/list', '/crashlist',
                               '/crashdump/submit/crashlist', '/submit/crashlist'):
            return self.process_request_crashlist(req)
        elif req.path_info in ('/crashdump/capabilities', '/capabilities',
                               '/crashdump/submit/capabilities', '/submit/capabilities'):
            return self.process_request_capabilities(req)
        else:
            return self._error_response(req, status=HTTPMethodNotAllowed.code, body='Invalid request path %s.' % req.path_info)

    def process_request_capabilities(self, req):
        if req.method != "GET":
            return self._error_response(req, status=HTTPMethodNotAllowed.code, body='Method %s not allowed' % req.method)
        user_agent = req.get_header('User-Agent')
        if user_agent is None:
            return self._error_response(req, status=HTTPForbidden.code, body='No user-agent specified.')

        headers = {}
        headers['Max-Upload-Size'] = self.max_upload_size
        headers['Upload-Disabled'] = '1' if self.upload_disabled else '0'

        # report the version of the installed plugin distribution, if it can be found
        dist = get_distribution('TracCrashDump')
        if dist:
            headers['Crashdump-Plugin-Version'] = dist.version
        if self.upload_disabled:
            body = 'Disabled'
        else:
            body = 'OK'
        return self._success_response(req, body=body.encode('utf-8'), headers=headers)

    def process_request_crash_upload(self, req):
        return self._manual_upload_result(req, error=None)

    def escape_ticket_values(self, values):
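        # '#' is escaped so that values such as '#1234' are not rendered as
        # TracLinks ticket references when they are interpolated into the
        # wiki-formatted ticket comments built further below (assumption
        # based on how these values are used).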
        ret = {}
        for k, v in values.items():
            if isinstance(v, basestring):
                ret[k] = v.replace('#', '!#')
            else:
                ret[k] = v
        return ret

    def process_request_submit(self, req):
        if req.method != "POST":
            return self._error_response(req, status=HTTPMethodNotAllowed.code, body='Method %s not allowed' % req.method)

        manual_upload = req.args.as_int('manual_upload', 0)
        if manual_upload == 0:
            user_agent_full = req.get_header('User-Agent')
            if user_agent_full is None:
                return self._error_response(req, status=HTTPForbidden.code, body='No user-agent specified.')
            if '/' in user_agent_full:
                user_agent, agent_ver = user_agent_full.split('/', 1)
            else:
                user_agent = user_agent_full
            if user_agent != 'terra3d-crashuploader':
                return self._error_response(req, status=HTTPForbidden.code, body='User-agent %s not allowed' % user_agent_full)

        headers = {}
        headers['Max-Upload-Size'] = self.max_upload_size
        headers['Upload-Disabled'] = '1' if self.upload_disabled else '0'

        if self.upload_disabled:
            return self._error_response(req, status=HTTPInternalServerError.code, body='Crashdump upload has been disabled by the administrator.', headers=headers)

        id_str = req.args.get('id')
        if not manual_upload:
            if not id_str or not CrashDump.uuid_is_valid(id_str):
                return self._error_response(req, status=HTTPInternalServerError.code, body='Invalid crash identifier %s specified.' % id_str)

        total_upload_size = self._get_total_upload_size(req)
        if self.max_upload_size > 0 and total_upload_size > self.max_upload_size:
            self.log.debug('total_upload_size %i > max_upload_size %i' % (total_upload_size, self.max_upload_size) )
            return self._error_response(req, status=HTTPInternalServerError.code, body='Upload size of %i bytes exceeds the upload limit of %i bytes' % (total_upload_size, self.max_upload_size), headers=headers)
        else:
            self.log.debug('total_upload_size %i <= max_upload_size %i' % (total_upload_size, self.max_upload_size) )

        if manual_upload:
            self.log.debug('manual_upload')

            files = req.args.getlist('files')
            if len(files) == 0:
                return self._error_response(req, status=HTTPInternalServerError.code, body='No files uploaded.')

            import re

            id_str = None
            minidump = None
            minidumpreportxml = None

            for file in files:
                if isinstance(file, cgi.FieldStorage):
                    filename = os.path.basename(file.filename)
                    self.log.debug('got file %s' % filename)
                    match = re.match(r'^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})\.([0-9a-zA-Z\.]+)$', filename)
                    if match:
                        new_id_str = match.groups()[0]
                        ext = match.groups()[1]
                        self.log.debug('got file match %s' % new_id_str)
                        if id_str is None:
                            id_str = new_id_str
                        elif id_str == new_id_str:
                            pass
                        else:
                            return self._error_response(req, status=HTTPInternalServerError.code, body='Uploading multiple crashes at once is not supported at the moment.')
                        if ext == 'dmp':
                            minidump = file
                        elif ext == 'dmp.xml':
                            minidumpreportxml = file
                        self.log.debug('got id %s, ext %s' % (id_str, ext))
                else:
                    self.log.debug('skip file field %s-%s' % (type(file), file) )
            if not id_str:
                return self._manual_upload_result(req, error='Cannot determine crash identifier from file upload. The uploaded files must have a UUID in their names and the extension must be either .dmp or .dmp.xml.')
            elif minidump is None and minidumpreportxml is None:
                return self._manual_upload_result(req, error='Uploaded files do not contain valid crash dump information.')

            self.log.debug('got crashid %s' % id_str)
            if minidump is not None:
                req.args['minidump'] = minidump
            if minidumpreportxml is not None:
                req.args['minidumpreportxml'] = minidumpreportxml

        uuid = UUID(id_str)
        crashid = None
        crashobj = CrashDump.find_by_uuid(self.env, uuid)
        if not crashobj:
            crashobj = CrashDump(uuid=uuid, env=self.env, must_exist=False)
        else:
            crashid = crashobj.id

        force_str = req.args.get('force') or 'false'
        force = force_str.lower() == 'true'
        if crashid is not None and not force and not manual_upload:
            headers = {}
            headers['Crash-URL'] = req.abs_href('crash', str(uuid))
            headers['CrashId'] = str(crashid)
            self.log.debug('crash %s already uploaded %s' % (uuid, headers['Crash-URL']) )
            return self._error_response(req, status=HTTPInternalServerError.code, body='Crash identifier %s already uploaded.' % id_str, headers=headers)

        ticket_str = req.args.get('ticket') or 'no'
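        # Accepted values for the 'ticket' form field (derived from the
        # branches below): 'no' links nothing, '#<id>[,#<id>...]' links the
        # listed existing tickets, 'auto' reuses an already linked ticket or
        # creates one, and 'new' always creates a new ticket.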

        linked_tickets = set()
        ticketobjs = []
        new_ticket = None
        if ticket_str == 'no':
            pass
        elif '#' in ticket_str:
            ticket_ids = []
            for t in ticket_str.split(','):
                if t[0] == '#':
                    ticket_ids.append(int(t[1:]))
            ticketobjs = []
            for tkt_id in ticket_ids:
                try:
                    ticketobjs.append(Ticket(env=self.env, tkt_id=tkt_id))
                except ResourceNotFound:
                    return self._error_response(req, status=HTTPNotFound.code, body='Ticket %i not found. Cannot link crash %s to the requested ticket.' % (tkt_id, str(uuid)))

        elif ticket_str == 'auto':
            if crashid is None:
                new_ticket = Ticket(env=self.env)
                ticketobjs = [ new_ticket ]
            else:
                for tkt_id in crashobj.linked_tickets:
                    try:
                        ticketobjs.append( Ticket(env=self.env, tkt_id=tkt_id) )
                        break
                    except ResourceNotFound:
                        pass
                if len(ticketobjs) == 0:
                    new_ticket = Ticket(env=self.env)
                    ticketobjs = [ new_ticket ]
        elif ticket_str == 'new':
            new_ticket = Ticket(env=self.env)
            ticketobjs = [ new_ticket ]
        else:
            return self._error_response(req, status=HTTPInternalServerError.code, body='Unrecognized ticket string %s for crash %s.' % (ticket_str, str(uuid)))
        
        #print('ticket_str=%s' % ticket_str)
        #print('ticketobjs=%s' % str(ticketobjs))

        # we require at least one crash dump file (either minidump or coredump)
        # and any number of report files
        failure_message = None
        result = False
        ok, new_minidumpfile, errmsg = self._store_dump_file(uuid, req, 'minidump', force)
        if ok:
            result = True
        elif failure_message is None:
            failure_message = errmsg
        ok, new_minidumpreporttextfile, errmsg = self._store_dump_file(uuid, req, 'minidumpreport', force)
        ok, new_minidumpreportxmlfile, errmsg = self._store_dump_file(uuid, req, 'minidumpreportxml', force)
        # accept XML crash upload only for manual uploads
        if manual_upload and ok:
            result = True
        elif failure_message is None:
            failure_message = errmsg
        ok, new_minidumpreporthtmlfile, errmsg = self._store_dump_file(uuid, req, 'minidumpreporthtml', force)
        ok, new_coredumpfile, errmsg = self._store_dump_file(uuid, req, 'coredump', force)
        if ok:
            result = True
        elif failure_message is None:
            failure_message = errmsg
        ok, new_coredumpreporttextfile, errmsg = self._store_dump_file(uuid, req, 'coredumpreport', force)
        ok, new_coredumpreportxmlfile, errmsg = self._store_dump_file(uuid, req, 'coredumpreportxml', force)
        ok, new_coredumpreporthtmlfile, errmsg = self._store_dump_file(uuid, req, 'coredumpreporthtml', force)

        self.log.debug('new_minidumpfile \'%s\'' % new_minidumpfile)
        self.log.debug('new_minidumpreportxmlfile \'%s\'' % new_minidumpreportxmlfile)
        self.log.debug('before crashobj[minidumpfile] \'%s\'' % crashobj['minidumpfile'])
        self.log.debug('before crashobj[minidumpreportxmlfile] \'%s\'' % crashobj['minidumpreportxmlfile'])

        if manual_upload:
            if not crashobj['minidumpfile'] or force:
                crashobj['minidumpfile'] = new_minidumpfile
            if not crashobj['minidumpreporttextfile'] or force:
                crashobj['minidumpreporttextfile'] = new_minidumpreporttextfile
            if not crashobj['minidumpreportxmlfile'] or force:
                crashobj['minidumpreportxmlfile'] = new_minidumpreportxmlfile
            if not crashobj['minidumpreporthtmlfile'] or force:
                crashobj['minidumpreporthtmlfile'] = new_minidumpreporthtmlfile
            if not crashobj['coredumpfile'] or force:
                crashobj['coredumpfile'] = new_coredumpfile
            if not crashobj['coredumpreporttextfile'] or force:
                crashobj['coredumpreporttextfile'] = new_coredumpreporttextfile
            if not crashobj['coredumpreportxmlfile'] or force:
                crashobj['coredumpreportxmlfile'] = new_coredumpreportxmlfile
            if not crashobj['coredumpreporthtmlfile'] or force:
                crashobj['coredumpreporthtmlfile'] = new_coredumpreporthtmlfile
        else:
            crashobj['minidumpfile'] = new_minidumpfile
            crashobj['minidumpreporttextfile'] = new_minidumpreporttextfile
            crashobj['minidumpreportxmlfile'] = new_minidumpreportxmlfile
            crashobj['minidumpreporthtmlfile'] = new_minidumpreporthtmlfile
            crashobj['coredumpfile'] = new_coredumpfile
            crashobj['coredumpreporttextfile'] = new_coredumpreporttextfile
            crashobj['coredumpreportxmlfile'] = new_coredumpreportxmlfile
            crashobj['coredumpreporthtmlfile'] = new_coredumpreporthtmlfile

        self.log.debug('after crashobj[minidumpfile] \'%s\'' % crashobj['minidumpfile'])
        self.log.debug('after crashobj[minidumpreportxmlfile] \'%s\'' % crashobj['minidumpreportxmlfile'])

        new_applicationfile = req.args.get('applicationfile')
        if not crashobj['applicationfile']:
            crashobj['applicationfile'] = new_applicationfile

        self.log.debug('crashtimestamp from http form \'%s\'' % req.args.get('crashtimestamp'))
        self.log.debug('reporttimestamp from http form \'%s\'' % req.args.get('reporttimestamp'))

        try:
            crashtimestamp = parse_date(req.args.get('crashtimestamp', ''), hint='iso8601' )
        except TracError:
            crashtimestamp = None
            self.log.warn('invalid crash timestamp \'%s\'' % (req.args.get('crashtimestamp')))
        try:
            reporttimestamp = parse_date(req.args.get('reporttimestamp', ''), hint='iso8601' )
        except TracError:
            reporttimestamp = None
            self.log.warn('invalid crash report timestamp \'%s\'' % (req.args.get('reporttimestamp')))

        crashobj['crashtime'] = crashtimestamp if crashtimestamp else None
        crashobj['reporttime'] = reporttimestamp if reporttimestamp else None
        crashobj['uploadtime'] = datetime.datetime.now(utc)

        self.log.debug('crashtimestamp %s' % (crashobj['crashtime']))
        self.log.debug('reporttimestamp %s' % (crashobj['reporttime']))
        self.log.debug('uploadtime %s' % (crashobj['uploadtime']))

        if not manual_upload:
            crashobj['productname'] = req.args.get('productname')
            crashobj['productcodename'] = req.args.get('productcodename')
            crashobj['productversion'] = req.args.get('productversion')
            crashobj['producttargetversion'] = req.args.get('producttargetversion')
            crashobj['uploadhostname'] = req.args.get('fqdn')
            crashobj['uploadusername'] = req.args.get('username')
            crashobj['crashhostname'] = req.args.get('crashfqdn')
            crashobj['crashusername'] = req.args.get('crashusername')
            crashobj['buildtype'] = req.args.get('buildtype')
            crashobj['buildpostfix'] = req.args.get('buildpostfix')
            crashobj['machinetype'] = req.args.get('machinetype')
            crashobj['systemname'] = req.args.get('systemname')
            crashobj['osversion'] = req.args.get('osversion')
            crashobj['osrelease'] = req.args.get('osrelease')
            crashobj['osmachine'] = req.args.get('osmachine')

        if result:

            xmlreport = None
            try:
                if crashobj['minidumpreportxmlfile']:
                    xmlfile = self._get_dump_filename(crashobj, 'minidumpreportxmlfile')
                    xmlreport = XMLReport(xmlfile)
                elif crashobj['coredumpreportxmlfile']:
                    xmlfile = self._get_dump_filename(crashobj, 'coredumpreportxmlfile')
                    xmlreport = XMLReport(xmlfile)
            except XMLReport.XMLReportException as e:
                return self._error_response(req, status=HTTPInternalServerError.code, body='Failed to process crash dump %s: %s' % (uuid, str(e)))

            if xmlreport and manual_upload:

                if xmlreport.crash_info:
                    crashobj['crashtime'] = xmlreport.crash_info.crash_timestamp
                    crashobj['reporttime'] = xmlreport.crash_info.report_time
                    crashobj['uploadhostname'] = req.remote_addr
                    crashobj['uploadusername'] = req.remote_user
                    crashobj['applicationfile'] = xmlreport.crash_info.application
                if xmlreport.fast_protect_version_info:
                    crashobj['productname'] = xmlreport.fast_protect_version_info.product_name
                    crashobj['productcodename'] = xmlreport.fast_protect_version_info.product_code_name
                    crashobj['productversion'] = xmlreport.fast_protect_version_info.product_version
                    crashobj['producttargetversion'] = xmlreport.fast_protect_version_info.product_target_version
                    crashobj['buildtype'] = xmlreport.fast_protect_version_info.product_build_type
                    crashobj['buildpostfix'] = xmlreport.fast_protect_version_info.product_build_postfix

                if xmlreport.fast_protect_system_info:
                    crashobj['crashhostname'] = xmlreport.fast_protect_system_info.fqdn
                    crashobj['crashusername'] = xmlreport.fast_protect_system_info.username
                    crashobj['machinetype'] = xmlreport.fast_protect_system_info.machine_type

                if xmlreport.system_info:
                    crashobj['systemname'] = xmlreport.system_info.platform_type
                    crashobj['osversion'] = xmlreport.system_info.os_version
                    crashobj['osrelease'] = xmlreport.system_info.os_build_number
                    crashobj['osmachine'] = xmlreport.system_info.cpu_type

            # get the application name from the application file
            if crashobj['applicationfile']:
                appfile = crashobj['applicationfile']
                if '/' in appfile:
                    appbase = appfile.split('/')[-1]
                elif '\\' in appfile:
                    appbase = appfile.split('\\')[-1]
                else:
                    appbase = os.path.basename(appfile)
                (appbase, ext) = os.path.splitext(appbase)
                if crashobj['buildpostfix'] and appbase.endswith(crashobj['buildpostfix']):
                    appbase = appbase[:-len(crashobj['buildpostfix'])]
                crashobj['applicationname'] = appbase

            new_crash = crashid is None
            if new_crash:
                crashobj['status'] = 'new'
                crashobj['type'] = 'crash'
                crashobj['priority'] = self.default_priority
                if self.default_milestone == '< default >':
                    crashobj['milestone'] = self._find_milestone(crashobj['productversion'], crashobj['producttargetversion'])
                else:
                    crashobj['milestone'] = self.default_milestone
                if self.default_version == '< default >':
                    crashobj['version'] = self._find_version(crashobj['productversion'], crashobj['producttargetversion'])
                else:
                    crashobj['version'] = self.default_version
                if self.default_component == '< default >':
                    if xmlreport is not None and xmlreport.exception is not None and xmlreport.exception.involved_modules:
                        crashobj['component'] = self._find_component_from_involved_modules(xmlreport.exception.involved_modules, crashobj['buildpostfix'])
                    if not crashobj['component']:
                        crashobj['component'] = self._find_component_for_application(crashobj['applicationname'])
                else:
                    crashobj['component'] = self.default_component
                crashobj['severity'] = self.default_severity
                crashobj['summary'] = self.default_summary
                crashobj['description'] = self.default_description
                crashobj['keywords'] = self.default_keywords
                if self.default_owner == '< default >':
                    default_to_owner = ''
                    if crashobj['component']:
                        try:
                            component = TicketComponent(self.env, crashobj['component'])
                            default_to_owner = component.owner # even if it's empty
                        except ResourceNotFound:
                            # No such component exists
                            pass
                    if default_to_owner:
                        crashobj['owner'] = default_to_owner
                    else:
                        # If the current owner is "< default >", we need to set it to
                        # _something_ else, even if that something else is blank.
                        crashobj['owner'] = crashobj['crashusername']
                else:
                    crashobj['owner'] = self.default_owner
                if self.default_reporter == '< default >':
                    crashobj['reporter'] = crashobj['crashusername']
                else:
                    crashobj['reporter'] = self.default_reporter

                # apply replacements on usernames in owner and reporter field
                crashobj['owner'] = self._apply_username_replacements(crashobj['owner'])
                crashobj['reporter'] = self._apply_username_replacements(crashobj['reporter'])

                crashid = crashobj.insert()
                result = bool(crashid)
                if result:
                    if xmlreport is not None and xmlreport.exception is not None:
                        ex_thread = xmlreport.exception.thread
                    else:
                        ex_thread = None
                    if ex_thread is not None:
                        threadid = ex_thread.id
                        stackdump = ex_thread.simplified_stackdump if ex_thread.simplified_stackdump is not None else ex_thread.stackdump
                        if stackdump:
                            for frameno, frm in enumerate(stackdump.callstack):
                                frameobj = CrashDumpStackFrame(crashid, threadid, frameno, env=self.env)
                                frameobj['module'] = frm.module
                                frameobj['function'] = frm.function
                                frameobj['funcoff'] = frm.funcoff
                                frameobj['source'] = frm.source
                                frameobj['line'] = frm.line
                                frameobj['lineoff'] = frm.lineoff
                                frameobj.insert()


            else:
                #print('update crash %s' % crashobj)
                result = crashobj.save_changes(author=crashobj['crashusername'])

            if result:
                values = crashobj.values
                values['crashtimestamp'] = crashtimestamp
                values['reporttimestamp'] = reporttimestamp
                values['crashid'] = crashid
                values['uuid'] = crashobj.uuid
                values['app'] = crashobj['applicationname'] if crashobj['applicationname'] else crashobj['applicationfile']
                # Update all already linked tickets
                for tkt_id in crashobj.linked_tickets:
                    try:
                        new_linked_ticketobj = Ticket(env=self.env, tkt_id=tkt_id)
                        comment = """The crash [[/crash/%(uuid)s|CrashId#%(crashid)s - %(uuid)s]] has been updated by **%(uploadusername)s**
from **%(uploadhostname)s** is already linked to this ticket.
""" % values

                        new_linked_ticketobj.save_changes(author=crashobj['reporter'], comment=comment)
                        # Only add valid tickets to the linked_tickets set
                        linked_tickets.add(tkt_id)
                    except ResourceNotFound:
                        pass
                    
                if new_ticket is not None:
                    new_ticket['type'] = self.default_ticket_type
                    new_ticket['summary'] = "Crash %(uuid)s in %(app)s" % values
                    comment = """The crash [[/crash/%(uuid)s|CrashId#%(crashid)s - %(uuid)s]] has been uploaded by **%(uploadusername)s**
from **%(uploadhostname)s** and this ticket has been automatically created to track the progress in finding and resolving the cause of the crash.
""" % values
                    new_ticket['description'] = comment
                    # copy over some fields from the crash itself
                    for field in ['status', 'priority', 'milestone', 'component',
                                'severity', 'keywords']:
                        new_ticket[field] = crashobj[field]

                    # apply replacements on usernames in owner and reporter field
                    new_ticket['owner'] = self._apply_username_replacements(crashobj['owner'])
                    new_ticket['reporter'] = self._apply_username_replacements(crashobj['reporter'])

                    new_ticket['linked_crash'] = str(crashid)
                    new_ticket.insert()

                # Now add the newly linked tickets as well
                for tkt_obj in ticketobjs:
                    if tkt_obj.id not in crashobj.linked_tickets:
                        ticket_values = self.escape_ticket_values(values)
                        #self.log.debug('ticket_values=%s' % str(ticket_values))
                        comment = """The crash [[/crash/%(uuid)s|CrashId#%(crashid)s - %(uuid)s]] has been uploaded by **%(uploadusername)s**
from **%(uploadhostname)s** and linked to this ticket.

The crash occurred at //%(crashtimestamp)s UTC// on **%(crashhostname)s** with user **%(crashusername)s** while running `%(applicationfile)s`. The
application was running as part of %(productname)s (%(productcodename)s) version %(productversion)s (%(producttargetversion)s, %(buildtype)s) on a
%(systemname)s/%(machinetype)s with %(osversion)s (%(osrelease)s/%(osmachine)s).
""" % ticket_values
                        linked_crashes = tkt_obj['linked_crash'] if tkt_obj['linked_crash'] else ''
                        linked_crashes = set([int(x.strip()) for x in linked_crashes.split(',') if x.strip()])
                        #print('crashid=%s' % crashid)
                        #print('linked_crashes=%s' % linked_crashes)
                        linked_crashes.add(crashid)
                        #print('linked_crashes=%s' % linked_crashes)
                        tkt_obj['linked_crash'] = ', '.join(str(x) for x in sorted(linked_crashes))
                        tkt_obj.save_changes(author=crashobj['reporter'], comment=comment)
                    
                        linked_tickets.add(tkt_obj.id)
                        with self.env.db_transaction as db:
                            links = CrashDumpTicketLinks(self.env, tkt=tkt_obj, db=db)
                            links.crashes.add(crashid)
                            links.save(author=crashobj['reporter'], db=db)

            if result:
                if manual_upload:
                    req.redirect(req.abs_href('crash', str(uuid)))
                else:
                    headers = {}
                    linked_ticket_header = []
                    for tkt_id in linked_tickets:
                        linked_ticket_header.append('#%i:%s' % (tkt_id, req.abs_href.ticket(tkt_id)))
                    if linked_ticket_header:
                        headers['Linked-Tickets'] = ';'.join(linked_ticket_header)
                    headers['Crash-URL'] = req.abs_href('crash', str(uuid))
                    headers['CrashId'] = str(crashid)

                    return self._success_response(req, body='Crash dump %s uploaded successfully.' % uuid, headers=headers)
            elif new_crash:
                return self._error_response(req, status=HTTPInternalServerError.code, body='Failed to add crash dump %s to the database' % uuid)
            else:
                headers = {}
                headers['Crash-URL'] = req.abs_href('crash', str(uuid))
                headers['CrashId'] = str(crashid)
                return self._error_response(req, status=HTTPInternalServerError.code, body='Failed to update crash dump %s in the database' % uuid, headers=headers)
        else:
            if failure_message is None:
                body = 'Failed to process crash dump %s' % uuid
            else:
                body = 'The following error occurred while processing crash dump %s: %s' % (uuid, failure_message)
            return self._error_response(req, status=HTTPInternalServerError.code, body=body)

    def process_request_crashlist(self, req):
        if req.method != "GET":
            return self._error_response(req, status=HTTPMethodNotAllowed.code, body='Method %s not allowed' % req.method)

        user_agent = req.get_header('User-Agent')
        if user_agent is None:
            return self._error_response(req, status=HTTPForbidden.code, body='No user-agent specified.')
        if '/' in user_agent:
            user_agent, agent_ver = user_agent.split('/', 1)
        #if user_agent != 'terra3d-crashuploader':
            #return self._error_response(req, status=HTTPForbidden.code, body='User-agent %s not allowed' % user_agent)

        req_status = req.args.get('status') or 'active'
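        # The handler emits a hand-built XML document, roughly of the form
        # (illustrative):
        #   <?xml version="1.0" encoding="utf-8"?>
        #   <crashlist>
        #     <crash id="1" uuid="..." url="..." xmlreport="..." rawfile="...">
        #       <status type="text">new</status>
        #       ...
        #       <linked_tickets>
        #         <ticket id="42" url="..."></ticket>
        #       </linked_tickets>
        #     </crash>
        #   </crashlist>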
        
        headers = {}
        body = ''
        body = body + '<?xml version="1.0" encoding="utf-8"?>\r\n<crashlist>\r\n'
        for crashobj in CrashDump.query(env=self.env, status=req_status):
            
            body = body + '<crash id=\"%i\" uuid=\"%s\" url=\"%s\" xmlreport=\"%s\" rawfile=\"%s\">\r\n' % \
                (crashobj.id, crashobj['uuid'], 
                    req.href('crash', crashobj['uuid']),
                    req.href('crash', crashobj['uuid'], 'xml'),
                    req.href('crash', crashobj['uuid'], 'raw'),
                    )

            for field in crashobj.fields:
                field_name = field['name']
                if field_name == 'uuid':
                    continue
                field_type = field['type']
                field_value = crashobj[field_name]
                if field_type == 'time':
                    field_value = str(to_utimestamp(field_value))

                body = body + '<%s type=\"%s\">' % (field_name, field_type)
                if field_value is not None:
                    #print('%s=%s' % (field_name, field_value))
                    body = body + escape(field_value)
                body = body + '</%s>\r\n' % (field_name)
            body = body + '<linked_tickets>\r\n'
            for tkt in crashobj.linked_tickets:
                body = body + '<ticket id=\"%i\" url=\"%s\">\r\n' % (tkt, req.href.ticket(tkt))
                body = body + '</ticket>\r\n'
            body = body + '</linked_tickets>\r\n'
            body = body + '</crash>\r\n'
        body = body + '</crashlist>\r\n'
        return self._success_response(req, body=body.encode('utf-8'), headers=headers)

    # ITemplateProvider methods
    def get_htdocs_dirs(self):
        """Return the absolute path of a directory containing additional
        static resources (such as images, style sheets, etc).
        """
        return [('crashdump', resource_filename(__name__, 'htdocs'))]

    def get_templates_dirs(self):
        """Return the absolute path of the directory containing the provided
        ClearSilver templates.
        """
        return [resource_filename(__name__, 'templates')]

    @property
    def path(self):
        return self._get_path(self.env.path, self.parent_realm, self.parent_id,
                              self.filename)

    def _create_crash_file(self, filename, force):
        flags = os.O_CREAT | os.O_WRONLY
        if force:
            flags |= os.O_TRUNC
        else:
            if os.path.isfile(filename):
                return None
            flags |= os.O_EXCL
        if hasattr(os, 'O_BINARY'):
            flags |= os.O_BINARY
        return os.fdopen(os.open(filename, flags, 0660), 'wb')

    def _get_total_upload_size(self, req):
        ret = 0
        files_fields = ['minidump', 'minidumpreport', 'minidumpreportxml', 'minidumpreporthtml',
                        'coredump', 'coredumpreport', 'coredumpreportxml', 'coredumpreporthtml']

        for name in files_fields:
            file = req.args.get(name) if name in req.args else None
            if file is None:
                continue
            if hasattr(file.file, 'fileno'):
                size = os.fstat(file.file.fileno())[6]
            else:
                file.file.seek(0, 2) # seek to end of file
                size = file.file.tell()
                file.file.seek(0)
            self.log.debug('found file name %s, size %i' % (name, size))
            ret = ret + size
        return ret

    def _store_dump_file(self, uuid, req, name, force):
        item_name = None
        ret = False
        file = req.args.get(name) if name in req.args else None
        errmsg = None
        if file is None:
            errmsg = 'Field %s not available' % name
        else:
            filename = file.filename
            fileobj = file.file
            item_name = os.path.join(str(uuid), filename)
            crash_dir = os.path.join(self.env.path, self.dumpdata_dir, str(uuid))
            crash_file = os.path.join(crash_dir, filename)
            self.log.debug('_store_dump_file env.path %s' % (self.env.path))
            self.log.debug('_store_dump_file self.dumpdata_dir %s' % (self.dumpdata_dir))
            
            self.log.debug('_store_dump_file item_name %s' % (item_name))
            self.log.debug('_store_dump_file crash_dir %s' % (crash_dir))
            self.log.debug('_store_dump_file crash_file %s' % (crash_file))
            if not os.path.isdir(crash_dir):
                os.makedirs(crash_dir)

            flags = os.O_CREAT | os.O_WRONLY | os.O_TRUNC
            #if force:
                #flags |= os.O_TRUNC
            #else:
                #if os.path.isfile(crash_file):
                    #errmsg = 'File %s already exists.' % crash_file
                    #return (False, item_name, errmsg)
                #flags |= os.O_EXCL
            if hasattr(os, 'O_BINARY'):
                flags |= os.O_BINARY
            targetfileobj = None
            try:
                targetfileobj = os.fdopen(os.open(crash_file, flags, 0660), 'wb')
            except OSError as e:
                errmsg = str(e)
            except IOError as e:
                errmsg = str(e)

            if targetfileobj is None:
                ret = False
                if errmsg is None:
                    errmsg = 'Cannot open file %s.' % crash_file
            else:
                try:
                    shutil.copyfileobj(fileobj, targetfileobj)
                    ret = True
                except OSError as e:
                    errmsg = str(e)
                except IOError as e:
                    errmsg = str(e)
        return (ret, item_name, errmsg)

    def _get_dump_filename(self, crashobj, name):
        item_name = crashobj[name]
        crash_file = os.path.join(self.env.path, self.dumpdata_dir, item_name)
        return crash_file
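
# Illustrative client-side sketch (not part of the plugin): how a crash
# uploader might POST a dump to the handler above. The server URL, the file
# paths and the use of the third-party `requests` library are assumptions
# made only for this example; the field and header names come from the
# handler code.
def example_submit_crash(base_url, crash_uuid, minidump_path, xmlreport_path):
    import requests
    fields = {
        'id': crash_uuid,            # must be a valid UUID string
        'ticket': 'auto',            # 'no', '#<id>', 'auto' or 'new'
        'productname': 'ExampleProduct',
        'productversion': '2.5.1',
    }
    files = {
        'minidump': open(minidump_path, 'rb'),
        'minidumpreportxml': open(xmlreport_path, 'rb'),
    }
    headers = {'User-Agent': 'terra3d-crashuploader/1.0'}
    return requests.post(base_url + '/crashdump/submit',
                         data=fields, files=files, headers=headers)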
Exemple #15
0
class AuthzSourcePolicy(Component):
    """Permission policy for `source:` and `changeset:` resources using a
    Subversion authz file.
    
    `FILE_VIEW` and `BROWSER_VIEW` permissions are granted as specified in the
    authz file.
    
    `CHANGESET_VIEW` permission is granted for changesets where `FILE_VIEW` is
    granted on at least one modified file, as well as for empty changesets.
    """

    implements(IPermissionPolicy)

    authz_file = PathOption(
        'trac', 'authz_file', '', """The path to the Subversion
        [http://svnbook.red-bean.com/en/1.5/svn.serverconfig.pathbasedauthz.html authorization (authz) file].
        To enable authz permission checking, the `AuthzSourcePolicy` permission
        policy must be added to `[trac] permission_policies`.
        """)

    authz_module_name = Option(
        'trac', 'authz_module_name', '',
        """The module prefix used in the `authz_file` for the default
        repository. If left empty, the global section is used.
        """)

    _mtime = 0
    _authz = {}
    _users = set()

    _handled_perms = frozenset([(None, 'BROWSER_VIEW'),
                                (None, 'CHANGESET_VIEW'), (None, 'FILE_VIEW'),
                                (None, 'LOG_VIEW'), ('source', 'BROWSER_VIEW'),
                                ('source', 'FILE_VIEW'),
                                ('source', 'LOG_VIEW'),
                                ('changeset', 'CHANGESET_VIEW')])

    # IPermissionPolicy methods

    def check_permission(self, action, username, resource, perm):
        realm = resource.realm if resource else None
        if (realm, action) in self._handled_perms:
            authz, users = self._get_authz_info()
            if authz is None:
                return False

            if username == 'anonymous':
                usernames = ('$anonymous', '*')
            else:
                usernames = (username, '$authenticated', '*')
            if resource is None:
                return True if users & set(usernames) else None

            rm = RepositoryManager(self.env)
            try:
                repos = rm.get_repository(resource.parent.id)
            except TracError:
                return True  # Allow error to be displayed in the repo index
            if repos is None:
                return True
            modules = [resource.parent.id or self.authz_module_name]
            if modules[0]:
                modules.append('')

            def check_path(path):
                path = '/' + join(repos.scope, path)
                if path != '/':
                    path += '/'

                # Allow access to parent directories of allowed resources
                if any(
                        section.get(user) is True
                        for module in modules for spath, section in authz.get(
                            module, {}).iteritems() if spath.startswith(path)
                        for user in usernames):
                    return True

                # Walk from resource up parent directories
                for spath in parent_iter(path):
                    for module in modules:
                        section = authz.get(module, {}).get(spath)
                        if section:
                            for user in usernames:
                                result = section.get(user)
                                if result is not None:
                                    return result

            if realm == 'source':
                return check_path(resource.id)

            elif realm == 'changeset':
                changes = list(repos.get_changeset(resource.id).get_changes())
                if not changes or any(
                        check_path(change[0]) for change in changes):
                    return True

    def _get_authz_info(self):
        try:
            mtime = os.path.getmtime(self.authz_file)
        except OSError as e:
            if self._authz is not None:
                self.log.error('Error accessing authz file: %s',
                               exception_to_unicode(e))
            self._mtime = mtime = 0
            self._authz = None
            self._users = set()
        if mtime > self._mtime:
            self._mtime = mtime
            rm = RepositoryManager(self.env)
            modules = set(repos.reponame
                          for repos in rm.get_real_repositories())
            if '' in modules and self.authz_module_name:
                modules.add(self.authz_module_name)
            modules.add('')
            self.log.info('Parsing authz file: %s' % self.authz_file)
            try:
                self._authz = parse(read_file(self.authz_file), modules)
                self._users = set(user for paths in self._authz.itervalues()
                                  for path in paths.itervalues()
                                  for user, result in path.iteritems()
                                  if result)
            except Exception as e:
                self._authz = None
                self._users = set()
                self.log.error('Error parsing authz file: %s',
                               exception_to_unicode(e))
        return self._authz, self._users
Exemple #16
0
class DownloadPlugin(Component):

    implements(INavigationContributor, IRequestHandler, IAdminPanelProvider,
               ITemplateProvider, IPermissionRequestor)

    path = PathOption('downloads',
                      'path',
                      '../downloads',
                      doc="Path where to store uploaded downloads.")

    ext = ListOption(
        'downloads',
        'ext',
        'zip,gz,bz2,rar',
        doc="""List of file extensions allowed to upload. Set to 'all'
            to specify that any file extensions is allowed.
            """)

    max_size = IntOption(
        'downloads', 'max_size', 268697600,
        """Maximum allowed file size (in bytes) for downloads. Default
        is 256 MB.
        """)

    def __init__(self):
        self.env.log.debug('Download Initial')

    # INavigationContributor methods
    def get_active_navigation_item(self, req):
        if 'DOWNLOAD_VIEW' in req.perm:
            return 'downloads'

    def get_navigation_items(self, req):
        if 'DOWNLOAD_VIEW' in req.perm:
            yield ('mainnav', 'downloads',
                   html.a('Downloads', href=req.href.downloads()))

    # IRequestHandler methods
    def match_request(self, req):
        return req.path_info.startswith('/downloads')

    def process_request(self, req):
        data = {}
        cursor = self.env.db_query(
            "SELECT id, file, description FROM download ORDER BY id")
        data['downloads'] = [(row[0], row[1], row[2]) for row in cursor]
        return ('downloads.html', data, None)

    # IAdminPageProvider methods
    def get_admin_panels(self, req):
        if req.perm.has_permission('DOWNLOAD_ADMIN'):
            yield ('general', 'General', 'download', 'Download')

    def render_admin_panel(self, req, cat, page, version):
        # here comes the page content, handling, etc.
        data = {}

        if req.method == "POST":
            submit = req.args.get('submit').strip()
            if submit == 'Add':
                # Get form values.
                #file = req.args['download']
                # Test if file is uploaded.
                #if hasattr(file, 'filename'):
                #    self.log.debug("Filename:" + file.filename)

                file, filename, file_size = self.get_file_from_req(req)
                download = {
                    'file': filename,
                    'description': req.args.get('description'),
                    'size': file_size,
                    'time': to_timestamp(datetime.datetime.now(utc)),
                    'count': 0,
                    'author': req.authname
                }
                self.log.debug("FileUpload filename:" + download['file'])
                self.log.debug("FileUpload description:" +
                               download['description'])
                self.log.debug("FileUpload size:", download['size'])
                self.log.debug("FileUpload time:", download['time'])
                self.log.debug("FileUpload author:" + download['author'])
                # Upload file to DB and file storage.
                self.add_download(download, file)
                file.close()

                add_notice(req, 'Download has been added.')
            elif submit == 'Remove':
                ids = req.args.getlist('sels')
                if ids is not None and len(ids) > 0:
                    for id in ids:
                        sql = "DELETE FROM download WHERE id ={}".format(
                            int(id))
                        self.env.db_transaction(sql)
                    add_notice(req, 'Download has been deleted.')
        else:
            # Get download.
            download_id = req.args.as_int('sel', 0)
            if download_id > 0:
                sql = "SELECT file, description FROM download where id={}".format(
                    download_id)
                cursor = self.env.db_query(sql)
                if len(cursor) > 0:
                    fn = cursor[0][0]
                    description = cursor[0][1]
                else:
                    raise TracError("File not found.")

                # Get download file path.
                filename = os.path.basename(fn)
                filepath = os.path.join(self.path, to_unicode(download_id),
                                        filename)
                filepath = os.path.normpath(filepath)

                # Increase downloads count.
                sql = "UPDATE download SET count=count+1 WHERE id ={}".format(
                    download_id)
                self.env.db_transaction(sql)

                # Guess mime type.
                with open(filepath.encode('utf-8'), 'rb') as fileobj:
                    file_data = fileobj.read(1000)
                mimeview = Mimeview(self.env)
                mime_type = mimeview.get_mimetype(filepath, file_data)
                if not mime_type:
                    mime_type = 'application/octet-stream'
                if 'charset=' not in mime_type:
                    charset = mimeview.get_charset(file_data, mime_type)
                    mime_type = mime_type + '; charset=' + charset

                # Return uploaded file to request.
                req.send_header(
                    'Content-Disposition',
                    'attachment;filename="%s"' % filename)
                req.send_header('Content-Description', description)
                req.send_file(filepath.encode('utf-8'), mime_type)

        cursor = self.env.db_query(
            "SELECT id, file, description, size, time, author FROM download ORDER BY id"
        )
        data['downloads'] = [(row[0], row[1], row[2]) for row in cursor]
        return ('admin_download.html', data, None)

    # ITemplateProvider
    def get_htdocs_dirs(self):
        """Return the absolute path of a directory containing additional
        static resources (such as images, style sheets, etc).
        """
        from pkg_resources import resource_filename
        return [('downloads', resource_filename(__name__, 'htdocs'))]

    def get_templates_dirs(self):
        """Return the absolute path of the directory containing the provided
        ClearSilver/Genshi templates.
        """
        from pkg_resources import resource_filename
        return [resource_filename(__name__, 'templates')]

    # IPermissionRequestor methods.
    def get_permission_actions(self):
        view = 'DOWNLOAD_VIEW'
        add = ('DOWNLOAD_ADD', ['DOWNLOAD_VIEW'])
        admin = ('DOWNLOAD_ADMIN', ['DOWNLOAD_VIEW', 'DOWNLOAD_ADD'])
        return [view, add, admin]

    def get_download_id_by_time(self, time):
        cursor = self.env.db_query(
            "SELECT id, file, description, size, time, author FROM download where time={}"
            .format(time))
        for row in cursor:
            return row[0]
        return None

    def get_file_from_req(self, req):
        file = req.args['file']

        # Test if file is uploaded.
        if not hasattr(file, 'filename') or not file.filename:
            raise TracError("No file uploaded.")

        # Get file size.
        if hasattr(file.file, 'fileno'):
            size = os.fstat(file.file.fileno())[6]
        else:
            # Seek to end of file to get its size.
            file.file.seek(0, 2)
            size = file.file.tell()
            file.file.seek(0)
        if size == 0:
            raise TracError("Can't upload empty file.")

        # Try to normalize the filename to unicode NFC if we can.
        # Files uploaded from OS X might be in NFD.
        self.log.debug("input filename: %s", file.filename)
        filename = unicodedata.normalize('NFC',
                                         to_unicode(file.filename, 'utf-8'))
        filename = filename.replace('\\', '/').replace(':', '/')
        filename = os.path.basename(filename)
        self.log.debug("output filename: %s", filename)

        return file.file, filename, size

    def add_download(self, download, file):
        # Check for file name uniqueness.
        #if self.unique_filename:
        #   if self.get_download_by_file(download['file']):
        #        raise TracError("File with same name is already uploaded "
        #                        "and unique file names are enabled.")

        # Check correct file type.
        #name, ext = os.path.splitext(download['file'])
        #if not (ext[1:].lower() in self.ext) and not ('all' in self.ext):
        #    raise TracError("Unsupported file type.")

        # Check for maximum file size.
        if 0 <= self.max_size < download['size']:
            raise TracError("Maximum file size: %s bytes" % self.max_size,
                            "Upload failed")

        # Add new download to DB.
        #self.add_download(download)
        sql = "INSERT INTO download (file,description,size,time,author) " \
                            " VALUES(%s,%s,%s,%s,%s)"
        args = (download['file'], download['description'], download['size'],
                download['time'], download['author'])
        self.env.db_transaction(sql, args)
        self.log.debug("FileUpload SQL: %s", sql)

        # Get inserted download by time to get its ID.
        id = self.get_download_id_by_time(download['time'])
        self.log.debug("FileUpload id: %s", id)

        # Prepare file paths.
        path = os.path.normpath(os.path.join(self.path, to_unicode(id)))
        filepath = os.path.normpath(os.path.join(path, download['file']))

        self.log.debug("FileUpload path: %s", path)
        self.log.debug("FileUpload filepath: %s", filepath)

        # Store uploaded image.
        try:
            os.mkdir(path.encode('utf-8'))
            with open(filepath.encode('utf-8'), 'wb+') as fileobj:
                file.seek(0)
                shutil.copyfileobj(file, fileobj)
        except Exception as error:
            self._delete_download(id)
            self.log.debug(error)
            try:
                os.remove(filepath.encode('utf-8'))
            except OSError:
                pass
            try:
                os.rmdir(path.encode('utf-8'))
            except OSError:
                pass
            raise TracError("Error storing file %s. Does the directory "
                            "specified in path config option of [downloads] "
                            "section of trac.ini exist?" % download['file'])
Exemple #17
0
        """Use git-committer id instead of git-author id for the
        changeset ''Author'' field.
        """)

    use_committer_time = BoolOption(
        'git', 'use_committer_time', 'true',
        """Use git-committer timestamp instead of git-author timestamp
        for the changeset ''Timestamp'' field.
        """)

    git_fs_encoding = Option(
        'git', 'git_fs_encoding', 'utf-8',
        """Define charset encoding of paths within git repositories.""")

    git_bin = PathOption(
        'git', 'git_bin', '/usr/bin/git',
        """Path to git executable (relative to the Trac configuration folder,
        so better use an absolute path here).""")
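    # These options map onto a trac.ini section along the lines of
    # (values illustrative, matching the defaults above):
    #
    #   [git]
    #   use_committer_time = true
    #   git_fs_encoding = utf-8
    #   git_bin = /usr/bin/git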

    def get_supported_types(self):
        yield ('git', 8)

    def get_repository(self, type, dir, params):
        """GitRepository factory method"""
        assert type == 'git'

        if not (4 <= self.shortrev_len <= 40):
            raise TracError(
                "[git] shortrev_len setting must be within [4..40]")

        if not (4 <= self.wiki_shortrev_len <= 40):
            raise TracError(
                "[git] wiki_shortrev_len setting must be within [4..40]")
Exemple #18
0
class MultiProductSystem(Component):
    """Creates the database tables and template directories"""

    implements(IEnvironmentSetupParticipant, IExternalResourceConnector,
               IPermissionRequestor, IResourceChangeListener, IResourceManager,
               ISupportMultiProductEnvironment, ITemplateProvider,
               ITicketFieldProvider, IWikiSyntaxProvider, ITicketManipulator)

    default_product_prefix = Option(
        'multiproduct',
        'default_product_prefix',
        default='@',
        doc="""Prefix used for default product when migrating single-product
        installations to multi-product.""", doc_domain='multiproduct')

    default_product = Option('ticket', 'default_product', '',
        """Default product for newly created tickets.""")

    product_base_url = Option('multiproduct', 'product_base_url', '',
        """A pattern used to generate the base URL of product environments,
        e.g. the use cases listed in bh:wiki:/Proposals/BEP-0003#url-mapping.
        Both absolute and relative URLs are supported. The latter will be
        resolved with respect to the base URL of the parent global
        environment. The pattern may contain references to $(envname)s,
        $(prefix)s and $(name)s placeholders representing the environment
        name, product prefix and product name respectively. If nothing is
        set, `products/$(prefix)s` will be used.

        Note the usage of `$(...)s` instead of `%(...)s` as the latter form
        would be interpreted by the ConfigParser itself. """,
                              doc_domain='multiproduct')
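    # Illustrative example (assumed prefix): with
    #   [multiproduct]
    #   product_base_url = products/$(prefix)s
    # a product with prefix 'XYZ' is resolved relative to the global
    # environment's base URL as products/XYZ.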

    product_config_parent = PathOption('inherit', 'multiproduct', '',
        """The path to the configuration file containing the settings shared
        by sibling product environments. By default the global environment
        configuration is inherited.
        """, doc_domain='multiproduct')

    SCHEMA = [mcls._get_schema()
              for mcls in (Product, ProductResourceMap)]

    # Tables which should be migrated (extended with 'product' column)
    MIGRATE_TABLES = ['component',
                      'milestone',
                      'version',
                      'enum',
                      'permission',
                      'wiki',
                      'report',
                      ]

    PRODUCT_POPULATE_TABLES = list(set(MIGRATE_TABLES) - set(['wiki']))

    def __init__(self, *args, **kwargs):
        import pkg_resources
        locale_dir = pkg_resources.resource_filename(__name__, 'locale')
        add_domain(self.env.path, locale_dir)
        super(MultiProductSystem, self).__init__(*args, **kwargs)

    def get_version(self):
        """Finds the current version of the bloodhound database schema"""
        rows = self.env.db_direct_query("""
            SELECT value FROM system WHERE name = %s
            """, (DB_SYSTEM_KEY,))
        return int(rows[0][0]) if rows else -1

    # IEnvironmentSetupParticipant methods
    def environment_created(self):
        """Insertion of any default data into the database."""
        self.log.debug("creating environment for %s plugin." % PLUGIN_NAME)

    def environment_needs_upgrade(self, db_dummy=None):
        """Detects if the installed db version matches the running system"""
        db_installed_version = self.get_version()

        if db_installed_version > DB_VERSION:
            raise TracError('''Current db version (%d) is newer than the version
            supported by this release of the %s (%d).''' % (db_installed_version,
                                               PLUGIN_NAME,
                                               DB_VERSION))
        needs_upgrade = db_installed_version < DB_VERSION
        if not needs_upgrade:
            self.env.enable_multiproduct_schema(True)
        return needs_upgrade

    def _update_db_version(self, db, version):
        old_version = self.get_version()
        if old_version != -1:
            self.log.info("Updating multiproduct database schema from version %d"
                          " to %d" % (old_version, version))
            db("""UPDATE system SET value=%s
                      WHERE name=%s""", (version, DB_SYSTEM_KEY))
        else:
            self.log.info("Initial multiproduct database schema set to version %d" % version)
            db("""
                INSERT INTO system (name, value) VALUES ('%s','%s')
                """  % (DB_SYSTEM_KEY, version))
        return version


    _system_wiki_list = None
    @property
    def system_wiki_list(self):
        if MultiProductSystem._system_wiki_list is None:
            MultiProductSystem._system_wiki_list = self._get_system_wiki_list()
        return MultiProductSystem._system_wiki_list

    def _get_system_wiki_list(self):
        """Helper function that enumerates all 'system' wikis. The
        list is combined of default wiki pages and pages that are
        bundled with Bloodhound dashboard and search plugins"""
        from bhdashboard import wiki

        paths = [resource_filename('trac.wiki',
                                   'default-pages')] + \
                [resource_filename('bhdashboard',
                                   'default-pages')] + \
                [resource_filename('bhsearch',
                                   'default-pages')]
        pages = []
        original_pages = []
        for path in paths:
            for page in os.listdir(path):
                filename = os.path.join(path, page)
                page = unicode_unquote(page.encode('utf-8'))
                if os.path.isfile(filename):
                    original_pages.append(page)
        for original_name in original_pages:
            if original_name.startswith('Trac'):
                new_name = wiki.new_name(original_name)
                if not new_name:
                    continue
                if new_name in original_pages:
                    continue
                name = new_name
                # original trac wikis should also be included in the list
                pages.append(original_name)
            else:
                name = original_name
            pages.append(name)
        return pages

    def upgrade_environment(self, db_dummy=None):
        """Installs or updates tables to current version"""
        self.log.debug("upgrading existing environment for %s plugin." %
                       PLUGIN_NAME)
        db_installed_version = self.get_version()
        with self.env.db_direct_transaction as db:
            if db_installed_version < 1:
                self._add_column_product_to_ticket(db)
                self._create_multiproduct_tables(db)
                db_installed_version = self._update_db_version(db, 1)

            if db_installed_version < 2:
                self._replace_product_on_ticket_with_product_prefix(db)
                db_installed_version = self._update_db_version(db, 2)

            if db_installed_version < 3:
                SYSTEM_TABLES = ['system']
                TICKET_TABLES = [
                    'ticket_change', 'ticket_custom', 'attachment',
                ]
                table_defs = self._add_product_column_to_tables(
                    self.MIGRATE_TABLES + TICKET_TABLES + SYSTEM_TABLES,
                    db_installed_version)
                table_columns = self._get_table_columns(table_defs)
                create_temp_table = lambda table: self._create_temp_table(
                    db, table, table_columns, table_defs)

                self._insert_default_product(db)
                self._upgrade_tickets(db, TICKET_TABLES, create_temp_table)
                self._upgrade_wikis(db, create_temp_table)
                self._upgrade_system_tables(db, create_temp_table)
                self._soft_link_repositories_to_default_product(db)
                self._upgrade_table_system(SYSTEM_TABLES, create_temp_table, db)
                self._enable_multiproduct_hooks()

                db_installed_version = self._update_db_version(db, 3)

            if db_installed_version < 4:
                self._create_product_tables_for_plugins(db)
                db_installed_version = self._update_db_version(db, 4)

            if db_installed_version < 5:
                table_defs = self._add_product_column_to_tables(
                    ['ticket'], db_installed_version)
                self._modify_ticket_pk(db, table_defs)
                db_installed_version = self._update_db_version(db, 5)

            self.env.enable_multiproduct_schema(True)

    def _add_column_product_to_ticket(self, db):
        self.log.debug("Adding field product to ticket table")
        db("ALTER TABLE ticket ADD COLUMN product TEXT")

    def _create_multiproduct_tables(self, db):
        self.log.debug("Creating initial db tables for %s plugin." %
                       PLUGIN_NAME)
        db_connector, dummy = DatabaseManager(self.env)._get_connector()
        for table in self.SCHEMA:
            for statement in db_connector.to_sql(table):
                db(statement)

    def _replace_product_on_ticket_with_product_prefix(self, db):
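        # Replace any product name stored on tickets with the corresponding
        # product prefix.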
        for prod in Product.select(self.env):
            db("""UPDATE ticket SET product=%s
                          WHERE product=%s""", (prod.prefix, prod.name))

    def _create_temp_table(self, db, table, table_columns, table_defs):
        """creates temporary table with the new schema and
        drops original table"""
        table_temp_name = '%s_temp' % table
        if table == 'report':
            cols = ','.join([c for c in table_columns[table] if c != 'id'])
        else:
            cols = ','.join(table_columns[table])
        self.log.info("Migrating table '%s' to a new schema", table)
        db("""CREATE TABLE %s AS SELECT %s FROM %s""" %
              (table_temp_name, cols, table))
        db("""DROP TABLE %s""" % table)
        db_connector, _ = DatabaseManager(self.env)._get_connector()
        table_schema = [t for t in table_defs if t.name == table][0]
        for sql in db_connector.to_sql(table_schema):
            db(sql)
        return table_temp_name, cols

    def _drop_temp_table(self, db, table):
        db("""DROP TABLE %s""" % table)

    def _add_product_column_to_tables(self, tables, current_version):
        """Extend trac default schema by adding product column
        and extending key with product.
        """
        table_defs = [copy.deepcopy(t) for t in trac.db_default.schema
                      if
                      t.name in tables]
        for t in table_defs:
            t.columns.append(Column('product'))
            if isinstance(t.key, list):
                t.key = tuple(t.key) + tuple(['product'])
            elif isinstance(t.key, tuple):
                t.key = t.key + tuple(['product'])
            else:
                raise TracError(
                    "Invalid table '%s' schema key '%s' while upgrading "
                    "plugin '%s' from version %d to %d'" %
                    (t.name, t.key, PLUGIN_NAME, current_version, 3))
        return table_defs

    def _get_table_columns(self, table_defs, all_columns=False):
        table_columns = dict()
        for table in table_defs:
            table_definition = \
                [t for t in table_defs if t.name == table.name][0]
            column_names = \
                [column.name for column in table_definition.columns]
            table_columns[table.name] = \
                [c for c in column_names if all_columns or c != 'product']
        return table_columns

    def _insert_default_product(self, db):
        self.log.info("Creating default product")
        db("""INSERT INTO bloodhound_product (prefix, name, description, owner)
              VALUES ('%s', '%s', '%s', '')
           """ % (self.default_product_prefix, 'Default', 'Default product'))

    def _upgrade_tickets(self, db, TICKET_TABLES, create_temp_table):
        # migrate tickets that don't have product assigned to default product
        # - update ticket table product column
        # - update ticket related tables by:
        #   - upgrading schema
        #   - update product column to match ticket's product
        self.log.info("Migrating tickets w/o product to default product")
        db("""UPDATE ticket SET product='%s'
                      WHERE (product IS NULL OR product='')
           """ % self.default_product_prefix)
        self._migrate_attachments(
            db("""SELECT a.type, a.id, a.filename
                            FROM attachment a
                      INNER JOIN ticket t ON a.id = %(t.id)s
                           WHERE a.type='ticket'
                       """ % {'t.id': db.cast('t.id', 'text')}),
            to_product=self.default_product_prefix
        )
        self.log.info("Migrating ticket tables to a new schema")
        for table in TICKET_TABLES:
            temp_table_name, cols = create_temp_table(table)
            db("""INSERT INTO %s (%s, product)
                          SELECT %s, '' FROM %s""" %
               (table, cols, cols, temp_table_name))
            self._drop_temp_table(db, temp_table_name)
            if table == 'attachment':
                db("""UPDATE attachment
                         SET product=(SELECT ticket.product
                                        FROM ticket
                                       WHERE %(ticket.id)s=attachment.id
                                       LIMIT 1)
                       WHERE attachment.type='ticket'
                         AND EXISTS(SELECT ticket.product
                                      FROM ticket
                                     WHERE %(ticket.id)s=attachment.id)
                   """ % {'ticket.id': db.cast('ticket.id', 'text')})
            else:
                db("""UPDATE %(table)s
                         SET product=(SELECT ticket.product
                                        FROM ticket
                                       WHERE ticket.id=%(table)s.ticket)
                   """ % {'table': table})

    def _upgrade_system_tables(self, db, create_temp_table):
        # migrate system table (except wiki which is handled separately)
        # to a new schema
        # - create tables with the new schema
        # - populate system tables with global configuration for each product
        # - exception is permission table where permissions
        #   are also populated in global scope
        #
        # permission table specifics: 'anonymous' and 'authenticated' users
        # should by default have a PRODUCT_VIEW permission for all products
        self.log.info("Migrating system tables to a new schema")
        for table in self.MIGRATE_TABLES:
            if table == 'wiki':
                continue
            temp_table_name, cols = create_temp_table(table)
            for product in Product.select(self.env):
                self.log.info("Populating table '%s' for product '%s' ('%s')",
                              table, product.name, product.prefix)
                db("""INSERT INTO %s (%s, product) SELECT %s,'%s' FROM %s""" %
                   (table, cols, cols, product.prefix, temp_table_name))
                if table == 'permission':
                    db.executemany(
                        """INSERT INTO permission (username, action, product)
                           VALUES (%s, %s, %s)""",
                        [('anonymous', 'PRODUCT_VIEW', product.prefix),
                         ('authenticated', 'PRODUCT_VIEW', product.prefix)])

            if table == 'permission':
                self.log.info("Populating table '%s' for global scope", table)
                db("""INSERT INTO %s (%s, product) SELECT %s,'%s' FROM %s""" %
                   (table, cols, cols, '', temp_table_name))
            self._drop_temp_table(db, temp_table_name)
        db.executemany(
            """INSERT INTO permission (username, action, product)
                VALUES (%s, %s, %s)""",
            [('anonymous', 'PRODUCT_VIEW', ''),
             ('authenticated', 'PRODUCT_VIEW', '')])


    def _upgrade_wikis(self, db, create_temp_table):
        # migrate wiki table
        # - populate system wikis to all products + global scope
        # - update wiki attachment product to match wiki product
        table = 'wiki'
        temp_table_name, cols = create_temp_table(table)
        self.log.info("Migrating wikis to default product")
        db("""INSERT INTO %(table)s (%(cols)s, product)
                   SELECT %(cols)s, '%(default_product)s' FROM %(temp_table)s
           """ % dict(table=table,
                      temp_table=temp_table_name,
                      cols=cols,
                      default_product=self.default_product_prefix,))
        db("""UPDATE attachment
                 SET product='%s'
               WHERE attachment.type='wiki'
           """ % self.default_product_prefix)
        self._migrate_attachments(
            db("""SELECT type, id, filename
                    FROM attachment
                   WHERE type='wiki'
                     AND product='%s'
               """ % (self.default_product_prefix)),
            to_product=self.default_product_prefix,
        )
        self._drop_temp_table(db, temp_table_name)

    def _migrate_attachments(self, attachments, to_product=None, copy=False):
        for type, id, filename in attachments:
            old_path = Attachment._get_path(self.env.path, type, id, filename)
            new_path = self.env.path
            if to_product:
                new_path = os.path.join(new_path, 'products', to_product)
            new_path = Attachment._get_path(new_path, type, id, filename)
            dirname = os.path.dirname(new_path)
            if not os.path.exists(old_path):
                self.log.warning(
                    "Missing attachment files for %s:%s/%s",
                    type, id, filename)
                continue
            if os.path.exists(new_path):
                # TODO: Do we want to overwrite?
                continue
            try:
                if not os.path.exists(dirname):
                    os.makedirs(dirname)
                if copy:
                    if hasattr(os, 'link'):
                        # TODO: Is this safe?
                        os.link(old_path, new_path)
                    else:
                        shutil.copy(old_path, new_path)
                else:
                    os.rename(old_path, new_path)
            except OSError as err:
                self.log.warning(
                    "Could not move attachment %s from %s %s to"
                    "product @ (%s)",
                    filename, type, id, str(err)
                )

    def _soft_link_repositories_to_default_product(self, db):
        # soft link existing repositories to default product
        repositories_linked = []
        for id, name in db("""SELECT id, value FROM repository
                                      WHERE name='name'"""):
            if id in repositories_linked:
                continue
            db("""INSERT INTO repository (id, name, value)
                          VALUES (%s, 'product', '%s')""" %
               (id, self.default_product_prefix))
            repositories_linked.append(id)
            self.log.info("Repository '%s' (%s) soft linked to default product",
                          name, id)

    def _upgrade_table_system(self, SYSTEM_TABLES, create_temp_table, db):
        # Update system tables
        # Upgrade schema
        self.log.info("Migrating system tables to a new schema")
        for table in SYSTEM_TABLES:
            temp_table_name, cols = create_temp_table(table)
            db("""INSERT INTO %s (%s, product)
                          SELECT %s,'' FROM %s""" %
               (table, cols, cols, temp_table_name))
            self._drop_temp_table(db, temp_table_name)

    def _enable_multiproduct_hooks(self):
        # enable multi product hooks in environment configuration

        config_update = False
        if not 'environment_factory' in self.env.config['trac']:
            self.env.config['trac'].set('environment_factory',
                                        'multiproduct.hooks.MultiProductEnvironmentFactory')
            config_update = True
        if not 'request_factory' in self.env.config['trac']:
            self.env.config['trac'].set('request_factory',
                                        'multiproduct.hooks.ProductRequestFactory')
            config_update = True
        if config_update:
            self.log.info(
                "Enabling multi product hooks in environment configuration")
            self.env.config.save()

    def _create_product_tables_for_plugins(self, db):
        self.log.debug("creating additional db tables for %s plugin." %
                       PLUGIN_NAME)
        db_connector, dummy = DatabaseManager(self.env)._get_connector()
        for statement in db_connector.to_sql(ProductSetting._get_schema()):
            db(statement)

    def _modify_ticket_pk(self, db, table_defs):
        self.log.debug("Modifying ticket primary key: id -> uid")
        table_columns = self._get_table_columns(table_defs, True)
        db_connector, _ = DatabaseManager(self.env)._get_connector()

        def rename_id_to_uid(table):
            for c in table.columns:
                if c.name == 'id':
                    c.name = 'uid'
                    break
            table.key = ['uid']

        def add_new_id_column(table):
            id_column = Column('id', type='int', auto_increment=True)
            if using_sqlite_backend(self.env) or using_mysql_backend(self.env):
                # sqlite and mysql don't support multiple auto increment columns
                id_column.auto_increment = False
            table.columns.append(id_column)
            table.indices.append(Index(['product', 'id'], unique=True))


        for t in table_defs:
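            # Rebuild the table through a temporary copy so that `uid` becomes
            # the primary key while (product, id) stays unique.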
            rename_id_to_uid(t)
            add_new_id_column(t)

            temp_table_name, cols = self._create_temp_table(
                db, t.name, table_columns, table_defs)
            db("""INSERT INTO ticket (%s, uid)
                       SELECT %s, id FROM ticket_temp""" %
                (cols, cols))
            self._drop_temp_table(db, temp_table_name)
            db.update_sequence(db.cursor(), 'ticket', 'id')
            db.update_sequence(db.cursor(), 'ticket', 'uid')

    # IResourceChangeListener methods
    def match_resource(self, resource):
        return isinstance(resource, Product)

    def resource_created(self, resource, context):
        import trac.db_default
        from multiproduct.env import EnvironmentStub

        # Don't populate product database when running from within test
        # environment stub as test cases really don't expect that ...
        if isinstance(self.env, EnvironmentStub):
            return

        product = resource
        self.log.debug("Adding product info (%s) to tables:" % product.prefix)
        with self.env.db_direct_transaction as db:
            # create the default entries for this Product from defaults
            for table in trac.db_default.get_data(db):
                if not table[0] in self.PRODUCT_POPULATE_TABLES:
                    continue

                self.log.debug("  -> %s" % table[0])
                cols = table[1] + ('product', )
                rows = [p + (product.prefix, ) for p in table[2]]
                db.executemany(
                    "INSERT INTO %s (%s) VALUES (%s)" %
                    (table[0], ','.join(cols), ','.join(['%s' for c in cols])),
                    rows)

        # Import default pages in product wiki
        wikiadmin = WikiAdmin(ProductEnvironment(self.env, product.prefix))
        pages = ('TitleIndex', 'RecentChanges', 'InterTrac', 'InterWiki')
        for page in pages:
            filename = resource_filename('trac.wiki', 'default-pages/' + page)
            wikiadmin.import_page(filename, page)

    def resource_changed(self, resource, old_values, context):
        return

    def resource_deleted(self, resource, context):
        return

    def resource_version_deleted(self, resource, context):
        return

    # ITemplateProvider methods
    def get_templates_dirs(self):
        """provide the plugin templates"""
        return [resource_filename(__name__, 'templates')]

    def get_htdocs_dirs(self):
        """proved the plugin htdocs"""
        return []

    # IPermissionRequestor methods
    def get_permission_actions(self):
        acts = ['PRODUCT_CREATE', 'PRODUCT_DELETE', 'PRODUCT_MODIFY',
                'PRODUCT_VIEW']
        if not isinstance(self.env, ProductEnvironment):
            return acts + [('PRODUCT_ADMIN', acts)] + [('ROADMAP_ADMIN', acts)]
        else:
            # In product context PRODUCT_ADMIN will be provided by product env
            # to ensure it will always be handy
            return acts

    # ITicketFieldProvider methods
    def get_select_fields(self):
        """Product select fields"""
        return [(35, {'name': 'product', 'label': _('Product'),
                      'cls': Product, 'pk': 'prefix', 'optional': False,
                      'value': self.default_product})]

    def get_radio_fields(self):
        """Product radio fields"""
        return []

    # IResourceManager methods
    def get_resource_realms(self):
        """Manage 'product' realm.
        """
        yield 'product'

    def get_resource_description(self, resource, format='default', context=None,
                                 **kwargs):
        """Describe product resource.
        """
        desc = resource.id
        if format != 'compact':
            desc = _('Product %(name)s', name=resource.id)
        if context:
            return self._render_link(context, resource.id, desc)
        else:
            return desc

    def resource_exists(self, resource):
        """Check whether product exists physically.
        """
        products = Product.select(self.env, where={'name' : resource.id})
        return bool(products)

    # IExternalResourceConnector methods
    def get_supported_neighborhoods(self):
        """Neighborhoods for `product` and `global` environments.
        """
        yield 'product'
        yield 'global'

    def load_manager(self, neighborhood):
        """Load global environment or product environment given its prefix
        """
        if neighborhood._realm == 'global':
            # FIXME: ResourceNotFound if neighborhood ID != None ?
            prefix = GLOBAL_PRODUCT
        elif neighborhood._realm == 'product':
            prefix = neighborhood._id
        else:
            raise ResourceNotFound(_(u'Unsupported neighborhood %(realm)s',
                                     realm=neighborhood._realm))
        try:
            return lookup_product_env(self.env, prefix)
        except LookupError:
            raise ResourceNotFound(_(u'Unknown product prefix %(prefix)s',
                                     prefix=prefix))

    def manager_exists(self, neighborhood):
        """Check whether the target environment exists physically.
        """
        if neighborhood._realm == 'global':
            # Global environment
            return isinstance(self.env, (Environment, ProductEnvironment))
        elif neighborhood._realm == 'product':
            prefix = neighborhood._id
            if not prefix:
                # Global environment
                return True
            return Product(lookup_product_env(self.env, GLOBAL_PRODUCT),
                           {'prefix' : prefix})._exists

    # IWikiSyntaxProvider methods

    short_syntax_delimiter = u'->'

    def get_wiki_syntax(self):
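        # Cross-product short links: `prefix->target` (where target may be a
        # quoted string or a TracLinks reference) and, if the product ticket
        # module is enabled, `prefix-123` ticket shortcuts.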
        yield (r'(?<!\S)!?(?P<pid>%s)%s(?P<ptarget>%s:(?:%s)|%s|%s(?:%s*%s)?)' %
                    (IDENTIFIER,
                     PRODUCT_SYNTAX_DELIMITER_RE,
                     WikiParser.LINK_SCHEME, WikiParser.QUOTED_STRING,
                     WikiParser.QUOTED_STRING, WikiParser.SHREF_TARGET_FIRST,
                     WikiParser.SHREF_TARGET_MIDDLE, WikiParser.SHREF_TARGET_LAST),
               lambda f, m, fm :
                    self._format_link(f, 'product',
                                      '%s:%s' % (fm.group('pid'),
                                                 unquote_label(fm.group('ptarget'))),
                                      fm.group(0), fm))
        if self.env[ProductTicketModule] is not None:
            yield (r"(?<!\S)!?(?P<jtp>%s)-(?P<jtt>\d+)(?P<jtf>[?#]\S+)?" %
                        (IDENTIFIER,),
                   lambda f, m, fm :
                        self._format_link(f, 'product',
                                          '%s:ticket:%s' %
                                                (fm.group('jtp'),
                                                 fm.group('jtt') +
                                                 (fm.group('jtf') or '')),
                                          m, fm))

    def get_link_resolvers(self):
        yield ('global', self._format_link)
        yield ('product', self._format_link)

    # ITicketManipulator methods
    def validate_ticket(self, req, ticket):
        # check whether the owner exists in db, add a warning if not
        if req.args.get('action') == 'reassign' and \
           ticket['owner'] != self.env.config.get('ticket', 'default_owner'):
            owner = self.env.db_direct_query(
                "SELECT sid FROM session WHERE sid=%s",
                (ticket['owner'], ))
            if not owner:
                # Note: add_warning() is used instead of returning a list of
                # error tuples, since the latter results in trac rendering
                # errors (ticket's change.date is not populated)
                add_warning(req, _('The user "%s" does not exist.') %
                    ticket['owner'])
        return []


    # Internal methods

    def _render_link(self, context, name, label, extra='', prefix=None):
        """Render link to product page.
        """
        product_env = product = None
        env = self.env
        if isinstance(env, ProductEnvironment):
            if (prefix is not None and env.product.prefix == prefix) \
                    or (prefix is None and env.name == name):
                product_env = env
            env = env.parent
        try:
            if product_env is None:
                if prefix is not None:
                    product_env = ProductEnvironment(env, to_unicode(prefix))
                else:
                    product = Product.select(env,
                                             where={'name' : to_unicode(name)})
                    if not product:
                        raise LookupError("Missing product")
                    product_env = ProductEnvironment(env,
                                                     to_unicode(product[0]))
        except LookupError:
            pass

        if product_env is not None:
            product = product_env.product
            href = resolve_product_href(to_env=product_env, at_env=self.env)
            if 'PRODUCT_VIEW' in context.perm(product.resource):
                return tag.a(label, class_='product', href=href() + extra,
                             title=product.name)
        if 'PRODUCT_CREATE' in context.perm('product', name):
            params = [('action', 'new')]
            if prefix:
                params.append( ('prefix', prefix) )
            if name:
                params.append( ('name', name) )
            return tag.a(label, class_='missing product',
                    href=env.href('products', params),
                    rel='nofollow')
        return tag.a(label, class_='missing product')

    def _format_link(self, formatter, ns, target, label, fullmatch):
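        # Resolve `product:` and `global:` links: a bare `product:prefix`
        # links to the product page, anything else is rendered in the
        # target environment.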
        link, params, fragment = formatter.split_link(target)
        expr = link.split(':', 1)
        if ns == 'product' and len(expr) == 1:
            # product:prefix form
            return self._render_link(formatter.context, None, label,
                                     params + fragment, expr[0])
        elif ns == 'global' or (ns == 'product' and expr[0] == ''):
            # global scope
            sublink = link if ns == 'global' else expr[1]
            target_env = self.env.parent \
                            if isinstance(self.env, ProductEnvironment) \
                            else self.env
            return self._make_sublink(target_env, sublink, formatter, ns,
                                      target, label, fullmatch,
                                      extra=params + fragment)
        else:
            # product:prefix:realm:id:...
            prefix, sublink = expr
            try:
                target_env = lookup_product_env(self.env, prefix)
            except LookupError:
                return tag.a(label, class_='missing product')
            # TODO: Check for nested product links
            # e.g. product:p1:product:p2:ticket:1
            return self._make_sublink(target_env, sublink, formatter, ns,
                                      target, label, fullmatch,
                                      extra=params + fragment)

    FakePermClass = FakePerm

    def _make_sublink(self, env, sublink, formatter, ns, target, label,
                      fullmatch, extra=''):
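        # Render a link in the context of the target environment, using that
        # environment's href and permissions for the nested formatter.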
        parent_match = {'ns' : ns,
                        'target' : target,
                        'label': Markup(escape(unescape(label)
                                               if isinstance(label, Markup)
                                               else label)),
                        'fullmatch' : fullmatch,
                        }

        # Tweak nested context to work in target product/global scope
        subctx = formatter.context.child()
        subctx.href = resolve_product_href(to_env=env, at_env=self.env)
        try:
            req = formatter.context.req
        except AttributeError:
            pass
        else:
            # Authenticate in local context but use foreign permissions
            subctx.perm = self.FakePermClass() \
                            if isinstance(req.session, FakeSession) \
                            else PermissionCache(env, req.authname)
            subctx.req = req

        subformatter = EmbeddedLinkFormatter(env, subctx, parent_match)
        subformatter.auto_quote = True
        ctxtag = '[%s] ' % (env.product.prefix,) \
                    if isinstance(env, ProductEnvironment) \
                    else '<global> '
        subformatter.enhance_link = lambda link : (
                                link(title=ctxtag + link.attrib.get('title'))
                                if isinstance(link, Element)
                                    and 'title' in link.attrib
                                else link)
        link = subformatter.match(sublink + extra)
        if link:
            return link
        else:
            # Return outermost match unchanged like if it was !-escaped
            for itype, match in fullmatch.groupdict().items():
                if match and not itype in formatter.wikiparser.helper_patterns:
                    return escape(match)
Exemple #19
0
class ShortcutIconRequestPlugin(Component):
    """Implements the /favicon.ico handler."""
    implements(IRequestHandler, IRequestFilter)

    iconpath = PathOption('shortcuticon', 'iconpath', None,
                          "Filesystem path of shortcut icon")
    _mimetype = Option('shortcuticon', 'mimetype', None,
                       "Mimetype of shortcut icon")
    ishandler = BoolOption('shortcuticon', 'handler', True,
                           "Handler for '/favicon.ico'")
    isfilter = BoolOption('shortcuticon', 'linkheader', ishandler,
                          "Add 'link' tags for icon into HTML pages")

    path = r'/favicon.ico'

    exttypes = {
        '.ico': 'image/x-icon',
        '.png': 'image/png',
        '.jpg': 'image/jpg',
        '.gif': 'image/gif',
    }

    def __init__(self):
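        # Use the configured mimetype if set; otherwise guess it from the
        # icon's file extension, falling back to image/x-icon.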
        if self._mimetype:
            self.mimetype = self._mimetype
        else:
            try:
                iconpath = self.iconpath
                idx = iconpath.rindex('.', -4)
                self.mimetype = self.exttypes[iconpath[idx:]]
            except (AttributeError, ValueError, KeyError):
                self.mimetype = 'image/x-icon'

    # IRequestHandler methods
    def match_request(self, req):
        if not self.ishandler:
            return False
        return req.path_info == self.path \
            or req.path_info == req.base_path + self.path

    def process_request(self, req):
        iconpath = self.iconpath
        iconok = False
        if iconpath:
            if os.path.isfile(iconpath) and os.access(iconpath, R_OK):
                iconok = True
            else:
                self.env.log.warning("Icon '%s' isn't a readable file!" %
                                     iconpath)
        else:
            self.env.log.warning("No icon file configured!")

        if iconok:
            req.send_file(self.iconpath, self.mimetype)
        else:
            req.send_response(404)
            req.end_headers()
        raise RequestDone

    def pre_process_request(self, req, handler):
        return handler

    def post_process_request(self, req, template, data, content_type):
        if self.isfilter:
            path = req.base_path + self.path
            add_link(req, 'shortcut icon', path, None, self.mimetype)
            add_link(req, 'icon', path, None, self.mimetype)

        return (template, data, content_type)
Exemple #20
0
class CrashDumpModule(Component):
    """UI for crash dumps."""

    implements(IRequestHandler, IRequestFilter, ITemplateStreamFilter,
               INavigationContributor, ITemplateProvider, IAdminPanelProvider)

    dumpdata_dir = PathOption(
        'crashdump',
        'dumpdata_dir',
        default='../dumpdata',
        doc='Path to the crash dump data directory relative to the '
            'environment conf directory.')

    crashlink_query = Option(
        'crashdump',
        'crashlink_query',
        default='?status=!closed',
        doc="""The base query to be used when linkifying values of ticket
            fields. The query is a URL query
            string starting with `?` as used in `query:`
            [TracQuery#UsingTracLinks Trac links].
            (''since 0.12'')""")

    nav_url = Option(
        'crashdump', 'main_page', 'crash/list',
        'The url of the crashes main page to which the trac nav '
        'entry should link; if empty, no entry is created in '
        'the nav bar. This may be a relative url.')

    items_per_page = IntOption(
        'crashdump', 'items_per_page', 100,
        """Number of crashes displayed per page in default report,
        by default. Set to `0` to specify no limit.
        """)

    show_delete_crash = BoolOption(
        'crashdump',
        'show_delete_crash',
        'false',
        doc="""Show button to delete a crash from the system.""")

    crashdump_fields = set(['_crash'])
    crashdump_uuid_fields = set(['_crash_uuid'])
    crashdump_sysinfo_fields = set(['_crash_sysinfo'])
    datetime_fields = set(['crashtime', 'uploadtime', 'reporttime'])
    crashdump_link_fields = set(['linked_crash'])
    crashdump_ticket_fields = set(['linked_tickets'])

    @property
    def must_preserve_newlines(self):
        return True

    # INavigationContributor methods
    def get_active_navigation_item(self, req):
        self.log.debug('get_active_navigation_item %s' % req.path_info)
        if self.nav_url:
            if req.path_info == self.nav_url:
                return 'crash_list'

    def get_navigation_items(self, req):
        if self.nav_url:
            yield ('mainnav', 'crash_list',
                   tag.a('Crashes', href=req.href(self.nav_url)))

    # ITemplateStreamFilter methods
    def filter_stream(self, req, method, filename, stream, data):
        if not data:
            return stream

        # Check all of these templates at once so that modified or
        # pre-processed templates are also caught
        if filename in [
                "report_view.html", "query_results.html", "ticket.html",
                "query.html"
        ]:
            # For ticket.html
            if 'fields' in data and isinstance(data['fields'], list):
                for field in data['fields']:
                    for f in self.crashdump_fields:
                        if field['name'] == f and data['ticket'][f]:
                            field['rendered'] = self._link_crash(
                                req, data['ticket'][f])
                    for f in self.crashdump_uuid_fields:
                        if field['name'] == f and data['ticket'][f]:
                            field['rendered'] = self._link_crash(
                                req, data['ticket'][f], show_uuid=True)
                    for f in self.crashdump_sysinfo_fields:
                        if field['name'] == f and data['ticket'][f]:
                            field['rendered'] = self._link_crash(
                                req,
                                data['ticket'][f],
                                show_uuid=True,
                                sysinfo=True)
                    for f in self.crashdump_link_fields:
                        if field['name'] == f and data['ticket'][f]:
                            field['rendered'] = self._link_crashes_by_id(
                                req, data['ticket'][f])
            # For query_results.html and query.html
            if 'groups' in data and isinstance(data['groups'], list):
                for group, tickets in data['groups']:
                    for ticket in tickets:
                        for f in self.crashdump_fields:
                            if f in ticket:
                                ticket[f] = self._link_crash(req, ticket[f])
                        for f in self.crashdump_uuid_fields:
                            if f in ticket:
                                ticket[f] = self._link_crash(req,
                                                             ticket[f],
                                                             show_uuid=True)
                        for f in self.crashdump_sysinfo_fields:
                            if f in ticket:
                                ticket[f] = self._link_crash(req,
                                                             ticket[f],
                                                             show_uuid=True,
                                                             sysinfo=True)
                        for f in self.crashdump_ticket_fields:
                            if f in ticket:
                                ticket[f] = self._link_tickets(req, ticket[f])

            # For report_view.html
            if 'row_groups' in data and isinstance(data['row_groups'], list):
                #self.log.debug('got row_groups %s' % str(data['row_groups']))
                for group, rows in data['row_groups']:
                    for row in rows:
                        if 'cell_groups' in row and isinstance(
                                row['cell_groups'], list):
                            for cells in row['cell_groups']:
                                for cell in cells:
                                    # If the user renames a report column
                                    # (e.g. blockedby AS "blocked by"), this
                                    # lookup will not find it
                                    #self.log.debug('got cell header %s' % str(cell.get('header', {}).get('col')))
                                    col = cell.get('header', {}).get('col')
                                    if col in self.crashdump_fields:
                                        cell['value'] = self._link_crash(
                                            req, cell['value'])
                                        cell['header']['hidden'] = False
                                        cell['header']['title'] = _('Crashdump')
                                    elif col in self.crashdump_uuid_fields:
                                        cell['value'] = self._link_crash(
                                            req, cell['value'], show_uuid=True)
                                        cell['header']['hidden'] = False
                                        cell['header']['title'] = _('Crashdump')
                                    elif col in self.crashdump_sysinfo_fields:
                                        cell['value'] = self._link_crash(
                                            req, cell['value'],
                                            show_uuid=True, sysinfo=True)
                                        cell['header']['hidden'] = False
                                        cell['header']['title'] = _('System info')
                                    elif col in self.crashdump_ticket_fields:
                                        cell['value'] = self._link_tickets(
                                            req, cell['value'])
                                        cell['header']['hidden'] = False
                                        cell['header']['title'] = _('Linked tickets')
                                    elif col in self.datetime_fields:
                                        cell['value'] = self._format_datetime(
                                            req, cell['value'])
        return stream

    # ITemplateProvider methods
    def get_htdocs_dirs(self):
        """Return the absolute path of a directory containing additional
        static resources (such as images, style sheets, etc).
        """
        return [('crashdump', _htdoc_dir)]

    def get_templates_dirs(self):
        return [_template_dir]

    # IRequestHandler methods
    def match_request(self, req):
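        # Handled URLs: /crash/list and /crash/<uuid or id>[/<action>[/params]]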
        if not req.path_info.startswith('/crash'):
            return False

        ret = False
        action = None
        path_info = req.path_info[6:]
        if path_info == '/list':
            action = 'crash_list'
            ret = True
        else:
            #self.log.debug('match_request %s' % path_info)
            match = re.match(
                r'/([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})/?(.+)?$',
                path_info)
            if match:
                req.args['crashuuid'], action = match.groups()
                ret = True
            else:
                match = re.match(r'/([0-9]+)/?(.+)?$', path_info)
                if match:
                    req.args['crashid'], action = match.groups()
                    ret = True
        if ret:
            #self.log.debug('match_request raw_action:\"%s\"' % action)
            if action:
                e = action.split('/')
                #self.log.debug('match_request raw_action->e:\"%s\"' % e)
                req.args['action'] = e[0]
                req.args['params'] = e[1:] if len(e) > 1 else None
                #self.log.debug('match_request action->params:%s->\"%s\"' % (req.args['action'], req.args['params']))
            else:
                req.args['action'] = None
                req.args['params'] = None
        self.log.debug('match_request %s -> %s' % (path_info, str(req.args)))
        return ret

    # IRequestFilter methods
    def pre_process_request(self, req, handler):
        return handler

    def post_process_request(self,
                             req,
                             template,
                             data,
                             content_type,
                             method=None):
        if req.path_info.startswith('/ticket/'):
            # In case of an invalid ticket, the data is invalid
            if not data:
                return template, data, content_type, method
            tkt = data['ticket']
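            # Render linked-crash field changes in the ticket change log as
            # "added"/"removed" lists and track the new set of linked crashes.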
            with self.env.db_query as db:
                links = CrashDumpTicketLinks(self.env, tkt, db=db)

                for change in data.get('changes', {}):
                    if not change.has_key('fields'):
                        continue
                    for field, field_data in change['fields'].iteritems():
                        if field in self.crashdump_link_fields:
                            if field_data['new'].strip():
                                new = set([
                                    CrashDumpSystem.get_crash_id(n)
                                    for n in field_data['new'].split(',')
                                ])
                            else:
                                new = set()
                            if field_data['old'].strip():
                                old = set([
                                    CrashDumpSystem.get_crash_id(n)
                                    for n in field_data['old'].split(',')
                                ])
                            else:
                                old = set()
                            add = new - old
                            sub = old - new
                            elms = tag()
                            if add:
                                elms.append(
                                    tag.em(u', '.join(
                                        [unicode(n) for n in sorted(add)])))
                                elms.append(u' added')
                            if add and sub:
                                elms.append(u'; ')
                            if sub:
                                elms.append(
                                    tag.em(u', '.join(
                                        [unicode(n) for n in sorted(sub)])))
                                elms.append(u' removed')
                            field_data['rendered'] = elms
                            links.crashes = new

        return template, data, content_type, method

    def _prepare_data(self, req, crashobj, absurls=False):
        data = {
            'object': crashobj,
            'to_utimestamp': to_utimestamp,
            'hex_format': hex_format,
            'addr_format': None,
            'exception_code': exception_code,
            'format_bool_yesno': format_bool_yesno,
            'format_source_line': format_source_line,
            'format_function_plus_offset': format_function_plus_offset,
            'str_or_unknown': str_or_unknown,
            'format_cpu_type': format_cpu_type,
            'format_cpu_vendor': format_cpu_vendor,
            'format_cpu_name': format_cpu_name,
            'format_platform_type': format_platform_type,
            'format_os_version': format_os_version,
            'format_distribution_id': format_distribution_id,
            'format_distribution_codename': format_distribution_codename,
            'format_milliseconds': format_milliseconds,
            'format_seconds': format_seconds,
            'format_size': format_size,
            'format_trust_level': format_trust_level,
            'format_memory_usagetype': format_memory_usagetype,
            'format_gl_extension_name': format_gl_extension_name,
            'format_version_number': format_version_number,
            'format_thread': format_thread,
            'thread_extra_info': thread_extra_info,
            'format_stack_frame': format_stack_frame,
            'context': web_context(req, crashobj.resource, absurls=absurls),
            'preserve_newlines': self.must_preserve_newlines,
            'empty': empty
        }
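        # Locate the report files (XML, text, HTML) and raw dump files that
        # belong to this crash.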
        xmlfile = None
        xmlfile_from_db = None
        minidumpfile = None
        coredumpfile = None
        if crashobj['minidumpreportxmlfile']:
            xmlfile_from_db = crashobj['minidumpreportxmlfile']
            xmlfile = self._get_dump_filename(crashobj,
                                              'minidumpreportxmlfile')
            reporttextfile = self._get_dump_filename(crashobj,
                                                     'minidumpreporttextfile')
            reporthtmlfile = self._get_dump_filename(crashobj,
                                                     'minidumpreporthtmlfile')
        elif crashobj['coredumpreportxmlfile']:
            xmlfile_from_db = crashobj['coredumpreportxmlfile']
            xmlfile = self._get_dump_filename(crashobj,
                                              'coredumpreportxmlfile')
            reporttextfile = self._get_dump_filename(crashobj,
                                                     'coredumpreporttextfile')
            reporthtmlfile = self._get_dump_filename(crashobj,
                                                     'coredumpreporthtmlfile')
        coredumpfile = self._get_dump_filename(crashobj, 'coredumpfile')
        minidumpfile = self._get_dump_filename(crashobj, 'minidumpfile')
        data['xmlfile_from_db'] = xmlfile_from_db
        data['xmlfile'] = xmlfile
        data['xmlfile_error'] = None
        data['minidump_xml_size'] = 0
        data['coredump_xml_size'] = 0
        data['minidumpfile_size'] = 0
        data['coredumpfile_size'] = 0
        data['xmlfile_size'] = 0
        data['reporttextfile_size'] = 0
        data['reporthtmlfile_size'] = 0
        data['show_debug_info'] = False
        data['parsetime'] = 0
        data['is_64_bit'] = False
        if xmlfile:
            start = time.time()
            if minidumpfile:
                try:
                    data['minidumpfile_size'] = os.path.getsize(minidumpfile)
                    data['minidumpfile'] = MiniDump(minidumpfile)
                except OSError:
                    pass
            if coredumpfile:
                try:
                    data['coredumpfile_size'] = os.path.getsize(coredumpfile)
                except OSError:
                    pass
            if reporttextfile:
                try:
                    data['reporttextfile_size'] = os.path.getsize(
                        reporttextfile)
                except OSError:
                    pass
            if reporthtmlfile:
                try:
                    data['reporthtmlfile_size'] = os.path.getsize(
                        reporthtmlfile)
                except OSError:
                    pass
            if xmlfile:
                try:
                    data['xmlfile_size'] = os.path.getsize(xmlfile)
                except OSError:
                    pass
            if os.path.isfile(xmlfile):
                try:
                    xmlreport = XMLReport(xmlfile)
                    for f in xmlreport.fields:
                        data[f] = XMLReport.ProxyObject(xmlreport, f)
                    data['xmlreport'] = xmlreport
                    data['is_64_bit'] = xmlreport.is_64_bit
                except XMLReport.XMLReportIOError as e:
                    data['xmlfile_error'] = str(e)
            else:
                wrapper = MiniDumpWrapper(data['minidumpfile'])
                for f in wrapper.fields:
                    data[f] = MiniDumpWrapper.ProxyObject(wrapper, f)
                data['xmlreport'] = None
                data['xmlfile_error'] = 'XML file %s does not exist' % xmlfile
            end = time.time()
            data['parsetime'] = end - start
        data['bits'] = 64 if data['is_64_bit'] else 32
        data['addr_format'] = (addr_format_64 if data['is_64_bit']
                               else addr_format_32)
        return data

    def _get_prefs(self, req):
        return {
            'comments_order': req.session.get('ticket_comments_order',
                                              'oldest'),
            'comments_only': req.session.get('ticket_comments_only', 'false')
        }

    def process_request(self, req):
        if crashdump_use_jinja2:
            metadata = {'content_type': 'text/html'}
        else:
            metadata = None

        action = req.args.get('action', 'view')
        if action == 'crash_list':
            page = req.args.getint('page', 1)
            default_max = self.items_per_page
            max = req.args.getint('max')
            limit = as_int(max, default_max,
                           min=0)  # explicit max takes precedence
            offset = (page - 1) * limit

            sort_col = req.args.get('sort', '')
            asc = req.args.getint('asc', 0, min=0, max=1)

            title = ''
            description = ''

            data = {
                'action': 'crash_list',
                'max': limit,
                'numrows': 0,
                'title': title,
                'description': description,
                'message': None,
                'paginator': None
            }

            req_status = req.args.get('status') or 'active'
            #results = CrashDump.query(env=self.env, status=req_status)
            results = CrashDump.query(env=self.env, status=None)
            data['results'] = results

            limit_offset = 0
            need_paginator = limit > 0 and limit_offset
            need_reorder = limit_offset is None
            numrows = len(results)

            paginator = None
            if need_paginator:
                paginator = Paginator(results, page - 1, limit, num_items)
                data['paginator'] = paginator
                if paginator.has_next_page:
                    add_link(req, 'next', report_href(page=page + 1),
                             _('Next Page'))
                if paginator.has_previous_page:
                    add_link(req, 'prev', report_href(page=page - 1),
                             _('Previous Page'))

                pagedata = []
                shown_pages = paginator.get_shown_pages(21)
                for p in shown_pages:
                    pagedata.append([
                        report_href(page=p), None,
                        str(p),
                        _('Page %(num)d', num=p)
                    ])
                fields = ['href', 'class', 'string', 'title']
                paginator.shown_pages = [
                    dict(zip(fields, p)) for p in pagedata
                ]
                paginator.current_page = {
                    'href': None,
                    'class': 'current',
                    'string': str(paginator.page + 1),
                    'title': None
                }
                numrows = paginator.num_items
            data['paginator'] = paginator

            add_script_data(req, {'comments_prefs': self._get_prefs(req)})
            if not crashdump_use_jinja2:
                add_script(req, 'common/js/folding.js')
            add_script(req, 'crashdump/crashdump.js')
            add_stylesheet(req, 'crashdump/crashdump.css')
            return 'list.html', data, metadata

        start = time.time()
        if 'crashuuid' in req.args:
            crashobj = CrashDump.find_by_uuid(self.env, req.args['crashuuid'])
            if not crashobj:
                raise ResourceNotFound(
                    _("Crash %(id)s does not exist.",
                      id=req.args['crashuuid']), _("Invalid crash identifier"))
        elif 'crashid' in req.args:
            crashobj = CrashDump.find_by_id(self.env, req.args['crashid'])
            if not crashobj:
                raise ResourceNotFound(
                    _("Crash %(id)s does not exist.", id=req.args['crashid']),
                    _("Invalid crash identifier"))
        else:
            raise ResourceNotFound(_("No crash identifier specified."))
        end = time.time()
        xhr = req.get_header('X-Requested-With') == 'XMLHttpRequest'

        #req.perm('crash', id, version).require('TICKET_VIEW')
        params = _get_list_from_args(req.args, 'params', None)
        self.log.debug('process_request %s:%s-%s' %
                       (action, type(params), params))
        if action is None or action == 'view':
            data = self._prepare_data(req, crashobj)

            xmlfile = data['xmlfile'] if 'xmlfile' in data else None
            data['dbtime'] = end - start

            field_changes = {}
            data.update({
                'action': action,
                'params': params,
                # Store a timestamp for detecting "mid air collisions"
                'start_time': crashobj['changetime']
            })

            self._insert_crashdump_data(req, crashobj, data,
                                        get_reporter_id(req, 'author'),
                                        field_changes)

            if params is None:
                add_script_data(req, {'comments_prefs': self._get_prefs(req)})
                if not crashdump_use_jinja2:
                    add_script(req, 'common/js/folding.js')
                add_script(req, 'crashdump/crashdump.js')
                add_stylesheet(req, 'crashdump/crashdump.css')

                data['show_delete_crash'] = self.show_delete_crash
                linked_tickets = []
                for tkt_id in crashobj.linked_tickets:
                    a = self._link_ticket_by_id(req, tkt_id)
                    if a:
                        linked_tickets.append(a)
                data['linked_tickets'] = linked_tickets

                return 'report.html', data, metadata
            else:
                if params[0] in [
                        'sysinfo', 'sysinfo_ex', 'fast_protect_version_info',
                        'exception', 'memory_blocks', 'memory_regions',
                        'modules', 'threads', 'stackdumps', 'file_info'
                ]:
                    return params[0] + '.html', data, metadata
                elif params[0] == 'memory_block':
                    block_base = safe_list_get_as_int(params, 1, 0)
                    memory_block = None
                    for b in data['memory_blocks']:
                        if b.base == block_base:
                            memory_block = b
                            break
                    data.update({
                        'memory_block': memory_block,
                        'memory_block_base': block_base
                    })
                    return 'memory_block.html', data, metadata
                elif params[0] == 'stackdump':
                    threadid = safe_list_get_as_int(params, 1, 0)
                    stackdump = None
                    if threadid in data['stackdumps']:
                        stackdump = data['stackdumps'][threadid]
                    self.log.debug('stackdump %s' % stackdump)
                    data.update({'stackdump': stackdump, 'threadid': threadid})
                    return 'stackdump.html', data, metadata
                else:
                    raise ResourceNotFound(
                        _("Invalid sub-page request %(param)s for crash %(uuid)s.",
                          param=str(params[0]),
                          uuid=str(crashobj.uuid)))
        elif action == 'sysinfo_report':
            data = self._prepare_data(req, crashobj)
            data['dbtime'] = end - start

            if 'xmlreport' in data:
                xmlfile = data['xmlreport']
                data['sysinfo_report'] = None
                if isinstance(xmlfile, XMLReport) or \
                        (isinstance(xmlfile, basestring) and
                         os.path.isfile(xmlfile)):
                    try:
                        data['sysinfo_report'] = SystemInfoReport(
                            xmlreport=xmlfile)
                    except SystemInfoReport.SystemInfoReportException as e:
                        data['xmlfile_error'] = str(e)
                else:
                    data['xmlfile_error'] = _(
                        "XML file %(file)s is unavailable", file=xmlfile)

            data.update({
                'action': action,
                'params': params,
                # Store a timestamp for detecting "mid air collisions"
                'start_time': crashobj['changetime']
            })

            if params is None:
                add_script_data(req, {'comments_prefs': self._get_prefs(req)})
                if not crashdump_use_jinja2:
                    add_script(req, 'common/js/folding.js')
                add_script(req, 'crashdump/crashdump.js')
                add_stylesheet(req, 'crashdump/crashdump.css')

                linked_tickets = []
                for tkt_id in crashobj.linked_tickets:
                    a = self._link_ticket_by_id(req, tkt_id)
                    if a:
                        linked_tickets.append(a)
                data['linked_tickets'] = linked_tickets
                return 'sysinfo_report.html', data, metadata
            else:
                if params[0] in [
                        'sysinfo', 'sysinfo_ex', 'sysinfo_opengl',
                        'sysinfo_env', 'sysinfo_terra4d_dirs', 'sysinfo_cpu',
                        'sysinfo_locale', 'sysinfo_network', 'sysinfo_rawdata'
                ]:
                    return params[0] + '.html', data, metadata
                else:
                    raise ResourceNotFound(
                        _("Invalid sub-page request %(param)s for crash %(uuid)s.",
                          param=str(params[0]),
                          uuid=str(crashobj.uuid)))

        elif action == 'systeminfo_raw':
            data = self._prepare_data(req, crashobj)

            xmlfile = data['xmlfile'] if 'xmlfile' in data else None
            data['dbtime'] = end - start

            fast_protect_system_info = data.get('fast_protect_system_info')
            if fast_protect_system_info:
                if crashobj['crashhostname']:
                    filename = "%s_%s.terra4d-system-info" % (str(
                        crashobj.uuid), str(crashobj['crashhostname']))
                else:
                    filename = "%s.terra4d-system-info" % str(crashobj.uuid)
                if fast_protect_system_info.rawdata:
                    return self._send_data(
                        req,
                        fast_protect_system_info.rawdata.raw,
                        filename=filename)
            raise ResourceNotFound(
                _("No system information available for crash %(uuid)s.",
                  uuid=str(crashobj.uuid)))

        elif action == 'delete':
            add_script_data(req, {'comments_prefs': self._get_prefs(req)})
            add_script(req, 'crashdump/crashdump.js')
            add_stylesheet(req, 'crashdump/crashdump.css')

            data = {'id': crashobj.id, 'uuid': crashobj.uuid}
            crashobj.delete(self.dumpdata_dir)
            return 'deleted.html', data, metadata

        elif action == 'minidump_raw':
            return self._send_file(req, crashobj, 'minidumpfile')
        elif action == 'minidump_text':
            return self._send_file(req, crashobj, 'minidumpreporttextfile')
        elif action == 'minidump_xml':
            return self._send_file(req, crashobj, 'minidumpreportxmlfile')
        elif action == 'minidump_html':
            return self._send_file(req, crashobj, 'minidumpreporthtmlfile')
        elif action == 'coredump_raw':
            return self._send_file(req, crashobj, 'coredumpfile')
        elif action == 'coredump_text':
            return self._send_file(req, crashobj, 'coredumpreporttextfile')
        elif action == 'coredump_xml':
            return self._send_file(req, crashobj, 'coredumpreportxmlfile')
        elif action == 'coredump_html':
            return self._send_file(req, crashobj, 'coredumpreporthtmlfile')
        elif action == 'raw':
            if crashobj['minidumpfile']:
                return self._send_file(req, crashobj, 'minidumpfile')
            elif crashobj['coredumpfile']:
                return self._send_file(req, crashobj, 'coredumpfile')
        elif action == 'xml':
            if crashobj['minidumpreportxmlfile']:
                return self._send_file(req, crashobj, 'minidumpreportxmlfile')
            elif crashobj['coredumpreportxmlfile']:
                return self._send_file(req, crashobj, 'coredumpreportxmlfile')
        elif action == 'html':
            if crashobj['minidumpreporthtmlfile']:
                return self._send_file(req, crashobj, 'minidumpreporthtmlfile')
            elif crashobj['coredumpreporthtmlfile']:
                return self._send_file(req, crashobj, 'coredumpreporthtmlfile')
        elif action == 'text':
            if crashobj['minidumpreporttextfile']:
                return self._send_file(req, crashobj, 'minidumpreporttextfile')
            elif crashobj['coredumpreporttextfile']:
                return self._send_file(req, crashobj, 'coredumpreporttextfile')
        raise ResourceNotFound(
            _("Invalid action %(action)s for crash %(uuid)s specified.",
              action=str(action),
              uuid=str(crashobj.uuid)))

    def _send_data(self, req, data, filename):
        # Force browser to download files instead of rendering
        # them, since they might contain malicious code enabling
        # XSS attacks
        req.send_header('Content-Disposition',
                        'attachment; filename=%s' % filename)
        req.send_header('Content-Length', '%i' % len(data))
        req.send(content=data,
                 content_type='application/force-download',
                 status=200)
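    # Usage note (sketch, not part of the original module): the
    # 'systeminfo_raw' action above calls this helper roughly as
    #
    #   self._send_data(req, rawdata.raw,
    #                   filename='%s.terra4d-system-info' % crashobj.uuid)
    #
    # The filename is interpolated into the Content-Disposition header
    # unquoted, so callers are expected to pass safe file names.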

    def _send_file(self, req, crashobj, name):
        filename = self._get_dump_filename(crashobj, name)
        item_name = os.path.basename(filename)
        # Force browser to download files instead of rendering
        # them, since they might contain malicious code enabling
        # XSS attacks
        req.send_header('Content-Disposition',
                        'attachment; filename=%s' % item_name)
        req.send_file(filename, mimetype='application/force-download')

    def _query_link(self, req, name, value, text=None):
        """Return a link to /query with the appropriate name and value"""
        default_query = self.crashlink_query.lstrip('?')
        args = arg_list_to_args(parse_arg_list(default_query))
        args[name] = value
        if name == 'resolution':
            args['status'] = 'closed'
        return tag.a(text or value, href=req.href.query(args))

    def _insert_crashdump_data(self, req, crashobj, data, author_id,
                               field_changes):
        """Insert crashobj data into the template `data`"""
        replyto = req.args.get('replyto')
        data['replyto'] = replyto
        data['version'] = crashobj.resource.version
        data['description_change'] = None
        data['author_id'] = author_id

        if crashobj.resource.version is not None:
            crashobj.values.update(values)

        context = web_context(req, crashobj.resource)

        # Display the owner and reporter links when not obfuscated
        chrome = Chrome(self.env)
        for user in 'reporter', 'owner':
            if chrome.format_author(req, crashobj[user]) == crashobj[user]:
                data['%s_link' % user] = self._query_link(
                    req, user, crashobj[user])
        data['context'] = context

    def _format_datetime(self, req, timestamp):
        try:
            utimestamp = long(timestamp)
            return format_datetime(from_utimestamp(utimestamp))
        except ValueError:
            return str(timestamp)

    def _get_dump_filename(self, crashobj, name):
        item_name = crashobj[name]
        if not item_name:
            return None
        crash_file = os.path.join(self.env.path, self.dumpdata_dir, item_name)
        return crash_file

    def _link_ticket_by_id(self, req, ticketid):
        ret = None
        try:
            ticket = Ticket(self.env, ticketid)
            if 'TICKET_VIEW' in req.perm(ticket.resource):
                ret = \
                    tag.a(
                        '#%s' % ticket.id,
                        class_=ticket['status'],
                        href=req.href.ticket(int(ticket.id)),
                        title=shorten_line(ticket['summary'])
                    )
        except ResourceNotFound:
            pass
        return ret

    def _link_tickets(self, req, tickets):
        if tickets is None:
            return None

        if not isinstance(tickets, basestring):
            self.log.debug('_link_tickets %s invalid type (%s)' %
                           (tickets, type(tickets)))
            return None

        if not tickets:
            return None

        items = []
        for i, word in enumerate(re.split(r'([;,\s]+)', tickets)):
            if i % 2:
                items.append(word)
            elif word:
                ticketid = word
                word = '#%s' % word

                try:
                    ticket = Ticket(self.env, ticketid)
                    if 'TICKET_VIEW' in req.perm(ticket.resource):
                        word = \
                            tag.a(
                                '#%s' % ticket.id,
                                class_=ticket['status'],
                                href=req.href.ticket(int(ticket.id)),
                                title=shorten_line(ticket['summary'])
                            )
                except ResourceNotFound:
                    pass

                items.append(word)

        if items:
            return tag(items)
        else:
            return None

    def _link_crash_by_id(self, req, id):
        ret = None
        try:
            crash = CrashDump(env=self.env, id=id)
            ret = \
                tag.a(
                    'CrashId#%i' % crash.id,
                    class_=crash['status'],
                    href=req.href('crash', crash.uuid),
                    title=crash.uuid
                )
        except ResourceNotFound:
            pass
        return ret

    def _link_crashes_by_id(self, req, ids):
        items = []

        for i, word in enumerate(re.split(r'([;,\s]+)', ids)):
            if i % 2:
                items.append(word)
            elif word:
                crashid = word
                word = 'CrashId#%s' % word

                try:
                    crash = CrashDump(env=self.env, id=crashid)
                    word = \
                        tag.a(
                            'CrashId#%i' % crash.id,
                            class_=crash['status'],
                            href=req.href('crash', crash.uuid),
                            title=crash.uuid
                        )
                except ResourceNotFound:
                    pass
                items.append(word)

        if items:
            return tag(items)
        else:
            return None

    def _link_crash(self, req, uuid, show_uuid=False, sysinfo=False):
        ret = None
        try:
            crash = CrashDump(env=self.env, uuid=uuid)
            if sysinfo:
                href = req.href('crash', crash.uuid, 'sysinfo_report')
                title = 'CrashId#%i (%s)' % (crash.id, crash.uuid)
            else:
                if show_uuid:
                    title = str(crash.uuid)
                else:
                    title = 'CrashId#%i (%s)' % (crash.id, crash.uuid)
                href = req.href('crash', crash.uuid)
            if show_uuid:
                ret = \
                    tag.a(
                        str(crash.uuid),
                        class_=crash['status'],
                        href=href,
                        title=title,
                        style="white-space: nowrap"
                    )
            else:
                ret = \
                    tag.a(
                        'CrashId#%i' % crash.id,
                        class_=crash['status'],
                        href=href,
                        title=crash.uuid
                    )
        except ResourceNotFound:
            pass
        return ret

    # IAdminPanelProvider methods
    def get_admin_panels(self, req):
        if req.perm.has_permission('TRAC_ADMIN'):
            yield ('crashdump', 'Crash dump', 'maintenance', 'Maintenance')

    def render_admin_panel(self, req, cat, page, path_info):
        assert req.perm.has_permission('TRAC_ADMIN')

        action = req.args.get('action', 'view')
        if req.method == 'POST':
            confirm = req.args.get('confirm', 0)
            purge_threshold_str = req.args.get('purge_threshold', '')
            purge_threshold = user_time(req, parse_date, purge_threshold_str,
                                        hint='datetime') \
                if purge_threshold_str else None

            if not confirm:
                self.log.debug('render_admin_panel purge not yet confirmed')
                crashes = None
                if purge_threshold is not None:
                    crashes = CrashDump.query_old_data(self.env,
                                                       purge_threshold)
                data = {
                    'datetime_hint': get_datetime_format_hint(req.lc_time),
                    'purge_threshold': purge_threshold_str,
                    'purge_crashes': crashes
                }
                return 'crashdump_admin_%s.html' % page, data
            elif confirm == 'no':
                self.log.debug('render_admin_panel purge canceled')
                req.redirect(req.href.admin(cat, page))
            elif confirm == 'yes':
                self.log.debug('render_admin_panel purge confirmed')
                req.redirect(req.href.admin(cat, page))
        else:
            now = datetime.now(req.tz)

            purge_threshold = datetime(now.year, now.month, now.day, 0)
            purge_threshold -= timedelta(days=365)
            data = {
                'datetime_hint': get_datetime_format_hint(req.lc_time),
                'purge_threshold': purge_threshold,
                'purge_crashes': None,
            }
            self.log.debug('render_admin_panel %s: %s' % (page, data))
            return 'crashdump_admin_%s.html' % page, data
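    # Sketch (assumption, not in the original): the `confirm == 'yes'` branch
    # above only redirects.  An actual purge could reuse the helpers already
    # used in this module, e.g.
    #
    #   for crash in CrashDump.query_old_data(self.env, purge_threshold):
    #       crash.delete(self.dumpdata_dir)
    #
    # before redirecting back to the admin page.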
Exemple #21
0
                yield ('sha', self._format_sha_link)

        #######################
        # IRepositoryConnector

        _persistent_cache = BoolOption('git', 'persistent_cache', 'false',
                                       "enable persistent caching of commit tree")

        _cached_repository = BoolOption('git', 'cached_repository', 'false',
                                        "wrap `GitRepository` in `CachedRepository`")

        _shortrev_len = IntOption('git', 'shortrev_len', 7,
                                  "length to which rev SHA sums should be "
                                  "abbreviated (must be >= 4 and <= 40)")

        _git_bin = PathOption('git', 'git_bin', '/usr/bin/git', "path to git executable (relative to trac project folder!)")


        def get_supported_types(self):
                yield ("git", 8)

        def get_repository(self, type, dir, authname):
                """GitRepository factory method"""
                assert type == "git"

                if not self._version:
                        raise TracError("GIT backend not available")
                elif not self._version['v_compatible']:
                        raise TracError("GIT version %s installed not compatible (need >= %s)" %
                                        (self._version['v_str'], self._version['v_min_str']))
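        # For reference (inferred from the option declarations above, not an
        # authoritative schema), the matching trac.ini section would look
        # roughly like:
        #
        #   [git]
        #   persistent_cache = false
        #   cached_repository = false
        #   shortrev_len = 7
        #   git_bin = /usr/bin/git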
Exemple #22
0
class PatchMan(Component):
    implements(ITemplateProvider)

    _git_bin = PathOption('git', 'git_bin', '/usr/bin/git',
                          "path to git executable")
    APPLIED = "APPLIED", "green"
    PENDING = "PENDING", "orange"
    REJECTED = "REJECTED", "red"

    RECIPIENTS = [
        "*****@*****.**", "*****@*****.**"
    ]
    DIFFERENTIAL_URL_KEY = "Differential Revision"

    def getPatchPath(self, patchId):
        filename = "patch" + str(patchId)
        filepath = os.path.join(self.env.path, 'attachments', 'PatchManager',
                                '0.2', filename)
        return filepath

    def addNewPatch(self,
                    FromAddr,
                    Subject,
                    Branch,
                    Date,
                    FileContent,
                    DifferentialURL=""):
        db = self.env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute(
            "INSERT INTO Patches (email, subject, branch, commit_time, submit_time, rejected, differential_url)\
            VALUES (%s, %s, %s, %s, %s, %s, %s) ",
            (FromAddr, Subject, Branch, Date, datetime.today(), "0",
             DifferentialURL))
        id = db.get_last_id(cursor, "Patches")

        filepath = self.getPatchPath(id)
        flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL
        if hasattr(os, 'O_BINARY'):
            flags |= os.O_BINARY
        targetfile = os.fdopen(os.open(filepath, flags, 0666), 'w')
        # file = Attachment(self.env, 'PatchManager', '0.2')
        # tmp_file = Attachment(self.env, file.resource.parent)
        # self.env.log.debug("attachment path: " + str(file.path) + ", " + str(attachment.path))
        # FileContent.file.seek(0, 2)             # seek to end of file
        # size = FileContent.file.tell()
        FileContent.file.seek(0)
        # file.insert(filename, FileContent.file, size)
        with targetfile:
            shutil.copyfileobj(FileContent.file, targetfile)
        db.commit()

        jenkins_url = self.config.get('jenkins', 'url')
        jenkins_user = self.config.get('jenkins', 'user')
        jenkins_passwd = self.config.get('jenkins', 'passwd')
        job_name = self.config.get('jenkins', 'patch_test_job_name')
        jenkins = JenkinsApi(jenkins_url, jenkins_user, jenkins_passwd,
                             self.env.log)

        if self.is_jenkins_enabled():
            jenkins.test_patch(id, job_name, Branch)

        for r in self.RECIPIENTS:
            self.sendMail(
                r, "[PATCH] <%s> %s" %
                (self.cutString(FromAddr, 15), self.cutString(Subject, 40)),
                "New patch submitted to rasdaman.org:\n\nSubject\t %s\nFrom\t %s\nDate\t %s"
                % (Subject, FromAddr, Date), None)
        return

    def cutString(self, s, n):
        if len(s) > n:
            return s[:n] + "..."
        else:
            return s

    # http://kutuma.blogspot.com/2007/08/sending-emails-via-gmail-with-python.html
    def sendMail(self, to, subject, text, attach):
        gmailUser = self.config.get('gmail', 'user')
        gmailPassword = self.config.get('gmail', 'password')

        msg = MIMEMultipart()
        msg['From'] = gmailUser
        msg['To'] = to
        msg['Subject'] = subject
        msg.attach(MIMEText(text))

        if attach is not None:
            part = MIMEBase('application', 'octet-stream')
            part.set_payload(open(attach, 'rb').read())
            Encoders.encode_base64(part)
            part.add_header(
                'Content-Disposition',
                'attachment; filename="%s"' % os.path.basename(attach))
            msg.attach(part)

        try:
            mailServer = smtplib.SMTP("smtp.gmail.com", 587)
            mailServer.ehlo()
            mailServer.starttls()
            mailServer.ehlo()
            mailServer.login(gmailUser, gmailPassword)
            mailServer.sendmail(gmailUser, to, msg.as_string())
            # Should be mailServer.quit(), but that crashes...
            mailServer.close()
        except Exception:
            self.env.log.debug("failed sending email to " + to)

    def sendPatchStatusMail(self, id, patchStatus):
        """Send an email to the submitter of a patch with the given id. The
        subject is of the form 'rasdaman.org: PATCH patchStatus'"""
        db = self.env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute(
            "SELECT email, subject, submit_time FROM Patches WHERE id=%s",
            (str(id),))
        for email, subject, submit_time in cursor:
            # self.env.log.debug("in sendPatchStatusMail: " + email + ' ' + subject + ' ' + submit_time)
            if email and subject and submit_time:
                self.sendMail(
                    email, "rasdaman.org: PATCH " + patchStatus,
                    "Your patch submitted to rasdaman.org on " +
                    str(submit_time) + " has been " + patchStatus +
                    "\n\nPatch description:\n" + str(subject), None)

    def checkPatchStatus(self, repo_patches, email, subject, branch):
        if email is None or subject is None:
            return self.PENDING
        r = re.compile(r'\[PATCH[^\]]*\]( *\[[^\]]*\])* *(.*)', re.DOTALL)
        match = r.match(subject)
        if match is not None:
            subject = match.group(2)
        key = self.getPatchUID(email, subject, branch)
        if key not in repo_patches:
            key = self.getPatchUID(email, subject, 'master')

        if key in repo_patches:
            return self.APPLIED
        else:
            return self.PENDING

    def getPatchUID(self, author, message, branch):
        author = author.replace('"', '')
        return self._deleteExtraWhiteSpace(author + ":" + message + " (" +
                                           branch + ")").strip()

    def listBranches(self):
        results = deque()

        repDir = self.config.get('trac', 'repository_dir')
        gitBin = self.config.get('git', 'git_bin')

        #REPO should be repDir if everything is fine with trac.ini
        REPO = repDir
        output = self.getSystemOutput('cd ' + REPO + '; ' + gitBin + ' branch')

        res = ""
        results.append({'branch': 'master'})
        for branch in output:
            temp = branch[2:-1]
            if temp != 'master':
                results.append({'branch': temp})

        return results

    def listPatches(self, req, applied, pending, rejected, offset=0, count=10):
        result = deque()
        repo_patches = dict()
        subject_patches = dict()

        repo = self.env.get_repository()

        rev = repo.get_youngest_rev()

        while rev is not None:
            t = repo.get_changeset(rev)
            properties = t.get_properties().get("git-author")
            branch = ""
            self.env.log.debug("branches: %s" % t.get_branches())
            for tmp in t.get_branches():
                branch, head = tmp
                msg = t.message
                end = msg.find("\n")
                if end != -1:
                    msg = msg[:end]

                self.env.log.debug(
                    "branch: %s\n author: %s\n properties: %s\n msg: %s\n" %
                    (branch, t.author, properties, msg))

                if properties is None:
                    key = self.getPatchUID(t.author, msg, branch)
                else:
                    key = self.getPatchUID(properties[0], msg, branch)
                repo_patches[key] = True
                subject_patches[key] = t.message
            # Advance to the previous revision once per changeset, outside
            # the branch loop, so the walk terminates even for changesets
            # without branch information.
            rev = repo.previous_rev(rev)

        db = self.env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute(
            "SELECT id, email, subject, branch, commit_time, submit_time, rejected, test_status, test_url, differential_url FROM Patches ORDER BY submit_time DESC LIMIT %s OFFSET %s",
            (count, offset))

        jenkins_url = self.config.get('jenkins', 'url')
        jenkins_user = self.config.get('jenkins', 'user')
        jenkins_passwd = self.config.get('jenkins', 'passwd')
        jenkins = JenkinsApi(jenkins_url, jenkins_user, jenkins_passwd,
                             self.env.log)
        job_name = self.config.get('jenkins', 'patch_test_job_name')

        test_status = {}

        if self.is_jenkins_enabled():
            test_status = jenkins.get_test_patch_map(job_name)
            self.env.log.debug(test_status)

        update_cursor = db.cursor()
        refreshPage = False

        for id, email, subject, branch, commit_time, submit_time, rejected_att, test, test_url, differential_url in cursor:
            if not branch:
                branch = 'master'
            if rejected_att == 1:
                status = self.REJECTED
            else:
                status = self.checkPatchStatus(repo_patches, email, subject,
                                               branch)
            if applied is not None and status != self.APPLIED:
                continue
            if pending is not None and status != self.PENDING:
                continue
            if rejected is not None and status != self.REJECTED:
                continue

            ind = submit_time.find(".")
            if ind != -1:
                submit_time = submit_time[:ind]

            r = re.compile(r'\[PATCH[^\]]*\] +(.*)', re.DOTALL)
            match = r.match(subject)
            if match is not None:
                tmp = match.group(1)
            else:
                tmp = subject
            key = self.getPatchUID(email, tmp, branch)
            if key not in subject_patches:
                key = self.getPatchUID(email, tmp, 'master')

            if key in subject_patches:
                subj = subject_patches[key]
            else:
                subj = subject

            if (status == self.PENDING) and (id in test_status):
                refreshPage |= bool(test_status[id]['building'])
                if (not bool(test_status[id]['building'])) and (test is None):
                    test = test_status[id]['buildStatus']
                    test_url = test_status[id]['url']
                    update_cursor.execute(
                        "UPDATE Patches SET test_status=%s WHERE id=%s",
                        (test, id))
                    update_cursor.execute(
                        "UPDATE Patches SET test_url=%s WHERE id=%s",
                        (test_url, id))

            test_status_color = 'red'
            if test == 'SUCCESS':
                test_status_color = 'green'

            result.append({
                'id': id,
                'email': self.encodeEmail(email, req),
                'subject': subj,
                'branch': branch,
                'commit_time': commit_time,
                'submit_time': submit_time,
                'status': status[0],
                'status_color': status[1],
                'test': test,
                'test_url': test_url,
                'test_status_color': test_status_color,
                'differential_url': differential_url
            })
        db.commit()
        return result, refreshPage

    def encodeEmail(self, email, req):
        result = ""
        if email is None:
            return result
        if 'anonymous' == req.perm.username:
            ind = email.find("<")
            if ind != -1:
                email = email[:ind]
        for c in email:
            result += chr(ord(c) + 1)
        return result
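    # Illustration (hypothetical address): encodeEmail shifts every character
    # up by one code point as a light obfuscation, and for anonymous users it
    # first drops the '<...>' part, e.g. encodeEmail('Bob <b@x>', req) for an
    # anonymous request returns 'Cpc!' (from the remaining 'Bob ').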

    def patchCount(self):
        db = self.env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute("SELECT count(id) as cnt FROM Patches")
        result = cursor.fetchone()[0]
        return result

    def _getAgreementText(self):
        path = self.get_htdocs_dirs()
        f = open(path[0][1] + '/agreement.txt')
        result = f.read()
        f.close()
        return result

    # ITemplateProvider methods
    def get_templates_dirs(self):
        from pkg_resources import resource_filename
        return [resource_filename(__name__, 'templates')]

    def get_htdocs_dirs(self):
        from pkg_resources import resource_filename
        return [('patchmanager', resource_filename(__name__, 'htdocs'))]

    def _getPatchDetails(self, emailContent):
        if (type(emailContent) == str):
            msg = PatchModel(emailContent)
        else:
            msg = PatchModel(emailContent.file.read())

        return {
            'From': msg.getFrom(),
            'Subject': msg.getSubject(),
            'Date': msg.getDate(),
            self.DIFFERENTIAL_URL_KEY: msg.getReviewUrl()
        }

    def _getBundleDetails(self, bundle, selectbranch):

        _bundle_file = bundle.file
        _bundle_file.seek(0)
        _rep_dir = self.config.get('trac', 'repository_dir')
        _git_bin = self.config.get('git', 'git_bin')
        _cmd = ""

        # Temporarily store the bundle file
        _temp_bundle_file = tempfile.NamedTemporaryFile()
        _temp_bundle_file.write(_bundle_file.read())
        _temp_bundle_file.seek(0)
        _bundle_path = _temp_bundle_file.name
        # DEBUG
        # _bundle_file.seek(0)
        # self.env.log.debug("BUNDLE FILE: " + _bundle_file.read(500))
        # self.env.log.debug("TEMP BUNDLE FILE: " + _temp_bundle_file.read(500))

        # Clone the repo for testing
        _temp_dir = tempfile.mkdtemp()
        _cmd = "cd " + _temp_dir + "; " + _git_bin + " clone -b " + selectbranch + " " + _rep_dir + " " + _temp_dir
        self.env.log.debug("terminal call: " + _cmd)
        os.system(_cmd)

        # Check there is a single commit inside the bundle
        _cmd = "cd " + _temp_dir + "; test $( " + _git_bin + " bundle list-heads " + _bundle_path + " | wc -l ) = 1"
        self.env.log.debug("terminal call:" + _cmd)
        if os.system(_cmd) != os.EX_OK:
            raise TracError(
                "The uploaded bundle contains more than one commit.")
        # _temp_bundle_file.seek(0)

        # Need to apply the bundle and fetch the details there (From, To, Date, etc.)
        # checkout to a tmp branch (otherwise git-fetch won't apply)
        _cmd = "cd " + _temp_dir + "; " + _git_bin + " checkout -b tmp "
        self.env.log.debug(("terminal call: " + _cmd + " 2>&1"))
        os.system(_cmd)
        # apply the bundle
        _cmd = "cd " + _temp_dir + "; " + _git_bin + " fetch " + _bundle_path + " " + selectbranch + ":" + selectbranch
        self.env.log.debug(("terminal call: " + _cmd + " 2>&1"))
        messages = self.getSystemOutput(_cmd + " 2>&1")
        self.env.log.debug(self.toString(messages))

        # patch the bundle's commit and extract metadata
        _cmd = "cd " + _temp_dir + "; " + _git_bin + " show " + selectbranch + " HEAD --pretty=email"
        self.env.log.debug(("terminal call: " + _cmd + " 2>&1"))
        messages = self.getSystemOutput(_cmd + " 2>&1")
        _bundle_patch = self.toString(messages)
        # self.env.log.debug("Bundle diffs: " + _bundle_patch)

        # cleaning
        _temp_bundle_file.close()
        os.system("rm -r " + _temp_dir)

        return self._getPatchDetails(_bundle_patch)

    def _deleteExtraWhiteSpace(self, string1):
        result = StringIO.StringIO()
        lastSpace = False
        for i in range(len(string1)):
            if (not (string1[i] in string.whitespace)):
                result.write(string1[i])
            elif (not lastSpace):
                result.write(" ")
            lastSpace = string1[i] in string.whitespace
        return result.getvalue()
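    # Illustration: _deleteExtraWhiteSpace collapses any run of whitespace to
    # a single space, e.g. 'a  b\t c' -> 'a b c'.  It is used both for the
    # license-agreement comparison and for building patch UIDs.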

    def getAttachment(self, id):
        # Look up the stored patch attachment, falling back to the legacy
        # 0.1 parent resource if the 0.2 one does not exist.
        for version in ('0.2', '0.1'):
            try:
                return Attachment(self.env, 'PatchManager', version,
                                  "patch" + str(id))
            except Exception:
                pass
        return None

    def processUploadPatch(self, req):
        if 'agree' not in req.args or req.args['agree'] != 'on':
            raise TracError('Please accept the license agreement')

        if 'agreement' not in req.args:
            raise TracError('Please accept the license agreement')

        uploadedAgreement = self._deleteExtraWhiteSpace(req.args['agreement'])
        localAgreement = self._deleteExtraWhiteSpace(self._getAgreementText())

        if (uploadedAgreement != localAgreement):
            raise TracError(
                'Uploaded agreement differs from our local agreement. Please contact admin!'
            )

        file_name = self.getFileName(req.args['patchfile'])
        self.env.log.debug("Name of the uploaded file: " + file_name)
        selectbranch = req.args["selectbranch"]

        data = {}
        if re.match(r'.*\.patch', file_name):
            data.update(self._getPatchDetails(req.args["patchfile"]))
        elif re.match(r'.*\.bundle', file_name):
            data.update(
                self._getBundleDetails(req.args["patchfile"], selectbranch))
        else:
            raise TracError(
                "Please upload a file with either .patch or .bundle extension."
            )

        if data['From'] is None:
            raise TracError("The patch doesn't have any author credentials")
        if data['Subject'] is None:
            raise TracError("The patch doesn't have any description")

        subject = data['Subject']
        r = re.compile(r'\[PATCH[^\]]*\] +ticket:[0-9]+ .+', re.DOTALL)
        match = r.match(subject)
        if match is None and selectbranch == 'master':
            raise TracError(
                "The subject of the patch is invalid; Please edit it so that the subject starts with ticket:NUMBER, where NUMBER is a valid ticket number on the rasdaman.org tracker."
            )

        self.env.log.debug("A new patch was submitted! Getting details...")
        self.addNewPatch(data['From'], data['Subject'],
                         req.args["selectbranch"], data['Date'],
                         req.args["patchfile"],
                         data[self.DIFFERENTIAL_URL_KEY])
        return {'page': 'addpatchdetails.html', 'data': data}

    def automatic_test_results(self):
        jenkins_url = self.config.get('jenkins', 'url')
        jenkins_user = self.config.get('jenkins', 'user')
        jenkins_passwd = self.config.get('jenkins', 'passwd')
        job_name = self.config.get('jenkins', 'automatic_test_job_name')
        jenkins = JenkinsApi(jenkins_url, jenkins_user, jenkins_passwd,
                             self.env.log)

        tests = jenkins.get_automatic_build_map(job_name)

        keys = sorted(tests.keys(), reverse=True)
        result = list()
        for k in keys:
            result.append(tests[k])

        return result

    def processDeletePatch(self, req):
        if 'TRAC_ADMIN' not in req.perm:
            raise TracError(
                'You do not have enough privileges to delete a patch!')
        db = self.env.get_db_cnx()
        cursor = db.cursor()
        for id in self.getIDs(req.args['select']):
            self.sendPatchStatusMail(id, "DELETED")
            cursor.execute("DELETE FROM Patches WHERE id=" + str(id))
            patchpath = self.getPatchPath(id)
            os.remove(patchpath)
        db.commit()

    def processDownloadPatch(self, req):
        tempDir = tempfile.mkdtemp("patch")
        path = os.path.join(self.env.path, 'attachments', 'PatchManager',
                            '0.2')
        cmd = "cd " + tempDir + "; tar -czf archive.tgz -C " + path + " "

        for id in self.getIDs(req.args['select']):
            cmd += "patch" + str(id) + " "
        self.env.log.debug("executing " + cmd)
        os.system(cmd)
        req.send_header('Content-Disposition',
                        'attachment; filename="patches.tgz"')
        req.send_file(tempDir + "/archive.tgz", 'application/x-tar-gz')

    def getIDs(self, args):
        if isinstance(args, basestring):
            return [args]
        else:
            return args

    def getFileName(self, arg):
        if arg.filename:
            return arg.filename
        else:
            raise TracError("This resource has no filename.")

    def getSystemOutput(self, cmd):
        Messages = []
        proc = os.popen(cmd, "r")
        for line in proc:
            Messages.append(line)
        status = proc.close()
        return Messages

    def processApplyPatch(self, req):
        if 'TRAC_ADMIN' not in req.perm:
            raise TracError(
                'You do not have enough privileges to apply a patch!')

        tempDir = tempfile.mkdtemp("patch")
        os.removedirs(tempDir)

        repDir = self.config.get('trac', 'repository_dir')
        gitBin = self.config.get('git', 'git_bin')
        # git commands
        gitAm = gitBin + ' am -3 --ignore-whitespace --whitespace=fix '  # see LSIS ticket #45
        gitBranch = gitBin + ' branch '
        gitClone = gitBin + ' clone '
        gitCo = gitBin + ' checkout '
        gitFetch = gitBin + ' fetch '
        gitLog = gitBin + ' log '
        gitPush = gitBin + ' push '

        log = ""

        for id in self.getIDs(req.args['select']):
            try:
                patch_path = self.getPatchPath(id)
            except RuntimeError:
                print ""

            try:
                db = self.env.get_db_cnx()
                cursor = db.cursor()
                cursor.execute(
                    "SELECT email, subject, branch, submit_time FROM Patches "
                    "WHERE id=%s", (str(id),))
                for email, subject, branch, submit_time in cursor:
                    if not branch:
                        branch = 'master'

                    tempDir = tempfile.mkdtemp('tbranch' + str(id))
                    os.removedirs(tempDir)

                    self.env.log.debug("terminal call: " + gitClone + " -b " +
                                       branch + " " + repDir + " " + tempDir)
                    os.system(gitClone + " -b " + branch + " " + repDir + " " +
                              tempDir)

                    # Need to distinguish between an email-patch and a binary bundle:
                    _bundle_head_pattern = "git bundle"
                    _cmd = "head -n 1 " + patch_path + " | grep \"" + _bundle_head_pattern + "\""
                    if os.system(_cmd) != os.EX_OK:
                        self.env.log.debug("applying patch...")
                        self.env.log.debug("terminal call2: " + "cd " +
                                           tempDir + "; " + gitAm +
                                           patch_path + " 2>&1")

                        messages = self.getSystemOutput("cd " + tempDir +
                                                        "; " + gitAm +
                                                        patch_path + " 2>&1")
                        log += self.toString(messages)
                    else:
                        self.env.log.debug("applying git bundle...")
                        _tmp_branch = "tmp"
                        # apply the bundle
                        messages = self.getSystemOutput("cd " + tempDir +
                                                        "; " + gitCo + " -b " +
                                                        _tmp_branch + " 2>&1")
                        log += self.toString(messages)
                        log += "\n"

                        messages = self.getSystemOutput("cd " + tempDir +
                                                        "; " + gitFetch +
                                                        patch_path + " " +
                                                        branch + ":" + branch +
                                                        " 2>&1")
                        log += self.toString(messages)

                        # get back to master and clean up
                        log += "\n"
                        messages = self.getSystemOutput("cd " + tempDir +
                                                        "; " + gitCo +
                                                        " master 2>&1")
                        log += self.toString(messages)
                        log += "\n"

                        messages = self.getSystemOutput("cd " + tempDir +
                                                        "; " + gitBranch +
                                                        " -D " + _tmp_branch +
                                                        " 2>&1")
                        log += self.toString(messages)
                        # Log (DEBUG)
                        log += "\n"
                        messages = self.getSystemOutput("cd " + tempDir +
                                                        "; " + gitLog +
                                                        " --graph -3 " +
                                                        branch + " 2>&1")
                        log += self.toString(messages)

                    # Push changes to public repo
                    try:
                        self.env.log.debug("Applying patch on %s" % branch)

                        messages = self.getSystemOutput("cd " + tempDir +
                                                        "; " + gitPush +
                                                        " origin " + branch +
                                                        " 2>&1")
                        log += "\n" + self.toString(messages)

                        self.env.log.debug("Patch applied on %s" % branch)

                        os.system("rm -Rf " + tempDir)
                        self.setRejected(id, 0)

                        if email and subject and submit_time:
                            subjectCut = str(subject)[8:80]
                            if len(str(subject)) > 80:
                                subjectCut = subjectCut + "..."
                            self.sendMail(
                                "*****@*****.**",
                                "Patch applied: " + subjectCut,
                                "New patch has been applied in rasdaman:\n\n" +
                                str(subject) + "\n\nSubmitted on: " +
                                str(submit_time) + "\nSubmitted by: " +
                                str(email), None)
                            self.sendPatchStatusMail(id, "APPLIED")
                    except RuntimeError:
                        self.env.log.error("Patch was not applied")
            except RuntimeError:
                self.env.log.debug("Reached here - patch check failed")

        return {
            'page': 'applypatchlog.html',
            'data': {
                'messages': log.strip()
            }
        }

    def toString(self, msgs):
        res = ""
        for m in msgs:
            res += str(m)
        return res.strip() + "\n\n"

    def setRejected(self, id, flag):
        db = self.env.get_db_cnx()
        cursor = db.cursor()
        try:
            cursor.execute("UPDATE Patches SET rejected=%s WHERE id=%s",
                           (flag, str(id)))
        except Exception:
            self.env.log.error("Error updating rejected flag for patch %s" % id)
        db.commit()

    def processRejectPatch(self, req):
        if 'TRAC_ADMIN' not in req.perm:
            raise TracError(
                'You do not have enough privileges to reject a patch!')
        Messages = []
        for id in self.getIDs(req.args['select']):
            self.setRejected(id, 1)
            self.sendPatchStatusMail(id, "REJECTED")
        return {'data': {'messages': Messages}}

    def processTryApplyPatch(self, req):
        if 'TRAC_ADMIN' not in req.perm:
            raise TracError(
                'You do not have enough privileges to apply a patch!')

        repDir = self.config.get('trac', 'repository_dir')
        gitBin = self.config.get('git', 'git_bin')
        # git commands
        gitAm = gitBin + ' am -3 --ignore-whitespace --whitespace=fix '  # see LSIS ticket #45
        gitClone = gitBin + ' clone '
        gitCo = gitBin + ' checkout '
        Messages = []
        for id in self.getIDs(req.args['select']):
            try:
                patch_path = self.getPatchPath(id)
            except RuntimeError:
                print ""

            try:
                self.setRejected(id, 0)
                # self.sendPatchStatusMail(id, "APPLIED")

                # send an email to rasdaman-dev
                db = self.env.get_db_cnx()
                cursor = db.cursor()
                cursor.execute(
                    "SELECT email, subject, branch, submit_time FROM Patches "
                    "WHERE id=%s", (str(id),))
                for email, subject, branch, submit_time in cursor:
                    if not branch:
                        branch = 'master'

                    self.env.log.debug("apply patch to branch: " + branch)

                    tempDir = tempfile.mkdtemp('tbranch' + str(id))
                    os.removedirs(tempDir)

                    self.env.log.debug("terminal call: " + gitClone + " -b " +
                                       branch + " " + repDir + " " + tempDir)
                    messages = self.getSystemOutput(gitClone + " -b " +
                                                    branch + " " + repDir +
                                                    " " + tempDir)
                    Messages.extend(messages)

                    messages = self.getSystemOutput("cd " + tempDir + "; " +
                                                    gitAm + patch_path +
                                                    " 2>&1")
                    Messages.extend(messages)
                    self.env.log.debug("terminal call2: " + "cd " + tempDir +
                                       "; " + gitAm + patch_path + " 2>&1")

                    os.system("rm -Rf " + tempDir)
            except RuntimeError:
                print ""

        return {'data': {'messages': Messages}}

    def process_command(self, patchop, req):
        ind = patchop.find("-")
        if ind != -1:
            req.args.update([("select", patchop[ind + 1:])])
            patchop = patchop[:ind]

        if (patchop == "Upload patch"):
            return self.processUploadPatch(req)
        elif (patchop == "Delete"):
            self.processDeletePatch(req)
            req.redirect(req.href.patchmanager())
        elif (patchop == "Download Selected"):
            self.processDownloadPatch(req)
        elif (patchop == "Apply"):
            return self.processApplyPatch(req)
        elif (patchop == "Try Apply"):
            return self.processTryApplyPatch(req)
        elif (patchop == "Reject"):
            ret = self.processRejectPatch(req)
            req.redirect(req.href.patchmanager())
            return ret
        else:
            raise TracError('Don\'t know how to handle operation: "' +
                            patchop + '"')

    def is_jenkins_enabled(self):
        isEnabled = self.config.get('jenkins', 'enable', 'false')
        return isEnabled.lower() == 'true'
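    # For reference (inferred from the self.config.get() lookups in this
    # component, not from any documented schema), the expected trac.ini
    # sections would look roughly like:
    #
    #   [jenkins]
    #   enable = true
    #   url = https://jenkins.example.org
    #   user = ...
    #   passwd = ...
    #   patch_test_job_name = ...
    #   automatic_test_job_name = ...
    #
    #   [gmail]
    #   user = ...
    #   password = ...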
Exemple #23
0
class Environment(Component, ComponentManager):
    """Trac environment manager.

    Trac stores project information in a Trac environment. It consists
    of a directory structure containing among other things:

    * a configuration file,
    * project-specific templates and plugins,
    * the wiki and ticket attachments files,
    * the SQLite database file (stores tickets, wiki pages...)
      in case the database backend is sqlite

    """

    implements(ISystemInfoProvider)

    required = True

    system_info_providers = ExtensionPoint(ISystemInfoProvider)
    setup_participants = ExtensionPoint(IEnvironmentSetupParticipant)

    components_section = ConfigSection(
        'components', """This section is used to enable or disable components
        provided by plugins, as well as by Trac itself. The component
        to enable/disable is specified via the name of the
        option. Whether its enabled is determined by the option value;
        setting the value to `enabled` or `on` will enable the
        component, any other value (typically `disabled` or `off`)
        will disable the component.

        The option name is either the fully qualified name of the
        components or the module/package prefix of the component. The
        former enables/disables a specific component, while the latter
        enables/disables any component in the specified
        package/module.

        Consider the following configuration snippet:
        {{{
        [components]
        trac.ticket.report.ReportModule = disabled
        acct_mgr.* = enabled
        }}}

        The first option tells Trac to disable the
        [wiki:TracReports report module].
        The second option instructs Trac to enable all components in
        the `acct_mgr` package. Note that the trailing wildcard is
        required for module/package matching.

        To view the list of active components, go to the ''Plugins''
        page on ''About Trac'' (requires `CONFIG_VIEW`
        [wiki:TracPermissions permissions]).

        See also: TracPlugins
        """)

    shared_plugins_dir = PathOption(
        'inherit', 'plugins_dir', '',
        """Path to the //shared plugins directory//.

        Plugins in that directory are loaded in addition to those in
        the directory of the environment `plugins`, with this one
        taking precedence.

        Non-absolute paths are relative to the Environment `conf`
        directory.
        """)

    base_url = Option(
        'trac', 'base_url', '', """Reference URL for the Trac deployment.

        This is the base URL that will be used when producing
        documents that will be used outside of the web browsing
        context, like for example when inserting URLs pointing to Trac
        resources in notification e-mails.""")

    base_url_for_redirect = BoolOption(
        'trac', 'use_base_url_for_redirect', False,
        """Optionally use `[trac] base_url` for redirects.

        In some configurations, usually involving running Trac behind
        a HTTP proxy, Trac can't automatically reconstruct the URL
        that is used to access it. You may need to use this option to
        force Trac to use the `base_url` setting also for
        redirects. This introduces the obvious limitation that this
        environment will only be usable when accessible from that URL,
        as redirects are frequently used.
        """)

    secure_cookies = BoolOption(
        'trac', 'secure_cookies', False,
        """Restrict cookies to HTTPS connections.

        When true, set the `secure` flag on all cookies so that they
        are only sent to the server on HTTPS connections. Use this if
        your Trac instance is only accessible through HTTPS.
        """)

    anonymous_session_lifetime = IntOption(
        'trac', 'anonymous_session_lifetime', '90',
        """Lifetime of the anonymous session, in days.

        Set the option to 0 to disable purging old anonymous sessions.
        (''since 1.0.17'')""")

    project_name = Option('project', 'name', 'My Project',
                          """Name of the project.""")

    project_description = Option('project', 'descr', 'My example project',
                                 """Short description of the project.""")

    project_url = Option(
        'project', 'url', '',
        """URL of the main project web site, usually the website in
        which the `base_url` resides. This is used in notification
        e-mails.""")

    project_admin = Option(
        'project', 'admin', '',
        """E-Mail address of the project's administrator.""")

    project_admin_trac_url = Option(
        'project', 'admin_trac_url', '.',
        """Base URL of a Trac instance where errors in this Trac
        should be reported.

        This can be an absolute or relative URL, or '.' to reference
        this Trac instance. An empty value will disable the reporting
        buttons.
        """)

    project_footer = Option(
        'project', 'footer',
        N_('Visit the Trac open source project at<br />'
           '<a href="http://trac.edgewall.org/">'
           'http://trac.edgewall.org/</a>'),
        """Page footer text (right-aligned).""")

    project_icon = Option('project', 'icon', 'common/trac.ico',
                          """URL of the icon of the project.""")

    log_type = ChoiceOption('logging',
                            'log_type',
                            log.LOG_TYPES + log.LOG_TYPE_ALIASES,
                            """Logging facility to use.

        Should be one of (`none`, `file`, `stderr`, `syslog`, `winlog`).""",
                            case_sensitive=False)

    log_file = Option(
        'logging', 'log_file', 'trac.log',
        """If `log_type` is `file`, this should be a path to the
        log-file.  Relative paths are resolved relative to the `log`
        directory of the environment.""")

    log_level = ChoiceOption('logging',
                             'log_level',
                             log.LOG_LEVELS + log.LOG_LEVEL_ALIASES,
                             """Level of verbosity in log.

        Should be one of (`CRITICAL`, `ERROR`, `WARNING`, `INFO`, `DEBUG`).
        """,
                             case_sensitive=False)

    log_format = Option(
        'logging', 'log_format', None, """Custom logging format.

        If nothing is set, the following will be used:

        `Trac[$(module)s] $(levelname)s: $(message)s`

        In addition to regular key names supported by the
        [http://docs.python.org/library/logging.html Python logger library]
        one could use:

        - `$(path)s`     the path for the current environment
        - `$(basename)s` the last path component of the current environment
        - `$(project)s`  the project name

        Note the usage of `$(...)s` instead of `%(...)s` as the latter form
        would be interpreted by the !ConfigParser itself.

        Example:
        `($(thread)d) Trac[$(basename)s:$(module)s] $(levelname)s: $(message)s`
        """)

    def __init__(self, path, create=False, options=[]):
        """Initialize the Trac environment.

        :param path:   the absolute path to the Trac environment
        :param create: if `True`, the environment is created and
                       populated with default data; otherwise, the
                       environment is expected to already exist.
        :param options: A list of `(section, name, value)` tuples that
                        define configuration options
        """
        ComponentManager.__init__(self)

        self.path = os.path.normpath(os.path.normcase(path))
        self.log = None
        self.config = None

        if create:
            self.create(options)
            for setup_participant in self.setup_participants:
                setup_participant.environment_created()
        else:
            self.verify()
            self.setup_config()
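
    # Illustrative usage sketch (not part of the original listing): opening an
    # existing environment vs. creating a new one; '/path/to/env' is a
    # hypothetical path and the option tuple follows the format documented in
    # the constructor docstring above.
    #
    #     from trac.env import Environment
    #     env = Environment('/path/to/env')
    #     env = Environment('/path/to/env', create=True,
    #                       options=[('project', 'name', 'My Project')])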

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.path)

    @lazy
    def name(self):
        """The environment name.

        :since: 1.2
        """
        return os.path.basename(self.path)

    @property
    def env(self):
        """Property returning the `Environment` object, which is often
        required for functions and methods that take a `Component` instance.
        """
        # The cached decorator requires the object have an `env` attribute.
        return self

    @property
    def system_info(self):
        """List of `(name, version)` tuples describing the name and
        version information of external packages used by Trac and plugins.
        """
        info = []
        for provider in self.system_info_providers:
            info.extend(provider.get_system_info() or [])
        return sorted(set(info),
                      key=lambda args: (args[0] != 'Trac', args[0].lower()))

    def get_systeminfo(self):
        """Return a list of `(name, version)` tuples describing the name
        and version information of external packages used by Trac and plugins.

        :since 1.3.1: deprecated and will be removed in 1.5.1. Use
                      system_info property instead.
        """
        return self.system_info

    # ISystemInfoProvider methods

    def get_system_info(self):
        yield 'Trac', self.trac_version
        yield 'Python', sys.version
        yield 'setuptools', setuptools.__version__
        if pytz is not None:
            yield 'pytz', pytz.__version__
        if hasattr(self, 'webfrontend_version'):
            yield self.webfrontend, self.webfrontend_version

    def component_activated(self, component):
        """Initialize additional member variables for components.

        Every component activated through the `Environment` object
        gets three member variables: `env` (the environment object),
        `config` (the environment configuration) and `log` (a logger
        object)."""
        component.env = self
        component.config = self.config
        component.log = self.log

    def _component_name(self, name_or_class):
        name = name_or_class
        if not isinstance(name_or_class, basestring):
            name = name_or_class.__module__ + '.' + name_or_class.__name__
        return name.lower()

    @lazy
    def _component_rules(self):
        _rules = {}
        for name, value in self.components_section.options():
            name = name.rstrip('.*').lower()
            _rules[name] = as_bool(value)
        return _rules

    def is_component_enabled(self, cls):
        """Implemented to only allow activation of components that are
        not disabled in the configuration.

        This is called by the `ComponentManager` base class when a
        component is about to be activated. If this method returns
        `False`, the component does not get activated. If it returns
        `None`, the component only gets activated if it is located in
        the `plugins` directory of the environment.
        """
        component_name = self._component_name(cls)

        rules = self._component_rules
        cname = component_name
        while cname:
            enabled = rules.get(cname)
            if enabled is not None:
                return enabled
            idx = cname.rfind('.')
            if idx < 0:
                break
            cname = cname[:idx]

        # By default, all components in the trac package except
        # in trac.test or trac.tests are enabled
        return component_name.startswith('trac.') and \
               not component_name.startswith('trac.test.') and \
               not component_name.startswith('trac.tests.') or None
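
    # Illustrative sketch of the rule lookup above, using a hypothetical
    # trac.ini [components] section:
    #
    #     [components]
    #     acct_mgr.* = enabled
    #     trac.ticket.report.* = disabled
    #
    # 'acct_mgr.web_ui.loginmodule' resolves to True via the 'acct_mgr'
    # prefix, 'trac.ticket.report.reportmodule' resolves to False, and any
    # other component in the trac package (outside trac.test/trac.tests)
    # falls back to enabled.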

    def enable_component(self, cls):
        """Enable a component or module."""
        self._component_rules[self._component_name(cls)] = True
        super(Environment, self).enable_component(cls)

    @contextmanager
    def component_guard(self, component, reraise=False):
        """Traps any runtime exception raised when working with a component
        and logs the error.

        :param component: the component responsible for any error that
                          could happen inside the context
        :param reraise: if `True`, an error is logged but not suppressed.
                        By default, errors are suppressed.

        """
        try:
            yield
        except TracError as e:
            self.log.warning("Component %s failed with %s", component,
                             exception_to_unicode(e))
            if reraise:
                raise
        except Exception as e:
            self.log.error("Component %s failed with %s", component,
                           exception_to_unicode(e, traceback=True))
            if reraise:
                raise
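
    # Illustrative usage sketch: per-component work is typically wrapped in
    # the guard so that one failing participant does not abort the loop, e.g.
    #
    #     for participant in self.setup_participants:
    #         with self.component_guard(participant):
    #             participant.environment_created()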

    def verify(self):
        """Verify that the provided path points to a valid Trac environment
        directory."""
        try:
            tag = read_file(os.path.join(self.path, 'VERSION')).splitlines()[0]
            if tag != _VERSION:
                raise Exception(
                    _("Unknown Trac environment type '%(type)s'", type=tag))
        except Exception as e:
            raise TracError(
                _("No Trac environment found at %(path)s\n"
                  "%(e)s",
                  path=self.path,
                  e=e))

    @lazy
    def db_exc(self):
        """Return an object (typically a module) containing all the
        backend-specific exception types as attributes, named
        according to the Python Database API
        (http://www.python.org/dev/peps/pep-0249/).

        To catch a database exception, use the following pattern::

            try:
                with env.db_transaction as db:
                    ...
            except env.db_exc.IntegrityError as e:
                ...
        """
        return DatabaseManager(self).get_exceptions()

    @property
    def db_query(self):
        """Return a context manager
        (`~trac.db.api.QueryContextManager`) which can be used to
        obtain a read-only database connection.

        Example::

            with env.db_query as db:
                cursor = db.cursor()
                cursor.execute("SELECT ...")
                for row in cursor.fetchall():
                    ...

        Note that a connection retrieved this way can be "called"
        directly in order to execute a query::

            with env.db_query as db:
                for row in db("SELECT ..."):
                    ...

        :warning: after a `with env.db_query as db` block, though the
          `db` variable is still defined, you shouldn't use it as it
          might have been closed when exiting the context, if this
          context was the outermost context (`db_query` or
          `db_transaction`).

        If you don't need to manipulate the connection itself, this
        can even be simplified to::

            for row in env.db_query("SELECT ..."):
                ...

        """
        return QueryContextManager(self)

    @property
    def db_transaction(self):
        """Return a context manager
        (`~trac.db.api.TransactionContextManager`) which can be used
        to obtain a writable database connection.

        Example::

            with env.db_transaction as db:
                cursor = db.cursor()
                cursor.execute("UPDATE ...")

        Upon successful exit of the context, the context manager will
        commit the transaction. In case of nested contexts, only the
        outermost context performs a commit. However, should an
        exception happen, any context manager will perform a rollback.
        You should *not* call `commit()` yourself within such block,
        as this will force a commit even if that transaction is part
        of a larger transaction.

        Like for its read-only counterpart, you can directly execute a
        DML query on the `db`::

            with env.db_transaction as db:
                db("UPDATE ...")

        :warning: after a `with env.db_transaction as db` block,
          though the `db` variable is still available, you shouldn't
          use it as it might have been closed when exiting the
          context, if this context was the outermost context
          (`db_query` or `db_transaction`).

        If you don't need to manipulate the connection itself, this
        can also be simplified to::

            env.db_transaction("UPDATE ...")

        """
        return TransactionContextManager(self)

    def shutdown(self, tid=None):
        """Close the environment."""
        from trac.versioncontrol.api import RepositoryManager
        RepositoryManager(self).shutdown(tid)
        DatabaseManager(self).shutdown(tid)
        if tid is None:
            log.shutdown(self.log)

    def create(self, options=[]):
        """Create the basic directory structure of the environment,
        initialize the database and populate the configuration file
        with default values.

        If options contains ('inherit', 'file'), default values will
        not be loaded; they are expected to be provided by that file
        or other options.

        :raises TracError: if the base directory of `path` does not exist.
        :raises TracError: if `path` exists and is not empty.
        """
        base_dir = os.path.dirname(self.path)
        if not os.path.exists(base_dir):
            raise TracError(
                _("Base directory '%(env)s' does not exist. Please create it "
                  "and retry.", env=base_dir))

        if os.path.exists(self.path) and os.listdir(self.path):
            raise TracError(_("Directory exists and is not empty."))

        # Create the directory structure
        if not os.path.exists(self.path):
            os.mkdir(self.path)
        os.mkdir(self.htdocs_dir)
        os.mkdir(self.log_dir)
        os.mkdir(self.plugins_dir)
        os.mkdir(self.templates_dir)

        # Create a few files
        create_file(os.path.join(self.path, 'VERSION'), _VERSION + '\n')
        create_file(
            os.path.join(self.path, 'README'),
            'This directory contains a Trac environment.\n'
            'Visit http://trac.edgewall.org/ for more information.\n')

        # Setup the default configuration
        os.mkdir(self.conf_dir)
        config = Configuration(self.config_file_path)
        for section, name, value in options:
            config.set(section, name, value)
        config.save()
        self.setup_config()
        if not any((section, option) == ('inherit', 'file')
                   for section, option, value in options):
            self.config.set_defaults(self)
            self.config.save()

        # Create the sample configuration
        create_file(self.config_file_path + '.sample')
        self._update_sample_config()

        # Create the database
        DatabaseManager(self).init_db()

    @lazy
    def database_version(self):
        """Returns the current version of the database.

        :since 1.0.2:
        """
        return DatabaseManager(self) \
               .get_database_version('database_version')

    @lazy
    def database_initial_version(self):
        """Returns the version of the database at the time of creation.

        In practice, for database created before 0.11, this will
        return `False` which is "older" than any db version number.

        :since 1.0.2:
        """
        return DatabaseManager(self) \
               .get_database_version('initial_database_version')

    @lazy
    def trac_version(self):
        """Returns the version of Trac.
        :since: 1.2
        """
        from trac import core, __version__
        return get_pkginfo(core).get('version', __version__)

    def setup_config(self):
        """Load the configuration file."""
        self.config = Configuration(self.config_file_path,
                                    {'envname': self.name})
        if not self.config.exists:
            raise TracError(
                _("The configuration file is not found at "
                  "%(path)s",
                  path=self.config_file_path))
        self.setup_log()
        plugins_dir = self.shared_plugins_dir
        load_components(self, plugins_dir and (plugins_dir, ))

    @lazy
    def config_file_path(self):
        """Path of the trac.ini file."""
        return os.path.join(self.conf_dir, 'trac.ini')

    @lazy
    def log_file_path(self):
        """Path to the log file."""
        if not os.path.isabs(self.log_file):
            return os.path.join(self.log_dir, self.log_file)
        return self.log_file

    def _get_path_to_dir(self, *dirs):
        path = self.path
        for dir in dirs:
            path = os.path.join(path, dir)
        return os.path.realpath(path)

    @lazy
    def attachments_dir(self):
        """Absolute path to the attachments directory.

        :since: 1.3.1
        """
        return self._get_path_to_dir('files', 'attachments')

    @lazy
    def conf_dir(self):
        """Absolute path to the conf directory.

        :since: 1.0.11
        """
        return self._get_path_to_dir('conf')

    @lazy
    def files_dir(self):
        """Absolute path to the files directory.

        :since: 1.3.2
        """
        return self._get_path_to_dir('files')

    @lazy
    def htdocs_dir(self):
        """Absolute path to the htdocs directory.

        :since: 1.0.11
        """
        return self._get_path_to_dir('htdocs')

    @lazy
    def log_dir(self):
        """Absolute path to the log directory.

        :since: 1.0.11
        """
        return self._get_path_to_dir('log')

    @lazy
    def plugins_dir(self):
        """Absolute path to the plugins directory.

        :since: 1.0.11
        """
        return self._get_path_to_dir('plugins')

    @lazy
    def templates_dir(self):
        """Absolute path to the templates directory.

        :since: 1.0.11
        """
        return self._get_path_to_dir('templates')

    def setup_log(self):
        """Initialize the logging sub-system."""
        self.log, log_handler = \
            self.create_logger(self.log_type, self.log_file_path,
                               self.log_level, self.log_format)
        self.log.addHandler(log_handler)
        self.log.info('-' * 32 + ' environment startup [Trac %s] ' + '-' * 32,
                      self.trac_version)

    def create_logger(self, log_type, log_file, log_level, log_format):
        log_id = 'Trac.%s' % hashlib.sha1(self.path).hexdigest()
        if log_format:
            log_format = log_format.replace('$(', '%(') \
                                   .replace('%(path)s', self.path) \
                                   .replace('%(basename)s', self.name) \
                                   .replace('%(project)s', self.project_name)
        return log.logger_handler_factory(log_type,
                                          log_file,
                                          log_level,
                                          log_id,
                                          format=log_format)

    def get_known_users(self, as_dict=False):
        """Returns information about all known users, i.e. users that
        have logged in to this Trac environment and possibly set their
        name and email.

        By default this function returns an iterator that yields one
        tuple for every user, of the form (username, name, email),
        ordered alpha-numerically by username. When `as_dict` is `True`
        the function returns a dictionary mapping username to a
        (name, email) tuple.

        :since 1.2: the `as_dict` parameter is available.
        """
        return self._known_users_dict if as_dict else iter(self._known_users)
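
    # Illustrative sketch of the two return shapes described above; 'jdoe'
    # is a hypothetical username:
    #
    #     for username, name, email in env.get_known_users():
    #         ...
    #     name, email = env.get_known_users(as_dict=True).get('jdoe',
    #                                                          (None, None))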

    @cached
    def _known_users(self):
        return self.db_query("""
                SELECT DISTINCT s.sid, n.value, e.value
                FROM session AS s
                 LEFT JOIN session_attribute AS n ON (n.sid=s.sid
                  AND n.authenticated=1 AND n.name = 'name')
                 LEFT JOIN session_attribute AS e ON (e.sid=s.sid
                  AND e.authenticated=1 AND e.name = 'email')
                WHERE s.authenticated=1 ORDER BY s.sid
        """)

    @cached
    def _known_users_dict(self):
        return {u[0]: (u[1], u[2]) for u in self._known_users}

    def invalidate_known_users_cache(self):
        """Clear the known_users cache."""
        del self._known_users
        del self._known_users_dict

    def backup(self, dest=None):
        """Create a backup of the database.

        :param dest: Destination file; if not specified, the backup is
                     stored in a file called db_name.trac_version.bak
        """
        return DatabaseManager(self).backup(dest)

    def needs_upgrade(self):
        """Return whether the environment needs to be upgraded."""
        for participant in self.setup_participants:
            with self.component_guard(participant, reraise=True):
                if participant.environment_needs_upgrade():
                    self.log.warning(
                        "Component %s requires environment upgrade",
                        participant)
                    return True
        return False

    def upgrade(self, backup=False, backup_dest=None):
        """Upgrade database.

        :param backup: whether or not to backup before upgrading
        :param backup_dest: name of the backup file
        :return: whether the upgrade was performed
        """
        upgraders = []
        for participant in self.setup_participants:
            with self.component_guard(participant, reraise=True):
                if participant.environment_needs_upgrade():
                    upgraders.append(participant)
        if not upgraders:
            return

        if backup:
            try:
                self.backup(backup_dest)
            except Exception as e:
                raise BackupError(e)

        for participant in upgraders:
            self.log.info("upgrading %s...", participant)
            with self.component_guard(participant, reraise=True):
                participant.upgrade_environment()
            # Database schema may have changed, so close all connections
            dbm = DatabaseManager(self)
            if dbm.connection_uri != 'sqlite::memory:':
                dbm.shutdown()

        self._update_sample_config()
        del self.database_version
        return True
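
    # Illustrative sketch of the upgrade flow above, as driven by admin code
    # (for example `trac-admin <env> upgrade`):
    #
    #     if env.needs_upgrade():
    #         env.upgrade(backup=True)   # back up, then run each upgrader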

    @lazy
    def href(self):
        """The application root path"""
        return Href(urlsplit(self.abs_href.base).path)

    @lazy
    def abs_href(self):
        """The application URL"""
        if not self.base_url:
            self.log.warning("base_url option not set in configuration, "
                             "generated links may be incorrect")
        return Href(self.base_url)

    def _update_sample_config(self):
        filename = self.config_file_path + '.sample'
        if not os.path.isfile(filename):
            return
        config = Configuration(filename)
        config.set_defaults()
        try:
            config.save()
        except EnvironmentError as e:
            self.log.warning("Couldn't write sample configuration file (%s)%s",
                             e, exception_to_unicode(e, traceback=True))
        else:
            self.log.info(
                "Wrote sample configuration file with the new "
                "settings and their default values: %s", filename)
Exemple #24
0
class OidcPlugin(Component):
    """ Authenticate via OpenID Connect

    """
    implements(INavigationContributor, IRequestHandler)

    RETURN_URL_SKEY = 'trac_oidc.return_url'

    client_secret_file = PathOption(
        'trac_oidc', 'client_secret_file', 'client_secret.json',
        """Path to client_secret file.  Relative paths are interpreted
        relative to the ``conf`` subdirectory of the trac environment.""")

    # deprecated
    absolute_trust_root = BoolOption(
        'openid', 'absolute_trust_root', 'true',
        """Whether we should use absolute trust root or by project.""")

    login_managers = ExtensionPoint(ILoginManager)

    def __init__(self):
        # We should show our own "Logout" link only if the stock
        # LoginModule is disabled.
        self.show_logout_link = not is_component_enabled(self.env, LoginModule)

        self.userdb = UserDatabase(self.env)

    # INavigationContributor methods

    def get_active_navigation_item(self, req):
        return 'trac_oidc.login'

    def get_navigation_items(self, req):
        oidc_href = req.href.trac_oidc
        path_qs = req.path_info
        if req.query_string:
            path_qs += '?' + req.query_string

        if not req.authname or req.authname == 'anonymous':
            # Not logged in, show login link
            login_link = tag.a(_('Login using Google'),
                               href=oidc_href('login', return_to=path_qs))
            yield 'metanav', 'trac_oidc.login', login_link

        elif self.show_logout_link:
            # Logged in and LoginModule is disabled, show logout link
            yield ('metanav', 'trac_oidc.login',
                   _('logged in as %(user)s', user=req.authname))
            yield ('metanav', 'trac_oidc.logout',
                   logout_link(oidc_href, return_to=path_qs))

    # IRequestHandler methods

    def match_request(self, req):
        path_info = req.path_info
        if path_info == '/login' and self.show_logout_link:
            # Stock LoginModule is disabled, so handle default /login too
            return True
        return path_info in ('/trac_oidc/login', '/trac_oidc/logout',
                             '/trac_oidc/redirect')

    def process_request(self, req):
        if req.path_info.endswith('/logout'):
            return_url = self._get_return_url(req)
            self._forget_user(req)
            return req.redirect(return_url)
        elif req.path_info.endswith('/login'):
            # Start the login process by redirecting to the OP
            req.session[self.RETURN_URL_SKEY] = self._get_return_url(req)
            authenticator = self._get_authenticator(req)
            return req.redirect(authenticator.get_auth_url(req))
        elif req.path_info.endswith('/redirect'):
            # Finish the login process after redirect from OP
            return_url = req.session.pop(self.RETURN_URL_SKEY, req.abs_href())
            id_token = self._retrieve_id(req)
            if id_token:
                authname = self._find_or_create_session(req, id_token)
                assert authname
                self.log.debug("Logging in as %r", authname)
                self._remember_user(req, authname)
            return req.redirect(return_url)

    # private methods

    def _retrieve_id(self, req):
        """ Retrieve oidc id_token from provider.

        Returns ``None`` if authentication was unsuccessful for any reason.

        """
        authenticator = self._get_authenticator(req)
        try:
            return authenticator.get_identity(req)
        except AuthenticationFailed as ex:
            self.log.info("Authentication failed: %s", ex)
            add_warning(req, "Authentication failed: %s", ex)
        except AuthenticationError as ex:
            self.log.error("Authentication error: %s", ex)
            add_warning(req, "Authentication error: %s", ex)

    def _find_or_create_session(self, req, id_token):
        """ Find or create authenticated session for subject.
        """
        userdb = self.userdb
        authname = userdb.find_session(id_token)
        if not authname:
            # There is no authenticated session for the user,
            # create a new one
            # XXX: should it be configurable whether this happens?
            authname = userdb.create_session(id_token)
            add_notice(
                req,
                _(
                    "Hello! You appear to be new here. "
                    "A new authenticated session with "
                    "username '%(authname)s' has been created for you.",
                    authname=authname))
        return authname

    def _remember_user(self, req, authname):
        for lm in self.login_managers:
            lm.remember_user(req, authname)

    def _forget_user(self, req):
        for lm in self.login_managers:
            lm.forget_user(req)

    def _get_authenticator(self, req):
        conf_dir = os.path.join(self.env.path, 'conf')
        client_secret_file = os.path.join(conf_dir, self.client_secret_file)
        redirect_url = req.abs_href.trac_oidc('redirect')
        openid_realm = self._get_openid_realm(req)
        self.log.debug('openid_realm = %r', openid_realm)
        return Authenticator(client_secret_file, redirect_url, openid_realm,
                             self.log)

    def _get_openid_realm(self, req):
        """ Get the OpenID realm.

        This computes the OpenID realm in exactly the same manner
        that the ``TracAuthOpenID`` plugin does.

        Note that I'm not sure this is really the “right” way to do it,
        but, since we want to get back the same identity URLs from google
        as we did using ``TracAuthOpenID``, here we are.

        """
        href = req.href()
        abs_href = self.env.abs_href()
        if href and abs_href.endswith(href):
            base_url = abs_href[:-len(href)]
        else:  # pragma: NO COVER
            base_url = abs_href

        if self.absolute_trust_root:
            path = '/'
        else:
            path = href
        return base_url + path
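
    # Illustrative worked example with hypothetical URLs: if abs_href is
    # 'https://example.com/trac' and href is '/trac', base_url becomes
    # 'https://example.com'; the realm is then 'https://example.com/' when
    # absolute_trust_root is true, or 'https://example.com/trac' otherwise.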

    @staticmethod
    def _get_return_url(req):
        return_to = req.args.getfirst('return_to', '/')
        # We expect return_to to be a URL relative to the Trac base_path.
        # Be paranoid about this.
        scheme, netloc, path, query, anchor = urlsplit(return_to)
        if scheme or netloc or '..' in path.split('/') or anchor:
            # return url looks suspicious, ignore it.
            return req.abs_href()
        return_url = req.abs_href(path)
        if query:
            return_url += '?' + query
        return return_url
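
    # Illustrative sketch of the check above, with hypothetical values:
    # return_to='/ticket/1?x=1' yields req.abs_href('/ticket/1') + '?x=1',
    # while return_to='https://evil.example/' or '/../secret' falls back to
    # req.abs_href() because it carries a scheme/netloc or a '..' segment.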