Example #1
 def _get_authz_info(self):
     try:
         mtime = os.path.getmtime(self.authz_file)
     except OSError as e:
         if self._authz is not None:
             self.log.error('Error accessing authz file: %s',
                            exception_to_unicode(e))
         self._mtime = mtime = 0
         self._authz = None
         self._users = set()
     if mtime != self._mtime:
         self._mtime = mtime
         rm = RepositoryManager(self.env)
         modules = set(repos.reponame
                       for repos in rm.get_real_repositories())
         if '' in modules and self.authz_module_name:
             modules.add(self.authz_module_name)
         modules.add('')
         self.log.info('Parsing authz file: %s', self.authz_file)
         try:
             self._authz = parse(read_file(self.authz_file), modules)
             self._users = set(user for paths in self._authz.itervalues()
                               for path in paths.itervalues()
                               for user, result in path.iteritems()
                               if result)
         except Exception as e:
             self._authz = None
             self._users = set()
             self.log.error('Error parsing authz file: %s',
                            exception_to_unicode(e))
     return self._authz, self._users
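The nested comprehension above implies that parse() returns a structure of the form {module: {path: {user: bool}}}. A minimal lookup sketch built on that assumption; the helper name and its fallback behaviour are hypothetical and not part of the example:

def _user_may_read(self, username, module, path):
    # Hypothetical helper, assuming the {module: {path: {user: bool}}}
    # layout implied by the comprehension in _get_authz_info() above.
    authz, users = self._get_authz_info()
    if authz is None or username not in users:
        return False
    return bool(authz.get(module, {}).get(path, {}).get(username))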
Example #2
 def _get_authz_info(self):
     if not self.authz_file:
         self.log.error("The [svn] authz_file configuration option in "
                        "trac.ini is empty or not defined")
         raise ConfigurationError()
     try:
         mtime = os.path.getmtime(self.authz_file)
     except OSError as e:
         self.log.error("Error accessing svn authz permission policy "
                        "file: %s", exception_to_unicode(e))
         raise ConfigurationError()
     if mtime != self._mtime:
         self._mtime = mtime
         rm = RepositoryManager(self.env)
         modules = set(repos.reponame
                       for repos in rm.get_real_repositories())
         if '' in modules and self.authz_module_name:
             modules.add(self.authz_module_name)
         modules.add('')
         self.log.info("Parsing authz file: %s", self.authz_file)
         try:
             self._authz = parse(self.authz_file, modules)
         except ParsingError as e:
             self.log.error("Error parsing svn authz permission policy "
                            "file: %s", exception_to_unicode(e))
             raise ConfigurationError()
         else:
             self._users = {user
                            for paths in self._authz.itervalues()
                            for path in paths.itervalues()
                            for user, result in path.iteritems()
                            if result}
     return self._authz, self._users
Example #3
 def get_navigation_items(self, req):
     rm = RepositoryManager(self.env)
     if any(
             repos.is_viewable(req.perm)
             for repos in rm.get_real_repositories()):
         yield ('mainnav', 'browser',
                tag.a(_('Browse Source'), href=req.href.browser()))
Example #4
    def post_process_request(self, req, template, data, content_type):
        #add_script(req, 'multiproject/js/browser.js')
        repository_name = None
        data_repositories = None
        latest_revisions = []
        if len(req.path_info.split("/")) > 2:
            #Get repository name from path_info
            repository_name = req.path_info.split("/")[2]
        if template == 'browser.html':
            username = urllib.quote(req.authname)
            project = Project.get(self.env)
            schemes = None
            if repository_name:
                scm_type = repository_name + ".type"
                scm_dir = repository_name + ".dir"
                scm = self.env.config.get('repositories', scm_type)
                repository_name = self.env.config.get('repositories', scm_dir).split("/")[-1]
                schemes = self.protocols(project.id, scm)
                rm = RepositoryManager(self.env)
                list_repos = rm.get_real_repositories()
                for r in list_repos:
                    if r.get_base().split("/")[-1] == repository_name:
                        l_rev = r.get_youngest_rev()
                        if l_rev:
                            export_url = '../export/archive/'+repository_name+"?rev="+str(l_rev)+"&format=zip"
                            latest_revisions.append(export_url)

            else:
                scm = self.env.config.get('trac', 'repository_type')
                schemes = self.protocols(project.id, scm)
                data_repo_names = self.get_repositories()
                if len(data_repo_names) > 0:
                    data_repositories = []
                    for repo in data_repo_names:
                        type_scheme = []
                        for data_scheme in self.protocols(project.id, repo[1]):
                            type_scheme.append(self.create_co_command(repo[1], username, data_scheme, repo[0]))
                        data_repositories.append(type_scheme)

            names = {'git':'GIT', 'svn':'Subversion', 'hg':'Mercurial'}
            cmd_kinds = {'git':'Clone', 'hg':'Clone', 'svn':'Check out'}

            type = names[scm]

            data['kinds'] = cmd_kinds
            data['schemes'] = schemes
            data['name'] = names[scm]
            data['type'] = scm
            data['data_repositories'] = data_repositories
            data['export_urls'] = latest_revisions

            co_commands = {}
            for scheme in schemes:
                co_commands[scheme] = self.create_co_command(scm, username, scheme, repository_name)
            data['co_commands'] = co_commands

        return template, data, content_type
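The export URL above is assembled by plain string concatenation. Since the example already uses urllib.quote, the same URL could be built with urllib.urlencode from the Python 2 standard library; a small sketch (the helper name is made up):

import urllib

def build_export_url(repository_name, l_rev):
    # Builds '../export/archive/<name>?rev=<rev>&format=zip' with the
    # query string encoded by the standard library.
    query = urllib.urlencode({'rev': str(l_rev), 'format': 'zip'})
    return '../export/archive/%s?%s' % (urllib.quote(repository_name), query)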
Example #5
 def get_entries_for_index(self):
     repository_manager = RepositoryManager(self.env)
     for repository in repository_manager.get_real_repositories():
         rev = repository.oldest_rev
         stop = repository.youngest_rev
         while True:
             changeset = repository.get_changeset(rev)
             yield self.build_doc(changeset)
             if rev == stop:
                 break
             rev = repository.next_rev(rev)
Example #6
 def get_entries_for_index(self):
     repository_manager = RepositoryManager(self.env)
     for repository in repository_manager.get_real_repositories():
         rev = repository.oldest_rev
         stop = repository.youngest_rev
         while True:
             changeset = repository.get_changeset(rev)
             yield ChangesetIndexer(self.env).build_doc(changeset)
             if rev == stop:
                 break
             rev = repository.next_rev(rev)
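Both variants above iterate every changeset of every real repository, from oldest_rev to youngest_rev, and yield one document per changeset; the second constructs a ChangesetIndexer to build the documents. A minimal sketch of a consumer loop; the index object and its add() method are assumptions, not part of the example:

def rebuild_index(self, index):
    # Hypothetical driver: pushes every document produced by
    # get_entries_for_index() into an index object with an add() method.
    count = 0
    for doc in self.get_entries_for_index():
        index.add(doc)
        count += 1
    return count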
Example #7
    def _do_resync(self, reponame):
        rm = RepositoryManager(self.env)
        if reponame == '*':
            repositories = rm.get_real_repositories()
        else:
            if is_default(reponame):
                reponame = ''
            repos = rm.get_repository(reponame)
            if repos is None:
                raise TracError(
                    _("Repository '%(repo)s' not found",
                      repo=reponame or '(default)'))
            repositories = [repos]

        Changeset = namedtuple('changeset', 'repos rev message author date')
        for repos in sorted(repositories, key=lambda r: r.reponame):
            printout(
                _('Resyncing repository history for %(reponame)s... ',
                  reponame=repos.reponame or '(default)'))
            with self.env.db_transaction as db:
                db(
                    """
                    DELETE FROM codereviewer_map WHERE repo=%s
                    """, (repos.reponame, ))
                for time, author, message, rev in db(
                        """
                        SELECT time, author, message, rev FROM revision
                        WHERE repos=%s ORDER BY time
                        """, (repos.id, )):
                    cset = Changeset(repos, rev, message, author,
                                     from_utimestamp(time))
                    self._map(repos.reponame, cset)
                    self._sync_feedback(rev)

            for cnt, in self.env.db_query(
                    "SELECT count(rev) FROM revision WHERE repos=%s",
                    (repos.id, )):
                printout(
                    ngettext('%(num)s revision cached.',
                             '%(num)s revisions cached.',
                             num=cnt))
        printout(_("Done."))
Example #8
 def get_navigation_items(self, req):
     rm = RepositoryManager(self.env)
     if 'BROWSER_VIEW' in req.perm and rm.get_real_repositories():
         yield ('mainnav', 'browser',
                tag.a(_('Browse Source'), href=req.href.browser()))
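Examples #3 and #8 contribute the same navigation item in two ways: #3 asks each repository whether it is viewable under the request's permissions (repos.is_viewable(req.perm)), while #8 only checks the coarse BROWSER_VIEW permission together with the existence of at least one real repository. A sketch of the finer-grained check factored into a standalone helper; the helper name is hypothetical:

def any_viewable_repository(env, perm):
    # Hypothetical helper mirroring Example #3: True if at least one
    # real repository is viewable under the given permission cache.
    rm = RepositoryManager(env)
    return any(repos.is_viewable(perm)
               for repos in rm.get_real_repositories())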
Example #9
    def sync(self, project):
        """Sync the trac environment with cydra
        
        This sets the options returned by ``get_default_options`` and adds Trac's own defaults if necessary"""
        if not self.has_env(project):
            logger.warning('Project %s has no Trac Environment to sync', project.name)
            return

        tracini = os.path.join(self.get_env_path(project), 'conf', 'trac.ini')
        options = self.get_default_options(project)

        # if inherit is enabled, the default values are supposed to be in
        # the inherited file. Thus, we can truncate the config file to get a bare minimum
        if 'inherit_config' in self.component_config:
            options.append(('inherit', 'file', self.component_config['inherit_config']))
            with open(tracini, 'w') as f:
                f.truncate()

        # re-create the configuration file
        config = Configuration(tracini)
        for section, name, value in options:
            config.set(section, name, value)
        config.save()

        # load defaults
        if not any((section, option) == ('inherit', 'file') for section, option, value in options):
            config.set_defaults()
            config.save()

        # check if repositories in cydra match repositories in trac
        env = Environment(self.get_env_path(project))
        rm = RepositoryManager(env)
        trac_repos = rm.get_real_repositories()
        trac_repo_names = [r.reponame for r in trac_repos]

        for repotype, repos in project.data.get('plugins', {}).get('trac', {}).items():
            for repo, tracname in (repos or {}).items():
                if tracname not in trac_repo_names:
                    logger.warning("Removing trac mapping from cydra for %s repo %s", repo, tracname)
                    del repos[repo]
                    if not repos:
                        del project.data.get('plugins', {}).get('trac', {})[repotype]

        # Now do the reverse
        revmap = dict([(y, x) for (x, y) in self.typemap.items()])

        for repo in trac_repos:
            logger.debug('Looking at trac repo %s', repo.reponame)

            try:
                baseparts = repo.get_base().split(':') # This is extremely naive and possibly breaks some time
                repotype, path = baseparts[0], baseparts[-1]
            except:
                logger.error("Unable to parse: " + repo.get_base())

            reponame = os.path.basename(path)
            if repotype == 'git':
                reponame = reponame[:-4]

            try:
                repository = project.get_repository(revmap[repotype], reponame)
            except:
                logger.error("Unable to locate %s %s (%s)", repotype, reponame, path)
                repository = None

            logger.debug('Cydra repo %r', repository)

            if repository:
                # set this mapping if not there already
                project.data.setdefault('plugins', {}).setdefault('trac', {}).setdefault(repository.type, {})[repository.name] = repo.reponame
                logger.info('Setting trac mapping for %s %s -> %s', repository.type, repository.name, repo.reponame)
            else:
                logger.error("Unable to load %s %s (%s)", revmap[repotype], reponame, path)

        project.save()
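sync() above derives the repository type and name by splitting repo.get_base() on ':' and taking the basename of the last part, stripping the '.git' suffix for Git repositories; the code itself flags this parsing as naive. A small illustration of that logic in isolation; the sample base strings are assumptions, since the real format depends on the repository connector:

import os

def split_repo_base(base):
    # Mirrors the parsing in sync(), with an extra endswith() guard
    # before stripping the '.git' suffix.
    parts = base.split(':')
    repotype, path = parts[0], parts[-1]
    reponame = os.path.basename(path)
    if repotype == 'git' and reponame.endswith('.git'):
        reponame = reponame[:-4]
    return repotype, reponame

# Assumed sample values:
#   split_repo_base('git:/srv/repos/project.git')  -> ('git', 'project')
#   split_repo_base('svn:/srv/svn/project')        -> ('svn', 'project')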
Example #10
    def sync(self, project):
        """Sync the trac environment with cydra
        
        This sets the options returned by ``get_default_options`` and adds Trac's own defaults if necessary"""
        if not self.has_env(project):
            logger.warning('Project %s has no Trac Environment to sync',
                           project.name)
            return

        tracini = os.path.join(self.get_env_path(project), 'conf', 'trac.ini')
        options = self.get_default_options(project)

        # if inherit is enabled, the default values are supposed to be in
        # the inherited file. Thus, we can truncate the config file to get a bare minimum
        if 'inherit_config' in self.component_config:
            options.append(
                ('inherit', 'file', self.component_config['inherit_config']))
            with open(tracini, 'w') as f:
                f.truncate()

        # re-create the configuration file
        config = Configuration(tracini)
        for section, name, value in options:
            config.set(section, name, value)
        config.save()

        # load defaults
        if not any((section, option) == ('inherit', 'file')
                   for section, option, value in options):
            config.set_defaults()
            config.save()

        # check if repositories in cydra match repositories in trac
        env = Environment(self.get_env_path(project))
        rm = RepositoryManager(env)
        trac_repos = rm.get_real_repositories()
        trac_repo_names = [r.reponame for r in trac_repos]

        for repotype, repos in project.data.get('plugins', {}).get('trac',
                                                                   {}).items():
            for repo, tracname in (repos or {}).items():
                if tracname not in trac_repo_names:
                    logger.warning(
                        "Removing trac mapping from cydra for %s repo %s",
                        repo, tracname)
                    del repos[repo]
                    if not repos:
                        del project.data.get('plugins', {}).get('trac',
                                                                {})[repotype]

        # Now do the reverse
        revmap = dict([(y, x) for (x, y) in self.typemap.items()])

        for repo in trac_repos:
            logger.debug('Looking at trac repo %s', repo.reponame)

            try:
                baseparts = repo.get_base().split(
                    ':'
                )  # This is extremely naive and possibly breaks some time
                repotype, path = baseparts[0], baseparts[-1]
            except:
                logger.error("Unable to parse: " + repo.get_base())

            reponame = os.path.basename(path)
            if repotype == 'git':
                reponame = reponame[:-4]

            try:
                repository = project.get_repository(revmap[repotype], reponame)
            except:
                logger.error("Unable to locate %s %s (%s)", repotype, reponame,
                             path)
                repository = None

            logger.debug('Cydra repo %r', repository)

            if repository:
                # set this mapping if not there already
                project.data.setdefault('plugins', {}).setdefault(
                    'trac', {}).setdefault(repository.type,
                                           {})[repository.name] = repo.reponame
                logger.info('Setting trac mapping for %s %s -> %s',
                            repository.type, repository.name, repo.reponame)
            else:
                logger.error("Unable to load %s %s (%s)", revmap[repotype],
                             reponame, path)

        project.save()
Example #11
File: browser.py Project: zjj/trac_hack
 def get_navigation_items(self, req):
     rm = RepositoryManager(self.env)
     if 'BROWSER_VIEW' in req.perm and rm.get_real_repositories():
         yield ('mainnav', 'browser',
                tag.a(_('Browse Source'), href=req.href.browser()))
Example #12
    def post_process_request(self, req, template, data, content_type):
        #add_script(req, 'multiproject/js/browser.js')
        repository_name = None
        data_repositories = None
        latest_revisions = []
        if len(req.path_info.split("/")) > 2:
            #Get repository name from path_info
            repository_name = req.path_info.split("/")[2]
        if template == 'browser.html':
            username = urllib.quote(req.authname)
            project = Project.get(self.env)
            schemes = None
            if repository_name:
                scm_type = repository_name + ".type"
                scm_dir = repository_name + ".dir"
                scm = self.env.config.get('repositories', scm_type)
                repository_name = self.env.config.get('repositories',
                                                      scm_dir).split("/")[-1]
                schemes = self.protocols(project.id, scm)
                rm = RepositoryManager(self.env)
                list_repos = rm.get_real_repositories()
                for r in list_repos:
                    if r.get_base().split("/")[-1] == repository_name:
                        l_rev = r.get_youngest_rev()
                        if l_rev:
                            export_url = '../export/archive/' + repository_name + "?rev=" + str(
                                l_rev) + "&format=zip"
                            latest_revisions.append(export_url)

            else:
                scm = self.env.config.get('trac', 'repository_type')
                schemes = self.protocols(project.id, scm)
                data_repo_names = self.get_repositories()
                if len(data_repo_names) > 0:
                    data_repositories = []
                    for repo in data_repo_names:
                        type_scheme = []
                        for data_scheme in self.protocols(project.id, repo[1]):
                            type_scheme.append(
                                self.create_co_command(repo[1], username,
                                                       data_scheme, repo[0]))
                        data_repositories.append(type_scheme)

            names = {'git': 'GIT', 'svn': 'Subversion', 'hg': 'Mercurial'}
            cmd_kinds = {'git': 'Clone', 'hg': 'Clone', 'svn': 'Check out'}

            type = names[scm]

            data['kinds'] = cmd_kinds
            data['schemes'] = schemes
            data['name'] = names[scm]
            data['type'] = scm
            data['data_repositories'] = data_repositories
            data['export_urls'] = latest_revisions

            co_commands = {}
            for scheme in schemes:
                co_commands[scheme] = self.create_co_command(
                    scm, username, scheme, repository_name)
            data['co_commands'] = co_commands

        return template, data, content_type
Example #13
File: browser.py Project: pkdevbox/trac
 def get_navigation_items(self, req):
     rm = RepositoryManager(self.env)
     if any(repos.is_viewable(req.perm) for repos in rm.get_real_repositories()):
         yield ("mainnav", "browser", tag.a(_("Browse Source"), href=req.href.browser()))
Example #14
class RepositoryManager(Component):
    """Adds creation, modification and deletion of repositories.

    This class extends Trac's `RepositoryManager` and adds some
    capabilities that allow users to create and manage repositories.
    The original `RepositoryManager` *just* allows adding and removing
    existing repositories from Trac's database, which means that someone
    still has to do some shell work on the server.

    To work nicely together with manually created and added repositories
    a new `ManagedRepository` class is used to mark the ones that can be
    handled by this module. It also implements forking, if the connector
    supports that, which creates instances of `ForkedRepository`.
    """

    base_dir = Option('repository-manager',
                      'base_dir',
                      'repositories',
                      doc="""The base folder in which repositories will be
                             created.
                             """)
    owner_as_maintainer = BoolOption('repository-manager',
                                     'owner_as_maintainer',
                                     True,
                                     doc="""If true, the owner will have the
                                            role of a maintainer, too.
                                            Otherwise, he will only act as an
                                            administrator for his repositories.
                                            """)

    connectors = ExtensionPoint(IAdministrativeRepositoryConnector)

    manager = None

    roles = ('maintainer', 'writer', 'reader')

    def __init__(self):
        self.manager = TracRepositoryManager(self.env)

    def get_supported_types(self):
        """Return the list of supported repository types."""
        types = set(type for connector in self.connectors
                    for (type, prio) in connector.get_supported_types() or []
                    if prio >= 0)
        return list(types & set(self.manager.get_supported_types()))

    def get_forkable_types(self):
        """Return the list of forkable repository types."""
        return list(type for type in self.get_supported_types()
                    if self.can_fork(type))

    def can_fork(self, type):
        """Return whether the given repository type can be forked."""
        return self._get_repository_connector(type).can_fork(type)

    def can_delete_changesets(self, type):
        """Return whether the given repository type can delete changesets."""
        return self._get_repository_connector(type).can_delete_changesets(type)

    def can_ban_changesets(self, type):
        """Return whether the given repository type can ban changesets."""
        return self._get_repository_connector(type).can_ban_changesets(type)

    def get_forkable_repositories(self):
        """Return a dictionary of repository information, indexed by
        name and including only repositories that can be forked."""
        repositories = self.manager.get_all_repositories()
        result = {}
        for key in repositories:
            if repositories[key]['type'] in self.get_forkable_types():
                result[key] = repositories[key]['name']
        return result

    def get_managed_repositories(self):
        """Return the list of existing managed repositories."""
        repositories = self.manager.get_all_repositories()
        result = {}
        for key in repositories:
            try:
                self.get_repository(repositories[key]['name'], True)
                result[key] = repositories[key]['name']
            except:
                pass
        return result

    def get_repository(self, name, convert_to_managed=False):
        """Retrieve the appropriate repository for the given name.

        Converts the found repository into a `ManagedRepository`, if
        requested. In that case, expect an exception if the found
        repository was not created using this `RepositoryManager`.
        """
        repo = self.manager.get_repository(name)
        if repo and convert_to_managed:
            convert_managed_repository(self.env, repo)
        return repo

    def get_repository_by_id(self, id, convert_to_managed=False):
        """Retrieve a matching `Repository` for the given id."""
        repositories = self.manager.get_all_repositories()
        for name, info in repositories.iteritems():
            if info['id'] == int(id):
                return self.get_repository(name, convert_to_managed)
        return None

    def get_repository_by_path(self, path):
        """Retrieve a matching `Repository` for the given path."""
        return self.manager.get_repository_by_path(path)

    def get_base_directory(self, type):
        """Get the base directory for the given repository type."""
        return os.path.join(self.env.path, self.base_dir, type)

    def create(self, repo):
        """Create a new empty repository.

         * Checks if the new repository can be created and added
         * Prepares the filesystem
         * Uses an appropriate connector to create and initialize the
           repository
         * Postprocesses the filesystem (modes)
         * Inserts everything into the database and synchronizes Trac
        """
        if self.get_repository(repo['name']) or os.path.lexists(repo['dir']):
            raise TracError(_("Repository or directory already exists."))

        self._prepare_base_directory(repo['dir'])

        self._get_repository_connector(repo['type']).create(repo)

        self._adjust_modes(repo['dir'])

        with self.env.db_transaction as db:
            id = self.manager.get_repository_id(repo['name'])
            roles = list((id, role + 's', '') for role in self.roles)
            db.executemany(
                "INSERT INTO repository (id, name, value) VALUES (%s, %s, %s)",
                [(id, 'dir', repo['dir']), (id, 'type', repo['type']),
                 (id, 'owner', repo['owner'])] + roles)
            self.manager.reload_repositories()
        self.manager.get_repository(repo['name']).sync(None, True)
        self.update_auth_files()

    def fork_local(self, repo):
        """Fork a local repository.

         * Checks if the new repository can be created and added
         * Checks if the origin exists and can be forked
         * The filesystem is obviously already prepared
         * Uses an appropriate connector to fork the repository
         * Postprocesses the filesystem (modes)
         * Inserts everything into the database and synchronizes Trac
        """
        if self.get_repository(repo['name']) or os.path.lexists(repo['dir']):
            raise TracError(_("Repository or directory already exists."))

        origin = self.get_repository(repo['origin'], True)
        if not origin:
            raise TracError(_("Origin for local fork does not exist."))
        if origin.type != repo['type']:
            raise TracError(
                _("Fork of local repository must have same type "
                  "as origin."))
        repo.update({'origin_url': 'file://' + origin.directory})

        self._prepare_base_directory(repo['dir'])

        self._get_repository_connector(repo['type']).fork(repo)

        self._adjust_modes(repo['dir'])

        with self.env.db_transaction as db:
            id = self.manager.get_repository_id(repo['name'])
            roles = list((id, role + 's', '') for role in self.roles)
            db.executemany(
                "INSERT INTO repository (id, name, value) VALUES (%s, %s, %s)",
                [(id, 'dir', repo['dir']), (id, 'type', repo['type']),
                 (id, 'owner', repo['owner']),
                 (id, 'description', origin.description),
                 (id, 'origin', origin.id),
                 (id, 'inherit_readers', True)] + roles)
            self.manager.reload_repositories()
        self.manager.get_repository(repo['name']).sync(None, True)
        self.update_auth_files()

    def modify(self, repo, data):
        """Modify an existing repository."""
        convert_managed_repository(self.env, repo)
        if repo.directory != data['dir']:
            shutil.move(repo.directory, data['dir'])
        with self.env.db_transaction as db:
            db.executemany(
                "UPDATE repository SET value = %s WHERE id = %s AND name = %s",
                [(data[key], repo.id, key) for key in data])
            self.manager.reload_repositories()
        if repo.directory != data['dir']:
            repo = self.get_repository(data['name'])
            repo.sync(clean=True)
        self.update_auth_files()

    def remove(self, repo, delete):
        """Remove an existing repository.

        Depending on the parameter delete this method also removes the
        repository from the filesystem. This can not be undone.
        """
        convert_managed_repository(self.env, repo)
        if delete:
            shutil.rmtree(repo.directory)
        with self.env.db_transaction as db:
            db("DELETE FROM repository WHERE id = %d" % repo.id)
            db("DELETE FROM revision WHERE repos = %d" % repo.id)
            db("DELETE FROM node_change WHERE repos = %d" % repo.id)
        self.manager.reload_repositories()
        self.update_auth_files()

    def delete_changeset(self, repo, rev, ban):
        """Delete a changeset from a managed repository, if supported.

        Depending on the parameter ban this method also marks the
        changeset to be kept out of the repository. That features needs
        special support by the used scm.
        """
        convert_managed_repository(self.env, repo)
        self._get_repository_connector(repo.type).delete_changeset(
            repo, rev, ban)

    def add_role(self, repo, role, subject):
        """Add a role for the given repository."""
        assert role in self.roles
        convert_managed_repository(self.env, repo)
        role_attr = '_' + role + 's'
        setattr(repo, role_attr, getattr(repo, role_attr) | set([subject]))
        self._update_roles_in_db(repo)

    def revoke_roles(self, repo, roles):
        """Revoke a list of `role, subject` pairs."""
        convert_managed_repository(self.env, repo)
        for role, subject in roles:
            role_attr = '_' + role + 's'
            config = getattr(repo, role_attr)
            config = config - set([subject])
            setattr(repo, role_attr, getattr(repo, role_attr) - set([subject]))
        self._update_roles_in_db(repo)

    def update_auth_files(self):
        """Rewrites all configured auth files for all managed
        repositories.
        """
        types = self.get_supported_types()
        all_repositories = []
        for repo in self.manager.get_real_repositories():
            try:
                convert_managed_repository(self.env, repo)
                all_repositories.append(repo)
            except:
                pass
        for type in types:
            repos = [repo for repo in all_repositories if repo.type == type]
            self._get_repository_connector(type).update_auth_files(repos)

        authz_source_file = AuthzSourcePolicy(self.env).authz_file
        if authz_source_file:
            authz_source_path = os.path.join(self.env.path, authz_source_file)

            authz = ConfigParser()

            groups = set()
            for repo in all_repositories:
                groups |= {
                    name
                    for name in repo.maintainers() if name[0] == '@'
                }
                groups |= {name for name in repo.writers() if name[0] == '@'}
                groups |= {name for name in repo.readers() if name[0] == '@'}

            authz.add_section('groups')
            for group in groups:
                members = expand_user_set(self.env, [group])
                authz.set('groups', group[1:], ', '.join(sorted(members)))
            authenticated = sorted({u[0] for u in self.env.get_known_users()})
            authz.set('groups', 'authenticated', ', '.join(authenticated))

            for repo in all_repositories:
                section = repo.reponame + ':/'
                authz.add_section(section)
                r = repo.maintainers() | repo.writers() | repo.readers()

                def apply_user_list(users, action):
                    if not users:
                        return
                    if 'anonymous' in users:
                        authz.set(section, '*', action)
                        return
                    if 'authenticated' in users:
                        authz.set(section, '@authenticated', action)
                        return
                    for user in sorted(users):
                        authz.set(section, user, action)

                apply_user_list(r, 'r')

            self._prepare_base_directory(authz_source_path)
            with open(authz_source_path, 'wb') as authz_file:
                authz.write(authz_file)
            try:
                modes = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP
                os.chmod(authz_source_path, modes)
            except:
                pass

    ### Private methods
    def _get_repository_connector(self, repo_type):
        """Get the matching connector with maximum priority."""
        return max(((connector, type, prio) for connector in self.connectors
                    for (type, prio) in connector.get_supported_types()
                    if prio >= 0 and type == repo_type),
                   key=lambda x: x[2])[0]

    def _prepare_base_directory(self, directory):
        """Create the base directories and set the correct modes."""
        base = os.path.dirname(directory)
        original_umask = os.umask(0)
        try:
            os.makedirs(base, stat.S_IRWXU | stat.S_IRWXG)
        except OSError, e:
            if e.errno == errno.EEXIST and os.path.isdir(base):
                pass
            else:
                raise
        finally:
            os.umask(original_umask)  # restore the umask saved before makedirs
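From the keys that create() reads (name, type, dir and owner), a call to the extended RepositoryManager above might look like the following sketch; the repository name, owner and the choice of the first supported type are made-up values:

import os

def create_example_repository(env):
    # Hypothetical usage of the RepositoryManager component shown above.
    manager = RepositoryManager(env)
    repo_type = manager.get_supported_types()[0]  # assumes at least one connector
    repo = {
        'name': 'sandbox',                        # made-up repository name
        'type': repo_type,
        'dir': os.path.join(manager.get_base_directory(repo_type), 'sandbox'),
        'owner': 'alice',                         # made-up owner
    }
    manager.create(repo)
    return manager.get_repository('sandbox', convert_to_managed=True)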
Example #15
    def process_request(self, req):
        """
        Handle the export requests

        :raises: TracError in case of failure
        """
        req.perm.require('BROWSER_VIEW')
        req.perm.require('FILE_VIEW')
        repository_name = req.path_info.split("/")[-1]
        # Get default repository and its type
        rm = RepositoryManager(self.env)
        list_repos = rm.get_real_repositories()
        repo = None
        repo_type = None
        for r in list_repos:
            if r.get_base().split("/")[-1].lower() == repository_name:
                repo = r
                break
        repo_type = repo.get_base().split(":")[0]
        svn_path = 'trunk'
        format = plaintext(req.args.get('format', 'zip'))
        conf.log.exception("Repotype at beginning: %s" % repo_type)
        # Get revision info. For svn it's in format: <revnum>/<path>
        revision = plaintext(str(req.args.get('rev', repo.get_youngest_rev())))

        # Validate if given revision really exists
        try:
            revision = repo.normalize_rev(revision)
        except NoSuchChangeset:
            raise HTTPNotFound('No such changeset')

        # Validate format
        if format not in self.formats:
            raise TracError('Format is not supported')

        # Load project object based on current environment
        env_name = conf.resolveProjectName(self.env)
        #repo_type = self.env.config.get('trac', 'repository_type')
        repo_dir = conf.getEnvironmentVcsPath(env_name, repo_type,
                                              repository_name)
        project = Project.get(env_name=env_name)

        if repo_type not in conf.supported_scm_systems:
            raise TracError('Non-supported VCS type')

        # Create temporary directory with appropriate subdirectory where to export repository
        tempfd = tempfile.NamedTemporaryFile(delete=False)

        # Dump the repository per type, into defined location
        conf.log.exception("Repotype: %s, repo_dir: %s" %
                           (repo_type, repo_dir))
        try:
            if repo_type == 'git':
                # Use short revision format
                revision = revision[:6]
                prefix = '%s-%s' % (env_name, revision[:6])
                self._archive_git(repo_dir, revision, format, tempfd.name,
                                  prefix)

            elif repo_type == 'hg':
                # In case of both local:global revision format, use only global
                if ':' in revision:
                    revision = revision.split(':', 1)[1]
                prefix = '%s-%s' % (env_name, revision[:6])
                self._archive_hg(repo_dir, revision, format, tempfd.name,
                                 prefix)

            elif repo_type == 'svn':
                assert format == 'zip', 'Only zip format is supported for subversion'

                # Redirect to Trac's internal changeset functionality
                # Example: https://localhost/svnproject/changeset/4/trunk?old_path=%2F&format=zip
                changeset_href = Href('/%s/changeset' % env_name)
                return req.redirect(
                    changeset_href(revision,
                                   repository_name + "/",
                                   format='zip'))

        # Redirect raises RequestDone: re-raise it
        except RequestDone:
            raise

        except Exception, err:
            self.env.log.exception('Repository dump failed: %s' % err)
            raise TracError(
                'Repository archive failed - please try again later')
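The git and hg branches above delegate to private helpers _archive_git() and _archive_hg() that are not included in the snippet. A hypothetical sketch of what the git helper could look like, based only on the arguments the example passes (repository path, revision, format, output file name, prefix) and on the standard 'git archive' command; it is not taken from the actual plugin:

import subprocess

def _archive_git(repo_dir, revision, format, output_path, prefix):
    # Hypothetical implementation: writes an archive of the given revision
    # to output_path using the standard 'git archive' command.
    subprocess.check_call([
        'git', '--git-dir', repo_dir, 'archive',
        '--format=' + format,
        '--prefix=' + prefix + '/',
        '--output=' + output_path,
        revision,
    ])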