def scrub_project(p, options):
    log.info('Scrubbing project "%s"' % p.shortname)
    preamble = options.preamble
    if not public(p):
        log.info('%s project "%s"' % (preamble, p.shortname))
        if not options.dry_run:
            p.delete()
        return
    for ac in p.app_configs:
        ac.project = p
        c.app = p.app_instance(ac)
        mount_point = ac.options.get('mount_point')
        tool_name = ac.tool_name.lower()
        if tool_name in ('admin', 'search', 'profile'):
            continue
        if not public(ac, project=p):
            log.info('%s tool %s/%s on project "%s"' %
                     (preamble, tool_name, mount_point, p.shortname))
            if not options.dry_run:
                p.uninstall_app(mount_point)
            continue
        q = dict(app_config_id=ac._id)
        ace = dict(access='DENY', permission='*', role_id=None)
        q['acl'] = {'$in': [ace]}
        counter = 0
        if tool_name == 'tickets':
            if ac.options.get('TicketMonitoringEmail'):
                log.info('%s options.TicketMonitoringEmail from the %s/%s '
                         'tool on project "%s"' %
                         (preamble, tool_name, mount_point, p.shortname))
                if not options.dry_run:
                    ac.options['TicketMonitoringEmail'] = None
            for tickets in utils.chunked_find(TM.Ticket, q):
                for t in tickets:
                    counter += 1
                    if not options.dry_run:
                        t.discussion_thread.delete()
                        t.delete()
                ThreadLocalORMSession.flush_all()
                ThreadLocalORMSession.close_all()
            if counter > 0:
                log.info(
                    '%s %s tickets from the %s/%s tool on '
                    'project "%s"' %
                    (preamble, counter, tool_name, mount_point, p.shortname))
        elif tool_name == 'discussion':
            for forums in utils.chunked_find(DM.Forum, q):
                for f in forums:
                    counter += 1
                    if not options.dry_run:
                        f.delete()
            if counter > 0:
                log.info(
                    '%s %s forums from the %s/%s tool on '
                    'project "%s"' %
                    (preamble, counter, tool_name, mount_point, p.shortname))
def scrub_project(p, options):
    log.info('Scrubbing project "%s"' % p.shortname)
    preamble = options.preamble
    if not public(p):
        log.info('%s project "%s"' % (preamble, p.shortname))
        if not options.dry_run:
            p.delete()
        return
    for ac in p.app_configs:
        ac.project = p
        c.app = p.app_instance(ac)
        mount_point = ac.options.get('mount_point')
        tool_name = ac.tool_name.lower()
        if tool_name in ('admin', 'search', 'profile'):
            continue
        if not public(ac, project=p):
            log.info('%s tool %s/%s on project "%s"' % (
                preamble, tool_name, mount_point, p.shortname))
            if not options.dry_run:
                p.uninstall_app(mount_point)
            continue
        q = dict(app_config_id=ac._id)
        ace = dict(access='DENY', permission='*', role_id=None)
        q['acl'] = {'$in': [ace]}
        counter = 0
        if tool_name == 'tickets':
            if ac.options.get('TicketMonitoringEmail'):
                log.info('%s options.TicketMonitoringEmail from the %s/%s '
                         'tool on project "%s"' % (preamble, tool_name,
                                                   mount_point, p.shortname))
                if not options.dry_run:
                    ac.options['TicketMonitoringEmail'] = None
            for tickets in utils.chunked_find(TM.Ticket, q):
                for t in tickets:
                    counter += 1
                    if not options.dry_run:
                        t.discussion_thread.delete()
                        t.delete()
                ThreadLocalORMSession.flush_all()
                ThreadLocalORMSession.close_all()
            if counter > 0:
                log.info('%s %s tickets from the %s/%s tool on '
                         'project "%s"' % (preamble, counter, tool_name,
                                           mount_point, p.shortname))
        elif tool_name == 'discussion':
            for forums in utils.chunked_find(DM.Forum, q):
                for f in forums:
                    counter += 1
                    if not options.dry_run:
                        f.delete()
            if counter > 0:
                log.info('%s %s forums from the %s/%s tool on '
                         'project "%s"' % (preamble, counter, tool_name,
                                           mount_point, p.shortname))
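A note on the helper used above: scrub_project relies on a public() predicate that is not part of this listing. A minimal sketch of what it plausibly checks, mirroring the DENY ACE query built inside scrub_project (an illustration only, not the script's actual helper; the project argument is accepted for signature compatibility):

def public(obj, project=None):
    # Private objects carry a blanket DENY entry (access='DENY',
    # permission='*', role_id=None) in their ACL -- the same ACE that
    # scrub_project queries for. Everything else is treated as public here.
    for ace in getattr(obj, 'acl', []) or []:
        if (ace.access == 'DENY' and ace.permission == '*'
                and ace.role_id is None):
            return False
    return True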
Example #3
    def command(self):
        from allura import model as M
        self.basic_setup()
        graph = build_model_inheritance_graph()
        if self.options.project:
            q_project = dict(shortname=self.options.project)
        elif self.options.project_regex:
            q_project = dict(shortname={'$regex': self.options.project_regex})
        elif self.options.neighborhood:
            neighborhood_id = M.Neighborhood.query.get(
                url_prefix='/%s/' % self.options.neighborhood)._id
            q_project = dict(neighborhood_id=neighborhood_id)
        else:
            q_project = {}

        # if none specified, do all
        if not self.options.solr and not self.options.refs:
            self.options.solr = self.options.refs = True

        for projects in utils.chunked_find(M.Project, q_project):
            for p in projects:
                c.project = p
                base.log.info('Reindex project %s', p.shortname)
                # Clear index for this project
                if self.options.solr and not self.options.skip_solr_delete:
                    g.solr.delete(q='project_id_s:%s' % p._id)
                if self.options.refs:
                    M.ArtifactReference.query.remove(
                        {'artifact_reference.project_id': p._id})
                    M.Shortlink.query.remove({'project_id': p._id})
                app_config_ids = [ac._id for ac in p.app_configs]
                # Traverse the inheritance graph, finding all artifacts that
                # belong to this project
                for _, a_cls in dfs(M.Artifact, graph):
                    base.log.info('  %s', a_cls)
                    ref_ids = []
                    # Create artifact references and shortlinks
                    for a in a_cls.query.find(dict(app_config_id={'$in': app_config_ids})):
                        if self.options.verbose:
                            base.log.info('      %s', a.shorthand_id())
                        if self.options.refs:
                            try:
                                M.ArtifactReference.from_artifact(a)
                                M.Shortlink.from_artifact(a)
                            except:
                                base.log.exception(
                                    'Making ArtifactReference/Shortlink from %s', a)
                                continue
                        ref_ids.append(a.index_id())
                    M.main_orm_session.flush()
                    M.artifact_orm_session.clear()
                    try:
                        self._chunked_add_artifacts(ref_ids)
                    except CompoundError as err:
                        base.log.exception(
                            'Error indexing artifacts:\n%r', err)
                        base.log.error('%s', err.format_error())
                    M.main_orm_session.flush()
                    M.main_orm_session.clear()
        base.log.info('Reindex %s', 'queued' if self.options.tasks else 'done')
def main():
    for chunk in utils.chunked_find(M.Project):
        for p in chunk:
            p.install_app('activity')

        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()
Example #6
    def execute(cls, options):
        q_project = {}
        if options.nbhd:
            nbhd = M.Neighborhood.query.get(url_prefix=options.nbhd)
            if not nbhd:
                return "Invalid neighborhood url prefix."
            q_project['neighborhood_id'] = nbhd._id
        if options.project:
            q_project['shortname'] = options.project
        elif options.project_regex:
            q_project['shortname'] = {'$regex': options.project_regex}

        for chunk in chunked_find(M.Project, q_project):
            project_ids = []
            for p in chunk:
                log.info('Reindex project %s', p.shortname)
                if options.dry_run:
                    continue
                c.project = p
                project_ids.append(p._id)

            try:
                for chunk in chunked_list(project_ids, options.max_chunk):
                    if options.tasks:
                        cls._post_add_projects(chunk)
                    else:
                        add_projects(chunk)
            except CompoundError as err:
                log.exception('Error indexing projects:\n%r', err)
                log.error('%s', err.format_error())
            M.main_orm_session.flush()
            M.main_orm_session.clear()
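Several of these examples batch Mongo writes through chunked_list(items, max_chunk). A minimal sketch of the behavior they rely on, splitting a sequence into pieces of at most `size` items (the real allura.lib.utils helper may differ in detail):

def chunked_list(items, size):
    # Yield successive slices of `items`, each at most `size` long.
    for start in range(0, len(items), size):
        yield items[start:start + size]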
Example #7
 def execute(cls, options):
     models = [M.Post, ForumPost]
     app_config_id = cls.get_tool_id(options.tool)
     # Find all posts that have parent_id, but does not have actual parent
     # and create fake parent for them
     for model in models:
         q = {'parent_id': {'$ne': None},
              'app_config_id': app_config_id}
         for chunk in chunked_find(model, q):
             for post in chunk:
                 if not post.parent:
                     log.info('Creating deleted parent for %s %s',
                              model.__mongometa__.name, post._id)
                     c.project = post.app_config.project
                     slug = post.slug.rsplit('/', 1)[0]
                     full_slug = post.full_slug.rsplit('/', 1)[0]
                     author = c.project.admins()[0]
                     deleted_post = model(
                         _id=post.parent_id,
                         deleted=True,
                         text="Automatically created in place of deleted post",
                         app_id=post.app_id,
                         app_config_id=post.app_config_id,
                         discussion_id=post.discussion_id,
                         thread_id=post.thread_id,
                         author_id=author._id,
                         slug=slug,
                         full_slug=full_slug,
                     )
                     if options.dry_run:
                         session(deleted_post).expunge(deleted_post)
                     else:
                         session(deleted_post).flush(deleted_post)
Example #8
    def execute(cls, options):
        q_project = {}
        if options.nbhd:
            nbhd = M.Neighborhood.query.get(url_prefix=options.nbhd)
            if not nbhd:
                return "Invalid neighborhood url prefix."
            q_project['neighborhood_id'] = nbhd._id
        if options.project:
            q_project['shortname'] = options.project
        elif options.project_regex:
            q_project['shortname'] = {'$regex': options.project_regex}

        for chunk in chunked_find(M.Project, q_project):
            project_ids = []
            for p in chunk:
                log.info('Reindex project %s', p.shortname)
                if options.dry_run:
                    continue
                c.project = p
                project_ids.append(p._id)

            try:
                for chunk in chunked_list(project_ids, options.max_chunk):
                    if options.tasks:
                        cls._post_add_projects(chunk)
                    else:
                        add_projects(chunk)
            except CompoundError as err:
                log.exception('Error indexing projects:\n%r', err)
                log.error('%s', err.format_error())
            M.main_orm_session.flush()
            M.main_orm_session.clear()
def main(options):
    log.addHandler(logging.StreamHandler(sys.stdout))
    log.setLevel(getattr(logging, options.log_level.upper()))

    g.solr = mock.Mock()
    preamble = options.dry_run and "Would delete" or "Deleting"
    options.preamble = preamble

    for nbhd in M.Neighborhood.query.find():
        q = {'neighborhood_id': nbhd._id}
        for projects in utils.chunked_find(M.Project, q):
            for p in projects:
                scrub_project(p, options)
            ThreadLocalORMSession.flush_all()
            ThreadLocalORMSession.close_all()

    log.info('%s %s EmailAddress documents' %
            (preamble, M.EmailAddress.query.find().count()))
    log.info('%s email addresses from %s User documents' %
            (preamble, M.User.query.find().count()))
    log.info('%s monitoring_email addresses from %s Forum documents' %
            (preamble, DM.Forum.query.find({"monitoring_email":
                                            {"$nin": [None, ""]}}).count()))

    if not options.dry_run:
        M.EmailAddress.query.remove()
        M.User.query.update({}, {"$set": {"email_addresses": []}}, multi=True)
        DM.Forum.query.update({"monitoring_email": {"$nin": [None, ""]}},
                              {"$set": {"monitoring_email": None}}, multi=True)
    return 0
Example #10
def main(options):
    nbhd = M.Neighborhood.query.get(url_prefix=options.nbhd)
    if not nbhd:
        return "Couldn't find neighborhood with url_prefix '%s'" % options.nbhd

    user = M.User.by_username(options.user)
    if not user:
        return "Couldn't find user with username '%s'" % options.user

    if options.replace_user:
        replace_user = M.User.by_username(options.replace_user)
        if not replace_user:
            return "Couldn't find user with username '%s'" % options.replace_user
    else:
        replace_user = None

    if options.project == 'ALLPROJECTS':
        for chunk in chunked_find(
                M.Project,
                dict(
                    neighborhood_id=nbhd._id,
                    shortname={'$ne': '--init--'},
                )):
            for p in chunk:
                update_project(options, user, p, replace_user=replace_user)
    else:
        project = M.Project.query.get(neighborhood_id=nbhd._id,
                                      shortname=options.project)
        if not project:
            return "Couldn't find project with shortname '%s'" % options.project
        update_project(options, user, project, replace_user=replace_user)
Example #11
def main(options):
    log.addHandler(logging.StreamHandler(sys.stdout))
    log.setLevel(getattr(logging, options.log_level.upper()))

    g.solr = mock.Mock()
    preamble = options.dry_run and "Would delete" or "Deleting"
    options.preamble = preamble

    for nbhd in M.Neighborhood.query.find():
        q = {'neighborhood_id': nbhd._id}
        for projects in utils.chunked_find(M.Project, q):
            for p in projects:
                scrub_project(p, options)
            ThreadLocalORMSession.flush_all()
            ThreadLocalORMSession.close_all()

    log.info('%s %s EmailAddress documents' %
            (preamble, M.EmailAddress.query.find().count()))
    log.info('%s email addresses from %s User documents' %
            (preamble, M.User.query.find().count()))
    log.info('%s monitoring_email addresses from %s Forum documents' %
            (preamble, DM.Forum.query.find({"monitoring_email":
                    {"$nin": [None, ""]}}).count()))

    if not options.dry_run:
        M.EmailAddress.query.remove()
        M.User.query.update({}, {"$set": {"email_addresses": []}}, multi=True)
        DM.Forum.query.update({"monitoring_email": {"$nin": [None, ""]}},
                {"$set": {"monitoring_email": None}}, multi=True)
    return 0
def main():
    test = sys.argv[-1] == 'test'
    num_projects_examined = 0
    log.info('Examining subroles in all non-user projects.')
    n_users = M.Neighborhood.query.get(name='Users')
    project_filter = dict(neighborhood_id={'$ne': n_users._id})
    for some_projects in utils.chunked_find(M.Project, project_filter):
        for project in some_projects:
            project_name = '%s.%s' % (
                project.neighborhood.name, project.shortname)
            project_roles = {}
            for parent, child in [('Admin', 'Developer'), ('Developer', 'Member')]:
                parent_role = M.ProjectRole.by_name(parent, project=project)
                child_role = M.ProjectRole.by_name(child, project=project)
                project_roles[parent] = parent_role
                project_roles[child] = child_role
                if not (parent_role and child_role):
                    break
                if len(parent_role.roles) != 1 or parent_role.roles[0] != child_role._id:
                    if test:
                        log.info('Would reset %s subroles for project "%s".' %
                                 (parent, project_name))
                        log.info('- Existing %s subrole(s): %s' %
                                 (parent, parent_role.roles))
                    else:
                        log.info('Resetting %s subroles for project "%s".' %
                                 (parent, project_name))
                        parent_role.roles = [child_role._id]
                        ThreadLocalORMSession.flush_all()
            if not (project_roles['Admin'] and project_roles['Developer']
                    and project_roles['Member']):
                log.info(
                    'Skipping "%s": missing Admin, Developer, or Member roles' %
                    project_name)
                continue
            for user in project.users():
                pr = user.project_role(project=project)
                if not pr.roles:
                    continue
                for parent, children in [('Admin', ('Developer', 'Member')),
                                         ('Developer', ('Member',))]:
                    if project_roles[parent]._id not in pr.roles:
                        continue
                    for role_name in children:
                        extra_role = project_roles[role_name]
                        if extra_role._id in pr.roles:
                            if test:
                                log.info('Would remove %s role from user "%s" in project "%s" (already has %s role).'
                                         % (role_name, user.username, project_name, parent))
                                pr.roles.remove(extra_role._id)
                            else:
                                log.info('Removing %s role from user "%s" in project "%s" (already has %s role).'
                                         % (role_name, user.username, project_name, parent))
                                pr.roles.remove(extra_role._id)
                                ThreadLocalORMSession.flush_all()
            num_projects_examined += 1
            session(project).clear()

        log.info('%s projects examined.' % num_projects_examined)
def main():
    test = sys.argv[-1] == 'test'
    num_projects_examined = 0
    log.info('Examining subroles in all non-user projects.')
    n_users = M.Neighborhood.query.get(name='Users')
    project_filter = dict(neighborhood_id={'$ne': n_users._id})
    for some_projects in utils.chunked_find(M.Project, project_filter):
        for project in some_projects:
            project_name = '%s.%s' % (project.neighborhood.name,
                                      project.shortname)
            project_roles = {}
            for parent, child in [('Admin', 'Developer'),
                                  ('Developer', 'Member')]:
                parent_role = M.ProjectRole.by_name(parent, project=project)
                child_role = M.ProjectRole.by_name(child, project=project)
                project_roles[parent] = parent_role
                project_roles[child] = child_role
                if not (parent_role and child_role):
                    break
                if (len(parent_role.roles) != 1
                        or parent_role.roles[0] != child_role._id):
                    if test:
                        log.info('Would reset %s subroles for project "%s".' %
                                 (parent, project_name))
                        log.info('- Existing %s subrole(s): %s' %
                                 (parent, parent_role.roles))
                    else:
                        log.info('Resetting %s subroles for project "%s".' %
                                 (parent, project_name))
                        parent_role.roles = [child_role._id]
                        ThreadLocalORMSession.flush_all()
            if not (project_roles['Admin'] and project_roles['Developer']
                    and project_roles['Member']):
                log.info(
                    'Skipping "%s": missing Admin, Developer, or Member roles'
                    % project_name)
                continue
            for user in project.users():
                pr = user.project_role(project=project)
                if not pr.roles: continue
                for parent, children in [('Admin', ('Developer', 'Member')),
                                         ('Developer', ('Member', ))]:
                    if project_roles[parent]._id not in pr.roles: continue
                    for role_name in children:
                        extra_role = project_roles[role_name]
                        if extra_role._id in pr.roles:
                            if test:
                                log.info('Would remove %s role from user "%s" in project "%s" (already has %s role).' \
                                         % (role_name, user.username, project_name, parent))
                                pr.roles.remove(extra_role._id)
                            else:
                                log.info('Removing %s role from user "%s" in project "%s" (already has %s role).' \
                                         % (role_name, user.username, project_name, parent))
                                pr.roles.remove(extra_role._id)
                                ThreadLocalORMSession.flush_all()
            num_projects_examined += 1
            session(project).clear()

        log.info('%s projects examined.' % num_projects_examined)
Example #14
    def execute(cls, options):
        q_project = {}
        if options.nbhd:
            nbhd = M.Neighborhood.query.get(url_prefix=options.nbhd)
            if not nbhd:
                return "Invalid neighborhood url prefix."
            q_project['neighborhood_id'] = nbhd._id
        if options.project:
            q_project['shortname'] = options.project
        elif options.project_regex:
            q_project['shortname'] = {'$regex': options.project_regex}

        log.info('Refreshing last commit data')

        for chunk in chunked_find(M.Project, q_project):
            for p in chunk:
                log.info("Refreshing last commit data for project '%s'." %
                         p.shortname)
                if options.dry_run:
                    continue
                c.project = p
                if options.mount_point:
                    mount_points = [options.mount_point]
                else:
                    mount_points = [
                        ac.options.mount_point
                        for ac in M.AppConfig.query.find(dict(
                            project_id=p._id))
                    ]
                for app in (p.app_instance(mp) for mp in mount_points):
                    c.app = app
                    if not hasattr(app, 'repo'):
                        continue
                    if c.app.repo.tool.lower() not in options.repo_types:
                        log.info("Skipping %r: wrong type (%s)", c.app.repo,
                                 c.app.repo.tool.lower())
                        continue

                    c.app.repo.status = 'analyzing'
                    session(c.app.repo).flush(c.app.repo)
                    try:
                        ci_ids = list(
                            reversed(list(c.app.repo.all_commit_ids())))
                        if options.clean:
                            cls._clean(ci_ids)

                        log.info('Refreshing all last commits in %r',
                                 c.app.repo)
                        cls.refresh_repo_lcds(ci_ids, options)
                        new_commit_ids = app.repo.unknown_commit_ids()
                        if len(new_commit_ids) > 0:
                            refresh.post()
                    except Exception:
                        log.exception('Error refreshing %r', c.app.repo)
                        raise
                    finally:
                        c.app.repo.status = 'ready'
                        session(c.app.repo).flush(c.app.repo)
            ThreadLocalORMSession.flush_all()
Example #15
    def command(self):
        from allura import model as M
        self.basic_setup()
        graph = build_model_inheritance_graph()
        if self.options.project:
            q_project = dict(shortname=self.options.project)
        elif self.options.project_regex:
            q_project = dict(shortname={'$regex': self.options.project_regex})
        elif self.options.neighborhood:
            neighborhood_id = M.Neighborhood.query.get(
                url_prefix='/%s/' % self.options.neighborhood)._id
            q_project = dict(neighborhood_id=neighborhood_id)
        else:
            q_project = {}

        # if none specified, do all
        if not self.options.solr and not self.options.refs:
            self.options.solr = self.options.refs = True

        for projects in utils.chunked_find(M.Project, q_project):
            for p in projects:
                c.project = p
                base.log.info('Reindex project %s', p.shortname)
                # Clear index for this project
                if self.options.solr and not self.options.skip_solr_delete:
                    g.solr.delete(q='project_id_s:%s' % p._id)
                if self.options.refs:
                    M.ArtifactReference.query.remove(
                        {'artifact_reference.project_id': p._id})
                    M.Shortlink.query.remove({'project_id': p._id})
                app_config_ids = [ac._id for ac in p.app_configs]
                # Traverse the inheritance graph, finding all artifacts that
                # belong to this project
                for _, a_cls in dfs(M.Artifact, graph):
                    base.log.info('  %s', a_cls)
                    ref_ids = []
                    # Create artifact references and shortlinks
                    for a in a_cls.query.find(dict(app_config_id={'$in': app_config_ids})):
                        if self.options.verbose:
                            base.log.info('      %s', a.shorthand_id())
                        if self.options.refs:
                            try:
                                M.ArtifactReference.from_artifact(a)
                                M.Shortlink.from_artifact(a)
                            except:
                                base.log.exception(
                                    'Making ArtifactReference/Shortlink from %s', a)
                                continue
                        ref_ids.append(a.index_id())
                    M.main_orm_session.flush()
                    M.artifact_orm_session.clear()
                    try:
                        self._chunked_add_artifacts(ref_ids)
                    except CompoundError as err:
                        base.log.exception(
                            'Error indexing artifacts:\n%r', err)
                        base.log.error('%s', err.format_error())
                    M.main_orm_session.flush()
                    M.main_orm_session.clear()
Example #16
def main():
    for chunk in utils.chunked_find(M.User):
        for user in chunk:
            print('Processing {0}'.format(user.username))
            user.pending = False
            # Ming doesn't mark document for update, since pending is False
            # by default, even if field is missing from mongo
            state(user).status = state(user).dirty
            ThreadLocalORMSession.flush_all()
Example #17
 def execute(cls, options):
     for chunk in chunked_find(M.User, {}):
         for u in chunk:
             log.info('Trimming emails for user %s', u.username)
             new_addresses = [M.EmailAddress.canonical(addr) for addr in u.email_addresses]
             u.email_addresses = new_addresses
             if u.preferences.email_address is not None:
                 u.preferences.email_address = M.EmailAddress.canonical(
                     u.preferences.email_address)
             session(u).flush(u)
     for chunk in chunked_find(M.EmailAddress, {}):
         for a in chunk:
             log.info('Trimming email address entry %s', a.email)
             a.email = M.EmailAddress.canonical(a.email)
             session(a).flush(a)
     M.main_orm_session.flush()
     M.main_orm_session.clear()
     log.info('Finished trimming emails')
def main():
    for chunk in utils.chunked_find(M.MergeRequest):
        for mr in chunk:
            try:
                print('Processing {0}'.format(mr.url()))
                mr.subscribe(user=mr.creator)
                ThreadLocalORMSession.flush_all()
            except:
                log.exception('Error on %s', mr)
Example #19
 def test_filter_on_sort_key(self):
     query = {"username": {"$in": ["sample-user-1", "sample-user-2", "sample-user-3"]}}
     chunks = list(utils.chunked_find(M.User, query, 2, sort_key="username"))
     assert len(chunks) == 2, chunks
     assert len(chunks[0]) == 2, chunks[0]
     assert len(chunks[1]) == 1, chunks[1]
     assert_equal(chunks[0][0].username, "sample-user-1")
     assert_equal(chunks[0][1].username, "sample-user-2")
     assert_equal(chunks[1][0].username, "sample-user-3")
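The test above pins down the paging behavior every example in this collection relies on. A minimal chunked_find-style helper consistent with it, paging a Ming query in fixed-size chunks ordered by sort_key (a sketch only; the real allura.lib.utils.chunked_find takes additional options):

def chunked_find(cls, query=None, pagesize=1024, sort_key='_id'):
    # Yield lists of at most `pagesize` documents so callers can flush and
    # clear ORM sessions between chunks.
    page = 0
    while True:
        results = (cls.query.find(query or {})
                   .sort(sort_key)
                   .skip(page * pagesize)
                   .limit(pagesize)
                   .all())
        if not results:
            break
        yield results
        page += 1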
def main():
    for chunk in utils.chunked_find(M.User):
        for user in chunk:
            print('Processing {0}'.format(user.username))
            user.pending = False
            # Ming doesn't mark document for update, since pending is False
            # by default, even if field is missing from mongo
            state(user).status = state(user).dirty
            ThreadLocalORMSession.flush_all()
def main():
    for chunk in utils.chunked_find(M.MergeRequest):
        for mr in chunk:
            try:
                print('Processing {0}'.format(mr.url()))
                mr.subscribe(user=mr.creator)
                ThreadLocalORMSession.flush_all()
            except:
                log.exception('Error on %s', mr)
def main():

    if TEST:
        log.info('Examining permissions for all Home Wikis')
    else:
        log.info('Fixing permissions for all Home Wikis')

    for some_projects in utils.chunked_find(
            M.Project,
        {
            'neighborhood_id': {
                '$nin': [
                    ObjectId('4be2faf8898e33156f00003e'),  # /u
                    ObjectId('4dbf2563bfc09e6362000005')  # /motorola
                ]
            }
        }):
        for project in some_projects:
            c.project = project
            home_app = project.app_instance('home')
            if isinstance(home_app, ForgeWikiApp):
                log.info('Examining permissions in project "%s".' %
                         project.shortname)
                root_project = project.root_project or project
                authenticated_role = project_role(root_project,
                                                  '*authenticated')
                member_role = project_role(root_project, 'Member')

                # remove *authenticated create/update permissions
                new_acl = OrderedDict(
                    ((ace.role_id, ace.access, ace.permission), ace)
                    for ace in home_app.acl
                    if not (ace.role_id == authenticated_role._id
                            and ace.access == M.ACE.ALLOW and ace.permission in
                            ('create', 'edit', 'delete', 'unmoderated_post')))
                if (member_role._id, M.ACE.ALLOW, 'update') in new_acl:
                    del new_acl[(member_role._id, M.ACE.ALLOW, 'update')]

                # add member create/edit permissions
                new_acl[(member_role._id, M.ACE.ALLOW,
                         'create')] = M.ACE.allow(member_role._id, 'create')
                new_acl[(member_role._id, M.ACE.ALLOW,
                         'edit')] = M.ACE.allow(member_role._id, 'edit')
                new_acl[(member_role._id, M.ACE.ALLOW,
                         'unmoderated_post')] = M.ACE.allow(
                             member_role._id, 'unmoderated_post')

                if TEST:
                    log.info(
                        '...would update acl for home app in project "%s".' %
                        project.shortname)
                else:
                    log.info('...updating acl for home app in project "%s".' %
                             project.shortname)
                    home_app.config.acl = map(dict, new_acl.values())
                    session(home_app.config).flush()
Example #23
    def execute(cls, options):
        q_project = {}
        if options.nbhd:
            nbhd = M.Neighborhood.query.get(url_prefix=options.nbhd)
            if not nbhd:
                return "Invalid neighborhood url prefix."
            q_project['neighborhood_id'] = nbhd._id
        if options.project:
            q_project['shortname'] = options.project
        elif options.project_regex:
            q_project['shortname'] = {'$regex': options.project_regex}

        log.info('Refreshing last commit data')

        for chunk in chunked_find(M.Project, q_project):
            for p in chunk:
                log.info("Refreshing last commit data for project '%s'." %
                         p.shortname)
                if options.dry_run:
                    continue
                c.project = p
                if options.mount_point:
                    mount_points = [options.mount_point]
                else:
                    mount_points = [ac.options.mount_point for ac in
                                    M.AppConfig.query.find(dict(project_id=p._id))]
                for app in (p.app_instance(mp) for mp in mount_points):
                    c.app = app
                    if not hasattr(app, 'repo'):
                        continue
                    if c.app.repo.tool.lower() not in options.repo_types:
                        log.info("Skipping %r: wrong type (%s)", c.app.repo,
                                 c.app.repo.tool.lower())
                        continue

                    c.app.repo.status = 'analyzing'
                    session(c.app.repo).flush(c.app.repo)
                    try:
                        ci_ids = list(
                            reversed(list(c.app.repo.all_commit_ids())))
                        if options.clean:
                            cls._clean(ci_ids)

                        log.info('Refreshing all last commits in %r',
                                 c.app.repo)
                        cls.refresh_repo_lcds(ci_ids, options)
                        new_commit_ids = app.repo.unknown_commit_ids()
                        if len(new_commit_ids) > 0:
                            refresh.post()
                    except:
                        log.exception('Error refreshing %r', c.app.repo)
                        raise
                    finally:
                        c.app.repo.status = 'ready'
                        session(c.app.repo).flush(c.app.repo)
            ThreadLocalORMSession.flush_all()
Example #24
 def execute(cls, options):
     query = {'tool_name': {'$regex': '^svn$', '$options': 'i'},
              'options.checkout_url': ''}
     for chunk in utils.chunked_find(M.AppConfig, query):
         for config in chunk:
             repo = Repository.query.get(app_config_id=config._id)
             trunk_path = "file://{0}{1}/trunk".format(repo.fs_path, repo.name)
             if svn_path_exists(trunk_path):
                 config.options['checkout_url'] = "trunk"
                 log.info("Update checkout_url for: %s", trunk_path)
         ThreadLocalORMSession.flush_all()
def main():
    for chunk in utils.chunked_find(SM.Repository):
        for r in chunk:
            print('Processing {0}'.format(r))
            all_commit_ids = r._impl.all_commit_ids()
            if all_commit_ids:
                for commit in M.repo.Commit.query.find({'_id': {'$in': all_commit_ids}}):
                    if commit.tree_id and M.repo.Tree.query.get(_id=commit.tree_id):
                        kill_tree(r._impl, commit._id, '', commit.tree)
                ThreadLocalORMSession.flush_all()
                ThreadLocalORMSession.close_all()
def main():
    for chunk in utils.chunked_find(SM.Repository):
        for r in chunk:
            print('Processing {0}'.format(r))
            all_commit_ids = r._impl.all_commit_ids()
            if all_commit_ids:
                for commit in M.repo.Commit.query.find({'_id':{'$in':all_commit_ids}}):
                    if commit.tree_id and M.repo.Tree.query.get(_id=commit.tree_id):
                        kill_tree(r._impl, commit._id, '', commit.tree)
                ThreadLocalORMSession.flush_all()
                ThreadLocalORMSession.close_all()
 def execute(cls, options):
     auth_provider = AuthenticationProvider.get(None)
     for i, chunk in enumerate(chunked_find(M.User, {})):
         log.info('Backfilling login details for chunk #%s', i)
         for u in chunk:
             try:
                 u.backfill_login_details(auth_provider)
                 session(u).flush(u)
             except Exception:
                 log.exception('Error backfilling on user %s', u)
     log.info('Finished backfilling previous login details')
Example #28
 def test_filter_on_sort_key(self):
     query = {'username': {'$in': ['sample-user-1', 'sample-user-2', 'sample-user-3']}}
     chunks = list(utils.chunked_find(M.User,
                                      query,
                                      2,
                                      sort_key='username'))
     assert len(chunks) == 2, chunks
     assert len(chunks[0]) == 2, chunks[0]
     assert len(chunks[1]) == 1, chunks[1]
     assert_equal(chunks[0][0].username, 'sample-user-1')
     assert_equal(chunks[0][1].username, 'sample-user-2')
     assert_equal(chunks[1][0].username, 'sample-user-3')
Example #29
 def test_filter_on_sort_key(self):
     query = {'username': {'$in': ['sample-user-1', 'sample-user-2', 'sample-user-3']}}
     chunks = list(utils.chunked_find(M.User,
                                      query,
                                      2,
                                      sort_key='username'))
     assert len(chunks) == 2, chunks
     assert len(chunks[0]) == 2, chunks[0]
     assert len(chunks[1]) == 1, chunks[1]
     assert_equal(chunks[0][0].username, 'sample-user-1')
     assert_equal(chunks[0][1].username, 'sample-user-2')
     assert_equal(chunks[1][0].username, 'sample-user-3')
Example #30
    def execute(cls, options):
        # This script will indirectly call app.sidebar_menu() for every app in
        # every project. Some of the sidebar_menu methods expect the
        # pylons.request threadlocal object to be present. So, we're faking it.
        #
        # The fact that this isn't a 'real' request doesn't matter for the
        # purposes of the sitemap.
        pylons.request._push_object(webob.Request.blank('/'))

        output_path = options.output_dir
        if os.path.exists(output_path):
            raise Exception('%s directory already exists.' % output_path)
        os.mkdir(output_path)

        now = datetime.utcnow().date()
        sitemap_content_template = Template(SITEMAP_TEMPLATE)

        def write_sitemap(urls, file_no):
            sitemap_content = sitemap_content_template.render(dict(now=now, locs=urls))
            with open(os.path.join(output_path, 'sitemap-%d.xml' % file_no), 'w') as f:
                f.write(sitemap_content)

        creds = security.Credentials.get()
        locs = []
        file_count = 0

        excl_nbhd_ids = []
        if options.exclude_neighborhoods:
            prefix = ['/%s/' % n for n in options.exclude_neighborhoods]
            excl_nbhd_ids = [nbhd._id for nbhd in M.Neighborhood.query.find({'url_prefix': {'$in': prefix}})]

        # write sitemap files, MAX_SITEMAP_URLS per file
        for chunk in utils.chunked_find(M.Project, {'deleted': False, 'neighborhood_id': {'$nin': excl_nbhd_ids}}):
            for p in chunk:
                c.project = p
                try:
                    for s in p.sitemap(excluded_tools=options.exclude_tools):
                        url = config['base_url'] + s.url if s.url[0] == '/' else s.url
                        locs.append({'url': url,
                                     'date': p.last_updated.strftime("%Y-%m-%d")})

                except Exception as e:
                    print("Error creating sitemap for project '%s': %s" %
                          (p.shortname, e))
                creds.clear()
                if len(locs) >= options.urls_per_file:
                    write_sitemap(locs[:options.urls_per_file], file_count)
                    del locs[:options.urls_per_file]
                    file_count += 1
                M.main_orm_session.clear()
            ThreadLocalORMSession.close_all()
    def execute(cls, options):
        # This script will indirectly call app.sidebar_menu() for every app in
        # every project. Some of the sidebar_menu methods expect the
        # pylons.request threadlocal object to be present. So, we're faking it.
        #
        # The fact that this isn't a 'real' request doesn't matter for the
        # purposes of the sitemap.
        pylons.request._push_object(webob.Request.blank('/'))

        output_path = options.output_dir
        if os.path.exists(output_path):
            raise Exception('%s directory already exists.' % output_path)
        os.mkdir(output_path)

        now = datetime.utcnow().date()
        sitemap_content_template = Template(SITEMAP_TEMPLATE)

        def write_sitemap(urls, file_no):
            sitemap_content = sitemap_content_template.render(dict(now=now, locs=urls))
            with open(os.path.join(output_path, 'sitemap-%d.xml' % file_no), 'w') as f:
                f.write(sitemap_content)

        creds = security.Credentials.get()
        locs = []
        file_count = 0

        nbhd_id = []
        if options.neighborhood:
            prefix = ['/%s/' % n for n in options.neighborhood]
            nbhd_id = [nbhd._id for nbhd in M.Neighborhood.query.find({'url_prefix': {'$in': prefix}})]

        # write sitemap files, MAX_SITEMAP_URLS per file
        for chunk in utils.chunked_find(M.Project, {'deleted': False, 'neighborhood_id': {'$nin': nbhd_id}}):
            for p in chunk:
                c.project = p
                try:
                    for s in p.sitemap(excluded_tools=['git', 'hg', 'svn']):
                        url = config['base_url'] + s.url if s.url[0] == '/' else s.url
                        locs.append({'url': url,
                                     'date': p.last_updated.strftime("%Y-%m-%d")})

                except Exception as e:
                    print("Error creating sitemap for project '%s': %s" %
                          (p.shortname, e))
                creds.clear()
                if len(locs) >= options.urls_per_file:
                    write_sitemap(locs[:options.urls_per_file], file_count)
                    del locs[:options.urls_per_file]
                    file_count += 1
                M.main_orm_session.clear()
            ThreadLocalORMSession.close_all()
Example #32
def main():

    if TEST:
        log.info('Examining permissions for all Home Wikis')
    else:
        log.info('Fixing permissions for all Home Wikis')

    for some_projects in utils.chunked_find(M.Project, {'neighborhood_id': {
        '$nin': [ObjectId('4be2faf8898e33156f00003e'),      # /u
                 ObjectId('4dbf2563bfc09e6362000005')]}}):  # /motorola
        for project in some_projects:
            c.project = project
            home_app = project.app_instance('home')
            if isinstance(home_app, ForgeWikiApp):
                log.info('Examining permissions in project "%s".' %
                         project.shortname)
                root_project = project.root_project or project
                authenticated_role = project_role(
                    root_project, '*authenticated')
                member_role = project_role(root_project, 'Member')

                # remove *authenticated create/update permissions
                new_acl = OrderedDict(
                    ((ace.role_id, ace.access, ace.permission), ace)
                    for ace in home_app.acl
                    if not (
                        ace.role_id == authenticated_role._id and ace.access == M.ACE.ALLOW and ace.permission in (
                            'create', 'edit', 'delete', 'unmoderated_post')
                    )
                )
                if (member_role._id, M.ACE.ALLOW, 'update') in new_acl:
                    del new_acl[(member_role._id, M.ACE.ALLOW, 'update')]

                # add member create/edit permissions
                new_acl[(member_role._id, M.ACE.ALLOW, 'create')
                        ] = M.ACE.allow(member_role._id, 'create')
                new_acl[(member_role._id, M.ACE.ALLOW, 'edit')
                        ] = M.ACE.allow(member_role._id, 'edit')
                new_acl[(member_role._id, M.ACE.ALLOW, 'unmoderated_post')] = M.ACE.allow(
                    member_role._id, 'unmoderated_post')

                if TEST:
                    log.info(
                        '...would update acl for home app in project "%s".' %
                        project.shortname)
                else:
                    log.info('...updating acl for home app in project "%s".' %
                             project.shortname)
                    home_app.config.acl = map(dict, new_acl.values())
                    session(home_app.config).flush()
def main():
    query = {'tool_name': {'$regex': '^tickets$', '$options': 'i'}}
    for chunk in utils.chunked_find(M.AppConfig, query):
        for a in chunk:
            # change 'deny write' and 'write' permission
            role_ids = [(p.role_id, p.access) for p in a.acl if p.permission == 'write']
            for role_id, access in role_ids:
                if access == M.ACE.DENY:
                    add(a.acl, M.ACE.deny(role_id, 'create'))
                    add(a.acl, M.ACE.deny(role_id, 'update'))
                else:
                    add(a.acl, M.ACE.allow(role_id, 'create'))
                    add(a.acl, M.ACE.allow(role_id, 'update'))

        ThreadLocalORMSession.flush_all()
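The add() helper used in the ACL migration above (and again in Example #36 below) is not shown in this excerpt. A plausible minimal version (an assumption, not necessarily the project's actual code) appends an ACE only when an equivalent entry is not already present, keeping the migration idempotent:

def add(acl, ace):
    # Append the ACE unless the ACL already contains an equal entry.
    if ace not in acl:
        acl.append(ace)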
    def execute(cls, options):
        for i, chunk in enumerate(chunked_find(M.User, {})):
            log.info('Adding default setting for chunk #%s', i)
            for u in chunk:
                try:
                    u.set_pref('mention_notifications', True)
                    session(u).flush(u)
                except Exception:
                    log.exception('Error processing on user %s', u)

            main_orm_session.clear()  # AuditLog and User objs
            main_explicitflush_orm_session.clear()  # UserLoginDetails objs, already flushed individually

        log.info('Finished adding default user notification setting')
    def execute(cls, options):
        auth_provider = AuthenticationProvider.get(None)
        for i, chunk in enumerate(chunked_find(M.User, {}, pagesize=2)):
            log.info('Backfilling login details for chunk #%s', i)
            for u in chunk:
                try:
                    u.backfill_login_details(auth_provider)
                except Exception:
                    log.exception('Error backfilling on user %s', u)

            main_orm_session.clear()  # AuditLog and User objs
            main_explicitflush_orm_session.clear()  # UserLoginDetails objs, already flushed individually

        log.info('Finished backfilling previous login details')
Example #36
def main():
    query = {'tool_name': {'$regex': '^tickets$', '$options': 'i'}}
    for chunk in utils.chunked_find(M.AppConfig, query):
        for a in chunk:
            # change 'deny write' and 'write' permission
            role_ids = [(p.role_id, p.access) for p in a.acl
                        if p.permission == 'write']
            for role_id, access in role_ids:
                if access == M.ACE.DENY:
                    add(a.acl, M.ACE.deny(role_id, 'create'))
                    add(a.acl, M.ACE.deny(role_id, 'update'))
                else:
                    add(a.acl, M.ACE.allow(role_id, 'create'))
                    add(a.acl, M.ACE.allow(role_id, 'update'))

        ThreadLocalORMSession.flush_all()
Example #37
 def command(self):
     from allura import model as M
     self.basic_setup()
     main_indexes = defaultdict(
         lambda: defaultdict(list))  # by db, then collection name
     project_indexes = defaultdict(list)  # by collection name
     base.log.info('Collecting indexes...')
     for m in Mapper.all_mappers():
         mgr = m.collection.m
         cname = mgr.collection_name
         cls = m.mapped_class
         if cname is None:
             base.log.info('... skipping abstract class %s', cls)
             continue
         base.log.info('... for class %s', cls)
         if session(cls) in (M.main_orm_session, M.repository_orm_session,
                             M.task_orm_session):
             idx = main_indexes[session(cls)][cname]
         else:
             idx = project_indexes[cname]
         idx.extend(mgr.indexes)
     base.log.info('Updating indexes for main DB')
     for odm_session, db_indexes in main_indexes.iteritems():
         db = odm_session.impl.db
         for name, indexes in db_indexes.iteritems():
             self._update_indexes(db[name], indexes)
     base.log.info('Updating indexes for project DBs')
     configured_dbs = set()
     for projects in utils.chunked_find(M.Project):
         for p in projects:
             db = p.database_uri
             if db in configured_dbs: continue
             configured_dbs.add(db)
             c.project = p
             db = M.project_doc_session.db
             base.log.info('... DB: %s', db)
             for name, indexes in project_indexes.iteritems():
                 self._update_indexes(db[name], indexes)
     if not configured_dbs:
         # e.g. during bootstrap with no projects
         db = M.project_doc_session.db
         base.log.info('... default DB: %s', db)
         for name, indexes in project_indexes.iteritems():
             self._update_indexes(db[name], indexes)
     base.log.info('Done updating indexes')
Example #38
 def command(self):
     from allura import model as M
     self.basic_setup()
     main_indexes = defaultdict(lambda: defaultdict(list))  # by db, then collection name
     project_indexes = defaultdict(list)  # by collection name
     base.log.info('Collecting indexes...')
     for m in Mapper.all_mappers():
         mgr = m.collection.m
         cname = mgr.collection_name
         cls = m.mapped_class
         if cname is None:
             base.log.info('... skipping abstract class %s', cls)
             continue
         base.log.info('... for class %s', cls)
         if session(cls) in (
             M.main_orm_session, M.repository_orm_session, M.task_orm_session):
             idx = main_indexes[session(cls)][cname]
         else:
             idx = project_indexes[cname]
         idx.extend(mgr.indexes)
     base.log.info('Updating indexes for main DB')
     for odm_session, db_indexes in main_indexes.iteritems():
         db = odm_session.impl.db
         for name, indexes in db_indexes.iteritems():
             self._update_indexes(db[name], indexes)
     base.log.info('Updating indexes for project DBs')
     configured_dbs = set()
     for projects in utils.chunked_find(M.Project):
         for p in projects:
             db = p.database_uri
             if db in configured_dbs: continue
             configured_dbs.add(db)
             c.project = p
             db = M.project_doc_session.db
             base.log.info('... DB: %s', db)
             for name, indexes in project_indexes.iteritems():
                 self._update_indexes(db[name], indexes)
     if not configured_dbs:
         # e.g. during bootstrap with no projects
         db = M.project_doc_session.db
         base.log.info('... default DB: %s', db)
         for name, indexes in project_indexes.iteritems():
             self._update_indexes(db[name], indexes)
     base.log.info('Done updating indexes')
Example #39
def main():
    users = M.Neighborhood.query.get(name='Users')
    for chunk in utils.chunked_find(M.Project, {'neighborhood_id': users._id}):
        for p in chunk:
            user = p.user_project_of
            if not user:
                continue

            description = p.description
            if description is None or re.match(default_description,
                                               description):
                continue

            app = p.app_instance('wiki')
            if app is None:
                try:
                    app = p.install_app('wiki')
                except Exception as e:
                    log.error("Unable to install wiki for user %s: %s" %
                              (user.username, str(e)))
                    continue

            page = WM.Page.query.get(app_config_id=app.config._id,
                                     title='Home')
            if page is None:
                continue

            c.app = app
            c.project = p
            c.user = user

            if "This is the personal project of" in page.text:
                if description not in page.text:
                    page.text = "%s\n\n%s" % (page.text, description)
                    log.info("Update wiki home page text for %s" %
                             user.username)
            elif "This is the default page" in page.text:
                page.text = default_personal_project_tmpl % (user.display_name,
                                                             description)
                log.info("Update wiki home page text for %s" % user.username)
            else:
                pass

        ThreadLocalORMSession.flush_all()
Example #40
 def execute(cls, options):
     for chunk in chunked_find(M.User, {}):
         user_ids = []
         for u in chunk:
             log.info('Reindex user %s', u.username)
             if options.dry_run:
                 continue
             user_ids.append(u._id)
         try:
             for chunk in chunked_list(user_ids, options.max_chunk):
                 if options.tasks:
                     cls._post_add_users(chunk)
                 else:
                     add_users(chunk)
         except CompoundError as err:
             log.exception('Error indexing users:\n%r', err)
             log.error('%s', err.format_error())
         M.main_orm_session.flush()
         M.main_orm_session.clear()
Example #41
 def execute(cls, options):
     for chunk in chunked_find(M.User, {}):
         user_ids = []
         for u in chunk:
             log.info('Reindex user %s', u.username)
             if options.dry_run:
                 continue
             user_ids.append(u._id)
         try:
             for chunk in chunked_list(user_ids, options.max_chunk):
                 if options.tasks:
                     cls._post_add_users(chunk)
                 else:
                     add_users(chunk)
         except CompoundError as err:
             log.exception('Error indexing users:\n%r', err)
             log.error('%s', err.format_error())
         M.main_orm_session.flush()
         M.main_orm_session.clear()
Example #42
def main(options):
    log.addHandler(logging.StreamHandler(sys.stdout))
    log.setLevel(getattr(logging, options.log_level.upper()))

    nbhd = M.Neighborhood.query.get(name=options.neighborhood)
    if not nbhd:
        return 'Invalid neighborhood "%s".' % options.neighborhood
    admin_role = M.ProjectRole.by_name('Admin',
                                       project=nbhd.neighborhood_project)
    nbhd_admin = admin_role.users_with_role(
        project=nbhd.neighborhood_project)[0].user
    log.info('Making updates as neighborhood admin "%s"' % nbhd_admin.username)

    q = {
        'neighborhood_id': nbhd._id,
        'is_nbhd_project': False,
        'deleted': False
    }
    private_count = public_count = 0
    for projects in utils.chunked_find(M.Project, q):
        for p in projects:
            role_anon = M.ProjectRole.upsert(name='*anonymous',
                                             project_id=p.root_project._id)
            if M.ACE.allow(role_anon._id, 'read') not in p.acl:
                if options.test:
                    log.info('Would be made public: "%s"' % p.shortname)
                else:
                    log.info('Making public: "%s"' % p.shortname)
                    p.acl.append(M.ACE.allow(role_anon._id, 'read'))
                    with h.push_config(c, project=p, user=nbhd_admin):
                        ThreadLocalORMSession.flush_all()
                        g.post_event('project_updated')
                private_count += 1
            else:
                log.info('Already public: "%s"' % p.shortname)
                public_count += 1

    log.info('Already public: %s' % public_count)
    if options.test:
        log.info('Would be made public: %s' % private_count)
    else:
        log.info('Made public: %s' % private_count)
    return 0
Example #44
def main(opts):
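    # Rename wiki pages whose titles contain '/' by replacing it with '-',
    # optionally restricted to the wikis of one neighborhood or project,
    # and flush without bumping last_updated.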
    if opts.project and not opts.nbhd:
        error('Specify neighborhood')
    p_query = {}
    if opts.nbhd:
        nbhd = M.Neighborhood.query.get(url_prefix=opts.nbhd)
        if not nbhd:
            error("Can't find such neighborhood")
        p_query['neighborhood_id'] = nbhd._id
        if opts.project:
            p_query['shortname'] = opts.project

        projects = M.Project.query.find(p_query).all()
        if not projects:
            error('No project matches given parameters')

        app_config_ids = []
        for p in projects:
            for ac in p.app_configs:
                if ac.tool_name.lower() == 'wiki':
                    app_config_ids.append(ac._id)

        if not app_config_ids:
            error('No wikis in given projects')
        query = {'app_config_id': {'$in': app_config_ids}}
    else:
        query = {}

    M.artifact_orm_session._get().skip_last_updated = True
    try:
        for chunk in chunked_find(Page, query):
            for page in chunk:
                if '/' in page.title:
                    log.info('Found {} in {}'.format(page.title,
                                                     page.app_config.url()))
                    page.title = page.title.replace('/', '-')
                    with h.push_context(page.app_config.project._id,
                                        app_config_id=page.app_config_id):
                        session(page).flush(page)
    finally:
        M.artifact_orm_session._get().skip_last_updated = False
Example #45
def main():
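    # For each post in the given project's tool, if anonymous users can
    # moderate it, locate the ALLOW 'moderate' ACE on the post and revoke it.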
    args = arguments()
    
    c.project = None # to avoid error in Artifact.__mongometa__.before_save
    project = M.Project.query.get(shortname=args.shortname)
    tool = project.app_config_by_tool_type(args.toolname)

    for chunk in utils.chunked_find(ForumPost, {'app_config_id': tool._id}):
        for p in chunk:
            has_access = bool(security.has_access(p, 'moderate', M.User.anonymous()))

            if has_access:
                anon_role_id = None
                for acl in p.acl:
                    # find the anon moderate acl
                    if acl.permission == 'moderate' and acl.access == 'ALLOW':
                        anon_role_id = acl.role_id

                if anon_role_id:
                    print "revoking anon moderate privelege for '{}'".format(p._id)
                    security.simple_revoke(p.acl, anon_role_id, 'moderate')
                    session(p).flush(p)
Example #48
def main(opts):
    if opts.project and not opts.nbhd:
        error("Specify neighborhood")
    p_query = {}
    if opts.nbhd:
        nbhd = M.Neighborhood.query.get(url_prefix=opts.nbhd)
        if not nbhd:
            error("Can't find such neighborhood")
        p_query["neighborhood_id"] = nbhd._id
        if opts.project:
            p_query["shortname"] = opts.project

        projects = M.Project.query.find(p_query).all()
        if not projects:
            error("No project matches given parameters")

        app_config_ids = []
        for p in projects:
            for ac in p.app_configs:
                if ac.tool_name.lower() == "wiki":
                    app_config_ids.append(ac._id)

        if not app_config_ids:
            error("No wikis in given projects")
        query = {"app_config_id": {"$in": app_config_ids}}
    else:
        query = {}

    M.artifact_orm_session._get().skip_last_updated = True
    try:
        for chunk in chunked_find(Page, query):
            for page in chunk:
                if "/" in page.title:
                    log.info("Found {} in {}".format(page.title, page.app_config.url()))
                    page.title = page.title.replace("/", "-")
                    with h.push_context(page.app_config.project._id, app_config_id=page.app_config_id):
                        session(page).flush(page)
    finally:
        M.artifact_orm_session._get().skip_last_updated = False
Example #49
def main():
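    # Find projects whose mounted tools share duplicate ordinal values and
    # renumber the mounts sequentially; run with 'test' to only report them.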
    test = sys.argv[-1] == 'test'
    num_projects_examined = 0
    log.info('Examining all projects for mount order.')
    for some_projects in utils.chunked_find(M.Project):
        for project in some_projects:
            c.project = project
            mounts = project.ordered_mounts(include_hidden=True)

            # ordered_mounts() means duplicate ordinals (if any) will be next
            # to each other
            duplicates_found = False
            prev_ordinal = None
            for mount in mounts:
                if mount['ordinal'] == prev_ordinal:
                    duplicates_found = True
                    break
                prev_ordinal = mount['ordinal']

            if duplicates_found:
                if test:
                    log.info('Would renumber mounts for project "%s".' %
                             project.shortname)
                else:
                    log.info('Renumbering mounts for project "%s".' %
                             project.shortname)
                    for i, mount in enumerate(mounts):
                        if 'ac' in mount:
                            mount['ac'].options['ordinal'] = i
                        elif 'sub' in mount:
                            mount['sub'].ordinal = i
                    ThreadLocalORMSession.flush_all()

            num_projects_examined += 1
            session(project).clear()

        log.info('%s projects examined.' % num_projects_examined)
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()
Example #51
 def test_can_iterate(self):
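     # chunked_find with a page size of 2 should yield more than one chunk,
     # with the first chunk holding exactly two users.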
     chunks = list(utils.chunked_find(M.User, {}, 2))
     assert len(chunks) > 1, chunks
     assert len(chunks[0]) == 2, chunks[0]
Example #53
File: macro.py Project: Bitergia/allura
def get_projects_for_macro(category=None, display_mode='grid', sort='last_updated',
        show_total=False, limit=100, labels='', award='', private=False,
        columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
        grid_view_tools='',
        initial_q={}):
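    # Build the rendered project list for the projects wiki macro: filter by
    # labels, trove category, award and (optionally) private-only, sort by
    # last_updated / alpha / random / last_registered, and cap at limit.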
    from allura.lib.widgets.project_list import ProjectList
    from allura.lib import utils
    from allura import model as M
    # 'trove' is internal substitution for 'category' filter in wiki macro
    trove = category
    limit = int(limit)
    q = dict(
        deleted=False,
        is_nbhd_project=False)
    q.update(initial_q)

    if labels:
        or_labels = labels.split('|')
        q['$or'] = [{'labels': {'$all': l.split(',')}} for l in or_labels]
    if trove is not None:
        trove = M.TroveCategory.query.get(fullpath=trove)
    if award:
        aw = M.Award.query.find(dict(
            created_by_neighborhood_id=c.project.neighborhood_id,
            short=award)).first()
        if aw:
            ids = [grant.granted_to_project_id for grant in
                M.AwardGrant.query.find(dict(
                    granted_by_neighborhood_id=c.project.neighborhood_id,
                    award_id=aw._id))]
            if '_id' in q:
                ids = list(set(q['_id']['$in']).intersection(ids))
            q['_id'] = {'$in': ids}

    if trove is not None:
        q['trove_' + trove.type] = trove._id
    sort_key, sort_dir = 'last_updated', pymongo.DESCENDING
    if sort == 'alpha':
        sort_key, sort_dir = 'name', pymongo.ASCENDING
    elif sort == 'random':
        sort_key, sort_dir = None, None
    elif sort == 'last_registered':
        sort_key, sort_dir = '_id', pymongo.DESCENDING
    elif sort == '_id':
        sort_key, sort_dir = '_id', pymongo.DESCENDING

    projects = []
    if private:
        # Only return private projects.
        # Can't filter these with a mongo query directly - have to iterate
        # through and check the ACL of each project.
        for chunk in utils.chunked_find(M.Project, q, sort_key=sort_key,
                sort_dir=sort_dir):
            projects.extend([p for p in chunk if p.private])
        total = len(projects)
        if sort == 'random':
            projects = random.sample(projects, min(limit, total))
        else:
            projects = projects[:limit]
    else:
        total = None
        if sort == 'random':
            # MongoDB doesn't have a random sort built in, so...
            # 1. Do a direct pymongo query (faster than ORM) to fetch just the
            #    _ids of objects that match our criteria
            # 2. Choose a random sample of those _ids
            # 3. Do an ORM query to fetch the objects with those _ids
            # 4. Shuffle the results
            from ming.orm import mapper
            m = mapper(M.Project)
            collection = M.main_doc_session.db[m.collection.m.collection_name]
            docs = list(collection.find(q, {'_id': 1}))
            if docs:
                ids = [doc['_id'] for doc in
                        random.sample(docs, min(limit, len(docs)))]
                if '_id' in q:
                    ids = list(set(q['_id']['$in']).intersection(ids))
                q['_id'] = {'$in': ids}
                projects = M.Project.query.find(q).all()
                random.shuffle(projects)
        else:
            projects = M.Project.query.find(q).limit(limit).sort(sort_key,
                sort_dir).all()

    pl = ProjectList()
    g.resource_manager.register(pl)
    response = pl.display(projects=projects, display_mode=display_mode,
                          columns=columns, show_proj_icon=show_proj_icon,
                          show_download_button=show_download_button,
                          show_awards_banner=show_awards_banner,
                          grid_view_tools=grid_view_tools)
    if show_total:
        if total is None:
            total = 0
            for p in M.Project.query.find(q):
                if h.has_access(p, 'read')():
                    total = total + 1
        response = '<p class="macro_projects_total">%s Projects</p>%s' % \
                (total, response)
    return response
Example #54
 def test_can_iterate(self):
     from allura import model as M
     chunks = [
         chunk for chunk in utils.chunked_find(M.User, {}, 2) ]
     assert len(chunks) > 1, chunks
     assert len(chunks[0]) == 2, chunks[0]
Example #55
    def execute(cls, options):
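        # Refresh repository data for the matching projects and mount points:
        # optionally purge existing CommitDoc / LastCommitDoc records first
        # (options.clean / options.clean_after), then re-run repo.refresh(),
        # under cProfile if requested.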
        q_project = {}
        if options.nbhd:
            nbhd = M.Neighborhood.query.get(url_prefix=options.nbhd)
            if not nbhd:
                return "Invalid neighborhood url prefix."
            q_project['neighborhood_id'] = nbhd._id
        if options.project:
            q_project['shortname'] = options.project
        elif options.project_regex:
            q_project['shortname'] = {'$regex': options.project_regex}

        log.info('Refreshing repositories')
        for chunk in chunked_find(M.Project, q_project):
            for p in chunk:
                log.info("Refreshing repos for project '%s'." % p.shortname)
                if options.dry_run:
                    continue
                c.project = p
                if options.mount_point:
                    mount_points = [options.mount_point]
                else:
                    mount_points = [ac.options.mount_point for ac in
                                    M.AppConfig.query.find(dict(project_id=p._id))]
                for app in (p.app_instance(mp) for mp in mount_points):
                    c.app = app
                    if not hasattr(app, 'repo'):
                        continue
                    if c.app.repo.tool.lower() not in options.repo_types:
                        log.info("Skipping %r: wrong type (%s)", c.app.repo,
                                 c.app.repo.tool.lower())
                        continue

                    ci_ids = []
                    if options.clean:
                        ci_ids = list(c.app.repo.all_commit_ids())
                    elif options.clean_after:
                        for ci in M.repository.CommitDoc.m.find({'repo_ids': c.app.repo._id,
                                                                 'committed.date': {'$gt': options.clean_after}}):
                            ci_ids.append(ci._id)

                    if ci_ids:
                        log.info("Deleting mongo data for %i commits...",
                                 len(ci_ids))
                        # delete these in chunks, otherwise the query doc can
                        # exceed the max BSON size limit (16MB at the moment)
                        for ci_ids_chunk in chunked_list(ci_ids, 3000):
                            i = M.repository.CommitDoc.m.find(
                                {"_id": {"$in": ci_ids_chunk}}).count()
                            if i:
                                log.info("Deleting %i CommitDoc docs...", i)
                                M.repository.CommitDoc.m.remove(
                                    {"_id": {"$in": ci_ids_chunk}})

                        # we used to have a TreesDoc (plural) collection to provide a mapping of commit_id to tree_id
                        # so that we could clear the relevant TreeDoc records
                        # it's ok though, since they are created in refresh_tree_info() and overwrite existing records

                        for ci_ids_chunk in chunked_list(ci_ids, 3000):
                            # delete LastCommitDocs
                            i = M.repository.LastCommitDoc.m.find(
                                dict(commit_id={'$in': ci_ids_chunk})).count()
                            if i:
                                log.info(
                                    "Deleting %i LastCommitDoc docs...", i)
                                M.repository.LastCommitDoc.m.remove(
                                    dict(commit_id={'$in': ci_ids_chunk}))

                        del ci_ids

                    try:
                        if options.all:
                            log.info('Refreshing ALL commits in %r',
                                     c.app.repo)
                        else:
                            log.info('Refreshing NEW commits in %r',
                                     c.app.repo)
                        if options.profile:
                            import cProfile
                            cProfile.runctx(
                                'c.app.repo.refresh(options.all, notify=options.notify, '
                                '   commits_are_new=options.commits_are_new)',
                                globals(), locals(), 'refresh.profile')
                        else:
                            c.app.repo.refresh(
                                options.all, notify=options.notify, commits_are_new=options.commits_are_new)
                    except Exception:
                        log.exception('Error refreshing %r', c.app.repo)
            ThreadLocalORMSession.flush_all()
Example #56
    now = datetime.utcnow().date()
    sitemap_content_template = Template(SITEMAP_TEMPLATE)

    def write_sitemap(urls, file_no):
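        # Render sitemap-<file_no>.xml into output_path with the given URLs.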
        sitemap_content = sitemap_content_template.render(dict(
            now=now, locs=urls))
        with open(os.path.join(output_path, 'sitemap-%d.xml' % file_no), 'w') as f:
            f.write(sitemap_content)

    creds = security.Credentials.get()
    locs = []
    file_count = 0
    # write sitemap files, MAX_SITEMAP_URLS per file
    for chunk in utils.chunked_find(M.Project):
        for p in chunk:
            c.project = p
            try:
                locs += [BASE_URL + s.url if s.url[0] == '/' else s.url
                         for s in p.sitemap(excluded_tools=['git', 'hg', 'svn'])]
            except Exception as e:
                print("Error creating sitemap for project '%s': %s" %
                      (p.shortname, e))
            creds.clear()
            if len(locs) >= options.urls_per_file:
                write_sitemap(locs[:options.urls_per_file], file_count)
                del locs[:options.urls_per_file]
                file_count += 1
            M.main_orm_session.clear()
        ThreadLocalORMSession.close_all()
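
All of the snippets above share the same batching idiom. Below is a minimal sketch of that pattern, not taken from any one script: the process_all_projects name and its handle callback are illustrative, and the imports assume the allura.model / allura.lib.utils / ming.orm paths used elsewhere on this page.

from allura import model as M
from allura.lib import utils
from ming.orm import ThreadLocalORMSession


def process_all_projects(handle, query=None):
    # Iterate projects in fixed-size batches rather than loading them all.
    for chunk in utils.chunked_find(M.Project, query or {'deleted': False}):
        for project in chunk:
            handle(project)  # per-document work goes here
        # Flush pending changes and drop cached objects after each chunk so
        # memory use stays bounded, as the scripts above do.
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()

Some of the scripts above substitute M.main_orm_session.clear() or session(obj).flush(obj) for the blanket flush/close, but the chunk-then-flush shape is the same.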