Exemplo n.º 1
0
 def index(self, **kw):
     """List posts for moderation, filtered and paginated.

     Recognized keyword filters (validated by ``WidgetConfig.post_filter``):
     ``page``, ``limit``, ``status`` ('-' means any status), ``username``
     (post author), and ``flag`` (minimum number of flags).
     """
     kw = WidgetConfig.post_filter.validate(kw, None)
     page = kw.pop('page', 0)
     limit = kw.pop('limit', 50)
     status = kw.pop('status', 'pending')
     username = kw.pop('username', None)
     flag = kw.pop('flag', None)
     # expose the widgets to the template via the thread-local context
     c.post_filter = WidgetConfig.post_filter
     c.moderate_posts = WidgetConfig.moderate_posts
     c.page_list = WidgetConfig.page_list
     criteria = {
         'discussion_id': self.discussion._id,
         'deleted': False,
     }
     if status != '-':
         criteria['status'] = status
     if flag:
         criteria['flags'] = {'$gte': int(flag)}
     if username:
         author = User.by_username(username)
         # an unknown username matches nothing (author_id=None)
         criteria['author_id'] = author._id if author else None
     cursor = self.PostModel.query.find(criteria).sort('timestamp', -1)
     count = cursor.count()
     limit, page, start = g.handle_paging(limit, page or 0, default=50)
     cursor = cursor.skip(start).limit(limit)
     return dict(discussion=self.discussion,
                 posts=cursor, page=page, limit=limit,
                 status=status, flag=flag, username=username,
                 pgnum=(page // limit) + 1,
                 pages=(count // limit) + 1,
                 count=count)
Exemplo n.º 2
0
 def index(self, **kw):
     """List posts for moderation, filtered and paginated.

     Recognized keyword filters (validated by ``WidgetConfig.post_filter``):
     ``page``, ``limit``, ``status`` ('-' means any status), ``username``
     (post author), and ``flag`` (minimum number of flags).
     """
     kw = WidgetConfig.post_filter.validate(kw, None)
     page = kw.pop('page', 0)
     limit = kw.pop('limit', 50)
     status = kw.pop('status', 'pending')
     username = kw.pop('username', None)
     flag = kw.pop('flag', None)
     # expose the widgets to the template via the thread-local context
     c.post_filter = WidgetConfig.post_filter
     c.moderate_posts = WidgetConfig.moderate_posts
     criteria = {
         'discussion_id': self.discussion._id,
         'deleted': False,
     }
     if status != '-':
         criteria['status'] = status
     if flag:
         criteria['flags'] = {'$gte': int(flag)}
     if username:
         author = User.by_username(username)
         # an unknown username matches nothing (author_id=None)
         criteria['author_id'] = author._id if author else None
     cursor = self.PostModel.query.find(criteria)
     count = cursor.count()
     limit, page, start = g.handle_paging(limit, page or 0, default=50)
     cursor = cursor.skip(start).limit(limit)
     return dict(discussion=self.discussion,
                 posts=cursor,
                 page=page,
                 limit=limit,
                 status=status,
                 flag=flag,
                 username=username,
                 pgnum=(page // limit) + 1,
                 pages=(count // limit) + 1)
Exemplo n.º 3
0
 def clone_command(self, category, username=''):
     '''Return a string suitable for copy/paste that would clone this repo locally.

     :param category: one of 'ro' (read-only), 'rw' (read/write), or
         'https' (read/write via https)
     :param username: username to embed in the clone URL; defaults to the
         current authenticated user, or '' when anonymous
     '''
     if not username and c.user not in (None, User.anonymous()):
         username = c.user.username
     # prefer a category-specific template, fall back to the tool-wide one
     tpl = string.Template(tg.config.get('scm.clone.%s.%s' % (category, self.tool)) or
                           tg.config.get('scm.clone.%s' % self.tool))
     # default to '' so a tool config without a checkout_url option doesn't
     # raise TypeError on the string concatenation below
     checkout_url = c.app.config.options.get('checkout_url', '')
     return tpl.substitute(dict(username=username,
                                source_url=self.clone_url(category, username) + checkout_url,
                                dest_path=self.suggested_clone_dest_path()))
Exemplo n.º 4
0
 def save_moderation_bulk_user(self, username, **kw):
     """Apply a moderation action to all of a user's posts in this app.

     Looks up the user, finds every non-deleted post they authored across
     all discussions in the current app, and delegates the actual
     moderation to ``save_moderation``.

     NOTE(review): ``User.by_username`` can return None for an unknown
     username, which would raise AttributeError on ``user._id`` below —
     confirm callers always pass a valid username.
     """
     # this is used by post.js as a quick way to deal with all a user's posts
     user = User.by_username(username)
     posts = self.PostModel.query.find({
         'author_id': user._id,
         'deleted': False,
         # this is what the main moderation forms does (e.g. single discussion within a forum app)
         # 'discussion_id': self.discussion._id
         # but instead want to do all discussions within this app
         'app_config_id': c.app.config._id
     })
     return self.save_moderation(posts, **kw)
Exemplo n.º 5
0
 def save_moderation_bulk_user(self, username, **kw):
     """Apply a moderation action to all of a user's posts in this app.

     Used by post.js as a quick way to deal with all of a user's posts at
     once.  Unlike the single-discussion moderation form (which filters on
     ``discussion_id``), this matches posts across every discussion in the
     current app via ``app_config_id``.
     """
     author = User.by_username(username)
     criteria = {
         'author_id': author._id,
         'deleted': False,
         'app_config_id': c.app.config._id,
     }
     matching_posts = self.PostModel.query.find(criteria)
     return self.save_moderation(matching_posts, **kw)
Exemplo n.º 6
0
def create_user(**kw):
    """Build and return a new ``User`` from the given keyword arguments."""
    user = User(**kw)
    return user
Exemplo n.º 7
0
def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
    """Refresh all database state derived from the commits in *repo*.

    :param repo: repository model whose commit metadata is refreshed
    :param all_commits: when True, re-process every commit instead of only
        the commits not yet known to the DB
    :param notify: when True, send commit notifications at the end
    :param new_clone: True when the repo was only just cloned; suppresses
        per-commit activity/webhook generation
    """
    all_commit_ids = commit_ids = list(repo.all_commit_ids())
    if not commit_ids:
        # the repo is empty, no need to continue
        return
    new_commit_ids = unknown_commit_ids(commit_ids)
    stats_log = h.log_action(log, 'commit')
    for ci in new_commit_ids:
        stats_log.info('', meta=dict(module='scm-%s' % repo.repo_id, read='0'))
    if not all_commits:
        # Skip commits that are already in the DB
        commit_ids = new_commit_ids
    log.info('Refreshing %d commits on %s', len(commit_ids), repo.full_fs_path)

    # Refresh commits
    seen = set()
    for i, oid in enumerate(commit_ids):
        repo.refresh_commit_info(oid, seen, not all_commits)
        if (i + 1) % 100 == 0:
            log.info('Refresh commit info %d: %s', (i + 1), oid)

    refresh_commit_repos(all_commit_ids, repo)

    # Refresh child references
    for i, oid in enumerate(commit_ids):
        # next() builtin rather than cursor.next() for Python 3 compatibility
        ci = next(CommitDoc.m.find(dict(_id=oid), validate=False))
        refresh_children(ci)
        if (i + 1) % 100 == 0:
            log.info('Refresh child info %d for parents of %s', (i + 1),
                     ci._id)

    if repo._refresh_precompute:
        # Refresh commit runs
        commit_run_ids = commit_ids
        # Check if the CommitRuns for the repo are in a good state by checking for
        # a CommitRunDoc that contains the last known commit. If there isn't one,
        # the CommitRuns for this repo are in a bad state - rebuild them
        # entirely.
        if commit_run_ids != all_commit_ids:
            last_commit = last_known_commit_id(all_commit_ids, new_commit_ids)
            log.info('Last known commit id: %s', last_commit)
            if not CommitRunDoc.m.find(dict(commit_ids=last_commit)).count():
                log.info('CommitRun incomplete, rebuilding with all commits')
                commit_run_ids = all_commit_ids
        log.info('Starting CommitRunBuilder for %s', repo.full_fs_path)
        rb = CommitRunBuilder(commit_run_ids)
        rb.run()
        rb.cleanup()
        log.info('Finished CommitRunBuilder for %s', repo.full_fs_path)

    # Clear any existing caches for branches/tags
    if repo.cached_branches:
        repo.cached_branches = []
        session(repo).flush()

    if repo.cached_tags:
        repo.cached_tags = []
        session(repo).flush()
    # The first view can be expensive to cache,
    # so we want to do it here instead of on the first view.
    repo.get_branches()
    repo.get_tags()

    if not all_commits and not new_clone:
        for commit in commit_ids:
            new = repo.commit(commit)
            user = User.by_email_address(new.committed.email)
            if user is None:
                user = User.by_username(new.committed.name)
            if user is not None:
                g.statsUpdater.newCommit(new, repo.app_config.project, user)
            actor = user or TransientActor(
                activity_name=new.committed.name or new.committed.email)
            g.director.create_activity(actor,
                                       'committed',
                                       new,
                                       related_nodes=[repo.app_config.project],
                                       tags=['commit',
                                             repo.tool.lower()])

        from allura.webhooks import RepoPushWebhookSender
        by_branches, by_tags = _group_commits(repo, commit_ids)
        params = []
        # .items() rather than .iteritems() for Python 3 compatibility
        for b, commits in by_branches.items():
            ref = u'refs/heads/{}'.format(b) if b != '__default__' else None
            params.append(dict(commit_ids=commits, ref=ref))
        for t, commits in by_tags.items():
            ref = u'refs/tags/{}'.format(t)
            params.append(dict(commit_ids=commits, ref=ref))
        if params:
            RepoPushWebhookSender().send(params)

    log.info('Refresh complete for %s', repo.full_fs_path)
    g.post_event('repo_refreshed', len(commit_ids), all_commits, new_clone)

    # Send notifications
    if notify:
        send_notifications(repo, reversed(commit_ids))
Exemplo n.º 8
0
def refresh_repo(repo,
                 all_commits=False,
                 notify=True,
                 new_clone=False,
                 commits_are_new=None):
    """Refresh all database state derived from the commits in *repo*.

    :param repo: repository model whose commit metadata is refreshed
    :param all_commits: when True, re-process every commit instead of only
        the commits not yet known to the DB
    :param notify: when True, send commit notifications at the end
    :param new_clone: True when the repository was only just cloned
    :param commits_are_new: whether to generate activity records and
        webhooks for the refreshed commits; defaults to
        ``not all_commits and not new_clone``
    """
    if commits_are_new is None:
        commits_are_new = not all_commits and not new_clone

    all_commit_ids = commit_ids = list(repo.all_commit_ids())
    if not commit_ids:
        # the repo is empty, no need to continue
        return
    new_commit_ids = unknown_commit_ids(commit_ids)
    if not all_commits:
        # Skip commits that are already in the DB
        commit_ids = new_commit_ids
    log.info('Refreshing %d commits on %s', len(commit_ids), repo.full_fs_path)

    # Refresh commits
    seen = set()
    for i, oid in enumerate(commit_ids):
        repo.refresh_commit_info(oid, seen, not all_commits)
        if (i + 1) % 100 == 0:
            log.info('Refresh commit info %d: %s', (i + 1), oid)

    refresh_commit_repos(all_commit_ids, repo)

    # Refresh child references
    for i, oid in enumerate(commit_ids):
        ci = next(CommitDoc.m.find(dict(_id=oid), validate=False))
        refresh_children(ci)
        if (i + 1) % 100 == 0:
            log.info('Refresh child info %d for parents of %s', (i + 1),
                     ci._id)

    # Clear any existing caches for branches/tags
    if repo.cached_branches:
        repo.cached_branches = []
        session(repo).flush()

    if repo.cached_tags:
        repo.cached_tags = []
        session(repo).flush()
    # The first view can be expensive to cache,
    # so we want to do it here instead of on the first view.
    repo.get_branches()
    repo.get_tags()

    if commits_are_new:
        # Record stats, activity entries, and webhook payloads per commit.
        for commit in commit_ids:
            new = repo.commit(commit)
            # try to attribute the commit to a known user by email, then name
            user = User.by_email_address(new.committed.email)
            if user is None:
                user = User.by_username(new.committed.name)
            if user is not None:
                g.statsUpdater.newCommit(new, repo.app_config.project, user)
            actor = user or TransientActor(
                activity_name=new.committed.name or new.committed.email)
            g.director.create_activity(actor,
                                       'committed',
                                       new,
                                       related_nodes=[repo.app_config.project],
                                       tags=['commit',
                                             repo.tool.lower()])

        from allura.webhooks import RepoPushWebhookSender
        by_branches, by_tags = _group_commits(repo, commit_ids)
        params = []
        for b, commits in six.iteritems(by_branches):
            # '__default__' marks commits not on a named branch
            ref = 'refs/heads/{}'.format(b) if b != '__default__' else None
            params.append(dict(commit_ids=commits, ref=ref))
        for t, commits in six.iteritems(by_tags):
            ref = 'refs/tags/{}'.format(t)
            params.append(dict(commit_ids=commits, ref=ref))
        if params:
            RepoPushWebhookSender().send(params)

    log.info('Refresh complete for %s', repo.full_fs_path)
    g.post_event('repo_refreshed', len(commit_ids), all_commits, new_clone)

    # Send notifications
    if notify:
        send_notifications(repo, reversed(commit_ids))
Exemplo n.º 9
0
def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
    """Refresh all database state derived from the commits in *repo*.

    :param repo: repository model whose commit metadata is refreshed
    :param all_commits: when True, re-process every commit instead of only
        the commits not yet known to the DB
    :param notify: when True, send commit notifications at the end
    :param new_clone: True when the repo was only just cloned; suppresses
        per-commit activity/webhook generation
    """
    all_commit_ids = commit_ids = list(repo.all_commit_ids())
    if not commit_ids:
        # the repo is empty, no need to continue
        return
    new_commit_ids = unknown_commit_ids(commit_ids)
    stats_log = h.log_action(log, 'commit')
    for ci in new_commit_ids:
        stats_log.info(
            '',
            meta=dict(
                module='scm-%s' % repo.repo_id,
                read='0'))
    if not all_commits:
        # Skip commits that are already in the DB
        commit_ids = new_commit_ids
    log.info('Refreshing %d commits on %s', len(commit_ids), repo.full_fs_path)

    # Refresh commits
    seen = set()
    for i, oid in enumerate(commit_ids):
        repo.refresh_commit_info(oid, seen, not all_commits)
        if (i + 1) % 100 == 0:
            log.info('Refresh commit info %d: %s', (i + 1), oid)

    refresh_commit_repos(all_commit_ids, repo)

    # Refresh child references
    for i, oid in enumerate(commit_ids):
        # next() builtin rather than cursor.next() for Python 3 compatibility
        ci = next(CommitDoc.m.find(dict(_id=oid), validate=False))
        refresh_children(ci)
        if (i + 1) % 100 == 0:
            log.info('Refresh child info %d for parents of %s',
                     (i + 1), ci._id)

    if repo._refresh_precompute:
        # Refresh commit runs
        commit_run_ids = commit_ids
        # Check if the CommitRuns for the repo are in a good state by checking for
        # a CommitRunDoc that contains the last known commit. If there isn't one,
        # the CommitRuns for this repo are in a bad state - rebuild them
        # entirely.
        if commit_run_ids != all_commit_ids:
            last_commit = last_known_commit_id(all_commit_ids, new_commit_ids)
            log.info('Last known commit id: %s', last_commit)
            if not CommitRunDoc.m.find(dict(commit_ids=last_commit)).count():
                log.info('CommitRun incomplete, rebuilding with all commits')
                commit_run_ids = all_commit_ids
        log.info('Starting CommitRunBuilder for %s', repo.full_fs_path)
        rb = CommitRunBuilder(commit_run_ids)
        rb.run()
        rb.cleanup()
        log.info('Finished CommitRunBuilder for %s', repo.full_fs_path)

    # Refresh trees
    # Like diffs below, pre-computing trees for some SCMs is too expensive,
    # so we skip it here, then do it on-demand later.
    if repo._refresh_precompute:
        cache = {}
        for i, oid in enumerate(commit_ids):
            ci = next(CommitDoc.m.find(dict(_id=oid), validate=False))
            cache = refresh_commit_trees(ci, cache)
            if (i + 1) % 100 == 0:
                log.info('Refresh commit trees %d: %s', (i + 1), ci._id)

    # Compute diffs
    cache = {}
    # For some SCMs, we don't want to pre-compute the LCDs because that
    # would be too expensive, so we skip them here and do them on-demand
    # with caching.
    if repo._refresh_precompute:
        model_cache = ModelCache()
        lcid_cache = {}
        for i, oid in enumerate(reversed(commit_ids)):
            ci = model_cache.get(Commit, dict(_id=oid))
            ci.set_context(repo)
            compute_lcds(ci, model_cache, lcid_cache)
            ThreadLocalORMSession.flush_all()
            if (i + 1) % 100 == 0:
                log.info('Compute last commit info %d: %s', (i + 1), ci._id)

    # Clear any existing caches for branches/tags
    if repo.cached_branches:
        repo.cached_branches = []
        session(repo).flush()

    if repo.cached_tags:
        repo.cached_tags = []
        session(repo).flush()
    # The first view can be expensive to cache,
    # so we want to do it here instead of on the first view.
    repo.get_branches()
    repo.get_tags()

    if not all_commits and not new_clone:
        for commit in commit_ids:
            new = repo.commit(commit)
            user = User.by_email_address(new.committed.email)
            if user is None:
                user = User.by_username(new.committed.name)
            if user is not None:
                g.statsUpdater.newCommit(new, repo.app_config.project, user)
            actor = user or TransientActor(
                    activity_name=new.committed.name or new.committed.email)
            g.director.create_activity(actor, 'committed', new,
                                       related_nodes=[repo.app_config.project],
                                       tags=['commit', repo.tool.lower()])

        from allura.webhooks import RepoPushWebhookSender
        by_branches, by_tags = _group_commits(repo, commit_ids)
        params = []
        # .items() rather than .iteritems() for Python 3 compatibility
        for b, commits in by_branches.items():
            ref = u'refs/heads/{}'.format(b) if b != '__default__' else None
            params.append(dict(commit_ids=commits, ref=ref))
        for t, commits in by_tags.items():
            ref = u'refs/tags/{}'.format(t)
            params.append(dict(commit_ids=commits, ref=ref))
        if params:
            RepoPushWebhookSender().send(params)

    log.info('Refresh complete for %s', repo.full_fs_path)
    g.post_event('repo_refreshed', len(commit_ids), all_commits, new_clone)

    # Send notifications
    if notify:
        send_notifications(repo, commit_ids)
Exemplo n.º 10
0
def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
    """Refresh all database state derived from the commits in *repo*.

    :param repo: repository model whose commit metadata is refreshed
    :param all_commits: when True, re-process every commit instead of only
        the commits not yet known to the DB
    :param notify: when True, send commit notifications at the end
    :param new_clone: True when the repo was only just cloned; suppresses
        per-commit stats generation
    """
    all_commit_ids = commit_ids = list(repo.all_commit_ids())
    if not commit_ids:
        # the repo is empty, no need to continue
        return
    new_commit_ids = unknown_commit_ids(commit_ids)
    stats_log = h.log_action(log, "commit")
    for ci in new_commit_ids:
        stats_log.info("", meta=dict(module="scm-%s" % repo.repo_id, read="0"))
    if not all_commits:
        # Skip commits that are already in the DB
        commit_ids = new_commit_ids
    log.info("Refreshing %d commits on %s", len(commit_ids), repo.full_fs_path)

    # Refresh commits
    seen = set()
    for i, oid in enumerate(commit_ids):
        repo.refresh_commit_info(oid, seen, not all_commits)
        if (i + 1) % 100 == 0:
            log.info("Refresh commit info %d: %s", (i + 1), oid)

    refresh_commit_repos(all_commit_ids, repo)

    # Refresh child references
    for i, oid in enumerate(commit_ids):
        # next() builtin rather than cursor.next() for Python 3 compatibility
        ci = next(CommitDoc.m.find(dict(_id=oid), validate=False))
        refresh_children(ci)
        if (i + 1) % 100 == 0:
            log.info("Refresh child info %d for parents of %s", (i + 1), ci._id)

    if repo._refresh_precompute:
        # Refresh commit runs
        commit_run_ids = commit_ids
        # Check if the CommitRuns for the repo are in a good state by checking for
        # a CommitRunDoc that contains the last known commit. If there isn't one,
        # the CommitRuns for this repo are in a bad state - rebuild them entirely.
        if commit_run_ids != all_commit_ids:
            last_commit = last_known_commit_id(all_commit_ids, new_commit_ids)
            log.info("Last known commit id: %s", last_commit)
            if not CommitRunDoc.m.find(dict(commit_ids=last_commit)).count():
                log.info("CommitRun incomplete, rebuilding with all commits")
                commit_run_ids = all_commit_ids
        log.info("Starting CommitRunBuilder for %s", repo.full_fs_path)
        rb = CommitRunBuilder(commit_run_ids)
        rb.run()
        rb.cleanup()
        log.info("Finished CommitRunBuilder for %s", repo.full_fs_path)

    # Refresh trees
    # Like diffs below, pre-computing trees for some SCMs is too expensive,
    # so we skip it here, then do it on-demand later.
    if repo._refresh_precompute:
        cache = {}
        for i, oid in enumerate(commit_ids):
            ci = next(CommitDoc.m.find(dict(_id=oid), validate=False))
            cache = refresh_commit_trees(ci, cache)
            if (i + 1) % 100 == 0:
                log.info("Refresh commit trees %d: %s", (i + 1), ci._id)

    # Compute diffs
    cache = {}
    # For some SCMs, we don't want to pre-compute the diffs because that
    # would be too expensive, so we skip them here and do them on-demand
    # with caching.
    if repo._refresh_precompute:
        for i, oid in enumerate(commit_ids):
            cid = next(CommitDoc.m.find(dict(_id=oid), validate=False))
            ci = mapper(Commit).create(cid, dict(instrument=False))
            ci.set_context(repo)
            compute_diffs(repo._id, cache, ci)
            if (i + 1) % 100 == 0:
                log.info("Compute diffs %d: %s", (i + 1), ci._id)

    if repo._refresh_precompute:
        model_cache = ModelCache()
        lcid_cache = {}
        for i, oid in enumerate(reversed(commit_ids)):
            ci = model_cache.get(Commit, dict(_id=oid))
            ci.set_context(repo)
            compute_lcds(ci, model_cache, lcid_cache)
            ThreadLocalORMSession.flush_all()
            if (i + 1) % 100 == 0:
                log.info("Compute last commit info %d: %s", (i + 1), ci._id)

    if not all_commits and not new_clone:
        for commit in commit_ids:
            new = repo.commit(commit)
            user = User.by_email_address(new.committed.email)
            if user is None:
                user = User.by_username(new.committed.name)
            if user is not None:
                g.statsUpdater.newCommit(new, repo.app_config.project, user)

    log.info("Refresh complete for %s", repo.full_fs_path)
    g.post_event("repo_refreshed", len(commit_ids), all_commits, new_clone)

    # Send notifications
    if notify:
        send_notifications(repo, commit_ids)
Exemplo n.º 11
0
def create_user():
    """Build and return a new ``User`` with all default field values."""
    user = User()
    return user
Exemplo n.º 12
0
def refresh_repo(repo, all_commits=False, notify=True, new_clone=False, commits_are_new=None):
    """Refresh all database state derived from the commits in *repo*.

    :param repo: repository model whose commit metadata is refreshed
    :param all_commits: when True, re-process every commit instead of only
        the commits not yet known to the DB
    :param notify: when True, send commit notifications at the end
    :param new_clone: True when the repository was only just cloned
    :param commits_are_new: whether to generate activity records and
        webhooks for the refreshed commits; defaults to
        ``not all_commits and not new_clone``
    """
    if commits_are_new is None:
        commits_are_new = not all_commits and not new_clone

    all_commit_ids = commit_ids = list(repo.all_commit_ids())
    if not commit_ids:
        # the repo is empty, no need to continue
        return
    new_commit_ids = unknown_commit_ids(commit_ids)
    stats_log = h.log_action(log, 'commit')
    for ci in new_commit_ids:
        stats_log.info(
            '',
            meta=dict(
                module='scm-%s' % repo.repo_id,
                read='0'))
    if not all_commits:
        # Skip commits that are already in the DB
        commit_ids = new_commit_ids
    log.info('Refreshing %d commits on %s', len(commit_ids), repo.full_fs_path)

    # Refresh commits
    seen = set()
    for i, oid in enumerate(commit_ids):
        repo.refresh_commit_info(oid, seen, not all_commits)
        if (i + 1) % 100 == 0:
            log.info('Refresh commit info %d: %s', (i + 1), oid)

    refresh_commit_repos(all_commit_ids, repo)

    # Refresh child references
    for i, oid in enumerate(commit_ids):
        # next() builtin rather than cursor.next() for Python 3 compatibility
        ci = next(CommitDoc.m.find(dict(_id=oid), validate=False))
        refresh_children(ci)
        if (i + 1) % 100 == 0:
            log.info('Refresh child info %d for parents of %s',
                     (i + 1), ci._id)

    # Clear any existing caches for branches/tags
    if repo.cached_branches:
        repo.cached_branches = []
        session(repo).flush()

    if repo.cached_tags:
        repo.cached_tags = []
        session(repo).flush()
    # The first view can be expensive to cache,
    # so we want to do it here instead of on the first view.
    repo.get_branches()
    repo.get_tags()

    if commits_are_new:
        for commit in commit_ids:
            new = repo.commit(commit)
            user = User.by_email_address(new.committed.email)
            if user is None:
                user = User.by_username(new.committed.name)
            if user is not None:
                g.statsUpdater.newCommit(new, repo.app_config.project, user)
            actor = user or TransientActor(
                    activity_name=new.committed.name or new.committed.email)
            g.director.create_activity(actor, 'committed', new,
                                       related_nodes=[repo.app_config.project],
                                       tags=['commit', repo.tool.lower()])

        from allura.webhooks import RepoPushWebhookSender
        by_branches, by_tags = _group_commits(repo, commit_ids)
        params = []
        # .items() rather than .iteritems() for Python 3 compatibility
        for b, commits in by_branches.items():
            ref = u'refs/heads/{}'.format(b) if b != '__default__' else None
            params.append(dict(commit_ids=commits, ref=ref))
        for t, commits in by_tags.items():
            ref = u'refs/tags/{}'.format(t)
            params.append(dict(commit_ids=commits, ref=ref))
        if params:
            RepoPushWebhookSender().send(params)

    log.info('Refresh complete for %s', repo.full_fs_path)
    g.post_event('repo_refreshed', len(commit_ids), all_commits, new_clone)

    # Send notifications
    if notify:
        send_notifications(repo, reversed(commit_ids))
Exemplo n.º 13
0
def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
    """Refresh all database state derived from the commits in *repo*.

    :param repo: repository model whose commit metadata is refreshed
    :param all_commits: when True, re-process every commit instead of only
        the commits not yet known to the DB
    :param notify: when True, send commit notifications at the end
    :param new_clone: True when the repo was only just cloned; suppresses
        per-commit stats generation
    """
    all_commit_ids = commit_ids = list(repo.all_commit_ids())
    if not commit_ids:
        # the repo is empty, no need to continue
        return
    new_commit_ids = unknown_commit_ids(commit_ids)
    stats_log = h.log_action(log, 'commit')
    for ci in new_commit_ids:
        stats_log.info('', meta=dict(module='scm-%s' % repo.repo_id, read='0'))
    if not all_commits:
        # Skip commits that are already in the DB
        commit_ids = new_commit_ids
    log.info('Refreshing %d commits on %s', len(commit_ids), repo.full_fs_path)

    # Refresh commits
    seen = set()
    for i, oid in enumerate(commit_ids):
        repo.refresh_commit_info(oid, seen, not all_commits)
        if (i + 1) % 100 == 0:
            log.info('Refresh commit info %d: %s', (i + 1), oid)

    refresh_commit_repos(all_commit_ids, repo)

    # Refresh child references
    for i, oid in enumerate(commit_ids):
        # next() builtin rather than cursor.next() for Python 3 compatibility
        ci = next(CommitDoc.m.find(dict(_id=oid), validate=False))
        refresh_children(ci)
        if (i + 1) % 100 == 0:
            log.info('Refresh child info %d for parents of %s', (i + 1),
                     ci._id)

    if repo._refresh_precompute:
        # Refresh commit runs
        commit_run_ids = commit_ids
        # Check if the CommitRuns for the repo are in a good state by checking for
        # a CommitRunDoc that contains the last known commit. If there isn't one,
        # the CommitRuns for this repo are in a bad state - rebuild them entirely.
        if commit_run_ids != all_commit_ids:
            last_commit = last_known_commit_id(all_commit_ids, new_commit_ids)
            log.info('Last known commit id: %s', last_commit)
            if not CommitRunDoc.m.find(dict(commit_ids=last_commit)).count():
                log.info('CommitRun incomplete, rebuilding with all commits')
                commit_run_ids = all_commit_ids
        log.info('Starting CommitRunBuilder for %s', repo.full_fs_path)
        rb = CommitRunBuilder(commit_run_ids)
        rb.run()
        rb.cleanup()
        log.info('Finished CommitRunBuilder for %s', repo.full_fs_path)

    # Refresh trees
    # Like diffs below, pre-computing trees for some SCMs is too expensive,
    # so we skip it here, then do it on-demand later.
    if repo._refresh_precompute:
        cache = {}
        for i, oid in enumerate(commit_ids):
            ci = next(CommitDoc.m.find(dict(_id=oid), validate=False))
            cache = refresh_commit_trees(ci, cache)
            if (i + 1) % 100 == 0:
                log.info('Refresh commit trees %d: %s', (i + 1), ci._id)

    # Compute diffs
    cache = {}
    # For some SCMs, we don't want to pre-compute the diffs because that
    # would be too expensive, so we skip them here and do them on-demand
    # with caching.
    if repo._refresh_precompute:
        for i, oid in enumerate(commit_ids):
            cid = next(CommitDoc.m.find(dict(_id=oid), validate=False))
            ci = mapper(Commit).create(cid, dict(instrument=False))
            ci.set_context(repo)
            compute_diffs(repo._id, cache, ci)
            if (i + 1) % 100 == 0:
                log.info('Compute diffs %d: %s', (i + 1), ci._id)

    if repo._refresh_precompute:
        model_cache = ModelCache()
        lcid_cache = {}
        for i, oid in enumerate(reversed(commit_ids)):
            ci = model_cache.get(Commit, dict(_id=oid))
            ci.set_context(repo)
            compute_lcds(ci, model_cache, lcid_cache)
            ThreadLocalORMSession.flush_all()
            if (i + 1) % 100 == 0:
                log.info('Compute last commit info %d: %s', (i + 1), ci._id)

    if not all_commits and not new_clone:
        for commit in commit_ids:
            new = repo.commit(commit)
            user = User.by_email_address(new.committed.email)
            if user is None:
                user = User.by_username(new.committed.name)
            if user is not None:
                g.statsUpdater.newCommit(new, repo.app_config.project, user)

    log.info('Refresh complete for %s', repo.full_fs_path)
    g.post_event('repo_refreshed', len(commit_ids), all_commits, new_clone)

    # Send notifications
    if notify:
        send_notifications(repo, commit_ids)