Example #1
    def post(self):
        assert util.development() or oauth.is_current_user_admin()
        util.log_upload_data(self.request.path, self.request.get("data"))
        gerrit.poll()
        data = StringIO.StringIO(self.request.get("data"))
        new_commits = []
        for line in data:
            new_commits.append(self.load(json.loads(line)))
        model.commits().invalidate()

        self.update_depth(new_commits)
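The handler expects newline-delimited JSON in the "data" form field, one commit per line. A minimal sketch of building such a request body (the field names in the commit dict are illustrative, not the real schema):

import json
import urllib

commits = [{"id": "deadbeef", "parents": []}]  # hypothetical schema
payload = urllib.urlencode(
    {"data": "\n".join(json.dumps(c) for c in commits)})
# POST `payload` to this handler's path; the assert above requires a
# development server or an admin OAuth user.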
Example #2
    def update_depth(self, commits):
        # Iteratively assign each commit a depth of max(parent depth) + 1,
        # deferring commits whose parents have not been computed yet.
        to_visit = list(commits)
        commits = model.commits()
        while to_visit:
            commit = to_visit.pop()
            c = commits[commit]
            if c.depth is not None:
                continue

            again = False
            depth = 0
            for parent in [commits[x] for x in c.parents]:
                if parent.depth:
                    depth = max(depth, parent.depth)
                else:
                    # Parent not computed yet: queue this commit again,
                    # after the unresolved parent.
                    if not again:
                        to_visit.append(commit)
                        again = True
                    to_visit.append(parent.key().name())

            if not again:
                c.depth = depth + 1
                c.put()
                logging.info("update_depth: %s depth %d" % (commit, c.depth))
Example #3
    def get(self, metric, config, filename, commit, bitrate):

        config_info = fetch_config_info(metric, config, filename, commit)
        commit, config_flags, runtime_flags = config_info

        if bitrate != '':
            bitrate = float(bitrate)

            # Replace the string ${target_bitrate} in runtime_flags
            runtime_flags = runtime_flags.replace('${target_bitrate}',
                                                  str(bitrate))

        # See if this commit is in gerrit
        commit_data = model.commits()[commit]
        if commit_data.gerrit_url is not None:
            commit = {
                'commitid': commit,
                'commit_in_gerrit': True,
                'commit_url': commit_data.gerrit_url,
                'commit_ref': commit_data.gerrit_patchset_ref
            }
        else:
            commit = {'commitid': commit, 'commit_in_gerrit': False}

        response = {
            'commit': commit,
            'config_flags': config_flags,
            'runtime_flags': runtime_flags
        }

        html = template.render("configinfo.html", response)
        self.response.out.write(html)
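The ${target_bitrate} placeholder uses the same syntax as the standard library's string.Template, so the substitution could also be written that way (a sketch; the flag string is hypothetical):

from string import Template

runtime_flags = '--end-usage=cbr --target-bitrate=${target_bitrate}'
runtime_flags = Template(runtime_flags).safe_substitute(target_bitrate=500.0)
# safe_substitute leaves any other ${...} placeholders untouched.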
Example #4
def percent_improvement_map(entry):
    config = entry.config_name
    commit = entry.commit
    depth = model.commits()[commit].depth
    branches = model.commits()[commit].branches
    for metric, filename, values in data_map(entry):
        filesets = model.files()[filename].file_sets

        for branch in branches:
            k = (metric, config, filename, branch)
            v = (depth, commit, filename, values)
            yield (json.dumps(k), json.dumps(v))
            for fileset in filesets:
                k = (metric, config, "~" + fileset, branch)
                v = (depth, commit, filename, values)
                yield (json.dumps(k), json.dumps(v))
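Both key and value are JSON-serialized because the mapreduce shuffle groups values under string keys. A hypothetical emitted pair (names and numbers are illustrative only):

# key:   '["psnr", "vp8", "~all", "master"]'
# value: '[42, "0030303b69...", "akiyo.yuv", [[100.0, 38.1], [200.0, 40.2]]]'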
Example #5
    def get(self):
        # We get the 5 most recent commits
        query = model.Commit.all()

        # We use this if we just want the newest 5, regardless of run data
        #current_commits = query.order("-commit_time").fetch(limit=5)

        # test data
        current_commits = [
            '0030303b6949ba2d3391f3ae400213acc0e80db7',
            '062864f4cc2179b6f222ae337538c18bfd08037a',
            '05bde9d4a4b575aaadd9b6f5d0f82826b1cb4900',
            '0c483d6b683fa4313cf7dadf448a707fe32714a4'
        ]

        formatted_commits = []  # These are commit_dict, formatted_resps pairs

        for commit in current_commits:
            # We get all the data about the commit we need. With real
            # query results we could use the entity directly:
            #commit_data = commit
            # For the hardcoded test data above, look the commit up by id:
            commit_data = model.commits()[commit]

            message = commit_data.message.split("\n")
            commit = {
                'commit': commit_data.key().name()[:9],
                'commitid': commit_data.key().name(),
                'author': commit_data.author,
                'subject': message[0],
                'body': message[1:],
                'date': commit_data.author_time,
                'branches': commit_data.branches
            }
            commitid = commit_data.key().name()

            # We need (metric, config, fileset) tuples
            resps = []
            query = model.CodecMetricIndex.all()
            query = query.filter('commit =', commitid)
            for item in query:
                resps.extend(
                    get_adhoc_improvement(item.metrics, item.config_name,
                                          item.files, commitid))

            # Now that we have our responses, we can format them by seeing if
            # the value crosses our threshold
            formatted_resps = run_formatter(commit, resps)

            formatted_commits.append((commit, formatted_resps))

        values = {
            "user": users.get_current_user(),
            "login_url": users.create_login_url("/"),
            "logout_url": users.create_logout_url("/"),
            "formatted_commits": formatted_commits,
        }
        self.response.out.write(template.render("commit_viewer.html", values))
Example #6
    def get(self, commits):
        def gerrit_link(m):
            return GERRIT_LINK_HTML % (m.group(0), m.group(0))

        def commit_group(commits, rollup):
            return {'commits': commits, 'count': len(commits),
                    'rollup': rollup, 'id': commits[0]['commit']}

        commits = util.field_list(commits)
        # Don't print history for the whole branch
        for commit in commits:
            if commit[0] == '~':
                return

        # Find the oldest commit
        visited = set(commits[:1])
        for commit in commits:
            if commit in visited:
                visited = initial_visited(commit)

        history = [build_history(c, set(visited)) for c in commits]
        #self.response.out.write("\n".join(map(str, history)))

        history = sorted(history, key=len)
        collapsed_history = history[0]
        collapsed_history_set = set(collapsed_history)
        for h in history[1:]:
            for c in h:
                if c not in collapsed_history_set:
                    collapsed_history_set.add(c)
                    collapsed_history.append(c)

        formatted = []
        rollup = []
        commit_cache = model.commits()
        for commit in collapsed_history:
            commit_data = commit_cache[commit]
            message = commit_data.message.split("\n")
            nonempty_lines = sum(map(bool, message))
            data = {'commit': commit_data.key().name()[:9],
                    'commitid': commit_data.key().name(),
                    'author': commit_data.author,
                    'subject': message[0],
                    'body': message[1:],
                    'selected': False,
                    'expandable': nonempty_lines > 1}
            if commit in commits:
                if rollup:
                    formatted.append(commit_group(rollup, rollup=True))
                    rollup = []
                data['selected'] = True
                formatted.append(commit_group([data], rollup=False))
            else:
                rollup.append(data)

        html = template.render("history.html", {"commit_groups": formatted})
        html = re.sub(GERRIT_LINK_PATTERN, gerrit_link, html)
        self.response.out.write(html)
Example #7
def initial_visited(c1):
    # Walk the first-parent chain from c1, recording every parent
    # encountered along the way (including non-first parents of merges).
    visited = set()
    commit_cache = model.commits()
    while c1:
        c1 = commit_cache[c1]
        visited.update(c1.parents)
        if c1.parents:
            c1 = c1.parents[0]
        else:
            break
    return visited
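To see the subtlety, a trace over a hypothetical merge history:

# c3.parents == ['c2', 'cX']  (merge commit)
# c2.parents == ['c1']
# c1.parents == []
# initial_visited('c3') -> set(['c2', 'cX', 'c1']): the walk follows
# first parents only (c3 -> c2 -> c1) but records every parent it sees,
# including the merged-in 'cX'.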
Example #8
def build_history(commit, visited=None):
    # Breadth-first walk over the commit DAG, building a list with the
    # oldest commit first. A fresh set is created per call; a mutable
    # default argument (visited=set()) would be shared across calls.
    if visited is None:
        visited = set()
    to_visit = [commit]
    history = []
    commit_cache = model.commits()
    while to_visit:
        commit = to_visit.pop(0)
        if commit not in visited:
            visited.add(commit)
            history.insert(0, commit)
            commit = commit_cache[commit]
            to_visit.extend(commit.parents)
    return history
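Since list.pop(0) shifts the whole list (O(n) per pop), the same breadth-first order can be kept with collections.deque; a behavior-preserving variant sketch:

from collections import deque

def build_history(commit, visited=None):
    if visited is None:
        visited = set()
    to_visit = deque([commit])
    history = []
    commit_cache = model.commits()
    while to_visit:
        commit = to_visit.popleft()
        if commit not in visited:
            visited.add(commit)
            history.insert(0, commit)
            to_visit.extend(commit_cache[commit].parents)
    return history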
Example #9
    def get(self, commits, baseline):
        def gerrit_link(m):
            return GERRIT_LINK_HTML % (m.group(0), m.group(0))

        if baseline == '':
            # we will handle this case specially
            baseline = False

        commits = util.field_list(commits)

        # Look up the commit data for these commits
        selected_commits = {}
        commit_cache = model.commits()
        for commit in commits:
            if commit not in selected_commits:
                selected_commits[commit] = commit_cache[commit]

        # Sort in topological order
        commits = sorted(selected_commits.keys(),
                         key=lambda x: selected_commits[x].depth, reverse=True)

        visited = initial_visited(commits[-1])
        history = build_history(commits[0], visited)
        history.reverse()

        formatted = []
        for commit in history:
            commit_data = commit_cache[commit]
            message = commit_data.message.split("\n")
            data = {'commit': commit_data.key().name()[:9],
                    'commitid': commit_data.key().name(),
                    'author': commit_data.author,
                    'subject': message[0],
                    'body': message[1:],
                    'date': commit_data.author_time}
            formatted.append(data)

        # We also get the baseline
        if baseline:
            commit_data = commit_cache[baseline]
            message = commit_data.message.split("\n")
            baseline = {'commit': commit_data.key().name()[:9],
                        'commitid': commit_data.key().name(),
                        'author': commit_data.author,
                        'subject': message[0],
                        'body': message[1:],
                        'date': commit_data.author_time}

        html = template.render("commitinfo.html", {"commits": formatted, 'baseline':baseline})
        html = re.sub(GERRIT_LINK_PATTERN, gerrit_link, html)
        self.response.out.write(html)
Example #10
def find_baseline(metric, config, filename, commits):
    def find_first_parent(commit, data, candidates):
        while True:
            parents = data[commit].parents
            if not parents:
                # root node
                return None
            commit = parents[0]
            if commit in candidates:
                return commit

    # Avoid errors when no commits are selected
    if len(commits) == 0:
        return None

    candidates = drilldown.query(metric, config, filename, commits)[3]
    commit_data = model.commits()
    commits = util.field_list(commits)
    parentage = {}
    for commit in commits:
        parentage[commit] = []

    root_nodes_seen = 0
    while root_nodes_seen < len(commits):
        for commit1 in commits:
            parents = parentage[commit1]
            if parents:
                this_commit = parents[-1]
            else:
                this_commit = commit1

            # already hit the root for this commit?
            if this_commit is None:
                continue

            parent = find_first_parent(this_commit, commit_data, candidates)
            parents.append(parent)
            if parent is None:
                root_nodes_seen += 1
                continue

            n = 0
            for commit2 in commits:
                if parent in parentage[commit2]:
                    n += 1

            if n == len(commits):
                # parent is found in all lineages
                return parent
    return None
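The inner find_first_parent follows first parents only, stopping at the first commit that actually has results (a member of candidates). A minimal illustration with fake data:

from collections import namedtuple

FakeCommit = namedtuple('FakeCommit', 'parents')

# Hypothetical first-parent chain c3 -> c2 -> c1; only c1 has results.
data = {'c3': FakeCommit(['c2']),
        'c2': FakeCommit(['c1']),
        'c1': FakeCommit([])}
candidates = set(['c1'])
# find_first_parent('c3', data, candidates) would return 'c1';
# with candidates = set() it would walk to the root and return None.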
Example #11
    def get(self):
        # Prime caches
        for commit in model.commits():
            pass
        drilldown.query('', '', '', '')
Example #12
def percent_improvement_reduce(key, values):
    metrics = model.MetricCache(
        [k.name() for k in model.Metric.all(keys_only=True)])

    # deserialize and sort values
    (metric, config, fileset, branch) = json.loads(key)
    values = sorted(map(json.loads, values), key=lambda x: x[0])

    # unpack values to per commit/file lists
    commit_order = []
    by_commit = {}
    for v in values:
        (unused_depth, commit, filename, metric_data) = v
        if not commit_order or commit_order[-1] != commit:
            commit_order.append(commit)
        runs = by_commit.setdefault(commit, {}).setdefault(filename, [])
        runs.append(metric_data)

    # calculate improvement
    last_data = None
    abs_imp = 1.0
    result_t = []
    result_v = []
    for commit in commit_order:
        this_data = by_commit[commit]
        if last_data:
            last_files = set(last_data.keys())
            this_files = set(this_data.keys())
            imp = []

            for f in last_files.intersection(this_files):
                if metrics[metric].distortion:
                    data1 = sorted(last_data[f], key=lambda x: x[0])
                    data2 = sorted(this_data[f], key=lambda x: x[0])
                    imp.append(DataBetter(data1, data2))
                else:
                    data1 = [x[0] for x in last_data[f]]
                    data2 = [x[0] for x in this_data[f]]
                    data1 = sum(data1) / len(data1)
                    data2 = sum(data2) / len(data2)
                    imp.append(data2 / data1 - 1.0)

            if not imp:
                # Discontinuity
                abs_imp = 1.0
            elif metrics[metric].distortion:
                # avg_imp gives the multiplier to get the bitrate for
                # the same quality. For example, the same quality at half the
                # bitrate is 0.5. Same quality at twice the bitrate is -1.0.
                # so, B1 = B0 * (1 - avg_imp)
                #
                # abs_imp is directly related to abs_br. If the abs_br for
                # the same quality since the beginning of time is 0.5, then
                # the resulting improvement is 1.5 (1.0 = no improvement)
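                # Worked example (hypothetical numbers): starting from
                # abs_imp = 1.0 (abs_br = 1.0), a commit with avg_imp = 0.5
                # (same quality at half the bitrate) gives
                # abs_br = 1.0 * 0.5 = 0.5, hence abs_imp = 2.0 - 0.5 = 1.5.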
                avg_imp = sum(imp) / len(imp)
                abs_br = (2.0 - abs_imp)
                abs_br *= (1.0 - avg_imp)
                abs_imp = (2.0 - abs_br)
            else:
                avg_imp = sum(imp) / len(imp) + 1.0
                abs_imp *= avg_imp
        last_data = this_data
        result_t.append(model.commits()[commit].commit_time)
        result_v.append(abs_imp)

    h = hashlib.sha1()
    map(h.update, [metric, config, fileset, branch])
    ts = model.CodecMetricTimeSeries(key_name=h.hexdigest(),
                                     metric=metric,
                                     config_name=config,
                                     file_or_set_name=fileset,
                                     branch=branch,
                                     commits=commit_order,
                                     times=result_t,
                                     values=result_v)

    yield op.db.Put(ts)

    # Update the drilldown table
    branch = "~"+branch
    drilldown.insert(set([metric]), set([config]), set([fileset]),
                     set([branch]))
    drilldown.save()
Example #13
    def get(self, commit):
        commit = urllib.unquote(commit)

        # We start by seeing if it's a valid commit (or email address)
        indexes = model.CodecMetricIndex.all(keys_only=True)
        indexes = indexes.filter('commit =', commit)
        keys = [k.parent() for k in indexes]
        if not keys:
            values = {
                "user": users.get_current_user(),
                "login_url": users.create_login_url("/"),
                "logout_url": users.create_logout_url("/"),
                'commit': commit,
                'error': True,
                'errormessage': "There are no matching results for this search.",
            }

            html = template.render("commit_view.html", values)
            self.response.out.write(html)

            return

        # We get all the data about the commit we need
        commit_data = model.commits()[commit]
        message = commit_data.message.split("\n")
        commit = {
            'commit': commit_data.key().name()[:9],
            'commitid': commit_data.key().name(),
            'author': commit_data.author,
            'subject': message[0],
            'body': message[1:],
            'date': commit_data.author_time,
            'branches': commit_data.branches
        }
        commitid = commit_data.key().name()

        # We need (metric, config, fileset) tuples
        resps = []
        query = model.CodecMetricIndex.all()
        query = query.filter('commit =', commitid)
        for item in query:
            resps.extend(
                get_adhoc_improvement(item.metrics, item.config_name,
                                      item.files, commitid))

        # Now that we have our responses, we can format them by seeing if
        # the value crosses our threshold
        formatted_resps = run_formatter(commit, resps)

        values = {
            "user": users.get_current_user(),
            "login_url": users.create_login_url("/"),
            "logout_url": users.create_logout_url("/"),
            'commit': commit,
            'runs': formatted_resps
        }

        html = template.render("commit_view.html", values)
        self.response.out.write(html)