def get_time_series(self, metrics, configs, filenames, commits):
    metrics = util.field_list(metrics)
    configs = util.field_list(configs)
    filesets = util.field_list(filenames)
    branches = util.field_list(commits)
    result = []
    for m in metrics:
        for c in configs:
            for f in filesets:
                for b in branches:
                    # Get all the data for all files in the set; the fileset
                    # name itself is queried alongside its members
                    files_and_set = util.filename_list(f)
                    files_and_set.append(f)
                    data = fetch_time_series(m, c, files_and_set, b)

                    # Remove unnecessary commit info: keep only elements 0
                    # and 2 of each data point
                    formatted_data = {}
                    for key in data:
                        formatted_data_list = []
                        for e in data[key]:
                            formatted_data_list.append([e[0], e[2]])
                        formatted_data[key] = formatted_data_list
                    data = formatted_data

                    # Build the column name
                    col_name = []
                    if len(metrics) > 1:
                        col_name.append(m)
                    if len(configs) > 1:
                        col_name.append(c)
                    if len(filesets) > 1:
                        col_name.append(f)
                    if len(col_name) == 0 or len(branches) > 1:
                        # Drop the leading '~' that marks a branch field
                        col_name.append(b[1:])
                    col_name = "/".join(col_name)

                    # Build the rows for this column
                    col = []
                    for filename, filedata in data.iteritems():
                        improvement = filedata[-1][1]
                        col.append([filename, improvement])
                    result.append({'col': col_name, 'data': col})

    # Return the results
    result = {'data': result, 'commits': ','.join(branches)}
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(pretty_json(result))
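# For reference, a minimal sketch of what the util helpers used above are
# assumed to do (hypothetical stand-ins, not the real util module):
# field_list splits a comma-separated URL field into its non-empty parts,
# and filename_list expands a fileset name into its member filenames.
def _example_field_list(field):
    # e.g. 'psnr,ssim' -> ['psnr', 'ssim']; assumed behavior only.
    return [part for part in (field or '').split(',') if part]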
def get(self, commits):
    def gerrit_link(m):
        return GERRIT_LINK_HTML % (m.group(0), m.group(0))

    def commit_group(commits, rollup):
        return {'commits': commits,
                'count': len(commits),
                'rollup': rollup,
                'id': commits[0]['commit']}

    commits = util.field_list(commits)

    # Don't print history for the whole branch
    for commit in commits:
        if commit[0] == '~':
            return

    # Find the oldest commit
    visited = set(commits[:1])
    for commit in commits:
        if commit in visited:
            visited = initial_visited(commit)

    # Merge the histories of all selected commits, shortest first
    history = [build_history(c, set(visited)) for c in commits]
    history = sorted(history, key=len)
    collapsed_history = history[0]
    collapsed_history_set = set(collapsed_history)
    for h in history[1:]:
        for c in h:
            if c not in collapsed_history_set:
                collapsed_history_set.add(c)
                collapsed_history.append(c)

    formatted = []
    rollup = []
    commit_cache = model.commits()
    for commit in collapsed_history:
        commit_data = commit_cache[commit]
        message = commit_data.message.split("\n")
        nonempty_lines = sum(map(bool, message))
        data = {'commit': commit_data.key().name()[:9],
                'commitid': commit_data.key().name(),
                'author': commit_data.author,
                'subject': message[0],
                'body': message[1:],
                'selected': False,
                'expandable': nonempty_lines > 1}
        if commit in commits:
            # Flush any pending unselected commits as a rolled-up group
            if rollup:
                formatted.append(commit_group(rollup, rollup=True))
                rollup = []
            data['selected'] = True
            formatted.append(commit_group([data], rollup=False))
        else:
            rollup.append(data)

    html = template.render("history.html", {"commit_groups": formatted})
    html = re.sub(GERRIT_LINK_PATTERN, gerrit_link, html)
    self.response.out.write(html)
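# A minimal sketch of the history helpers assumed above (hypothetical, not
# the real implementations): initial_visited(c) is taken to return the set
# of commits already accounted for at c, and build_history(c, visited) the
# chain of commits reachable from c that are not yet in visited. Under that
# assumption, a first-parent walk would look like this:
def _example_build_history(commit, visited, parents_of):
    # Collect commits from newest to oldest until we hit a visited one.
    history = []
    while commit is not None and commit not in visited:
        history.append(commit)
        visited.add(commit)
        parents = parents_of(commit)
        commit = parents[0] if parents else None
    return history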
def get(self, commits, baseline):
    def gerrit_link(m):
        return GERRIT_LINK_HTML % (m.group(0), m.group(0))

    if baseline == '':
        # We handle this case specially
        baseline = False
    commits = util.field_list(commits)

    # Look up the commit data for these commits
    selected_commits = {}
    commit_cache = model.commits()
    for commit in commits:
        if commit not in selected_commits:
            selected_commits[commit] = commit_cache[commit]

    # Sort in topological order, newest (deepest) first
    commits = sorted(selected_commits.keys(),
                     key=lambda x: selected_commits[x].depth,
                     reverse=True)
    visited = initial_visited(commits[-1])
    history = build_history(commits[0], visited)
    history.reverse()

    formatted = []
    for commit in history:
        commit_data = commit_cache[commit]
        message = commit_data.message.split("\n")
        data = {'commit': commit_data.key().name()[:9],
                'commitid': commit_data.key().name(),
                'author': commit_data.author,
                'subject': message[0],
                'body': message[1:],
                'date': commit_data.author_time}
        formatted.append(data)

    # We also get the baseline
    if baseline:
        commit_data = commit_cache[baseline]
        message = commit_data.message.split("\n")
        baseline = {'commit': commit_data.key().name()[:9],
                    'commitid': commit_data.key().name(),
                    'author': commit_data.author,
                    'subject': message[0],
                    'body': message[1:],
                    'date': commit_data.author_time}

    html = template.render("commitinfo.html",
                           {"commits": formatted, 'baseline': baseline})
    html = re.sub(GERRIT_LINK_PATTERN, gerrit_link, html)
    self.response.out.write(html)
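# The sort above relies on each cached commit carrying a precomputed depth,
# which appears to be its distance from the root commit, so ordering by
# decreasing depth puts the newest selected commit first and the oldest
# last. A toy illustration with hypothetical values:
#   depths = {'abc123': 42, 'def456': 40}
#   sorted(depths.keys(), key=lambda x: depths[x], reverse=True)
#   # -> ['abc123', 'def456']  (deepest, i.e. newest, first)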
def find_baseline(metric, config, filename, commits):
    """Finds a baseline: the first commit, following first parents, that is
    an ancestor of every selected commit and appears in the drilldown
    candidate set. Returns None if no such commit exists."""
    def find_first_parent(commit, data, candidates):
        while True:
            parents = data[commit].parents
            if not parents:
                # Root node
                return None
            commit = parents[0]
            if commit in candidates:
                return commit

    # Removes some errors when no commits are selected
    if len(commits) == 0:
        return None

    # Candidate baselines come from the commit dimension of the drilldown
    # query
    candidates = drilldown.query(metric, config, filename, commits)[3]
    commit_data = model.commits()
    commits = util.field_list(commits)
    parentage = {}
    for commit in commits:
        parentage[commit] = []

    root_nodes_seen = 0
    while root_nodes_seen < len(commits):
        for commit1 in commits:
            parents = parentage[commit1]
            if parents:
                this_commit = parents[-1]
            else:
                this_commit = commit1

            # Already hit the root for this commit?
            if this_commit is None:
                continue

            parent = find_first_parent(this_commit, commit_data, candidates)
            parents.append(parent)
            if parent is None:
                root_nodes_seen += 1
                continue

            n = 0
            for commit2 in commits:
                if parent in parentage[commit2]:
                    n += 1
            if n == len(commits):
                # Parent is found in all lineages
                return parent
    return None
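# A toy walkthrough of the lineage intersection above, with a hypothetical
# graph. Suppose the candidate-only first-parent chains of two selected
# commits are:
#   A: a2 -> a1 -> base -> (root)
#   B: b1 -> base -> (root)
# The round-robin loop appends one ancestor per commit per pass, so the
# parentage lists grow in lockstep, and 'base' is returned as soon as it
# appears in both lists: the closest candidate common to every lineage.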
def _split_field(field):
    result = util.field_list(field)
    if not result:
        return [None]
    return result
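# _split_field lets callers iterate at least once per dimension: an empty
# URL field yields [None] rather than [] (assuming util.field_list returns
# a possibly-empty list), e.g.
#   _split_field('psnr,ssim')  ->  ['psnr', 'ssim']
#   _split_field('')           ->  [None]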
def get_adhoc_improvement(self, metrics, configs, filenames, commits):
    """Calculates the requested composite metrics and outputs as JSON"""
    # Find the baseline based on the raw URL variables
    parent = find_baseline(metrics, configs, filenames, commits)

    # We format the end of the table with extra info
    if parent:
        parent_str = parent[:9]
    else:
        parent_str = "None found"

    result = []
    metrics = util.field_list(metrics)
    configs = util.field_list(configs)
    filenames = util.filename_list(filenames)
    commits = util.field_list(commits)

    # Fix for the case that a commit in commits has no parent.
    # In this case we choose the oldest commit as the parent, ie the one
    # without a parent.
    if not parent:
        parent = commits[-1]

    metrics_cache = model.metrics()
    for m in metrics:
        if metrics_cache[m].distortion:
            improvement = rd_improvement
        else:
            improvement = mean_improvement
        for cfg in configs:
            baseline_data = fetch_metric_for_fileset(m, cfg, filenames,
                                                     parent)
            for cm in commits:
                # Each m, cfg, cm combination will be a column in the table
                col = []
                average, results = calculate_improvement(
                    m, cfg, filenames, cm, baseline_data, improvement)
                for f, composite in results.iteritems():
                    col.append([f, composite])

                # Build the column name
                col_name = []
                if len(metrics) > 1:
                    col_name.append(m)
                if len(configs) > 1:
                    col_name.append(cfg)
                if len(col_name) == 0 or len(commits) > 1:
                    col_name.append(cm[:9])
                col_name = "/".join(col_name)

                col.append(['OVERALL: (' + parent_str + ')', average])
                result.append({'col': col_name, 'data': col})

    # Return the results
    result = {'baseline': parent,
              'data': result,
              'commits': ','.join(commits)}
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(pretty_json(result))
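# The JSON written above has roughly this shape (identifiers and values are
# hypothetical):
#   {"baseline": "deadbeef01...",
#    "commits": "abc123...,def456...",
#    "data": [{"col": "psnr/abc123def",
#              "data": [["foreman_cif", 1.32],
#                       ["OVERALL: (deadbeef0)", 0.97]]}]}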