def main():
    args = parse_args()
    train_filename, test_filename = parse(args.logfile_path, args.output_dir,
                                          args.delimiter)
    data = load_data(train_filename, args.delimiter)

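    # save the plot as a PNG in the output directory, named after the input log file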
    path_to_png = os.path.join(args.output_dir,
                               os.path.basename(args.logfile_path) + '.png')
    draw_line_graph(data, path_to_png, interval=50)
Example No. 2
    "-t",
    "--timings",
    nargs='+',
    dest="timings",
    help=
    "The names of the Coq files (with or without the extension) whose timings should be extracted"
)
parser.add_argument("-c",
                    "--commits",
                    dest="commits",
                    help="Restrict the graph to the given commits.")
args = parser.parse_args()
pp = pprint.PrettyPrinter()
log_file = sys.stdin if args.file == "-" else open(args.file, "r")

results = parse_log.parse(log_file, parse_times=True)
if args.commits:
    commits = set(parse_log.parse_git_commits(args.commits))
    results = filter(lambda r: r.commit in commits, results)
results = list(results)

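# strip a trailing ".v" so the names match the keys in each result's times dict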
timings = list(map(lambda t: t[:-2] if t.endswith(".v") else t, args.timings))
for timing in timings:
    plt.plot(list(map(lambda r: r.times.get(timing), results)),
             marker=next(markers),
             markersize=8)
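# place the legend outside the axes and label the x axis with abbreviated commit hashes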
plt.legend(timings, loc='upper left', bbox_to_anchor=(1.05, 1.0))
plt.xticks(range(len(results)),
           list(map(lambda r: r.commit[:7], results)),
           rotation=70)
plt.subplots_adjust(bottom=0.2,
Example No. 3
    "--artifacts",
    dest="artifacts",
    help=
    "Location of the artifacts (following GitLab's folder structure).  If not given (which should be the common case), the artifacts will be downloaded from GitLab."
)
args = parser.parse_args()
log_file = sys.stdout if args.file == "-" else open(args.file, "a")

# determine the commit range, if none was given explicitly
if args.commits is None:
    if args.file == "-":
        raise Exception(
            "If you do not give explicit commits, you have to give a logfile so that we can determine the missing commits."
        )
    last_result = last(
        parse_log.parse(open(args.file, "r"), parse_times=parse_log.PARSE_NOT))
    args.commits = "{}..origin/master".format(last_result.commit)

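# look up the project on GitLab by its "namespace/name" path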
projects = req("projects?per_page=512")
project = first(
    filter(lambda p: p['path_with_namespace'] == args.project,
           projects.json()))
if project is None:
    sys.stderr.write("Project not found.\n")
    sys.exit(1)

BREAK = False
for commit in parse_log.parse_git_commits(args.commits):
    if BREAK:
        break
    print("Fetching {}...".format(commit))
Example No. 4
    "-c",
    "--commits",
    dest="commits",
    help=
    "The commits to fetch. Default is everything since the most recent entry in the log file."
)
args = parser.parse_args()
log_file = sys.stdout if args.file == "-" else open(args.file, "a")

# determine the commit range, if none was given explicitly
if args.commits is None:
    if args.file == "-":
        raise Exception(
            "If you do not give explicit commits, you have to give a logfile so that we can determine the missing commits."
        )
    last_result = last(parse_log.parse(open(args.file, "r"),
                                       parse_times=False))
    args.commits = "{}..origin/master".format(last_result.commit)

projects = req("projects")
project = first(
    filter(lambda p: p['path_with_namespace'] == args.project,
           projects.json()))
if project is None:
    sys.stderr.write("Project not found.\n")
    sys.exit(1)

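# fetch the metadata of every commit in the range from the GitLab API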
for commit in parse_log.parse_git_commits(args.commits):
    print("Fetching {}...".format(commit))
    commit_data = req("/projects/{}/repository/commits/{}".format(
        project['id'], commit))
    if commit_data.status_code != 200:
Example No. 5
# read command-line arguments
parser = argparse.ArgumentParser(description='Visualize iris-coq build times')
parser.add_argument("-f", "--file",
                    dest="file", required=True,
                    help="Filename to get the data from.")
parser.add_argument("-t", "--timings", nargs='+',
                    dest="timings",
                    help="The names of the Coq files (with or without the extension) whose timings should be extracted")
parser.add_argument("-c", "--commits",
                    dest="commits",
                    help="Restrict the graph to the given commits.")
args = parser.parse_args()
pp = pprint.PrettyPrinter()
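# "-" means the build log is read from stdin instead of a file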
log_file = sys.stdin if args.file == "-" else open(args.file, "r")

results = parse_log.parse(log_file, parse_times = True)
if args.commits:
    commits = set(parse_log.parse_git_commits(args.commits))
    results = filter(lambda r: r.commit in commits, results)
results = list(results)

timings = list(map(lambda t: t[:-2] if t.endswith(".v") else t, args.timings))
for timing in timings:
    plt.plot(list(map(lambda r: r.times.get(timing), results)), marker=next(markers), markersize=8)
plt.legend(timings, loc='upper left')
plt.xticks(range(len(results)), list(map(lambda r: r.commit[:7], results)), rotation=70)
plt.subplots_adjust(bottom=0.2) # more space for the commit labels

plt.xlabel('Commit')
plt.ylabel('Time (s)')
plt.title('Time to compile files')
Example No. 6
                    dest="commits",
                    help="The commits to fetch. Default is everything since the most recent entry in the log file.")
parser.add_argument("-a", "--artifacts",
                    dest="artifacts",
                    help="Location of the artifacts (following GitLab's folder structure).  If not given (which should be the common case), the artifacts will be downloaded from GitLab.")
parser.add_argument("-b", "--blacklist-branch",
                    dest="blacklist_branch",
                    help="Skip the commit if it is contained in the given branch.")
args = parser.parse_args()
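# "-" writes to stdout; otherwise results are appended to the existing log file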
log_file = sys.stdout if args.file == "-" else open(args.file, "a")

# determine the commit range, if none was given explicitly
if args.commits is None:
    if args.file == "-":
        raise Exception("If you do not give explicit commits, you have to give a logfile so that we can determine the missing commits.")
    last_result = last(parse_log.parse(open(args.file, "r"), parse_times = parse_log.PARSE_NOT))
    args.commits = "{}..origin/master".format(last_result.commit)

projects = req("projects?per_page=512")
project = first(filter(lambda p: p['path_with_namespace'] == args.project, projects.json()))
if project is None:
    sys.stderr.write("Project not found.\n")
    sys.exit(1)

BREAK = False
for commit in parse_log.parse_git_commits(args.commits):
    if BREAK:
        break
    # skip this commit if it is already contained in the blacklisted branch
    if args.blacklist_branch is not None:
        branches = subprocess.check_output(["git", "branch", "-r", "--contains", commit]).decode("utf-8")
Example No. 7
                    required=True,
                    help="The server (URL) to send the data to.")
parser.add_argument("-u",
                    "--user",
                    dest="user",
                    required=True,
                    help="Username for HTTP auth.")
parser.add_argument("--password",
                    dest="password",
                    required=True,
                    help="Password for HTTP auth.")
args = parser.parse_args()
pp = pprint.PrettyPrinter()
log_file = sys.stdin if args.file == "-" else open(args.file, "r")

results = parse_log.parse(log_file, parse_times=parse_log.PARSE_RAW)
if args.commits:
    commits = set(parse_log.parse_git_commits(args.commits))
    results = filter(lambda r: r.commit in commits, results)
results = list(results)

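# each data point corresponds to one commit; its raw times and commit date go into the request headers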
for datapoint in results:
    times = ''.join(datapoint.times)
    commit = datapoint.commit
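    # committer date of the commit in strict ISO 8601 format (--pretty=%cI)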
    date = subprocess.check_output(
        ['git', 'show', commit, '-s', '--pretty=%cI']).strip().decode('UTF-8')
    headers = {
        'X-Project': args.project,
        'X-Branch': args.branch,
        'X-Commit': commit,
        'X-Date': date
Example No. 8
                    dest="config", required=True,
                    help="The config string.")
parser.add_argument("-s", "--server",
                    dest="server", required=True,
                    help="The server (URL) to send the data to.")
parser.add_argument("-u", "--user",
                    dest="user", required=True,
                    help="Username for HTTP auth.")
parser.add_argument("--password",
                    dest="password", required=True,
                    help="Password for HTTP auth.")
args = parser.parse_args()
pp = pprint.PrettyPrinter()
log_file = sys.stdin if args.file == "-" else open(args.file, "r")

results = parse_log.parse(log_file, parse_times = parse_log.PARSE_RAW)
if args.commits:
    commits = set(parse_log.parse_git_commits(args.commits))
    results = filter(lambda r: r.commit in commits, results)
results = list(results)

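# POST the raw timing lines of each commit to the server's /build_times endpoint using HTTP basic auth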
for datapoint in results:
    times = '\n'.join(datapoint.times)
    commit = datapoint.commit
    print("Sending {}...".format(commit), end='')
    date = subprocess.check_output(['git', 'show', commit, '-s', '--pretty=%cI']).strip().decode('UTF-8')
    headers = {'X-Project': args.project, 'X-Branch': args.branch, 'X-Commit': commit, 'X-Config': args.config, 'X-Date': date}
    r = requests.post(args.server+"/build_times", data=times, headers=headers, auth=(args.user, args.password))
    print(" {}".format(r.text.strip()))
    r.raise_for_status()