Example #1
def main():
    flags = docopt.docopt(__doc__)
    now = int(time.time())
    # single-repository mode: compute metrics for the one repo given as <path_to_git_repo>
    if flags['<path_to_git_repo>']:
        path_to_git_repo = flags['<path_to_git_repo>']
        repo_name = os.path.basename(os.path.abspath(path_to_git_repo))
        run = mk_run(path_to_git_repo)
        if flags["open-branches"]:
            master_branch = flags['--master-branch'] or 'origin/master'
            assert_master_branch(run, master_branch)
            gen = commit_author_time_and_branch_ref(run, master_branch)
            data = ((now, t, b, repo_name) for t, b in gen)
            write_open_branches_csv_file(data)
        elif flags["release-lead-time"]:
            earliest_date = int(flags["--earliest-date"] or 0)
            pattern = flags['--tag-pattern'] or '*'
            gen = commit_author_time_tag_author_time_and_from_to_tag_name(
                run,
                partial(fnmatch, pat=pattern),
                earliest_date,
            )
            data = ((cat, tat, old_tag, tag, repo_name) for cat, tat, old_tag, tag in gen)
            write_release_lead_time_csv_file(data)
    if flags["batch"] and flags["--open-branches"]:
        for path_to_git_repo in flags['<path_to_git_repos>']:
            print("checking master branch in repo:", path_to_git_repo, file=sys.stderr)
            run = mk_run(path_to_git_repo)
            assert_master_branch(run, 'origin/master')
        data = []
        for path_to_git_repo in flags['<path_to_git_repos>']:
            print("fetching data from in repo:", path_to_git_repo, file=sys.stderr)
            repo_name = os.path.basename(os.path.abspath(path_to_git_repo))
            run = mk_run(path_to_git_repo)
            gen = commit_author_time_and_branch_ref(run, 'origin/master')
            data.extend((now, t, b, repo_name) for t, b in gen)
        write_open_branches_csv_file(data)
    elif flags["batch"] and flags["--release-lead-time"]:
        earliest_date = int(flags["--earliest-date"] or 0)
        data = []
        for path_to_git_repo in flags['<path_to_git_repos>']:
            print("fetching data from in repo:", path_to_git_repo, file=sys.stderr)
            repo_name = os.path.basename(os.path.abspath(path_to_git_repo))
            run = mk_run(path_to_git_repo)
            gen = commit_author_time_tag_author_time_and_from_to_tag_name(
                run,
                lambda _: True,
                earliest_date=earliest_date
            )
            data.extend((cat, tat, old_tag, tag, repo_name) for cat, tat, old_tag, tag in gen)
        write_release_lead_time_csv_file(data)
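The usage string that docopt.docopt(__doc__) parses is not part of this excerpt. As a rough sketch only, a docstring along the following lines would produce the keys main() reads; the program name, command spellings and option names here are inferred from the dictionary lookups above, not taken from the project itself.

"""Collect git metrics (hypothetical usage string, inferred from the keys read in main()).

Usage:
    git_metrics.py open-branches [--master-branch=<branch>] <path_to_git_repo>
    git_metrics.py release-lead-time [--tag-pattern=<glob>] [--earliest-date=<timestamp>] <path_to_git_repo>
    git_metrics.py batch (--open-branches | --release-lead-time) [--earliest-date=<timestamp>] <path_to_git_repos>...
"""
import docopt

# Parsing an example command line shows how docopt fills the dictionary:
# commands map to booleans, options to their values (or None), positionals
# to strings or lists.
flags = docopt.docopt(__doc__, argv=["open-branches", "--master-branch=origin/main", "."])
print(flags["open-branches"])        # True
print(flags["--master-branch"])      # 'origin/main'
print(flags["<path_to_git_repo>"])   # '.'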
Example #2
def calculate_lead_time(path_to_git_repo, pattern, start_date):
    run = mk_run(path_to_git_repo)
    gen = commit_author_time_tag_author_time_and_from_to_tag_name(
        run,
        partial(fnmatch, pat=pattern),
        start_date,
    )
    lead_time_data = ((tat - cat) for cat, tat, old_tag, tag in gen)
    mean_seconds = statistics.mean(lead_time_data)
    return mean_seconds
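commit_author_time_tag_author_time_and_from_to_tag_name is a project helper that the excerpt does not show; the unpacking above implies it yields (commit_author_time, tag_author_time, previous_tag, tag) tuples, with both times as Unix timestamps. A self-contained sketch of the same lead-time arithmetic, with made-up data standing in for the helper:

import statistics

def fake_commit_and_tag_times():
    # Stand-in for the real generator: (commit author time, tag author time,
    # previous tag, tag), times as Unix timestamps.
    yield 1_700_000_000, 1_700_086_400, "v1.0", "v1.1"   # released one day after the commit
    yield 1_700_000_000, 1_700_259_200, "v1.1", "v1.2"   # released three days after the commit

lead_times = [tat - cat for cat, tat, _old_tag, _tag in fake_commit_and_tag_times()]
print(statistics.mean(lead_times) / 86400)               # 2.0 (days)

Note that statistics.mean raises StatisticsError on an empty iterable, which is why a later example falls back to "N/A" when no tags match.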
Example #3
def calculate_deploy_interval(path_to_git_repo, pattern, start_date, now):
    run = mk_run(path_to_git_repo)
    gen = fetch_tags_and_author_dates(
        run,
        partial(fnmatch, pat=pattern),
        start_date,
    )
    deployment_data = set(tat for tag, tat in gen)
    interval_seconds = (now - start_date) / len(deployment_data)
    return interval_seconds
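fetch_tags_and_author_dates is likewise not included; the code treats it as yielding (tag, tag_author_time) pairs, and the set() collapses tags that share an author date. The interval itself is just the observation window divided by the number of distinct deployments, for example:

# Hypothetical deployment timestamps (tag author dates) inside a 30-day window.
deployment_data = {1_700_000_000, 1_700_432_000, 1_700_864_000, 1_701_296_000, 1_701_728_000}
start_date = 1_699_900_000
now = start_date + 30 * 86400
print((now - start_date) / len(deployment_data) / 86400)   # 6.0 days between deployments on average

This version raises ZeroDivisionError when no tags match the pattern; the variant further down returns "N/A" in that case.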
Example #4
def calculate_lead_time(path_to_git_repo, pattern, start_date):
    run = mk_run(path_to_git_repo)
    deployment_data = list(commit_author_time_tag_author_time_and_from_to_tag_name(
        run,
        partial(fnmatch, pat=pattern),
        start_date,
    ))
    deployment_tag_pairs = set(["%s..%s" % (old_tag, tag) for cat, tat, old_tag, tag in deployment_data])
    log.info("calculating lead time data from deployments %s", deployment_tag_pairs)
    lead_time_data = [(tat - cat) for cat, tat, old_tag, tag in deployment_data]
    return statistics.mean(lead_time_data) if lead_time_data else "N/A"
Example #5
def calculate_deploy_interval(path_to_git_repo, pattern, start_date, now):
    run = mk_run(path_to_git_repo)
    deployments = list(fetch_tags_and_author_dates(
        run,
        partial(fnmatch, pat=pattern),
        start_date,
    ))
    log.info("calculating deploy interval from deployments %s", deployments)
    deployment_data = set(tat for tag, tat in deployments)
    interval_seconds = (now - start_date) / len(deployment_data) if deployment_data else "N/A"
    return interval_seconds
Example #6
def calculate_change_fail_rate(path_to_git_repo, deploy_pattern, patch_pattern, start_date):
    run = mk_run(path_to_git_repo)
    deploy_tags = list(fetch_tags_and_author_dates(
        run,
        partial(fnmatch, pat=deploy_pattern),
        start_date,
    ))
    patch_tags = list(fetch_tags_and_author_dates(
        run,
        partial(fnmatch, pat=patch_pattern),
        start_date,
    ))
    log.info("calculating change fail rate from patches: %s and deploys: %s", patch_tags, deploy_tags)
    return len(patch_tags) / len(deploy_tags) * 100 if deploy_tags else "N/A"
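The change fail rate is simply the number of patch tags expressed as a percentage of deploy tags. A self-contained sketch with hypothetical tag naming conventions (deploy-* for releases, patch-* for hotfixes; the real patterns are whatever the caller passes in):

from fnmatch import fnmatch

# Hypothetical (tag, author date) pairs, shaped like fetch_tags_and_author_dates output.
tags = [
    ("deploy-1", 1_700_000_000),
    ("deploy-2", 1_700_100_000),
    ("patch-2",  1_700_150_000),   # hotfix for deploy-2
    ("deploy-3", 1_700_200_000),
    ("deploy-4", 1_700_300_000),
]
deploy_tags = [t for t in tags if fnmatch(t[0], "deploy-*")]
patch_tags = [t for t in tags if fnmatch(t[0], "patch-*")]
print(len(patch_tags) / len(deploy_tags) * 100)   # 25.0 (% of deploys that needed a patch)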
Example #7
def calculate_change_fail_rate(path_to_git_repo, deploy_pattern, patch_pattern,
                               start_date):
    run = mk_run(path_to_git_repo)
    deploy_tags = fetch_tags_and_author_dates(
        run,
        partial(fnmatch, pat=deploy_pattern),
        start_date,
    )
    patch_tags = fetch_tags_and_author_dates(
        run,
        partial(fnmatch, pat=patch_pattern),
        start_date,
    )
    change_fail_rate = len(list(patch_tags)) / len(list(deploy_tags)) * 100
    return change_fail_rate
Example #8
def calculate_MTTR(path_to_git_repo, deploy_pattern, patch_pattern,
                   start_date):
    run = mk_run(path_to_git_repo)
    match_deploy = partial(fnmatch, pat=deploy_pattern)
    match_patch = partial(fnmatch, pat=patch_pattern)
    deploy_tags_author_date = fetch_tags_and_author_dates(
        run,
        match_deploy,
        start_date,
    )
    deploy_tags_commit_date = dict(fetch_tags_and_sha(run, match_deploy))
    patch_dates = set(date
                      for _tag, date in fetch_tags_and_sha(run, match_patch))
    deployments = []
    for deploy_tag, deploy_date in deploy_tags_author_date:
        is_patch = find_is_patch(deploy_tag, deploy_tags_commit_date,
                                 patch_dates)
        deployments.append(Deployment(is_patch, deploy_date))
    outages = find_outages(deployments)
    downtime = (end.time - start.time for start, end in outages)
    return statistics.mean(downtime)
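Deployment, find_is_patch and find_outages are project helpers not included in this excerpt. Judging only from how calculate_MTTR uses them, Deployment carries an is_patch flag and a timestamp, and find_outages yields (start, end) pairs in which start is the deploy that broke production and end is the patch deploy that restored it. A self-contained sketch under those assumptions (the real pairing logic may differ):

import statistics
from collections import namedtuple

# Hypothetical stand-ins, inferred from the attribute accesses in calculate_MTTR.
Deployment = namedtuple("Deployment", ["is_patch", "time"])

def find_outages(deployments):
    start = None
    for deploy in deployments:
        if deploy.is_patch and start is not None:
            yield start, deploy          # outage runs from the bad deploy to its patch
            start = None
        elif not deploy.is_patch:
            start = deploy               # most recent regular deploy; may turn out to be bad
    # A still-open outage at the end of the series is ignored in this sketch.

deployments = [
    Deployment(False, 1_700_000_000),    # ordinary deploy
    Deployment(False, 1_700_100_000),    # deploy that caused an incident
    Deployment(True,  1_700_107_200),    # patch two hours later
]
downtime = [end.time - start.time for start, end in find_outages(deployments)]
print(statistics.mean(downtime) / 3600)  # 2.0 (hours mean time to repair)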