Example #1
0
def postmortem(date, logger=LOGGER, config=CONFIG):
    """creates a postmortem file based on completed releases and their unresolved issues.
    archives release files that are completed
    using the same date will only append and archive releases as they are updated

    argument: date of planned postmortem. format: YYYY-MM-DD
    """

    if not date:
        logger.critical("For now, you must be explicit and specify --date")
        sys.exit(1)

    date = sanitize_date_input(date, logger)

    # materialize the generator once; the previous identity comprehension
    # ([release for release in ...]) was a redundant copy
    completed_releases = list(get_releases(config, logger, filter=complete_filter))
    postmortem_data_path = os.path.join(config["releasewarrior_data_repo"], config["postmortems"],
                                        "{}.json".format(date))
    postmortem_wiki_path = os.path.join(config["releasewarrior_data_repo"], config["postmortems"],
                                        "{}.md".format(date))
    wiki_template = config['templates']["wiki"]["postmortem"]

    # validate
    validate_rw_repo(logger, config)
    if not completed_releases:
        logger.warning("No recently completed releases. Nothing to do!")
        sys.exit(1)
    # make sure archive and postmortem dirs exist
    for product in config['releases']['archive']:
        os.makedirs(
            os.path.join(config['releasewarrior_data_repo'], config['releases']['archive'][product]),
            exist_ok=True
        )
    os.makedirs(os.path.join(config['releasewarrior_data_repo'], config['postmortems']), exist_ok=True)

    # get existing postmortem data; re-running with the same date appends to it
    postmortem_data = {
        "date": date,
        "complete_releases": []
    }
    if os.path.exists(postmortem_data_path):
        postmortem_data = load_json(postmortem_data_path)

    # archive completed releases
    for release in completed_releases:
        _, data_path, wiki_path = get_release_info(release["product"], release["version"],
                                                   logger, config)
        # add release to postmortem data
        postmortem_data["complete_releases"].append(generate_release_postmortem_data(release))
        # archive release: move its data and wiki files into the per-product archive dir
        archive_dir = os.path.join(config["releasewarrior_data_repo"],
                                   config["releases"]["archive"][release["product"]])
        git.move(data_path, os.path.join(archive_dir, os.path.basename(data_path)), logger, config)
        git.move(wiki_path, os.path.join(archive_dir, os.path.basename(wiki_path)), logger, config)

    commit_msg = "updates {} postmortem".format(date)
    # keep releases chronologically ordered in the postmortem output
    postmortem_data["complete_releases"] = sorted(postmortem_data["complete_releases"],
                                                  key=lambda x: x["date"])
    write_and_commit(postmortem_data, postmortem_data_path, postmortem_wiki_path,
                     commit_msg, logger, config, wiki_template=wiki_template)
Example #2
0
def sync(product, version, logger=LOGGER, config=CONFIG):
    """Regenerate the wiki page from the release's saved json data and commit it.

    product and version also determine the branch, e.g. 57.0rc, 57.0.1, 57.0b2, 52.0.1esr.
    """
    validate_rw_repo(logger, config)
    release, json_path, md_path = get_release_info(product, version, logger, config)
    validate(release, logger, config, must_exist=True, must_exist_in="inflight")

    release_data = load_json(json_path)
    message = f"{product} {version} - syncing wiki with current data"
    write_and_commit(release_data, json_path, md_path, message, logger, config)
Example #3
0
def track(product, version, gtb_date, logger=LOGGER, config=CONFIG):
    """Start tracking an upcoming release.

    product and version is also used to determine branch. e.g 57.0rc, 57.0.1, 57.0b2, 52.0.1esr

    Args:
        product: release product name
        version: release version string; also determines the branch
        gtb_date: planned go-to-build date; normalized via sanitize_date_input
        logger: logger instance
        config: releasewarrior configuration mapping
    """
    validate_rw_repo(logger, config)
    release, data_path, wiki_path = get_release_info(product, version, logger, config)
    # the release must not already be tracked
    validate(release, logger, config, must_exist=False)

    commit_msg = "{} {} started tracking upcoming release.".format(product, version)
    gtb_date = sanitize_date_input(gtb_date, logger)
    # NOTE: removed a dead `data = {}` assignment that was immediately
    # overwritten by the call below
    data = get_tracking_release_data(release, gtb_date, logger, config)

    write_and_commit(data, data_path, wiki_path, commit_msg, logger, config)
Example #4
0
def cancel(product, version, logger=LOGGER, config=CONFIG):
    """Abort the current buildnum of the given release.

    Similar to newbuild, except no replacement build is created afterwards.
    """
    validate_rw_repo(logger, config)
    release, json_path, md_path = get_release_info(product, version, logger, config)
    validate(release, logger, config, must_exist=True, must_exist_in="inflight")
    release_data = load_json(json_path)

    logger.info("Most recent buildnum has been aborted. Release cancelled.")
    message = f"{product} {version} - cancelling release"
    # mark only the most recent inflight build as aborted
    build_index = get_current_build_index(release_data)
    release_data["inflight"][build_index]["aborted"] = True

    write_and_commit(release_data, json_path, md_path, message, logger, config)
Example #5
0
def prereq(product, version, resolve, logger=LOGGER, config=CONFIG):
    """Add or resolve a prerequisite (pre gtb) human task.

    product and version also determine the branch, e.g. 57.0rc, 57.0.1, 57.0b2, 52.0.1esr.
    With no options given, the user is prompted to add a prerequisite human task.
    """
    validate_rw_repo(logger, config)
    release, json_path, md_path = get_release_info(product, version, logger, config)
    validate(release, logger, config, must_exist=True, must_exist_in="upcoming")
    release_data = load_json(json_path)

    suffix = f"Resolved {resolve}" if resolve else ""
    message = f"{product} {version} - updated prerequisites. {suffix}"
    release_data = update_prereq_human_tasks(release_data, resolve)
    release_data = order_data(release_data)

    write_and_commit(release_data, json_path, md_path, message, logger, config)
Example #6
0
def issue(product, version, resolve, logger=LOGGER, config=CONFIG):
    """Add or resolve an issue against the current buildnum.

    product and version also determine the branch, e.g. 57.0rc, 57.0.1, 57.0b2, 52.0.1esr.
    With no options given, the user is prompted to add an issue.
    """
    validate_rw_repo(logger, config)
    release, json_path, md_path = get_release_info(product, version, logger, config)
    validate(release, logger, config, must_exist=True, must_exist_in="inflight")
    release_data = load_json(json_path)

    suffix = f"Resolved {resolve}" if resolve else ""
    message = f"{product} {version} - updated inflight issue. {suffix}"
    release_data = update_inflight_issue(release_data, resolve, logger)
    release_data = order_data(release_data)

    write_and_commit(release_data, json_path, md_path, message, logger, config)
Example #7
0
def graphid(graphid, product, version, phase, logger=LOGGER, config=CONFIG):
    """Add a graphid to a release.

    product and version also determine the branch, e.g. 57.0rc, 57.0.1, 57.0b2, 52.0.1esr.
    The phase must match the product.
    """
    validate_rw_repo(logger, config)
    release, json_path, md_path = get_release_info(product, version, logger, config)
    validate(release, logger, config, must_exist=True, must_exist_in="inflight")
    release_data = load_json(json_path)

    # reject phases that don't belong to this product before touching data
    validate_phase(product, version, phase, logger, config)
    message = f"{product} {version} - added {phase} graphid."
    release_data = update_inflight_graphid(release_data, phase, graphid, logger)
    release_data = order_data(release_data)

    write_and_commit(release_data, json_path, md_path, message, logger, config)
Example #8
0
def newbuild(product, version, logger=LOGGER, config=CONFIG):
    """Mark a release as submitted to shipit.

    product and version also determine the branch, e.g. 57.0rc, 57.0.1, 57.0b2, 52.0.1esr.
    On the first buildnum the release moves from the upcoming dir to inflight;
    otherwise the buildnum of the already-inflight release is incremented.
    """
    validate_rw_repo(logger, config)
    release, json_path, md_path = get_release_info(product, version, logger, config)
    validate(release, logger, config, must_exist=True)
    release_data = load_json(json_path)

    message = f"{product} {version} - new buildnum started."
    # paths may change here (e.g. upcoming -> inflight), so rebind them as well
    release_data, json_path, md_path = generate_newbuild_data(
        release_data, release, json_path, md_path, logger, config)
    release_data = order_data(release_data)

    write_and_commit(release_data, json_path, md_path, message, logger, config)
Example #9
0
def task(product, version, resolve, logger=LOGGER, config=CONFIG):
    """Add or resolve a human task within the current buildnum.

    product and version also determine the branch, e.g. 57.0rc, 57.0.1, 57.0b2, 52.0.1esr.
    With no options given, the user is prompted to add a task.
    """
    validate_rw_repo(logger, config)
    release, json_path, md_path = get_release_info(product, version, logger, config)
    # adding a human task is allowed before the release is inflight;
    # resolving one requires it to already be inflight
    if resolve:
        required_location = "inflight"
    else:
        required_location = None
    validate(release, logger, config, must_exist=True, must_exist_in=required_location)
    release_data = load_json(json_path)

    suffix = f"Resolved {resolve}" if resolve else ""
    message = f"{product} {version} - updated inflight tasks. {suffix}"
    release_data = update_inflight_human_tasks(release_data, resolve, logger)
    release_data = order_data(release_data)

    write_and_commit(release_data, json_path, md_path, message, logger, config)
Example #10
0
def wnp_blob(wnp_url,
             product,
             version,
             blob_name,
             for_channels,
             for_locales,
             for_version,
             output_file,
             logger=LOGGER,
             config=CONFIG):
    """Generate local Balrog blob with WNP rule."""
    if blob_name:
        # A fully specified blob name wins even when Balrog knows several
        # blobs matching it; the others are suffixed differently
        # (e.g: Firefox-62.0-build1 vs Firefox-62.0-build1-No-WNP)
        try:
            ensure_blob_name_exists_on_balrog(blob_name)
        except TooManyBlobsFoundError:
            pass
    else:
        # No blob name given: derive one from the release currently inflight.
        validate_rw_repo(logger, config)
        release, data_path, _ = get_release_info(product, version, logger, config)
        validate(release, logger, config, must_exist=True, must_exist_in="inflight")
        data = load_json(data_path)
        build_number = get_current_build_index(data) + 1
        blob_name = f'{product}-{version}-build{build_number}'
        try:
            ensure_blob_name_exists_on_balrog(blob_name)
        except TooManyBlobsFoundError as e:
            # Derived name is ambiguous; the user must pick one explicitly.
            logger.critical(f'{e}. Which one do you want? (specify with --blob-name)')
            sys.exit(1)

    orig_blob = get_release_blob(blob_name)
    new_blob = craft_wnp_blob(orig_blob, wnp_url, for_channels, for_locales, for_version)
    json.dump(new_blob, output_file, sort_keys=True, indent=4)
    logger.info(f'New blob written at: {output_file.name}')