def log_release_status(release, logger):
    """Log a human-readable status summary for a release's current build.

    Emits the release header, taskcluster graph links, incomplete human
    tasks, and unresolved issues for the most recent inflight build.

    Args:
        release (dict): release data with "product", "version", "date",
            and an "inflight" list of build dicts, each with "buildnum",
            "human_tasks", "issues", and "graphids".
        logger: logger-like object exposing ``info``.
    """
    # NOTE(review): a later definition of log_release_status in this file
    # shadows this one at import time -- confirm which version is intended.
    current_build_index = get_current_build_index(release)
    # Hoist the repeated release["inflight"][current_build_index] lookup.
    current_build = release["inflight"][current_build_index]
    remaining_tasks = get_remaining_items(current_build["human_tasks"])
    remaining_issues = get_remaining_items(current_build["issues"])

    logger.info("=" * 79)
    logger.info("RELEASE: %s %s build%s %s", release["product"],
                release["version"], current_build["buildnum"], release["date"])
    for graph_info in current_build["graphids"]:
        logger.info(
            "%s graph: https://tools.taskcluster.net/task-group-inspector/#/%s",
            graph_info[0], graph_info[1])

    logger.info("\tIncomplete human tasks:")
    for task in remaining_tasks:
        # Not every task has an alias; show it only when present.
        alias = ""
        if task.get("alias"):
            alias = "(alias: {})".format(task["alias"])
        logger.info("\t\t* ID %s %s - %s", task["id"], alias, task["description"])

    logger.info("\tUnresolved issues:")
    for issue in remaining_issues:
        logger.info("\t\t* ID: %s bug: %s - %s", issue["id"], issue["bug"],
                    issue["description"])
def log_release_status(release, logger):
    """Log a human-readable status summary for a release's current build.

    Emits the release header, taskcluster graph links, releaserunner
    export variables, then (each section only when non-empty where noted)
    incomplete preflight tasks, incomplete human tasks, and unresolved
    issues for the most recent inflight build.

    Args:
        release (dict): release data with "product", "version", "date",
            a "preflight" dict holding "human_tasks", and an "inflight"
            list of build dicts, each with "buildnum", "human_tasks",
            "issues", and "graphids".
        logger: logger-like object exposing ``info``.
    """
    current_build_index = get_current_build_index(release)
    # Hoist the build lookup once and use it consistently below.
    current_build = release["inflight"][current_build_index]
    remaining_preflight_tasks = get_remaining_items(
        release["preflight"]["human_tasks"])
    remaining_tasks = get_remaining_items(current_build["human_tasks"])
    remaining_issues = get_remaining_items(current_build["issues"])

    logger.info("=" * 79)
    logger.info("RELEASE: %s %s build%s %s", release["product"],
                release["version"], current_build["buildnum"], release["date"])
    for graph_info in current_build["graphids"]:
        logger.info(
            "%s graph: https://tools.taskcluster.net/task-group-inspector/#/%s",
            graph_info[0], graph_info[1])

    logger.info("")
    logger.info("\treleaserunner variables:")
    for graph_info in current_build["graphids"]:
        # NOTE(review): written with print() rather than the logger --
        # presumably so the export lines can be copy-pasted verbatim;
        # confirm this is intentional.
        print("\t\texport {}_TASK_ID={}".format(graph_info[0].upper(),
                                                graph_info[1]))

    # Emit the preflight header once, only when there is something to show
    # (replaces the first-iteration boolean flag; behavior is identical).
    if remaining_preflight_tasks:
        logger.info("")
        logger.info("\tIncomplete preflight tasks:")
    for task in remaining_preflight_tasks:
        logger.info("\t\t* Deadline {} - ID {} - Bug {} - {}".format(
            task["deadline"], task["id"], task["bug"], task["description"]))

    logger.info("")
    logger.info("\tIncomplete human tasks:")
    for task in remaining_tasks:
        # Not every task has an alias; show it only when present.
        alias = ""
        if task.get("alias"):
            alias = "(alias: {})".format(task["alias"])
        logger.info("\t\t* ID %s %s - %s", task["id"], alias, task["description"])

    logger.info("\tUnresolved issues:")
    for issue in remaining_issues:
        logger.info("\t\t* ID: %s bug: %s - %s", issue["id"], issue["bug"],
                    issue["description"])
def status(verbose, logger=LOGGER, config=CONFIG):
    """shows upcoming prerequisites and inflight human tasks

    Logs three sections: upcoming releases (preflight prerequisites),
    inflight releases (human tasks and issues), and -- only in verbose
    mode -- completed releases that have not yet been archived.

    Args:
        verbose (bool): when truthy, list all upcoming releases (not just
            those with incomplete prerequisites) and add the COMPLETED
            RELEASES section.
        logger: logger-like object exposing ``info``.
        config (dict): releasewarrior configuration.

    Raises:
        SystemExit: when the data repo is not up to date.
    """
    ###
    if not validate_data_repo_updated(logger, config):
        sys.exit(1)

    # upcoming prerequisites
    # Choose the filter up front instead of querying twice and discarding
    # the first result when verbose is set.
    upcoming_filter = no_filter if verbose else incomplete_filter
    upcoming_releases = sorted(
        get_releases(config, logger, inflight=False, filter=upcoming_filter),
        key=lambda release: release["date"], reverse=True)
    logger.info("UPCOMING RELEASES...")
    if not upcoming_releases:
        logger.info("=" * 79)
        logger.info("[no upcoming releases with prerequisite tasks to do]")
    for release in upcoming_releases:
        remaining_prereqs = get_remaining_items(release["preflight"]["human_tasks"])
        logger.info("=" * 79)
        logger.info("Upcoming Release: %s %s", release["product"],
                    release["version"])
        logger.info("Expected GTB: %s", release["date"])
        logger.info("\tIncomplete prerequisites:")
        for prereq in remaining_prereqs:
            logger.info("\t\t* ID: %s, deadline: %s, bug %s - %s", prereq['id'],
                        prereq['deadline'], prereq["bug"], prereq["description"])
        if not remaining_prereqs:
            logger.info("\t\t* none")
    ###

    ###
    # releases in flight
    incomplete_releases = list(get_releases(config, logger,
                                            filter=incomplete_filter))
    logger.info("")
    logger.info("INFLIGHT RELEASES...")
    if not incomplete_releases:
        logger.info("=" * 79)
        logger.info("[no inflight releases with human tasks to do]")
    for release in incomplete_releases:
        log_release_status(release, logger)
    ###

    ###
    # completed releases (unresolved issues)
    if verbose:
        complete_releases = list(get_releases(config, logger,
                                              filter=complete_filter))
        logger.info("")
        logger.info("COMPLETED RELEASES...")
        if not complete_releases:
            logger.info("=" * 79)
            logger.info("[all completed releases have been archived]")
        for release in complete_releases:
            log_release_status(release, logger)
def generate_newbuild_data(data, release, data_path, wiki_path, logger, config):
    """Update release tracking data for a new build.

    On the first "go to build" (the data file still lives under the
    upcoming dir): resolve the shipit human task and move the data/wiki
    files into the inflight directory, updating both paths. Otherwise:
    mark the current buildnum aborted and append a fresh buildnum that
    carries forward only unresolved issues.

    Args:
        data (dict): release tracking data (mutated in place).
        release: release descriptor; only ``release.product`` is read here.
        data_path (str): path to the release's json data file.
        wiki_path (str): path to the release's markdown wiki file.
        logger: logger-like object exposing ``info``.
        config (dict): releasewarrior configuration.

    Returns:
        tuple: ``(data, data_path, wiki_path)`` -- the paths point at the
        inflight directory when the files were moved.
    """
    # First GTB is signalled by the data file still living under "upcoming".
    is_first_gtb = "upcoming" in data_path
    current_build_index = get_current_build_index(data)
    if is_first_gtb:
        # resolve shipit task; mutate the task dicts directly rather than
        # re-indexing the list through enumerate().
        for task in data["inflight"][current_build_index]["human_tasks"]:
            if task["alias"] == "shipit":
                task["resolved"] = True
        # delete json and md files from upcoming dir, and set new dest
        # paths to be inflight
        repo = Repo(config['releasewarrior_data_repo'])
        inflight_dir = os.path.join(
            config['releasewarrior_data_repo'],
            config['releases']['inflight'][release.product])
        moved_files = repo.index.move([data_path, wiki_path, inflight_dir])
        # set data and wiki paths to new dest (inflight) dir
        # moved_files is a list of (source, destination) tuples, in the
        # order the files were passed to move()
        # TODO
        data_path = os.path.join(config['releasewarrior_data_repo'],
                                 moved_files[0][1])
        wiki_path = os.path.join(config['releasewarrior_data_repo'],
                                 moved_files[1][1])
    else:
        # kill latest buildnum, add new buildnum based on most recent one
        logger.info(
            "most recent buildnum has been aborted, starting a new buildnum")
        newbuild = deepcopy(data["inflight"][current_build_index])
        # abort the now-previous buildnum
        data["inflight"][current_build_index]["aborted"] = True
        newbuild["aborted"] = False
        for task in newbuild["human_tasks"]:
            if task["alias"] == "shipit":
                continue  # leave submitted to shipit as resolved
            # reset all tasks to unresolved
            task["resolved"] = False
        # carry forward only unresolved issues
        newbuild["issues"] = list(get_remaining_items(newbuild["issues"]))
        # increment buildnum
        newbuild["buildnum"] += 1
        # ignore old graphids
        newbuild["graphids"] = []
        # add new buildnum based on previous to current release
        data["inflight"].append(newbuild)
        # (dropped: a dead re-read of get_current_build_index(data) whose
        # result was never used before returning)
    return data, data_path, wiki_path