def _gen_rows():
    """ Generates reports """
    days = _gen_days()
    metadata = _gen_metadata()
    rows = []
    for jobname, jobdays in sorted(metadata.items()):
        sub_item = [jobname]
        for day in days:
            try:
                dates_to_test = [
                    datetime.strptime(obj["build_endtime"], "%Y-%m-%dT%H:%M:%S.%f")
                    for obj in jobdays[day]
                ]
                max_date_for_day = max(dates_to_test)
                log.info(f"Testing {max_date_for_day}")
                for job in jobdays[day]:
                    _day = datetime.strptime(
                        job["build_endtime"], "%Y-%m-%dT%H:%M:%S.%f"
                    )
                    log.info(f"{_day} == {max_date_for_day}")
                    if _day == max_date_for_day:
                        sub_item.append(job)
            except (KeyError, ValueError):
                # No parsable build for this job on this day; pad the row with
                # an empty placeholder so the report keeps its column alignment.
                sub_item.append(
                    {
                        "job_name": jobname,
                        "bg_class": "",
                        "build_endtime": day,
                        "build_datetime": day,
                    }
                )
        rows.append(sub_item)
    return rows
async def finish_series_upgrade(machine, tools):
    log.info(f"completing series upgrade for machine {machine.id}")
    await tools.run(
        "juju",
        "upgrade-series",
        "--yes",
        "-m",
        tools.connection,
        machine.id,
        "complete",
    )
    await wait_for_status("active", _units(machine))
async def prep_series_upgrade(machine, new_series, tools):
    log.info(f"preparing series upgrade for machine {machine.id}")
    await tools.run(
        "juju",
        "upgrade-series",
        "--yes",
        "-m",
        tools.connection,
        machine.id,
        "prepare",
        new_series,
    )
    await wait_for_status("blocked", _units(machine))
async def do_series_upgrade(machine):
    file_name = "/etc/apt/apt.conf.d/50unattended-upgrades"
    option = "--force-confdef"
    log.info(f"doing series upgrade for machine {machine.id}")
    await machine.ssh(
        f"""
        if ! grep -q -- '{option}' {file_name}; then
            echo 'DPkg::options {{ "{option}"; }};' | sudo tee -a {file_name}
        fi
        sudo DEBIAN_FRONTEND=noninteractive do-release-upgrade -f DistUpgradeViewNonInteractive
        """
    )
    log.info(f"rebooting machine {machine.id}")
    try:
        await machine.ssh("sudo reboot && exit")
    except JujuError:
        # We actually expect this to "fail" because the reboot closes the session prematurely.
        pass
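# A minimal sketch of how the three series-upgrade helpers above might be
# chained for a single machine. The `machine`, `new_series`, and `tools`
# objects are assumed to come from the surrounding test harness, and the
# function name is illustrative; a wait for the machine to come back from its
# reboot may be needed before the final step.
async def example_series_upgrade(machine, new_series, tools):
    await prep_series_upgrade(machine, new_series, tools)  # units go "blocked"
    await do_series_upgrade(machine)                       # do-release-upgrade + reboot
    await finish_series_upgrade(machine, tools)            # units return to "active"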
def _cut_stable_release(layer_list, charm_list, ancillary_list, filter_by_tag,
                        dry_run):
    """This will merge each layer's master onto its stable branch.

    PLEASE NOTE: This step should come after each stable branch has been tagged
    and references a current stable bundle revision.

    layer_list: YAML spec containing git repos and their upstream/downstream properties
    charm_list: YAML spec containing git repos and their upstream/downstream properties
    """
    layer_list = yaml.safe_load(Path(layer_list).read_text(encoding="utf8"))
    charm_list = yaml.safe_load(Path(charm_list).read_text(encoding="utf8"))
    ancillary_list = yaml.safe_load(Path(ancillary_list).read_text(encoding="utf8"))
    new_env = os.environ.copy()
    for layer_map in layer_list + charm_list + ancillary_list:
        for layer_name, repos in layer_map.items():
            downstream = repos["downstream"]
            if not repos.get("needs_stable", True):
                continue
            tags = repos.get("tags", None)
            if tags:
                if not any(match in filter_by_tag for match in tags):
                    continue
            log.info(f"Releasing :: {layer_name:^35} :: from: master to: stable")
            if not dry_run:
                downstream = (
                    f"https://{new_env['CDKBOT_GH_USR']}:"
                    f"{new_env['CDKBOT_GH_PSW']}@github.com/{downstream}"
                )
                identifier = str(uuid.uuid4())
                os.makedirs(identifier)
                for line in git.clone(downstream, identifier, _iter=True):
                    log.info(line)
                git_rev_master = git(
                    "rev-parse", "origin/master", _cwd=identifier
                ).stdout.decode()
                git_rev_stable = git(
                    "rev-parse", "origin/stable", _cwd=identifier
                ).stdout.decode()
                if git_rev_master == git_rev_stable:
                    log.info(f"Skipping :: {layer_name:^35} :: master == stable")
                    continue
                git.config("user.email", "*****@*****.**", _cwd=identifier)
                git.config("user.name", "cdkbot", _cwd=identifier)
                git.config("--global", "push.default", "simple")
                git.checkout("-f", "stable", _cwd=identifier)
                git.merge("master", "--no-ff", _cwd=identifier)
                for line in git.push("origin", "stable", _cwd=identifier, _iter=True):
                    log.info(line)
async def get_svc_ingress(model, svc_name, timeout=2 * 60):
    log.info(f"Waiting for ingress address for {svc_name}")
    # Poll every 2 seconds until the timeout (in seconds) is exhausted.
    for attempt in range(timeout // 2):
        result = await kubectl(
            model,
            f"get svc {svc_name} -o jsonpath={{.status.loadBalancer.ingress[0].ip}}",
        )
        assert result.code == 0
        ingress_address = result.stdout
        log.info(f"Ingress address: {ingress_address}")
        if ingress_address != "":
            return ingress_address
        await asyncio.sleep(2)
    raise TimeoutError(
        f"Timed out waiting for {svc_name} to have an ingress address"
    )
async def wait_for_status(workload_status, units):
    if not isinstance(units, (list, tuple)):
        units = [units]
    log.info(
        f"waiting for {workload_status} status on "
        f'{", ".join(unit.name for unit in units)}'
    )
    model = units[0].model
    try:
        await model.block_until(
            lambda: all(unit.workload_status == workload_status for unit in units),
            timeout=120,
        )
    except asyncio.TimeoutError as e:
        unmatched_units = [
            f"{unit.name}={unit.workload_status}"
            for unit in units
            if unit.workload_status != workload_status
        ]
        raise AssertionError(
            f'Units with unexpected status: {",".join(unmatched_units)}'
        ) from e
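# A minimal usage sketch for the two helpers above, assuming a libjuju `model`
# with a deployed application named "kubernetes-control-plane" and a Kubernetes
# Service named "hello-world" behind a LoadBalancer; both names are illustrative.
async def example_wait_then_get_ingress(model):
    units = model.applications["kubernetes-control-plane"].units
    await wait_for_status("active", units)               # block until all units are active
    return await get_svc_ingress(model, "hello-world")   # poll for the LB ingress IP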
def __run_git(args):
    username, password, layer_name, upstream, downstream = args
    log.info(f"Syncing {layer_name} :: {upstream} -> {downstream}")
    downstream = f"https://{username}:{password}@github.com/{downstream}"
    identifier = str(uuid.uuid4())
    os.makedirs(identifier)
    ret = capture(f"git clone {downstream} {identifier}")
    if not ret.ok:
        log.info(f"Failed to clone repo: {ret.stderr.decode()}")
        sys.exit(1)
    cmd_ok("git config user.email '*****@*****.**'", cwd=identifier)
    cmd_ok("git config user.name cdkbot", cwd=identifier)
    cmd_ok("git config push.default simple", cwd=identifier)
    cmd_ok(f"git remote add upstream {upstream}", cwd=identifier)
    cmd_ok("git fetch upstream", cwd=identifier)
    cmd_ok("git checkout master", cwd=identifier)
    cmd_ok("git merge upstream/master", cwd=identifier)
    cmd_ok("git push origin", cwd=identifier)
    # Clean up the temporary clone directory.
    cmd_ok(f"rm -rf {identifier}")
def _sync_upstream(layer_list, charm_list, dry_run):
    """Syncs any of the forked upstream repos.

    layer_list: YAML spec containing git repos and their upstream/downstream properties
    """
    layer_list = yaml.safe_load(Path(layer_list).read_text(encoding="utf8"))
    charm_list = yaml.safe_load(Path(charm_list).read_text(encoding="utf8"))
    new_env = os.environ.copy()
    username = quote(new_env["CDKBOT_GH_USR"])
    password = quote(new_env["CDKBOT_GH_PSW"])
    repos_to_process = []
    for layer_map in layer_list + charm_list:
        for layer_name, repos in layer_map.items():
            upstream = repos["upstream"]
            downstream = repos["downstream"]
            if urlparse(upstream).path.lstrip("/") == downstream:
                log.info(f"Skipping {layer_name} :: {upstream} == {downstream}")
                continue
            items = (username, password, layer_name, upstream, downstream)
            log.info(f"Adding {layer_name} to queue")
            repos_to_process.append(items)
    if not dry_run:
        with concurrent.futures.ThreadPoolExecutor(max_workers=os.cpu_count()) as tp:
            git_runs = {tp.submit(__run_git, args): args for args in repos_to_process}
            for future in concurrent.futures.as_completed(git_runs):
                try:
                    future.result()
                except Exception as exc:
                    log.info(f"Failed thread: {exc}")
def _tag_stable_forks(layer_list, charm_list, k8s_version, bundle_rev,
                      filter_by_tag, bugfix, dry_run):
    """Tags stable forks to a certain bundle revision for a k8s version.

    layer_list: YAML spec containing git repos and their upstream/downstream properties
    bundle_rev: bundle revision to tag for a particular version of k8s

    git tag (ie. ck-{bundle_rev}), this would mean we tagged current stable
    branches for 1.14 with the latest charmed kubernetes (ck) bundle rev of
    {bundle_rev}

    TODO: Switch to different merge strategy

    git checkout master
    git checkout -b staging
    git merge stable -s ours
    git checkout stable
    git reset staging
    """
    layer_list = yaml.safe_load(Path(layer_list).read_text(encoding="utf8"))
    charm_list = yaml.safe_load(Path(charm_list).read_text(encoding="utf8"))
    new_env = os.environ.copy()
    for layer_map in layer_list + charm_list:
        for layer_name, repos in layer_map.items():
            tags = repos.get("tags", None)
            if tags:
                if not any(match in filter_by_tag for match in tags):
                    continue
            downstream = repos["downstream"]
            if bugfix:
                tag = f"{k8s_version}+{bundle_rev}"
            else:
                tag = f"ck-{k8s_version}-{bundle_rev}"
            if not repos.get("needs_tagging", True):
                log.info(f"Skipping {layer_name} :: does not require tagging")
                continue
            log.info(f"Tagging {layer_name} ({tag}) :: {repos['downstream']}")
            if not dry_run:
                downstream = (
                    f"https://{new_env['CDKBOT_GH_USR']}:"
                    f"{new_env['CDKBOT_GH_PSW']}@github.com/{downstream}"
                )
                identifier = str(uuid.uuid4())
                os.makedirs(identifier)
                for line in git.clone(downstream, identifier, _iter=True):
                    log.info(line)
                git.config("user.email", "*****@*****.**", _cwd=identifier)
                git.config("user.name", "cdkbot", _cwd=identifier)
                git.config("--global", "push.default", "simple")
                git.checkout("stable", _cwd=identifier)
                try:
                    for line in git.tag(
                        "--force", tag, _cwd=identifier, _iter=True, _bg_exc=False
                    ):
                        log.info(line)
                    for line in git.push(
                        "--force",
                        "origin",
                        tag,
                        _cwd=identifier,
                        _bg_exc=False,
                        _iter=True,
                    ):
                        log.info(line)
                except sh.ErrorReturnCode as error:
                    log.info(
                        f"Problem tagging: {error.stderr.decode().strip()}, "
                        "will skip for now.."
                    )
def _cut_stable_release(layer_list, charm_list, ancillary_list, filter_by_tag,
                        dry_run):
    """This will merge each layer's default branch onto its stable branch.

    PLEASE NOTE: This step should come after each stable branch has been tagged
    and references a current stable bundle revision.

    layer_list: YAML spec containing git repos and their upstream/downstream properties
    charm_list: YAML spec containing git repos and their upstream/downstream properties
    """
    layer_list = yaml.safe_load(Path(layer_list).read_text(encoding="utf8"))
    charm_list = yaml.safe_load(Path(charm_list).read_text(encoding="utf8"))
    ancillary_list = yaml.safe_load(Path(ancillary_list).read_text(encoding="utf8"))
    new_env = os.environ.copy()
    failed_to_release = []
    for layer_map in layer_list + charm_list + ancillary_list:
        for layer_name, repos in layer_map.items():
            downstream = repos["downstream"]
            if not repos.get("needs_stable", True):
                continue
            tags = repos.get("tags", None)
            if tags:
                if not any(match in filter_by_tag for match in tags):
                    continue
            auth = (new_env.get("CDKBOT_GH_USR"), new_env.get("CDKBOT_GH_PSW"))
            default_branch = repos.get("branch") or default_gh_branch(
                downstream, auth=auth
            )
            log.info(
                f"Releasing :: {layer_name:^35} :: from: {default_branch} to: stable"
            )
            downstream = f"https://{':'.join(auth)}@github.com/{downstream}"
            identifier = str(uuid.uuid4())
            os.makedirs(identifier)
            for line in git.clone(downstream, identifier, _iter=True):
                log.info(line)
            git_rev_default = (
                git("rev-parse", f"origin/{default_branch}", _cwd=identifier)
                .stdout.decode()
                .strip()
            )
            git_rev_stable = (
                git("rev-parse", "origin/stable", _cwd=identifier)
                .stdout.decode()
                .strip()
            )
            if git_rev_default == git_rev_stable:
                log.info(
                    f"Skipping :: {layer_name:^35} :: {default_branch} == stable"
                )
                continue
            log.info(f"Commits :: {layer_name:^35} :: {default_branch} != stable")
            log.info(f" {default_branch:10}= {git_rev_default:32}")
            log.info(f" {'stable':10}= {git_rev_stable:32}")
            for rev in git(
                "rev-list", f"origin/stable..origin/{default_branch}", _cwd=identifier
            ):
                for line in git.show(
                    "--format=%h %an '%s' %cr",
                    "--no-patch",
                    rev.strip(),
                    _cwd=identifier,
                ):
                    log.info(" " + line.strip())
            if not dry_run:
                git.config("user.email", "*****@*****.**", _cwd=identifier)
                git.config("user.name", "cdkbot", _cwd=identifier)
                git.config("--global", "push.default", "simple")
                git.checkout("-f", "stable", _cwd=identifier)
                git.reset(default_branch, _cwd=identifier)
                for line in git.push(
                    "origin", "stable", "-f", _cwd=identifier, _iter=True
                ):
                    log.info(line)
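# A minimal sketch (illustrative repo names, not from the real spec) of the
# YAML layout that _sync_upstream, _tag_stable_forks, and _cut_stable_release
# expect for layer_list / charm_list / ancillary_list: a list of single-key
# maps, one per layer, carrying the fields read above.
EXAMPLE_LAYER_SPEC = """
- layer-example:
    upstream: https://github.com/example/layer-example.git
    downstream: example-fork/layer-example
    branch: main          # optional; otherwise default_gh_branch() is queried
    needs_stable: true    # set false to skip the stable cut
    needs_tagging: true   # set false to skip tagging
    tags: ["k8s"]         # matched against filter_by_tag
"""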
def _gen_metadata():
    """ Generates metadata """
    log.info("Generating metadata...")
    items = []
    table = dynamodb.Table("CIBuilds")

    # Required because a single scan only returns 1MB of results
    # See: https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/GettingStarted.Python.04.html
    response = table.scan()
    for item in response["Items"]:
        items.append(item)
    while "LastEvaluatedKey" in response:
        response = table.scan(ExclusiveStartKey=response["LastEvaluatedKey"])
        for item in response["Items"]:
            items.append(item)

    db = OrderedDict()
    for obj in items:
        if "validate" not in obj["job_name"]:
            continue
        job_name = obj["job_name"]
        if "snap_version" in obj:
            job_name = f"{job_name}-{obj['snap_version']}"
        elif "juju_version" in obj:
            job_name = f"{job_name}-juju-{obj['juju_version']}"
        if "job_name_custom" in obj:
            job_name = obj["job_name_custom"]
        if job_name not in db:
            db[job_name] = {}
        if "build_endtime" not in obj:
            continue

        if "test_result" not in obj:
            result_bg_class = "bg-light"
            result_btn_class = "btn-light"
        elif not obj["test_result"] or int(obj["test_result"]) == 0:
            result_bg_class = "bg-danger"
            result_btn_class = "btn-danger"
        else:
            result_btn_class = "btn-success"
            result_bg_class = "bg-success"
        obj["bg_class"] = result_bg_class
        obj["btn_class"] = result_btn_class

        # build_endtime appears in two timestamp formats
        try:
            day = datetime.strptime(obj["build_endtime"], "%Y-%m-%dT%H:%M:%S.%f")
        except ValueError:
            day = datetime.strptime(obj["build_endtime"], "%Y-%m-%d %H:%M:%S.%f")

        # only keep builds from the last 30 days
        date_of_last_30 = datetime.today() - timedelta(days=30)
        if day < date_of_last_30:
            continue
        day = day.strftime("%Y-%m-%d")

        # set obj url
        debug_host_url = "https://jenkaas.s3.amazonaws.com/"
        build_log = obj.get("build_log", None)
        if build_log:
            build_log = str(Path(obj["build_log"]).parent)
            obj["debug_url"] = f"{debug_host_url}{obj['job_name']}/{build_log}"

        if day not in db[job_name]:
            db[job_name][day] = []
        db[job_name][day].append(obj)
    return db
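# A minimal sketch (values illustrative only) of the mapping _gen_metadata()
# returns and _gen_rows() consumes: job name -> day ("YYYY-MM-DD") -> list of
# DynamoDB build items, each annotated with the bg_class/btn_class set above.
EXAMPLE_METADATA = {
    "validate-ck-1.28": {
        "2023-01-02": [
            {
                "job_name": "validate-ck",
                "build_endtime": "2023-01-02T10:11:12.000000",
                "test_result": "1",
                "bg_class": "bg-success",
                "btn_class": "btn-success",
            }
        ]
    }
}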