def _team(context: "Context", team_context: "TeamContext", output_path: str) -> None:
    """Render the kubectl manifests for a single team into ``output_path``.

    Writes the team manifest and the team RBAC role; additionally writes the
    admin cluster binding when the team has ``k8_admin`` rights.
    """
    # Team manifest: resolve every env/team parameter into the model template.
    src = os.path.join(MODELS_PATH, "teams", "00-team.yaml")
    dst = os.path.join(output_path, f"{team_context.name}-00-team.yaml")
    with open(src, "r") as file:
        template: str = file.read()
    rendered = utils.resolve_parameters(
        template,
        dict(
            team=team_context.name,
            efsid=context.shared_efs_fs_id,
            efsapid=team_context.efs_ap_id,
            efsprivateapid=team_context.efs_private_ap_id if team_context.efs_private_ap_id else "",
            account_id=context.account_id,
            env_name=context.name,
            role_prefix=f"/{context.role_prefix}/" if context.role_prefix else "/",
            team_kms_key_arn=team_context.team_kms_key_arn,
            team_security_group_id=team_context.team_security_group_id,
            cluster_pod_security_group_id=context.cluster_pod_sg_id,
            team_context=ContextSerDe.dump_context_to_str(team_context),
            env_context=ContextSerDe.dump_context_to_str(context),
            region=context.region,
        ),
    )
    _logger.debug("Kubectl Team %s manifest:\n%s", team_context.name, rendered)
    with open(dst, "w") as file:
        file.write(rendered)

    # Team RBAC role.
    src = os.path.join(MODELS_PATH, "teams", "01-team-rbac-role.yaml")
    dst = os.path.join(output_path, f"{team_context.name}-01-team-rbac-role.yaml")
    with open(src, "r") as file:
        template = file.read()
    rendered = utils.resolve_parameters(template, dict(env_name=context.name, team=team_context.name))
    with open(dst, "w") as file:
        file.write(rendered)

    # Bind the team to the admin role only when explicitly requested.
    if team_context.k8_admin:
        src = os.path.join(MODELS_PATH, "teams", "02-admin-binding.yaml")
        dst = os.path.join(output_path, f"{team_context.name}-02-admin-binding.yaml")
        with open(src, "r") as file:
            template = file.read()
        rendered = utils.resolve_parameters(template, dict(team=team_context.name))
        with open(dst, "w") as file:
            file.write(rendered)
def _orbit_controller(context: "Context", output_path: str) -> None:
    """Render the orbit-controller manifest(s) into ``output_path``."""
    for filename in ["01a-orbit-controller.yaml"]:
        src = os.path.join(MODELS_PATH, "orbit-system", filename)
        dst = os.path.join(output_path, filename)
        with open(src, "r") as file:
            template: str = file.read()
        rendered = resolve_parameters(
            template,
            dict(
                env_name=context.name,
                code_build_image=f"{context.images.code_build.repository}:"
                f"{context.images.code_build.version}",
                orbit_controller_image=f"{context.images.orbit_controller.repository}:"
                f"{context.images.orbit_controller.version}",
                k8s_utilities_image=f"{context.images.k8s_utilities.repository}:"
                f"{context.images.k8s_utilities.version}",
                # .dev0 builds always re-pull so developers pick up fresh images.
                image_pull_policy="Always" if aws_orbit.__version__.endswith(".dev0") else "IfNotPresent",
                account_id=context.account_id,
                region=context.region,
                sts_ep="legacy" if context.networking.data.internet_accessible else "regional",
            ),
        )
        with open(dst, "w") as file:
            file.write(rendered)
def _orbit_system_env_base(output_path: str, context: Context) -> None:
    """Render the env base kustomize manifests into ``<output_path>/base``."""
    base_dir = os.path.join(output_path, "base")
    os.makedirs(base_dir, exist_ok=True)
    for filename in ("kustomization.yaml", "00-commons.yaml"):
        src = os.path.join(MODELS_PATH, "env", "base", filename)
        dst = os.path.join(base_dir, filename)
        with open(src, "r") as file:
            template: str = file.read()
        rendered = utils.resolve_parameters(
            template,
            dict(
                env_name=context.name,
                orbit_controller_image=f"{context.images.orbit_controller.repository}:"
                f"{context.images.orbit_controller.version}",
                k8s_utilities_image=f"{context.images.k8s_utilities.repository}:"
                f"{context.images.k8s_utilities.version}",
                # .dev0 builds always re-pull so developers pick up fresh images.
                image_pull_policy="Always" if aws_orbit.__version__.endswith(".dev0") else "IfNotPresent",
                certArn=context.networking.frontend.ssl_cert_arn,
                cognitoAppClientId=context.user_pool_client_id,
                cognitoUserPoolID=context.user_pool_id,
                account_id=context.account_id,
                region=context.region,
                cognitoUserPoolDomain=context.cognito_external_provider_domain,
            ),
        )
        with open(dst, "w") as file:
            file.write(rendered)
def _generate_kubeflow_patch(context: "Context", clean_up: bool = True) -> Tuple[str, str]:
    """Render the Kubeflow jupyter launcher configmap and load the jupyter patch.

    Returns:
        Tuple of (path of the written launcher configmap, raw content of
        ``kf-jupyter-patch.yaml`` — returned verbatim, no parameter resolution).
    """
    output_path = os.path.join(".orbit.out", context.name, "kubectl", "env")
    os.makedirs(output_path, exist_ok=True)
    if clean_up:
        _cleanup_output(output_path=output_path)

    # Kubeflow jupyter launcher configmap.
    src = os.path.join(MODELS_PATH, "kubeflow", "kf-jupyter-launcher.yaml")
    launcher_out = os.path.join(output_path, "kf-jupyter-launcher.yaml")
    with open(src, "r") as file:
        template = file.read()
    rendered = utils.resolve_parameters(
        template,
        dict(
            orbit_jupyter_user_image=f"{context.images.jupyter_user.repository}:{context.images.jupyter_user.version}"
        ),
    )
    with open(launcher_out, "w") as file:
        file.write(rendered)

    # The patch template is handed back to the caller untouched.
    with open(os.path.join(MODELS_PATH, "kubeflow", "kf-jupyter-patch.yaml"), "r") as file:
        patch = file.read()
    return launcher_out, patch
def update_docker_file(context: "Context", dir: str) -> None:
    """Resolve template parameters inside ``<dir>/Dockerfile`` in place, if it exists."""
    _logger.debug("Docker directory before building: %s", os.path.abspath(dir))
    utils.print_dir(dir)
    docker_file = os.path.join(dir, "Dockerfile")
    if not os.path.exists(docker_file):
        return
    _logger.info("Building DockerFile %s", docker_file)
    tag = context.images.jupyter_user.version
    # "code" source means the image is built into the env's own ECR registry;
    # otherwise the configured external repository is used as the base.
    source = context.images.jupyter_user.get_source(account_id=context.account_id, region=context.region)
    if source == "code":
        jupyter_user_base = (
            f"{context.account_id}.dkr.ecr.{context.region}.amazonaws.com/orbit-{context.name}/jupyter-user:{tag}"
        )
    else:
        jupyter_user_base = f"{context.images.jupyter_user.repository}:{tag}"
    with open(docker_file, "r") as file:
        template: str = file.read()
    rendered = utils.resolve_parameters(
        template,
        dict(
            region=context.region,
            account=context.account_id,
            env=context.name,
            jupyter_user_base=jupyter_user_base,
        ),
    )
    with open(docker_file, "w") as file:
        file.write(rendered)
def _kubeflow_namespaces(context: "Context", clean_up: bool = True) -> str:
    """Render the kubeflow namespace manifest(s) and return the output directory."""
    output_path = os.path.join(".orbit.out", context.name, "kubectl", "orbit-system")
    os.makedirs(output_path, exist_ok=True)
    if clean_up:
        _cleanup_output(output_path=output_path)
    for filename in ("kubeflow_namespace.yaml",):
        src = os.path.join(MODELS_PATH, "kubeflow", filename)
        dst = os.path.join(output_path, filename)
        with open(src, "r") as file:
            template: str = file.read()
        rendered = resolve_parameters(
            template,
            dict(
                env_name=context.name,
                account_id=context.account_id,
                region=context.region,
                sts_ep="legacy" if context.networking.data.internet_accessible else "regional",
            ),
        )
        with open(dst, "w") as file:
            file.write(rendered)
    return output_path
def update_file(file_path: str, values: Dict[str, Any]) -> str:
    """Resolve ``values`` placeholders inside ``file_path`` in place.

    Returns:
        The resolved file content that was written back.
    """
    with open(file_path, "r") as file:
        _logger.debug("Updating file %s with values: %s", file_path, values)
        raw: str = file.read()
    resolved = utils.resolve_parameters(raw, values)
    with open(file_path, "w") as file:
        file.write(resolved)
    return resolved
def _metrics_server(context: "Context", output_path: str) -> None:
    """Render the metrics-server manifest into ``output_path``."""
    filename = "06-metrics-server.yaml"
    src = os.path.join(MODELS_PATH, "apps", filename)
    dst = os.path.join(output_path, filename)
    with open(src, "r") as file:
        template: str = file.read()
    # .dev0 builds always re-pull so developers pick up fresh images.
    pull_policy = "Always" if aws_orbit.__version__.endswith(".dev0") else "IfNotPresent"
    rendered = utils.resolve_parameters(template, dict(image_pull_policy=pull_policy))
    with open(dst, "w") as file:
        file.write(rendered)
def deploy(plugin_id: str, context: "Context", team_context: "TeamContext", parameters: Dict[str, Any]) -> None:
    """Package the plugin's helm chart and install it into the team namespace.

    Raises:
        Exception: if the plugin parameters do not define 'script'.
    """
    _logger.debug("Team Env name: %s | Team name: %s", context.name, team_context.name)
    # Helm release names cannot contain underscores.
    plugin_id = plugin_id.replace("_", "-")
    _logger.debug("plugin_id: %s", plugin_id)
    chart_path = helm.create_team_charts_copy(team_context=team_context, path=CHART_PATH)
    _logger.debug("package dir")
    utils.print_dir(CHART_PATH)
    _logger.debug("copy chart dir")
    utils.print_dir(chart_path)
    chart_values: Dict[str, Optional[str]] = dict(
        team=team_context.name,
        region=context.region,
        account_id=context.account_id,
        env_name=context.name,
        tag=context.images.jupyter_hub.version,
        restart_policy=parameters.get("restartPolicy", "Never"),
        plugin_id=plugin_id,
        toolkit_s3_bucket=context.toolkit.s3_bucket,
        # .dev0 builds always re-pull so developers pick up fresh images.
        image_pull_policy="Always" if aws_orbit.__version__.endswith(".dev0") else "IfNotPresent",
    )
    if "script" not in parameters:
        raise Exception(f"Plugin {plugin_id} must define parameter 'script'")
    script_body = utils.resolve_parameters(parameters["script"], chart_values)
    with open(os.path.join(chart_path, "script.txt"), "w") as file:
        file.write(script_body)
    repo_location = helm.init_team_repo(context=context, team_context=team_context)
    repo = team_context.name
    _logger.debug(script_body)
    helm.add_repo(repo=repo, repo_location=repo_location)
    # The packaged artifact path is not needed here; only name/version are installed.
    chart_name, chart_version, _chart_package = helm.package_chart(
        repo=repo, chart_path=chart_path, values=chart_values
    )
    helm.install_chart(
        repo=repo,
        namespace=team_context.name,
        name=f"{team_context.name}-{plugin_id}",
        chart_name=chart_name,
        chart_version=chart_version,
    )
def write_context_ssm(profiles: PROFILES_TYPE, env_name: str, team_name: str) -> None:
    """Serialize the team's user profiles and store them in SSM.

    The profiles are JSON-encoded, any embedded placeholders (region, account,
    env, team) are resolved, and the result is written to the team's
    ``/orbit/<env>/teams/<team>/user/profiles`` parameter.
    """
    ssm_profile_name = f"/orbit/{env_name}/teams/{team_name}/user/profiles"
    client = utils.boto3_client(service_name="ssm")
    _logger.debug("Writing team %s user profiles to SSM parameter.", team_name)
    # json.dumps already returns str — dropped the redundant str() wrapper.
    json_str = json.dumps(obj=profiles, sort_keys=True)
    # Resolve any placeholders the profiles themselves may contain.
    json_str = utils.resolve_parameters(
        json_str,
        dict(region=utils.get_region(), account=utils.get_account_id(), env=env_name, team=team_name),
    )
    client.put_parameter(
        Name=ssm_profile_name,
        Value=json_str,
        Overwrite=True,
        Tier="Intelligent-Tiering",
    )
def write_resolve_parameters(
    manifest_name: str,
    name: str,
    filename: str,
    region: Optional[str],
) -> None:
    """Render an init manifest template into ``filename``.

    Falls back to the currently configured AWS region when ``region`` is None.
    """
    region_str: str = region if region is not None else utils.get_region()
    src = os.path.join(ORBIT_CLI_ROOT, "data", "init", manifest_name)
    with open(src, "r") as file:
        template: str = file.read()
    rendered = utils.resolve_parameters(template, dict(region=region_str, name=name))
    with open(filename, "w") as file:
        file.write(rendered)
def _commons(context: "Context", output_path: str) -> None:
    """Render the commons app manifest into ``output_path``."""
    filename = "00-commons.yaml"
    src = os.path.join(MODELS_PATH, "apps", filename)
    dst = os.path.join(output_path, filename)
    with open(src, "r") as file:
        template: str = file.read()
    rendered = resolve_parameters(
        template,
        dict(
            account_id=context.account_id,
            region=context.region,
            env_name=context.name,
        ),
    )
    with open(dst, "w") as file:
        file.write(rendered)
def _fsx_driver_base(output_path: str, context: "Context") -> None:
    """Render the FSx CSI driver base manifests into ``<output_path>/base``."""
    base_dir = os.path.join(output_path, "base")
    os.makedirs(base_dir, exist_ok=True)
    filenames = ["controller.yaml", "csidriver.yaml", "kustomization.yaml", "node.yaml", "rbac.yaml"]
    for filename in filenames:
        src = os.path.join(MODELS_PATH, "fsx_driver", "base", filename)
        dst = os.path.join(base_dir, filename)
        _logger.debug("Copying fsx driver base file: %s -> %s", src, dst)
        with open(src, "r") as file:
            template: str = file.read()
        rendered = utils.resolve_parameters(
            template,
            dict(orbit_cluster_role=context.eks_cluster_role_arn),
        )
        with open(dst, "w") as file:
            file.write(rendered)
def _generate_kube_system_manifest(context: "Context", clean_up: bool = True) -> str:
    """Render every kube-system manifest and return the output directory path."""
    output_path = os.path.join(".orbit.out", context.name, "kubectl", "kube-system")
    os.makedirs(output_path, exist_ok=True)
    if clean_up:
        _cleanup_output(output_path=output_path)
    filenames = [
        "00-observability.yaml",
        "01-aws-vgpu-daemonset.yaml",
        "01-nvidia-daemonset.yaml",
        "02-cluster-autoscaler-autodiscover.yaml",
    ]
    for filename in filenames:
        src = os.path.join(MODELS_PATH, "kube-system", filename)
        dst = os.path.join(output_path, filename)
        with open(src, "r") as file:
            template: str = file.read()
        rendered = utils.resolve_parameters(
            template,
            dict(
                account_id=context.account_id,
                region=context.region,
                role_prefix=f"/{context.role_prefix}/" if context.role_prefix else "/",
                env_name=context.name,
                cluster_name=f"orbit-{context.name}",
                sts_ep="legacy" if context.networking.data.internet_accessible else "regional",
                # .dev0 builds always re-pull so developers pick up fresh images.
                image_pull_policy="Always" if aws_orbit.__version__.endswith(".dev0") else "IfNotPresent",
                # Autoscaler needs a static instance list when it cannot reach the internet.
                use_static_instance_list=str(not context.networking.data.internet_accessible).lower(),
            ),
        )
        with open(dst, "w") as file:
            file.write(rendered)
    return output_path
def _load_toolkit_helper(file_path: str, image_name: str, env: str) -> str:
    """Load a toolkit helper JSON template, resolve its parameters, and absolutize extra dirs.

    Returns:
        The helper as a JSON string with every ``extra_dirs`` entry rewritten
        to an absolute path rooted at ``ORBIT_CLI_ROOT``.
    """
    with open(file_path, "r") as file:
        helper: str = file.read()
    params = {
        "ACCOUNT_ID": get_account_id(),
        "REGION": get_region(),
        "IMAGE_NAME": image_name,
        "ENV": env,
    }
    # Dropped the redundant dict(params) copy — params is already a dict.
    resolved = resolve_parameters(helper, params)
    j = json.loads(resolved)
    if j.get("extra_dirs"):
        # Rewrite each relative extra dir to an absolute path under the CLI install.
        new_extra_dirs = {
            key: os.path.realpath(os.path.join(ORBIT_CLI_ROOT, rel)) for key, rel in j["extra_dirs"].items()
        }
        _logger.debug(f" new extra dir = {new_extra_dirs}")
        j["extra_dirs"] = new_extra_dirs
    _logger.debug(f"OUT of HELPER {j}")
    return json.dumps(j)
def _k8_dashboard(context: "Context", output_path: str) -> None:
    """Render the Kubernetes dashboard manifest, selecting ECR replicas when air-gapped.

    FIX: the air-gapped scraper image was tagged with the dashboard's version
    (``ImagesManifest.k8_dashboard.version``), while the internet-accessible
    branch tags it with ``k8_metrics_scraper``'s own version — this looked like
    a copy-paste slip and is now consistent across both branches.
    """
    filename = "05-dashboard.yaml"
    src = os.path.join(MODELS_PATH, "apps", filename)
    dst = os.path.join(output_path, filename)
    if context.networking.data.internet_accessible is False:
        # Air-gapped: pull from the env's replicated ECR repositories.
        dashboard_image = (
            f"{context.account_id}.dkr.ecr.{context.region}.amazonaws.com/"
            f"orbit-{context.name}-k8-dashboard:{ImagesManifest.k8_dashboard.version}"
        )
        scraper_image = (
            f"{context.account_id}.dkr.ecr.{context.region}.amazonaws.com/"
            f"orbit-{context.name}-k8-metrics-scraper:{ImagesManifest.k8_metrics_scraper.version}"
        )
    else:
        dashboard_image = f"{ImagesManifest.k8_dashboard.repository}:{ImagesManifest.k8_dashboard.version}"
        scraper_image = f"{ImagesManifest.k8_metrics_scraper.repository}:{ImagesManifest.k8_metrics_scraper.version}"
    with open(src, "r") as file:
        content: str = file.read()
    _logger.debug("using for k8 dashboard images: \n%s \n%s", dashboard_image, scraper_image)
    content = resolve_parameters(content, parameters=dict(dashboard_image=dashboard_image, scraper_image=scraper_image))
    with open(dst, "w") as file:
        file.write(content)
def update_docker_file(account_id: str, region: str, env: str, tag: str, dir: str) -> None:
    """Resolve template parameters inside ``<dir>/Dockerfile`` in place, if present."""
    _logger.debug("Docker directory before building: %s", os.path.abspath(dir))
    utils.print_dir(dir)
    docker_file = os.path.join(dir, "Dockerfile")
    if not os.path.exists(docker_file):
        return
    _logger.info("Building DockerFile %s", docker_file)
    # Base image always comes from the env's own ECR registry in this variant.
    jupyter_user_base = f"{account_id}.dkr.ecr.{region}.amazonaws.com/orbit-{env}/jupyter-user:{tag}"
    _logger.debug(f"update_docker_file: jupyter_user_base = {jupyter_user_base}")
    with open(docker_file, "r") as file:
        template: str = file.read()
    rendered = utils.resolve_parameters(
        template,
        dict(region=region, account=account_id, env=env, jupyter_user_base=jupyter_user_base),
    )
    with open(docker_file, "w") as file:
        file.write(rendered)
def _cluster_autoscaler(output_path: str, context: "Context") -> None:
    """Render the cluster-autoscaler manifest into ``output_path``."""
    filename = "07-cluster-autoscaler-autodiscover.yaml"
    src = os.path.join(MODELS_PATH, "apps", filename)
    dst = os.path.join(output_path, filename)
    with open(src, "r") as file:
        template: str = file.read()
    rendered = utils.resolve_parameters(
        template,
        dict(
            account_id=context.account_id,
            env_name=context.name,
            cluster_name=f"orbit-{context.name}",
            sts_ep="legacy" if context.networking.data.internet_accessible else "regional",
            # .dev0 builds always re-pull so developers pick up fresh images.
            image_pull_policy="Always" if aws_orbit.__version__.endswith(".dev0") else "IfNotPresent",
            # Autoscaler needs a static instance list when it cannot reach the internet.
            use_static_instance_list=str(not context.networking.data.internet_accessible).lower(),
        ),
    )
    with open(dst, "w") as file:
        file.write(rendered)
def _orbit_system_commons_base(output_path: str, context: Context) -> None:
    """Render the orbit-system commons base manifests into ``<output_path>/base``."""
    base_dir = os.path.join(output_path, "base")
    os.makedirs(base_dir, exist_ok=True)
    for filename in ("kustomization.yaml", "00a-commons.yaml", "00b-cert-manager.yaml"):
        src = os.path.join(MODELS_PATH, "orbit-system", "commons", "base", filename)
        dst = os.path.join(base_dir, filename)
        with open(src, "r") as file:
            template: str = file.read()
        rendered = resolve_parameters(
            template,
            dict(
                account_id=context.account_id,
                region=context.region,
                env_name=context.name,
                role_prefix=f"/{context.role_prefix}/" if context.role_prefix else "/",
                secure_port="10260",
            ),
        )
        with open(dst, "w") as file:
            file.write(rendered)
def _orbit_image_replicator(context: "Context", output_path: str) -> None:
    """Render the image-replicator manifest(s) into ``output_path``."""
    for filename in ["01b-image-replicator.yaml"]:
        src = os.path.join(MODELS_PATH, "orbit-system", filename)
        dst = os.path.join(output_path, filename)
        with open(src, "r") as file:
            template: str = file.read()
        rendered = resolve_parameters(
            template,
            dict(
                account_id=context.account_id,
                region=context.region,
                env_name=context.name,
                orbit_controller_image=f"{context.images.orbit_controller.repository}:"
                f"{context.images.orbit_controller.version}",
                # .dev0 builds always re-pull so developers pick up fresh images.
                image_pull_policy="Always" if aws_orbit.__version__.endswith(".dev0") else "IfNotPresent",
            ),
        )
        with open(dst, "w") as file:
            file.write(rendered)
def gen_kubeflow_config(context: Context, output_path: str, cluster_name: str) -> None:
    """Generate the kfctl AWS config plus the apply/delete shell scripts.

    Raises:
        RuntimeError: when the context lacks an account id or region.
    """
    os.makedirs(output_path, exist_ok=True)
    _cleanup_output(output_path=output_path)
    if context.account_id is None:
        raise RuntimeError("context.account_id is None!")
    if context.region is None:
        raise RuntimeError("context.region is None!")

    # kfctl config: needs the Cognito user pool's hosted domain.
    src = os.path.join(CONFIG_PATH, "kfctl_aws.yaml")
    dst = os.path.join(output_path, "kfctl_aws.yaml")
    client = boto3_client(service_name="cognito-idp")
    response: Dict[str, Any] = client.describe_user_pool(UserPoolId=context.user_pool_id)
    domain: str = response["UserPool"].get("Domain")
    with open(src, "r") as file:
        content: str = file.read()
    content = utils.resolve_parameters(
        content,
        dict(
            certArn=context.networking.frontend.ssl_cert_arn,
            cognitoAppClientId=context.user_pool_client_id,
            cognitoUserPoolID=context.user_pool_id,
            account_id=context.account_id,
            region=context.region,
            env_name=context.name,
            cluster_name=cluster_name,
            cognitoUserPoolDomain=domain,
        ),
    )
    _logger.debug("Kubeflow configuration:\n%s", content)
    with open(dst, "w") as file:
        file.write(content)

    # Apply/delete helper scripts: rendered identically, then made executable.
    k8s_context = get_k8s_context(context=context)
    for script in ("apply_kf.sh", "delete_kf.sh"):
        src = os.path.join(CONFIG_PATH, script)
        dst = os.path.join(output_path, script)
        with open(src, "r") as file:
            content = file.read()
        content = utils.resolve_parameters(
            content,
            dict(cluster_name=cluster_name, k8s_context=k8s_context),
        )
        _logger.debug("Kubeflow script:\n%s", content)
        with open(dst, "w") as file:
            file.write(content)
        sh.run(f"chmod a+x {dst}")
def helm_package(plugin_id: str, context: "Context", team_context: "TeamContext", parameters: Dict[str, Any]) -> Tuple[str, str, str]:
    """Build and package the team-script-launcher chart for a plugin.

    Returns:
        Tuple of (chart_name, chart_version, chart_package).

    Raises:
        Exception: if 'script' is missing from parameters, or the team has no
            helm repository configured.
    """
    chart_path = helm.create_team_charts_copy(team_context=team_context, path=CHART_PATH, target_path=plugin_id)
    _logger.debug("copy chart dir")
    utils.print_dir(chart_path)
    # Pick the container image: the default jupyter-user image, the
    # utility-data image when requested, or whatever the plugin configured.
    if "image" not in parameters:
        image = f"{context.images.jupyter_user.repository}:{context.images.jupyter_user.version}"
    elif "aws-orbit-workbench/utility-data" in parameters["image"]:
        image = f"{context.images.utility_data.repository}:{context.images.utility_data.version}"
    else:
        image = parameters["image"]
    _logger.debug(f"For plugin {plugin_id} using image: {image}")
    chart_values: Dict[str, Optional[str]] = dict(
        team=team_context.name,
        region=context.region,
        account_id=context.account_id,
        env_name=context.name,
        tag=parameters["tag"] if "tag" in parameters else context.images.jupyter_user.version,
        restart_policy=parameters["restartPolicy"] if "restartPolicy" in parameters else "Never",
        plugin_id=plugin_id,
        toolkit_s3_bucket=context.toolkit.s3_bucket,
        # .dev0 builds always re-pull so developers pick up fresh images.
        image_pull_policy="Always" if aws_orbit.__version__.endswith(".dev0") else "IfNotPresent",
        image=image,
        uid=parameters["uid"] if "uid" in parameters else "1000",
        gid=parameters["gid"] if "gid" in parameters else "100",
    )
    if "script" not in parameters:
        raise Exception(f"Plugin {plugin_id} must define parameter 'script'")
    script_body = utils.resolve_parameters(parameters["script"], chart_values)
    with open(os.path.join(chart_path, "team-script-launcher", "script.txt"), "w") as file:
        file.write(script_body)
    if not team_context.team_helm_repository:
        raise Exception("Missing team helm repository")
    repo_location = team_context.team_helm_repository
    repo = team_context.name
    _logger.debug(script_body)
    _init_team_repo(context=context, team_context=team_context, repo_location=repo_location)
    helm.add_repo(repo=repo, repo_location=repo_location)
    chart_name, chart_version, chart_package = helm.package_chart(
        repo=repo, chart_path=os.path.join(chart_path, "team-script-launcher"), values=chart_values
    )
    return (chart_name, chart_version, chart_package)
def _team(context: "Context", team_context: "TeamContext", output_path: str) -> None:
    """Render the per-team app manifests into ``output_path``.

    Produces the team manifest, the user service account, the voila service,
    and — only for k8-admin teams — the admin cluster binding.
    """
    # Team manifest.
    src = os.path.join(MODELS_PATH, "apps", "01-team.yaml")
    dst = os.path.join(output_path, f"01-{team_context.name}-team.yaml")
    with open(src, "r") as file:
        template: str = file.read()
    rendered = utils.resolve_parameters(
        template,
        dict(
            team=team_context.name,
            efsid=context.shared_efs_fs_id,
            efsapid=team_context.efs_ap_id,
            account_id=context.account_id,
            env_name=context.name,
            team_kms_key_arn=team_context.team_kms_key_arn,
            team_security_group_id=team_context.team_security_group_id,
            cluster_pod_security_group_id=context.cluster_pod_sg_id,
        ),
    )
    _logger.debug("Kubectl Team %s manifest:\n%s", team_context.name, rendered)
    with open(dst, "w") as file:
        file.write(rendered)

    # User service account.
    src = os.path.join(MODELS_PATH, "apps", "02-user-service-account.yaml")
    dst = os.path.join(output_path, f"02-{team_context.name}-user-service-account.yaml")
    with open(src, "r") as file:
        template = file.read()
    rendered = utils.resolve_parameters(template, dict(team=team_context.name))
    with open(dst, "w") as file:
        file.write(rendered)

    # Voila service.
    src = os.path.join(MODELS_PATH, "apps", "08-voila_service.yaml")
    dst = os.path.join(output_path, f"08-{team_context.name}-voila_service.yaml")
    with open(src, "r") as file:
        template = file.read()
    rendered = utils.resolve_parameters(
        template,
        dict(
            team=team_context.name,
            env_name=context.name,
            repository=context.images.jupyter_user.repository,
            tag=context.images.jupyter_user.version,
            sts_ep="legacy" if context.networking.data.internet_accessible else "regional",
            # .dev0 builds always re-pull so developers pick up fresh images.
            image_pull_policy="Always" if aws_orbit.__version__.endswith(".dev0") else "IfNotPresent",
        ),
    )
    with open(dst, "w") as file:
        file.write(rendered)

    # Admin binding, only when the team has k8 admin rights.
    if team_context.k8_admin:
        src = os.path.join(MODELS_PATH, "apps", "04-admin-binding.yaml")
        dst = os.path.join(output_path, f"04-{team_context.name}-admin-binding.yaml")
        with open(src, "r") as file:
            template = file.read()
        rendered = utils.resolve_parameters(template, dict(team=team_context.name))
        with open(dst, "w") as file:
            file.write(rendered)
def deploy(plugin_id: str, context: "Context", team_context: "TeamContext", parameters: Dict[str, Any]) -> None:
    """Execute the plugin script inside the team namespace via a kubectl pod.

    Renders the script into a configmap, renders the pod manifest from
    ``POD_FILENAME``, and applies it with kubectl.

    Raises:
        Exception: if the plugin parameters do not define 'script'.
    """
    _logger.debug("Team Env name: %s | Team name: %s", context.name, team_context.name)
    # Kubernetes resource names cannot contain underscores.
    plugin_id = plugin_id.replace("_", "-")
    _logger.debug("plugin_id: %s", plugin_id)
    configmap_script_name = f"{plugin_id}-script"
    render_vars = dict(
        team=team_context.name,
        region=context.region,
        account_id=context.account_id,
        env_name=context.name,
        tag=context.images.jupyter_hub.version,
        restart_policy=parameters["restartPolicy"] if "restartPolicy" in parameters else "Never",
        plugin_id=plugin_id,
        toolkit_s3_bucket=context.toolkit.s3_bucket,
    )
    if "script" not in parameters:
        raise Exception(f"Plugin {plugin_id} must define parameter 'script'")
    script_file = os.path.join(os.path.dirname(POD_FILENAME), f"{plugin_id}-script.sh")
    script_body = utils.resolve_parameters(parameters["script"], cast(Dict[str, str], render_vars))
    with open(script_file, "w") as file:
        file.write(script_body)
    _logger.debug(script_body)
    # Clean up any previous installation before (re)creating resources.
    sh.run(
        f"kubectl delete jobs/team-script-{plugin_id} --namespace {team_context.name} --ignore-not-found"
    )
    sh.run(
        f"kubectl delete configmap {configmap_script_name} --namespace {team_context.name} --ignore-not-found"
    )
    # Create the configmap holding the rendered script.
    sh.run(
        f"kubectl create configmap {configmap_script_name} --from-file={script_file} --namespace {team_context.name}"
    )
    _logger.debug(
        f"Create config map: {configmap_script_name} at namespace {team_context.name}"
    )
    _logger.debug(
        "Using S3 Sync Pod at %s for Env name: %s | Team name: %s",
        POD_FILENAME,
        context.name,
        team_context.name,
    )
    output = os.path.join(os.path.dirname(POD_FILENAME), f"{plugin_id}-team.yaml")
    with open(POD_FILENAME, "r") as file:
        content: str = file.read()
    content = utils.resolve_parameters(content, cast(Dict[str, str], render_vars))
    _logger.debug("Kubectl Team %s context:\n%s", team_context.name, content)
    with open(output, "w") as file:
        file.write(content)
    # Run the pod to execute the script.
    cmd = f"kubectl apply -f {output} --namespace {team_context.name}"
    _logger.debug(cmd)
    sh.run(cmd)