def update_uimetadata(artifact_name,
                      uimetadata_path='/mlpipeline-ui-metadata.json'):
    """Update ui-metadata dictionary with a new web-app entry.

    Reads the existing KFP ui-metadata file (if any), appends a minio
    web-app artifact entry pointing at this workflow/pod's artifact, and
    writes the file back.

    Args:
        artifact_name: Name of the artifact (".tgz" is appended to build
            the minio object name)
        uimetadata_path: path to mlpipeline-ui-metadata.json
    """
    # Default empty ui-metadata dict, used when the file is missing or broken
    outputs = {"outputs": []}
    if os.path.exists(uimetadata_path):
        try:
            # FIX: use a context manager so the file handle is always closed
            # (the original leaked it via open(...).read()).
            with open(uimetadata_path, 'r') as f:
                outputs = json.load(f)
            if not outputs.get('outputs', None):
                outputs['outputs'] = []
        except json.JSONDecodeError as e:
            # Best-effort: a corrupt file only disables visualization,
            # it must not fail the step.
            print("Failed to parse json file {}: {}\n"
                  "This step will not be able to visualize artifacts in the"
                  " KFP UI".format(uimetadata_path, e))

    pod_name = pod_utils.get_pod_name()
    namespace = pod_utils.get_namespace()
    workflow_name = pod_utils.get_workflow_name(pod_name, namespace)
    # KFP convention: artifacts live under
    # minio://mlpipeline/artifacts/<workflow>/<pod>/<artifact>.tgz
    html_artifact_entry = [{
        'type': 'web-app',
        'storage': 'minio',
        'source': 'minio://mlpipeline/artifacts/{}/{}/{}'.format(
            workflow_name, pod_name, artifact_name + '.tgz')
    }]
    outputs['outputs'] += html_artifact_entry
    with open(uimetadata_path, "w") as f:
        json.dump(outputs, f)
def __init__(self):
    """Resolve pod/workflow identity and set up the MLMD run context.

    Connects to the metadata store, then derives this step's identity
    (pod, namespace, Argo workflow, KFP run id, pipeline name, component
    id, execution hash) from pod/workflow labels and annotations, and
    finally creates/labels the run context and execution.
    """
    self.store = self._connect()
    self.pod_name = pod_utils.get_pod_name()
    self.pod_namespace = pod_utils.get_namespace()
    self.pod = pod_utils.get_pod(self.pod_name, self.pod_namespace)
    self.workflow_name = self.pod.metadata.labels.get(
        ARGO_WORKFLOW_LABEL_KEY)
    self.workflow = pod_utils.get_workflow(self.workflow_name,
                                           self.pod_namespace)

    workflow_labels = self.workflow["metadata"].get("labels", {})
    # Fall back to the workflow name when the KFP run id label is absent
    # (e.g. workflow not started via the KFP API).
    self.run_uuid = workflow_labels.get(pod_utils.KFP_RUN_ID_LABEL_KEY,
                                        self.workflow_name)

    workflow_annotations = self.workflow["metadata"].get("annotations", {})
    # FIX: the default must be a JSON *string* ("{}"), not a dict —
    # json.loads({}) raises TypeError when the annotation is missing.
    pipeline_spec = json.loads(
        workflow_annotations.get("pipelines.kubeflow.org/pipeline_spec",
                                 "{}"))
    self.pipeline_name = pipeline_spec.get("name", self.workflow_name)

    self.component_id = pod_utils.compute_component_id(self.pod)
    # Reuse the hash stamped on the pod if present; otherwise generate one.
    self.execution_hash = self.pod.metadata.annotations.get(
        MLMD_EXECUTION_HASH_PROPERTY_KEY, utils.random_string(10))

    self.run_context = self._get_or_create_run_context()
    self.execution = self._create_execution_in_run_context()
    self._label_with_context_and_execution()
def snapshot_notebook(bucket=DEFAULT_BUCKET, obj=None):
    """Register a Rok snapshot of the current notebook pod.

    Args:
        bucket: Rok bucket to store the snapshot in.
        obj: Rok object name; defaults to this pod's name.

    Returns:
        The result of the Rok ``version_register`` call.
    """
    rok_client = _get_client()
    notebook_hostname = os.getenv("HOSTNAME")
    ns = pod_utils.get_namespace()
    params = {
        "namespace": ns,
        "commit_title": f"Snapshot of notebook {notebook_hostname}",
        "commit_message": NOTEBOOK_SNAPSHOT_COMMIT_MESSAGE.format(
            notebook_hostname, ns),
    }
    if not obj:
        obj = pod_utils.get_pod_name()
    # Make sure the target bucket exists before registering the version.
    pod_utils.create_rok_bucket(bucket, client=rok_client)
    return rok_client.version_register(bucket, obj, "jupyter", params)
def check_rok_availability(request):
    """Check if Rok is available."""
    # Prefer the request-scoped logger when the transport provides one.
    log = getattr(request, "log", logger)

    try:
        rok = _get_client()
    except ImportError:
        log.exception("Failed to import RokClient")
        raise RPCNotFoundError(details="Rok Gateway Client module not found",
                               trans_id=request.trans_id)
    except Exception:
        log.exception("Failed to initialize RokClient")
        raise RPCServiceUnavailableError(
            details="Failed to initialize RokClient",
            trans_id=request.trans_id)

    # A cheap authenticated call to verify the Rok service is reachable.
    try:
        rok.account_info()
    except Exception:
        log.exception("Failed to retrieve account information")
        raise RPCServiceUnavailableError(details="Failed to access Rok",
                                         trans_id=request.trans_id)

    name = pod_utils.get_pod_name()
    namespace = pod_utils.get_namespace()
    try:
        suggestions = rok.version_register_suggest(
            DEFAULT_BUCKET, name, "jupyter", "params:lab",
            {"namespace": namespace}, ignore_env=True)
    except Exception as e:
        log.exception("Failed to list lab suggestions")
        raise RPCServiceUnavailableError(
            message="%s: %s" % (e.__class__.__name__, e),
            details="Rok cannot list notebooks in this namespace",
            trans_id=request.trans_id)

    # The current notebook must appear among Rok's suggestions.
    if not any(sugg["value"] == name for sugg in suggestions):
        log.error("Could not find notebook '%s' in list of suggestions", name)
        raise RPCNotFoundError(
            details="Could not find this notebook in notebooks listed by Rok",
            trans_id=request.trans_id)