Beispiel #1
0
def _get_pipeline_details(mlmd_handle: metadata.Metadata,
                          task_queue: tq.TaskQueue) -> List[_PipelineDetail]:
    """Scans MLMD and returns pipeline details.

    Args:
      mlmd_handle: Handle to the MLMD store to scan.
      task_queue: Task queue whose `contains_task_id` is handed to the task
        generators so they can skip already-enqueued tasks.

    Returns:
      One `_PipelineDetail` per orchestrator context that has exactly one
      active execution; contexts with no active execution are skipped.

    Raises:
      status_lib.StatusNotOkError: If a context has more than one active
        execution (INTERNAL), or an active pipeline has an unsupported
        execution mode (FAILED_PRECONDITION).
    """
    details = []
    orchestrator_contexts = mlmd_handle.store.get_contexts_by_type(
        _ORCHESTRATOR_RESERVED_ID)

    for ctx in orchestrator_contexts:
        active = [
            e for e in mlmd_handle.store.get_executions_by_context(ctx.id)
            if execution_lib.is_execution_active(e)
        ]
        # At most one active execution may exist per orchestrator context.
        if len(active) > 1:
            raise status_lib.StatusNotOkError(
                code=status_lib.Code.INTERNAL,
                message=(f'Expected 1 but found {len(active)} active '
                         f'executions for context named: {ctx.name}'))
        if not active:
            continue
        execution = active[0]

        # TODO(goutham): Instead of parsing the pipeline IR each time, we could
        # cache the parsed pipeline IR in `initiate_pipeline_start` and reuse it.
        encoded_ir = common_utils.get_metadata_value(
            execution.properties[_PIPELINE_IR])
        pipeline = pipeline_pb2.Pipeline()
        pipeline.ParseFromString(base64.b64decode(encoded_ir))

        stop_initiated = _is_stop_initiated(execution)

        # No tasks are generated for a pipeline that is being stopped.
        generator = None
        if not stop_initiated:
            mode = pipeline.execution_mode
            if mode == pipeline_pb2.Pipeline.SYNC:
                generator = sync_pipeline_task_gen.SyncPipelineTaskGenerator(
                    mlmd_handle, pipeline, task_queue.contains_task_id)
            elif mode == pipeline_pb2.Pipeline.ASYNC:
                generator = async_pipeline_task_gen.AsyncPipelineTaskGenerator(
                    mlmd_handle, pipeline, task_queue.contains_task_id)
            else:
                raise status_lib.StatusNotOkError(
                    code=status_lib.Code.FAILED_PRECONDITION,
                    message=
                    (f'Only SYNC and ASYNC pipeline execution modes supported; '
                     f'found pipeline with execution mode: {mode}'))

        details.append(
            _PipelineDetail(
                context=ctx,
                execution=execution,
                pipeline=pipeline,
                pipeline_uid=task_lib.PipelineUid.from_pipeline(pipeline),
                stop_initiated=stop_initiated,
                generator=generator))

    return details
Beispiel #2
0
 def pipeline(self) -> pipeline_pb2.Pipeline:
     """Returns the pipeline proto, decoding the stored IR on first access.

     The base64-encoded pipeline IR is read from the execution's properties,
     parsed once, and cached on the instance for subsequent calls.
     """
     if self._pipeline:
         return self._pipeline
     encoded_ir = common_utils.get_metadata_value(
         self.execution.properties[_PIPELINE_IR])
     decoded = pipeline_pb2.Pipeline()
     decoded.ParseFromString(base64.b64decode(encoded_ir))
     self._pipeline = decoded
     return self._pipeline
Beispiel #3
0
def _extract_properties(
        execution: metadata_store_pb2.Execution) -> Dict[Text, types.Property]:
    """Returns all properties and custom properties of `execution` as a dict.

    Raises:
      ValueError: If any property has an empty (None) value.
    """
    all_props = itertools.chain(execution.properties.items(),
                                execution.custom_properties.items())
    extracted = {}
    for key, prop in all_props:
        value = common_utils.get_metadata_value(prop)
        if value is None:
            raise ValueError(
                f'Unexpected property with empty value; key: {key}')
        extracted[key] = value
    return extracted
Beispiel #4
0
def _is_stop_initiated(execution: metadata_store_pb2.Execution) -> bool:
    """Returns True if a stop request is recorded on `execution`."""
    # Membership is tested before indexing because indexing a proto map
    # auto-creates a default entry for a missing key.
    if _STOP_INITIATED not in execution.custom_properties:
        return False
    return common_utils.get_metadata_value(
        execution.custom_properties[_STOP_INITIATED]) == 1
Beispiel #5
0
 def is_stop_initiated(self):
     """Returns `True` if pipeline execution stopping has been initiated."""
     custom_props = self.execution.custom_properties
     # Check membership first: indexing a proto map inserts a default entry
     # for a missing key.
     if _STOP_INITIATED not in custom_props:
         return False
     return common_utils.get_metadata_value(custom_props[_STOP_INITIATED]) == 1