def build_root_spec_from_pipeline_params(
    pipeline_params: List[dsl.PipelineParam],
) -> pipeline_spec_pb2.ComponentSpec:
  """Builds the IR ComponentSpec for a pipeline root from its params.

  Useful when building the component spec for a pipeline (aka pipeline
  root). Such a component spec needs no output_definitions, and its
  implementation field is filled in later by the caller.

  Args:
    pipeline_params: The list of pipeline params.

  Returns:
    An instance of IR ComponentSpec.
  """
  component_spec = pipeline_spec_pb2.ComponentSpec()
  for pipeline_param in (pipeline_params if pipeline_params else []):
    name = pipeline_param.name
    param_type = pipeline_param.param_type
    # Parameters and artifacts live in separate input-definition tables.
    if type_utils.is_parameter_type(param_type):
      component_spec.input_definitions.parameters[name].type = (
          type_utils.get_parameter_type(param_type))
    else:
      schema = type_utils.get_artifact_type_schema(param_type)
      component_spec.input_definitions.artifacts[
          name].artifact_type.instance_schema = schema
  return component_spec
def build_component_spec_from_structure(
    component_spec: structures.ComponentSpec,
) -> pipeline_spec_pb2.ComponentSpec:
  """Builds an IR ComponentSpec instance from structures.ComponentSpec.

  Args:
    component_spec: The structure component spec.

  Returns:
    An instance of IR ComponentSpec.
  """
  result = pipeline_spec_pb2.ComponentSpec()
  result.executor_label = dsl_utils.sanitize_executor_label(
      component_spec.name)

  def _fill_definitions(io_definitions, io_specs):
    # Route each I/O spec into the parameter or artifact table of the
    # given (input or output) definitions message.
    for spec in io_specs if io_specs else []:
      if type_utils.is_parameter_type(spec.type):
        io_definitions.parameters[spec.name].type = (
            type_utils.get_parameter_type(spec.type))
      else:
        io_definitions.artifacts[
            spec.name].artifact_type.instance_schema = (
                type_utils.get_artifact_type_schema(spec.type))

  _fill_definitions(result.input_definitions, component_spec.inputs)
  _fill_definitions(result.output_definitions, component_spec.outputs)
  return result
def test_get_parameter_type(self):
  """Verifies name-to-primitive-type mapping for the legacy type names."""
  name_to_expected_type = {
      'Integer': pb.PrimitiveType.INT,
      'Double': pb.PrimitiveType.DOUBLE,
      'String': pb.PrimitiveType.STRING,
  }
  for type_name, expected_type in name_to_expected_type.items():
    self.assertEqual(expected_type,
                     type_utils.get_parameter_type(type_name))
  # Unrecognized type names map to no primitive type at all.
  self.assertEqual(None, type_utils.get_parameter_type('dummy'))
  # Passing None is unsupported and surfaces as an AttributeError.
  with self.assertRaises(AttributeError):
    type_utils.get_parameter_type(None)
def test_get_parameter_type(self):
  """Verifies type-name lookup, including fallback-to-string behavior."""
  cases = [
      ('Integer', pb.PrimitiveType.INT),
      ('Int', pb.PrimitiveType.INT),
      ('Double', pb.PrimitiveType.DOUBLE),
      ('Float', pb.PrimitiveType.DOUBLE),
      ('String', pb.PrimitiveType.STRING),
      # Unrecognized names and None both fall back to STRING.
      ('arbitrary type', pb.PrimitiveType.STRING),
      (None, pb.PrimitiveType.STRING),
  ]
  for type_name, expected_type in cases:
    self.assertEqual(expected_type,
                     type_utils.get_parameter_type(type_name))
def build_component_outputs_spec(
    component_spec: pipeline_spec_pb2.ComponentSpec,
    pipeline_params: List[dsl.PipelineParam],
) -> None:
  """Populates a component spec's output definitions from pipeline params.

  Args:
    component_spec: The component spec to fill in its outputs spec.
    pipeline_params: The list of pipeline params.
  """
  output_definitions = component_spec.output_definitions
  for pipeline_param in pipeline_params or []:
    # Outputs are keyed by the param's fully-qualified name.
    name = pipeline_param.full_name
    param_type = pipeline_param.param_type
    if type_utils.is_parameter_type(param_type):
      output_definitions.parameters[name].type = (
          type_utils.get_parameter_type(param_type))
    else:
      output_definitions.artifacts[
          name].artifact_type.instance_schema = (
              type_utils.get_artifact_type_schema(param_type))
def _get_custom_job_op(
    task_name: str,
    job_spec: Dict[str, Any],
    input_artifacts: Optional[Dict[str, dsl.PipelineParam]] = None,
    input_parameters: Optional[Dict[str, _ValueOrPipelineParam]] = None,
    output_artifacts: Optional[Dict[str, Type[artifact.Artifact]]] = None,
    output_parameters: Optional[Dict[str, Any]] = None,
) -> AiPlatformCustomJobOp:
  """Gets an AiPlatformCustomJobOp from job spec and I/O definition.

  Args:
    task_name: Name of the task; also used to derive the executor label and
      the component reference name.
    job_spec: The AI Platform (Unified) custom job spec.
    input_artifacts: Mapping from input name to the upstream PipelineParam
      that produces the artifact. Optional.
    input_parameters: Mapping from input name to either a PipelineParam or a
      constant value. Optional.
    output_artifacts: Mapping from output name to the artifact type.
      Optional.
    output_parameters: Mapping from output name to the parameter type.
      Optional.

  Returns:
    An AiPlatformCustomJobOp carrying the constructed component spec, task
    spec, and dummy I/O declarations.

  Raises:
    RuntimeError: If an input artifact is not backed by a PipelineParam.
    KeyError: If an output parameter name collides with an output artifact
      name, or an input parameter name collides with an input artifact name.
  """
  # BUGFIX: all four I/O dicts default to None, yet the loops below iterate
  # them unconditionally, so calling with the declared defaults raised
  # AttributeError. Normalize them to empty dicts up front.
  input_artifacts = input_artifacts or {}
  input_parameters = input_parameters or {}
  output_artifacts = output_artifacts or {}
  output_parameters = output_parameters or {}

  pipeline_task_spec = pipeline_spec_pb2.PipelineTaskSpec()
  pipeline_component_spec = pipeline_spec_pb2.ComponentSpec()

  pipeline_task_spec.task_info.CopyFrom(
      pipeline_spec_pb2.PipelineTaskInfo(name=task_name))

  # Iterate through the inputs/outputs declaration to get pipeline component
  # spec.
  for input_name, param in input_parameters.items():
    if isinstance(param, dsl.PipelineParam):
      pipeline_component_spec.input_definitions.parameters[
          input_name].type = type_utils.get_parameter_type(param.param_type)
    else:
      # Constant value: derive the parameter type from its Python type.
      pipeline_component_spec.input_definitions.parameters[
          input_name].type = type_utils.get_parameter_type(type(param))

  for input_name, art in input_artifacts.items():
    if not isinstance(art, dsl.PipelineParam):
      raise RuntimeError(
          'Get unresolved input artifact for input %s. Input '
          'artifacts must be connected to a producer task.' % input_name)
    pipeline_component_spec.input_definitions.artifacts[
        input_name].artifact_type.CopyFrom(
            type_utils.get_artifact_type_schema_message(art.param_type))

  for output_name, param_type in output_parameters.items():
    pipeline_component_spec.output_definitions.parameters[
        output_name].type = type_utils.get_parameter_type(param_type)

  for output_name, artifact_type in output_artifacts.items():
    pipeline_component_spec.output_definitions.artifacts[
        output_name].artifact_type.CopyFrom(artifact_type.get_ir_type())

  pipeline_component_spec.executor_label = dsl_utils.sanitize_executor_label(
      task_name)

  # Iterate through the inputs/outputs specs to get pipeline task spec.
  for input_name, param in input_parameters.items():
    if isinstance(param, dsl.PipelineParam) and param.op_name:
      # If the param has a valid op_name, this should be a pipeline parameter
      # produced by an upstream task.
      pipeline_task_spec.inputs.parameters[input_name].CopyFrom(
          pipeline_spec_pb2.TaskInputsSpec.InputParameterSpec(
              task_output_parameter=pipeline_spec_pb2.TaskInputsSpec
              .InputParameterSpec.TaskOutputParameterSpec(
                  producer_task='task-{}'.format(param.op_name),
                  output_parameter_key=param.name)))
    elif isinstance(param, dsl.PipelineParam) and not param.op_name:
      # If a valid op_name is missing, this should be a pipeline parameter.
      pipeline_task_spec.inputs.parameters[input_name].CopyFrom(
          pipeline_spec_pb2.TaskInputsSpec.InputParameterSpec(
              component_input_parameter=param.name))
    else:
      # If this is not a pipeline param, then it should be a value.
      pipeline_task_spec.inputs.parameters[input_name].CopyFrom(
          pipeline_spec_pb2.TaskInputsSpec.InputParameterSpec(
              runtime_value=pipeline_spec_pb2.ValueOrRuntimeParameter(
                  constant_value=dsl_utils.get_value(param))))

  for input_name, art in input_artifacts.items():
    if art.op_name:
      # If the param has a valid op_name, this should be an artifact produced
      # by an upstream task.
      pipeline_task_spec.inputs.artifacts[input_name].CopyFrom(
          pipeline_spec_pb2.TaskInputsSpec.InputArtifactSpec(
              task_output_artifact=pipeline_spec_pb2.TaskInputsSpec
              .InputArtifactSpec.TaskOutputArtifactSpec(
                  producer_task='task-{}'.format(art.op_name),
                  output_artifact_key=art.name)))
    else:
      # Otherwise, this should be from the input of the subdag.
      pipeline_task_spec.inputs.artifacts[input_name].CopyFrom(
          pipeline_spec_pb2.TaskInputsSpec.InputArtifactSpec(
              component_input_artifact=art.name))

  # TODO: Add task dependencies/trigger policies/caching/iterator
  pipeline_task_spec.component_ref.name = dsl_utils.sanitize_component_name(
      task_name)

  # Construct dummy I/O declaration for the op.
  # TODO: resolve name conflict instead of raising errors.
  dummy_outputs = collections.OrderedDict()
  for output_name in output_artifacts:
    dummy_outputs[output_name] = _DUMMY_PATH

  for output_name in output_parameters:
    if output_name in dummy_outputs:
      raise KeyError(
          'Got name collision for output key %s. Consider renaming '
          'either output parameters or output '
          'artifacts.' % output_name)
    dummy_outputs[output_name] = _DUMMY_PATH

  dummy_inputs = collections.OrderedDict()
  for input_name in input_artifacts:
    dummy_inputs[input_name] = _DUMMY_PATH
  for input_name in input_parameters:
    if input_name in dummy_inputs:
      raise KeyError(
          'Got name collision for input key %s. Consider renaming '
          'either input parameters or input '
          'artifacts.' % input_name)
    dummy_inputs[input_name] = _DUMMY_PATH

  # Construct the AIP (Unified) custom job op.
  return AiPlatformCustomJobOp(
      name=task_name,
      custom_job_spec=job_spec,
      component_spec=pipeline_component_spec,
      task_spec=pipeline_task_spec,
      task_inputs=[
          dsl.InputArgumentPath(
              argument=dummy_inputs[input_name],
              input=input_name,
              path=path,
          ) for input_name, path in dummy_inputs.items()
      ],
      task_outputs=dummy_outputs)
def create_container_op_from_component_and_arguments(
    component_spec: structures.ComponentSpec,
    arguments: Mapping[str, Any],
    component_ref: structures.ComponentReference = None,
) -> container_op.ContainerOp:
  """Instantiates ContainerOp object.

  Args:
    component_spec: The component spec object.
    arguments: The dictionary of component arguments.
    component_ref: The component reference. Optional.

  Returns:
    A ContainerOp instance.

  Raises:
    TypeError: If an argument is incompatible with the declared input type,
      or a ContainerOp object is passed as an input argument.
    NotImplementedError: If an argument has an unsupported type.
  """
  pipeline_task_spec = pipeline_spec_pb2.PipelineTaskSpec()
  pipeline_task_spec.task_info.name = component_spec.name
  # TODO: might need to append suffix to executor_label to ensure its
  # uniqueness?
  pipeline_task_spec.executor_label = component_spec.name

  # Keep track of auto-injected importer spec.
  importer_spec = {}

  # Check types of the reference arguments and serialize PipelineParams
  arguments = arguments.copy()
  for input_name, argument_value in arguments.items():
    if isinstance(argument_value, dsl.PipelineParam):
      input_type = component_spec._inputs_dict[input_name].type
      reference_type = argument_value.param_type
      types.verify_type_compatibility(
          reference_type, input_type,
          'Incompatible argument passed to the input "{}" of component "{}": '
          .format(input_name, component_spec.name))

      arguments[input_name] = str(argument_value)

      if type_utils.is_parameter_type(input_type):
        if argument_value.op_name:
          pipeline_task_spec.inputs.parameters[
              input_name].task_output_parameter.producer_task = (
                  argument_value.op_name)
          pipeline_task_spec.inputs.parameters[
              input_name].task_output_parameter.output_parameter_key = (
                  argument_value.name)
        else:
          pipeline_task_spec.inputs.parameters[
              input_name].runtime_value.runtime_parameter = (
                  argument_value.name)
      else:
        if argument_value.op_name:
          pipeline_task_spec.inputs.artifacts[
              input_name].producer_task = (argument_value.op_name)
          pipeline_task_spec.inputs.artifacts[
              input_name].output_artifact_key = (argument_value.name)
        else:
          # argument_value.op_name could be none, in which case an importer
          # node will be inserted later.
          pipeline_task_spec.inputs.artifacts[input_name].producer_task = ''
          type_schema = type_utils.get_input_artifact_type_schema(
              input_name, component_spec.inputs)
          importer_spec[input_name] = importer_node.build_importer_spec(
              input_type_schema=type_schema,
              pipeline_param_name=argument_value.name)
    elif isinstance(argument_value, str):
      input_type = component_spec._inputs_dict[input_name].type
      if type_utils.is_parameter_type(input_type):
        pipeline_task_spec.inputs.parameters[
            input_name].runtime_value.constant_value.string_value = (
                argument_value)
      else:
        # An importer node with constant value artifact_uri will be inserted.
        pipeline_task_spec.inputs.artifacts[input_name].producer_task = ''
        type_schema = type_utils.get_input_artifact_type_schema(
            input_name, component_spec.inputs)
        importer_spec[input_name] = importer_node.build_importer_spec(
            input_type_schema=type_schema, constant_value=argument_value)
    elif isinstance(argument_value, int):
      # NOTE(review): bool is a subclass of int, so booleans also land here
      # and are serialized as int_value — confirm this is intended.
      pipeline_task_spec.inputs.parameters[
          input_name].runtime_value.constant_value.int_value = argument_value
    elif isinstance(argument_value, float):
      pipeline_task_spec.inputs.parameters[
          input_name].runtime_value.constant_value.double_value = argument_value
    elif isinstance(argument_value, dsl.ContainerOp):
      raise TypeError(
          'ContainerOp object {} was passed to component as an input argument. '
          'Pass a single output instead.'.format(input_name))
    else:
      raise NotImplementedError(
          'Input argument supports only the following types: PipelineParam'
          ', str, int, float. Got: "{}".'.format(argument_value))

  for output in component_spec.outputs or []:
    if type_utils.is_parameter_type(output.type):
      pipeline_task_spec.outputs.parameters[
          output.name].type = type_utils.get_parameter_type(output.type)
    else:
      pipeline_task_spec.outputs.artifacts[
          output.name].artifact_type.instance_schema = (
              type_utils.get_artifact_type_schema(output.type))

  inputs_dict = {
      input_spec.name: input_spec
      for input_spec in component_spec.inputs or []
  }
  outputs_dict = {
      output_spec.name: output_spec
      for output_spec in component_spec.outputs or []
  }

  def _input_artifact_uri_placeholder(input_key: str) -> str:
    # URI placeholders are only valid for artifact (non-parameter) inputs.
    if type_utils.is_parameter_type(inputs_dict[input_key].type):
      raise TypeError(
          'Input "{}" with type "{}" cannot be paired with InputUriPlaceholder.'
          .format(input_key, inputs_dict[input_key].type))
    else:
      return "{{{{$.inputs.artifacts['{}'].uri}}}}".format(input_key)

  def _input_artifact_path_placeholder(input_key: str) -> str:
    if type_utils.is_parameter_type(inputs_dict[input_key].type):
      raise TypeError(
          'Input "{}" with type "{}" cannot be paired with InputPathPlaceholder.'
          .format(input_key, inputs_dict[input_key].type))
    else:
      return "{{{{$.inputs.artifacts['{}'].path}}}}".format(input_key)

  def _input_parameter_placeholder(input_key: str) -> str:
    # Value placeholders are only valid for parameter inputs.
    if type_utils.is_parameter_type(inputs_dict[input_key].type):
      return "{{{{$.inputs.parameters['{}']}}}}".format(input_key)
    else:
      raise TypeError(
          'Input "{}" with type "{}" cannot be paired with InputValuePlaceholder.'
          .format(input_key, inputs_dict[input_key].type))

  def _output_artifact_uri_placeholder(output_key: str) -> str:
    if type_utils.is_parameter_type(outputs_dict[output_key].type):
      raise TypeError(
          'Output "{}" with type "{}" cannot be paired with OutputUriPlaceholder.'
          .format(output_key, outputs_dict[output_key].type))
    else:
      return "{{{{$.outputs.artifacts['{}'].uri}}}}".format(output_key)

  def _output_artifact_path_placeholder(output_key: str) -> str:
    return "{{{{$.outputs.artifacts['{}'].path}}}}".format(output_key)

  def _output_parameter_path_placeholder(output_key: str) -> str:
    return "{{{{$.outputs.parameters['{}'].output_file}}}}".format(output_key)

  def _resolve_output_path_placeholder(output_key: str) -> str:
    # Parameters resolve to an output file; artifacts resolve to their path.
    if type_utils.is_parameter_type(outputs_dict[output_key].type):
      return _output_parameter_path_placeholder(output_key)
    else:
      return _output_artifact_path_placeholder(output_key)

  resolved_cmd = _resolve_command_line_and_paths(
      component_spec=component_spec,
      arguments=arguments,
      input_value_generator=_input_parameter_placeholder,
      input_uri_generator=_input_artifact_uri_placeholder,
      output_uri_generator=_output_artifact_uri_placeholder,
      input_path_generator=_input_artifact_path_placeholder,
      output_path_generator=_resolve_output_path_placeholder,
  )

  container_spec = component_spec.implementation.container

  pipeline_container_spec = (
      pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec())
  pipeline_container_spec.image = container_spec.image
  pipeline_container_spec.command.extend(resolved_cmd.command)
  pipeline_container_spec.args.extend(resolved_cmd.args)

  output_uris_and_paths = resolved_cmd.output_uris.copy()
  output_uris_and_paths.update(resolved_cmd.output_paths)
  input_uris_and_paths = resolved_cmd.input_uris.copy()
  input_uris_and_paths.update(resolved_cmd.input_paths)

  # Temporarily suppress the reusable-component warning while constructing
  # the ContainerOp, then restore the previous setting.
  old_warn_value = dsl.ContainerOp._DISABLE_REUSABLE_COMPONENT_WARNING
  dsl.ContainerOp._DISABLE_REUSABLE_COMPONENT_WARNING = True
  task = container_op.ContainerOp(
      name=component_spec.name or _default_component_name,
      image=container_spec.image,
      command=resolved_cmd.command,
      arguments=resolved_cmd.args,
      file_outputs=output_uris_and_paths,
      artifact_argument_paths=[
          dsl.InputArgumentPath(
              argument=arguments[input_name],
              input=input_name,
              path=path,
          ) for input_name, path in input_uris_and_paths.items()
      ],
  )

  task.task_spec = pipeline_task_spec
  task.importer_spec = importer_spec
  task.container_spec = pipeline_container_spec
  dsl.ContainerOp._DISABLE_REUSABLE_COMPONENT_WARNING = old_warn_value

  component_meta = copy.copy(component_spec)
  task._set_metadata(component_meta)
  # BUGFIX: component_ref defaults to None; copy.copy(None) returns None, so
  # setting .spec on the copy raised AttributeError. Only attach the
  # reference when one was actually provided.
  if component_ref is not None:
    component_ref_without_spec = copy.copy(component_ref)
    component_ref_without_spec.spec = None
    task._component_ref = component_ref_without_spec

  # Previously, ContainerOp had strict requirements for the output names, so
  # we had to convert all the names before passing them to the ContainerOp
  # constructor. Outputs with non-pythonic names could not be accessed using
  # their original names. Now ContainerOp supports any output names, so we're
  # now using the original output names. However to support legacy pipelines,
  # we're also adding output references with pythonic names.
  # TODO: Add warning when people use the legacy output names.
  output_names = [
      output_spec.name for output_spec in component_spec.outputs or []
  ]  # Stabilizing the ordering
  output_name_to_python = generate_unique_name_conversion_table(
      output_names, _sanitize_python_function_name)
  for output_name in output_names:
    pythonic_output_name = output_name_to_python[output_name]
    # Note: Some component outputs are currently missing from task.outputs
    # (e.g. MLPipeline UI Metadata)
    if (pythonic_output_name not in task.outputs and
        output_name in task.outputs):
      task.outputs[pythonic_output_name] = task.outputs[output_name]

  if component_spec.metadata:
    annotations = component_spec.metadata.annotations or {}
    for key, value in annotations.items():
      task.add_pod_annotation(key, value)
    for key, value in (component_spec.metadata.labels or {}).items():
      task.add_pod_label(key, value)
    # Disabling the caching for the volatile components by default
    if annotations.get('volatile_component', 'false') == 'true':
      task.execution_options.caching_strategy.max_cache_staleness = 'P0D'

  return task
def test_get_parameter_type(self):
  """Checks lookup by type name and by Python type, plus error paths."""
  # Test get parameter type by name.
  by_name = [
      ('Int', pb.PrimitiveType.INT),
      ('Integer', pb.PrimitiveType.INT),
      ('Double', pb.PrimitiveType.DOUBLE),
      ('Float', pb.PrimitiveType.DOUBLE),
      ('String', pb.PrimitiveType.STRING),
      ('Str', pb.PrimitiveType.STRING),
  ]
  # Test get parameter by Python type.
  by_python_type = [
      (int, pb.PrimitiveType.INT),
      (float, pb.PrimitiveType.DOUBLE),
      (str, pb.PrimitiveType.STRING),
  ]
  for given_type, expected_type in by_name + by_python_type:
    self.assertEqual(expected_type,
                     type_utils.get_parameter_type(given_type))

  with self.assertRaises(AttributeError):
    type_utils.get_parameter_type_schema(None)

  with self.assertRaisesRegex(TypeError, 'Got illegal parameter type.'):
    type_utils.get_parameter_type(bool)