def test_component_spec_with_placeholder_referencing_nonexisting_input_output(
        self):
    with self.assertRaisesRegex(
            pydantic.ValidationError,
            'Argument "input_name=\'input000\'" '
            'references non-existing input.'):
        structures.ComponentSpec(
            name='component_1',
            implementation=structures.Implementation(
                container=structures.ContainerSpec(
                    image='alpine',
                    commands=[
                        'sh',
                        '-c',
                        'set -ex\necho "$0" > "$1"',
                        structures.InputValuePlaceholder(
                            input_name='input000'),
                        structures.OutputPathPlaceholder(
                            output_name='output1'),
                    ],
                )),
            inputs={'input1': structures.InputSpec(type='String')},
            outputs={'output1': structures.OutputSpec(type='String')},
        )

    with self.assertRaisesRegex(
            pydantic.ValidationError,
            'Argument "output_name=\'output000\'" '
            'references non-existing output.'):
        structures.ComponentSpec(
            name='component_1',
            implementation=structures.Implementation(
                container=structures.ContainerSpec(
                    image='alpine',
                    commands=[
                        'sh',
                        '-c',
                        'set -ex\necho "$0" > "$1"',
                        structures.InputValuePlaceholder(
                            input_name='input1'),
                        structures.OutputPathPlaceholder(
                            output_name='output000'),
                    ],
                )),
            inputs={'input1': structures.InputSpec(type='String')},
            outputs={'output1': structures.OutputSpec(type='String')},
        )
def test_simple_component_spec_save_to_component_yaml(self):
    open_mock = mock.mock_open()
    expected_yaml = textwrap.dedent("""\
    name: component_1
    inputs:
      input1: {type: String}
    outputs:
      output1: {type: String}
    implementation:
      container:
        image: alpine
        commands:
        - sh
        - -c
        - 'set -ex

          echo "$0" > "$1"'
        - {inputValue: input1}
        - {outputPath: output1}
    """)

    with mock.patch("builtins.open", open_mock, create=True):
        structures.ComponentSpec(
            name='component_1',
            implementation=structures.Implementation(
                container=structures.ContainerSpec(
                    image='alpine',
                    commands=[
                        'sh',
                        '-c',
                        'set -ex\necho "$0" > "$1"',
                        structures.InputValuePlaceholder(
                            input_name='input1'),
                        structures.OutputPathPlaceholder(
                            output_name='output1'),
                    ],
                )),
            inputs={'input1': structures.InputSpec(type='String')},
            outputs={'output1': structures.OutputSpec(type='String')},
        ).save_to_component_yaml('test_save_file.txt')

    open_mock.assert_called_with('test_save_file.txt', 'a')
    open_mock.return_value.write.assert_called_once_with(expected_yaml)
def test_create_pipeline_task_valid(self):
    expected_component_spec = structures.ComponentSpec(
        name='component1',
        implementation=structures.Implementation(
            container=structures.ContainerSpec(
                image='alpine',
                commands=['sh', '-c', 'echo "$0" >> "$1"'],
                arguments=[
                    structures.InputValuePlaceholder(input_name='input1'),
                    structures.OutputPathPlaceholder(output_name='output1'),
                ],
            )),
        inputs={
            'input1': structures.InputSpec(type='String'),
        },
        outputs={
            'output1': structures.OutputSpec(type='Artifact'),
        },
    )
    expected_task_spec = structures.TaskSpec(
        name='component1',
        inputs={'input1': 'value'},
        dependent_tasks=[],
        component_ref='component1',
    )
    expected_container_spec = structures.ContainerSpec(
        image='alpine',
        commands=['sh', '-c', 'echo "$0" >> "$1"'],
        arguments=[
            "{{$.inputs.parameters['input1']}}",
            "{{$.outputs.artifacts['output1'].path}}",
        ],
    )

    task = pipeline_task.PipelineTask(
        component_spec=structures.ComponentSpec.load_from_component_yaml(
            V2_YAML),
        arguments={'input1': 'value'},
    )
    self.assertEqual(task.task_spec, expected_task_spec)
    self.assertEqual(task.component_spec, expected_component_spec)
    self.assertEqual(task.container_spec, expected_container_spec)
def test_simple_component_spec_load_from_v2_component_yaml(self):
    component_yaml_v2 = textwrap.dedent("""\
    name: component_1
    inputs:
      input1:
        type: String
    outputs:
      output1:
        type: String
    implementation:
      container:
        image: alpine
        commands:
        - sh
        - -c
        - 'set -ex

          echo "$0" > "$1"'
        - inputValue: input1
        - outputPath: output1
    """)

    generated_spec = structures.ComponentSpec.load_from_component_yaml(
        component_yaml_v2)

    expected_spec = structures.ComponentSpec(
        name='component_1',
        implementation=structures.Implementation(
            container=structures.ContainerSpec(
                image='alpine',
                commands=[
                    'sh',
                    '-c',
                    'set -ex\necho "$0" > "$1"',
                    structures.InputValuePlaceholder(input_name='input1'),
                    structures.OutputPathPlaceholder(output_name='output1'),
                ],
            )),
        inputs={'input1': structures.InputSpec(type='String')},
        outputs={'output1': structures.OutputSpec(type='String')})

    self.assertEqual(generated_spec, expected_spec)
        pass


component_op = TestComponent(
    component_spec=structures.ComponentSpec(
        name='component_1',
        implementation=structures.Implementation(
            container=structures.ContainerSpec(
                image='alpine',
                commands=[
                    'sh',
                    '-c',
                    'set -ex\necho "$0" > "$1"',
                    structures.InputValuePlaceholder(input_name='input1'),
                    structures.OutputPathPlaceholder(output_name='output1'),
                ],
            )),
        inputs={'input1': structures.InputSpec(type='String')},
        outputs={'output1': structures.OutputSpec(type='String')},
    ))


@dsl.pipeline(name='experimental-v2-component', pipeline_root='dummy_root')
def my_pipeline(text: str = 'Hello world!'):
    component_1 = component_op(input1=text)
    component_2 = component_op(input1=component_1.outputs['output1'])


if __name__ == '__main__':
    compiler.Compiler().compile(
        pipeline_func=my_pipeline,
        package_path=__file__.replace('.py', '.json'))
def extract_component_interface(func: Callable) -> structures.ComponentSpec:
    single_output_name_const = 'Output'

    signature = inspect.signature(func)
    parameters = list(signature.parameters.values())

    parsed_docstring = docstring_parser.parse(inspect.getdoc(func))
    doc_dict = {p.arg_name: p.description for p in parsed_docstring.params}

    inputs = {}
    outputs = {}

    input_names = set()
    output_names = set()
    for parameter in parameters:
        parameter_type = type_annotations.maybe_strip_optional_from_annotation(
            parameter.annotation)
        passing_style = None
        io_name = parameter.name

        if type_annotations.is_artifact_annotation(parameter_type):
            # passing_style is either type_annotations.InputAnnotation or
            # type_annotations.OutputAnnotation.
            passing_style = type_annotations.get_io_artifact_annotation(
                parameter_type)

            # parameter_type is type_annotations.Artifact or one of its
            # subclasses.
            parameter_type = type_annotations.get_io_artifact_class(
                parameter_type)
            if not issubclass(parameter_type, artifact_types.Artifact):
                raise ValueError(
                    'Input[T] and Output[T] are only supported when T is a '
                    'subclass of Artifact. Found `{} with type {}`'.format(
                        io_name, parameter_type))

            if parameter.default is not inspect.Parameter.empty:
                raise ValueError(
                    'Default values for Input/Output artifacts are not supported.'
                )
        elif isinstance(
                parameter_type,
            (type_annotations.InputPath, type_annotations.OutputPath)):
            passing_style = type(parameter_type)
            parameter_type = parameter_type.type
            if parameter.default is not inspect.Parameter.empty and not (
                    passing_style == type_annotations.InputPath and
                    parameter.default is None):
                raise ValueError(
                    'Path inputs only support default values of None. Default'
                    ' values for outputs are not supported.')

        type_struct = _annotation_to_type_struct(parameter_type)

        if passing_style in [
                type_annotations.OutputAnnotation, type_annotations.OutputPath
        ]:
            io_name = _maybe_make_unique(io_name, output_names)
            output_names.add(io_name)
            output_spec = structures.OutputSpec(
                type=type_struct, description=doc_dict.get(parameter.name))
            outputs[io_name] = output_spec
        else:
            io_name = _maybe_make_unique(io_name, input_names)
            input_names.add(io_name)
            input_spec = structures.InputSpec(
                type=type_struct, description=doc_dict.get(parameter.name))

            if parameter.default is not inspect.Parameter.empty:
                # input_spec.optional = True
                if parameter.default is not None:
                    outer_type_name = list(
                        type_struct.keys())[0] if isinstance(
                            type_struct, dict) else type_struct
                    try:
                        input_spec.default = parameter.default
                    except Exception as ex:
                        warnings.warn(
                            'Could not serialize the default value of the'
                            ' parameter "{}". {}'.format(parameter.name, ex))
            inputs[io_name] = input_spec

    # Analyzing the return type annotations.
    return_ann = signature.return_annotation
    if hasattr(return_ann, '_fields'):  # NamedTuple
        # Getting field type annotations.
        # __annotations__ does not exist in python 3.5 and earlier
        # _field_types does not exist in python 3.9 and later
        field_annotations = getattr(return_ann, '__annotations__',
                                    None) or getattr(return_ann,
                                                     '_field_types', None)
        for field_name in return_ann._fields:
            type_struct = None
            if field_annotations:
                type_struct = _annotation_to_type_struct(
                    field_annotations.get(field_name, None))
            output_name = _maybe_make_unique(field_name, output_names)
            output_names.add(output_name)
            output_spec = structures.OutputSpec(type=type_struct)
            outputs[output_name] = output_spec
    # Deprecated dict-based way of declaring multiple outputs. Was only used by
    # the @component decorator.
    elif isinstance(return_ann, dict):
        warnings.warn(
            "The ability to specify multiple outputs using the dict syntax"
            " has been deprecated. It will be removed soon after release"
            " 0.1.32. Please use typing.NamedTuple to declare multiple"
            " outputs.")
        for output_name, output_type_annotation in return_ann.items():
            output_type_struct = _annotation_to_type_struct(
                output_type_annotation)
            output_spec = structures.OutputSpec(type=output_type_struct)
            outputs[output_name] = output_spec
    elif signature.return_annotation is not None and signature.return_annotation != inspect.Parameter.empty:
        output_name = _maybe_make_unique(single_output_name_const,
                                         output_names)
        # Fixes exotic, but possible collision:
        # `def func(output_path: OutputPath()) -> str: ...`
        output_names.add(output_name)
        type_struct = _annotation_to_type_struct(signature.return_annotation)
        output_spec = structures.OutputSpec(type=type_struct)
        outputs[output_name] = output_spec

    # Component name and description are derived from the function's name and
    # docstring. The name can be overridden by setting the func.__name__
    # attribute (or the legacy func._component_human_name attribute). The
    # description can be overridden by setting the func.__doc__ attribute (or
    # the legacy func._component_description attribute).
    component_name = getattr(func, '_component_human_name',
                             None) or _python_function_name_to_component_name(
                                 func.__name__)
    description = getattr(func, '_component_description',
                          None) or parsed_docstring.short_description
    if description:
        description = description.strip()

    component_spec = structures.ComponentSpec(
        name=component_name,
        description=description,
        inputs=inputs if inputs else None,
        outputs=outputs if outputs else None,
        # Dummy implementation to bypass model validation.
        implementation=structures.Implementation(),
    )
    return component_spec
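# --- Usage sketch (illustrative only) ----------------------------------------
# A minimal sketch of what extract_component_interface produces for a small
# function, under the assumption that this module's helpers (structures,
# type_annotations, _annotation_to_type_struct, etc.) are available as shown
# above. The sample function "train" below is hypothetical, not part of the
# SDK.
if __name__ == '__main__':
    from typing import NamedTuple

    def train(
        epochs: int,
        learning_rate: float = 0.01,
    ) -> NamedTuple('TrainOutputs', [('accuracy', float)]):
        """Trains a toy model.

        Args:
            epochs: Number of passes over the data.
            learning_rate: Step size for the optimizer.
        """
        ...

    spec = extract_component_interface(train)
    # Per the logic above:
    # - spec.name is derived from the function name via
    #   _python_function_name_to_component_name.
    # - spec.inputs contains 'epochs' and 'learning_rate'; 'learning_rate'
    #   carries default=0.01 and its docstring description.
    # - spec.outputs contains 'accuracy', taken from the NamedTuple return
    #   annotation.
    # - spec.implementation is an empty structures.Implementation() used only
    #   to satisfy model validation.
    print(spec)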
    component_spec=structures.ComponentSpec(
        name='component_1',
        implementation=structures.Implementation(
            container=structures.ContainerSpec(
                image='alpine',
                commands=[
                    'sh',
                    '-c',
                    'set -ex\necho "$0" "$1" "$2" > "$3"',
                    structures.InputValuePlaceholder(input_name='input1'),
                    structures.InputValuePlaceholder(input_name='input2'),
                    structures.InputValuePlaceholder(input_name='input3'),
                    structures.OutputPathPlaceholder(output_name='output1'),
                ],
            )),
        inputs={
            'input1': structures.InputSpec(type='String'),
            'input2': structures.InputSpec(type='Integer'),
            'input3': structures.InputSpec(type='Float', default=3.14),
        },
        outputs={
            'output1': structures.OutputSpec(type='String'),
        },
    ))


class BaseComponentTest(unittest.TestCase):

    @patch.object(pipeline_task, 'create_pipeline_task', autospec=True)
    def test_instantiate_component_with_keyword_arguments(
            self, mock_create_pipeline_task):
def test_component_spec_load_from_v1_component_yaml(self):
    component_yaml_v1 = textwrap.dedent("""\
    name: Component with 2 inputs and 2 outputs
    inputs:
    - {name: Input parameter, type: String}
    - {name: Input artifact}
    outputs:
    - {name: Output 1}
    - {name: Output 2}
    implementation:
      container:
        image: busybox
        command: [sh, -c, '
            mkdir -p $(dirname "$2")
            mkdir -p $(dirname "$3")
            echo "$0" > "$2"
            cp "$1" "$3"
            '
        ]
        args:
        - {inputValue: Input parameter}
        - {inputPath: Input artifact}
        - {outputPath: Output 1}
        - {outputPath: Output 2}
    """)

    generated_spec = structures.ComponentSpec.load_from_component_yaml(
        component_yaml_v1)

    expected_spec = structures.ComponentSpec(
        name='Component with 2 inputs and 2 outputs',
        implementation=structures.Implementation(
            container=structures.ContainerSpec(
                image='busybox',
                commands=[
                    'sh',
                    '-c',
                    (' mkdir -p $(dirname "$2") mkdir -p $(dirname "$3") '
                     'echo "$0" > "$2" cp "$1" "$3" '),
                ],
                arguments=[
                    structures.InputValuePlaceholder(
                        input_name='Input parameter'),
                    structures.InputPathPlaceholder(
                        input_name='Input artifact'),
                    structures.OutputPathPlaceholder(output_name='Output 1'),
                    structures.OutputPathPlaceholder(output_name='Output 2'),
                ],
                env={},
            )),
        inputs={
            'Input parameter': structures.InputSpec(type='String'),
            'Input artifact': structures.InputSpec(type='Artifact')
        },
        outputs={
            'Output 1': structures.OutputSpec(type='Artifact'),
            'Output 2': structures.OutputSpec(type='Artifact'),
        })

    self.assertEqual(generated_spec, expected_spec)
            image='alpine',
            arguments=[
                structures.IfPresentPlaceholder(
                    if_structure=structures.IfPresentPlaceholderStructure(
                        input_name='optional_input_1',
                        then=[
                            '--arg1',
                            structures.InputValuePlaceholder(
                                input_name='optional_input_1'),
                        ],
                        otherwise=[
                            '--arg2',
                            'default',
                        ]))
            ])),
    inputs={'optional_input_1': structures.InputSpec(type='String')},
)

V1_YAML_CONCAT_PLACEHOLDER = textwrap.dedent("""\
    name: component_concat
    inputs:
    - {name: input_prefix, type: String}
    implementation:
      container:
        image: alpine
        args:
        - concat: ['--arg1', {inputValue: input_prefix}]
    """)

V2_YAML_CONCAT_PLACEHOLDER = textwrap.dedent("""\
    name: component_concat