Example #1
class _MyComponentSpec(component_spec.ComponentSpec):
    PARAMETERS = {
        'my_module_file':
        component_spec.ExecutionParameter(type=str, optional=True),
        'my_module_path':
        component_spec.ExecutionParameter(type=str, optional=True),
    }
    INPUTS = {}
    OUTPUTS = {}
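A minimal usage sketch, assuming hypothetical module values: because both parameters are declared with optional=True, _MyComponentSpec can be constructed with either parameter or with neither.

# Hypothetical values, for illustration only.
spec_from_file = _MyComponentSpec(my_module_file='/path/to/my_module.py')
spec_from_path = _MyComponentSpec(my_module_path='my_project.my_module')
spec_empty = _MyComponentSpec()  # valid: both parameters are optional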
Example #2
class _BasicComponentSpec(types.ComponentSpec):

    PARAMETERS = {
        "folds":
        component_spec.ExecutionParameter(type=int),
        "proto":
        component_spec.ExecutionParameter(type=example_gen_pb2.Input,
                                          optional=True),
    }
    INPUTS = {
        "input": component_spec.ChannelParameter(type=_InputArtifact),
    }
    OUTPUTS = {
        "output": component_spec.ChannelParameter(type=_OutputArtifact),
    }
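A hedged instantiation sketch, assuming the _InputArtifact and _OutputArtifact types from the spec above and TFX's channel_utils helper; the optional proto parameter is simply omitted.

from tfx.types import channel_utils

spec = _BasicComponentSpec(
    folds=10,  # required ExecutionParameter
    input=channel_utils.as_channel([_InputArtifact()]),
    output=channel_utils.as_channel([_OutputArtifact()]),
)
# 'proto' is declared optional=True, so it can be left out entirely.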
Example #3
class DownStreamSpec(types.ComponentSpec):
  PARAMETERS = {
      'input_num':
          component_spec.ExecutionParameter(type=int)
  }
  INPUTS = {}
  OUTPUTS = {}
Example #4
class ProducerSpec(types.ComponentSpec):
  INPUTS = {}
  OUTPUTS = {
      'output1': component_spec.ChannelParameter(type=standard_artifacts.Model),
  }
  PARAMETERS = {
      'param1': component_spec.ExecutionParameter(type=Text),
  }
Example #5
class _HelloWorldSpec(component_spec.ComponentSpec):
    INPUTS = {}
    OUTPUTS = {
        'greeting':
        component_spec.ChannelParameter(type=standard_artifacts.String)
    }
    PARAMETERS = {
        'word': component_spec.ExecutionParameter(type=str),
    }
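A hedged sketch of how a spec like this is typically consumed: it is paired with an executor in a BaseComponent subclass. _HelloWorldExecutor is a hypothetical executor, and the import paths may vary across TFX versions.

from tfx.dsl.components.base import base_component, executor_spec
from tfx.types import channel_utils, standard_artifacts


class HelloComponent(base_component.BaseComponent):
    """Component backed by _HelloWorldSpec."""

    SPEC_CLASS = _HelloWorldSpec
    # _HelloWorldExecutor (hypothetical) would write the 'greeting' artifact.
    EXECUTOR_SPEC = executor_spec.ExecutorClassSpec(_HelloWorldExecutor)

    def __init__(self, word: str):
        greeting = channel_utils.as_channel([standard_artifacts.String()])
        super().__init__(spec=_HelloWorldSpec(word=word, greeting=greeting))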
Example #6
class _BasicComponentSpec(component_spec.ComponentSpec):

    PARAMETERS = {
        'folds': component_spec.ExecutionParameter(type=int),
    }
    INPUTS = {
        'input': component_spec.ChannelParameter(type=_InputArtifact),
    }
    OUTPUTS = {
        'output': component_spec.ChannelParameter(type=_OutputArtifact),
    }
Example #7
class TestComponentSpec(types.ComponentSpec):
  INPUTS = {
      'input_artifact':
          component_spec.ChannelParameter(type=standard_artifacts.Examples),
  }
  OUTPUTS = {
      'output_artifact':
          component_spec.ChannelParameter(type=standard_artifacts.Model),
  }
  PARAMETERS = {
      'input_parameter': component_spec.ExecutionParameter(type=int),
  }
Example #8
class TransformerSpec(component_spec.ComponentSpec):
    """ComponentSpec for a dummy container component."""
    INPUTS = {
        'input1':
        component_spec.ChannelParameter(
            type=tfx.types.standard_artifacts.Model),
    }
    OUTPUTS = {
        'output1':
        component_spec.ChannelParameter(
            type=tfx.types.standard_artifacts.Model),
    }
    PARAMETERS = {
        'param1': component_spec.ExecutionParameter(type=str),
    }
Example #9
def generate_component_spec_class(
    step_name: str,
    input_spec: Dict[str, Type[BaseArtifact]],
    output_spec: Dict[str, Type[BaseArtifact]],
    execution_parameter_names: Set[str],
) -> Type[component_spec.ComponentSpec]:
    """Generates a TFX component spec class for a ZenML step.

    Args:
        step_name: Name of the step for which the component will be created.
        input_spec: Input artifacts of the step.
        output_spec: Output artifacts of the step.
        execution_parameter_names: Execution parameter names of the step.

    Returns:
        A TFX component spec class.
    """
    inputs = {
        key: component_spec.ChannelParameter(type=artifact_type)
        for key, artifact_type in input_spec.items()
    }
    outputs = {
        key: component_spec.ChannelParameter(type=artifact_type)
        for key, artifact_type in output_spec.items()
    }
    parameters = {
        key: component_spec.ExecutionParameter(
            type=str)  # type: ignore[no-untyped-call] # noqa
        for key in execution_parameter_names
    }
    return type(
        f"{step_name}_Spec",
        (component_spec.ComponentSpec, ),
        {
            "INPUTS": inputs,
            "OUTPUTS": outputs,
            "PARAMETERS": parameters,
        },
    )
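An illustrative call, with a hypothetical step name, artifact types, and parameter names, showing what the factory returns:

from tfx.types import standard_artifacts

trainer_spec_cls = generate_component_spec_class(
    step_name='trainer',
    input_spec={'examples': standard_artifacts.Examples},
    output_spec={'model': standard_artifacts.Model},
    execution_parameter_names={'learning_rate', 'epochs'},
)
# The result is an ordinary TFX ComponentSpec subclass; note that every
# execution parameter is typed as str by this factory.
assert issubclass(trainer_spec_cls, component_spec.ComponentSpec)
assert set(trainer_spec_cls.PARAMETERS) == {'learning_rate', 'epochs'}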
Example #10
class _HelloWorldSpec(component_spec.ComponentSpec):
    INPUTS = {}
    OUTPUTS = {}
    PARAMETERS = {
        'name': component_spec.ExecutionParameter(type=str),
    }
Example #11
def create_container_component(
    name: Text,
    image: Text,
    command: List[executor_specs.CommandlineArgumentType],
    inputs: Dict[Text, Any] = None,
    outputs: Dict[Text, Any] = None,
    parameters: Dict[Text, Any] = None,
) -> Callable[..., base_component.BaseComponent]:
    """Creates a container-based component.

  Args:
    name: The name of the component.
    image: Container image name.
    command: Container entrypoint command-line. Not executed within a shell. The
      command-line can use placeholder objects that will be replaced at
      compilation time. The placeholder objects can be imported from
      tfx.dsl.component.experimental.placeholders. Note that Jinja templates are
      not supported.
    inputs: A dict from input name to artifact type (the component inputs).
    outputs: A dict from output name to artifact type (the component outputs).
    parameters: A dict from parameter name to primitive type (the component
      parameters).

  Returns:
    A component class that can be instantiated and used inside a pipeline.

  Example:

    component = create_container_component(
        name='TrainModel',
        inputs={
            'training_data': Dataset,
        },
        outputs={
            'model': Model,
        },
        parameters={
            'num_training_steps': int,
        },
        image='gcr.io/my-project/my-trainer',
        command=[
            'python3', 'my_trainer',
            '--training_data_uri', InputUriPlaceholder('training_data'),
            '--model_uri', OutputUriPlaceholder('model'),
            '--num_training_steps', InputValuePlaceholder('num_training_steps'),
        ]
    )
  """
    if not name:
        raise ValueError('Component name cannot be empty.')

    if inputs is None:
        inputs = {}
    if outputs is None:
        outputs = {}
    if parameters is None:
        parameters = {}

    input_channel_parameters = {}
    output_channel_parameters = {}
    output_channels = {}
    execution_parameters = {}

    for input_name, channel_type in inputs.items():
        # TODO(b/155804245) Sanitize the names so that they're valid python names
        input_channel_parameters[input_name] = (
            component_spec.ChannelParameter(type=channel_type, ))

    for output_name, channel_type in outputs.items():
        # TODO(b/155804245) Sanitize the names so that they're valid python names
        output_channel_parameters[output_name] = (
            component_spec.ChannelParameter(type=channel_type))
        artifact = channel_type()
        channel = channel_utils.as_channel([artifact])
        output_channels[output_name] = channel

    for param_name, parameter_type in parameters.items():
        # TODO(b/155804245) Sanitize the names so that they're valid python names

        execution_parameters[param_name] = (component_spec.ExecutionParameter(
            type=parameter_type))

    default_init_args = {**output_channels}

    return component_utils.create_tfx_component_class(
        name=name,
        tfx_executor_spec=executor_specs.TemplatedExecutorContainerSpec(
            image=image,
            command=command,
        ),
        input_channel_parameters=input_channel_parameters,
        output_channel_parameters=output_channel_parameters,
        execution_parameters=execution_parameters,
        default_init_args=default_init_args)
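A hedged follow-up to the docstring example above: the returned value is a component class, so instantiating it with channels and parameter values yields a pipeline node. The ingest_step producing a Dataset channel is an assumption.

TrainModel = create_container_component(
    name='TrainModel',
    image='gcr.io/my-project/my-trainer',
    command=['python3', 'my_trainer'],
    inputs={'training_data': Dataset},
    outputs={'model': Model},
    parameters={'num_training_steps': int},
)

# ingest_step (assumed) exposes a 'dataset' output channel of type Dataset.
train_task = TrainModel(
    training_data=ingest_step.outputs['dataset'],
    num_training_steps=10000,
)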
Example #12
def component(func: types.FunctionType) -> Callable[..., Any]:
    """Decorator: creates a component from a typehint-annotated Python function.

  This decorator creates a component based on typehint annotations specified for
  the arguments and return value for a Python function. Specifically, function
  arguments can be annotated with the following types and associated semantics:

  * `Parameter[T]` where `T` is `int`, `float`, `str`, or `bytes`: indicates
    that a primitive type execution parameter, whose value is known at pipeline
    construction time, will be passed for this argument. These parameters will
    be recorded in ML Metadata as part of the component's execution record. Can
    be an optional argument.
  * `int`, `float`, `str`, `bytes`: indicates that a primitive type value will
    be passed for this argument. This value is tracked as an `Integer`, `Float`,
    `String` or `Bytes` artifact (see `tfx.types.standard_artifacts`) whose
    value is read and passed into the given Python component function. Can be
    an optional argument.
  * `InputArtifact[ArtifactType]`: indicates that an input artifact object of
    type `ArtifactType` (deriving from `tfx.types.Artifact`) will be passed for
    this argument. This artifact is intended to be consumed as an input by this
    component (possibly reading from the path specified by its `.uri`). Can be
    an optional argument by specifying a default value of `None`.
  * `OutputArtifact[ArtifactType]`: indicates that an output artifact object of
    type `ArtifactType` (deriving from `tfx.types.Artifact`) will be passed for
    this argument. This artifact is intended to be emitted as an output by this
    component (and written to the path specified by its `.uri`). Cannot be an
    optional argument.

  The return value typehint should be either empty or `None`, in the case of a
  component function that has no return values, or an instance of
  `OutputDict(key_1=type_1, ...)`, where each key maps to a given type (each
  type is a primitive value type, i.e. `int`, `float`, `str` or `bytes`), to
  indicate that the return value is a dictionary with specified keys and value
  types.

  Note that output artifacts should not be included in the return value
  typehint; they should be included as `OutputArtifact` annotations in the
  function inputs, as described above.

  The function to which this decorator is applied must be at the top level of
  its Python module (it may not be defined within nested classes or function
  closures).

  This is example usage of component definition using this decorator:

      from tfx.components.base.annotations import OutputDict
      from tfx.components.base.annotations import InputArtifact
      from tfx.components.base.annotations import OutputArtifact
      from tfx.components.base.annotations import Parameter
      from tfx.components.base.decorators import component
      from tfx.types.standard_artifacts import Examples
      from tfx.types.standard_artifacts import Model

      @component
      def MyTrainerComponent(
          training_data: InputArtifact[Examples],
          model: OutputArtifact[Model],
          dropout_hyperparameter: float,
          num_iterations: Parameter[int] = 10
          ) -> OutputDict(loss=float, accuracy=float):
        '''My simple trainer component.'''

        records = read_examples(training_data.uri)
        model_obj = train_model(records, num_iterations, dropout_hyperparameter)
        model_obj.write_to(model.uri)

        return {
          'loss': model_obj.loss,
          'accuracy': model_obj.accuracy
        }

      # Example usage in a pipeline graph definition:
      # ...
      trainer = MyTrainerComponent(
          training_data=example_gen.outputs['examples'],
          dropout_hyperparameter=other_component.outputs['dropout'],
          num_iterations=1000)
      pusher = Pusher(model=trainer.outputs['model'])
      # ...

  Experimental: no backwards compatibility guarantees.

  Args:
    func: Typehint-annotated component executor function.

  Returns:
    `base_component.BaseComponent` subclass for the given component executor
    function.

  Raises:
    EnvironmentError: if the current Python interpreter is not Python 3.
  """
    if six.PY2:
        raise EnvironmentError('`@component` is only supported in Python 3.')

    # Defining a component within a nested class or function closure causes
    # problems because in this case, the generated component classes can't be
    # referenced via their qualified module path.
    #
    # See https://www.python.org/dev/peps/pep-3155/ for details about the special
    # '<locals>' namespace marker.
    if '<locals>' in func.__qualname__.split('.'):
        raise ValueError(
            'The @component decorator can only be applied to a function defined '
            'at the module level. It cannot be used to construct a component for a '
            'function defined in a nested class or function closure.')

    inputs, outputs, parameters, arg_formats, arg_defaults, returned_values = (
        function_parser.parse_typehint_component_function(func))

    spec_inputs = {}
    spec_outputs = {}
    spec_parameters = {}
    for key, artifact_type in inputs.items():
        spec_inputs[key] = component_spec.ChannelParameter(
            type=artifact_type, optional=(key in arg_defaults))
    for key, artifact_type in outputs.items():
        assert key not in arg_defaults, 'Optional outputs are not supported.'
        spec_outputs[key] = component_spec.ChannelParameter(type=artifact_type)
    for key, primitive_type in parameters.items():
        spec_parameters[key] = component_spec.ExecutionParameter(
            type=primitive_type, optional=(key in arg_defaults))
    component_spec_class = type(
        '%s_Spec' % func.__name__, (tfx_types.ComponentSpec, ), {
            'INPUTS': spec_inputs,
            'OUTPUTS': spec_outputs,
            'PARAMETERS': spec_parameters,
        })

    executor_class = type(
        '%s_Executor' % func.__name__,
        (_FunctionExecutor, ),
        {
            '_ARG_FORMATS': arg_formats,
            '_ARG_DEFAULTS': arg_defaults,
            # The function needs to be marked with `staticmethod` so that later
            # references of `self._FUNCTION` do not result in a bound method (i.e.
            # one with `self` as its first parameter).
            '_FUNCTION': staticmethod(func),
            '_RETURNED_VALUES': returned_values,
            '__module__': func.__module__,
        })

    # Expose the generated executor class in the same module as the decorated
    # function. This is needed so that the executor class can be accessed at the
    # proper module path. One place this is needed is in the Dill pickler used by
    # Apache Beam serialization.
    module = sys.modules[func.__module__]
    setattr(module, '%s_Executor' % func.__name__, executor_class)

    executor_spec_instance = executor_spec.ExecutorClassSpec(
        executor_class=executor_class)

    return type(
        func.__name__, (_SimpleComponent, ), {
            'SPEC_CLASS': component_spec_class,
            'EXECUTOR_SPEC': executor_spec_instance,
            '__module__': func.__module__,
        })
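A hedged sketch of inspecting what the decorator builds for the docstring's MyTrainerComponent: the result should be a BaseComponent subclass whose generated SPEC_CLASS mirrors the typehint annotations (the import path may differ across TFX versions).

from tfx.components.base import base_component

assert issubclass(MyTrainerComponent, base_component.BaseComponent)
spec_cls = MyTrainerComponent.SPEC_CLASS
# Expected, approximately: INPUTS cover 'training_data' and
# 'dropout_hyperparameter'; OUTPUTS cover 'model', 'loss' and 'accuracy';
# PARAMETERS cover 'num_iterations'.
print(sorted(spec_cls.INPUTS), sorted(spec_cls.OUTPUTS), sorted(spec_cls.PARAMETERS))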
Example #13
def create_ai_platform_training(
    name: Text,
    project_id: Text,
    region: Optional[Text] = None,
    job_id: Optional[Text] = None,
    image_uri: Optional[Text] = None,
    args: Optional[List[placeholders.CommandlineArgumentType]] = None,
    # TODO(jxzheng): support Python training spec
    scale_tier: Optional[Text] = None,
    training_input: Optional[Dict[Text, Any]] = None,
    labels: Optional[Dict[Text, Text]] = None,
    inputs: Dict[Text, Any] = None,
    outputs: Dict[Text, Any] = None,
    parameters: Dict[Text, Any] = None,
) -> base_component.BaseComponent:
    """Creates a pipeline step that launches a AIP training job.

  The generated TFX component will have a component spec specified dynamically,
  through inputs/outputs/parameters in the following format:
  - inputs: A mapping from input name to the upstream channel connected. The
      artifact type of the channel will be automatically inferred.
  - outputs: A mapping from output name to the associated artifact type.
  - parameters: A mapping from execution property names to their associated values.
      Only primitive typed values are supported. Note that RuntimeParameter is
      not supported yet.

  For example:

  create_ai_platform_training(
    ...
    inputs={
        # Assuming there is an upstream node example_gen, with an output
        # 'examples' of the type Examples.
        'examples': example_gen.outputs['examples'],
    },
    outputs={
        'model': standard_artifacts.Model,
    },
    parameters={
        'n_steps': 100,
        'optimizer': 'sgd',
    },
    ...
  )

  will generate a component instance with a component spec equivalent to:

  class MyComponentSpec(ComponentSpec):
    INPUTS = {
        'examples': ChannelParameter(type=standard_artifacts.Examples)
    }
    OUTPUTS = {
        'model': ChannelParameter(type=standard_artifacts.Model)
    }
    PARAMETERS = {
        'n_steps': ExecutionParameter(type=int),
        'optimizer': ExecutionParameter(type=str)
    }

  with its input 'examples' connected to the example_gen output, and its
  execution properties set to 100 and 'sgd' respectively.

  Example usage of the component:
    # A single node training job.
    my_train = create_ai_platform_training(
        name='my_training_step',
        project_id='my-project',
        region='us-central1',
        image_uri='gcr.io/my-project/caip-training-test:latest',
        args=[
            '--examples',
            placeholders.InputUriPlaceholder('examples'),
            '--n-steps',
            placeholders.InputValuePlaceholder('n_step'),
            '--output-location',
            placeholders.OutputUriPlaceholder('model')
        ],
        scale_tier='BASIC_GPU',
        inputs={'examples': example_gen.outputs['examples']},
        outputs={
            'model': standard_artifacts.Model
        },
        parameters={'n_step': 100}
    )

    # More complex setting can be expressed by providing training_input
    # directly.
    my_distributed_train = create_ai_platform_training(
        name='my_training_step',
        project_id='my-project',
        training_input={
            'scaleTier':
                'CUSTOM',
            'region':
                'us-central1',
            'masterType': 'n1-standard-8',
            'masterConfig': {
                'imageUri': 'gcr.io/my-project/my-dist-training:latest'
            },
            'workerType': 'n1-standard-8',
            'workerCount': 8,
            'workerConfig': {
                'imageUri': 'gcr.io/my-project/my-dist-training:latest'
            },
            'args': [
                '--examples',
                placeholders.InputUriPlaceholder('examples'),
                '--n-steps',
                placeholders.InputValuePlaceholder('n_step'),
                '--output-location',
                placeholders.OutputUriPlaceholder('model')
            ]
        },
        inputs={'examples': example_gen.outputs['examples']},
        outputs={'model': Model},
        parameters={'n_step': 100}
    )

  Args:
    name: name of the component. This is needed to construct the component spec
      and component class dynamically as well.
    project_id: the GCP project under which the AIP training job will be
      running.
    region: GCE region where the AIP training job will be running.
    job_id: the unique ID of the job. Defaults to 'tfx_%Y%m%d%H%M%S'.
    image_uri: the GCR location of the container image, which will be used to
      execute the training program. If the same field is specified in
      training_input, the latter overrides image_uri.
    args: command line arguments that will be passed into the training program.
      Users can use placeholder semantics as in
      tfx.dsl.component.experimental.container_component to wire the args with
      component inputs/outputs/parameters.
    scale_tier: Cloud ML resource requested by the job. See
      https://cloud.google.com/ai-platform/training/docs/reference/rest/v1/projects.jobs#ScaleTier
    training_input: full training job spec. This field overrides other
      specifications if applicable. This field follows the
      [TrainingInput](https://cloud.google.com/ai-platform/training/docs/reference/rest/v1/projects.jobs#traininginput)
        schema.
    labels: user-specified label attached to the job.
    inputs: the dict of component inputs.
    outputs: the dict of component outputs.
    parameters: the dict of component parameters, aka, execution properties.

  Returns:
    A component instance that represents the AIP job in the DSL.

  Raises:
    ValueError: when image_uri is missing and masterConfig is not specified in
      training_input, or when region is missing and training_input
      does not provide region either.
    TypeError: when non-primitive parameters are specified.
  """
    training_input = training_input or {}

    if scale_tier and not training_input.get('scaleTier'):
        training_input['scaleTier'] = scale_tier

    if not training_input.get('masterConfig'):
        # If no replica config is specified, create a default one.
        if not image_uri:
            raise ValueError('image_uri is required when masterConfig is not '
                             'explicitly specified in training_input.')
        training_input['masterConfig'] = {'imageUri': image_uri}
        # Note: A custom entrypoint can be set to training_input['masterConfig']
        # through key 'container_command'.

    training_input['args'] = args

    if not training_input.get('region'):
        if not region:
            raise ValueError('region is required when it is not set in '
                             'training_input.')
        training_input['region'] = region

    # Squash training_input, project, job_id, and labels into an exec property
    # namely 'aip_training_config'.
    aip_training_config = {
        ai_platform_training_executor.PROJECT_CONFIG_KEY: project_id,
        ai_platform_training_executor.TRAINING_INPUT_CONFIG_KEY:
        training_input,
        ai_platform_training_executor.JOB_ID_CONFIG_KEY: job_id,
        ai_platform_training_executor.LABELS_CONFIG_KEY: labels,
    }

    aip_training_config_str = json_utils.dumps(aip_training_config)

    # Construct the component spec.
    if inputs is None:
        inputs = {}
    if outputs is None:
        outputs = {}
    if parameters is None:
        parameters = {}

    input_channel_parameters = {}
    output_channel_parameters = {}
    output_channels = {}
    execution_parameters = {
        ai_platform_training_executor.CONFIG_KEY:
        component_spec.ExecutionParameter(type=(str, Text))
    }

    for input_name, single_channel in inputs.items():
        # Infer the type of input channels based on the channels passed in.
        # TODO(b/155804245) Sanitize the names so that they're valid python names
        input_channel_parameters[input_name] = (
            component_spec.ChannelParameter(type=single_channel.type))

    for output_name, channel_type in outputs.items():
        # TODO(b/155804245) Sanitize the names so that they're valid python names
        output_channel_parameters[output_name] = (
            component_spec.ChannelParameter(type=channel_type))
        artifact = channel_type()
        channel = channel_utils.as_channel([artifact])
        output_channels[output_name] = channel

    # TODO(jxzheng): Support RuntimeParameter as parameters.
    for param_name, single_parameter in parameters.items():
        # Infer the type of parameters based on the parameters passed in.
        # TODO(b/155804245) Sanitize the names so that they're valid python names
        if not isinstance(single_parameter, (int, float, Text, bytes)):
            raise TypeError(
                'Parameter can only be int/float/str/bytes, got {}'.format(
                    type(single_parameter)))
        execution_parameters[param_name] = (component_spec.ExecutionParameter(
            type=type(single_parameter)))

    default_init_args = {
        **inputs,
        **output_channels,
        **parameters,
        ai_platform_training_executor.CONFIG_KEY: aip_training_config_str,
    }

    tfx_component_class = component_utils.create_tfx_component_class(
        name=name,
        tfx_executor_spec=executor_spec.ExecutorClassSpec(
            ai_platform_training_executor.AiPlatformTrainingExecutor),
        input_channel_parameters=input_channel_parameters,
        output_channel_parameters=output_channel_parameters,
        execution_parameters=execution_parameters,
        default_init_args=default_init_args)

    return tfx_component_class()
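A hedged wiring sketch: since the function returns an already-instantiated component, the result is added to a pipeline like any other node. my_train and example_gen refer to the docstring example above; the pipeline arguments are hypothetical.

from tfx.orchestration import pipeline

p = pipeline.Pipeline(
    pipeline_name='aip_training_pipeline',
    pipeline_root='gs://my-bucket/pipeline_root',  # hypothetical location
    components=[example_gen, my_train],
)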
Example #14
def create_container_component(
    name: Text,
    image: Text,
    command: List[executor_specs.CommandlineArgumentType],
    inputs: Dict[Text, Any] = None,
    outputs: Dict[Text, Any] = None,
    parameters: Dict[Text, Any] = None,
) -> Callable[..., base_component.BaseComponent]:
    """Creates a container-based component.

  Args:
    name: The name of the component.
    image: Container image name.
    command: Container entrypoint command-line. Not executed within a shell.
      The command-line can use placeholder objects that will be replaced at
      compilation time. The placeholder objects can be imported from
      tfx.dsl.component.experimental.placeholders.
      Note that Jinja templates are not supported.
    inputs: A dict from input name to artifact type (the component inputs).
    outputs: A dict from output name to artifact type (the component outputs).
    parameters: A dict from parameter name to primitive type (the component
      parameters).

  Returns:
    A component class that can be instantiated and used inside a pipeline.

  Example:

    component = create_container_component(
        name='TrainModel',
        inputs={
            'training_data': Dataset,
        },
        outputs={
            'model': Model,
        },
        parameters={
            'num_training_steps': int,
        },
        image='gcr.io/my-project/my-trainer',
        command=[
            'python3', 'my_trainer',
            '--training_data_uri', InputUriPlaceholder('training_data'),
            '--model_uri', OutputUriPlaceholder('model'),
            '--num_training_steps', InputValuePlaceholder('num_training_steps'),
        ]
    )
  """
    if not name:
        raise ValueError('Component name cannot be empty.')

    if inputs is None:
        inputs = {}
    if outputs is None:
        outputs = {}
    if parameters is None:
        parameters = {}

    input_channel_parameters = {}
    output_channel_parameters = {}
    output_channels = {}
    execution_parameters = {}

    for input_name, channel_type in inputs.items():
        # TODO(b/155804245) Sanitize the names so that they're valid python names
        input_channel_parameters[input_name] = (
            component_spec.ChannelParameter(type=channel_type, ))

    for output_name, channel_type in outputs.items():
        # TODO(b/155804245) Sanitize the names so that they're valid python names
        output_channel_parameters[output_name] = (
            component_spec.ChannelParameter(type=channel_type))
        artifact = channel_type()
        channel = channel_utils.as_channel([artifact])
        output_channels[output_name] = channel

    for param_name, parameter_type in parameters.items():
        # TODO(b/155804245) Sanitize the names so that they're valid python names

        execution_parameters[param_name] = (component_spec.ExecutionParameter(
            type=parameter_type))

    tfx_component_spec_class = type(
        # Need str() for Python 2 compatibility.
        str(name) + 'Spec',
        (component_spec.ComponentSpec, ),
        dict(
            PARAMETERS=execution_parameters,
            INPUTS=input_channel_parameters,
            OUTPUTS=output_channel_parameters,
        ),
    )

    def tfx_component_class_init(self, **kwargs):
        instance_name = kwargs.pop('instance_name', None)
        arguments = {}
        arguments.update(output_channels)
        arguments.update(kwargs)

        base_component.BaseComponent.__init__(
            self,
            spec=self.__class__.SPEC_CLASS(**arguments),
            instance_name=instance_name,
        )

    tfx_component_class = type(
        # Need str() for Python 2 compatibility.
        str(name),
        (base_component.BaseComponent, ),
        dict(
            SPEC_CLASS=tfx_component_spec_class,
            EXECUTOR_SPEC=executor_specs.TemplatedExecutorContainerSpec(
                image=image,
                command=command,
            ),
            __init__=tfx_component_class_init,
        ),
    )
    return tfx_component_class
Example #15
class _FakeComponentSpecF(types.ComponentSpec):
    PARAMETERS = {'a': component_spec.ExecutionParameter(type=str)}
    INPUTS = {}
    OUTPUTS = {}