Example #1
    def testSingleNodeCustomContainer(self):
        self.maxDiff = None
        expected_custom_job_spec = {
            'name': 'my-custom-job',
            'jobSpec': {
                'workerPoolSpecs': [{
                    'machineSpec': {
                        'machineType': 'n1-standard-4'
                    },
                    'replicaCount': '1',
                    'containerSpec': {
                        'imageUri':
                        'my_image:latest',
                        'command': ['python', 'entrypoint.py'],
                        'args': [
                            '--input_path',
                            "{{$.inputs.artifacts['examples'].uri}}",
                            '--output_path',
                            "{{$.outputs.artifacts['model'].uri}}",
                            '--optimizer',
                            "{{$.inputs.parameters['optimizer']}}",
                            '--output_param_path',
                            "{{$.outputs.parameters['out_param'].output_file}}"
                        ]
                    }
                }]
            }
        }

        task = aiplatform.custom_job(
            name='my-custom-job',
            input_artifacts={
                'examples':
                dsl.PipelineParam(name='output',
                                  op_name='ingestor',
                                  param_type='Dataset')
            },
            input_parameters={'optimizer': 'sgd'},
            output_artifacts={'model': ontology_artifacts.Model},
            output_parameters={'out_param': str},
            image_uri='my_image:latest',
            commands=['python', 'entrypoint.py'],
            args=[
                '--input_path',
                structures.InputUriPlaceholder('examples'), '--output_path',
                structures.OutputUriPlaceholder('model'), '--optimizer',
                structures.InputValuePlaceholder('optimizer'),
                '--output_param_path',
                structures.OutputPathPlaceholder('out_param')
            ])
        self.assertDictEqual(expected_custom_job_spec, task.custom_job_spec)
        self.assertDictEqual(_EXPECTED_COMPONENT_SPEC,
                             json_format.MessageToDict(task.component_spec))
        self.assertDictEqual(_EXPECTED_TASK_SPEC,
                             json_format.MessageToDict(task.task_spec))
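
For reference, this is how each placeholder in the args list renders in the
expected job spec above (the mapping is read directly from this test's
expected dict, not from a separate API reference):

# structures.InputUriPlaceholder('examples')    -> "{{$.inputs.artifacts['examples'].uri}}"
# structures.OutputUriPlaceholder('model')      -> "{{$.outputs.artifacts['model'].uri}}"
# structures.InputValuePlaceholder('optimizer') -> "{{$.inputs.parameters['optimizer']}}"
# structures.OutputPathPlaceholder('out_param') -> "{{$.outputs.parameters['out_param'].output_file}}"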
Example #2
def _transform_arg(arg: Union[str, BasePlaceholder]) -> Any:
    if isinstance(arg, str):
        return arg
    elif isinstance(arg, InputValuePlaceholder):
        return v1_components.InputValuePlaceholder(arg.name)
    elif isinstance(arg, InputPathPlaceholder):
        return v1_components.InputPathPlaceholder(arg.name)
    elif isinstance(arg, InputUriPlaceholder):
        return v1_components.InputUriPlaceholder(arg.name)
    elif isinstance(arg, OutputPathPlaceholder):
        return v1_components.OutputPathPlaceholder(arg.name)
    elif isinstance(arg, OutputUriPlaceholder):
        return v1_components.OutputUriPlaceholder(arg.name)
    else:
        # TODO(chensun): transform additional placeholders: if, concat, etc.?
        raise ValueError(
            f'Unexpected command/argument type: "{arg}" of type "{type(arg)}".'
        )
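
A minimal usage sketch, assuming the v2 placeholder classes referenced above
are already in scope (no particular import path is implied):

# Convert a mixed v2-style command line into its v1 equivalent.
v2_args = [
    '--model_path',
    InputUriPlaceholder('model'),       # v2 placeholder for an input URI
    '--metrics_path',
    OutputPathPlaceholder('metrics'),   # v2 placeholder for an output path
]
v1_args = [_transform_arg(arg) for arg in v2_args]
# Plain strings pass through unchanged; each placeholder is mapped to its
# v1_components counterpart carrying the same name.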
Example #3
def testScaffoldProgramToSpecs(self):
    expected_custom_job_spec = {
        'name': 'my-custom-job',
        'jobSpec': {
            "workerPoolSpecs": [{
                "replicaCount": 1,
                "machineSpec": {
                    "machineType": "n1-standard-4"
                },
                "containerSpec": {
                    "imageUri":
                    "my_image:latest",
                    "command": ["python", "entrypoint.py"],
                    "args": [
                        "--input_path",
                        "{{$.inputs.artifacts['examples'].uri}}",
                        "--output_path",
                        "{{$.outputs.artifacts['model'].uri}}",
                        "--optimizer",
                        "{{$.inputs.parameters['optimizer']}}",
                        "--output_param_path",
                        "{{$.outputs.parameters['out_param'].output_file}}"
                    ]
                }
            }, {
                "replicaCount": 4,
                "containerSpec": {
                    "imageUri": "gcr.io/my-project/my-worker-image:latest",
                    "command": ["python3", "override_entrypoint.py"],
                    "args": ["--arg1", "param1"]
                },
                "machineSpec": {
                    "machineType": "n1-standard-8",
                    "acceleratorType": "NVIDIA_TESLA_K80",
                    "acceleratorCount": 1
                }
            }]
        }
    }
    task = aiplatform.custom_job(
        name='my-custom-job',
        input_artifacts={
            'examples':
            dsl.PipelineParam(name='output',
                              op_name='ingestor',
                              param_type='Dataset')
        },
        input_parameters={'optimizer': 'sgd'},
        output_artifacts={'model': ontology_artifacts.Model},
        output_parameters={'out_param': str},
        image_uri='my_image:latest',
        commands=['python', 'entrypoint.py'],
        args=[
            '--input_path',
            structures.InputUriPlaceholder('examples'), '--output_path',
            structures.OutputUriPlaceholder('model'), '--optimizer',
            structures.InputValuePlaceholder('optimizer'),
            '--output_param_path',
            structures.OutputPathPlaceholder('out_param')
        ],
        additional_job_spec={
            'workerPoolSpecs': [
                {
                    'replicaCount': 1,
                    'machineSpec': {
                        'machineType': 'n1-standard-4'
                    }
                },
                {
                    'replicaCount': 4,
                    'containerSpec': {
                        'imageUri':
                        'gcr.io/my-project/my-worker-image:latest',
                        'command': ['python3', 'override_entrypoint.py'],
                        'args': ['--arg1', 'param1']
                    },
                    # Optionally one can also attach accelerators.
                    'machineSpec': {
                        'machineType': 'n1-standard-8',
                        'acceleratorType': 'NVIDIA_TESLA_K80',
                        'acceleratorCount': 1
                    }
                }
            ]
        })
    self.assertDictEqual(expected_custom_job_spec, task.custom_job_spec)
    self.assertDictEqual(_EXPECTED_COMPONENT_SPEC,
                         json_format.MessageToDict(task.component_spec))
    self.assertDictEqual(_EXPECTED_TASK_SPEC,
                         json_format.MessageToDict(task.task_spec))
Example #4
def build_python_component(
    component_func: Callable,
    target_image: str,
    base_image: Optional[str] = None,
    dependency: Optional[List[VersionedDependency]] = None,
    staging_gcs_path: Optional[str] = None,
    timeout: int = 600,
    namespace: Optional[str] = None,
    target_component_file: Optional[str] = None,
    is_v2: bool = False
):
  """build_component automatically builds a container image for the
  component_func based on the base_image and pushes to the target_image.

  Args:
    component_func (python function): The python function to build components
      upon.
    base_image (str): Docker image to use as a base image.
    target_image (str): Full URI to push the target image.
    staging_gcs_path (str): GCS blob that can store temporary build files.
    target_image (str): The target image path.
    timeout (int): The timeout for the image build(in secs), default is 600
      seconds.
    namespace (str): The namespace within which to run the kubernetes Kaniko
      job. If the job is running on GKE and value is None the underlying
      functions will use the default namespace from GKE.
    dependency (list): The list of VersionedDependency, which includes the
      package name and versions, default is empty.
    target_component_file (str): The path to save the generated component YAML
      spec.
    is_v2: Whether or not generating a v2 KFP component, default
      is false.

  Raises:
    ValueError: The function is not decorated with python_component decorator or
      the python_version is neither python2 nor python3
  """

  _configure_logger(logging.getLogger())

  if component_func is None:
    raise ValueError('component_func must not be None')
  if target_image is None:
    raise ValueError('target_image must not be None')

  if staging_gcs_path is None:
    raise ValueError('staging_gcs_path must not be None')

  if base_image is None:
    base_image = getattr(component_func, '_component_base_image', None)
  if base_image is None:
    from ..components._python_op import default_base_image_or_builder
    base_image = default_base_image_or_builder
    if isinstance(base_image, Callable):
      base_image = base_image()
  if not dependency:
    dependency = []

  logging.info('Build an image that is based on %s and push the image to %s',
               base_image, target_image)

  component_spec = _func_to_component_spec(
      component_func, base_image=base_image)

  if is_v2:
    # TODO: Remove this warning once we make v2 component compatible with KFP
    # v1 stack.
    logging.warning('Currently V2 component is only compatible with v2 KFP.')
    # Annotate the component to be a V2 one.
    if not component_spec.metadata:
      component_spec.metadata = _structures.MetadataSpec()
    if not component_spec.metadata.annotations:
      component_spec.metadata.annotations = {}
    component_spec.metadata.annotations[V2_COMPONENT_ANNOTATION] = 'true'

  command_line_args = component_spec.implementation.container.command

  # The relative path to put the Python program code.
  program_path = 'ml/main.py'
  # The relative path used when building a V2 component.
  v2_entrypoint_path = None
  # Python program code extracted from the component spec.
  program_code = None

  if is_v2:

    program_code = _purge_program_launching_code(
        commands=command_line_args,
        is_v2=True)

    # Override user program args for the new-style component.
    # TODO: The actual program args will be changed after we support v2
    # component on KFP.
    program_args = []
    for component_input in component_spec.inputs or []:
      if component_input._passing_style == components.InputArtifact:
        # For each input artifact, there will be three arguments passed to
        # the user program:
        # 1. {name of the artifact}_input_path: The actual path, or uri, of the
        #    input artifact.
        # 2. {name of the artifact}_input_pod_name: The pod ID of the producer.
        # 3. {name of the artifact}_input_output_name: The output name of the
        #    artifact, by which the artifact can be found in the producer
        #    metadata JSON file.
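        # For example, an input artifact named `examples` yields flags of the
        # form --examples<INPUT_URI_SUFFIX>, --examples<PRODUCER_POD_ID_SUFFIX>
        # and --examples<OUTPUT_NAME_SUFFIX>, where the suffix constants are
        # defined in the entrypoint module.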
        program_args.append('--{}{}'.format(
            component_input.name,
            entrypoint.INPUT_URI_SUFFIX
        ))
        program_args.append(
            _structures.InputUriPlaceholder(
                input_name=component_input.name))
        program_args.append('--{}{}'.format(
            component_input.name,
            entrypoint.PRODUCER_POD_ID_SUFFIX
        ))
        program_args.append(
            '{{{{inputs.parameters.{input}}}}}'.format(
                input=_components.PRODUCER_POD_NAME_PARAMETER.format(
                    component_input.name)))
        # TODO(numerology): Consider removing the need of output name
        # placeholder by letting v2 component output two metadata files per
        # output.
        program_args.append('--{}{}'.format(
            component_input.name,
            entrypoint.OUTPUT_NAME_SUFFIX
        ))
        program_args.append(_structures.InputOutputPortNamePlaceholder(
            input_name=component_input.name))

      elif component_input._passing_style is None:
        program_args.append('--{}{}'.format(
            component_input.name,
            entrypoint.ARGO_PARAM_SUFFIX
        ))
        program_args.append(_structures.InputValuePlaceholder(
            input_name=component_input.name))
      else:
        raise TypeError(
            'Only Input/OutputArtifact and parameter annotations '
            'are supported in V2 components. '
            'Got %s' % component_input._passing_style)

    for component_output in component_spec.outputs or []:
      if component_output._passing_style == components.OutputArtifact:
        # For each output artifact, there'll be one argument passed to
        # the user program:
        # - {name of the artifact}_output_path: The actual path, or uri, of the
        #    output artifact.
        program_args.append('--{}{}'.format(
            component_output.name,
            entrypoint.OUTPUT_ARTIFACT_PATH_SUFFIX
        ))
        program_args.append(
            _structures.OutputUriPlaceholder(
                output_name=component_output.name))
      elif component_output._passing_style is not None:
        raise TypeError(
            'Only Input/OutputArtifact and parameter annotations '
            'are supported in V2 components. '
            'Got %s' % component_output._passing_style)

    program_args.append('--pipeline_context')
    program_args.append(dsl.RUN_ID_PLACEHOLDER)
    program_args.append('--{}'.format(entrypoint.FN_NAME_ARG))
    program_args.append(component_func.__name__)

    component_spec.implementation.container.args = program_args
  else:
    program_code = _purge_program_launching_code(
        commands=command_line_args,
        entrypoint_container_path='/' + program_path)

  arc_docker_filename = 'Dockerfile'
  arc_requirement_filename = 'requirements.txt'

  with tempfile.TemporaryDirectory() as local_build_dir:
    # Write the program code to a file in the context directory
    local_python_filepath = os.path.join(local_build_dir, program_path)
    os.makedirs(os.path.dirname(local_python_filepath), exist_ok=True)

    with open(local_python_filepath, 'w') as f:
      f.write(program_code)

    # Generate the python package requirements file in the context directory
    local_requirement_filepath = os.path.join(local_build_dir, arc_requirement_filename)
    if is_v2:
      # For v2 components, the KFP SDK is expected to be packaged in the container.
      dependency.append(VersionedDependency(name='kfp', min_version='1.4.0'))

    _dependency_to_requirements(dependency, local_requirement_filepath)

    # Generate Dockerfile in the context directory
    local_docker_filepath = os.path.join(local_build_dir, arc_docker_filename)
    add_files = {program_path: '/' + program_path}

    _generate_dockerfile(
        local_docker_filepath, base_image,
        arc_requirement_filename,
        add_files=add_files)

    logging.info('Building and pushing container image.')
    container_builder = ContainerBuilder(staging_gcs_path, target_image, namespace)
    image_name_with_digest = container_builder.build(local_build_dir, arc_docker_filename, target_image, timeout)

  component_spec.implementation.container.image = image_name_with_digest

  # Optionally write the component definition to a local file for sharing.
  target_component_file = target_component_file or getattr(component_func, '_component_target_component_file', None)
  if target_component_file:
    component_spec.save(target_component_file)

  task_factory_function = _create_task_factory_from_component_spec(component_spec)
  return task_factory_function
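
A minimal usage sketch of build_python_component (the `add` function, image
URIs, and GCS path below are hypothetical, for illustration only):

def add(a: float, b: float) -> float:
  """Returns the sum of the two arguments."""
  return a + b

# Build and push the component image, then get back a task factory that can
# be used inside a pipeline definition.
add_op = build_python_component(
    component_func=add,
    target_image='gcr.io/my-project/add-component:latest',
    base_image='python:3.7',
    staging_gcs_path='gs://my-bucket/build-staging',
    target_component_file='add_component.yaml')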