def test_use_azure_secret(self):
    op1 = ContainerOp(name='op1', image='image')
    op1 = op1.apply(use_azure_secret('foo'))

    assert len(op1.container.env) == 4

    index = 0
    for expected in [
            'AZ_SUBSCRIPTION_ID', 'AZ_TENANT_ID', 'AZ_CLIENT_ID',
            'AZ_CLIENT_SECRET'
    ]:
        assert op1.container.env[index].name == expected
        assert op1.container.env[
            index].value_from.secret_key_ref.name == 'foo'
        assert op1.container.env[
            index].value_from.secret_key_ref.key == expected
        index += 1
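# Illustrative usage sketch (not part of the test suite): applying
# use_azure_secret inside a pipeline definition. The pipeline and secret
# names here are placeholders; the Kubernetes secret is expected to carry
# the four AZ_* keys checked in the test above.
import kfp.dsl as dsl
from kfp.azure import use_azure_secret

@dsl.pipeline(name='azure-secret-example')
def azure_pipeline():
    op = dsl.ContainerOp(name='op', image='image')
    op.apply(use_azure_secret('azcreds'))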
def test_use_set_volume_use_secret(self):
    op1 = ContainerOp(name="op1", image="image")
    secret_name = "my-secret"
    secret_path = "/here/are/my/secret"
    op1 = op1.apply(
        use_secret(secret_name=secret_name,
                   secret_volume_mount_path=secret_path))
    self.assertIsNone(op1.container.env)

    container_dict = op1.container.to_dict()
    volume_mounts = container_dict["volume_mounts"][0]
    self.assertIsInstance(volume_mounts, dict)
    self.assertEqual(volume_mounts["mount_path"], secret_path)
def test_use_aws_secret(self):
    op1 = ContainerOp(name='op1', image='image')
    op1 = op1.apply(use_aws_secret('myaws-secret', 'key_id', 'access_key'))

    assert len(op1.container.env) == 2

    index = 0
    for expected_name, expected_key in [('AWS_ACCESS_KEY_ID', 'key_id'),
                                        ('AWS_SECRET_ACCESS_KEY',
                                         'access_key')]:
        assert op1.container.env[index].name == expected_name
        assert op1.container.env[
            index].value_from.secret_key_ref.name == 'myaws-secret'
        assert op1.container.env[
            index].value_from.secret_key_ref.key == expected_key
        index += 1
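# Illustrative usage sketch (names are placeholders): use_aws_secret maps
# the secret's 'key_id' and 'access_key' data keys onto the standard
# AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY environment variables checked
# in the test above.
import kfp.dsl as dsl
from kfp.aws import use_aws_secret

@dsl.pipeline(name='aws-secret-example')
def aws_pipeline():
    op = dsl.ContainerOp(name='op', image='image')
    op.apply(use_aws_secret('myaws-secret', 'key_id', 'access_key'))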
def test_use_aws_secret(self):
    with Pipeline('somename') as p:
        op1 = ContainerOp(name='op1', image='image')
        op1 = op1.apply(
            use_aws_secret('myaws-secret', 'key_id', 'access_key'))

        assert len(op1.env_variables) == 2

        index = 0
        for expected_name, expected_key in [('AWS_ACCESS_KEY_ID', 'key_id'),
                                            ('AWS_SECRET_ACCESS_KEY',
                                             'access_key')]:
            assert op1.env_variables[index].name == expected_name
            assert op1.env_variables[
                index].value_from.secret_key_ref.name == 'myaws-secret'
            assert op1.env_variables[
                index].value_from.secret_key_ref.key == expected_key
            index += 1
def test_use_azure_secret(self):
    with Pipeline('somename') as p:
        op1 = ContainerOp(name='op1', image='image')
        op1 = op1.apply(use_azure_secret('azcreds'))

        assert len(op1.env_variables) == 4

        index = 0
        for expected in [
                'AZ_SUBSCRIPTION_ID', 'AZ_TENANT_ID', 'AZ_CLIENT_ID',
                'AZ_CLIENT_SECRET'
        ]:
            assert op1.env_variables[index].name == expected
            assert op1.env_variables[
                index].value_from.secret_key_ref.name == 'azcreds'
            assert op1.env_variables[
                index].value_from.secret_key_ref.key == expected
            index += 1
def test_use_set_env_use_secret(self):
    op1 = ContainerOp(name="op1", image="image")
    secret_name = "my-secret"
    secret_path = "/here/are/my/secret/"
    env_variable = "MY_SECRET"
    secret_file_path_in_volume = "secret.json"
    op1 = op1.apply(
        use_secret(secret_name=secret_name,
                   secret_volume_mount_path=secret_path,
                   env_variable=env_variable,
                   secret_file_path_in_volume=secret_file_path_in_volume))
    self.assertEqual(len(op1.container.env), 1)

    container_dict = op1.container.to_dict()
    volume_mounts = container_dict["volume_mounts"][0]
    self.assertIsInstance(volume_mounts, dict)
    self.assertEqual(volume_mounts["mount_path"], secret_path)

    env_dict = op1.container.env[0].to_dict()
    self.assertEqual(env_dict["name"], env_variable)
    self.assertEqual(env_dict["value"],
                     secret_path + secret_file_path_in_volume)
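# Illustrative usage sketch for kfp.onprem.use_secret (all names are
# placeholders): mount a secret as a volume and optionally expose the path
# of one file inside it through an environment variable, the two behaviors
# exercised by the tests above.
import kfp.dsl as dsl
from kfp.onprem import use_secret

@dsl.pipeline(name='onprem-secret-example')
def onprem_pipeline():
    op = dsl.ContainerOp(name='op', image='image')
    op.apply(
        use_secret(secret_name='my-secret',
                   secret_volume_mount_path='/here/are/my/secret/',
                   env_variable='MY_SECRET',
                   secret_file_path_in_volume='secret.json'))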
def update_op(op: dsl.ContainerOp,
              pipeline_name: dsl.PipelineParam,
              pipeline_root: dsl.PipelineParam,
              launcher_image: Optional[str] = None) -> None:
    """Updates the passed-in Op for running in v2-compatible mode.

    Args:
        op: The Op to update.
        pipeline_name: The name of the pipeline under which `op` runs.
        pipeline_root: The root output directory for pipeline artifacts.
        launcher_image: An optional launcher image. Useful for tests.
    """
    op.is_v2 = True
    # Inject the launcher binary and overwrite the entrypoint.
    image_name = launcher_image or _DEFAULT_LAUNCHER_IMAGE
    launcher_container = dsl.UserContainer(
        name="kfp-launcher",
        image=image_name,
        command=["launcher", "--copy", "/kfp-launcher/launch"],
        mirror_volume_mounts=True)

    op.add_init_container(launcher_container)
    op.add_volume(k8s_client.V1Volume(name='kfp-launcher'))
    op.add_volume_mount(
        k8s_client.V1VolumeMount(name='kfp-launcher',
                                 mount_path='/kfp-launcher'))

    # op.command + op.args will have the following sections
    # (an argv-splitting sketch follows this function):
    # 1. args passed to kfp-launcher
    # 2. a separator "--"
    # 3. parameters in format "key1=value1", "key2=value2", ...
    # 4. a separator "--" as end of arguments passed to launcher
    # 5. (start of op.args) arguments of the original user program command + args
    #
    # example:
    # - command:
    #   - /kfp-launcher/launch
    #   - '--mlmd_server_address'
    #   - $(METADATA_GRPC_SERVICE_HOST)
    #   - '--mlmd_server_port'
    #   - $(METADATA_GRPC_SERVICE_PORT)
    #   - ...  # more launcher params
    #   - '--pipeline_task_id'
    #   - $(KFP_POD_NAME)
    #   - '--pipeline_root'
    #   - ''
    #   - '--'  # start of parameter values
    #   - first=first
    #   - second=second
    #   - '--'  # start of user command and args
    #   args:
    #   - sh
    #   - '-ec'
    #   - |
    #     program_path=$(mktemp)
    #     printf "%s" "$0" > "$program_path"
    #     python3 -u "$program_path" "$@"
    #   - >
    #     import json
    #     import xxx
    #     ...
    op.command = [
        "/kfp-launcher/launch",
        "--mlmd_server_address",
        "$(METADATA_GRPC_SERVICE_HOST)",
        "--mlmd_server_port",
        "$(METADATA_GRPC_SERVICE_PORT)",
        "--runtime_info_json",
        "$(KFP_V2_RUNTIME_INFO)",
        "--container_image",
        "$(KFP_V2_IMAGE)",
        "--task_name",
        op.name,
        "--pipeline_name",
        pipeline_name,
        "--run_id",
        "$(KFP_RUN_ID)",
        "--run_resource",
        "workflows.argoproj.io/$(WORKFLOW_ID)",
        "--namespace",
        "$(KFP_NAMESPACE)",
        "--pod_name",
        "$(KFP_POD_NAME)",
        "--pod_uid",
        "$(KFP_POD_UID)",
        "--pipeline_root",
        pipeline_root,
        "--enable_caching",
        "$(ENABLE_CACHING)",
    ]

    # Mount necessary environment variables.
    op.apply(_default_transformers.add_kfp_pod_env)
    op.container.add_env_variable(
        k8s_client.V1EnvVar(name="KFP_V2_IMAGE", value=op.container.image))

    config_map_ref = k8s_client.V1ConfigMapEnvSource(
        name='metadata-grpc-configmap', optional=True)
    op.container.add_env_from(
        k8s_client.V1EnvFromSource(config_map_ref=config_map_ref))

    op.arguments = list(op.container_spec.command) + list(
        op.container_spec.args)

    runtime_info = {
        "inputParameters": collections.OrderedDict(),
        "inputArtifacts": collections.OrderedDict(),
        "outputParameters": collections.OrderedDict(),
        "outputArtifacts": collections.OrderedDict(),
    }

    op.command += ["--"]
    component_spec = op.component_spec
    for parameter, spec in sorted(
            component_spec.input_definitions.parameters.items()):
        parameter_info = {
            "type":
                pipeline_spec_pb2.PrimitiveType.PrimitiveTypeEnum.Name(
                    spec.type),
        }
        op.command += [f"{parameter}={op._parameter_arguments[parameter]}"]
        runtime_info["inputParameters"][parameter] = parameter_info
    op.command += ["--"]

    for artifact_name, spec in sorted(
            component_spec.input_definitions.artifacts.items()):
        artifact_info = {
            "metadataPath": op.input_artifact_paths[artifact_name],
            "schemaTitle": spec.artifact_type.schema_title,
            "instanceSchema": spec.artifact_type.instance_schema,
        }
        runtime_info["inputArtifacts"][artifact_name] = artifact_info

    for parameter, spec in sorted(
            component_spec.output_definitions.parameters.items()):
        parameter_info = {
            "type":
                pipeline_spec_pb2.PrimitiveType.PrimitiveTypeEnum.Name(
                    spec.type),
            "path": op.file_outputs[parameter],
        }
        runtime_info["outputParameters"][parameter] = parameter_info

    for artifact_name, spec in sorted(
            component_spec.output_definitions.artifacts.items()):
        # TODO: Assert instance_schema.
        artifact_info = {
            # Type used to register output artifacts.
            "schemaTitle": spec.artifact_type.schema_title,
            "instanceSchema": spec.artifact_type.instance_schema,
            # File used to write out the registered artifact ID.
            "metadataPath": op.file_outputs[artifact_name],
        }
        runtime_info["outputArtifacts"][artifact_name] = artifact_info

    op.container.add_env_variable(
        k8s_client.V1EnvVar(name="KFP_V2_RUNTIME_INFO",
                            value=json.dumps(runtime_info)))

    op.pod_annotations['pipelines.kubeflow.org/v2_component'] = "true"
    op.pod_labels['pipelines.kubeflow.org/v2_component'] = "true"
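# Minimal sketch (hypothetical helper, not part of the source): splitting
# the combined launcher argv built above back into its three sections,
# using the "--" separators documented in the comment inside update_op.
def split_launcher_argv(argv):
    first = argv.index("--")
    second = argv.index("--", first + 1)
    launcher_flags = argv[:first]
    # Parameters arrive as "key=value" strings between the two separators.
    parameters = dict(kv.split("=", 1) for kv in argv[first + 1:second])
    user_command = argv[second + 1:]
    return launcher_flags, parameters, user_command

# Example:
# split_launcher_argv(["/kfp-launcher/launch", "--task_name", "op1",
#                      "--", "first=first", "--", "sh", "-ec", "..."])
# -> (["/kfp-launcher/launch", "--task_name", "op1"],
#     {"first": "first"},
#     ["sh", "-ec", "..."])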
def update_op(op: dsl.ContainerOp,
              pipeline_name: dsl.PipelineParam,
              pipeline_root: dsl.PipelineParam,
              launcher_image: Optional[str] = None) -> None:
    """Updates the passed-in Op for running in v2-compatible mode.

    Args:
        op: The Op to update.
        pipeline_name: The name of the pipeline under which `op` runs.
        pipeline_root: The root output directory for pipeline artifacts.
        launcher_image: An optional launcher image. Useful for tests.
    """
    # Inject the launcher binary and overwrite the entrypoint.
    image_name = launcher_image or _DEFAULT_LAUNCHER_IMAGE
    launcher_container = dsl.UserContainer(name="kfp-launcher",
                                           image=image_name,
                                           command="/bin/mount_launcher.sh",
                                           mirror_volume_mounts=True)

    op.add_init_container(launcher_container)
    op.add_volume(k8s_client.V1Volume(name='kfp-launcher'))
    op.add_volume_mount(
        k8s_client.V1VolumeMount(name='kfp-launcher',
                                 mount_path='/kfp-launcher'))

    op.command = [
        "/kfp-launcher/launch",
        "--mlmd_server_address",
        "$(METADATA_GRPC_SERVICE_HOST)",
        "--mlmd_server_port",
        "$(METADATA_GRPC_SERVICE_PORT)",
        "--runtime_info_json",
        "$(KFP_V2_RUNTIME_INFO)",
        "--container_image",
        "$(KFP_V2_IMAGE)",
        "--task_name",
        op.name,
        "--pipeline_name",
        pipeline_name,
        "--pipeline_run_id",
        "$(WORKFLOW_ID)",
        "--pipeline_task_id",
        "$(KFP_POD_NAME)",
        "--pipeline_root",
        pipeline_root,
    ]

    # Mount necessary environment variables.
    op.apply(_default_transformers.add_kfp_pod_env)
    op.container.add_env_variable(
        k8s_client.V1EnvVar(name="KFP_V2_IMAGE", value=op.container.image))

    config_map_ref = k8s_client.V1ConfigMapEnvSource(
        name='metadata-grpc-configmap', optional=True)
    op.container.add_env_from(
        k8s_client.V1EnvFromSource(config_map_ref=config_map_ref))

    op.arguments = list(op.container_spec.command) + list(
        op.container_spec.args)

    runtime_info = {
        "inputParameters": collections.OrderedDict(),
        "inputArtifacts": collections.OrderedDict(),
        "outputParameters": collections.OrderedDict(),
        "outputArtifacts": collections.OrderedDict(),
    }

    component_spec = op.component_spec
    for parameter, spec in sorted(
            component_spec.input_definitions.parameters.items()):
        parameter_info = {
            "parameterType":
                pipeline_spec_pb2.PrimitiveType.PrimitiveTypeEnum.Name(
                    spec.type),
            "parameterValue": op._parameter_arguments[parameter],
        }
        runtime_info["inputParameters"][parameter] = parameter_info

    for artifact_name, spec in sorted(
            component_spec.input_definitions.artifacts.items()):
        artifact_info = {
            "fileInputPath": op.input_artifact_paths[artifact_name]
        }
        runtime_info["inputArtifacts"][artifact_name] = artifact_info

    for parameter, spec in sorted(
            component_spec.output_definitions.parameters.items()):
        parameter_info = {
            "parameterType":
                pipeline_spec_pb2.PrimitiveType.PrimitiveTypeEnum.Name(
                    spec.type),
            "fileOutputPath": op.file_outputs[parameter],
        }
        runtime_info["outputParameters"][parameter] = parameter_info

    for artifact_name, spec in sorted(
            component_spec.output_definitions.artifacts.items()):
        # TODO: Assert instance_schema.
        artifact_info = {
            # Type used to register output artifacts.
            "artifactSchema": spec.artifact_type.instance_schema,
            # File used to write out the registered artifact ID.
            "fileOutputPath": op.file_outputs[artifact_name],
        }
        runtime_info["outputArtifacts"][artifact_name] = artifact_info

    op.container.add_env_variable(
        k8s_client.V1EnvVar(name="KFP_V2_RUNTIME_INFO",
                            value=json.dumps(runtime_info)))

    op.pod_annotations['pipelines.kubeflow.org/v2_component'] = "true"
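# Illustrative sketch (all values are hypothetical, inferred from the loops
# above): the shape of the KFP_V2_RUNTIME_INFO payload this older variant
# would serialize for a component with one string input parameter, one input
# artifact, and one output artifact.
EXAMPLE_RUNTIME_INFO = {
    "inputParameters": {
        "text": {"parameterType": "STRING", "parameterValue": "hello"},
    },
    "inputArtifacts": {
        "dataset": {"fileInputPath": "/tmp/inputs/dataset/data"},
    },
    "outputParameters": {},
    "outputArtifacts": {
        "model": {
            # instance_schema of the artifact type (illustrative content).
            "artifactSchema": "title: kfp.Model\ntype: object",
            "fileOutputPath": "/tmp/outputs/model/data",
        },
    },
}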