def componentA(
        a: {'ArtifactA': {'file_type': 'csv'}},
        b: Integer() = 12,
        c: {'ArtifactB': {'path_type': 'file', 'file_type': 'tsv'}} = 'gs://hello/world'
) -> {'model': Integer()}:
    """Mock component whose parameters and output carry KFP DSL type annotations.

    Returns a MockContainerOp; the annotations (artifact dicts, Integer())
    exist to exercise the DSL type system, not to affect the op itself.
    """
    op = MockContainerOp()
    return op
def b_op(
        field_x: {'customized_type': {'openapi_schema_validator': '{"type": "string", "pattern": "^gcs://.*$"}'}},
        field_y: Integer(),
        field_z: GCSPath()
) -> {'output_model_uri': 'GcsUri'}:
    """Build the 'operator b' ContainerOp.

    field_x is used as the python3 entry script; field_y/field_z are passed
    through as command-line flags. The single declared output is read back
    from /schema.txt inside the container.
    """
    command = ['python3', field_x]
    arguments = ['--field-y', field_y, '--field-z', field_z]
    outputs = {'output_model_uri': '/schema.txt'}
    return ContainerOp(
        name='operator b',
        image='gcr.io/ml-pipeline/component-a',
        command=command,
        arguments=arguments,
        file_outputs=outputs,
    )
def b_op(
        field_x: {'customized_type_a': {'property_a': 'value_a', 'property_b': 'value_b'}},
        field_y: Integer(),
        field_z: {'ArtifactB': {'path_type': 'file', 'file_type': 'tsv'}}
) -> {'output_model_uri': 'GcsUri'}:
    """Variant of the 'operator b' ContainerOp using dict-style DSL types.

    Identical wiring to the validator-based variant: field_x is the python3
    entry script, field_y/field_z are CLI flags, and the declared output is
    collected from /schema.txt.
    """
    cmd = ['python3', field_x]
    cli_args = ['--field-y', field_y, '--field-z', field_z]
    return ContainerOp(
        name='operator b',
        image='gcr.io/ml-pipeline/component-a',
        command=cmd,
        arguments=cli_args,
        file_outputs={'output_model_uri': '/schema.txt'},
    )
def a_op(field_l: Integer()) -> {
        'field_m': {'ArtifactB': {'path_type': 'file', 'file_type': 'tsv'}},
        'field_n': {'customized_type': {'property_a': 'value_a', 'property_b': 'value_b'}},
        'field_o': 'Integer'}:
    """Build the 'operator a' ContainerOp with three typed file outputs.

    field_l is forwarded as the --field-l argument; each declared output is
    read from its own file inside the container.
    """
    file_outputs = {
        'field_m': '/schema.txt',
        'field_n': '/feature.txt',
        'field_o': '/output.txt',
    }
    return ContainerOp(
        name='operator a',
        image='gcr.io/ml-pipeline/component-b',
        arguments=['--field-l', field_l],
        file_outputs=file_outputs,
    )
def my_pipeline(
        a: {'GCSPath': {'path_type': 'file', 'file_type': 'csv'}} = 'good',
        b: Integer() = 12):
    """Pipeline that forwards its two inputs to a_op as field_m / field_o."""
    a_op(field_m=a, field_o=b)
def sl_segmentation_pipeline(
        python_train_path='/deephealth/use_case_pipeline/python/skin_lesion_segmentation_training.py',
        input_dataset_yaml='/deephealth/dataset/isic_segmentation/isic_segmentation.yml',
        output_path='/deephealth/outputs',
        num_epochs: Integer() = 1,
        num_batch_size: Integer() = 10,
        output_dataset_folder='/deephealth/dataset/isic_segmentation',
        split_partition_number: Integer() = 3,
        python_inference_path='/deephealth/use_case_pipeline/python/skin_lesion_segmentation_inference.py',
        model_file_folder='/deephealth/outputs',
        is_gpu_used='no'):
    """Skin-lesion segmentation pipeline.

    Stages, in order: resolve the GPU flag, train the model, split the
    dataset YAML into partitions, load the latest trained model and the
    generated sub-YAMLs, then run inference in parallel over each sub-YAML.
    All steps share one EFS-backed volume mounted at /deephealth.
    """
    # Shared persistent volume mounted at /deephealth by every step.
    dhealth_vop = dsl.PipelineVolume(pvc='dhealth-efs-claim')
    dhealth_vop_param = {'/deephealth': dhealth_vop}

    _gpu = _get_gpu_op(is_gpu_used) \
        .set_display_name("GPU input parameter")

    # NOTE(review): the whole outputs mapping (_gpu.outputs) is passed as a
    # positional argument — confirm whether a single named output
    # (e.g. _gpu.output) was intended here.
    _train_op = dhealth_train_sl_segmentation_op(python_train_path, input_dataset_yaml, output_path, num_epochs, num_batch_size, _gpu.outputs) \
        .after(_gpu) \
        .add_pvolumes(dhealth_vop_param) \
        .set_display_name('Training Model')

    # Display-name typo fixed: 'Split Datset YAML' -> 'Split Dataset YAML'.
    _split_yaml_op = dhealth_splityaml_op(input_dataset_yaml, output_dataset_folder, split_partition_number) \
        .after(_train_op) \
        .add_pvolumes(dhealth_vop_param) \
        .set_display_name('Split Dataset YAML')

    model: ContainerOp = _get_latest_model_op(model_file_folder) \
        .after(_split_yaml_op) \
        .add_pvolumes({"/deephealth": dhealth_vop}) \
        .set_display_name('Load Model')

    subyamls = _get_yaml_op(output_dataset_folder) \
        .after(_split_yaml_op) \
        .add_pvolumes(dhealth_vop_param) \
        .set_display_name('Load sub-YAMLs')

    # Fan out: one inference run per generated sub-YAML, batch size 2.
    with dsl.ParallelFor(subyamls.outputs['yamlfile']) as sub_yaml:
        dhealth_inference_sl_segmentation_op(
            python_inference_path, sub_yaml, model.output, output_path, 2, is_gpu_used) \
            .add_pvolumes(dhealth_vop_param) \
            .set_display_name('Inference')
def a_op(field_l: Integer()) -> {
        'field_m': 'GCSPath',
        'field_n': {'customized_type': {'openapi_schema_validator': '{"type": "string", "pattern": "^gs://.*$"}'}},
        'field_o': 'Integer'}:
    """Build the 'operator a' ContainerOp (validator-typed variant).

    field_l is forwarded as --field-l; the three declared outputs are read
    back from fixed file paths inside the container.
    """
    arguments = ['--field-l', field_l]
    outputs = {
        'field_m': '/schema.txt',
        'field_n': '/feature.txt',
        'field_o': '/output.txt',
    }
    return ContainerOp(
        name='operator a',
        image='gcr.io/ml-pipeline/component-b',
        arguments=arguments,
        file_outputs=outputs,
    )
def a_op(
        field_m: {'GCSPath': {'path_type': 'file', 'file_type': 'tsv'}},
        field_o: Integer()):
    """Build the 'operator a' ContainerOp with no declared outputs.

    Both inputs are passed straight through as command-line flags.
    """
    cli = [
        '--field-l', field_m,
        '--field-o', field_o,
    ]
    return ContainerOp(
        name='operator a',
        image='gcr.io/ml-pipeline/component-b',
        arguments=cli,
    )
def dhealth_inference_sl_segmentation_op(python_train_path, input_dataset_yaml, model,
                                         output_path, num_batch_size: Integer(), gpu_boolean):
    """Build the skin-lesion-segmentation inference ContainerOp.

    Runs python_train_path (the inference script) against input_dataset_yaml
    and the given model, writing results under output_path. When gpu_boolean
    is the string 'yes', the '--gpu' flag is added and one GPU is requested.

    Fix: the two branches previously duplicated the entire ContainerOp
    construction, differing only in '--gpu' and set_gpu_limit(1); the
    duplication is removed with identical behavior.
    """
    arguments = [
        input_dataset_yaml,
        model,
        '--out-dir', output_path,
        '--batch-size', num_batch_size,
    ]
    use_gpu = gpu_boolean == 'yes'
    if use_gpu:
        arguments.append('--gpu')
    op = dsl.ContainerOp(
        name='DeepHealth - Inference Skin Lesion Segmentation',
        image='dhealth/pylibs:latest',
        command=["python3", python_train_path],
        arguments=arguments)
    if use_gpu:
        # set_gpu_limit returns the op itself; keep the original fluent result.
        op = op.set_gpu_limit(1)
    return op
def my_pipeline1(a: {'Schema': {'file_type': 'csv'}} = 'good', b: Integer() = 12):
    """No-op pipeline used to exercise DSL parameter type annotations."""