def my_pipeline(dataset2: str = 'gs://ml-pipeline-playground/shakespeare2.txt'):
    """Train on a fixed Shakespeare dataset, then conditionally retrain on *dataset2*.

    Args:
        dataset2: URI of the artifact imported (with ``reimport=True``) for the
            second training leg; defaults to the playground Shakespeare text.
    """
    # First leg: import a hard-coded dataset and train on it.
    first_import = dsl.importer(
        artifact_uri='gs://ml-pipeline-playground/shakespeare1.txt',
        artifact_class=Dataset,
        reimport=False)
    first_train = train_op(dataset=first_import.output)

    # Second leg only runs when the first training step emits the expected
    # scalar output value.
    with dsl.Condition(first_train.outputs['scalar'] == '123'):
        second_import = dsl.importer(
            artifact_uri=dataset2,
            artifact_class=Dataset,
            reimport=True)
        train_op(dataset=second_import.output)
def pipeline_with_importer():
    """Import the Shakespeare sample text and feed it to the train step."""
    source = importer(
        artifact_uri='gs://ml-pipeline-playground/shakespeare1.txt',
        artifact_class=Dataset,
        reimport=False)
    train(dataset=source.output)
def my_pipeline():
    """Declare a single importer step for a placeholder ('dummy') artifact URI.

    NOTE(review): this def reuses the name ``my_pipeline`` seen earlier in this
    file — if both live in one module the later definition shadows the earlier;
    confirm they actually belong to separate sample modules.
    """
    _ = dsl.importer(artifact_uri='dummy', artifact_class=Artifact)