"docker.io/doctorai/ml-pipelines-tfx-custom:0.22.0", ) from pipelines.base_pipeline import init_components components = init_components(data_dir, module_file, 50000, 10000, serving_model_dir=serving_model_dir) runner_config = kubeflow_dag_runner.KubeflowDagRunnerConfig( kubeflow_metadata_config=metadata_config, # Specify custom docker image to use. tfx_image=tfx_image, pipeline_operator_funcs=( # If running on K8s Engine (GKE) on Google Cloud Platform (GCP), # kubeflow_dag_runner.get_default_pipeline_operator_funcs() # provides default configurations specifically for GKE on GCP, # such as secrets. kubeflow_dag_runner.get_default_pipeline_operator_funcs()), ) p = init_kubeflow_pipeline(components, output_base, direct_num_workers=0) output_filename = f"{pipeline_name}.yaml" kubeflow_dag_runner.KubeflowDagRunner( config=runner_config, output_dir=output_dir, output_filename=output_filename, ).run(p)
# NOTE(review): this `)` closes a statement that begins before this view
# (presumably another data_types.RuntimeParameter definition).
)

# Runtime parameter supplied at KFP run submission time; default 0.01.
# NOTE(review): presumably a latency budget (seconds) used by the model
# evaluator — confirm against pipeline.create_pipeline.
eval_max_latency = data_types.RuntimeParameter(
    name='eval-max-latency', default=0.01, ptype=float
)

# Per-run artifact root: RUN_ID_PLACEHOLDER is substituted by KFP when the
# run is created, so each run writes artifacts under its own prefix.
pipeline_root = f'{config.ARTIFACT_STORE_URI}/{config.PIPELINE_NAME}/{kfp.dsl.RUN_ID_PLACEHOLDER}'

# Set KubeflowDagRunner settings
metadata_config = kubeflow_dag_runner.get_default_kubeflow_metadata_config()
runner_config = kubeflow_dag_runner.KubeflowDagRunnerConfig(
    kubeflow_metadata_config = metadata_config,
    # NOTE(review): USE_KFP_SA is compared against the exact *string* 'True';
    # any other spelling (e.g. 'true', '1') disables the KFP service-account
    # operator func — verify this is intended.
    pipeline_operator_funcs = kubeflow_dag_runner.get_default_pipeline_operator_funcs(
        config.USE_KFP_SA == 'True'),
    tfx_image=config.ML_IMAGE_URI
)

# Compile the pipeline
# NOTE(review): the create_pipeline(...) argument list continues past the
# end of this view — the call is completed on later lines.
kubeflow_dag_runner.KubeflowDagRunner(config=runner_config).run(
    pipeline.create_pipeline(
        pipeline_name=config.PIPELINE_NAME,
        pipeline_root=pipeline_root,
        project_id=config.PROJECT_ID,
        bq_dataset_name=config.BQ_DATASET_NAME,
        min_item_frequency=min_item_frequency,
        max_group_size=max_group_size,
        dimensions=dimensions,
        num_leaves=num_leaves,
        eval_min_recall=eval_min_recall,