Example 1
        def pipeline():
            """Upload a model, create an endpoint, deploy, and batch predict.

            Ops are wired by data dependency only: deploy and batch predict
            both consume the uploaded model's output artifact.
            """
            # Register the pre-built model artifact with Vertex AI.
            uploaded_model = ModelUploadOp(
                project=self._project,
                display_name=self._display_name,
                serving_container_image_uri=self._serving_container_image_uri,
                artifact_uri=self._artifact_uri)

            # Endpoint creation has no data dependency on the upload step.
            new_endpoint = EndpointCreateOp(
                project=self._project,
                display_name=self._display_name)

            # Deploy the freshly uploaded model.
            deployment = ModelDeployOp(
                project=self._project,
                model=uploaded_model.outputs["model"])

            # Kick off a batch prediction job against the same model.
            batch_job = ModelBatchPredictOp(
                project=self._project,
                model=uploaded_model.outputs["model"],
                job_display_name=self._display_name,
                gcs_source=self._gcs_source,
                gcs_destination_prefix=self._gcs_destination_prefix)
Example 2
        def pipeline():
            """Upload, create an endpoint, deploy with explicit resources, undeploy.

            The undeploy step carries no data dependency on the deploy step,
            so ordering is enforced explicitly with ``.after(...)``.
            """
            # Register the model artifact with Vertex AI.
            model_op = ModelUploadOp(
                project=self._project,
                display_name=self._display_name,
                serving_container_image_uri=self._serving_container_image_uri,
                artifact_uri=self._artifact_uri,
            )

            # Create the serving endpoint in the target location.
            endpoint_op = EndpointCreateOp(
                project=self._project,
                location=self._location,
                display_name=self._display_name,
            )

            # Deploy with both dedicated- and automatic-resource settings,
            # plus explanation config, to exercise the full parameter surface.
            deploy_op = ModelDeployOp(
                model=model_op.outputs["model"],
                endpoint=endpoint_op.outputs["endpoint"],
                deployed_model_display_name="deployed_model_display_name",
                traffic_split={},
                dedicated_resources_machine_type="n1-standard-4",
                dedicated_resources_min_replica_count=1,
                dedicated_resources_max_replica_count=2,
                dedicated_resources_accelerator_type="fake-accelerator",
                dedicated_resources_accelerator_count=1,
                automatic_resources_min_replica_count=1,
                automatic_resources_max_replica_count=2,
                service_account="fake-sa",
                explanation_metadata={"xai_m": "bar"},
                explanation_parameters={"xai_p": "foo"},
            )

            # Undeploy only after the deploy step has completed.
            _ = ModelUndeployOp(
                model=model_op.outputs["model"],
                endpoint=endpoint_op.outputs["endpoint"],
            ).after(deploy_op)
Example 3
        def pipeline():
            """End-to-end image workflow: create dataset, train, deploy, predict, export, import."""
            # Single-label image classification import schema, resolved once.
            image_schema = (
                aiplatform.schema.dataset.ioformat.image.single_label_classification)

            # Build the managed image dataset from GCS.
            dataset_op = ImageDatasetCreateOp(
                project=self._project,
                display_name=self._display_name,
                gcs_source=self._gcs_source,
                import_schema_uri=image_schema,
            )

            # Train an AutoML image classification model on the dataset.
            training_op = AutoMLImageTrainingJobRunOp(
                project=self._project,
                display_name=self._display_name,
                prediction_type="classification",
                model_type="CLOUD",
                base_model=None,
                dataset=dataset_op.outputs["dataset"],
                model_display_name=self._model_display_name,
                training_fraction_split=0.6,
                validation_fraction_split=0.2,
                test_fraction_split=0.2,
                budget_milli_node_hours=8000,
            )

            # Deploy the trained model.
            deploy_op = ModelDeployOp(
                project=self._project,
                model=training_op.outputs["model"],
            )

            # Batch predict with the trained model.
            batch_op = ModelBatchPredictOp(
                project=self._project,
                model=training_op.outputs["model"],
                job_display_name=self._display_name,
                gcs_source=self._gcs_source,
                gcs_destination_prefix=self._gcs_destination_prefix,
            )

            # Export the dataset back out to GCS.
            export_op = ImageDatasetExportDataOp(
                project=self._project,
                dataset=dataset_op.outputs["dataset"],
                output_dir=self._gcs_output_dir,
            )

            # Import additional data into the same dataset.
            import_op = ImageDatasetImportDataOp(
                gcs_source=self._gcs_source,
                dataset=dataset_op.outputs["dataset"],
                import_schema_uri=image_schema,
            )
Example 4
        def pipeline():
            """End-to-end text workflow: create dataset, train, deploy, predict, export, import."""
            # Multi-label text classification import schema, resolved once.
            text_schema = (
                aiplatform.schema.dataset.ioformat.text.multi_label_classification)

            # Build the managed text dataset from GCS.
            dataset_op = TextDatasetCreateOp(
                project=self._project,
                display_name=self._display_name,
                gcs_source=self._gcs_source,
                import_schema_uri=text_schema,
            )

            # Train an AutoML multi-label text classification model.
            training_op = AutoMLTextTrainingJobRunOp(
                project=self._project,
                display_name=self._display_name,
                dataset=dataset_op.outputs["dataset"],
                prediction_type="classification",
                multi_label=True,
                training_fraction_split=0.6,
                validation_fraction_split=0.2,
                test_fraction_split=0.2,
                model_display_name=self._model_display_name,
            )

            # Deploy the trained model.
            deploy_op = ModelDeployOp(
                project=self._project,
                model=training_op.outputs["model"],
            )

            # Batch predict with the trained model.
            batch_op = ModelBatchPredictOp(
                project=self._project,
                model=training_op.outputs["model"],
                job_display_name=self._display_name,
                gcs_source=self._gcs_source,
                gcs_destination_prefix=self._gcs_destination_prefix,
            )

            # Export the dataset back out to GCS.
            export_op = TextDatasetExportDataOp(
                project=self._project,
                dataset=dataset_op.outputs["dataset"],
                output_dir=self._gcs_output_dir,
            )

            # Import additional data into the same dataset.
            import_op = TextDatasetImportDataOp(
                gcs_source=self._gcs_source,
                dataset=dataset_op.outputs["dataset"],
                import_schema_uri=text_schema,
            )
Example 5
        def pipeline():
            """Tabular workflow: create dataset, train a regression model, deploy, predict, export."""
            # Build the managed tabular dataset from GCS (no import schema needed).
            dataset_op = TabularDatasetCreateOp(
                project=self._project,
                display_name=self._display_name,
                gcs_source=self._gcs_source,
            )

            # Train an AutoML tabular regression model minimizing RMSE.
            training_op = AutoMLTabularTrainingJobRunOp(
                project=self._project,
                display_name=self._display_name,
                optimization_prediction_type='regression',
                optimization_objective='minimize-rmse',
                column_transformations=[
                    {"numeric": {"column_name": "longitude"}},
                ],
                target_column="longitude",
                dataset=dataset_op.outputs["dataset"],
            )

            # Deploy the trained model.
            deploy_op = ModelDeployOp(
                project=self._project,
                model=training_op.outputs["model"],
            )

            # Batch predict with the trained model.
            batch_op = ModelBatchPredictOp(
                project=self._project,
                model=training_op.outputs["model"],
                job_display_name=self._display_name,
                gcs_source=self._gcs_source,
                gcs_destination_prefix=self._gcs_destination_prefix,
            )

            # Export the dataset back out to GCS.
            export_op = TabularDatasetExportDataOp(
                project=self._project,
                dataset=dataset_op.outputs["dataset"],
                output_dir=self._gcs_output_dir,
            )