Example 1
0
        def pipeline():
            """Wire up upload → endpoint-create → deploy → batch-predict ops.

            Op outputs feed downstream ops, so KFP infers the execution
            order from the data dependencies, not the statement order.
            """
            upload_task = ModelUploadOp(
                project=self._project,
                display_name=self._display_name,
                serving_container_image_uri=self._serving_container_image_uri,
                artifact_uri=self._artifact_uri,
            )

            endpoint_task = EndpointCreateOp(
                project=self._project,
                display_name=self._display_name,
            )

            # Both the deploy and the batch-predict consume the uploaded model.
            deploy_task = ModelDeployOp(
                project=self._project,
                model=upload_task.outputs["model"],
            )

            predict_task = ModelBatchPredictOp(
                project=self._project,
                model=upload_task.outputs["model"],
                job_display_name=self._display_name,
                gcs_source=self._gcs_source,
                gcs_destination_prefix=self._gcs_destination_prefix,
            )
Example 2
0
        def pipeline():
            """Upload a model, then run a fully parameterised batch prediction.

            The batch-predict arguments are collected into a dict first so
            the (long) parameter list reads as plain data; `**` unpacking is
            semantically identical to passing the keywords directly.
            """
            upload_task = ModelUploadOp(
                project=self._project,
                display_name=self._display_name,
                serving_container_image_uri=self._serving_container_image_uri,
                artifact_uri=self._artifact_uri,
            )

            predict_kwargs = dict(
                project=self._project,
                location=self._location,
                job_display_name=self._display_name,
                model=upload_task.outputs["model"],
                instances_format="instance_format",
                gcs_source_uris=[self._gcs_source],
                bigquery_source_input_uri="bigquery_source_input_uri",
                model_parameters={"foo": "bar"},
                predictions_format="predictions_format",
                gcs_destination_output_uri_prefix=self._gcs_destination_prefix,
                bigquery_destination_output_uri="bigquery_destination_output_uri",
                machine_type="machine_type",
                accelerator_type="accelerator_type",
                accelerator_count=1,
                starting_replica_count=2,
                max_replica_count=3,
                manual_batch_tuning_parameters_batch_size=4,
                generate_explanation=True,
                explanation_metadata={"xai_m": "bar"},
                explanation_parameters={"xai_p": "foo"},
                encryption_spec_key_name="some encryption_spec_key_name",
                labels={"foo": "bar"},
            )
            predict_task = ModelBatchPredictOp(**predict_kwargs)
Example 3
0
        def pipeline():
            """Full image flow: create dataset, train, deploy, predict, export, import."""
            # Single-label classification schema; used by both the dataset
            # creation and the later import step.
            schema_uri = (
                aiplatform.schema.dataset.ioformat.image.single_label_classification
            )

            create_task = ImageDatasetCreateOp(
                project=self._project,
                display_name=self._display_name,
                gcs_source=self._gcs_source,
                import_schema_uri=schema_uri,
            )

            train_task = AutoMLImageTrainingJobRunOp(
                project=self._project,
                display_name=self._display_name,
                prediction_type="classification",
                model_type="CLOUD",
                base_model=None,
                dataset=create_task.outputs["dataset"],
                model_display_name=self._model_display_name,
                training_fraction_split=0.6,
                validation_fraction_split=0.2,
                test_fraction_split=0.2,
                budget_milli_node_hours=8000,
            )

            deploy_task = ModelDeployOp(
                project=self._project,
                model=train_task.outputs["model"],
            )

            predict_task = ModelBatchPredictOp(
                project=self._project,
                model=train_task.outputs["model"],
                job_display_name=self._display_name,
                gcs_source=self._gcs_source,
                gcs_destination_prefix=self._gcs_destination_prefix,
            )

            export_task = ImageDatasetExportDataOp(
                project=self._project,
                dataset=create_task.outputs["dataset"],
                output_dir=self._gcs_output_dir,
            )

            import_task = ImageDatasetImportDataOp(
                gcs_source=self._gcs_source,
                dataset=create_task.outputs["dataset"],
                import_schema_uri=schema_uri,
            )
Example 4
0
        def pipeline():
            """Full text flow: create dataset, train, deploy, predict, export, import."""
            # Multi-label classification schema; shared by the dataset
            # creation and the later import step.
            schema_uri = (
                aiplatform.schema.dataset.ioformat.text.multi_label_classification
            )

            create_task = TextDatasetCreateOp(
                project=self._project,
                display_name=self._display_name,
                gcs_source=self._gcs_source,
                import_schema_uri=schema_uri,
            )

            train_task = AutoMLTextTrainingJobRunOp(
                project=self._project,
                display_name=self._display_name,
                dataset=create_task.outputs["dataset"],
                prediction_type="classification",
                multi_label=True,
                training_fraction_split=0.6,
                validation_fraction_split=0.2,
                test_fraction_split=0.2,
                model_display_name=self._model_display_name,
            )

            deploy_task = ModelDeployOp(
                project=self._project,
                model=train_task.outputs["model"],
            )

            predict_task = ModelBatchPredictOp(
                project=self._project,
                model=train_task.outputs["model"],
                job_display_name=self._display_name,
                gcs_source=self._gcs_source,
                gcs_destination_prefix=self._gcs_destination_prefix,
            )

            export_task = TextDatasetExportDataOp(
                project=self._project,
                dataset=create_task.outputs["dataset"],
                output_dir=self._gcs_output_dir,
            )

            import_task = TextDatasetImportDataOp(
                gcs_source=self._gcs_source,
                dataset=create_task.outputs["dataset"],
                import_schema_uri=schema_uri,
            )
Example 5
0
        def pipeline():
            """Tabular flow: create dataset, train a regression model, deploy, predict, export."""
            create_task = TabularDatasetCreateOp(
                project=self._project,
                display_name=self._display_name,
                gcs_source=self._gcs_source,
            )

            # Single numeric transformation on the target column.
            transformations = [
                {"numeric": {"column_name": "longitude"}},
            ]

            train_task = AutoMLTabularTrainingJobRunOp(
                project=self._project,
                display_name=self._display_name,
                optimization_prediction_type='regression',
                optimization_objective='minimize-rmse',
                column_transformations=transformations,
                target_column="longitude",
                dataset=create_task.outputs["dataset"],
            )

            deploy_task = ModelDeployOp(
                project=self._project,
                model=train_task.outputs["model"],
            )

            predict_task = ModelBatchPredictOp(
                project=self._project,
                model=train_task.outputs["model"],
                job_display_name=self._display_name,
                gcs_source=self._gcs_source,
                gcs_destination_prefix=self._gcs_destination_prefix,
            )

            export_task = TabularDatasetExportDataOp(
                project=self._project,
                dataset=create_task.outputs["dataset"],
                output_dir=self._gcs_output_dir,
            )