    def test_init_wrong_parameters(self, mock_model):
        """Ensure correct exceptions are raised when initializing with invalid args"""

        aiplatform.init(project=_TEST_PROJECT)

        with pytest.raises(ValueError,
                           match=r"not a supported prediction type"):
            training_jobs.AutoMLImageTrainingJob(
                display_name=_TEST_DISPLAY_NAME,
                prediction_type="abcdefg",
            )

        with pytest.raises(ValueError,
                           match=r"not a supported model_type for"):
            training_jobs.AutoMLImageTrainingJob(
                display_name=_TEST_DISPLAY_NAME,
                prediction_type="classification",
                model_type=_TEST_MODEL_TYPE_IOD,
            )

        with pytest.raises(ValueError,
                           match=r"`base_model` is only supported"):
            training_jobs.AutoMLImageTrainingJob(
                display_name=_TEST_DISPLAY_NAME,
                prediction_type=_TEST_PREDICTION_TYPE_IOD,
                base_model=mock_model,
            )
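
    # The tests in this class consume a module-level `mock_model` fixture that is
    # not part of this excerpt. A minimal sketch of one plausible shape for it is
    # given below under a distinct name (`mock_model_sketch`) so it does not shadow
    # the real fixture; it assumes `mock` (unittest.mock) is imported, and the
    # description string is an illustrative placeholder, not the suite's value.
    @pytest.fixture
    def mock_model_sketch(self):
        model = mock.MagicMock()
        # The tests read display_name/description/labels off the mocked model's
        # backing proto when building the expected model-to-upload payload.
        model._gca_resource = gca_model.Model(
            display_name=_TEST_MODEL_DISPLAY_NAME,
            description="sketch of a base model description",  # assumed value
        )
        return model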
    def test_splits_filter_incomplete(
        self,
        mock_pipeline_service_create,
        mock_pipeline_service_get,
        mock_dataset_image,
        mock_model_service_get,
        mock_model,
    ):
        """
        Initiate aiplatform with encryption key name.
        Create and run an AutoML Video Classification training job, verify calls and return value
        """

        aiplatform.init(
            project=_TEST_PROJECT,
            encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
        )

        job = training_jobs.AutoMLImageTrainingJob(
            display_name=_TEST_DISPLAY_NAME, base_model=mock_model
        )

        with pytest.raises(ValueError):
            job.run(
                dataset=mock_dataset_image,
                model_display_name=_TEST_MODEL_DISPLAY_NAME,
                training_filter_split=_TEST_FILTER_SPLIT_TRAINING,
                validation_fraction_split=None,
                test_filter_split=_TEST_FILTER_SPLIT_TEST,
                disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
            )
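
    # The run-style tests below take a `sync` argument; it is assumed to be supplied
    # by pytest parametrization on each test (not shown in this excerpt), e.g.
    #     @pytest.mark.parametrize("sync", [True, False])
    # so every job.run(...) path is exercised both in blocking and in async mode,
    # with model_from_job.wait() covering the async case.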
    def test_run_called_twice_raises(self, mock_dataset_image, sync):
        aiplatform.init(project=_TEST_PROJECT)

        job = training_jobs.AutoMLImageTrainingJob(display_name=_TEST_DISPLAY_NAME,)

        job.run(
            dataset=mock_dataset_image,
            model_display_name=_TEST_MODEL_DISPLAY_NAME,
            training_fraction_split=_TEST_FRACTION_SPLIT_TRAINING,
            validation_fraction_split=_TEST_FRACTION_SPLIT_VALIDATION,
            test_fraction_split=_TEST_FRACTION_SPLIT_TEST,
            disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
            sync=sync,
        )

        with pytest.raises(RuntimeError):
            job.run(
                dataset=mock_dataset_image,
                model_display_name=_TEST_MODEL_DISPLAY_NAME,
                training_fraction_split=_TEST_FRACTION_SPLIT_TRAINING,
                validation_fraction_split=_TEST_FRACTION_SPLIT_VALIDATION,
                test_fraction_split=_TEST_FRACTION_SPLIT_TEST,
                sync=sync,
            )

    def test_splits_default(
        self,
        mock_pipeline_service_create,
        mock_pipeline_service_get,
        mock_dataset_image,
        mock_model_service_get,
        mock_model,
        sync,
    ):
        """
        Initiate aiplatform with encryption key name.
        Create and run an AutoML Video Classification training job, verify calls and return value
        """

        aiplatform.init(
            project=_TEST_PROJECT,
            encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
        )

        job = training_jobs.AutoMLImageTrainingJob(
            display_name=_TEST_DISPLAY_NAME, base_model=mock_model)

        model_from_job = job.run(
            dataset=mock_dataset_image,
            model_display_name=_TEST_MODEL_DISPLAY_NAME,
            budget_milli_node_hours=_TEST_TRAINING_BUDGET_MILLI_NODE_HOURS,
            disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
            sync=sync,
            create_request_timeout=None,
        )

        if not sync:
            model_from_job.wait()

        true_managed_model = gca_model.Model(
            display_name=_TEST_MODEL_DISPLAY_NAME,
            description=mock_model._gca_resource.description,
            encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
        )

        true_input_data_config = gca_training_pipeline.InputDataConfig(
            dataset_id=mock_dataset_image.name,
        )

        true_training_pipeline = gca_training_pipeline.TrainingPipeline(
            display_name=_TEST_DISPLAY_NAME,
            training_task_definition=schema.training_job.definition.automl_image_classification,
            training_task_inputs=_TEST_TRAINING_TASK_INPUTS_WITH_BASE_MODEL,
            model_to_upload=true_managed_model,
            input_data_config=true_input_data_config,
            encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
        )

        mock_pipeline_service_create.assert_called_once_with(
            parent=initializer.global_config.common_location_path(),
            training_pipeline=true_training_pipeline,
            timeout=None,
        )
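
    # `mock_pipeline_service_create` and `mock_pipeline_service_get` are assumed to
    # patch the SDK's pipeline service client. A hedged sketch of the create-side
    # fixture follows, under a distinct name so it does not replace the real one;
    # it assumes `mock` and `pipeline_service_client` are imported, and
    # `_TEST_PIPELINE_RESOURCE_NAME` is a hypothetical constant for this sketch.
    @pytest.fixture
    def mock_pipeline_service_create_sketch(self):
        with mock.patch.object(
            pipeline_service_client.PipelineServiceClient, "create_training_pipeline"
        ) as mock_create_training_pipeline:
            # Return a minimal pipeline proto so the SDK can poll its state.
            mock_create_training_pipeline.return_value = gca_training_pipeline.TrainingPipeline(
                name=_TEST_PIPELINE_RESOURCE_NAME,  # hypothetical constant
                state=gca_pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED,
            )
            yield mock_create_training_pipeline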
    def test_run_call_pipeline_if_no_model_display_name(
        self,
        mock_pipeline_service_create,
        mock_dataset_image,
        mock_model_service_get,
        sync,
    ):
        aiplatform.init(project=_TEST_PROJECT)

        job = training_jobs.AutoMLImageTrainingJob(
            display_name=_TEST_DISPLAY_NAME,
            training_encryption_spec_key_name=_TEST_PIPELINE_ENCRYPTION_KEY_NAME,
            model_encryption_spec_key_name=_TEST_MODEL_ENCRYPTION_KEY_NAME,
        )

        model_from_job = job.run(
            dataset=mock_dataset_image,
            training_fraction_split=_TEST_FRACTION_SPLIT_TRAINING,
            validation_fraction_split=_TEST_FRACTION_SPLIT_VALIDATION,
            test_fraction_split=_TEST_FRACTION_SPLIT_TEST,
            budget_milli_node_hours=_TEST_TRAINING_BUDGET_MILLI_NODE_HOURS,
            disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
            sync=sync,
        )

        if not sync:
            model_from_job.wait()

        true_fraction_split = gca_training_pipeline.FractionSplit(
            training_fraction=_TEST_FRACTION_SPLIT_TRAINING,
            validation_fraction=_TEST_FRACTION_SPLIT_VALIDATION,
            test_fraction=_TEST_FRACTION_SPLIT_TEST,
        )

        # Test that it defaults to the job display name
        true_managed_model = gca_model.Model(
            display_name=_TEST_DISPLAY_NAME,
            encryption_spec=_TEST_MODEL_ENCRYPTION_SPEC)

        true_input_data_config = gca_training_pipeline.InputDataConfig(
            fraction_split=true_fraction_split,
            dataset_id=mock_dataset_image.name,
        )

        true_training_pipeline = gca_training_pipeline.TrainingPipeline(
            display_name=_TEST_DISPLAY_NAME,
            training_task_definition=schema.training_job.definition.automl_image_classification,
            training_task_inputs=_TEST_TRAINING_TASK_INPUTS,
            model_to_upload=true_managed_model,
            input_data_config=true_input_data_config,
            encryption_spec=_TEST_PIPELINE_ENCRYPTION_SPEC,
        )

        mock_pipeline_service_create.assert_called_once_with(
            parent=initializer.global_config.common_location_path(),
            training_pipeline=true_training_pipeline,
        )
    def test_raises_before_run_is_called(self, mock_pipeline_service_create):
        aiplatform.init(project=_TEST_PROJECT)

        job = training_jobs.AutoMLImageTrainingJob(display_name=_TEST_DISPLAY_NAME,)

        with pytest.raises(RuntimeError):
            job.get_model()

        with pytest.raises(RuntimeError):
            job.has_failed

        with pytest.raises(RuntimeError):
            job.state
    def test_init_all_parameters(self, mock_model):
        """Ensure all private members are set correctly at initialization"""

        aiplatform.init(project=_TEST_PROJECT)

        job = training_jobs.AutoMLImageTrainingJob(
            display_name=_TEST_DISPLAY_NAME,
            prediction_type=_TEST_PREDICTION_TYPE_ICN,
            model_type=_TEST_MODEL_TYPE_MOBILE,
            base_model=mock_model,
            multi_label=True,
        )

        assert job._display_name == _TEST_DISPLAY_NAME
        assert job._model_type == _TEST_MODEL_TYPE_MOBILE
        assert job._prediction_type == _TEST_PREDICTION_TYPE_ICN
        assert job._multi_label is True
        assert job._base_model == mock_model
    def test_run_with_two_split_raises(
        self, mock_dataset_image, sync,
    ):
        aiplatform.init(project=_TEST_PROJECT)

        job = training_jobs.AutoMLImageTrainingJob(display_name=_TEST_DISPLAY_NAME,)

        with pytest.raises(ValueError):
            model_from_job = job.run(
                dataset=mock_dataset_image,
                model_display_name=_TEST_MODEL_DISPLAY_NAME,
                training_fraction_split=_TEST_FRACTION_SPLIT_TRAINING,
                validation_fraction_split=_TEST_FRACTION_SPLIT_VALIDATION,
                test_fraction_split=_TEST_FRACTION_SPLIT_TEST,
                training_filter_split=_TEST_FILTER_SPLIT_TRAINING,
                validation_filter_split=_TEST_FILTER_SPLIT_VALIDATION,
                test_filter_split=_TEST_FILTER_SPLIT_TEST,
                sync=sync,
            )
            if not sync:
                model_from_job.wait()
    def test_run_raises_if_pipeline_fails(
        self, mock_pipeline_service_create_and_get_with_fail, mock_dataset_image, sync
    ):

        aiplatform.init(project=_TEST_PROJECT)

        job = training_jobs.AutoMLImageTrainingJob(display_name=_TEST_DISPLAY_NAME,)

        with pytest.raises(RuntimeError):
            job.run(
                model_display_name=_TEST_MODEL_DISPLAY_NAME,
                dataset=mock_dataset_image,
                training_fraction_split=_TEST_FRACTION_SPLIT_TRAINING,
                validation_fraction_split=_TEST_FRACTION_SPLIT_VALIDATION,
                test_fraction_split=_TEST_FRACTION_SPLIT_TEST,
                sync=sync,
            )

            if not sync:
                job.wait()

        with pytest.raises(RuntimeError):
            job.get_model()
    def test_run_call_pipeline_service_create(
        self,
        mock_pipeline_service_create,
        mock_pipeline_service_get,
        mock_dataset_image,
        mock_model_service_get,
        mock_model,
        sync,
    ):
        """Create and run an AutoML ICN training job, verify calls and return value"""

        aiplatform.init(
            project=_TEST_PROJECT,
            encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
        )

        job = training_jobs.AutoMLImageTrainingJob(
            display_name=_TEST_DISPLAY_NAME, base_model=mock_model, labels=_TEST_LABELS,
        )

        model_from_job = job.run(
            dataset=mock_dataset_image,
            model_display_name=_TEST_MODEL_DISPLAY_NAME,
            model_labels=_TEST_MODEL_LABELS,
            training_filter_split=_TEST_FILTER_SPLIT_TRAINING,
            validation_filter_split=_TEST_FILTER_SPLIT_VALIDATION,
            test_filter_split=_TEST_FILTER_SPLIT_TEST,
            budget_milli_node_hours=_TEST_TRAINING_BUDGET_MILLI_NODE_HOURS,
            disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
            sync=sync,
        )

        if not sync:
            model_from_job.wait()

        true_filter_split = gca_training_pipeline.FilterSplit(
            training_filter=_TEST_FILTER_SPLIT_TRAINING,
            validation_filter=_TEST_FILTER_SPLIT_VALIDATION,
            test_filter=_TEST_FILTER_SPLIT_TEST,
        )

        true_managed_model = gca_model.Model(
            display_name=_TEST_MODEL_DISPLAY_NAME,
            labels=mock_model._gca_resource.labels,
            description=mock_model._gca_resource.description,
            encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
        )

        true_input_data_config = gca_training_pipeline.InputDataConfig(
            filter_split=true_filter_split, dataset_id=mock_dataset_image.name,
        )

        true_training_pipeline = gca_training_pipeline.TrainingPipeline(
            display_name=_TEST_DISPLAY_NAME,
            labels=_TEST_LABELS,
            training_task_definition=schema.training_job.definition.automl_image_classification,
            training_task_inputs=_TEST_TRAINING_TASK_INPUTS_WITH_BASE_MODEL,
            model_to_upload=true_managed_model,
            input_data_config=true_input_data_config,
            encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
        )

        mock_pipeline_service_create.assert_called_once_with(
            parent=initializer.global_config.common_location_path(),
            training_pipeline=true_training_pipeline,
        )

        mock_model_service_get.assert_called_once_with(
            name=_TEST_MODEL_NAME, retry=base._DEFAULT_RETRY
        )
        assert job._gca_resource is mock_pipeline_service_get.return_value
        assert model_from_job._gca_resource is mock_model_service_get.return_value
        assert job.get_model()._gca_resource is mock_model_service_get.return_value
        assert not job.has_failed
        assert job.state == gca_pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED