Example #1
0
    def test_evaluate_interface(self):
        """
        <b>Description:</b>
        Check IEvaluationTask class object initialization

        <b>Input data:</b>
        IEvaluationTask object

        <b>Expected results:</b>
        Test passes if IEvaluationTask object evaluate method raises NotImplementedError exception
        """
        train_dataset = DatasetEntity()
        model = ModelEntity(
            configuration=ModelConfiguration(
                configurable_parameters=ConfigurableParameters(
                    header="Test Header"
                ),
                label_schema=LabelSchemaEntity(),
            ),
            train_dataset=train_dataset,
        )
        # The interface method must stay unimplemented on the base class.
        with pytest.raises(NotImplementedError):
            IEvaluationTask().evaluate(
                ResultSetEntity(
                    model=model,
                    ground_truth_dataset=train_dataset,
                    prediction_dataset=train_dataset,
                )
            )
Example #2
0
    def test_training_interface(self):
        """
        <b>Description:</b>
        Check ITrainingTask class object initialization

        <b>Input data:</b>
        ITrainingTask object

        <b>Expected results:</b>
        Test passes if ITrainingTask object methods raise NotImplementedError exception
        """
        task = ITrainingTask()
        train_dataset = DatasetEntity()
        model_config = ModelConfiguration(
            configurable_parameters=ConfigurableParameters(
                header="Test Header"
            ),
            label_schema=LabelSchemaEntity(),
        )
        model = ModelEntity(
            configuration=model_config, train_dataset=train_dataset
        )
        parameters = TrainParameters()

        # Every abstract method of the interface must stay unimplemented.
        with pytest.raises(NotImplementedError):
            task.save_model(model)
        with pytest.raises(NotImplementedError):
            task.train(
                dataset=train_dataset,
                output_model=model,
                train_parameters=parameters,
            )
        with pytest.raises(NotImplementedError):
            task.cancel_training()
Example #3
0
    def test_optimization_interface(self):
        """
        <b>Description:</b>
        Check IOptimizationTask class object initialization

        <b>Input data:</b>
        IOptimizationTask object

        <b>Expected results:</b>
        Test passes if IOptimizationTask object optimize method raises NotImplementedError exception
        """
        train_dataset = DatasetEntity()
        model = ModelEntity(
            configuration=ModelConfiguration(
                configurable_parameters=ConfigurableParameters(
                    header="Test Header"
                ),
                label_schema=LabelSchemaEntity(),
            ),
            train_dataset=train_dataset,
        )
        optimization_params = OptimizationParameters()
        # The interface method must stay unimplemented on the base class.
        with pytest.raises(NotImplementedError):
            IOptimizationTask().optimize(
                optimization_type=OptimizationType.POT,
                dataset=train_dataset,
                output_model=model,
                optimization_parameters=optimization_params,
            )
Example #4
0
 def metadata_item_with_model() -> MetadataItemEntity:
     """Build a MetadataItemEntity carrying a random tensor and a dummy model."""
     tensor = TensorEntity(
         name="appended_metadata_with_model",
         numpy=np.random.randint(low=0, high=255, size=(10, 15, 3)),
     )
     model = ModelEntity(
         configuration=ModelConfiguration(
             configurable_parameters=ConfigurableParameters(
                 header="Test Header"
             ),
             label_schema=LabelSchemaEntity(),
         ),
         train_dataset=DatasetEntity(),
     )
     return MetadataItemEntity(data=tensor, model=model)
Example #5
0
    def test_set_hyper_parameters(self):
        """
        <b>Description:</b>
        Check set_hyper_parameters() method

        <b>Input data:</b>
        Dummy data

        <b>Expected results:</b>
        Test passes if incoming data is processed correctly

        <b>Steps</b>
        1. Checking parameters after setting
        """
        env = environment()

        header = "Test header"
        description = "Test description"
        visible_in_ui = False
        # Named ``parameters_id`` to avoid shadowing the ``id`` builtin.
        parameters_id = ID(123456789)

        hyper_parameters = ConfigurableParameters(header=header,
                                                  description=description,
                                                  visible_in_ui=visible_in_ui,
                                                  id=parameters_id)
        env.set_hyper_parameters(hyper_parameters=hyper_parameters)

        # Hoist the repeated getter calls into locals once the parameters
        # have been set; the attributes must round-trip unchanged.
        actual_parameters = env.get_hyper_parameters()
        assert actual_parameters.header == header
        assert actual_parameters.description == description
        assert actual_parameters.visible_in_ui == visible_in_ui
        assert actual_parameters.id == parameters_id

        configurable_parameters = env.get_model_configuration().configurable_parameters
        assert configurable_parameters.header == header
        assert configurable_parameters.description == description
        assert configurable_parameters.visible_in_ui == visible_in_ui
        assert configurable_parameters.id == parameters_id

        with pytest.raises(ValueError):
            # ValueError: Unable to set hyper parameters, invalid input: 123
            env.set_hyper_parameters(hyper_parameters="123")
    def test_model_configuration(self):
        """
        <b>Description:</b>
        Check that ModelConfiguration correctly returns the configuration

        <b>Input data:</b>
        ConfigurableParameters, LabelSchemaEntity

        <b>Expected results:</b>
        Test passes if ModelConfiguration correctly returns the configuration

        <b>Steps</b>
        1. Check configuration params in the ModelConfiguration
        """
        configurable_params = ConfigurableParameters(header="Test header")
        schema = LabelSchemaEntity()
        configuration = ModelConfiguration(
            configurable_parameters=configurable_params,
            label_schema=schema,
        )
        # Constructor arguments must be exposed unchanged as attributes.
        assert configuration.configurable_parameters == configurable_params
        assert configuration.label_schema == schema
Example #7
0
    def test_export_interface(self):
        """
        <b>Description:</b>
        Check IExportTask class object initialization

        <b>Input data:</b>
        IExportTask object

        <b>Expected results:</b>
        Test passes if IExportTask object export method raises NotImplementedError exception
        """
        train_dataset = DatasetEntity()
        model = ModelEntity(
            configuration=ModelConfiguration(
                configurable_parameters=ConfigurableParameters(
                    header="Test Header"
                ),
                label_schema=LabelSchemaEntity(),
            ),
            train_dataset=train_dataset,
        )
        # The interface method must stay unimplemented on the base class.
        with pytest.raises(NotImplementedError):
            IExportTask().export(
                export_type=ExportType.OPENVINO, output_model=model
            )
 def other_configuration(self):
     """Return a ModelConfiguration distinct from the default test one."""
     return ModelConfiguration(
         configurable_parameters=ConfigurableParameters(
             header="Other test header"
         ),
         label_schema=LabelSchemaEntity(),
     )