Example 1
    class _SubsetParameters(ParameterGroup):
        # Parameters governing sample distribution over subsets
        header = string_attribute("Subset parameters")
        description = string_attribute("Parameters for the different subsets")

        # Add a parameter group 'Subset parameters'
        auto_subset_fractions = configurable_boolean(
            default_value=True,
            description="Test",
            header="Automatically determine subset proportions",
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        __ui_rules = UIRules(
            rules=[
                Rule(
                    parameter="auto_subset_fractions",
                    value=False,
                    operator=Operator.EQUAL_TO,
                )
            ],
            action=Action.SHOW,
        )

        train_proportion = configurable_float(
            default_value=0.75,
            min_value=0.0,
            max_value=1.0,
            header="Training set proportion",
            ui_rules=__ui_rules,
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        validation_proportion = configurable_float(
            default_value=0.1,
            min_value=0.0,
            max_value=1.0,
            header="Validation set proportion",
            ui_rules=__ui_rules,
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        test_proportion = configurable_float(
            default_value=0.15,
            min_value=0.0,
            max_value=1.0,
            header="Test set proportion",
            ui_rules=__ui_rules,
            affects_outcome_of=ModelLifecycle.TRAINING,
        )
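For orientation, a minimal usage sketch for the group above, assuming it is decorated with @attr.s (as the identical group in Example 17 is). The Action.SHOW rule only controls whether the proportion fields appear in the UI when auto_subset_fractions is False; the values themselves stay readable and writable in code:

subsets = _SubsetParameters()
total = subsets.train_proportion + subsets.validation_proportion + subsets.test_proportion
assert abs(total - 1.0) < 1e-9          # defaults 0.75 + 0.1 + 0.15 sum to one
subsets.auto_subset_fractions = False   # in the UI this reveals the proportion fields
subsets.train_proportion = 0.8          # still within the declared 0.0..1.0 bounds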
Example 2
        class _SubgroupOne(ParameterGroup):
            # Subgroup one of the nested group, with a couple of parameters
            header = string_attribute("Parameter group one")

            __ui_rules = UIRules(
                rules=[
                    Rule(
                        parameter=[
                            "nested_parameter_group", "show_subgroup_one"
                        ],
                        operator=Operator.EQUAL_TO,
                        value=False,
                    )
                ],
                action=Action.HIDE,
            )

            bogus_parameter_one = configurable_float(
                default_value=42,
                ui_rules=__ui_rules,
                header="Bogus parameter to test nested parameter groups",
            )
            bogus_parameter_two = configurable_float(
                default_value=42,
                ui_rules=__ui_rules,
                header="Bogus parameter to test nested parameter groups",
            )

class STFPMConfig(BaseAnomalyClassificationConfig):
    """
    Configurable parameters for STFPM anomaly classification task.
    """

    header = string_attribute("Configuration for STFPM")
    description = header

    @attrs
    class ModelParameters(ParameterGroup):
        """
        Parameter Group for training model
        """

        header = string_attribute("Model Parameters")
        description = header

        @attrs
        class EarlyStoppingParameters(ParameterGroup):
            """
            Early stopping parameters
            """

            header = string_attribute("Early Stopping Parameters")
            description = header

            metric = selectable(
                default_value=EarlyStoppingMetrics.IMAGE_ROC_AUC,
                header="Early Stopping Metric",
                description="The metric used to determine if the model should stop training",
            )

        early_stopping = add_parameter_group(EarlyStoppingParameters)

    model = add_parameter_group(ModelParameters)

    class DatasetParameters(ParameterGroup):
        """
        Parameters related to dataloader
        """

        header = string_attribute("Dataset Parameters")
        description = header

        train_batch_size = configurable_integer(
            default_value=32,
            min_value=1,
            max_value=512,
            header="Batch size",
            description=
            "The number of training samples seen in each iteration of training. Increasing this value "
            "improves training time and may make the training more stable. A larger batch size has higher "
            "memory requirements.",
            warning=
            "Increasing this value may cause the system to use more memory than available, "
            "potentially causing out of memory errors, please update with caution.",
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        num_workers = configurable_integer(
            default_value=8,
            min_value=0,
            max_value=36,
            header="Number of workers",
            description=
            "Increasing this value might improve training speed however it might cause out of memory "
            "errors. If the number of workers is set to zero, data loading will happen in the main "
            "training thread.",
        )

class PadimConfig(BaseAnomalyClassificationConfig):
    """
    Configurable parameters for PADIM anomaly classification task.
    """

    header = string_attribute("Configuration for Padim")
    description = header

    class __LearningParameters(ParameterGroup):
        header = string_attribute("Test Learning Parameters")
        description = header

        batch_size = configurable_integer(
            default_value=5,
            min_value=1,
            max_value=512,
            header="Test batch size",
            description=
            "The number of training samples seen in each iteration of training. Increasing this value "
            "improves training time and may make the training more stable. A larger batch size has higher "
            "memory requirements.",
            warning=
            "Increasing this value may cause the system to use more memory than available, "
            "potentially causing out of memory errors, please update with caution.",
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        num_iters = configurable_integer(
            default_value=1,
            min_value=1,
            max_value=100000,
            header="Number of training iterations",
            description=
            "Increasing this value causes the results to be more robust but training time will be longer.",
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        learning_rate = configurable_float(
            default_value=0.01,
            min_value=1e-07,
            max_value=1e-01,
            header="Learning rate",
            description=
            "Increasing this value will speed up training convergence but might make it unstable.",
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        learning_rate_warmup_iters = configurable_integer(
            default_value=100,
            min_value=1,
            max_value=10000,
            header="Number of iterations for learning rate warmup",
            description="Test learning rate warmup",
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        num_workers = configurable_integer(
            default_value=4,
            min_value=2,
            max_value=10,
            header="num_workers test header",
            description="num_workers test description",
            affects_outcome_of=ModelLifecycle.NONE,
        )
Example 7
    class _NestedParameterGroup(ParameterGroup):
        # A nested group of parameters
        # header is a required attribute; all parameter groups should define one.
        header = string_attribute("Test group of parameter groups")

        @attr.s
        class _SubgroupOne(ParameterGroup):
            # Subgroup one of the nested group, with a couple of parameters
            header = string_attribute("Parameter group one")

            __ui_rules = UIRules(
                rules=[
                    Rule(
                        parameter=[
                            "nested_parameter_group", "show_subgroup_one"
                        ],
                        operator=Operator.EQUAL_TO,
                        value=False,
                    )
                ],
                action=Action.HIDE,
            )

            bogus_parameter_one = configurable_float(
                default_value=42,
                ui_rules=__ui_rules,
                header="Bogus parameter to test nested parameter groups",
            )
            bogus_parameter_two = configurable_float(
                default_value=42,
                ui_rules=__ui_rules,
                header="Bogus parameter to test nested parameter groups",
            )

        subgroup_one = add_parameter_group(_SubgroupOne)

        show_subgroup_one = configurable_boolean(
            default_value=True, header="Show the parameters in subgroup one?")

        @attr.s
        class _SubgroupTwo(ParameterGroup):
            # Subgroup two of the nested group, with a couple of parameters
            header = string_attribute("Parameter group two")
            bogus_parameter_three = configurable_float(
                default_value=42,
                header="Bogus parameter to test nested parameter groups",
            )

            bogus_parameter_four = configurable_float(
                default_value=42,
                header="Bogus parameter to test nested parameter groups",
            )

        subgroup_two = add_parameter_group(_SubgroupTwo)
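To show how the nesting resolves at runtime, a small sketch, assuming the groups above are attrs classes as in Example 17: subgroup values are reached through plain attribute chains, and the HIDE rule keyed on ["nested_parameter_group", "show_subgroup_one"] affects only what the UI displays, not programmatic access:

group = _NestedParameterGroup()
print(group.subgroup_one.bogus_parameter_one)   # 42 by default
print(group.subgroup_two.bogus_parameter_four)  # 42 by default
group.show_subgroup_one = False                 # hides subgroup one in the UI only
print(group.subgroup_one.bogus_parameter_two)   # values remain accessible in code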
Example 8
        class _SubgroupTwo(ParameterGroup):
            # Subgroup two of the nested group, with a couple of parameters
            header = string_attribute("Parameter group two")
            bogus_parameter_three = configurable_float(
                default_value=42,
                header="Bogus parameter to test nested parameter groups",
            )

            bogus_parameter_four = configurable_float(
                default_value=42,
                header="Bogus parameter to test nested parameter groups",
            )

        class EarlyStoppingParameters(ParameterGroup):
            """
            Early stopping parameters
            """

            header = string_attribute("Early Stopping Parameters")
            description = header

            metric = selectable(
                default_value=EarlyStoppingMetrics.IMAGE_ROC_AUC,
                header="Early Stopping Metric",
                description="The metric used to determine if the model should stop training",
            )
Example 10
    class __LearningParameters(ParameterGroup):
        header = string_attribute("Learning Parameters")
        description = header

        batch_size = configurable_integer(
            default_value=32,
            min_value=1,
            max_value=512,
            header="Batch size",
            description=
            "The number of training samples seen in each iteration of training. Increasing this value "
            "improves training time and may make the training more stable. A larger batch size has higher "
            "memory requirements.",
            warning=
            "Increasing this value may cause the system to use more memory than available, "
            "potentially causing out of memory errors, please update with caution.",
            affects_outcome_of=ModelLifecycle.TRAINING)

        max_num_epochs = configurable_integer(
            default_value=200,
            min_value=1,
            max_value=1000,
            header="Maximum number of training epochs",
            description=
            "Increasing this value causes the results to be more robust but training time "
            "will be longer.",
            affects_outcome_of=ModelLifecycle.TRAINING)

        learning_rate = configurable_float(
            default_value=0.01,
            min_value=1e-07,
            max_value=1e-01,
            header="Learning rate",
            description="Increasing this value will speed up training \
                         convergence but might make it unstable.",
            affects_outcome_of=ModelLifecycle.TRAINING)

        enable_lr_finder = configurable_boolean(
            default_value=False,
            header="Enable automatic learing rate estimation",
            description=
            "Learning rate parameter value will be ignored if enabled.",
            affects_outcome_of=ModelLifecycle.TRAINING)

        enable_early_stopping = configurable_boolean(
            default_value=True,
            header="Enable adaptive early stopping of the training",
            description="Adaptive early exit from training when accuracy isn't \
                         changed or decreased for several epochs.",
            affects_outcome_of=ModelLifecycle.TRAINING)
Example 11
    class __POTParameter(ParameterGroup):
        header = string_attribute("POT Parameters")
        description = header

        stat_subset_size = configurable_integer(
            header="Number of data samples",
            description=
            "Number of data samples used for post-training optimization",
            default_value=300,
            min_value=1,
            max_value=maxsize)

        preset = selectable(
            default_value=POTQuantizationPreset.PERFORMANCE,
            header="Preset",
            description="Quantization preset that defines quantization scheme",
            editable=False,
            visible_in_ui=False)

    class __Postprocessing(ParameterGroup):
        header = string_attribute("Test Postprocessing")
        description = header

        result_based_confidence_threshold = configurable_boolean(
            default_value=True,
            header="Test Result based confidence threshold",
            description="Test confidence threshold is derived from the results",
            affects_outcome_of=ModelLifecycle.INFERENCE,
        )

        confidence_threshold = configurable_float(
            default_value=0.25,
            min_value=0,
            max_value=1,
            header="Test Confidence threshold",
            description=
            "This threshold only takes effect if the threshold is not set based on the result.--Only test",
            affects_outcome_of=ModelLifecycle.INFERENCE,
        )
Example 13
    class __NNCFOptimization(ParameterGroup):
        header = string_attribute("Optimization by NNCF")
        description = header

        enable_quantization = configurable_boolean(
            default_value=True,
            header="Enable quantization algorithm",
            description="Enable quantization algorithm",
            affects_outcome_of=ModelLifecycle.TRAINING)

        enable_pruning = configurable_boolean(
            default_value=False,
            header="Enable filter pruning algorithm",
            description="Enable filter pruning algorithm",
            affects_outcome_of=ModelLifecycle.TRAINING)

        maximal_accuracy_degradation = configurable_float(
            default_value=1.0,
            min_value=0.0,
            max_value=100.0,
            header="Maximum accuracy degradation",
            description="The maximal allowed accuracy metric drop",
            affects_outcome_of=ModelLifecycle.TRAINING)

class ConfigExample(ConfigurableParameters):
    header = string_attribute(
        "Test configuration for an object detection task")
    description = header

    class __LearningParameters(ParameterGroup):
        header = string_attribute("Test Learning Parameters")
        description = header

        batch_size = configurable_integer(
            default_value=5,
            min_value=1,
            max_value=512,
            header="Test batch size",
            description=
            "The number of training samples seen in each iteration of training. Increasing this value "
            "improves training time and may make the training more stable. A larger batch size has higher "
            "memory requirements.",
            warning=
            "Increasing this value may cause the system to use more memory than available, "
            "potentially causing out of memory errors, please update with caution.",
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        num_iters = configurable_integer(
            default_value=1,
            min_value=1,
            max_value=100000,
            header="Number of training iterations",
            description=
            "Increasing this value causes the results to be more robust but training time will be longer.",
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        learning_rate = configurable_float(
            default_value=0.01,
            min_value=1e-07,
            max_value=1e-01,
            header="Learning rate",
            description=
            "Increasing this value will speed up training convergence but might make it unstable.",
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        learning_rate_warmup_iters = configurable_integer(
            default_value=100,
            min_value=1,
            max_value=10000,
            header="Number of iterations for learning rate warmup",
            description="Test learning rate warmup",
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        num_workers = configurable_integer(
            default_value=4,
            min_value=2,
            max_value=10,
            header="num_workers test header",
            description="num_workers test description",
            affects_outcome_of=ModelLifecycle.NONE,
        )

    class __Postprocessing(ParameterGroup):
        header = string_attribute("Test Postprocessing")
        description = header

        result_based_confidence_threshold = configurable_boolean(
            default_value=True,
            header="Test Result based confidence threshold",
            description="Test confidence threshold is derived from the results",
            affects_outcome_of=ModelLifecycle.INFERENCE,
        )

        confidence_threshold = configurable_float(
            default_value=0.25,
            min_value=0,
            max_value=1,
            header="Test Confidence threshold",
            description=
            "This threshold only takes effect if the threshold is not set based on the result.--Only test",
            affects_outcome_of=ModelLifecycle.INFERENCE,
        )

    learning_parameters = add_parameter_group(__LearningParameters)
    postprocessing = add_parameter_group(__Postprocessing)
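A rough usage sketch, assuming ConfigExample carries an @attrs decorator (possibly trimmed from this snippet) so it can be instantiated with its defaults: the two groups are reached through the names given to add_parameter_group, and values may be overridden as long as they respect the declared bounds:

cfg = ConfigExample()
print(cfg.learning_parameters.batch_size)       # 5
print(cfg.postprocessing.confidence_threshold)  # 0.25
cfg.learning_parameters.num_iters = 500         # within the declared 1..100000 range
cfg.postprocessing.result_based_confidence_threshold = False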

class BaseAnomalyClassificationConfig(ConfigurableParameters):
    """
    Base OTE configurable parameters for anomaly classification task.
    """

    header = string_attribute(
        "Configuration for an anomaly classification task")
    description = header

    @attrs
    class DatasetParameters(ParameterGroup):
        """
        Parameters related to dataloader
        """

        header = string_attribute("Dataset Parameters")
        description = header

        train_batch_size = configurable_integer(
            default_value=32,
            min_value=1,
            max_value=512,
            header="Batch size",
            description=
            "The number of training samples seen in each iteration of training. Increasing this value "
            "improves training time and may make the training more stable. A larger batch size has higher "
            "memory requirements.",
            warning=
            "Increasing this value may cause the system to use more memory than available, "
            "potentially causing out of memory errors, please update with caution.",
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        num_workers = configurable_integer(
            default_value=8,
            min_value=0,
            max_value=36,
            header="Number of workers",
            description=
            "Increasing this value might improve training speed however it might cause out of memory "
            "errors. If the number of workers is set to zero, data loading will happen in the main "
            "training thread.",
        )

    @attrs
    class POTParameters(ParameterGroup):
        """
        Training parameters for post-training optimization
        """

        header = string_attribute("POT Parameters")
        description = header

        preset = selectable(
            default_value=POTQuantizationPreset.PERFORMANCE,
            header="Preset",
            description="Quantization preset that defines quantization scheme",
        )

        stat_subset_size = configurable_integer(
            header="Number of data samples",
            description=
            "Number of data samples used for post-training optimization",
            default_value=300,
            min_value=1,
            max_value=maxsize,
        )

    dataset = add_parameter_group(DatasetParameters)
    pot_parameters = add_parameter_group(POTParameters)
Example 16
class OTEClassificationParameters(ConfigurableParameters):
    header = string_attribute("Configuration for an image classification task")
    description = header

    @attrs
    class __LearningParameters(ParameterGroup):
        header = string_attribute("Learning Parameters")
        description = header

        batch_size = configurable_integer(
            default_value=32,
            min_value=1,
            max_value=512,
            header="Batch size",
            description=
            "The number of training samples seen in each iteration of training. Increasing this value "
            "improves training time and may make the training more stable. A larger batch size has higher "
            "memory requirements.",
            warning=
            "Increasing this value may cause the system to use more memory than available, "
            "potentially causing out of memory errors, please update with caution.",
            affects_outcome_of=ModelLifecycle.TRAINING)

        max_num_epochs = configurable_integer(
            default_value=200,
            min_value=1,
            max_value=1000,
            header="Maximum number of training epochs",
            description=
            "Increasing this value causes the results to be more robust but training time "
            "will be longer.",
            affects_outcome_of=ModelLifecycle.TRAINING)

        learning_rate = configurable_float(
            default_value=0.01,
            min_value=1e-07,
            max_value=1e-01,
            header="Learning rate",
            description="Increasing this value will speed up training \
                         convergence but might make it unstable.",
            affects_outcome_of=ModelLifecycle.TRAINING)

        enable_lr_finder = configurable_boolean(
            default_value=False,
            header="Enable automatic learing rate estimation",
            description=
            "Learning rate parameter value will be ignored if enabled.",
            affects_outcome_of=ModelLifecycle.TRAINING)

        enable_early_stopping = configurable_boolean(
            default_value=True,
            header="Enable adaptive early stopping of the training",
            description="Adaptive early exit from training when accuracy isn't \
                         changed or decreased for several epochs.",
            affects_outcome_of=ModelLifecycle.TRAINING)

    @attrs
    class __NNCFOptimization(ParameterGroup):
        header = string_attribute("Optimization by NNCF")
        description = header

        enable_quantization = configurable_boolean(
            default_value=True,
            header="Enable quantization algorithm",
            description="Enable quantization algorithm",
            affects_outcome_of=ModelLifecycle.TRAINING)

        enable_pruning = configurable_boolean(
            default_value=False,
            header="Enable filter pruning algorithm",
            description="Enable filter pruning algorithm",
            affects_outcome_of=ModelLifecycle.TRAINING)

        maximal_accuracy_degradation = configurable_float(
            default_value=1.0,
            min_value=0.0,
            max_value=100.0,
            header="Maximum accuracy degradation",
            description="The maximal allowed accuracy metric drop",
            affects_outcome_of=ModelLifecycle.TRAINING)

    @attrs
    class __POTParameter(ParameterGroup):
        header = string_attribute("POT Parameters")
        description = header

        stat_subset_size = configurable_integer(
            header="Number of data samples",
            description=
            "Number of data samples used for post-training optimization",
            default_value=300,
            min_value=1,
            max_value=maxsize)

        preset = selectable(
            default_value=POTQuantizationPreset.PERFORMANCE,
            header="Preset",
            description="Quantization preset that defines quantization scheme",
            editable=False,
            visible_in_ui=False)

    learning_parameters = add_parameter_group(__LearningParameters)
    nncf_optimization = add_parameter_group(__NNCFOptimization)
    pot_parameters = add_parameter_group(__POTParameter)
Example 17
class DatasetManagerConfig(ConfigurableParameters):
    """Dummy configurable parameters class"""

    # type: ignore
    # This class is used for testing purposes only, so mypy should ignore it

    # Component and header are required; description is optional.
    header = string_attribute("Dataset Manager configuration -- TEST ONLY")
    description = string_attribute(
        "Configurable parameters for the DatasetManager -- TEST ONLY")

    # Add some parameters
    number_of_samples_for_auto_train = configurable_integer(
        default_value=5,
        min_value=1,
        max_value=1000,
        header="Samples required for new training round",
    )
    label_constraints = configurable_boolean(default_value=True,
                                             header="Apply label constraints")

    @attr.s
    class _NestedParameterGroup(ParameterGroup):
        # A nested group of parameters
        # header is a required attribute; all parameter groups should define one.
        header = string_attribute("Test group of parameter groups")

        @attr.s
        class _SubgroupOne(ParameterGroup):
            # Subgroup one of the nested group, with a couple of parameters
            header = string_attribute("Parameter group one")

            __ui_rules = UIRules(
                rules=[
                    Rule(
                        parameter=[
                            "nested_parameter_group", "show_subgroup_one"
                        ],
                        operator=Operator.EQUAL_TO,
                        value=False,
                    )
                ],
                action=Action.HIDE,
            )

            bogus_parameter_one = configurable_float(
                default_value=42,
                ui_rules=__ui_rules,
                header="Bogus parameter to test nested parameter groups",
            )
            bogus_parameter_two = configurable_float(
                default_value=42,
                ui_rules=__ui_rules,
                header="Bogus parameter to test nested parameter groups",
            )

        subgroup_one = add_parameter_group(_SubgroupOne)

        show_subgroup_one = configurable_boolean(
            default_value=True, header="Show the parameters in subgroup one?")

        @attr.s
        class _SubgroupTwo(ParameterGroup):
            # Subgroup two of the nested group, with a couple of parameters
            header = string_attribute("Parameter group two")
            bogus_parameter_three = configurable_float(
                default_value=42,
                header="Bogus parameter to test nested parameter groups",
            )

            bogus_parameter_four = configurable_float(
                default_value=42,
                header="Bogus parameter to test nested parameter groups",
            )

        subgroup_two = add_parameter_group(_SubgroupTwo)

    @attr.s
    class _SubsetParameters(ParameterGroup):
        # Parameters governing sample distribution over subsets
        header = string_attribute("Subset parameters")
        description = string_attribute("Parameters for the different subsets")

        # Add a parameter group 'Subset parameters'
        auto_subset_fractions = configurable_boolean(
            default_value=True,
            description="Test",
            header="Automatically determine subset proportions",
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        __ui_rules = UIRules(
            rules=[
                Rule(
                    parameter="auto_subset_fractions",
                    value=False,
                    operator=Operator.EQUAL_TO,
                )
            ],
            action=Action.SHOW,
        )

        train_proportion = configurable_float(
            default_value=0.75,
            min_value=0.0,
            max_value=1.0,
            header="Training set proportion",
            ui_rules=__ui_rules,
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        validation_proportion = configurable_float(
            default_value=0.1,
            min_value=0.0,
            max_value=1.0,
            header="Validation set proportion",
            ui_rules=__ui_rules,
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

        test_proportion = configurable_float(
            default_value=0.15,
            min_value=0.0,
            max_value=1.0,
            header="Test set proportion",
            ui_rules=__ui_rules,
            affects_outcome_of=ModelLifecycle.TRAINING,
        )

    # Add a selectable and float selectable parameter
    dummy_float_selectable = float_selectable(options=[1.0, 2.0, 3.0, 4.0],
                                              default_value=2.0,
                                              header="Test float selectable")

    dummy_selectable = selectable(
        default_value=SomeEnumSelectable.BOGUS_NAME,
        header="Test",
        affects_outcome_of=ModelLifecycle.INFERENCE,
    )

    # Finally, add the nested parameter group and subset parameter groups to the config
    # NOTE! group initialization should use a factory to avoid passing mutable default arguments. This is why the
    # add_parameter_group function is needed.
    nested_parameter_group = add_parameter_group(_NestedParameterGroup)
    subset_parameters = add_parameter_group(_SubsetParameters)
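The closing comment is the key design point: add_parameter_group registers each group through a factory, so config instances do not share one mutable default group object. A small sketch of the consequence, assuming DatasetManagerConfig carries the @attr.s decorator (possibly trimmed from this snippet):

cfg_a = DatasetManagerConfig()
cfg_b = DatasetManagerConfig()
cfg_a.subset_parameters.train_proportion = 0.6
assert cfg_b.subset_parameters.train_proportion == 0.75        # default unaffected
assert cfg_a.subset_parameters is not cfg_b.subset_parameters  # independent group objects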