Example #1
    def test_id(self):
        """
        <b>Description:</b>
        Check ID class object initialization

        <b>Input data:</b>
        ID object with specified representation parameter

        <b>Expected results:</b>
        Test passes if ID object representation property and __repr__ method return expected values

        <b>Steps</b>
        1. Check representation property and __repr__ method for ID object with no representation parameter specified
        2. Check representation property and __repr__ method for ID object with ObjectId class representation parameter
        3. Check representation property and __repr__ method for ID object with str type representation parameter
        """
        # Scenario for ID object with no representation parameter specified
        no_representation_id = ID()
        assert no_representation_id.representation == ""
        assert repr(no_representation_id) == "ID()"
        # Scenario for ID object with ObjectId class representation parameter
        expected_oid = "61a8b869fb7665916a39eb95"
        oid_representation = ObjectId(expected_oid)
        oid_representation_id = ID(oid_representation)
        assert oid_representation_id.representation == "61a8b869fb7665916a39eb95"
        assert (repr(oid_representation_id) ==
                "ID(61a8b869fb7665916a39eb95)")
        # Scenario for ID object with str-type representation parameter
        str_representation = " String-type representation ID_1 "
        str_representation_id = ID(str_representation)
        # Leading and trailing whitespace should be removed and uppercase letters replaced with lowercase
        assert str_representation_id.representation == "string-type representation id_1"
        assert repr(
            str_representation_id) == "ID(string-type representation id_1)"
    def test_annotation_scene_entity_get_label_ids(self):
        """
        <b>Description:</b>
        Check AnnotationSceneEntity get_label_ids method

        <b>Input data:</b>
        Initialized instance of AnnotationSceneEntity

        <b>Expected results:</b>
        Test passes if AnnotationSceneEntity get_label_ids method returns correct values

        <b>Steps</b>
        1. Create AnnotationSceneEntity instances
        2. Check the return value of the get_label_ids method
        """

        annotation_scene_entity = self.annotation_scene_entity

        assert annotation_scene_entity.get_label_ids() == {ID()}

        bus = LabelEntity(id=ID(123456789),
                          name="bus",
                          domain=Domain.DETECTION)
        bus_label = ScoredLabel(bus)
        labels = [bus_label]
        annotation = Annotation(shape=self.rectangle, labels=labels)
        annotation_scene_entity.append_annotation(annotation)

        assert annotation_scene_entity.get_label_ids() == {ID(), ID(123456789)}
    def test_annotation_scene_entity_setters(self):
        """
        <b>Description:</b>
        Check that AnnotationSceneEntity correctly returns modified property values

        <b>Input data:</b>
        AnnotationSceneEntity class

        <b>Expected results:</b>
        Test passes if the AnnotationSceneEntity returns the correct values

        <b>Steps</b>
        1. Create AnnotationSceneEntity instances
        2. Set other values
        3. Check changed values
        """

        annotation_scene_entity = self.annotation_scene_entity

        creation_date = self.creation_date
        annotation_scene_entity.id = ID(123456789)
        annotation_scene_entity.kind = AnnotationSceneKind.PREDICTION
        annotation_scene_entity.editor_name = "editor"
        annotation_scene_entity.creation_date = creation_date
        annotation_scene_entity.annotations = self.annotation

        assert annotation_scene_entity.id == ID(123456789)
        assert annotation_scene_entity.kind == AnnotationSceneKind.PREDICTION
        assert annotation_scene_entity.editor_name == "editor"
        assert annotation_scene_entity.creation_date == creation_date
        assert annotation_scene_entity.annotations == self.annotation
Example #4
def environment():
    """
    Return TaskEnvironment
    """
    car = LabelEntity(id=ID(123456789),
                      name="car",
                      domain=Domain.DETECTION,
                      is_empty=True)
    person = LabelEntity(id=ID(987654321),
                         name="person",
                         domain=Domain.DETECTION,
                         is_empty=True)
    labels_list = [car, person]
    dummy_template = __get_path_to_file("./dummy_template.yaml")
    model_template = parse_model_template(dummy_template)
    hyper_parameters = model_template.hyper_parameters.data
    params = ote_config_helper.create(hyper_parameters)
    labels_schema = LabelSchemaEntity.from_labels(labels_list)
    environment = TaskEnvironment(
        model=None,
        hyper_parameters=params,
        label_schema=labels_schema,
        model_template=model_template,
    )
    return environment
Example #5
    def test_task_environment(self):
        """
        <b>Description:</b>
        Check that the TaskEnvironment correctly returns its values

        <b>Input data:</b>
        Dummy data

        <b>Expected results:</b>
        Test passes if incoming data is processed correctly

        <b>Steps</b>
        1. Use an already created dummy environment
        2. Check class fields
        """

        env = environment()
        __dummy_config = dummy_config()

        assert env == TaskEnvironment(
            model=None,
            model_template=env.model_template,
            hyper_parameters=env.get_hyper_parameters(),
            label_schema=env.label_schema,
        )
        assert isinstance(env, TaskEnvironment)
        assert env != "Fail params"
        assert env.get_labels() == []

        for i in ["header", "description", "visible_in_ui"]:
            assert (getattr(
                env.get_model_configuration().configurable_parameters,
                i) == __dummy_config[i])

        assert env.get_model_configuration().configurable_parameters.id == ID()

        for param in __dummy_config:
            assert (getattr(env.get_hyper_parameters(),
                            param) == __dummy_config[param])

        assert env.get_hyper_parameters().id == ID()

        assert "model=None" in repr(env)
        assert "label_schema=LabelSchemaEntity(label_groups=[LabelGroup(id=" in repr(
            env)
        assert "name=from_label_list" in repr(env)
        assert "group_type=LabelGroupType.EXCLUSIVE" in repr(env)
        assert (
            "labels=[LabelEntity(123456789, name=car, hotkey=ctrl+0, domain=DETECTION"
            in repr(env))
        assert (
            "LabelEntity(987654321, name=person, hotkey=ctrl+0, domain=DETECTION"
            in repr(env))
        assert (
            "CONFIGURABLE_PARAMETERS(header='Configuration for an object detection task -- TEST ONLY'"
            in repr(env))
        assert (
            "description='Configuration for an object detection task -- TEST ONLY'"
            in repr(env))
        assert "visible_in_ui=True" in repr(env)
        assert "id=ID()" in repr(env)
    def optional_result_media_parameters(self) -> dict:
        optional_result_media_parameters = self.default_result_media_parameters()
        roi_label = LabelEntity(
            "ROI label",
            Domain.DETECTION,
            Color(10, 200, 40),
            creation_date=datetime.datetime(year=2021, month=12, day=18),
            id=ID("roi_label_1"),
        )
        roi = Annotation(
            shape=Rectangle(x1=0.3, y1=0.2, x2=0.7, y2=0.6),
            labels=[ScoredLabel(roi_label)],
            id=ID("roi_annotation"),
        )
        result_media_label = LabelEntity(
            "ResultMedia label",
            Domain.CLASSIFICATION,
            Color(200, 60, 100),
            creation_date=datetime.datetime(year=2021, month=12, day=20),
            id=ID("result_media_1"),
        )
        optional_result_media_parameters["roi"] = roi
        optional_result_media_parameters["label"] = result_media_label
        return optional_result_media_parameters
    @staticmethod
    def default_result_media_parameters() -> dict:
        rectangle_label = LabelEntity(
            name="Rectangle Annotation Label",
            domain=Domain.DETECTION,
            color=Color(100, 200, 60),
            creation_date=datetime.datetime(year=2021, month=12, day=16),
            id=ID("rectangle_label_1"),
        )
        rectangle_annotation = Annotation(
            shape=Rectangle(x1=0.1, y1=0.4, x2=0.4, y2=0.9),
            labels=[ScoredLabel(rectangle_label)],
            id=ID("rectangle_annotation"),
        )
        annotation_scene = AnnotationSceneEntity(
            annotations=[rectangle_annotation],
            kind=AnnotationSceneKind.ANNOTATION,
            creation_date=datetime.datetime(year=2021, month=12, day=16),
            id=ID("annotation_scene"),
        )
        return {
            "name": "ResultMedia name",
            "type": "Test ResultMedia",
            "annotation_scene": annotation_scene,
            "numpy": RANDOM_IMAGE,
        }
Example #8
    def test_dataset_item_append_annotations(self):
        """
        <b>Description:</b>
        Check DatasetItemEntity class "append_annotations" method

        <b>Input data:</b>
        DatasetItemEntity class object with specified "media", "annotation_scene", "roi", "metadata" and "subset"
        parameters

        <b>Expected results:</b>
        Test passes if annotations list returned after "append_annotations" method is equal to expected

        <b>Steps</b>
        1. Check annotations list returned after "append_annotations" method with specified non-included annotations
        2. Check annotations list returned after "append_annotations" method with incorrect shape annotation
        """
        # Checking annotations list returned after "append_annotations" method with specified non-included annotations
        dataset_item = DatasetItemParameters().default_values_dataset_item()
        full_box_annotations = list(dataset_item.annotation_scene.annotations)
        annotations_to_add = self.annotations_to_add()
        normalized_annotations = []
        for annotation in annotations_to_add:
            normalized_annotations.append(
                Annotation(
                    shape=annotation.shape.normalize_wrt_roi_shape(
                        dataset_item.roi.shape),
                    labels=annotation.get_labels(),
                ))
        dataset_item.append_annotations(annotations_to_add)
        # Random id is generated for normalized annotations
        normalized_annotations[
            0].id = dataset_item.annotation_scene.annotations[2].id
        normalized_annotations[
            1].id = dataset_item.annotation_scene.annotations[3].id
        assert (
            dataset_item.annotation_scene.annotations == full_box_annotations +
            normalized_annotations)
        # Checking annotations list returned after "append_annotations" method with incorrect shape annotation
        incorrect_shape_label = LabelEntity(
            name="Label for incorrect shape",
            domain=Domain.CLASSIFICATION,
            color=Color(red=80, green=70, blue=155),
            id=ID("incorrect_shape_label"),
        )
        incorrect_polygon = Polygon(
            [Point(x=0.01, y=0.1),
             Point(x=0.35, y=0.1),
             Point(x=0.35, y=0.1)])
        incorrect_shape_annotation = Annotation(
            shape=incorrect_polygon,
            labels=[ScoredLabel(incorrect_shape_label)],
            id=ID("incorrect_shape_annotation"),
        )
        dataset_item.append_annotations([incorrect_shape_annotation])
        assert (
            dataset_item.annotation_scene.annotations == full_box_annotations +
            normalized_annotations)
Example #9
    def annotations_to_add(self) -> List[Annotation]:
        labels_to_add = self.labels_to_add()
        annotation_to_add = Annotation(
            shape=Rectangle(x1=0.1, y1=0.1, x2=0.7, y2=0.8),
            labels=[ScoredLabel(label=labels_to_add[0])],
            id=ID("added_annotation_1"),
        )
        other_annotation_to_add = Annotation(
            shape=Rectangle(x1=0.2, y1=0.3, x2=0.8, y2=0.9),
            labels=[ScoredLabel(label=labels_to_add[1])],
            id=ID("added_annotation_2"),
        )
        return [annotation_to_add, other_annotation_to_add]
Example #10
def convert_string_to_id(id_string: Optional[Union[str, ID]]) -> ID:
    """
    This function converts an input string representing an ID into an OTE SDK ID object.
    Inputs that are already in the form of an ID are left untouched.

    :param id_string: string, ID or None object that should be converted to an ID
    :return: the input as an instance of ID
    """
    if id_string is None:
        output_id = ID()
    elif isinstance(id_string, str):
        output_id = ID(id_string)
    else:
        output_id = id_string
    return output_id
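
A minimal usage sketch of the three branches above; the "dummy_id" value is made up for illustration, and the ID import path is assumed to match the OTE SDK layout used by the other snippets.

from ote_sdk.entities.id import ID

# None becomes an empty ID, plain strings are wrapped, existing IDs pass through.
assert convert_string_to_id(None) == ID()
assert convert_string_to_id("dummy_id") == ID("dummy_id")
assert convert_string_to_id(ID("dummy_id")) == ID("dummy_id")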
Example #11
    def annotations(self) -> List[Annotation]:
        labels = self.labels()
        rectangle = Rectangle(x1=0.2, y1=0.2, x2=0.6, y2=0.7)
        other_rectangle = Rectangle(x1=0.3, y1=0.2, x2=0.9, y2=0.9)
        detection_annotation = Annotation(
            shape=rectangle,
            labels=[ScoredLabel(label=labels[0])],
            id=ID("detection_annotation_1"),
        )
        segmentation_annotation = Annotation(
            shape=other_rectangle,
            labels=[ScoredLabel(label=labels[1])],
            id=ID("segmentation_annotation_1"),
        )
        return [detection_annotation, segmentation_annotation]
Example #12
    def annotations_entity(self) -> AnnotationSceneEntity:
        return AnnotationSceneEntity(
            annotations=self.annotations(),
            kind=AnnotationSceneKind.ANNOTATION,
            creation_date=datetime.datetime(year=2021, month=12, day=19),
            id=ID("annotation_entity_1"),
        )
    def test_label_entity_serialization(self):
        """
        This test serializes LabelEntity and checks serialized representation.
        Then it compares deserialized LabelEntity with original one.
        """

        cur_date = now()
        red = randint(0, 255)  # nosec
        green = randint(0, 255)  # nosec
        blue = randint(0, 255)  # nosec
        alpha = randint(0, 255)  # nosec

        label = LabelEntity(
            name="my_label",
            domain=Domain.DETECTION,
            color=Color(red, green, blue, alpha),
            hotkey="ctrl+1",
            creation_date=cur_date,
            is_empty=False,
            id=ID("0000213"),
        )
        serialized = LabelMapper.forward(label)

        assert serialized == {
            "_id": "0000213",
            "name": "my_label",
            "color": {"red": red, "green": green, "blue": blue, "alpha": alpha},
            "hotkey": "ctrl+1",
            "domain": "DETECTION",
            "creation_date": DatetimeMapper.forward(cur_date),
            "is_empty": False,
        }

        deserialized = LabelMapper.backward(serialized)
        assert label == deserialized
    def convert_to_annotation(
            self,
            predictions: np.ndarray,
            metadata: Optional[Dict] = None) -> AnnotationSceneEntity:
        """
        Converts a set of predictions into an AnnotationScene object

        :param predictions: Prediction with shape [num_predictions, 6] or
                            [num_predictions, 7]
        Supported detection formats are

        * [label, confidence, x1, y1, x2, y2]
        * [_, label, confidence, x1, y1, x2, y2]

        .. note::
           `label` can be any integer that can be mapped to `self.labels`
           `confidence` should be a value between 0 and 1
           `x1`, `x2`, `y1` and `y2` are expected to be normalized.

        :returns AnnotationScene: AnnotationScene Object containing the boxes
                                  obtained from the prediction
        """
        annotations = self.__convert_to_annotations(predictions)
        # media_identifier = ImageIdentifier(image_id=ID())
        annotation_scene = AnnotationSceneEntity(
            id=ID(),
            kind=AnnotationSceneKind.PREDICTION,
            editor="ote",
            creation_date=now(),
            annotations=annotations,
        )

        return annotation_scene
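
To make the documented input layout concrete, here is a small illustrative sketch of a predictions array; the values are made up.

import numpy as np

# Each row follows [label, confidence, x1, y1, x2, y2] with normalized coordinates;
# the 7-column variant carries one extra, ignored leading column.
predictions = np.array([
    [0, 0.95, 0.10, 0.20, 0.40, 0.60],
    [1, 0.80, 0.55, 0.30, 0.90, 0.85],
])
assert predictions.shape == (2, 6)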
    def __init__(self,
                 shape: ShapeEntity,
                 labels: List[ScoredLabel],
                 id: Optional[ID] = None):
        self.__id = ID(ObjectId()) if id is None else id
        self.__shape = shape
        self.__labels = labels
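
A short sketch of the default-id behaviour of this constructor, assuming the class also exposes an id property over self.__id (as the tests above rely on) and that the usual ote_sdk imports are in scope; the "fixed_id" value is made up.

# With no id supplied, each Annotation gets a fresh ObjectId-backed ID.
first = Annotation(shape=Rectangle.generate_full_box(), labels=[])
second = Annotation(shape=Rectangle.generate_full_box(), labels=[])
assert first.id != second.id

# An explicitly passed id is kept as-is.
fixed = Annotation(shape=Rectangle.generate_full_box(), labels=[], id=ID("fixed_id"))
assert fixed.id == ID("fixed_id")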
    def test_annotation_scene_entity_default_value(self):
        """
        <b>Description:</b>
        Check AnnotationSceneEntity default values

        <b>Input data:</b>
        AnnotationSceneEntity class

        <b>Expected results:</b>
        Test passes if the AnnotationSceneEntity returns the correct values

        <b>Steps</b>
        1. Create AnnotationSceneEntity instances
        2. Check default values
        """

        annotation_scene_entity = self.annotation_scene_entity

        assert annotation_scene_entity.id == ID()
        assert annotation_scene_entity.kind == AnnotationSceneKind.ANNOTATION
        assert annotation_scene_entity.editor_name == ""
        assert type(annotation_scene_entity.creation_date) == datetime.datetime
        assert "Annotation(shape=Rectangle" in str(
            annotation_scene_entity.annotations)
        assert "Annotation(shape=Polygon" in str(
            annotation_scene_entity.annotations)
        assert annotation_scene_entity.shapes == [self.rectangle, self.polygon]
Example #17
    @staticmethod
    def labels_to_add() -> List[LabelEntity]:
        label_to_add = LabelEntity(
            name="Label which will be added",
            domain=Domain.DETECTION,
            color=Color(red=60, green=120, blue=70),
            creation_date=datetime.datetime(year=2021, month=12, day=12),
            id=ID("label_to_add_1"),
        )
        other_label_to_add = LabelEntity(
            name="Other label to add",
            domain=Domain.SEGMENTATION,
            color=Color(red=80, green=70, blue=100),
            creation_date=datetime.datetime(year=2021, month=12, day=11),
            is_empty=True,
            id=ID("label_to_add_2"),
        )
        return [label_to_add, other_label_to_add]
    def test_scored_label(self):
        """
        <b>Description:</b>
        Check the ScoredLabel can correctly return the value

        <b>Input data:</b>
        LabelEntity

        <b>Expected results:</b>
        Test passes if the results match
        """
        car = LabelEntity(id=ID(123456789),
                          name="car",
                          domain=Domain.DETECTION,
                          is_empty=True)
        person = LabelEntity(id=ID(987654321),
                             name="person",
                             domain=Domain.DETECTION,
                             is_empty=True)
        car_label = ScoredLabel(car)
        person_label = ScoredLabel(person)

        for attr in [
                "id", "name", "color", "hotkey", "creation_date", "is_empty"
        ]:
            assert getattr(car_label, attr) == getattr(car, attr)

        assert car_label.get_label() == car
        assert car_label == ScoredLabel(car)
        assert car_label != car
        assert car_label != person_label
        assert hash(car_label) == hash(str(car_label))

        probability = 0.0
        assert car_label.probability == probability
        delta_probability = 0.4
        probability += delta_probability
        car_label.probability += delta_probability
        assert car_label.probability == probability

        car.color = Color(red=16, green=15, blue=56, alpha=255)
        assert (
            "ScoredLabel(123456789, name=car, probability=0.4, domain=DETECTION, color="
            in repr(car_label))
        assert "Color(red=16, green=15, blue=56, alpha=255), hotkey=ctrl+0)" in repr(
            car_label)
    def __init__(self) -> None:
        super().__init__(
            id=ID(),
            kind=AnnotationSceneKind.NONE,
            editor="",
            creation_date=datetime.datetime.now(),
            annotations=[],
        )
    def test_serialized_representation(self):
        """
        This test serializes ID and checks serialized representation.
        """

        id = ID("21434231456")
        serialized_id = IDMapper.forward(id)
        assert serialized_id == "21434231456"
    @staticmethod
    def rectangle_labels() -> list:
        rectangle_label = LabelEntity(
            name="Rectangle label",
            domain=Domain.DETECTION,
            color=Color(red=100, green=50, blue=200),
            id=ID("rectangle_label_1"),
        )
        other_rectangle_label = LabelEntity(
            name="Other rectangle label",
            domain=Domain.SEGMENTATION,
            color=Color(red=200, green=80, blue=100),
            id=ID("rectangle_label_2"),
        )
        return [
            ScoredLabel(label=rectangle_label),
            ScoredLabel(label=other_rectangle_label),
        ]
Example #22
    @staticmethod
    def roi_labels() -> List[LabelEntity]:
        creation_date = datetime.datetime(year=2021, month=12, day=9)
        roi_label = LabelEntity(
            name="ROI label",
            domain=Domain.DETECTION,
            color=Color(red=40, green=180, blue=80),
            creation_date=creation_date,
            id=ID("roi_label_1"),
        )
        other_roi_label = LabelEntity(
            name="Second ROI label",
            domain=Domain.SEGMENTATION,
            color=Color(red=80, green=90, blue=70),
            creation_date=creation_date,
            is_empty=True,
            id=ID("roi_label_2"),
        )
        return [roi_label, other_roi_label]
Example #23
    @staticmethod
    def labels() -> List[LabelEntity]:
        creation_date = datetime.datetime(year=2021, month=12, day=9)
        detection_label = LabelEntity(
            name="Label for Detection",
            domain=Domain.DETECTION,
            color=Color(red=100, green=200, blue=150),
            creation_date=creation_date,
            id=ID("detection_label"),
        )
        segmentation_label = LabelEntity(
            name="Label for Segmentation",
            domain=Domain.DETECTION,
            color=Color(red=50, green=80, blue=200),
            creation_date=creation_date,
            is_empty=True,
            id=ID("segmentation_label"),
        )
        return [detection_label, segmentation_label]
    def test_serialization_deserialization(self):
        """
        This test serializes ID, deserializes serialized ID and compare with original.
        """

        id = ID("21434231456")
        serialized_id = IDMapper.forward(id)
        deserialized_id = IDMapper.backward(serialized_id)
        assert id == deserialized_id
    def __init__(
        self,
        name: str,
        labels: Sequence[LabelEntity],
        group_type: LabelGroupType = LabelGroupType.EXCLUSIVE,
        id: ID = None,
    ):
        self.id = ID(ObjectId()) if id is None else id

        self.labels = sorted(labels, key=lambda x: x.id)
        self.name = name
        self.group_type = group_type
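
A brief sketch of the constructor behaviour above (default EXCLUSIVE group type, labels sorted by id, auto-generated ObjectId-backed id); the label names and ids are made up for illustration, and the ote_sdk entities are assumed to be in scope as in the surrounding snippets.

dog = LabelEntity(name="dog", domain=Domain.CLASSIFICATION, id=ID("1"))
cat = LabelEntity(name="cat", domain=Domain.CLASSIFICATION, id=ID("2"))
group = LabelGroup(name="pets", labels=[cat, dog])

assert group.group_type == LabelGroupType.EXCLUSIVE  # default group type
assert [label.name for label in group.labels] == ["dog", "cat"]  # sorted by id
assert group.id != ID()  # a non-empty id was generated automatically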
def init_environment(params, model_template, number_of_images=10):
    resolution = (224, 224)
    colors = [(0, 255, 0), (0, 0, 255)]
    cls_names = ['b', 'g']
    texts = ['Blue', 'Green']
    env_labels = [
        LabelEntity(name=name,
                    domain=Domain.CLASSIFICATION,
                    is_empty=False,
                    id=ID(i)) for i, name in enumerate(cls_names)
    ]

    items = []

    for _ in range(0, number_of_images):
        for j, lbl in enumerate(env_labels):
            class_img = np.zeros((*resolution, 3), dtype=np.uint8)
            class_img[:] = colors[j]
            class_img = cv.putText(class_img, texts[j], (50, 50),
                                   cv.FONT_HERSHEY_SIMPLEX, .8 + j * .2,
                                   colors[j - 1], 2, cv.LINE_AA)

            image = Image(data=class_img)
            labels = [ScoredLabel(label=lbl, probability=1.0)]
            shapes = [Annotation(Rectangle.generate_full_box(), labels)]
            annotation_scene = AnnotationSceneEntity(
                kind=AnnotationSceneKind.ANNOTATION, annotations=shapes)
            items.append(
                DatasetItemEntity(media=image,
                                  annotation_scene=annotation_scene))

    rng = random.Random()
    rng.seed(100)
    rng.shuffle(items)
    for i, _ in enumerate(items):
        subset_region = i / number_of_images
        if subset_region >= 0.9:
            subset = Subset.TESTING
        elif subset_region >= 0.6:
            subset = Subset.VALIDATION
        else:
            subset = Subset.TRAINING
        items[i].subset = subset

    dataset = DatasetEntity(items)
    labels_schema = generate_label_schema(dataset.get_labels(),
                                          multilabel=False)
    environment = TaskEnvironment(model=None,
                                  hyper_parameters=params,
                                  label_schema=labels_schema,
                                  model_template=model_template)
    return environment, dataset
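
A hypothetical usage sketch for init_environment; the template path and helper calls mirror the environment() example earlier and are assumptions about how params and model_template would normally be obtained.

model_template = parse_model_template("./dummy_template.yaml")  # assumed template path
params = ote_config_helper.create(model_template.hyper_parameters.data)

env, dataset = init_environment(params, model_template, number_of_images=10)
assert isinstance(env, TaskEnvironment)
assert len(dataset) == 20  # number_of_images items per each of the two labels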
Example #27
def generate_label_schema(dataset, task_type):
    """
    Generates label schema depending on task type.
    """

    if task_type == TaskType.CLASSIFICATION and dataset.is_multilabel():
        not_empty_labels = dataset.get_labels()
        assert len(not_empty_labels) > 1
        label_schema = LabelSchemaEntity()
        empty_label = LabelEntity(
            name="Empty label", is_empty=True, domain=Domain.CLASSIFICATION
        )
        empty_group = LabelGroup(
            name="empty", labels=[empty_label], group_type=LabelGroupType.EMPTY_LABEL
        )
        single_groups = []
        for label in not_empty_labels:
            single_groups.append(
                LabelGroup(
                    name=label.name, labels=[label], group_type=LabelGroupType.EXCLUSIVE
                )
            )
            label_schema.add_group(single_groups[-1])
        label_schema.add_group(empty_group, exclusive_with=single_groups)
        return label_schema

    if task_type == TaskType.ANOMALY_CLASSIFICATION:
        return LabelSchemaEntity.from_labels(
            [
                LabelEntity(
                    name="Normal", domain=Domain.ANOMALY_CLASSIFICATION, id=ID(0)
                ),
                LabelEntity(
                    name="Anomalous", domain=Domain.ANOMALY_CLASSIFICATION, id=ID(1)
                ),
            ]
        )

    return LabelSchemaEntity.from_labels(dataset.get_labels())
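
A minimal usage sketch for the anomaly-classification branch, which never touches the dataset argument; TaskType and the label entities are assumed to be importable as in the snippet above.

schema = generate_label_schema(dataset=None, task_type=TaskType.ANOMALY_CLASSIFICATION)
label_names = {label.name for label in schema.get_labels(include_empty=False)}
assert label_names == {"Normal", "Anomalous"}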
    def test_model_entity_default_values(self):
        """
        <b>Description:</b>
        Check that ModelEntity correctly returns the default values

        <b>Expected results:</b>
        Test passes if ModelEntity correctly returns the default values

        <b>Steps</b>
        1. Check default values in the ModelEntity
        """

        model_entity = ModelEntity(train_dataset=self.dataset(),
                                   configuration=self.configuration())

        assert model_entity.id == ID()
        assert type(model_entity.configuration) == ModelConfiguration
        assert type(model_entity.creation_date) == datetime
        assert type(model_entity.train_dataset) == DatasetEntity
        assert model_entity.version == 1
        assert model_entity.model_status == ModelStatus.SUCCESS
        assert model_entity.model_format == ModelFormat.OPENVINO
        assert model_entity.precision == [ModelPrecision.FP32]
        assert model_entity.target_device == TargetDevice.CPU
        assert model_entity.optimization_type == ModelOptimizationType.NONE
        assert model_entity.performance == NullPerformance()

        for default_val_none in [
                "previous_trained_revision",
                "previous_revision",
                "target_device_type",
        ]:
            assert getattr(model_entity, default_val_none) is None

        for default_val_0_0 in ["training_duration", "model_size_reduction"]:
            assert getattr(model_entity, default_val_0_0) == 0.0

        for default_val_empty_list in ["tags", "optimization_methods"]:
            assert getattr(model_entity, default_val_empty_list) == []

        for default_val_empty_dict in [
                "model_adapters",
                "optimization_objectives",
                "performance_improvement",
        ]:
            assert getattr(model_entity, default_val_empty_dict) == {}

        for default_val_zero in ["latency", "fps_throughput"]:
            assert getattr(model_entity, default_val_zero) == 0

        assert model_entity.is_optimized() is False
    def __init__(
        self,
        annotations: List[Annotation],
        kind: AnnotationSceneKind,
        editor: str = "",
        creation_date: Optional[datetime.datetime] = None,
        id: Optional[ID] = None,
    ):
        self.__annotations = annotations
        self.__kind = kind
        self.__editor = editor
        self.__creation_date = now() if creation_date is None else creation_date
        self.__id = ID() if id is None else id
Example #30
    def roi(self):
        roi = Annotation(
            shape=Rectangle(
                x1=0.1,
                y1=0.1,
                x2=0.9,
                y2=0.9,
                modification_date=datetime.datetime(year=2021, month=12, day=9),
            ),
            labels=self.roi_scored_labels(),
            id=ID("roi_annotation"),
        )
        return roi