async def test__create_subscription_async__subscription_created_with_paths(self):
        path1 = "tag1"
        path2 = "tag2"
        token = uuid.uuid4()
        self._client.all_requests.configure_mock(
            side_effect=self._get_mock_request(token, {"subscriptionUpdates": []})
        )
        uut = HttpTagSelection(
            self._client, [tbase.TagData(path1), tbase.TagData(path2)]
        )

        async with await uut.create_subscription_async():
            pass

        assert self._client.all_requests.call_args_list == [
            mock.call(
                "POST",
                "/nitag/v2/subscriptions",
                params=None,
                data={"tags": AnyOrderList([path1, path2]), "updatesOnly": True},
            ),
            mock.call(
                "GET",
                "/nitag/v2/subscriptions/{id}/values/current",
                params={"id": token},
            ),
            mock.call("DELETE", "/nitag/v2/subscriptions/{id}", params={"id": token}),
        ]
    def test__update_metadata__queried_metadata_is_correct(self, generate_tag_path):
        tag = tbase.TagData(
            generate_tag_path(),
            tbase.DataType.DOUBLE,
            ["keyword1", "keyword2"],
            {"prop1": "value1", "prop2": "value2"},
        )

        self.tag_manager.update([tag])
        opened_tag = self.tag_manager.open(tag.path)

        assert opened_tag.collect_aggregates is False
        assert tbase.DataType.DOUBLE == opened_tag.data_type
        assert sorted(tag.keywords) == sorted(opened_tag.keywords)
        assert tag.path == opened_tag.path
        assert sorted(tag.properties.items()) == sorted(opened_tag.properties.items())
        assert opened_tag.retention_count is None
        assert opened_tag.retention_days is None
        assert tbase.RetentionType.NONE == opened_tag.retention_type

        tag.collect_aggregates = True
        tag.set_retention_count(30)
        tag.keywords.append("keyword3")
        tag.properties["prop1"] = "edited"
        self.tag_manager.update([tag])
        self.tag_manager.refresh([opened_tag])

        assert opened_tag.collect_aggregates is True
        assert tbase.DataType.DOUBLE == opened_tag.data_type
        assert sorted(tag.keywords) == sorted(opened_tag.keywords)
        assert tag.path == opened_tag.path
        assert sorted(tag.properties.items()) == sorted(opened_tag.properties.items())
        assert 30 == opened_tag.retention_count
        assert opened_tag.retention_days is None
        assert tbase.RetentionType.COUNT == opened_tag.retention_type
    def _update_values(
            self, values: List[Optional[SerializedTagWithAggregates]]) -> None:
        if self._values is None:
            self._values = {}

        # Drop cached values for paths that are absent from this update.
        missing_values = set(self._values.keys()).difference(v.path
                                                             for v in values
                                                             if v is not None)
        for missing_path in missing_values:
            del self._values[missing_path]

        for value in values:
            if value is None:
                continue

            self._values[value.path] = value

            # Track metadata for each path; a new value reader is created only when
            # the tag is new or its data type has changed.
            tag = self._metadata.get(value.path)
            if tag is None:
                tag = tbase.TagData(value.path, value.data_type)
                self._metadata[value.path] = tag
            elif tag.data_type != value.data_type:
                tag.data_type = value.data_type
            else:
                continue

            reader = self._create_value_reader(tag)
            if reader is not None:
                self._readers[value.path] = reader
    def test__open__creates_selection_and_loads_all_data_from_server(self):
        path1 = "tag1"
        path2 = "tag2"
        paths = [path1, path2]
        public_properties = {
            "prop1": "value1",
            "prop2": "value2",
        }
        all_properties = dict(public_properties)
        dummy_tag = tbase.TagData(path1)
        dummy_tag.retention_type = tbase.RetentionType.COUNT
        dummy_tag.retention_count = 7
        dummy_tag.retention_days = 9
        dummy_tag._copy_retention_properties(all_properties)
        keywords = ["keyword1", "keyword2"]
        tags = [
            {
                "path": path1,
                "type": "BOOLEAN",
                "keywords": keywords,
                "properties": all_properties,
                "collectAggregates": True,
            },
            {"path": path2, "type": "DOUBLE"},
        ]
        token = uuid.uuid4()
        self._client.all_requests.configure_mock(
            side_effect=self._get_mock_request(token, tags)
        )

        uut = HttpTagSelection.open(self._client, paths)

        assert self._client.all_requests.call_args_list == [
            mock.call(
                "POST",
                "/nitag/v2/selections",
                params=None,
                data={"searchPaths": AnyOrderList(paths)},
            ),
            mock.call("GET", "/nitag/v2/selections/{id}/tags", params={"id": token}),
        ]
        assert 2 == len(uut.metadata)
        assert tbase.DataType.BOOLEAN == uut.metadata[path1].data_type
        assert uut.metadata[path1].collect_aggregates is True
        assert keywords == sorted(uut.metadata[path1].keywords)
        assert sorted(public_properties.items()) == sorted(
            uut.metadata[path1].properties.items()
        )
        assert dummy_tag.retention_count == uut.metadata[path1].retention_count
        assert dummy_tag.retention_days == uut.metadata[path1].retention_days
        assert dummy_tag.retention_type == uut.metadata[path1].retention_type

        assert tbase.DataType.DOUBLE == uut.metadata[path2].data_type
        assert uut.metadata[path2].collect_aggregates is False
        assert 0 == len(uut.metadata[path2].keywords)
        assert 0 == len(uut.metadata[path2].properties)
        assert uut.metadata[path2].retention_count is None
        assert uut.metadata[path2].retention_days is None
        assert tbase.RetentionType.NONE == uut.metadata[path2].retention_type
    async def test__no_selection_created_by_constructor__close_async__no_server_calls(
        self,
    ):
        uut = HttpTagSelection(self._client, [tbase.TagData("tag")])
        await uut.close_async()
        await uut.close_async()
        uut.close()
        assert self._client.all_requests.call_count == 0
    def test__run_queries__results_are_correct(self, generate_tag_paths):
        num_tags = 10
        even_paths = []
        odd_paths = []
        paths, path_prefix = generate_tag_paths(num_tags)
        tags = [tbase.TagData(p, tbase.DataType.INT32) for p in paths]

        for x, tag in enumerate(tags):
            odd_even = ""

            if x % 2 == 0:
                odd_even = "even"
                even_paths.append(tag.path)
            else:
                odd_even = "odd"
                odd_paths.append(tag.path)

            tag.keywords.append(odd_even)
            tag.properties["index"] = str(x)
            tag.properties["oddEven"] = odd_even

        self.tag_manager.update(tags)

        # Path query
        half_paths = paths[: num_tags // 2]
        self.internal_test_query_result(
            self.tag_manager.query(half_paths), half_paths, num_tags // 2
        )

        # Wildcard query
        self.internal_test_query_result(
            self.tag_manager.query([path_prefix + "*"]), paths, len(paths)
        )

        # Keyword query
        self.internal_test_query_result(
            self.tag_manager.query(paths, keywords=["odd"], properties=None),
            odd_paths,
            len(odd_paths),
        )

        # Property query
        self.internal_test_query_result(
            self.tag_manager.query(
                paths, keywords=None, properties={"oddEven": "even"}
            ),
            even_paths,
            len(even_paths),
        )

        # Pages
        self.internal_test_query_result(
            self.tag_manager.query(paths, skip=1, take=2),
            paths,
            page_size=2,
            skip=1,
            expected_pages=math.ceil((num_tags - 1) / 2.0),
        )
    def _get_tag_reader(self, path: str,
                        data_type: tbase.DataType) -> "tbase.TagValueReader":
        """Get a :class:`TagValueReader` for this path.

        Args:
            path: The path of the tag to read.
            data_type: The data type of the value to read.
        """
        return tbase.TagValueReader(self, tbase.TagData(path, data_type))
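
# Hedged usage sketch (not part of the original source): it reuses the tbase alias and
# the TagValueReader(manager, TagData(...)) pairing shown above; the read() call and
# the .value attribute are assumptions about the reader API, not defined in this snippet.
def example_read_current_double(manager, path):
    reader = tbase.TagValueReader(manager, tbase.TagData(path, tbase.DataType.DOUBLE))
    current = reader.read()  # assumed to return None when the tag has no value yet
    return None if current is None else current.value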
    async def test__move_next_page_async__current_page_has_data_for_second_page(self):
        path1 = "tag1"
        path2 = "tag2"
        total_count = 3
        public_properties = {
            "prop1": "value1",
            "prop2": "value2",
        }
        all_properties = dict(public_properties)
        dummy_tag = tbase.TagData(path1)
        dummy_tag.retention_type = tbase.RetentionType.COUNT
        dummy_tag.retention_count = 7
        dummy_tag.retention_days = 9
        dummy_tag._copy_retention_properties(all_properties)
        keywords = ["keyword1", "keyword2"]
        response = {
            "totalCount": total_count,
            "tags": [{"type": "DOUBLE", "path": path1}],
        }

        self._client.all_requests.configure_mock(
            side_effect=self._get_mock_request(
                [
                    {
                        "totalCount": total_count,
                        "tags": [
                            {
                                "type": "U_INT64",
                                "properties": all_properties,
                                "path": path2,
                                "keywords": keywords,
                                "collectAggregates": True,
                            }
                        ],
                    }
                ]
            )
        )

        uut = HttpAsyncTagQueryResultCollection(
            self._client, None, None, None, 0, 1, response, None
        )
        await uut.move_next_page_async()

        assert total_count == uut.total_count
        assert 1 == len(uut.current_page)
        assert path2 == uut.current_page[0].path
        assert tbase.DataType.UINT64 == uut.current_page[0].data_type
        assert uut.current_page[0].collect_aggregates is True
        assert keywords == sorted(uut.current_page[0].keywords)
        assert sorted(public_properties.items()) == sorted(
            uut.current_page[0].properties.items()
        )
        assert dummy_tag.retention_count == uut.current_page[0].retention_count
        assert dummy_tag.retention_days == uut.current_page[0].retention_days
        assert dummy_tag.retention_type == uut.current_page[0].retention_type
    def test__constructed__first_page_includes_data_from_query(self):
        path1 = "tag1"
        path2 = "tag2"
        total_count = 3
        public_properties = {
            "prop1": "value1",
            "prop2": "value2",
        }
        all_properties = dict(public_properties)
        dummy_tag = tbase.TagData(path1)
        dummy_tag.retention_type = tbase.RetentionType.COUNT
        dummy_tag.retention_count = 7
        dummy_tag.retention_days = 9
        dummy_tag._copy_retention_properties(all_properties)
        keywords = ["keyword1", "keyword2"]
        response = {
            "totalCount": total_count,
            "tags": [
                {
                    "type": "BOOLEAN",
                    "properties": all_properties,
                    "path": path1,
                    "keywords": keywords,
                    "collectAggregates": True,
                },
                {"type": "DOUBLE", "path": path2},
            ],
        }

        uut = HttpAsyncTagQueryResultCollection(
            self._client, None, None, None, 0, 2, response, None
        )
        assert total_count == uut.total_count
        assert 2 == len(uut.current_page)
        assert path1 == uut.current_page[0].path
        assert tbase.DataType.BOOLEAN == uut.current_page[0].data_type
        assert uut.current_page[0].collect_aggregates is True
        assert keywords == sorted(uut.current_page[0].keywords)
        assert sorted(public_properties.items()) == sorted(
            uut.current_page[0].properties.items()
        )
        assert dummy_tag.retention_count == uut.current_page[0].retention_count
        assert dummy_tag.retention_days == uut.current_page[0].retention_days
        assert dummy_tag.retention_type == uut.current_page[0].retention_type

        assert path2 == uut.current_page[1].path
        assert tbase.DataType.DOUBLE == uut.current_page[1].data_type
        assert uut.current_page[1].collect_aggregates is False
        assert 0 == len(uut.current_page[1].keywords)
        assert 0 == len(uut.current_page[1].properties)
        assert uut.current_page[1].retention_count is None
        assert uut.current_page[1].retention_days is None
        assert tbase.RetentionType.NONE == uut.current_page[1].retention_type
    def test__write_read_bool_tag__values_are_correct(self, generate_tag_path):
        with self.tag_manager.create_writer(buffer_size=1) as writer:
            tag = tbase.TagData(generate_tag_path(), tbase.DataType.BOOLEAN)
            tag.collect_aggregates = True

            self.tag_manager.update([tag])
            self.internal_test_write_and_read_tag(
                tag,
                writer,
                tbase.TagValueReader(self.tag_manager, tag),
                value_to_write=True,
            )
        def fn():
            # Create a tag of each data type.
            data_types = [
                t for t in tbase.DataType if t != tbase.DataType.UNKNOWN
            ]
            paths, prefix = generate_tag_paths(len(data_types))
            tags = [tbase.TagData(p, t) for (p, t) in zip(paths, data_types)]
            for tag in tags:
                tag.collect_aggregates = True

            request.cls.tag_manager.update(tags)
            return tags, prefix
    def test__write_read_date_time_tag__values_are_correct(self, generate_tag_path):
        with self.tag_manager.create_writer(buffer_size=1) as writer:
            tag = tbase.TagData(generate_tag_path(), tbase.DataType.DATE_TIME)
            tag.collect_aggregates = True

            self.tag_manager.update([tag])
            self.internal_test_write_and_read_tag(
                tag,
                writer,
                tbase.TagValueReader(self.tag_manager, tag),
                value_to_write=datetime.now(timezone.utc),
            )
    def test__no_selection_created_by_constructor__refresh_metadata__tags_queried(self):
        path1 = "tag1"
        path2 = "tag2"
        token = uuid.uuid4()
        self._client.all_requests.configure_mock(
            side_effect=self._get_mock_request(token, [])
        )

        uut = HttpTagSelection(
            self._client, [tbase.TagData(path1), tbase.TagData(path2)]
        )
        uut.refresh_metadata()

        assert self._client.all_requests.call_args_list == [
            mock.call(
                "POST",
                "/nitag/v2/selections",
                params=None,
                data={"searchPaths": AnyOrderList([path1, path2])},
            ),
            mock.call("GET", "/nitag/v2/selections/{id}/tags", params={"id": token}),
        ]
    async def test__no_selection_created_by_constructor__async_api_function_called__selection_created(
        self,
    ):
        path1 = "tag1"
        path2 = "tag2"
        token = uuid.uuid4()
        self._client.all_requests.configure_mock(
            side_effect=self._get_mock_request(token, AssertionError())
        )

        uut = HttpTagSelection(
            self._client, [tbase.TagData(path1), tbase.TagData(path2)]
        )
        await uut.delete_tags_from_server_async()

        assert self._client.all_requests.call_args_list == [
            mock.call(
                "POST",
                "/nitag/v2/selections",
                params=None,
                data={"searchPaths": AnyOrderList([path1, path2])},
            ),
            mock.call("DELETE", "/nitag/v2/selections/{id}/tags", params={"id": token}),
        ]
def create_output_channel(mgr: sl_tag.TagManager, group: str, name: str,
                          data_type: sl_tag.DataType) -> sl_tag.TagData:
    """Create a FlexLogger output channel."""
    # "Import" the channel into FlexLogger.
    full_name = get_tag_prefix() + ".Import.Setpoint.{}.{}".format(group, name)
    mgr.open(full_name, data_type, create=True)

    # Once FlexLogger creates the channel, we'll interact with it as an "export" channel
    # (for both reads and writes).
    full_name = get_tag_prefix() + ".Export.Setpoint.{}".format(name)
    # Go ahead and pre-create the output channel, for ease-of-use. Otherwise, when
    # trying to read its value, we'd have to be prepared for an ApiException complaining
    # that the tag doesn't exist.
    mgr.open(full_name, data_type, create=True)
    return sl_tag.TagData(full_name, data_type)
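
# Hedged usage sketch (not part of the original sample): the group/channel names are
# illustrative, and constructing TagManager() with default settings plus the
# writer.write(path, data_type, value) call are assumptions about the SystemLink tag
# API; create_output_channel and create_writer(buffer_size=...) mirror their use here.
def example_publish_setpoint():
    mgr = sl_tag.TagManager()  # assumed: default/local connection configuration
    channel = create_output_channel(mgr, "Furnace", "Setpoint", sl_tag.DataType.DOUBLE)
    with mgr.create_writer(buffer_size=1) as writer:
        writer.write(channel.path, channel.data_type, 72.5)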
    def test__write_read_double_tag__values_are_correct(self, generate_tag_path):
        with self.tag_manager.create_writer(buffer_size=1) as writer:
            tag = tbase.TagData(generate_tag_path(), tbase.DataType.DOUBLE)
            tag.collect_aggregates = True

            self.tag_manager.update([tag])
            aggregates = self.internal_test_write_and_read_tag(
                tag,
                writer,
                tbase.TagValueReader(self.tag_manager, tag),
                value_to_write=math.pi,
            )

            assert math.pi == aggregates.min
            assert math.pi == aggregates.max
            assert math.pi == aggregates.mean
    def test__write_read_uint64_tag__values_are_correct(self, generate_tag_path):
        with self.tag_manager.create_writer(buffer_size=1) as writer:
            tag = tbase.TagData(generate_tag_path(), tbase.DataType.UINT64)
            tag.collect_aggregates = True

            value = 2 ** 64 - 1
            self.tag_manager.update([tag])
            aggregates = self.internal_test_write_and_read_tag(
                tag,
                writer,
                tbase.TagValueReader(self.tag_manager, tag),
                value_to_write=value,
            )

            assert value == aggregates.min
            assert value == aggregates.max
            assert math.isclose(value, aggregates.mean)
    def __handle_query_response(
            self, response: Dict[str, Any],
            http_response: HttpResponse) -> Tuple[List[tbase.TagData], int]:
        if response.get("totalCount") is None:
            raise tbase.TagManager.invalid_response(http_response)

        tags = []
        for t in response["tags"]:
            tags.append(
                tbase.TagData(
                    t["path"],
                    tbase.DataType.from_api_name(t["type"])
                    if t["type"] else None,
                    t.get("keywords"),
                    t.get("properties"),
                ))
            if t.get("collectAggregates"):
                tags[-1].collect_aggregates = True
        return tags, response["totalCount"]
    def test__uint64_aggregates(self, generate_tag_path):
        min = random.randrange(0, 2**31)
        max = random.randrange(2**31, 2**32)
        mean = (min + max + 2**31) / 3.0

        with self.tag_manager.create_writer(buffer_size=3) as writer:
            tag = tbase.TagData(generate_tag_path(), tbase.DataType.UINT64)
            tag.collect_aggregates = True

            self.tag_manager.update([tag])
            aggregates = self.internal_test_numeric_aggregates(
                tag,
                writer,
                tbase.TagValueReader(self.tag_manager, tag),
                mean,
                (max, min, 2**31),
            )

            assert min == aggregates.min
            assert max == aggregates.max
    def test__set_int_values__aggregate_values_are_correct(self, generate_tag_path):
        min = random.randrange(-(2 ** 31), 0)
        max = random.randrange(0, 2 ** 31 - 1)
        mean = (min + max) / 3.0

        with self.tag_manager.create_writer(buffer_size=3) as writer:
            tag = tbase.TagData(generate_tag_path(), tbase.DataType.INT32)
            tag.collect_aggregates = True

            self.tag_manager.update([tag])
            aggregates = self.internal_test_numeric_aggregates(
                tag,
                writer,
                tbase.TagValueReader(self.tag_manager, tag),
                mean,
                (min, 0, max),
            )

            assert min == aggregates.min
            assert max == aggregates.max
    def test__set_double_values__aggregate_values_are_correct(self, generate_tag_path):
        min = -random.random() * sys.float_info.max / 2
        max = random.random() * sys.float_info.max / 2
        mean = (min + max) / 3

        with self.tag_manager.create_writer(buffer_size=3) as writer:
            tag = tbase.TagData(generate_tag_path(), tbase.DataType.DOUBLE)
            tag.collect_aggregates = True

            self.tag_manager.update([tag])
            aggregates = self.internal_test_numeric_aggregates(
                tag,
                writer,
                tbase.TagValueReader(self.tag_manager, tag),
                mean,
                (0.0, max, min),
            )

            assert math.isclose(min, aggregates.min), (min, aggregates.min)
            assert math.isclose(max, aggregates.max), (max, aggregates.max)
    def test__metadata_supplied__constructed__no_server_queries(self):
        tags = [tbase.TagData("tag", tbase.DataType.BOOLEAN)]
        uut = HttpTagSelection(self._client, tags)
        assert tags == list(uut.metadata.values())
        assert self._client.all_requests.call_count == 0
    async def open_async(
        self,
        path: str,
        data_type: Optional[tbase.DataType] = None,
        *,
        create: Optional[bool] = None
    ) -> tbase.TagData:
        """Asynchronously query the server for the metadata of a tag, optionally
        creating it if it doesn't already exist.

        The call fails if the tag already exists as a different data type than specified
        or if it doesn't exist and ``create`` is False.

        Args:
            path: The path of the tag to open.
            data_type: The expected data type of the tag.
            create: True to create the tag if it doesn't already exist, False to fail if
                it doesn't exist. Defaults to True when ``data_type`` is specified and
                False otherwise.

        Returns:
            The :class:`TagData` describing the opened (or newly created) tag.

        Raises:
            ValueError: if ``path`` is None or empty.
            ValueError: if ``data_type`` is invalid.
            ValueError: if ``create`` is True, but ``data_type`` is None.
            ApiException: if the API call fails.
        """
        if create is None:
            create = data_type is not None
        elif create is True:
            if data_type is None:
                raise ValueError("Cannot create if data_type is not specified")

        if data_type == tbase.DataType.UNKNOWN:
            raise ValueError("Must specify a valid data type")

        tag = None  # type: Optional[Dict[str, Any]]
        try:
            tag, _ = await self._api.as_async.get(
                "/tags/{path}", params={"path": tbase.TagPathUtilities.validate(path)}
            )
        except core.ApiException as ex:
            error_name = None if ex.error is None else ex.error.name
            if create and (error_name or "").startswith("Tag.NoSuchTag"):
                pass  # continue on and create the tag
            else:
                raise

        if tag is not None:
            if data_type is not None and tag["type"] != data_type.api_name:
                raise core.ApiException("Tag exists with a conflicting data type")

            return tbase.TagData.from_json_dict(tag)
        else:
            if data_type is None:
                raise ValueError("data_type cannot be None when create is True")

            # Tag didn't already exist, so try to create it.
            await self._api.as_async.post(
                "/tags", data={"type": data_type.api_name, "path": path}
            )
            return tbase.TagData(path, data_type)
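
# Hedged usage sketch (not from the original source): the manager argument and the tag
# path are illustrative assumptions; the open_async() signature is exactly the coroutine
# defined above, reused here with the same tbase alias.
async def example_open_or_create(manager):
    # Open "plant.temperature" as a DOUBLE tag, creating it if it does not exist yet.
    return await manager.open_async(
        "plant.temperature", tbase.DataType.DOUBLE, create=True
    )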
    def test__merge_metadata__queried_metadata_is_correct(
            self, generate_tag_path):
        keywords = ["keyword1", "keyword2"]
        properties = {"prop1": "value1", "prop2": "value2"}
        tag = tbase.TagData(generate_tag_path(), tbase.DataType.DOUBLE,
                            keywords, properties)

        self.tag_manager.update([tag])
        opened_tag = self.tag_manager.open(tag.path)

        assert opened_tag.collect_aggregates is False
        assert tbase.DataType.DOUBLE == opened_tag.data_type
        assert sorted(keywords) == sorted(opened_tag.keywords)
        assert tag.path == opened_tag.path
        assert sorted(properties.items()) == sorted(
            opened_tag.properties.items())
        assert opened_tag.retention_count is None
        assert opened_tag.retention_days is None
        assert tbase.RetentionType.NONE == opened_tag.retention_type

        tag.collect_aggregates = True
        tag.set_retention_count(50)
        tag.keywords[:] = ["keyword3"]
        tag.properties.clear()
        tag.properties.update({"prop1": "edited", "prop3": "value3"})
        self.tag_manager.update(updates=[
            tbase.TagDataUpdate.from_tagdata(tag, tbase.TagUpdateFields.ALL)
        ])
        self.tag_manager.refresh([opened_tag])

        keywords.append("keyword3")
        properties["prop1"] = "edited"
        properties["prop3"] = "value3"
        assert opened_tag.collect_aggregates is True
        assert tbase.DataType.DOUBLE == opened_tag.data_type
        assert sorted(keywords) == sorted(opened_tag.keywords)
        assert tag.path == opened_tag.path
        assert sorted(properties.items()) == sorted(
            opened_tag.properties.items())
        assert 50 == opened_tag.retention_count
        assert opened_tag.retention_days is None
        assert tbase.RetentionType.COUNT == opened_tag.retention_type

        # First update the collect aggregates and retention settings
        # then do a merge that excludes those properties and verify
        # they are unmodified.
        opened_tag.collect_aggregates = False
        opened_tag.set_retention_days(30)
        tag.properties["prop4"] = "value4"
        self.tag_manager.update([opened_tag])
        self.tag_manager.update(updates=[
            tbase.TagDataUpdate.from_tagdata(
                tag,
                tbase.TagUpdateFields.KEYWORDS
                | tbase.TagUpdateFields.PROPERTIES,
            )
        ])
        self.tag_manager.refresh([opened_tag])

        properties["prop4"] = "value4"
        assert opened_tag.collect_aggregates is False
        assert tbase.DataType.DOUBLE == opened_tag.data_type
        assert sorted(keywords) == sorted(opened_tag.keywords)
        assert tag.path == opened_tag.path
        assert sorted(properties.items()) == sorted(
            opened_tag.properties.items())
        assert 30 == opened_tag.retention_days
        assert tbase.RetentionType.DURATION == opened_tag.retention_type
        assert 50 == opened_tag.retention_count