Example #1
    async def run(self) -> None:
        """
        The main task that reads data from the sensors and pushes it onto the event_bus.
        """
        # Generate the UUIDs of new sensors
        sensor_stream = stream.chain(
            stream.iterate(
                iterate_safely(f"{self.__topic}/get",
                               f"{self.__topic}/status_update")),
            stream.iterate(event_bus.subscribe(f"{self.__topic}/add_host")),
        ) | pipe.flatmap(
            lambda item: stream.chain(
                (stream.call(event_bus.call, f"{self.__topic}/get_config", item
                             ) | catch.pipe(TopicNotRegisteredError)),
                stream.iterate(
                    event_bus.subscribe(f"nodes/by_uuid/{item}/update")),
            )
            | pipe.until(lambda config: config is None)
            | pipe.map(lambda config: config if self._is_config_valid(
                self.__node_id, config) else None)
            | pipe.map(self._create_transport)
            | pipe.switchmap(lambda transport: stream.empty() if transport is
                             None else stream.iterate(transport.stream_data()))
            | pipe.action(lambda data: event_bus.publish("wamp/publish", data)
                          ))

        await sensor_stream
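The pipeline above depends on project-specific helpers (event_bus, iterate_safely, the transport objects), so it does not run on its own. A minimal, self-contained sketch of its central idea, pipe.switchmap cancelling the previous inner stream whenever a new configuration arrives, might look as follows; configs() and sensor_data() are hypothetical stand-ins for the subscription and for transport.stream_data():

import asyncio
from aiostream import stream, pipe


async def configs():
    # Hypothetical stand-in for the config/update subscription
    for interval in (0.3, 0.1):
        yield {"interval": interval}
        await asyncio.sleep(0.5)


def sensor_data(config):
    # Hypothetical stand-in for transport.stream_data()
    return stream.count(interval=config["interval"]) | pipe.take(3)


async def main():
    pipeline = (
        stream.iterate(configs())
        | pipe.switchmap(sensor_data)      # a new config replaces the running data stream
        | pipe.print("data point: {}")
    )
    await pipeline


asyncio.run(main())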
Example #2
    def _configure_and_stream(
            self, config: dict[str, Any] | None) -> AsyncGenerator[DataEvent, None]:
        if config is None:
            return stream.empty()
        try:
            # Run all config steps in order (concat) and one at a time (task_limit=1), then drop their
            # output, because there is nothing to compare it to (filter => False). Afterwards read all
            # sensors of the bricklet and process them in parallel (flatten).
            config_stream = stream.chain(
                stream.iterate(config["on_connect"])
                | pipe.starmap(lambda func, timeout: stream.just(func()) | pipe.timeout(timeout))
                | pipe.concat(task_limit=1)
                | pipe.filter(lambda result: False),
                stream.iterate(config["config"].items())
                | pipe.starmap(self._parse_callback_configuration)
                | pipe.starmap(self._set_callback_configuration)
                | pipe.flatten()
                | pipe.map(lambda args: self._read_sensor(config["uuid"], *args))
                | pipe.flatten(),
            )
            return config_stream
        except NotConnectedError:
            # Do not log it
            raise
        except Exception:
            self._logger.exception("This should not happen.")
            raise
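The first half of the chain above runs the on_connect steps only for their side effects and throws the results away with filter. A simplified, self-contained sketch of that pattern, with hypothetical setup_step and read_sensor coroutines and pipe.map standing in for the starmap/just/concat combination:

import asyncio
from aiostream import stream, pipe


async def setup_step(name):
    await asyncio.sleep(0.1)
    print(f"configured {name}")


async def read_sensor():
    for value in (1.2, 1.3, 1.4):
        await asyncio.sleep(0.1)
        yield value


async def main():
    pipeline = stream.chain(
        stream.iterate(["gain", "offset"])
        | pipe.map(setup_step, task_limit=1)   # run the setup steps sequentially
        | pipe.filter(lambda _: False),        # drop their (None) results
        stream.iterate(read_sensor()),          # then stream the actual data
    )
    print(await stream.list(pipeline))   # [1.2, 1.3, 1.4]


asyncio.run(main())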
Example #3
    def stream_data(self) -> AsyncGenerator[DataEvent, None]:
        """
        Generate the initial configuration of the sensor, configure it, and finally stream the data from the sensor.
        If there is a configuration update, reconfigure the sensor and start streaming again.

        Returns
        -------
        AsyncGenerator of DataEvent
            The data from the device
        """
        # Generate the first configuration:
        # query the database and, if it does not yet have a config for the sensor, wait until there is one.

        data_stream = (
            stream.chain(
                stream.call(
                    call_safely, "db_labnode_sensors/get_config", "db_labnode_sensors/status_update", self.__uuid
                )
                | pipe.takewhile(lambda config: config is not None),
                stream.iterate(event_bus.subscribe(f"nodes/by_uuid/{self.__uuid}/update")),
            )
            | pipe.action(
                lambda config: logging.getLogger(__name__).info(
                    "Got new configuration for: %s",
                    self._device,
                )
            )
            | pipe.map(self._create_config)
            | pipe.switchmap(
                lambda config: stream.empty()
                if config is None or not config["enabled"]
                else (self._configure_and_stream(config))
            )
        )

        return data_stream
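stream.call plus pipe.takewhile yields the stored configuration only if one exists, and the chained subscription keeps delivering updates afterwards. A self-contained sketch of this first-config-then-updates pattern; fetch_config() and updates() are hypothetical stand-ins for the database call and the event-bus subscription:

import asyncio
from aiostream import stream, pipe


async def fetch_config():
    await asyncio.sleep(0.1)
    return None          # pretend the database has no config yet


async def updates():
    for cfg in ({"enabled": False}, {"enabled": True}):
        await asyncio.sleep(0.2)
        yield cfg


async def main():
    configs = stream.chain(
        # Yield the initial config only if there is one ...
        stream.call(fetch_config) | pipe.takewhile(lambda cfg: cfg is not None),
        # ... then keep yielding updates as they arrive.
        stream.iterate(updates()),
    )
    print(await stream.list(configs))   # [{'enabled': False}, {'enabled': True}]


asyncio.run(main())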
Example #4
async def test_text_complex_elements(elements: List[JsonElement]) -> None:
    async with stream.iterate(elements).stream() as streamer:
        result = ""
        async for elem in respond_text(streamer):
            result += elem
        # every element is rendered as YAML with --- as the object delimiter
        assert len(elements) == len(result.split("---"))
Example #5
async def test_text_simple_elements(elements: List[JsonElement]) -> None:
    async with stream.iterate(elements).stream() as streamer:
        result = ""
        async for elem in respond_text(streamer):
            result += elem + "\n"
        # every element is rendered as a single line
        assert len(elements) + 1 == len(result.split("\n"))
Example #6
    def _stream_data(self, transport):
        config_stream = (
            stream.chain(
                stream.call(
                    call_safely,
                    f"{self.__database_topic}/get_config",
                    f"{self.__database_topic}/status_update",
                    transport.uuid,
                ),
                stream.iterate(event_bus.subscribe(f"nodes/by_uuid/{transport.uuid}/add")),
            )
            | pipe.map(lambda config: self._create_device(transport, config))
            | pipe.starmap(lambda config, device: stream.empty() if device is None else device.stream_data(config))
            | pipe.switch()
            | context.pipe(
                transport,
                on_enter=lambda: logging.getLogger(__name__).info(
                    "Connected to %s at %s (%s).", transport.name, transport.uri, transport.label),
                on_exit=lambda: logging.getLogger(__name__).info(
                    "Disconnected from %s at %s (%s).", transport.name, transport.uri, transport.label),
            )
        )

        return config_stream
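context.pipe and _create_device are project-specific, but the flattening step is plain aiostream: pipe.switch() always forwards the most recent inner stream and drops the previous one. A minimal sketch with made-up names:

import asyncio
from aiostream import stream, pipe


def numbers(prefix, interval):
    # A small, finite stream of labelled ticks
    return (
        stream.count(interval=interval)
        | pipe.take(3)
        | pipe.map(lambda i: f"{prefix}{i}")
    )


async def outer():
    yield numbers("slow-", 0.5)
    await asyncio.sleep(0.4)
    yield numbers("fast-", 0.1)   # switch() now drops the slow stream


async def main():
    print(await stream.list(stream.iterate(outer()) | pipe.switch()))


asyncio.run(main())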
Example #7
    def _read_sensor(  # pylint: disable=too-many-arguments
        self, source_uuid: UUID, sid: int, unit: str, topic: str,
        callback_config: AdvancedCallbackConfiguration
    ) -> AsyncGenerator[DataEvent, None]:
        monitor_stream = (
            stream.repeat(self.device, interval=1)
            | pipe.map(
                async_(lambda sensor: sensor.get_callback_configuration(sid)))
            | pipe.map(lambda current_config: None
                       if current_config == callback_config else self.device)
            | pipe.filter(lambda sensor: sensor is not None)
            | pipe.action(lambda sensor: logging.getLogger(__name__).info(
                "Resetting callback config for %s", sensor))
            | pipe.action(
                async_(lambda sensor: sensor.set_callback_configuration(
                    sid, *callback_config)))
            | pipe.filter(lambda x: False))

        return stream.merge(
            stream.just(monitor_stream),
            stream.iterate(self.device.read_events(sids=(sid, )))
            | pipe.map(lambda item: DataEvent(sender=source_uuid,
                                              topic=topic,
                                              value=item.payload,
                                              sid=item.sid,
                                              unit=str(unit))),
        )
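The monitor stream above emits nothing downstream (its final filter drops every item) and exists purely for its side effects, while stream.merge keeps it running next to the data stream. A self-contained sketch of that idea with a hypothetical watchdog:

import asyncio
from aiostream import stream, pipe


async def check(_):
    print("watchdog: still alive")


def watchdog():
    return (
        stream.repeat(None, interval=0.25)
        | pipe.action(check)
        | pipe.filter(lambda _: False)   # emit nothing downstream
    )


async def data():
    for value in (1, 2, 3):
        await asyncio.sleep(0.2)
        yield value


async def main():
    merged = stream.merge(watchdog(), stream.iterate(data())) | pipe.take(3)
    print(await stream.list(merged))     # [1, 2, 3]


asyncio.run(main())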
Example #8
    async def _get_hexes_at_resolution(self, res: int) -> Set[H3HexCell]:
        idx_map_fn = h3.h3_to_parent if res < self.res else h3.h3_to_children
        results = await (stream.iterate(self.cells)
                         | aiopipe.map(partial(idx_map_fn, res=11))
                         | aiopipe.reduce(lambda a, b: a | b))

        return results
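aiopipe here is presumably an alias for aiostream.pipe; the same map-then-reduce combination works on any stream of sets. A sketch without the H3 dependency, using made-up cell data:

import asyncio
from aiostream import stream, pipe


async def main():
    cells = ["a", "b", "c"]
    neighbours = {"a": {"a1", "a2"}, "b": {"b1"}, "c": {"c1", "c2"}}

    result = await (
        stream.iterate(cells)
        | pipe.map(lambda cell: neighbours[cell])    # each cell maps to a set
        | pipe.reduce(lambda acc, item: acc | item)  # union of all the sets
    )
    print(result)


asyncio.run(main())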
Example #9
    def _stream_config_updates(
            sensor: TinkerforgeSensor) -> AsyncGenerator[dict[str, Any], None]:
        """
        Tries to fetch a config from the database. It also listens to 'nodes/tinkerforge/$UID/update' for new configs
        from the database.

        Parameters
        ----------
        sensor: TinkerforgeSensor
            The brick or bricklet for which to fetch a config from the database

        Returns
        -------
        AsyncGenerator of dict
            A dict containing the configuration of the device
        """
        return stream.chain(
            stream.call(
                call_safely,
                "db_tinkerforge_sensors/get_config",
                "db_tinkerforge_sensors/status_update",
                sensor.device.uid,
            )
            | pipe.takewhile(lambda config: config is not None),
            stream.iterate(
                event_bus.subscribe(
                    f"nodes/tinkerforge/{sensor.device.uid}/update")),
        )
Example #10
    def stream_data(self,
                    config: dict[str, Any]) -> AsyncGenerator[DataEvent, None]:
        """
        Stream the data from the sensor.

        Parameters
        ----------
        config: dict
            A dictionary containing the sensor configuration.

        Returns
        -------
        AsyncGenerator
            The asynchronous stream.
        """
        data_stream = (
            stream.chain(
                stream.just(config),
                stream.iterate(event_bus.subscribe(f"nodes/by_uuid/{self.__uuid}/update")),
            )
            | pipe.action(
                lambda _: logging.getLogger(__name__).info("Got new configuration for: %s", self)
                if config is not None
                else logging.getLogger(__name__).info("Removed configuration for: %s", self)
            )
            | pipe.map(self._parse_config)
            | pipe.switchmap(
                lambda conf: stream.empty()
                if conf is None or not conf["enabled"]
                else self._configure_and_stream(conf)
            )
        )

        return data_stream
Example #11
async def euclidean_norm_handler(reader, writer):

    # Define lambdas
    strip =        lambda x: x.decode().strip()
    nonempty =     lambda x: x != ''
    square =       lambda x: x ** 2
    write_cursor = lambda x: writer.write(b'> ')
    square_root =  lambda x: x ** 0.5

    # Create awaitable
    handle_request = (
        stream.iterate(reader)
        | pipe.print('string: {}')
        | pipe.map(strip)
        | pipe.takewhile(nonempty)
        | pipe.map(float)
        | pipe.map(square)
        | pipe.print('square: {:.2f}')
        | pipe.action(write_cursor)
        | pipe.accumulate(initializer=0)
        | pipe.map(square_root)
        | pipe.print('norm -> {:.2f}')
    )

    # Loop over norm computations
    while not reader.at_eof():
        writer.write(INSTRUCTIONS.encode())
        try:
            result = await handle_request
        except ValueError:
            writer.write(ERROR.encode())
        else:
            writer.write(RESULT.format(result).encode())
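handle_request is an ordinary awaitable, which is why the loop above can await it once per request. A self-contained sketch of the same pipeline fed from a list of byte lines instead of a StreamReader (INSTRUCTIONS, RESULT, ERROR and the socket handling are omitted):

import asyncio
from aiostream import stream, pipe


async def main():
    lines = [b"3\n", b"4\n", b"\n"]     # a request terminated by an empty line

    norm = (
        stream.iterate(lines)
        | pipe.map(lambda x: x.decode().strip())
        | pipe.takewhile(lambda x: x != "")
        | pipe.map(float)
        | pipe.map(lambda x: x ** 2)
        | pipe.accumulate(initializer=0)
        | pipe.map(lambda x: x ** 0.5)
    )
    print(await norm)   # 5.0, the euclidean norm of (3, 4)


asyncio.run(main())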
Example #12
async def test_iterable(assert_run):
    lst = [9, 4, 8, 3, 1]

    xs = stream.create.from_iterable(lst)
    await assert_run(xs, lst)

    xs = stream.iterate(lst)
    await assert_run(xs, lst)
Example #13
def graph_stream(node_list: List[Json]) -> Stream:
    def from_node() -> Generator[Json, Any, None]:
        for node in node_list:
            yield node
        node_ids = [value_in_path(a, NodePath.node_id) for a in node_list]
        for from_n, to_n in interleave(node_ids):
            yield {"type": "edge", "from": from_n, "to": to_n}

    return stream.iterate(from_node())
Example #14
    def _read_device(config: dict[str, Any]) -> AsyncGenerator[Any, None]:
        on_read: partial
        timeout: float
        on_read, timeout = config["on_read"]
        if inspect.isasyncgenfunction(on_read.func):
            return stream.iterate(on_read()) | pipe.timeout(timeout)
        return (
            stream.repeat(config["on_read"], interval=config["interval"])
            | pipe.starmap(lambda func, timeout: stream.just(func()) | pipe.timeout(timeout))
            | pipe.concat(task_limit=1)
        )
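The polling branch repeats a (func, timeout) pair at a fixed interval, runs one read at a time and guards every read with a timeout. A sketch of that branch with a hypothetical read coroutine, using stream.call(func) in place of stream.just(func()):

import asyncio
from aiostream import stream, pipe


async def read_once():
    # Hypothetical read function standing in for config["on_read"]
    await asyncio.sleep(0.05)
    return 42.0


async def main():
    polled = (
        stream.repeat((read_once, 1.0), interval=0.5)   # (func, timeout) pairs
        | pipe.starmap(lambda func, timeout: stream.call(func) | pipe.timeout(timeout))
        | pipe.concat(task_limit=1)                     # one read at a time
        | pipe.take(3)
    )
    print(await stream.list(polled))   # [42.0, 42.0, 42.0]


asyncio.run(main())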
Example #15
File: common.py Project: Roam-gg/RestCore
async def async_all(items):
    if isinstance(items, Stream):
        xs = items
    else:
        xs = stream.iterate(items)
    async with xs.stream() as streamer:
        async for item in streamer:
            if not item:
                return False
        return True
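A short usage sketch, assuming the async_all defined above is in scope; it accepts plain iterables as well as aiostream pipelines:

import asyncio
from aiostream import stream, pipe


async def main():
    # async_all is the helper defined above
    print(await async_all([1, 2, 3]))                          # True
    print(await async_all(stream.range(5) | pipe.map(bool)))   # False (0 is falsy)


asyncio.run(main())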
Example #16
File: async_utils.py Project: lmmx/beeb
async def async_fetch_urlset(urls, download_dir, pbar=None, verbose=False):
    async with httpx.AsyncClient(http2=True) as session:
        ws = stream.repeat(session)
        xs = stream.zip(ws, stream.iterate(urls))
        ys = stream.starmap(xs, fetch, ordered=False, task_limit=10)
        process_download = partial(process,
                                   download_dir=download_dir,
                                   pbar=pbar,
                                   verbose=verbose)
        zs = stream.map(ys, process_download)
        return await zs
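stream.zip(stream.repeat(session), stream.iterate(urls)) pairs one shared resource with every item, and starmap(..., task_limit=...) bounds the download concurrency. A sketch without httpx, with a fake session and made-up URLs:

import asyncio
from aiostream import stream


async def fetch(session, url):
    await asyncio.sleep(0.1)
    return f"{session}:{url}"


async def main():
    session = "session-0"
    urls = [f"https://example.invalid/{i}" for i in range(5)]

    pairs = stream.zip(stream.repeat(session), stream.iterate(urls))
    results = stream.starmap(pairs, fetch, ordered=False, task_limit=2)
    print(await stream.list(results))


asyncio.run(main())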
Example #17
async def main():
    get_urls_titles()
    global start
    global end
    global counter
    global err

    start, end = intro()

    if end == length:
        min_urls_titles = urls_titles[start:]
    else:
        min_urls_titles = urls_titles[start:end + 1]

    counter = start - 1

    xs = stream.iterate(min_urls_titles)
    ys = stream.starmap(xs, fetch, task_limit=100)
    zs = stream.starmap(ys, process, task_limit=100)
    await zs

    if counter < 0: counter = 0

    # About Novel
    about.find('img').decompose()  # Remove IMG tags
    for a in about.select("a"):  # Remove anchor tags
        a['href'] = '#'
    html_gen("hr", soup, "", about)
    html_gen("h3", soup, "Description", about)
    syn = synopsis.text.replace("Description", "")
    html_gen("p", soup, syn, about)
    html_gen("hr", soup, "", about)
    html_gen("h3", soup, "About This Download : ", about)
    html_gen("p", soup, "Total Chapters = " + str(counter), about)
    html_gen("p", soup,
             "No. Of Chapters That Raised Exceptions = " + str(len(err)),
             about)
    if len(err) != 0:
        html_gen("p", soup, "And They Are : ", about)
        for i in err:
            html_gen("li", soup, str(i), about)
    html_gen("hr", soup, "", about)

    # Create About Novel Page
    c1 = epub.EpubHtml(title="About Novel",
                       file_name='About_novel' + '.xhtml',
                       lang='hr')
    c1.content = about.encode('utf-8')
    book.add_item(c1)
    book.toc.insert(0, c1)
    book.spine.insert(1, c1)
    print("Created \"About Novel\" Page")

    save()
Example #18
async def test_async_gen() -> None:

    async with stream.empty().stream() as empty:
        async for _ in await force_gen(empty):
            pass

    with pytest.raises(Exception):
        async with stream.throw(Exception(";)")).stream() as err:
            async for _ in await force_gen(err):
                pass

    async with stream.iterate(range(0, 100)).stream() as elems:
        assert [x async for x in await force_gen(elems)] == list(range(0, 100))
Example #19
    async def branch(self, limit=10):
        coros = await self.find_data()
        from aiostream import stream
        index = 0
        while True:
            xs = stream.iterate(coros)
            ys = xs[index:index + limit]
            t = await stream.list(ys)
            print("t is ", t)
            if not t:
                break
            await asyncio.ensure_future(asyncio.wait(t))
            index += limit
Example #20
async def test_preserve(assert_run, event_loop):

    async def agen():
        yield 1
        yield 2

    xs = stream.iterate(agen())[0]
    await assert_run(xs, [1])
    await assert_run(xs, [], IndexError('Index out of range'))

    ys = stream.preserve(agen())[0]
    await assert_run(ys, [1])
    await assert_run(ys, [2])
Example #21
async def test_async_iterable(assert_run, event_loop):

    async def agen():
        for x in range(2, 5):
            yield await asyncio.sleep(1.0, result=x**2)

    xs = stream.create.from_async_iterable(agen())
    await assert_run(xs, [4, 9, 16])
    assert event_loop.steps == [1.0, 1.0, 1.0]

    xs = stream.iterate(agen())
    await assert_run(xs, [4, 9, 16])
    assert event_loop.steps == [1.0, 1.0, 1.0]*2
Example #22
    def stream_data(self) -> AsyncGenerator[DataEvent, None]:
        """
        Generate the initial configuration of the sensor, configure it, and finally stream the data from the sensor.
        If there is a configuration update, reconfigure the sensor and start streaming again.

        Returns
        -------
        AsyncGenerator of DataEvent
            The data from the device
        """
        # Generate the first configuration:
        # query the database and, if it does not yet have a config for the sensor, wait until there is one.

        data_stream = stream.chain(
            stream.just(self),
            stream.iterate(
                event_bus.subscribe(
                    f"nodes/tinkerforge/{self.device.uid}/remove"))[:1]
            | pipe.map(lambda x: None),
        ) | pipe.switchmap(
            lambda sensor: stream.empty() if sensor is None else
            (self._stream_config_updates(sensor)
             | pipe.switchmap(lambda config: stream.chain(
                 stream.just(config),
                 stream.iterate(
                     event_bus.subscribe(
                         f"nodes/by_uuid/{config['uuid']}/remove"))[:1]
                 | pipe.map(lambda x: None),
             ))
             | pipe.action(lambda config: logging.getLogger(__name__).info(
                 "Got new configuration for: %s",
                 sensor.device,
             ))
             | pipe.map(self._create_config)
             | pipe.switchmap(lambda config: stream.empty()
                              if config is None or not config["enabled"] else
                              (self._configure_and_stream(config)))))

        return data_stream
Example #23
async def test_dot() -> None:
    def node(name: str, account_name: str) -> Json:
        ancestors = {"account": {"reported": {"name": account_name}}}
        return {
            "type": "node",
            "id": name,
            "reported": {
                "kind": name,
                "name": name
            },
            "ancestors": ancestors
        }

    def edge(from_node: str, to_node: str) -> Json:
        return {
            "type": "edge",
            "from": from_node,
            "to": to_node,
            "edge_type": "delete"
        }

    nodes = [node("a", "acc1"), node("b", "acc1"), node("c", "acc2")]
    edges = [edge("a", "b"), edge("a", "c"), edge("b", "c")]

    async with stream.iterate(nodes + edges).stream() as streamer:
        result = ""
        async for elem in respond_dot(streamer):
            result += elem + "\n"
        expected = ("digraph {\n"
                    "rankdir=LR\n"
                    "overlap=false\n"
                    "splines=true\n"
                    "node [shape=Mrecord colorscheme=paired12]\n"
                    "edge [arrowsize=0.5]\n"
                    ' "a" [label="a|a", style=filled fillcolor=1];\n'
                    ' "b" [label="b|b", style=filled fillcolor=2];\n'
                    ' "c" [label="c|c", style=filled fillcolor=3];\n'
                    ' "a" -> "b" [label="delete"]\n'
                    ' "a" -> "c" [label="delete"]\n'
                    ' "b" -> "c" [label="delete"]\n'
                    ' subgraph "acc1" {\n'
                    '    "a"\n'
                    '    "b"\n'
                    " }\n"
                    ' subgraph "acc2" {\n'
                    '    "c"\n'
                    " }\n"
                    "}\n")
        assert result == expected
Example #24
File: preserve.py Project: vxgmichel/aiorx
async def main():
    async def agen():
        yield 1
        yield 2
        yield 3

    # The xs stream does not preserve the generator
    xs = stream.iterate(agen())
    print(await xs[0])            # Print 1
    print(await stream.list(xs))  # Print [] (2 and 3 were never yielded)

    # The xs stream does preserve the generator
    xs = stream.preserve(agen())
    print(await xs[0])            # Print 1
    print(await stream.list(xs))  # Print [2, 3]

    # Transform agen into a stream operator
    agen_stream = operator(agen)
    xs = agen_stream()            # agen is now reusable
    print(await stream.list(xs))  # Print [1, 2, 3]
    print(await stream.list(xs))  # Print [1, 2, 3]
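A related sketch: instead of wrapping the generator after the fact, the async generator function can be decorated with aiostream's operator up front, which also yields a reusable stream factory; numbers() is a made-up example:

import asyncio
from aiostream import operator, stream


@operator
async def numbers():
    yield 1
    yield 2
    yield 3


async def main():
    xs = numbers()
    print(await stream.list(xs))   # [1, 2, 3]
    print(await stream.list(xs))   # [1, 2, 3], the operator re-creates the generator


asyncio.run(main())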
Example #25
async def test_non_iterable(assert_run):
    with pytest.raises(TypeError):
        stream.iterate(None)

    with pytest.raises(TypeError):
        stream.create.from_async_iterable(None)
Example #26
async def test_map(assert_run, event_loop):

    # Synchronous/simple
    with event_loop.assert_cleanup():
        xs = stream.range(5) | pipe.map(lambda x: x**2)
        expected = [x**2 for x in range(5)]
        await assert_run(xs, expected)

    # Synchronous/multiple
    with event_loop.assert_cleanup():
        xs = stream.range(5)
        ys = xs | pipe.map(lambda x, y: x+y, xs)
        expected = [x*2 for x in range(5)]
        await assert_run(ys, expected)

    # Asynchronous/simple/concurrent
    with event_loop.assert_cleanup():
        xs = stream.range(1, 4) | pipe.map(asyncio.sleep)
        expected = [None] * 3
        await assert_run(xs, expected)
        assert event_loop.steps == [1, 1, 1]

    # Asynchronous/simple/sequential
    with event_loop.assert_cleanup():
        xs = stream.range(1, 4) | pipe.map(asyncio.sleep, task_limit=1)
        expected = [None] * 3
        await assert_run(xs, expected)
        assert event_loop.steps == [1, 2, 3]

    # Asynchronous/multiple/concurrent
    with event_loop.assert_cleanup():
        xs = stream.range(1, 4)
        ys = xs | pipe.map(asyncio.sleep, xs)
        await assert_run(ys, [1, 2, 3])
        assert event_loop.steps == [1, 1, 1]

    # Asynchronous/multiple/sequential
    with event_loop.assert_cleanup():
        xs = stream.range(1, 4)
        ys = xs | pipe.map(asyncio.sleep, xs, task_limit=1)
        await assert_run(ys, [1, 2, 3])
        assert event_loop.steps == [1, 2, 3]

    # As completed
    with event_loop.assert_cleanup():
        xs = stream.iterate([2, 4, 1, 3, 5])
        ys = xs | pipe.map(asyncio.sleep, xs, ordered=False)
        await assert_run(ys, [1, 2, 3, 4, 5])
        assert event_loop.steps == [1, 1, 1, 1, 1]

    # Invalid argument
    with pytest.raises(ValueError):
        await (stream.range(1, 4) | pipe.map(asyncio.sleep, task_limit=0))

    # Break
    with event_loop.assert_cleanup():
        xs = stream.count(1)
        ys = xs | pipe.map(asyncio.sleep, xs, task_limit=10)
        await assert_run(ys[:3], [1, 2, 3])
        assert event_loop.steps == [1, 1, 1]

    # Stuck
    with event_loop.assert_cleanup():
        xs = stream.count(1)
        ys = xs | pipe.map(asyncio.sleep, xs, task_limit=1) | pipe.timeout(5)
        await assert_run(ys, [1, 2, 3, 4], asyncio.TimeoutError())

    # Force await
    with event_loop.assert_cleanup():
        xs = stream.iterate([1, 2, 3])
        ys = xs | pipe.map(async_(lambda x: asyncio.sleep(x, x)))
        await assert_run(ys, [1, 2, 3])
        assert event_loop.steps == [1, 1, 1]

    # Map await_
    with event_loop.assert_cleanup():
        xs = stream.iterate(map(lambda x: asyncio.sleep(x, x), [1, 2, 3]))
        ys = xs | pipe.map(await_)
        await assert_run(ys, [1, 2, 3])
        assert event_loop.steps == [1, 1, 1]