def _read_sensor(  # pylint: disable=too-many-arguments
        self, source_uuid: UUID, sid: int, unit: str, topic: str,
        callback_config: AdvancedCallbackConfiguration
    ) -> AsyncGenerator[DataEvent, None]:
        """Stream readings for one sensor channel as ``DataEvent`` objects,
        while continuously enforcing the desired callback configuration.

        Two aiostream pipelines are merged:
        1. ``monitor_stream`` polls the device once per second, compares the
           device's current callback configuration against
           ``callback_config``, and rewrites it when they differ. Its final
           ``filter(lambda x: False)`` drops every item, so the monitor
           contributes side effects only, never data.
        2. The device's own event stream (``read_events``) is mapped into
           ``DataEvent`` instances tagged with ``source_uuid``/``topic``.

        NOTE(review): the monitor is wrapped in ``stream.just(...)`` before
        the merge — presumably so the merge yields the (never-emitting)
        stream object once rather than iterating it; confirm against
        aiostream semantics used elsewhere in this project.
        """
        monitor_stream = (
            stream.repeat(self.device, interval=1)
            | pipe.map(
                async_(lambda sensor: sensor.get_callback_configuration(sid)))
            | pipe.map(lambda current_config: None
                       if current_config == callback_config else self.device)
            | pipe.filter(lambda sensor: sensor is not None)
            | pipe.action(lambda sensor: logging.getLogger(__name__).info(
                "Resetting callback config for %s", sensor))
            | pipe.action(
                async_(lambda sensor: sensor.set_callback_configuration(
                    sid, *callback_config)))
            | pipe.filter(lambda x: False))

        # Merge the silent config-monitor with the actual data events.
        return stream.merge(
            stream.just(monitor_stream),
            stream.iterate(self.device.read_events(sids=(sid, )))
            | pipe.map(lambda item: DataEvent(sender=source_uuid,
                                              topic=topic,
                                              value=item.payload,
                                              sid=item.sid,
                                              unit=str(unit))),
        )
Example #2
0
async def download_files(urls):
    """Download up to 1000 not-yet-present files, 10 transfers at a time.

    NOTE(review): the ``urls`` parameter is unused here; the rows actually
    come from the module-level ``fields`` object — confirm that is intended.
    """
    async with aiohttp.ClientSession() as http_session:
        rows = stream.iterate(fields.itertuples())
        pipeline = (
            stream.repeat(http_session)
            | pipe.zip(rows)
            | pipe.flatmap(generate_urls)
            | pipe.filter(ignore_existing_file)
            | pipe.take(1000)
            | pipe.map(retrieve_file, ordered=False, task_limit=10)
        )
        await pipeline
Example #3
0
 def _read_device(config: dict[str, Any]) -> AsyncGenerator[Any, None]:
     """Build a stream of readings from the device described by ``config``.

     ``config["on_read"]`` is a ``(callable, timeout)`` pair. If the callable
     wraps an async generator, iterate it directly; otherwise poll it at
     ``config["interval"]`` seconds, one call in flight at a time.
     """
     reader, read_timeout = config["on_read"]
     if inspect.isasyncgenfunction(reader.func):
         # The device streams on its own; just bound each item's wait.
         return stream.iterate(reader()) | pipe.timeout(read_timeout)
     # Plain coroutine: repeat the (func, timeout) pair at the poll interval
     # and run each call sequentially with its own timeout.
     poll = stream.repeat(config["on_read"], interval=config["interval"])
     return (
         poll
         | pipe.starmap(
             lambda func, limit: stream.just(func()) | pipe.timeout(limit))
         | pipe.concat(task_limit=1)
     )
Example #4
0
 def _read_sensor(  # pylint: disable=too-many-arguments
     self, sid: int, interval: float, unit: str, topic: str, timeout: float
 ) -> AsyncGenerator[DataEvent, None]:
     """Poll one sensor function and wrap each value in a ``DataEvent``.

     Raises:
         SensorNotReady: if the sensor has not been enumerated yet (no UUID).
     """
     if self.__uuid is None:
         raise SensorNotReady("You must enumerate the sensor before reading.")
     unit_label = str(unit)
     # Re-issue the read call every `interval` seconds, one call at a time.
     polled = stream.repeat(
         stream.call(self._device.get_by_function_id, sid), interval=interval)
     return (
         polled
         | pipe.concat(task_limit=1)
         | pipe.timeout(timeout)
         | pipe.map(lambda reading: DataEvent(
             sender=self.__uuid, topic=topic, value=reading, sid=sid,
             unit=unit_label))
     )
Example #5
0
async def async_fetch_urlset(urls, download_dir, pbar=None, verbose=False):
    """Fetch every URL concurrently (HTTP/2, up to 10 in flight) and run
    ``process`` on each response as it completes."""
    async with httpx.AsyncClient(http2=True) as session:
        handle = partial(process,
                         download_dir=download_dir,
                         pbar=pbar,
                         verbose=verbose)
        pairs = stream.zip(stream.repeat(session), stream.iterate(urls))
        responses = stream.starmap(pairs, fetch, ordered=False, task_limit=10)
        return await stream.map(responses, handle)
Example #6
0
async def async_fetch_urlset(urls,
                             schedules,
                             pbar=None,
                             verbose=False,
                             use_http2=True):
    """Fetch every URL (up to 20 in flight) and feed each response to
    ``process_soup`` together with the schedule bookkeeping."""
    async with httpx.AsyncClient(http2=use_http2) as session:
        handle = partial(process_soup,
                         schedules=schedules,
                         pbar=pbar,
                         verbose=verbose)
        pairs = stream.zip(stream.repeat(session), stream.iterate(urls))
        fetched = stream.starmap(pairs, fetch, ordered=False,
                                 task_limit=20)  # 30 is similar IDK
        return await stream.map(fetched, handle)
Example #7
0
async def async_fetch_episodes(listings,
                               pbar=None,
                               verbose=False,
                               use_http2=False):
    """Fetch every broadcast URL in ``listings`` (20 concurrent requests)
    and hand each response, with its pre-built URL→JSON map, to
    ``process_json``."""
    url_to_json = dict(zip(listings.broadcasts_urlset,
                           listings.all_broadcasts))
    limits = httpx.Limits(max_keepalive_connections=20)
    async with httpx.AsyncClient(http2=use_http2, limits=limits) as session:
        handle = partial(process_json,
                         jsons=url_to_json,
                         pbar=pbar,
                         verbose=verbose)
        pairs = stream.zip(stream.repeat(session),
                           stream.iterate(listings.broadcasts_urlset))
        fetched = stream.starmap(pairs, fetch, ordered=False,
                                 task_limit=20)  # 20 is optimal
        return await stream.map(fetched, handle)
Example #8
0
async def test_repeat(assert_run):
    """repeat(x, n) yields x exactly n times; unbounded repeat is sliceable."""
    finite = stream.repeat(1, 3)
    await assert_run(finite, [1] * 3)

    sliced = stream.repeat(2)[:4]
    await assert_run(sliced, [2] * 4)
Example #9
0
async def test_repeat(assert_run):
    """Check both the bounded and the sliced-unbounded forms of repeat."""
    bounded = stream.repeat(1, 3)
    await assert_run(bounded, [1, 1, 1])

    unbounded_prefix = stream.repeat(2)[:4]
    await assert_run(unbounded_prefix, [2, 2, 2, 2])