def _window_to_group(self, value):
    """Materialize each window, partition its items, and merge the batches."""

    def regroup(items):
        # Re-emit the collected window as an observable, split it with
        # _group_by, turn every partition into a batch, and flatten.
        return rx.from_iterable(items).pipe(
            ops.group_by(_group_by),
            ops.map(_group_to_batch),
            ops.merge_all(),
        )

    return value.pipe(
        ops.to_iterable(),
        ops.map(regroup),
        ops.merge_all(),
    )
def action1(scheduler, state):
    """Group words by first letter and flatten every group into a list."""
    source = reactivex.from_iterable(
        ["alpha", "apple", "beta", "bat", "gamma"]
    )
    xs[0] = source.pipe(
        ops.group_by(lambda word: word[0]),
        ops.map(lambda group: group.pipe(ops.to_iterable(), ops.map(list))),
        ops.merge_all(),
    )
def create():
    """Window xs on ys boundaries and tag each value with its window index."""

    def mapper(window, index):
        def tag(value):
            return str(index) + " " + str(value)

        return window.pipe(ops.map(tag))

    return xs.pipe(
        ops.window(ys),
        ops.map_indexed(mapper),
        ops.merge_all(),
    )
def create():
    """Prefix every value of each count-based window with the window index."""

    def mapper(window, index):
        return window.pipe(ops.map(lambda value: "%s %s" % (index, value)))

    return xs.pipe(
        ops.window_with_count(3, 2),
        ops.map_indexed(mapper),
        ops.merge_all(),
    )
def create():
    """Index each time-window's values and append an end marker per window."""

    def mapper(window, index):
        tagged = window.pipe(ops.map(lambda value: "%s %s" % (index, value)))
        # Terminate every window's stream with an explicit end marker.
        return tagged.pipe(ops.concat(reactivex.return_value("%s end" % index)))

    return xs.pipe(
        ops.window_with_time(100, 50),
        ops.map_indexed(mapper),
        ops.merge_all(),
    )
def __init__(self) -> None:
    """Build the batching pipeline.

    Incoming items pushed through ``self._subject`` are observed on a
    single-worker scheduler, windowed (5 items or 10 s, whichever comes
    first), regrouped into batches, wrapped in retryable operations with a
    jittered delay, and merged; results, errors, and completion are routed
    to the instance handlers.
    """
    self._subject = Subject()
    # A single worker keeps downstream processing strictly sequential.
    self._scheduler = ThreadPoolScheduler(max_workers=1)
    obs = self._subject.pipe(ops.observe_on(self._scheduler))
    # Fixed: dropped the dead trailing `pass` and the backslash
    # line continuations in favor of implicit bracket continuation.
    self._disposable = obs.pipe(
        ops.window_with_time_or_count(
            count=5, timespan=datetime.timedelta(milliseconds=10_000)
        ),
        ops.flat_map(lambda x: self._window_to_group(x)),
        ops.map(
            mapper=lambda x: self._retryable(
                data=x, delay=self._jitter_delay(jitter_interval=1000)
            )
        ),
        ops.merge_all(),
    ).subscribe(self._result, self._error, self._on_complete)
def create():
    """Toggle windows open on ys; each closes after a timer of its value."""

    def closing(value):
        return reactivex.timer(value)

    def mapper(window, index):
        return window.pipe(
            ops.map(lambda item: str(index) + " " + str(item))
        )

    return xs.pipe(
        ops.window_toggle(ys, closing),
        ops.map_indexed(mapper),
        ops.merge_all(),
    )
def create():
    """Window with a closing selector that immediately raises ex."""

    def closing():
        return reactivex.throw(ex)

    def mapper(window, index):
        return window.pipe(
            ops.map(lambda item: str(index) + " " + str(item))
        )

    return xs.pipe(
        ops.window_when(closing),
        ops.map_indexed(mapper),
        ops.merge_all(),
    )
def create():
    """Tag values in each 70-tick/3-element window with the window index."""

    def projection(window, index):
        return window.pipe(ops.map(lambda value: "%s %s" % (index, value)))

    return xs.pipe(
        ops.window_with_time_or_count(70, 3),
        ops.map_indexed(projection),
        ops.merge_all(),
    )
def create():
    """Close successive windows after a growing delay (0, 100, 200, ... ticks)."""

    def closing():
        # `window[0]` is a shared one-element counter from the enclosing scope.
        delay = window[0]
        window[0] += 1
        return reactivex.timer(delay * 100)

    def mapper(w, idx):
        return w.pipe(ops.map(lambda item: str(idx) + " " + str(item)))

    return xs.pipe(
        ops.window_when(closing),
        ops.map_indexed(mapper),
        ops.merge_all(),
    )
def _flat_map_internal(
    source: Observable[_T1],
    mapper: Optional[Mapper[_T1, Any]] = None,
    mapper_indexed: Optional[MapperIndexed[_T1, Any]] = None,
) -> Observable[Any]:
    """Map each element (optionally with its index) to an observable and
    merge all resulting observables into a single stream."""

    def projection(value: _T1, index: int) -> Observable[Any]:
        # Prefer the plain mapper, then the indexed one, else fall back
        # to `identity` (mirrors the original precedence).
        if mapper:
            mapped: Any = mapper(value)
        elif mapper_indexed:
            mapped = mapper_indexed(value, index)
        else:
            mapped = identity
        # Normalize whatever the mapper produced into an Observable.
        if isinstance(mapped, Future):
            return from_future(cast("Future[Any]", mapped))
        if isinstance(mapped, Observable):
            return mapped
        return from_(mapped)

    return source.pipe(
        ops.map_indexed(projection),
        ops.merge_all(),
    )
def create():
    """Prefix each value with the index of its 100-tick time window."""

    def mapper(window, index):
        return window.pipe(ops.map(lambda value: "%s %s" % (index, value)))

    return xs.pipe(
        ops.window_with_time(100),
        ops.map_indexed(mapper),
        ops.merge_all(),
    )
def create():
    """Flatten the observable of observables into a single merged stream."""
    merged = xs.pipe(ops.merge_all())
    return merged
def create():
    """Tag each value of every count-based window with the window index."""

    def proj(window, index):
        def label(value):
            return str(index) + " " + str(value)

        return window.pipe(ops.map(label))

    return xs.pipe(
        ops.window_with_count(3, 2),
        ops.map_indexed(proj),
        ops.merge_all(),
    )
async def main():
    """Stream CSV rows into InfluxDB as asynchronous batched writes.

    Rows from the CSV generator are buffered into batches of 500, each
    batch is written concurrently via an asyncio task wrapped as an
    observable, and completion is awaited through a future.
    """
    async with InfluxDBClientAsync(
        url='http://localhost:8086', token='my-token', org='my-org'
    ) as client:
        write_api = client.write_api()

        async def async_write(batch):
            """Write one batch and pass it through for progress reporting."""
            await write_api.write(bucket='my-bucket', record=batch)
            return batch

        # Fixed: turned no-op string-literal statements into real comments
        # and merged the chained .pipe() calls into one pipeline.
        # Buffer 500 rows, start an async write per batch, merge the futures.
        batches = rx.from_iterable(csv_to_generator('vix-daily.csv')).pipe(
            ops.buffer_with_count(500),
            ops.map(lambda batch: rx.from_future(
                asyncio.ensure_future(async_write(batch)))),
            ops.merge_all(),
        )

        done = asyncio.Future()

        # Write batches by subscribing to the Rx pipeline.
        batches.subscribe(
            on_next=lambda batch: print(f'Written batch... {len(batch)}'),
            on_error=lambda ex: print(f'Unexpected error: {ex}'),
            on_completed=lambda: done.set_result(0),
            scheduler=AsyncIOScheduler(asyncio.get_event_loop()))

        # Wait for all writes to finish.
        await done
def merge_(*sources: Observable[_T]) -> Observable[_T]:
    """Merge any number of source observables into one observable."""
    source_stream = reactivex.from_iterable(sources)
    return source_stream.pipe(ops.merge_all())