def __init__(self, concurrency_per_group, delay_seconds=0, description=None):
    self._queue = Subject()
    self._description = description
    self.request_scheduler = ThreadPoolScheduler(concurrency_per_group)
    producer_scheduler = ThreadPoolScheduler(concurrency_per_group)

    def on_next(result):
        output = result['output']
        output.on_next({
            'value': result.get('value'),
            'completed': result.get('completed')
        })

    self._subscription = self._queue.pipe(
        observe_on(producer_scheduler),
        group_by(lambda r: r['group']),
        flat_map(lambda concurrency_group: concurrency_group.pipe(
            map(lambda r: r['work']),
            delay(delay_seconds),
            merge(max_concurrent=concurrency_per_group),
            merge_all(),
        )),
        take_until_disposed()
    ).subscribe(
        on_next=on_next,
        on_error=lambda error: logging.exception(
            'Error in {} request stream'.format(self)),
        scheduler=producer_scheduler)
def getCategory(selectedJobId):
    print('getting category...', selectedJobId)
    category = {
        'id': selectedJobId,
        'path': 'root/a/b/c',
    }
    return rx.of(category).pipe(ops.delay(1))
def _retryable(self, data: str, delay: datetime.timedelta):
    return rx.of(data).pipe(
        ops.delay(duetime=delay, scheduler=self._scheduler),
        ops.map(lambda x: self._http(x)),
        ops.catch(handler=lambda exception, source: self._retry_handler(exception, source, data)),
    )
def handle_label(label, i):
    label.config(dict(borderwidth=0, padx=0, pady=0))
    mapper = ops.map(lambda ev: (label, ev, i))
    delayer = ops.delay(i * 0.1)
    return mousemove.pipe(delayer, mapper)
def getTestrunProperties(testrun):
    print('getting testrun properties...', testrun)
    properties = {
        'git_commit': '<git-hash>',
    }
    testrun['properties'] = properties
    return rx.of(testrun).pipe(ops.delay(1))
def handle_label(label, i):
    delayer = ops.delay(i * 0.100)
    mapper = ops.map(lambda xy: (label, xy, i))
    return frame.mousemove.pipe(
        delayer,
        mapper,
    )
def getTestcaseOwners(testrun):
    print('getting testcase owners...', testrun)
    owners = {
        'testcase': 'a.b.c',
        'owner': 'Sam Hawkins',
    }
    testrun['owners'] = owners
    return rx.of(testrun).pipe(ops.delay(1))
def handle_label(label, i):
    delayer = ops.delay(i * 0.100)
    mapper = ops.map(lambda xy: (label, xy, i))
    return window.mousemove.pipe(
        delayer,
        mapper,
    )
def mapper_to_observable(value):
    def initializing_mapper():
        sepal.drive.InitializeThread(credentials)
        return mapper(value)

    return of(True).pipe(
        delay(0.1, TimeoutScheduler()),
        flat_map(lambda _: from_callable(initializing_mapper, _drive_executions.scheduler)))
def do_retry(source, tries, exception):
    if tries <= retries:
        logging.warning(
            'retry_with_backoff(tries={}, retries={}, exception={}, description={})'
            .format(tries, retries, exception, description))
        return of(None).pipe(
            delay(backoff(tries), TimeoutScheduler()),
            flat_map(source),
            catch(handler=lambda e, src: do_retry(src, tries + 1, e)))
    else:
        return throw(exception)
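The do_retry above is a nested helper that relies on names from an enclosing scope. As a point of reference, here is a hedged sketch of the kind of pipeable retry-with-backoff operator such a helper typically lives inside; the backoff function, the retries default, and the description parameter are assumptions for illustration, not taken from the original project.

import logging

from rx import of, throw
from rx.operators import catch, delay, flat_map
from rx.scheduler import TimeoutScheduler


def retry_with_backoff(retries=3, description=None):
    def backoff(tries):
        return 2 ** tries  # exponential backoff, in seconds (illustrative)

    def do_retry(source, tries, exception):
        if tries <= retries:
            logging.warning('retrying (tries=%s): %s', tries, description)
            return of(None).pipe(
                delay(backoff(tries), TimeoutScheduler()),
                flat_map(source),                 # resubscribe to the failed source
                catch(handler=lambda e, src: do_retry(src, tries + 1, e)))
        return throw(exception)

    # returns a pipeable operator: some_observable.pipe(retry_with_backoff(retries=5))
    return catch(handler=lambda exception, source: do_retry(source, 1, exception))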
def handle_label(label, i):
    label.config(dict(borderwidth=0, padx=0, pady=0))
    mapper = ops.map(lambda ev: (label, ev, i))
    delayer = ops.delay(i * 0.1)
    return mousemove.pipe(
        delayer,
        mapper
    )
def handle_label(label, i):
    label.config(dict(borderwidth=0, padx=0, pady=0))
    # transform the items emitted by an Observable by applying a function to each item
    mapper = ops.map(lambda ev: (label, ev, i))
    # shift the emissions from an Observable forward in time by a particular amount
    delayer = ops.delay(i * 0.1)
    return mousemove.pipe(delayer, mapper)
def getTestruns(category):
    print('getting testruns...', category)
    baseTestrunId = category['id'] * 10
    testruns = []
    for i in range(NUM_TESTRUNS_PER_CATEGORY):
        testrun = {
            'id': baseTestrunId + i,
            'name': 'testrun-{}'.format(baseTestrunId + i),
        }
        testruns.append(testrun)
    return rx.of(testruns).pipe(ops.delay(1))
def _retryable(self, data: str, delay: timedelta):
    return rx.of(data).pipe(
        # apply the delay if one is specified
        ops.delay(duetime=delay, scheduler=self._write_options.write_scheduler),
        # invoke the http call
        ops.map(lambda x: self._http(x)),
        # if there is an error, retry
        ops.catch(handler=lambda exception, source: self._retry_handler(
            exception, source, data)),
    )
def getTestrunMatrix(testrun):
    print('getting testrun matrix...', testrun)
    matrix = {
        'testsuite': 'a.b',
        'testcases': [
            'c1',
            'c2',
            'c3',
            'c4',
        ],
    }
    testrun['matrix'] = matrix
    return rx.of(testrun).pipe(ops.delay(1))
def main():
    loop = asyncio.get_event_loop()
    io_scheduler = AsyncIOThreadSafeScheduler(loop=loop)
    scheduler = ThreadPoolScheduler(multiprocessing.cpu_count())

    semaphore = Subject()
    semaphore_stream = semaphore.pipe(
        ops.flat_map(lambda _: rx.of(True).pipe(
            ops.delay(ARGS.block_time, scheduler=scheduler),
            ops.start_with(False))),
        ops.start_with(True))

    video_stream_observable = rx.using(
        lambda: VideoStreamDisposable(),
        lambda d: rx.from_iterable(video_stream_iterable(d.cap)))

    gated_video_stream = video_stream_observable.pipe(
        ops.subscribe_on(scheduler),
        ops.sample(1 / ARGS.fps),  # sample frames based on fps
        ops.combine_latest(semaphore_stream),
        ops.filter(lambda tup: tup[1]),  # proceed only if semaphore allows
        ops.map(lambda tup: tup[0])  # take only the frame
    )

    disposable = gated_video_stream.pipe(
        ops.filter(has_face),  # filter out frames without faces
        ops.map(lambda frame: Image.fromarray(
            cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))),  # map frame to PIL image
        ops.map(lambda img: img.resize(
            (640, 360))),  # resize image (inference will be faster)
        ops.observe_on(io_scheduler),
        ops.map(lambda img: ImageFacesPair(img, analyse_frame(img))),  # analyse frame for faces
        ops.filter(lambda img_faces_pair: any([
            face.top_prediction.confidence > ARGS.threshold
            for face in img_faces_pair.faces
        ])),  # proceed only if there is a known face in the frame
        ops.throttle_first(1),
        ops.flat_map(unlock_request),  # unlock the door
        ops.do_action(
            on_next=lambda _: semaphore.on_next(True)
        )  # trigger the semaphore, which blocks the stream for "block-seconds" seconds
           # (the doors stay unlocked for that long after an unlock request)
    ).subscribe(on_error=lambda e: logger.exception(e))

    try:
        loop.run_forever()
    except Exception as e:
        logger.exception(e)
        logger.info("Smart lock face recognition engine shutdown")
        disposable.dispose()
def configure_timed_read(self):
    interval = self.config.getfloat("fp50", "interval")

    if interval > 0:
        logger.info("Configuring timed read")
        # enabled
        rx.interval(interval, scheduler=NewThreadScheduler()).pipe(
            operators.flat_map(lambda x: self.control.get_power()),
            operators.map(lambda x: self.upload_power(x)),
            operators.delay(self.config.getfloat("fp50", "query_delay")),
            operators.flat_map(
                lambda x: self.control.get_internal_temperature()),
            operators.map(lambda x: self.upload_internal_temperature(x)),
            operators.catch(error_handler)
        ).subscribe()
def main(sources):
    ticks_stream = rx.interval(timedelta(seconds=1)).pipe(
        map(lambda n: n + 1),
        take(10))

    responses_stream = sources["http"].pipe(
        flat_map(lambda s: s),
    )

    queries = rx.of(
        {"url": "https://jsonplaceholder.typicode.com/todos/1"},
        {"url": "https://error123123123.co.uk"},
    ).pipe(delay(timedelta(seconds=2)))

    return {
        "log": rx.merge(queries, responses_stream, ticks_stream),
        "http": queries
    }
def handle_image(i, image):
    imagerect = image.get_rect()

    def on_next(ev):
        imagerect.top = ev[1]
        imagerect.left = ev[0] + i * 32
        if old[i]:
            erase.append(old[i])
        old[i] = imagerect.copy()
        draw.append((image, imagerect.copy()))

    def on_error(err):
        print("Got error: %s" % err)
        sys.exit()

    mousemove.pipe(ops.delay(0.1 * i, scheduler=scheduler)).subscribe(
        on_next, on_error=on_error)
def handle_image(i, image):
    imagerect = image.get_rect()

    def on_next(ev):
        imagerect.top = ev[1]
        imagerect.left = ev[0] + i * 32
        if old[i]:
            erase.append(old[i])
        old[i] = imagerect.copy()
        draw.append((image, imagerect.copy()))

    def on_error(err):
        print("Got error: %s" % err)
        sys.exit()

    mousemove.pipe(
        ops.delay(0.1 * i, scheduler=scheduler)
    ).subscribe(on_next, on_error=on_error)
def configure():
    def error_report(x):
        tb.print_stack()

    sc = QtScheduler.QtScheduler(QtCore)

    rx.just(1).pipe(
        operators.delay(1.0, sc),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_1", "series_1").add_new_point(1.2))),
        operators.delay(1.0, sc),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_1", "series_1").add_new_point(-1))),
        operators.delay(1.0, sc),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_2", "series_1").add_new_point(1.2))),
        operators.delay(1.0, sc),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_2", "series_1").add_new_point(-12))),
        operators.delay(1.0, sc),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_3", "series_1").add_new_point(1.2))),
        operators.delay(1.0, sc),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_3", "series_1").add_new_point(-12))),
        operators.delay(1.0, sc),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_3", "series_1").add_new_point(1))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_3", "series_2").add_new_point(3))),
        operators.delay(1.0, sc),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_3", "series_1").add_new_point(-6))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_3", "series_2").add_new_point(-5))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_4", "series_2").add_new_point(-5))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_5", "series_2").add_new_point(-5))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_6", "series_2").add_new_point(-5))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_7", "series_2").add_new_point(-5))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_8", "series_2").add_new_point(-5))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_9", "series_2").add_new_point(-5))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_10", "series_2").add_new_point(-5))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_11", "series_2").add_new_point(-5))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_12", "series_2").add_new_point(-5))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_13", "series_2").add_new_point(-5))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_14", "series_2").add_new_point(-5))),
        operators.map(lambda x: timeline_widget.update_plot(
            TimelineDataPoint("plot_15", "series_2").add_new_point(-5))),
    ).subscribe()
def __init__(self, concurrency_per_group, delay_seconds=0, description=None):
    self.scheduler = ThreadPoolScheduler(concurrency_per_group)
    self.request_scheduler = ThreadPoolScheduler(10)
    self._requests = Subject()
    self._output_subject = Subject()
    self._output = self._output_subject.pipe(share())
    self._description = description
    self._subscription = self._requests.pipe(
        observe_on(self.scheduler),
        group_by(lambda r: r['concurrency_group']),
        flat_map(lambda concurrency_group: concurrency_group.pipe(
            map(lambda r: r['request']),
            merge(max_concurrent=concurrency_per_group),
            delay(delay_seconds))),
        take_until_disposed()
    ).subscribe(
        on_next=lambda request: self._output_subject.on_next(request),
        on_error=lambda error: logging.exception(
            'Error in {} request stream'.format(self)),
        on_completed=lambda: self.dispose(),
        scheduler=self.scheduler)
def create():
    return xs.pipe(delay(150))
def create():
    return xs.pipe(
        delay(datetime.utcfromtimestamp(350))
    )
def create():
    dt = datetime.utcfromtimestamp(300.0)
    return xs.pipe(delay(dt))
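The three create() factories above follow the shape of RxPY's own delay tests. Below is a minimal sketch, assuming RxPY's rx.testing virtual-time utilities, of how such a factory is typically driven; the hot observable, its timings, and the delay value are illustrative and not taken from the original tests.

from rx import operators as ops
from rx.testing import ReactiveTest, TestScheduler

scheduler = TestScheduler()

# a hot observable defined in virtual time (ticks)
xs = scheduler.create_hot_observable(
    ReactiveTest.on_next(250, 42),
    ReactiveTest.on_completed(300))

def create():
    # shift every emission (and the completion) later by the delay
    return xs.pipe(ops.delay(100))

# start() subscribes at the default virtual time and records everything emitted
results = scheduler.start(create)
print(results.messages)  # recorded notifications, shifted later in virtual time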
def main(sources):
    example1_completed = rx.combine_latest(
        sources["Cozmo"].pipe(
            ops.filter(lambda i: i["id"] == "example1_set_head_angle")),
        sources["Cozmo"].pipe(
            ops.filter(lambda i: i["id"] == "example1_set_lift_height")),
    )
    example2_completed = rx.combine_latest(
        sources["Cozmo"].pipe(
            ops.filter(lambda i: i["id"] == "example2_drive_straight")),
        sources["Cozmo"].pipe(
            ops.filter(lambda i: i["id"] == "example2_turn_in_place")),
    )
    example3_completed = rx.combine_latest(
        sources["Cozmo"].pipe(
            ops.filter(lambda i: i["id"] == "example3_set_lift_height")),
        sources["Cozmo"].pipe(
            ops.filter(lambda i: i["id"] == "example3_set_head_angle")),
        sources["Cozmo"].pipe(
            ops.filter(lambda i: i["id"] == "example3_drive_straight")),
        sources["Cozmo"].pipe(
            ops.filter(
                lambda i: i["id"] == "example3_display_oled_face_image")),
    )
    example4_completed = rx.combine_latest(
        sources["Cozmo"].pipe(
            ops.filter(lambda i: i["id"] == "example4_set_lift_height")),
        sources["Cozmo"].pipe(
            ops.filter(lambda i: i["id"] == "example4_set_head_angle")),
        sources["Cozmo"].pipe(
            ops.filter(lambda i: i["id"] == "example4_drive_straight")),
        sources["Cozmo"].pipe(
            ops.filter(
                lambda i: i["id"] == "example4_display_oled_face_image")),
    )

    example1_completed.subscribe(on_next=lambda i: print("example1_completed"))
    example2_completed.subscribe(on_next=lambda i: print("example2_completed"))
    example3_completed.subscribe(on_next=lambda i: print("example3_completed"))
    example4_completed.subscribe(on_next=lambda i: print("example4_completed"))

    rxcozmo = rx.merge(
        # example1_lift_head
        rx.of({
            "id": "example1_set_head_angle",
            "name": "set_head_angle",
            "value": cozmo.robot.MAX_HEAD_ANGLE,
        }),
        rx.of({
            "id": "example1_set_lift_height",
            "name": "set_lift_height",
            "value": 1.0,
        }),
        # example2_conflicting_actions
        example1_completed.pipe(
            ops.map(lambda i: {
                "id": "example2_drive_straight",
                "name": "drive_straight",
                "value": {
                    "distance": distance_mm(50),
                    "speed": speed_mmps(25),
                    "should_play_anim": False,
                },
            })),
        example1_completed.pipe(
            ops.map(lambda i: {
                "id": "example2_turn_in_place",
                "name": "turn_in_place",
                "value": {
                    "angle": degrees(90),
                },
            })),
        # example3_abort_one_action
        example2_completed.pipe(
            ops.map(lambda i: {
                "id": "example3_set_lift_height",
                "name": "set_lift_height",
                "value": 0,
            })),
        example2_completed.pipe(
            ops.map(lambda i: {
                "id": "example3_set_head_angle",
                "name": "set_head_angle",
                "value": {
                    "angle": cozmo.robot.MIN_HEAD_ANGLE,
                    "duration": 6.0,
                },
            })),
        example2_completed.pipe(
            ops.map(lambda i: {
                "id": "example3_drive_straight",
                "name": "drive_straight",
                "value": {
                    "distance": distance_mm(75),
                    "speed": speed_mmps(25),
                    "should_play_anim": False,
                },
            })),
        example2_completed.pipe(
            ops.map(lambda i: {
                "id": "example3_display_oled_face_image",
                "name": "display_oled_face_image",
                "value": {
                    "screen_data": face_image,
                    "duration_ms": 30000.0,
                },
            })),
        # abort actions
        example2_completed.pipe(
            ops.map(lambda i: {
                "type": "abort",
                "name": "set_lift_height"
            })),
        example2_completed.pipe(
            ops.delay(0.1),
            ops.map(lambda i: {
                "type": "abort",
                "name": "set_head_angle"
            }),
        ),
        example2_completed.pipe(
            ops.delay(2),
            ops.map(lambda i: {
                "type": "abort",
                "name": "display_oled_face_image"
            }),
        ),
        # example4_abort_all_actions
        example3_completed.pipe(
            ops.map(lambda i: {
                "id": "example4_set_lift_height",
                "name": "set_lift_height",
                "value": 0.0,
            })),
        example3_completed.pipe(
            ops.map(lambda i: {
                "id": "example4_set_head_angle",
                "name": "set_head_angle",
                "value": {
                    "angle": cozmo.robot.MIN_HEAD_ANGLE,
                    "duration": 6.0,
                },
            })),
        example3_completed.pipe(
            ops.map(lambda i: {
                "id": "example4_drive_straight",
                "name": "drive_straight",
                "value": {
                    "distance": distance_mm(75),
                    "speed": speed_mmps(25),
                    "should_play_anim": False,
                },
            })),
        example3_completed.pipe(
            ops.map(lambda i: {
                "id": "example4_display_oled_face_image",
                "name": "display_oled_face_image",
                "value": {
                    "screen_data": face_image,
                    "duration_ms": 30000.0,
                },
            })),
        # abort all actions
        example3_completed.pipe(
            ops.delay(2),
            ops.map(lambda i: {
                "type": "abort",
                "name": "set_lift_height"
            }),
        ),
        example3_completed.pipe(
            ops.delay(2),
            ops.map(lambda i: {
                "type": "abort",
                "name": "set_head_angle"
            }),
        ),
        example3_completed.pipe(
            ops.delay(2),
            ops.map(lambda i: {
                "type": "abort",
                "name": "drive_straight"
            }),
        ),
        example3_completed.pipe(
            ops.delay(2),
            ops.map(lambda i: {
                "type": "abort",
                "name": "display_oled_face_image"
            }),
        ),
    )

    sinks = {"Cozmo": rxcozmo}
    return sinks
import rx
import rx.operators as ops
from rx.subject import Subject
import time

numbers1 = Subject()
numbers2 = Subject()

numbers1.pipe(
    ops.join(
        numbers2,
        lambda i: rx.just(True).pipe(ops.delay(200)),
        lambda i: rx.just(True).pipe(ops.delay(300)),
    ),
    ops.starmap(lambda i, j: i + j),
).subscribe(
    on_next=lambda i: print("on_next {}".format(i)),
    on_error=lambda e: print("on_error: {}".format(e)),
    on_completed=lambda: print("on_completed"))

numbers1.on_next(0)
numbers2.on_next(2)
numbers1.on_next(1)
time.sleep(0.4)
numbers1.on_next(2)
numbers2.on_next(5)
time.sleep(0.25)
numbers1.on_next(3)
numbers2.on_next(3)
import rx
import rx.operators as ops
from rx.subject import Subject
import datetime
import time
from rx.disposable import Disposable

def resource():
    print("create resource at {}".format(datetime.datetime.now()))

    def dispose():
        print("dispose resource at {}".format(datetime.datetime.now()))

    return Disposable(dispose)

rx.using(resource, lambda r: rx.just(1).pipe(ops.delay(0.2))).subscribe(
    on_next=lambda i: print("on_next {}".format(i)),
    on_error=lambda e: print("on_error: {}".format(e)),
    on_completed=lambda: print("on_completed")
)

time.sleep(500)
async def go():
    nonlocal result
    source = rx.return_value(42).pipe(ops.delay(0.1))
    result = await source
import rx
import rx.operators as ops
import datetime
import time

numbers = rx.just(1)

print("{}".format(datetime.datetime.now()))
numbers.pipe(ops.delay(0.2)).subscribe(
    on_next=lambda i: print("on_next {}: {}".format(i, datetime.datetime.now())),
    on_error=lambda e: print("on_error: {}".format(e)),
    on_completed=lambda: print("on_completed"))

time.sleep(0.5)
def handle_label(label, i):
    return window.mousemove.pipe(
        ops.delay(i * 0.050),
        ops.map(lambda xy: (label, xy, i)),
    )
    op.map(lambda x: print('map call', x) or x),
    op.buffer_with_count(2),
    op.map(print),
).run()

# buffer 3 items at a time and process only the first 2 of each window
print('call take3 !!!!!!!!!!')
rx.from_iterable((print('generator call', i) or i for i in range(10))).pipe(
    op.window_with_count(3),
    op.flat_map(lambda x: x.pipe(op.take(2))),
    op.map(print),
).run()

# delay is a function that takes an observable as its argument and returns an observable
# passing something other than an observable still returns a function,
# but it raises an error when that function is invoked
print('call take4 !!!!!!!!!!')
rx.from_iterable((print('generator call', i) or i for i in range(10))).pipe(
    op.map(lambda x: op.delay(3)(rx.just(x))),
    # op.window_with_count(1),
    op.flat_map(lambda x: x),
    op.map(print),
).run()

print('=================')
# a generator cannot be repeated:
# repeat restarts from the beginning once the original on_completed arrives,
# and the observable produced by repeat emits on_completed only once
rx.from_iterable([1, 2, 3, 4]).pipe(
    op.repeat(3),
    op.map(print),
).subscribe(on_completed=lambda: print('on completed'))
op.element_at()
op.first()
op.ignore_elements()
op.last()
op.skip()
op.skip_last()
op.take()
op.take_last()
# ...

"""Error Handling"""
op.catch()
op.retry()

"""Utility"""
op.delay()
op.materialize()
op.time_interval()
op.timeout()
op.timestamp()

"""Conditional and Boolean"""
op.all()
op.contains()
op.default_if_empty()
op.sequence_equal()
op.skip_until()
op.skip_while()
op.take_until()
op.take_while()
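A minimal sketch (not from any of the projects above) showing how a few of the listed operators compose in a single pipe; the source values, the 0.1-second delay, and the sleep at the end are illustrative only.

import time

import rx
import rx.operators as op

rx.of(1, 2, 3).pipe(
    op.delay(0.1),                          # Utility: shift emissions forward in time
    op.timestamp(),                         # Utility: pair each item with the time it was emitted
    op.take_while(lambda t: t.value < 3),   # Conditional: complete once the predicate fails
    op.catch(rx.empty()),                   # Error Handling: fall back to an empty stream on error
).subscribe(
    on_next=print,
    on_completed=lambda: print('done'))

time.sleep(0.5)  # keep the main thread alive; delay emits on a timer thread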