class Client:
    """Thin websocket client exposing connection and message events as Rx subjects."""

    def __init__(self, host='localhost', port='8888'):
        self._url = 'ws://{}:{}/exchange'.format(host, port)
        self.conn = None
        self.opened = Subject()    # emits the connection once, then completes
        self.messages = Subject()  # emits every incoming websocket message

    def connect(self):
        """Open the websocket and wire its callbacks into the subjects."""
        def handle_opened(connection):
            self.conn = connection
            self.opened.on_next(connection)
            self.opened.on_completed()
            self.opened.dispose()

        def handle_incoming(msg):
            self.messages.on_next(msg)

        pending = websocket_connect(
            self._url, on_message_callback=handle_incoming)
        Observable.from_future(pending).subscribe(handle_opened)

    def write_message(self, message):
        """Send *message* over the established connection."""
        self.conn.write_message(message)
def test_completed(self):
    """A completed request stream completes the sink; the source then drives the response."""
    source = Subject()
    request = Subject()
    sink, route_crossroad = make_crossroad_router(source)

    sink.subscribe(
        on_next=lambda i: self.on_next('sink', i),
        on_error=lambda e: self.on_error('sink', e),
        on_completed=lambda: self.on_completed('sink'))

    request \
        .let(route_crossroad) \
        .subscribe(
            on_next=lambda i: self.on_next('response', i),
            on_error=lambda e: self.on_error('response', e),
            on_completed=lambda: self.on_completed('response'))

    # Drive the request side and verify it reached the sink.
    request.on_next(1)
    request.on_completed()
    self.assertEqual(1, len(self.actual['sink']['next']))
    self.assertEqual(1, self.actual['sink']['next'][0])
    self.assertTrue(self.actual['sink']['completed'])

    # Drive the source side and verify the routed response.
    source.on_next(2)
    source.on_completed()
    self.assertEqual(1, len(self.actual['response']['next']))
    self.assertEqual(2, self.actual['response']['next'][0])
    self.assertTrue(self.actual['response']['completed'])
class Closeable(ABC):
    """Mixin tracking a one-shot close event, replayed to late subscribers."""

    def __init__(self) -> None:
        self.__close_reason: Optional[str] = None
        self.__close_subject = Subject()

    def close(self, reason: str) -> None:
        """Close once with *reason*; subsequent calls are no-ops."""
        if self.closed:
            return
        self.__close_reason = reason
        self.__close_subject.on_next((self, reason))
        self.__close_subject.on_completed()

    def on_close(self) -> Observable:
        """Observable of (self, reason); emits immediately when already closed."""
        if self.closed:
            return rx.just((self, self.__close_reason))
        return self.__close_subject

    @property
    def closed(self) -> bool:
        # The subject is stopped exactly once close() has completed it.
        return self.__close_subject.is_stopped

    @property
    def close_reason(self) -> Optional[str]:
        return self.__close_reason
class WSSubject(Observer):
    """Bridges an aiohttp WebSocketResponse into an Rx stream (and back)."""

    def __init__(self, web_socket: WebSocketResponse):
        super(WSSubject, self).__init__()
        self._web_socket = web_socket
        self._push_subject = Subject()

    def to_observable(self):
        """Stream of messages read from the socket by process()."""
        return self._push_subject

    async def process(self):
        """Pump the socket until it closes, then complete the stream."""
        async for incoming in self._web_socket:
            self._push_subject.on_next(incoming)
        self._push_subject.on_completed()

    def on_next(self, data):
        self._web_socket.send_str(data)

    def on_completed(self):
        # close web socket
        # has to be coroutine to close ws
        pass

    def on_error(self, error):
        # send error and close web socket
        pass
def test_produces_a_payload_per_subscription_event():
    # type: () -> None
    """Each important email pushed through the stream yields exactly one payload."""
    stream = Subject()
    send_important_email, subscription = create_subscription(stream)
    payload = []
    subscription.subscribe(payload.append)

    # First email: inbox grows to 1 unread / 2 total.
    send_important_email(
        Email(
            from_="*****@*****.**",
            subject="Alright",
            message="Tests are good",
            unread=True,
        )
    )
    expected_payload = {
        "importantEmail": {
            "email": {"from": "*****@*****.**", "subject": "Alright"},
            "inbox": {"unread": 1, "total": 2},
        }
    }
    assert len(payload) == 1
    assert payload[0].data == expected_payload

    # Second email: counters advance to 2 unread / 3 total.
    send_important_email(
        Email(
            from_="*****@*****.**",
            subject="Tools",
            message="I <3 making things",
            unread=True,
        )
    )
    expected_payload = {
        "importantEmail": {
            "email": {"from": "*****@*****.**", "subject": "Tools"},
            "inbox": {"unread": 2, "total": 3},
        }
    }
    assert len(payload) == 2
    assert payload[-1].data == expected_payload

    # The client decides to disconnect
    stream.on_completed()
    # After completion, no further payloads may be produced.
    send_important_email(
        Email(
            from_="*****@*****.**",
            subject="Important",
            message="Read me please",
            unread=True,
        )
    )
    assert len(payload) == 2
class HttpConnection(Protocol):  # Protocol for Twisted
    """Twisted protocol exposing new connections and their bytes as Rx subjects."""

    # Class-level subject: every new connection instance announces itself here.
    listener = Subject()  # shared by all connections

    def connectionMade(self):
        # Per-connection stream of received byte chunks.
        self.data_in = Subject()
        self.listener.on_next(self)

    def connectionLost(self, reason=connectionDone):
        self.data_in.on_completed()

    def dataReceived(self, data):
        self.data_in.on_next(data)
def action(scheduler, state=None):
    # One tick of the window timer. Closure over the enclosing operator's
    # state: q (open windows), observer, is_shift/is_span (window-with-time
    # semantics), ref_count_disposable and create_timer.
    s = None
    if is_shift:
        # Open a new window and hand it to the observer (ref-counted).
        s = Subject()
        q.append(s)
        observer.on_next(add_ref(s, ref_count_disposable))
    if is_span:
        # Close the oldest window.
        s = q.pop(0)
        s.on_completed()
    # Schedule the next tick.
    create_timer()
def action(scheduler, state=None):
    # One tick of the window timer. Closure over the enclosing operator's
    # state: q (open windows), observer, is_shift/is_span (window-with-time
    # semantics), ref_count_disposable and create_timer.
    s = None
    if is_shift:
        # Open a new window and hand it to the observer (ref-counted).
        s = Subject()
        q.append(s)
        observer.on_next(add_ref(s, ref_count_disposable))
    if is_span:
        # Close the oldest window.
        s = q.pop(0)
        s.on_completed()
    # Schedule the next tick.
    create_timer()
def test_bad_json(self):
    """read_configuration must surface a JSONDecodeError for invalid config JSON."""
    # test expectations
    expected_file_request = file.Read(id='config',
                                      path='/foo/config.json',
                                      size=-1,
                                      mode='r')

    # test setup
    file_response = Subject()
    argv = Subject()
    sink = config.read_configuration(
        config.Source(file_response=file_response, argv=argv))
    configuration = None
    configuration_error = None
    file_request = None

    def config_on_next(i):
        nonlocal configuration
        configuration = i

    def config_on_error(e):
        nonlocal configuration_error
        configuration_error = e

    def file_request_on_next(i):
        nonlocal file_request
        file_request = i

    sink.configuration.subscribe(on_next=config_on_next,
                                 on_error=config_on_error)
    sink.file_request.subscribe(on_next=file_request_on_next)

    # feed source observables
    argv.on_next('theexe')
    argv.on_next('--config')
    argv.on_next('/foo/config.json')
    argv.on_completed()

    self.assertEqual(expected_file_request, file_request)

    # '{ "foo": bar}' is deliberately malformed JSON (bar is unquoted).
    file_response.on_next(
        file.ReadResponse(id='config',
                          path='/foo/config.json',
                          data=Observable.just('{ "foo": bar}')))
    self.assertIsInstance(configuration_error, json.decoder.JSONDecodeError)
class AsyncConnection(asyncio.Protocol, Connection):
    """asyncio protocol exposing the transport as Rx streams.

    Incoming bytes are published on ``data_in``; anything pushed into
    ``data_out`` is written to the transport. Completion or error on
    ``data_out`` closes the connection.
    """

    def __init__(self, on_connect):
        super().__init__()
        # Callback invoked with this instance once the transport is up.
        self.on_connect = on_connect

    def connection_made(self, transport: asyncio.Transport):
        peername = transport.get_extra_info('peername')
        logger.info('Connection from %s', peername)
        self.transport = transport
        self.data_in = Subject()  # emits received byte chunks
        self.data_out = AnonymousObserver(self.on_data_out_next,
                                          self.on_data_out_error,
                                          self.on_data_out_completed)
        self.on_connect(self)

    def eof_received(self):
        logger.debug('data eof received')
        self.data_in.on_completed()

    def resume_writing(self):
        logger.debug('data resume')

    def pause_writing(self):
        logger.debug('data pause')

    def connection_lost(self, exc):
        logger.debug('data connection lost')
        # NOTE(review): asyncio passes exc=None on a clean close; forwarding
        # None to on_error looks questionable — confirm subscribers handle it.
        self.data_in.on_error(exc)

    def data_received(self, data):
        logger.debug('data received: %s', data)
        self.data_in.on_next(data)

    def on_data_out_next(self, data):
        logger.debug('sending: %s', data)
        self.transport.write(data)

    def on_data_out_error(self, exception):
        logger.exception('data_out error: %r', exception)
        self.close()

    def on_data_out_completed(self):
        logger.info('data_out completed')
        self.close()

    def close(self):
        self.transport.close()
def _upload_many(self, items: Collection[UploadData], subject: Subject):
    """Upload *items* concurrently, pushing each result to *subject*.

    A progress bar is shown while the uploads run; results are emitted in
    completion order and the subject is completed when all uploads finish.

    Fix: previously a failed upload let ``future.result()`` propagate,
    leaving the progress bar attached and the subject never terminated.
    Errors are now forwarded to ``subject.on_error`` and the bar is always
    detached.
    """
    items_count = len(items)
    finished_items_count = 0
    app: PlayerApp = PlayerApp.get_instance()
    pc = ProgressComponent()
    pc.left_text = "uploading to cloud"
    pc.right_text = f"0 / {items_count}"
    app.ui.stack_layout.add(pc)
    try:
        with concurrent.futures.ThreadPoolExecutor(max_workers=3) as executor:
            futures = [executor.submit(self._upload, item) for item in items]
            for future in concurrent.futures.as_completed(futures):
                finished_items_count += 1
                pc.progress = finished_items_count / items_count
                pc.right_text = f"{finished_items_count} / {items_count}"
                # result() re-raises any exception from the worker.
                subject.on_next(future.result())
        subject.on_completed()
    except Exception as exc:
        # Report the failure through the Rx channel instead of crashing.
        subject.on_error(exc)
    finally:
        pc.detach()
def test_base(self):
    """read_configuration: argv names a file; its JSON content becomes the configuration."""
    # test expectations
    expected_file_request = file.Read(id='config',
                                      path='/foo/config.json',
                                      size=-1,
                                      mode='r')

    # test setup
    file_response = Subject()
    argv = Subject()
    sink = config.read_configuration(
        config.Source(file_response=file_response, argv=argv))
    configuration = None
    file_request = None

    def config_on_next(i):
        nonlocal configuration
        configuration = i

    def file_request_on_next(i):
        nonlocal file_request
        file_request = i

    sink.configuration.subscribe(on_next=config_on_next)
    sink.file_request.subscribe(on_next=file_request_on_next)

    # feed source observables
    argv.on_next('theexe')
    argv.on_next('--config')
    argv.on_next('/foo/config.json')
    argv.on_completed()

    self.assertEqual(expected_file_request, file_request)

    # Valid JSON: the parsed dict is converted to a namedtuple configuration.
    file_response.on_next(
        file.ReadResponse(id='config',
                          path='/foo/config.json',
                          data=Observable.just('{ "foo": "bar"}')))
    self.assertEqual(dict_to_namedtuple({"foo": "bar"}), configuration)
class OptionsBag:
    """Ordered name->value store for part options with change ('stale') notification."""

    def __init__(self, metadata: PartMetadata, model=None):
        # Preserves declaration order from metadata so integer indexing works.
        self.options_bag = OrderedDict()
        self.is_stale = True
        self.OnStale = Subject()  # emits (name, value) on the first change after set_fresh()
        for opt in metadata.options_bag:
            # Prefer the persisted model value, fall back to the metadata default.
            if model is not None and opt.name in model['options']:
                value = model['options'][opt.name]
            else:
                value = opt.value
            self.options_bag[opt.name] = value
        self.is_stale = False

    def __getitem__(self, item):
        # Integer indexing: translate position -> key (O(n) via islice).
        if isinstance(item, int):
            item = next(islice(self.options_bag.keys(), item, item + 1))
        return self.options_bag[item]

    def __setitem__(self, key, value):
        # hack: We allow these sets for now just to keep things working
        if isinstance(key, int):
            key = next(islice(self.options_bag.keys(), key, key + 1))
        self.options_bag[key] = value
        self.set_stale(key, value)

    def __iter__(self):
        return iter(self.options_bag)

    def __del__(self):
        # NOTE(review): relying on __del__ for completion is fragile — it may
        # run late or not at all; an explicit close()/dispose() would be safer.
        self.OnStale.on_completed()
        del self.options_bag

    def set_stale(self, name, value):
        # Only the first change after set_fresh() is broadcast.
        if self.is_stale:
            return
        self.is_stale = True
        self.OnStale.on_next((name, value))

    def set_fresh(self):
        self.is_stale = False
def test_produces_a_payload_per_subscription_event():
    """Each important email pushed through the stream yields exactly one payload."""
    stream = Subject()
    send_important_email, subscription = create_subscription(stream)
    payload = []
    subscription.subscribe(payload.append)

    # First email: inbox grows to 1 unread / 2 total.
    send_important_email(Email(
        from_='*****@*****.**',
        subject='Alright',
        message='Tests are good',
        unread=True,
    ))
    expected_payload = {
        'importantEmail': {
            'email': {
                'from': '*****@*****.**',
                'subject': 'Alright',
            },
            'inbox': {
                'unread': 1,
                'total': 2,
            },
        }
    }
    assert len(payload) == 1
    assert payload[0].data == expected_payload

    # Second email: counters advance to 2 unread / 3 total.
    send_important_email(Email(
        from_='*****@*****.**',
        subject='Tools',
        message='I <3 making things',
        unread=True,
    ))
    expected_payload = {
        'importantEmail': {
            'email': {
                'from': '*****@*****.**',
                'subject': 'Tools',
            },
            'inbox': {
                'unread': 2,
                'total': 3,
            },
        }
    }
    assert len(payload) == 2
    assert payload[-1].data == expected_payload

    # The client decides to disconnect
    stream.on_completed()
    # After completion, no further payloads may be produced.
    send_important_email(Email(
        from_='*****@*****.**',
        subject='Important',
        message='Read me please',
        unread=True,
    ))
    assert len(payload) == 2
class DockerExperiment():
    """Distributed GA worker: consumes populations from Redis, mixes batches of 3, re-publishes."""

    def __init__(self, env):
        self.counter = 0     # number of messages consumed so far
        self.state = "work"  # 'work' keeps read_from_queue looping; 'stop' ends it
        self.env = env
        self.problem_id = env["problem"]["problem_id"]
        self.consumed_messages = Subject()  # raw messages read from Redis
        self.messages = Subject()           # mixed populations to publish back
        self.population_objects_topic = "population-objects"
        # Mix matching populations in batches of 3, up to max_iterations messages.
        self.consumed_messages\
            .filter(lambda x: x["problem"]["problem_id"] == self.problem_id) \
            .take(env["problem"]["max_iterations"])\
            .buffer_with_count(3)\
            .subscribe(
                on_next=lambda x: self.population_mixer(x), on_completed=self.finish)
        # Side channel: count every consumed message and stop early on convergence.
        self.consumed_messages.subscribe(
            lambda message: self.one_more(message),
            on_completed=lambda: print("MESSAGES COMPLETED"))
        self.messages.publish()
        self.messages.subscribe(
            lambda populations: self.produce(populations),
            on_completed=lambda: print("MESSAGES COMPLETED"))

    def one_more(self, message):
        """Count a consumed message and finish when the best score converges."""
        #print(message)
        print('CONSUMED:{}, Max {}'.format(
            self.counter, self.env["problem"]["max_iterations"]))
        self.counter += 1
        if 'best_score' in message:
            error = abs(message['best_score'] - message["fopt"])
            print('Best:{}, Fopt {}, Error {}'.format(message['best_score'],
                                                      message["fopt"], error))
            # Converged: close enough to the known optimum.
            if 1e-8 >= error:
                self.finish()

    def finish(self):
        """Stop the worker loop and complete the outbound stream."""
        print("Consume Finished")
        self.state = "stop"
        self.messages.on_completed()
        self.messages.dispose()
        #sys.exit(0)

    def population_mixer(self, populations):
        """Cross the best individuals between each pair of the 3 buffered populations, then re-emit."""
        if len(populations) == 3:
            print("MIXER:", len(populations))
            #populations = [json.loads(message.data) for message in populations]contr
            populations[0]['population'] = cxBestFromEach(
                populations[0]['population'], populations[1]['population'])
            populations[1]['population'] = cxBestFromEach(
                populations[1]['population'], populations[2]['population'])
            populations[2]['population'] = cxBestFromEach(
                populations[2]['population'], populations[0]['population'])
            # Emit each mixed population individually (can't map on_next over them).
            self.messages.on_next(populations[0])
            self.messages.on_next(populations[1])
            self.messages.on_next(populations[2])

    def read_from_queue(self):
        """Blocking consume loop: pop messages from Redis and feed the Rx pipeline."""
        print("worker start")
        while self.state == 'work':
            print('working')
            data = None
            # blpop blocks up to 2 s waiting for a message.
            message = r.blpop(TOPIC_CONSUME, 2)
            if not message:
                print("NO DATA, WAITING...")
                time.sleep(2)
            else:
                data = message[1]
                pop_dict = json.loads(data)
                #print("message:data:", pop_dict)
                #print("message:type:", type(pop_dict))
                #if 'best_score' in pop_dict:
                #    error = abs(pop_dict['best_score']-pop_dict["fopt"])
                #    print ('Best:{}, Fopt {}, Error {}'.format( pop_dict['best_score'], pop_dict["fopt"], error ))
                #if 1e-8 >= error:
                #    self.finish()
                print("message read from queue")
                self.log_to_redis_coco(pop_dict)
                self.consumed_messages.on_next(pop_dict)
        return self.problem_id

    def produce(self, population):
        """Serialize a population and push it to the outbound Redis list."""
        print("pop sent:", "population")
        json_data = json.dumps(population)
        # Data must be a bytestring
        message = json_data.encode('utf-8')
        ack = r.lpush(TOPIC_PRODUCE, message)
        print("Produce:", ack)

    def log_to_redis_coco(self, population):
        """Append a COCO-style benchmark record for *population* to the Redis log."""
        log_name = "log:swarm"
        r.lpush(log_name, json.dumps(self.get_benchmark_data(population)))

    def get_benchmark_data(self, population):
        """Flatten a population message into the benchmark-log record."""
        #print("\n\npopulation\n\n", population)
        return {
            "time_stamp": datetime.timestamp(datetime.now()),
            "evals": population["iterations"],
            "instance": population["problem"]["instance"],
            "worker_id": population["worker_id"],
            "params": {
                "sample_size": population["population_size"],
                "init": "random:[-5,5]",
                "NGEN": population["params"]["GA"]["iterations"]
            },
            "experiment_id": population['experiment']["experiment_id"],
            "algorithm": population["algorithm"],
            "alg_params": population["params"][population["algorithm"]],
            "dim": population["problem"]["dim"],
            "benchmark": population["problem"]["function"],
            "fopt": population["fopt"],
            "message_counter": self.counter,
            "message_id": population["message_id"],
            # best_score may be absent; falls back to None (also when falsy/0).
            "best_score": ("best_score" in population and population["best_score"]) or None
        }
from rx import Observable, Observer
from rx.subjects import Subject


class PrintObserver(Observer):
    """Observer that logs every notification to stdout."""

    def on_next(self, value):
        print("Value :", value)

    def on_error(self, error):
        print("Error :", error)

    def on_completed(self):
        print("Completed")


subject = Subject()
subject.subscribe(PrintObserver())

# Push 0..9 through the subject, then signal completion.
for number in range(10):
    subject.on_next(number)
subject.on_completed()
class ControlledSubject(Observable):
    """Subject that buffers pushed values until a consumer request()s them.

    Values arriving while no request is outstanding are queued (when
    ``enable_queue`` is True) or dropped otherwise. ``request(n)`` drains up
    to *n* queued values; any remaining budget is consumed by future
    ``on_next`` calls. Error/completion notifications are deferred until the
    queue has been drained.
    """

    def __init__(self, enable_queue=True):
        super(ControlledSubject, self).__init__(self._subscribe)

        self.subject = Subject()
        self.enable_queue = enable_queue
        self.queue = [] if enable_queue else None
        self.requested_count = 0  # outstanding request budget (-1 = unbounded)
        self.requested_disposable = Disposable.empty()
        self.error = None
        self.has_failed = False
        self.has_completed = False
        self.controlled_disposable = Disposable.empty()

    def _subscribe(self, observer):
        return self.subject.subscribe(observer)

    def on_completed(self):
        check_disposed(self)
        self.has_completed = True

        # Defer completion while queued values remain to be delivered.
        if not self.enable_queue or not len(self.queue):
            self.subject.on_completed()

    def on_error(self, error):
        check_disposed(self)
        self.has_failed = True
        self.error = error

        # Defer the error while queued values remain to be delivered.
        if not self.enable_queue or not len(self.queue):
            self.subject.on_error(error)

    def on_next(self, value):
        check_disposed(self)
        has_requested = False

        if not self.requested_count:
            if self.enable_queue:
                # BUG FIX: was `self.queue.push(value)` — Python lists have no
                # push() (a JS-ism); it raised AttributeError on every queued
                # value. append() is the correct equivalent.
                self.queue.append(value)
        else:
            if self.requested_count != -1:
                requested_count = self.requested_count
                self.requested_count -= 1
                if requested_count == 0:
                    self.dispose_current_request()
            has_requested = True

        if has_requested:
            self.subject.on_next(value)

    def _process_request(self, number_of_items):
        """Drain up to *number_of_items* queued values; flush deferred error/completion."""
        if self.enable_queue:
            #console.log('queue length', self.queue.length)
            while len(self.queue) >= number_of_items and number_of_items > 0:
                # console.log('number of items', number_of_items)
                # BUG FIX: was `self.queue.shift()` (a JS-ism); pop(0) is the
                # Python call that removes and returns the head of the list.
                self.subject.on_next(self.queue.pop(0))
                number_of_items -= 1

            if len(self.queue):
                return {
                    "number_of_items": number_of_items,
                    "return_value": True
                }
            else:
                return {
                    "number_of_items": number_of_items,
                    "return_value": False
                }

        if self.has_failed:
            self.subject.on_error(self.error)
            self.controlled_disposable.dispose()
            self.controlled_disposable = Disposable.empty()
        elif self.has_completed:
            self.subject.on_completed()
            self.controlled_disposable.dispose()
            self.controlled_disposable = Disposable.empty()

        return {"number_of_items": number_of_items, "return_value": False}

    def request(self, number):
        """Ask for *number* more items; returns a disposable cancelling the request."""
        check_disposed(self)
        self.dispose_current_request()

        r = self._process_request(number)
        number = r["number_of_items"]
        if not r["return_value"]:
            self.requested_count = number

            def action():
                self.requested_count = 0

            self.requested_disposable = Disposable(action)
            return self.requested_disposable
        else:
            return Disposable.empty()

    def dispose_current_request(self):
        self.requested_disposable.dispose()
        self.requested_disposable = Disposable.empty()

    def dispose(self):
        self.is_disposed = True
        # FIXME: something wrong in RxJS?
        self.error = None
        self.subject.dispose()
        self.requested_disposable.dispose()
class OptiRx(object):
    """Rx-driven genetic optimizer.

    Alternates genetic step-ups with SLSQP refinement, publishing solved and
    unsolved generations on subjects. The stream wiring in init_logic()
    decides when to switch strategy and when to stop.
    """

    def __init__(self, chr_contr, executor, gen0=None, opts=default_options):
        self.gen0 = gen0                     # optional pre-built initial generation
        self.chr_contr = chr_contr
        self.pauser = Pauser()
        self.executor = executor
        self.executor.kpp_fun = self.pauser.kpp
        self.sub_chr_best = ReplaySubject()  # best (num_g, fit, op) so far, replayed
        self.sub_gen_solved = Subject()      # generations with fitness computed
        self.sub_gen_unsolved = Subject()    # generations entering an iteration
        self.subj_done = Subject()           # completed when run_sync() ends
        self.su_genetic = StepUpGenetic(chr_contr)
        self.opts = dict(default_options)
        self.init_opts(opts)
        self.stop_flag = True  # True -> run_sync loop ends
        self.gen_flag = True   # True -> genetic step; False -> SLSQP step
        self.init_logic()

    def init_opts(self, opts):
        """Overlay *opts* on the defaults and push GA parameters into the stepper."""
        for k in opts:
            self.opts[k] = opts[k]
        self.su_genetic.pop_count = self.opts['pop_count']
        self.su_genetic.prob_mut = self.opts['prob_mut']
        self.su_genetic.prob_mut_gene = self.opts['prob_mut_gene']
        self.su_genetic.prob_cross = self.opts['prob_cross']
        self.su_genetic.elite_count = self.opts['elite_count']

    def init_logic(self):
        """Wire the Rx streams that track best results and drive switch/stop decisions."""
        # (fit, op)...
        def best_fun(gener):
            fit, op = gener.get_best(True)
            return gener.num_g, fit, op

        best_stream = self.sub_gen_solved.map(best_fun).publish()
        # Emits only when fitness actually improves (distinct on the fit value).
        improve_best = best_stream.distinct(lambda tp: tp[1])
        improve_best.subscribe(self.sub_chr_best)

        def wind_switch(beep_inds):
            # Pass through only items whose index-within-window is in beep_inds.
            beep_set = set(beep_inds)

            def inner(s):
                inds = Observable.interval(0)
                return s.zip(inds, lambda x, ind: ind) \
                    .filter(lambda ind: ind in beep_set)
            return inner

        # Windows restart on each improvement, effectively counting stagnant
        # generations since the last improvement.
        switch = best_stream\
            .window(improve_best.skip(1))\
            .flat_map(wind_switch([self.opts['n_gener_switch'], self.opts['n_gener_done']-2]))
        done = best_stream\
            .window(improve_best.skip(1))\
            .flat_map(wind_switch([self.opts['n_gener_done']]))

        def stop(ind):
            self.stop_flag = True

        def switch_f(ind):
            self.gen_flag = False

        switch.subscribe(switch_f)
        done.subscribe(stop)
        # Hard cap on the total number of generations.
        best_stream.skip(self.opts['n_gener_max'] - 1).subscribe(stop)
        best_stream.connect()

    def paused(self):
        return self.pauser.paused

    def pause(self):
        self.pauser.pause()

    def unpause(self):
        self.pauser.play()

    def run_sync(self):
        """Run the optimization loop on the calling thread until stop_flag is set."""
        if self.gen0:
            gen0 = self.gen0
        else:
            gen0 = Generation(self.chr_contr, 0)
            gen0.get_init_pop(self.opts['pop_count'], self.opts['seed0'])
        self.stop_flag = False
        self.gen_flag = False
        while not self.stop_flag:
            self.sub_gen_unsolved.on_next(gen0)
            if self.gen_flag:
                gen0 = self.stepup_gen(gen0)
            else:
                gen0 = self.stepup_slsqp(gen0)
                self.gen_flag = True
        self.subj_done.on_completed()

    def run(self):
        """Run the optimization loop on a background thread."""
        cs1 = threading.Thread(name='calc_thread', target=self.run_sync)
        cs1.start()

    def stepup_gen(self, gen, seed=None):
        """Evaluate *gen*, publish it, and breed the next generation genetically."""
        self.calc_gen(gen)
        self.sub_gen_solved.on_next(gen)
        gen_dict = self.su_genetic.step_up(gen.pop, seed)
        gen2 = Generation(self.chr_contr, gen.num_g + 1, gen_dict)
        return gen2

    def calc_gen(self, gen):
        """Compute missing fitnesses for *gen* via the executor; blocks until all arrive."""
        fitnessless = gen.get_fitlessness()
        if len(fitnessless) == 0:
            return
        tick_tack = ReplaySubject()  # fires once when every result has arrived
        fset = seq(fitnessless) \
            .map(lambda wchr: (wchr['name'], wchr['id'])) \
            .to_set()
        results = []

        def remove_fset(result):
            wchr, fit = result
            tp = wchr['name'], wchr['id']
            if tp in fset:
                fset.discard(tp)
                results.append(result)
            if len(fset) == 0:
                tick_tack.on_next(0)

        s1 = self.executor.sub_compl.subscribe(on_next=remove_fset)
        for fn in fitnessless:
            self.executor.sub_in.on_next(fn)
        # Results may already all be in (synchronous executor) before we block.
        if len(fset) == 0:
            tick_tack.on_next(0)
        tick_tack.to_blocking().first()
        tick_tack.dispose()
        s1.dispose()
        gen.init_fitnesses(results)
        if len(gen.get_fitlessness()) > 0:
            raise AssertionError("Посчитались не все гены в хромосоме")

    def stepup_slsqp(self, gen0):
        """Evaluate *gen0*, refine best individuals with SLSQP in parallel, rebuild the population."""
        self.calc_gen(gen0)
        lst = reduce_pop(gen0.pop_list, self.opts['n_op_direct'])
        opti_lst = [
            OptiSLSQP(op, self.executor.sub_in, self.executor.sub_compl)
            for op in lst
        ]
        tp = ThreadPoolScheduler()
        calc_stream = Observable \
            .from_(opti_lst) \
            .flat_map(lambda op: Observable.just(op).observe_on(tp).map(lambda op: op.run()))
        calc_stream.to_blocking().last_or_default(0)
        for op in lst:
            op.remove_exept_best()
        for o_sls in opti_lst:
            o_sls.dispose_conn()
        gen1 = Generation(self.chr_contr, gen0.num_g + 1, lst)
        self.sub_gen_solved.on_next(gen1)
        gen1.fill_pop(self.opts['pop_count'])
        return gen1
class DockerExperiment():
    """Distributed swarm worker: consumes populations from Redis, mixes batches of 8, re-publishes."""

    def __init__(self, env):
        self.counter = 0     # number of valid messages consumed so far
        self.state = "work"  # 'work' keeps read_from_queue looping; 'stop' ends it
        self.env = env
        self.problem_id = env["problem"]["problem_id"]
        self.consumed_messages = Subject()  # raw messages read from Redis
        self.messages = Subject()           # mixed populations to publish back
        self.population_objects_topic = "population-objects"
        # Shared (published) stream of messages for this problem, capped at
        # max_iterations.
        self.valid_messages = self.consumed_messages\
            .filter(lambda x: x["problem"]["problem_id"] == self.problem_id) \
            .take(env["problem"]["max_iterations"]).publish()
        self.valid_messages.buffer_with_count(8)\
            .subscribe(
                on_next=lambda x: self.population_mixer(x),
                on_completed=self.finish)
        self.valid_messages.subscribe(
            lambda message: self.one_more(message),
            on_completed=lambda: print("MESSAGES COMPLETED"))
        #self.valid_messages.subscribe(lambda message: self.log_to_redis_coco(message), on_completed = lambda : print("MESSAGES COMPLETED"))
        self.valid_messages.connect()
        self.messages.publish()
        self.messages.subscribe(
            lambda populations: self.produce(populations),
            on_completed=lambda: print("MESSAGES COMPLETED"))

    def one_more(self, message):
        """Count and log a consumed message; finish when the best score converges."""
        #print(message)
        print('CONSUMED:{}, Max {}'.format(
            self.counter, self.env["problem"]["max_iterations"]))
        self.counter += 1
        self.log_to_redis_coco(message)
        if 'best_score' in message and message["problem"][
                "problem_id"] == self.problem_id:
            error = abs(message['best_score'] - message["fopt"])
            print('Best:{}, Fopt {}, Error {}'.format(message['best_score'],
                                                      message["fopt"], error))
            # Converged: close enough to the known optimum.
            if 1e-8 >= error:
                self.finish()

    def finish(self):
        """Stop the worker loop and complete both streams."""
        print("Consume Finished")
        self.state = "stop"
        self.consumed_messages.on_completed()
        self.messages.on_completed()
        #self.messages.dispose() # not needed? Raised an exception
        #sys.exit(0)

    def population_mixer(self, populations):
        """Merge the scout bees across a full batch of 8 populations, then re-emit each."""
        if len(populations) == 8:
            print("MIXER:", len(populations))
            #populations = [json.loads(message.data) for message in populations]contr
            #populations[0]['population'] = cxBestFromEach(populations[0]['population'], populations[1]['population'])
            #populations[1]['population'] = cxBestFromEach(populations[1]['population'], populations[2]['population'])
            #populations[2]['population'] = cxBestFromEach(populations[2]['population'], populations[0]['population'])
            populations[0]['population'], populations[1]['population'], \
                populations[2]['population'], populations[3]['population'], \
                populations[4]['population'], populations[5]['population'], \
                populations[6]['population'], populations[7]['population'] = merge_scoutbee_v1(
                    populations[0]['population'], populations[1]['population'],
                    populations[2]['population'], populations[3]['population'],
                    populations[4]['population'], populations[5]['population'],
                    populations[6]['population'], populations[7]['population'])
            # Iteration: need to know whether this will be the only mixing
            # method or another one is needed.
            # Will there be a flag for the first optimization?
            # Should they be optimized twice?
            # Where does cxBestFromEach come from?
            # Emit each mixed population individually (can't map on_next over them).
            self.messages.on_next(populations[0])
            self.messages.on_next(populations[1])
            self.messages.on_next(populations[2])
            self.messages.on_next(populations[3])
            self.messages.on_next(populations[4])
            self.messages.on_next(populations[5])
            self.messages.on_next(populations[6])
            self.messages.on_next(populations[7])

    def read_from_queue(self):
        """Blocking consume loop: pop messages from Redis and feed the Rx pipeline."""
        while self.state == 'work':
            print('working')
            data = None
            # if we need a QUEUE we use blpop
            # we can also use a STACK with brpop
            message = r.blpop(TOPIC_CONSUME, 2)
            if not message:
                print("NO DATA, WAITING...")
                time.sleep(2)
            else:
                data = message[1]
                pop_dict = json.loads(data)
                print("message read from queue")
                self.consumed_messages.on_next(pop_dict)
        return self.problem_id

    def produce(self, population):
        """Serialize a population and push it to the outbound Redis list."""
        print("pop sent:", "population")
        json_data = json.dumps(population)
        # Data must be a bytestring
        message = json_data.encode('utf-8')
        ack = r.rpush(TOPIC_PRODUCE, message)
        print("Produce:", ack)

    def log_to_redis_coco(self, population):
        """Append a COCO-style benchmark record for *population* to the Redis log."""
        log_name = "log:swarm"
        r.rpush(log_name, json.dumps(self.get_benchmark_data(population)))

    def get_benchmark_data(self, population):
        """Flatten a population message into the benchmark-log record."""
        #print("\n\npopulation\n\n", population)
        return {
            "time_stamp": datetime.timestamp(datetime.now()),
            "evals": population["iterations"],
            "instance": population["problem"]["instance"],
            "worker_id": population["worker_id"],
            "params": {
                "sample_size": population["population_size"],
                "init": "random:[-5,5]",
                "NGEN": population["params"]["GA"]["iterations"]
            },
            "experiment_id": population['experiment']["experiment_id"],
            "algorithm": population["algorithm"],
            "alg_params": population["params"][population["algorithm"]],
            "dim": population["problem"]["dim"],
            "benchmark": population["problem"]["function"],
            "fopt": population["fopt"],
            "message_counter": self.counter,
            "message_id": population["message_id"],
            # best_score may be absent; falls back to None (also when falsy/0).
            "best_score": ("best_score" in population and population["best_score"]) or None
        }
def test_produces_a_payload_per_subscription_event():
    """Each important email pushed through the stream yields exactly one payload."""
    stream = Subject()
    send_important_email, subscription = create_subscription(stream)
    received = []
    subscription.subscribe(received.append)

    def expected(subject_line, unread, total):
        # Payload shape shared by every event in this test.
        return {
            'importantEmail': {
                'email': {
                    'from': '*****@*****.**',
                    'subject': subject_line,
                },
                'inbox': {
                    'unread': unread,
                    'total': total,
                },
            }
        }

    send_important_email(
        Email(
            from_='*****@*****.**',
            subject='Alright',
            message='Tests are good',
            unread=True,
        ))
    assert len(received) == 1
    assert received[0].data == expected('Alright', 1, 2)

    send_important_email(
        Email(
            from_='*****@*****.**',
            subject='Tools',
            message='I <3 making things',
            unread=True,
        ))
    assert len(received) == 2
    assert received[-1].data == expected('Tools', 2, 3)

    # The client decides to disconnect
    stream.on_completed()
    send_important_email(
        Email(
            from_='*****@*****.**',
            subject='Important',
            message='Read me please',
            unread=True,
        ))
    assert len(received) == 2
class Publisher(Subscriber[T], abc.Container):
    """
    Implements the wrapper for an Observer and a Subject.
    Broadcasts the Events to the subscribed Observers on the Subject.

    Each Event is paired with an IntEnum event type (or kind), and invokes
    specific event handler methods for each Event type.

    Generics:
        T: Contextual object associated with each Event.

    Extends:
        Subscriber[T]
        abc.Container

    Attributes:
        subject: The Subject for Observers to subscribe to.
        presubj: The Subject for Observers to subscribe to,
            whose on_next are always called before,
            self.on_next and the subjects' on_next.
    """

    subject: Subject
    presubj: Subject

    def __init__(self, events: Union[IntEnum, None] = None):
        """
        Initialize this Publisher with the given Event types.

        Args:
            events: The registered Event types (kind).
                If None, no register Event types.
        """
        Subscriber.__init__(self, events)
        self.presubj = Subject()
        self.subject = Subject()

    ### Methods: Queries

    def __contains__(self, observer: Observer) -> bool:
        """
        Determine whether or not the given is already subscribed
        to this Publisher.

        Args:
            observer: The Observer to test whether or not
                already subscribed to this Publisher.

        Returns:
            True: If already subscribed.
            False: Otherwise.
        """
        return observer in self.subject.observers or \
            observer in self.presubj.observers

    ### Methods: Subscribe

    def subscribe(self, observer: Observer, before: bool = False):
        """
        Subscribes the given Observer to the subject of this
        Publisher when an Event occurs.

        Args:
            observer: The Observer to subscribe to.
            before: Boolean flag for whether or not to receive Event
                before self.on_next or after self.on_next.
                True, for before; False, otherwise.
        """
        # Double subscription is a caller error.
        assert observer not in self
        subject = self.presubj if before else self.subject
        subject.subscribe(observer)

    ### Methods: Broadcast

    def broadcast(self, event: Event, **kwargs):
        """
        Broadcast the given event to subscribed Observers.

        Args:
            event: The Event to be broadcasted.
            kwargs: The parameters to be added or modified
                within the given Event.
        """
        # Skip events this Publisher itself originated to avoid echo loops.
        if hasattr(event, 'source') and event.source is self:
            return
        event.setparams(source=self, **kwargs)
        self.subject.on_next(event)

    ### Methods: Event Handlers

    def on_next(self, event: Event[T]):
        """
        The Event handler when an Event occurs.
        Broadcast next Event.

        Overrides:
            Subscriber.on_next

        Args:
            event: The next Event to occur.
        """
        if hasattr(event, 'source') and event.source is self:
            return
        # "Before" observers always see the event first.
        self.presubj.on_next(event)
        try:
            if self.events:
                Subscriber.on_next(self, event)
        finally:
            # Broadcast even if a kind-specific handler raised.
            self.broadcast(event)

    def on_completed(self):
        """
        The Event handler when no more Events.
        Subscription completed.
        Broadcast completion Event.

        Overrides:
            Subscriber.on_completed
        """
        self.presubj.on_completed()
        self.subject.on_completed()

    def on_error(self, exception: Exception):
        """
        The Event handler when an error occurs.
        Broadcast error Event.

        Overrides:
            Subscriber.on_error

        Args:
            exception: The error that occurred.
        """
        self.presubj.on_error(exception)
        self.subject.on_error(exception)
from rx import Observable
from rx.subjects import Subject

numbers = Subject()
trigger = Subject()

# Items from `numbers` are dropped until `trigger` emits its first item.
numbers.skip_until(trigger).subscribe(
    on_next=lambda i: print("on_next {}".format(i)),
    on_error=lambda e: print("on_error: {}".format(e)),
    on_completed=lambda: print("on_completed"))

numbers.on_next(1)      # dropped: gate still closed
numbers.on_next(2)      # dropped
trigger.on_next(True)   # opens the gate
numbers.on_next(3)      # delivered
numbers.on_next(4)      # delivered
numbers.on_completed()
segment.writeDigit(3, int(second % 60 / 10)) # Tens segment.writeDigit(4, second % 60 % 10) # Ones else: segment.writeDigit(0, int((diff / 1000) / 10)) # Tens segment.writeDigit(1, int((diff / 1000) % 10)) # Ones segment.writeDigit(3, int(diff / 100) % 10) # Tens segment.writeDigit(4, int(diff % 10)) # Ones # Toggle colon segment.setColon(second % 2) # Toggle colon at 1Hz if (currentmillis > startmillis + LIMIT): # should fire timeout stream.on_completed() # startmillis = currentmillis; input_state = GPIO.input(18) if input_state == False: if (pressed == False): pressStartmillis = time.time() * 1000 pressed = True # time.sleep(0.2) else: if (pressed): pressCurrentmillis = time.time() * 1000 diff = (pressCurrentmillis - pressStartmillis) / 1000.0 print diff
def _scheduler_loop(events_source: EventsSource,
                    classroom_source: ClassroomSource,
                    spiders: List[BaseSpider],
                    settings_source: SettingsSource,
                    stop_event: Event,
                    status_subject: Subject):
    """Background scheduler loop: periodically runs every spider and refreshes
    the event/classroom stores.

    On each pass: if the settings say a refresh is due, emit a START status,
    purge stale events, crawl all spiders, store their classrooms and events,
    and record the refresh time; otherwise skip. In both cases emit a
    COMPLETED status and sleep until the next scheduled refresh. The loop
    exits when `stop_event` is set, then completes `status_subject`.
    """
    while not stop_event.is_set():
        settings = settings_source.get_settings()
        if settings.need_refresh():
            logging.info("Starting scheduler")
            status_subject.on_next(SchedulerStatus("START"))
            events_source.delete_old_events()
            # Run each spider and update the sources with its results.
            for spider in spiders:
                buildings_provider = spider.get_buildings_provider()
                # Maps classroom identifier -> Classroom, used below to
                # resolve the classroom referenced by each crawled event.
                classrooms_dict = {}
                # Add the crawled classrooms to the classroom source.
                for classroom in buildings_provider.get_classrooms():
                    # Create the Building instance.
                    temp_building = Building(
                        classroom.get_building().get_identifier(),
                        classroom.get_building().get_name())
                    # Create the Classroom instance tied to that building.
                    temp_classroom = Classroom(classroom.get_identifier(),
                                               classroom.get_name(),
                                               temp_building,
                                               classroom.get_floor())
                    # If the classroom is not yet present in the source, add it.
                    # NOTE(review): the spider's raw `classroom` is stored here
                    # while `temp_classroom` is what goes in the lookup dict —
                    # confirm the source is meant to receive the raw object.
                    if not classroom_source.is_classroom_present(
                            classroom.get_name()):
                        classroom_source.add_classroom(classroom)
                    # Register the classroom for event lookup.
                    classrooms_dict[
                        classroom.get_identifier()] = temp_classroom
                for event in spider.get_events():
                    # Check that the classroom referenced by the spider's
                    # event is one we actually know about.
                    if event.get_classroom_key() not in classrooms_dict:
                        logging.warning("No classroom found for: " +
                                        event.get_classroom_key())
                    else:
                        # Add the event to the source.
                        events_source.add_event(SpiderEventAdapter(event))
            # Record when this refresh happened.
            settings.update_refresh_date(date_time=datetime.now())
            # Persist the updated settings.
            settings_source.update_settings(settings)
        else:
            logging.info("Scheduler skipped")
        now = datetime.now()
        next_refresh = settings.get_next_refresh_date()
        # NOTE(review): `.seconds` is only the seconds component of the
        # timedelta (0-86399), not total_seconds() — fine for sub-day
        # intervals; confirm refresh periods never exceed one day.
        next_refresh_time = (next_refresh - now).seconds
        logging.info("Next scheduler refresh in %d seconds" % next_refresh_time)
        logging.info("Total number of events: %d" % len(events_source.get_all_events()))
        status_subject.on_next(SchedulerStatus("COMPLETED"))
        # Sleep until it is time to run the spiders again.
        sleep(next_refresh_time)
    status_subject.on_completed()
class ControlledSubject(Observable):
    """A Subject with pull-style backpressure: pushed values are forwarded to
    subscribers only while there is outstanding demand created via
    :meth:`request`; otherwise they are queued (when ``enable_queue``) or
    dropped.
    """

    def __init__(self, enable_queue=True):
        """
        Args:
            enable_queue: When True, values arriving without outstanding
                demand are buffered and replayed on the next request;
                when False they are silently discarded.
        """
        super(ControlledSubject, self).__init__(self._subscribe)
        self.subject = Subject()
        self.enable_queue = enable_queue
        self.queue = [] if enable_queue else None
        # Outstanding demand; -1 means "unbounded".
        self.requested_count = 0
        self.requested_disposable = Disposable.empty()
        self.error = None
        self.has_failed = False
        self.has_completed = False
        self.controlled_disposable = Disposable.empty()

    def _subscribe(self, observer):
        return self.subject.subscribe(observer)

    def on_completed(self):
        check_disposed(self)
        self.has_completed = True
        # With a non-empty queue, completion is deferred until the queue
        # has been drained by _process_request.
        if not self.enable_queue or not len(self.queue):
            self.subject.on_completed()

    def on_error(self, error):
        check_disposed(self)
        self.has_failed = True
        self.error = error
        # As with on_completed: defer the error while queued items remain.
        if not self.enable_queue or not len(self.queue):
            self.subject.on_error(error)

    def on_next(self, value):
        check_disposed(self)
        has_requested = False

        if not self.requested_count:
            # No outstanding demand: buffer if queueing is enabled.
            if self.enable_queue:
                # BUGFIX: was `self.queue.push(value)` — a JavaScript-ism;
                # Python lists have no `push` method.
                self.queue.append(value)
        else:
            # Consume one unit of demand (-1 means unbounded demand).
            if self.requested_count != -1:
                requested_count = self.requested_count
                self.requested_count -= 1
                if requested_count == 0:
                    self.dispose_current_request()
            has_requested = True

        if has_requested:
            self.subject.on_next(value)

    def _process_request(self, number_of_items):
        """Drain up to `number_of_items` queued values into the subject.

        Returns a dict with the remaining item count and whether demand
        is still outstanding (`return_value` True when the queue still
        holds items).
        """
        if self.enable_queue:
            while len(self.queue) >= number_of_items and number_of_items > 0:
                # BUGFIX: was `self.queue.shift()` — a JavaScript-ism;
                # the Python equivalent is pop(0).
                self.subject.on_next(self.queue.pop(0))
                number_of_items -= 1

            if len(self.queue):
                return {
                    "number_of_items": number_of_items,
                    "return_value": True
                }
            else:
                return {
                    "number_of_items": number_of_items,
                    "return_value": False
                }

        # Queue disabled (or drained): flush any terminal notification.
        if self.has_failed:
            self.subject.on_error(self.error)
            self.controlled_disposable.dispose()
            self.controlled_disposable = Disposable.empty()
        elif self.has_completed:
            self.subject.on_completed()
            self.controlled_disposable.dispose()
            self.controlled_disposable = Disposable.empty()

        return {
            "number_of_items": number_of_items,
            "return_value": False
        }

    def request(self, number):
        """Signal demand for `number` items; returns a disposable that
        cancels the outstanding request."""
        check_disposed(self)
        self.dispose_current_request()
        r = self._process_request(number)

        number = r["number_of_items"]
        if not r["return_value"]:
            self.requested_count = number

            def action():
                self.requested_count = 0

            self.requested_disposable = Disposable(action)
            return self.requested_disposable
        else:
            return Disposable.empty()

    def dispose_current_request(self):
        self.requested_disposable.dispose()
        self.requested_disposable = Disposable.empty()

    def dispose(self):
        self.is_disposed = True
        # FIXME: something wrong in RxJS?
        self.error = None
        self.subject.dispose()
        self.requested_disposable.dispose()
def result_selector(left_v, r_obs):
    """Pair the left value with the observable of its matching right values."""
    return left_v, r_obs


def result_dumper(pair):
    """Attach a labelled Dumper to the right-hand observable of a pair."""
    value, rights = pair
    rights.subscribe(Dumper('result left=%s' % value))


left \
    .group_join(right,
                left_duration_selector,
                right_duration_selector,
                result_selector) \
    .subscribe(result_dumper)

# Interactive driver: 'l <v>' / 'r <v>' push a value into the left/right
# stream, 'lds <v>' / 'rds <v>' close that value's duration window, and
# 'exit' ends the session.
while True:
    command = input('')
    if command == 'exit':
        break
    tokens = command.split()
    opcode = tokens[0]
    if opcode == 'l':
        left.on_next(tokens[1])
    elif opcode == 'r':
        right.on_next(tokens[1])
    elif opcode == 'lds':
        left_duration_selectors[tokens[1]].on_completed()
    elif opcode == 'rds':
        right_duration_selectors[tokens[1]].on_completed()
left.on_completed()
class ControlledSubject(ObservableBase, Observer):
    """A Subject with pull-style backpressure.

    Pushed notifications are forwarded to subscribers only against demand
    created via :meth:`request`; with ``enable_queue`` they are otherwise
    buffered as notification objects (OnNext/OnError/OnCompleted) and
    replayed in order when demand arrives.
    """

    def __init__(self, enable_queue=True, scheduler=None):
        """
        Args:
            enable_queue: Buffer undelivered notifications when True;
                drop them when False.
            scheduler: Scheduler used to satisfy requests; defaults to
                the current-thread scheduler.
        """
        super(ControlledSubject, self).__init__()

        self.subject = Subject()
        self.enable_queue = enable_queue
        self.queue = [] if enable_queue else None
        self.requested_count = 0
        self.requested_disposable = Disposable.empty()
        self.error = None
        self.has_failed = False
        self.has_completed = False
        self.scheduler = scheduler or current_thread_scheduler

    def _subscribe_core(self, observer):
        return self.subject.subscribe(observer)

    def on_completed(self):
        self.has_completed = True

        if not self.enable_queue or len(self.queue) == 0:
            self.subject.on_completed()
            self.dispose_current_request()
        else:
            # Defer completion behind any buffered values.
            self.queue.append(OnCompleted())

    def on_error(self, error):
        self.has_failed = True
        self.error = error

        if not self.enable_queue or len(self.queue) == 0:
            self.subject.on_error(error)
            self.dispose_current_request()
        else:
            # Defer the error behind any buffered values.
            self.queue.append(OnError(error))

    def on_next(self, value):
        if self.requested_count <= 0:
            # No outstanding demand: buffer when queueing is enabled.
            # (Was the obscure `x and list.append(...)` expression-statement.)
            if self.enable_queue:
                self.queue.append(OnNext(value))
        else:
            self.requested_count -= 1
            if self.requested_count == 0:
                self.dispose_current_request()
            self.subject.on_next(value)

    def _process_request(self, number_of_items):
        """Replay queued notifications against `number_of_items` of demand.

        Terminal notifications (kind != 'N') are always flushed. Returns
        the demand remaining after the replay.
        """
        if self.enable_queue:
            while len(self.queue) > 0 and (number_of_items > 0 or self.queue[0].kind != 'N'):
                first = self.queue.pop(0)
                first.accept(self.subject)
                if first.kind == 'N':
                    number_of_items -= 1
                else:
                    # Terminal notification: cancel demand and drop the rest.
                    self.dispose_current_request()
                    self.queue = []

        return number_of_items

    def request(self, number):
        """Signal demand for `number` items; returns a disposable that
        cancels the outstanding request."""
        self.dispose_current_request()

        def action(scheduler, i):
            remaining = self._process_request(i)
            # BUGFIX: was `self.has_completed and self.has_failed` — the
            # stream is stopped when EITHER terminal event has occurred.
            stopped = self.has_completed or self.has_failed
            if not stopped and remaining > 0:
                self.requested_count = remaining

                def dispose():
                    self.requested_count = 0

                # Scheduled item is still in progress. Return a new
                # disposable to allow the request to be interrupted
                # via dispose.
                return AnonymousDisposable(dispose)

        self.requested_disposable = self.scheduler.schedule(action, state=number)
        return self.requested_disposable

    def dispose_current_request(self):
        if self.requested_disposable:
            self.requested_disposable.dispose()
            self.requested_disposable = None
class ControlledSubject(ObservableBase, Observer):
    """A Subject with pull-style backpressure.

    Pushed notifications are forwarded only against demand created via
    :meth:`request`; with ``enable_queue`` they are otherwise buffered as
    notification objects and replayed in order when demand arrives.
    """

    def __init__(self, enable_queue=True, scheduler=None):
        """
        Args:
            enable_queue: Buffer undelivered notifications when True;
                drop them when False.
            scheduler: Scheduler used to satisfy requests; defaults to
                the current-thread scheduler.
        """
        super(ControlledSubject, self).__init__(self._subscribe)

        self.subject = Subject()
        self.enable_queue = enable_queue
        self.queue = [] if enable_queue else None
        self.requested_count = 0
        self.requested_disposable = Disposable.empty()
        self.error = None
        self.has_failed = False
        self.has_completed = False
        self.scheduler = scheduler or current_thread_scheduler

    def _subscribe(self, observer):
        return self.subject.subscribe(observer)

    def on_completed(self):
        self.has_completed = True

        if not self.enable_queue or len(self.queue) == 0:
            self.subject.on_completed()
            self.dispose_current_request()
        else:
            # BUGFIX: was `self.queue.push(...)` — Python lists have no
            # `push` method (JavaScript-ism).
            self.queue.append(OnCompleted())

    def on_error(self, error):
        self.has_failed = True
        self.error = error

        if not self.enable_queue or len(self.queue) == 0:
            self.subject.on_error(error)
            self.dispose_current_request()
        else:
            # BUGFIX: was `self.queue.push(...)` — same JavaScript-ism.
            self.queue.append(OnError(error))

    def on_next(self, value):
        if self.requested_count <= 0:
            # No outstanding demand: buffer when queueing is enabled.
            if self.enable_queue:
                self.queue.append(OnNext(value))
        else:
            self.requested_count -= 1
            if self.requested_count == 0:
                self.dispose_current_request()
            # BUGFIX: was `self.subject.onNext(value)` — camelCase method
            # does not exist on the Python Subject.
            self.subject.on_next(value)

    def _process_request(self, number_of_items):
        """Replay queued notifications against `number_of_items` of demand.

        Terminal notifications (kind != 'N') are always flushed. Returns
        the demand remaining after the replay.
        """
        if self.enable_queue:
            while len(self.queue) > 0 and (number_of_items > 0 or self.queue[0].kind != 'N'):
                first = self.queue.pop(0)
                first.accept(self.subject)
                if first.kind == 'N':
                    number_of_items -= 1
                else:
                    # Terminal notification: cancel demand, drop the rest.
                    self.dispose_current_request()
                    self.queue = []

        return number_of_items

    def request(self, number):
        """Signal demand for `number` items; returns a disposable that
        cancels the outstanding request."""
        self.dispose_current_request()

        def action(scheduler, i):
            remaining = self._process_request(i)
            # BUGFIX: was `and` — the stream is stopped when EITHER
            # terminal event has occurred.
            stopped = self.has_completed or self.has_failed
            if not stopped and remaining > 0:
                # BUGFIX: was `self.requestedCount` — a new camelCase
                # attribute that the rest of the class never reads.
                self.requested_count = remaining

                def dispose():
                    self.requested_count = 0

                # Scheduled item is still in progress. Return a new
                # disposable to allow the request to be interrupted
                # via dispose.
                return AnonymousDisposable(dispose)

        self.requested_disposable = self.scheduler.schedule(action, state=number)
        return self.requested_disposable

    def dispose_current_request(self):
        if self.requested_disposable:
            self.requested_disposable.dispose()
            self.requested_disposable = None
class HttpParser(Observer):
    """Incremental HTTP request parser (Python 2).

    Raw bytes arrive via on_next; once a full request head (terminated by
    a blank line) has accumulated, a parsed HttpRequest is emitted on
    self.requests_in. Two rx-scheduled timers guard the connection: a
    keep-alive timer between requests and a read timer while a partial
    request is pending; either firing pushes a 408 error downstream.
    """

    def __init__(self, conn):
        super(HttpParser, self).__init__()
        self.conn = conn
        self.buf = StringIO()                     # accumulates partial request bytes
        self.requests_in = Subject()              # stream of parsed HttpRequest objects
        self.responses_out = HttpWriter(conn)
        self.keep_alive_timeout_dispose = Disposable.empty()
        self.read_timeout_dispose = Disposable.empty()
        # A fresh connection starts idle: arm the keep-alive timer.
        self.keep_alive_timer_on()

    def schedule_timeout(self, seconds):
        """Schedule a one-shot timeout that errors the request stream
        with 408 after `seconds`; returns the disposable for cancelling.

        NOTE(review): `scheduler` below is resolved from module scope
        (not visible in this chunk) — confirm it is a shared rx scheduler.
        """
        def action(scheduler, state=None):
            print 'timeout', seconds
            self.requests_in.on_error(HttpResponse(408, 'Request Timeout'))
        return scheduler.schedule_relative(timedelta(seconds=seconds), action)

    def clear_timeout(self, disposable):
        """Cancel a scheduled timeout, ignoring already-fired timers."""
        try:
            disposable.dispose()
        except:
            # Twisted sometimes complains when we try to cancel timeout after it has already fired
            pass

    def keep_alive_timer_on(self):
        # Re-arm: cancel any previous keep-alive timer first.
        self.keep_alive_timer_off()
        self.keep_alive_timeout_dispose = self.schedule_timeout(
            KEEP_ALIVE_TIMEOUT)

    def keep_alive_timer_off(self):
        self.clear_timeout(self.keep_alive_timeout_dispose)

    def read_timer_on(self):
        # Re-arm: cancel any previous read timer first.
        self.read_timer_off()
        self.read_timeout_dispose = self.schedule_timeout(READ_TIMEOUT)

    def read_timer_off(self):
        self.clear_timeout(self.read_timeout_dispose)

    def parse_request(self, buf):
        """Parse a complete request head; emit HttpRequest or a 400 error.

        Only the request line is inspected: 'METHOD PATH VERSION' (three
        space-separated tokens); headers in the remaining lines are ignored.
        """
        lines = buf.split('\r\n')
        first_line = lines[0].split()
        if len(first_line) == 3:
            self.requests_in.on_next(
                HttpRequest(self.conn, first_line[0], first_line[1]))
        else:
            self.requests_in.on_error(HttpResponse(400, 'Bad Request'))

    def on_next(self, data):
        """Feed raw bytes; parses and emits once '\\r\\n\\r\\n' is seen."""
        # Data is flowing: switch from keep-alive to read timeout.
        self.keep_alive_timer_off()
        self.read_timer_on()
        self.buf.write(data)  # append new data
        buf = self.buf.getvalue()
        eor = buf.find('\r\n\r\n')  # check we've got full request
        if eor >= 0:
            self.buf = StringIO()
            self.buf.write(buf[eor + 4:])  # leave remainder in buf
            self.parse_request(buf[:eor])
            # Request complete: back to idle, re-arm keep-alive.
            self.read_timer_off()
            self.keep_alive_timer_on()

    def on_error(self, e):
        print 'parser got error', e
        self.keep_alive_timer_off()
        self.read_timer_off()
        self.requests_in.on_error(HttpResponse(500, 'Internal Server Error'))

    def on_completed(self):
        print 'parser completed'
        self.keep_alive_timer_off()
        self.read_timer_off()
        self.requests_in.on_completed()
from rx import Observable
from rx.subjects import Subject


def show_next(i):
    print("on_next {}".format(i))


def show_error(e):
    print("on_error: {}".format(e))


def show_completed():
    print("on_completed")


# amb demo: the merged stream mirrors whichever source emits first;
# the losing source is ignored from then on.
racer_a = Subject()
racer_b = Subject()

racer_a.amb(racer_b).subscribe(on_next=show_next,
                               on_error=show_error,
                               on_completed=show_completed)

racer_a.on_next(1)   # racer_a wins the race
racer_b.on_next(2)   # ignored: racer_b lost
racer_a.on_completed()