def test_update_cache(stream_data):
    """Run stream_data through the cache-update operator and count emissions."""
    results = []
    # NOTE(review): 'update_chache' looks like a typo for 'update_cache' —
    # confirm the helper's actual name before renaming it.
    Observable.from_(stream_data) \
        .let(update_chache) \
        .do_action(results.append) \
        .subscribe(lambda x: x)
    assert len(results) == 3
def update_capabilities_from_remote(self):
    """Fetch every distinct hub's details and publish the merged capability list.

    Subscribes on the module-level scheduler; the collected list is handed to
    self.set_capabilities.
    """
    Observable.from_(self.get_all_hubs_url()) \
        .distinct() \
        .flat_map(self._fetch_hub_detail_and_unpack_to_caps) \
        .to_list() \
        .subscribe_on(_scheduler) \
        .subscribe(self.set_capabilities)
def words_from_file(file_name):
    """Return an Observable of the lowercased, punctuation-stripped words of a file.

    NOTE(review): the file handle is never closed; it lives as long as the
    (lazy) stream does.
    """
    handle = open(file_name)
    return Observable.from_(handle) \
        .flat_map(lambda line: Observable.from_(line.split())) \
        .map(lambda word: re.sub(r'[^\w]', '', word)) \
        .filter(lambda word: word != "") \
        .map(lambda word: word.lower())
def words_and_counts():
    """Print each (word, count) pair from *interests*, most frequent first."""
    return Observable.from_(interests) \
        .flat_map(lambda interest: Observable.from_(interest[1].lower().split())) \
        .group_by(lambda word: word) \
        .flat_map(lambda grp: grp.count().map(lambda ct: (grp.key, ct))) \
        .to_list() \
        .map(lambda pairs: sorted(pairs, key=lambda t: t[1], reverse=True)) \
        .flat_map(lambda pairs: Observable.from_(pairs)) \
        .subscribe(SimplePrint())
def words_from_file(file_name):
    """Stream lowercased words; non-word chars become spaces before splitting.

    NOTE(review): the file handle is never closed; it lives with the stream.
    """
    handle = open(file_name)
    return Observable.from_(handle) \
        .map(lambda line: re.sub(r"\W", " ", line)) \
        .flat_map(lambda line: Observable.from_(line.split(" "))) \
        .filter(lambda token: token != "") \
        .map(lambda token: token.lower())
def test_group_by():
    """Group strings by length and print (length, occurrences) pairs."""
    # TODO element_selector could provide my mapper function here to turn the
    # data into immutable after this point; this is where all my agent logic
    # should go
    items = ['a', 'b', 'vv', 'aaa', 'aaa', 'ggg']
    Observable.from_(items).group_by(lambda s: len(s)) \
        .flat_map(lambda grp: grp.count().map(lambda n: (grp.key, n))) \
        .subscribe(print)
def main_reactive():
    """Print the maximal product of four grid cells in a line (4 directions)."""
    grid = np.array(_load())

    def diagonal(row, col, axis):
        # Collect four cells stepping along *axis*; (0,) when the run
        # leaves the grid, so its product contributes nothing.
        (row_step, col_step) = axis
        cells = []
        try:
            for k in range(4):
                cells.append(grid[row + k * row_step][col + k * col_step])
            return cells
        except IndexError:
            return 0,

    axes = (
        (0, 1),   # right
        (1, 0),   # down
        (1, -1),  # down-left
        (1, 1),   # down-right
    )
    Observable.from_(product(range(grid.shape[0]), range(grid.shape[1]), axes)) \
        .map(lambda t: diagonal(*t)) \
        .map(mul_many) \
        .max() \
        .subscribe(print)
def asterixdb_exec3(self, alias=None, where=None, db=None, ret=None):
    """Run the configured SQL++ query against AsterixDB and store the rows.

    NOTE(review): alias/ret parameters are immediately overwritten from
    *asterixconfig* — confirm whether the arguments should win.
    """
    headers = {'Content-type': 'application/x-www-form-urlencoded'}
    # The config template uses <> placeholder brackets; swap in {} for str.format.
    template = asterixconfig['sql'].replace("<", "{").replace(">", "}")
    sql = template.format(asterixconfig['keys'])
    ret = asterixconfig['ret']
    alias = asterixconfig['alias']
    response = requests.post('http://45.79.91.219:19002/query/service',
                             data=sql, headers=headers)
    # Work around malformed "{ ," sequences in the service response body.
    jsonobj = json.loads(response.text.replace("{ ,", "{ "))

    def extract_fields(row):
        # Strip the "alias." prefix from each requested field name.
        return tuple([row[field.replace(alias + ".", "")] for field in ret])

    def save(row):
        if alias not in self.resultset.keys():
            self.resultset[alias] = {'cols': ret, 'rows': []}
        self.resultset[alias]["rows"].append(row)

    Observable.from_(jsonobj["results"]) \
        .map(lambda x: x[alias]) \
        .map(extract_fields) \
        .subscribe(save)
def walk(self):
    """Walk storage, printing folders (and optionally files) with a timing summary.

    Bug fix: the original used Python-2-only tuple-parameter lambdas
    (``lambda (fileinfo, folder): ...``), a SyntaxError on Python 3; they are
    replaced with index-based lambdas with identical behavior.
    """
    start = time.time()
    # Create source stream
    folders = Observable.from_(self._storage.list_folders())
    if self._config.root_files:
        folders = folders.start_with(RootFolderInfo())
    if self._config.list_folders:
        print("Folder")
        if self._config.list_sort:
            folders = folders.to_sorted_list(key_selector=lambda folder: folder.name) \
                .flat_map(lambda x: x)
        folders.subscribe(
            on_next=lambda folder: print(folder.name) if folder else '',
            on_completed=lambda: self._print_summary(time.time() - start))
    else:
        print("Folder, Filename, Checksum")
        # Expand folder stream into a stream of (fileinfo, folder) pairs
        files = folders.concat_map(lambda folder: Observable.from_(
            (fileinfo, folder) for fileinfo in self._storage.list_files(folder)))
        if self._config.list_sort:
            files = files.to_sorted_list(
                key_selector=lambda pair: "{} {}".format(pair[1].name, pair[0].name)) \
                .flat_map(lambda x: x)
        # Print each file
        files.subscribe(
            on_next=lambda pair: self._print_file(pair[1], pair[0]),
            on_completed=lambda: self._print_summary(time.time() - start))
def walk(self):
    """Walk storage, emitting folder (and optionally file) rows via the CSV writer."""
    start = time.time()
    # Create source stream
    folders = Observable.from_(self._storage.list_folders())
    if self._config.root_files:
        folders = folders.start_with(RootFolder())
    if self._config.list_folders:
        self._writer.writerow(["Folder"])
        if self._config.list_sort:
            folders = folders.to_sorted_list(key_selector=lambda folder: folder.name) \
                .flat_map(lambda x: x)
        folders.subscribe(
            on_next=lambda folder: self._writer.writerow(
                [folder.name if folder else '']),
            on_completed=lambda: self._print_summary(time.time() - start))
    else:
        self._writer.writerow(["Folder", "Filename", "Checksum"])
        # Expand folder stream into a stream of (file, folder) pairs
        files = folders.concat_map(lambda folder: Observable.from_(
            (file_, folder) for file_ in self._storage.list_files(folder)))
        if self._config.list_sort:
            files = files.to_sorted_list(
                key_selector=lambda pair: "{} {}".format(pair[1].name, pair[0].name)) \
                .flat_map(lambda x: x)
        # Print each file
        files.subscribe(
            on_next=unpack(
                lambda file_, folder: self._print_file(folder, file_)),
            on_completed=lambda: self._print_summary(time.time() - start))
def main():
    """Bridge UDP messages and hardware-controller channels through one stream."""
    # Connect to the redis database to store the state
    # redis_db = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True, encoding='utf-8')
    redis_db = 'Redis'  # for simulation purposes

    # Defining the UDP source
    udp_source = Observable.from_(_test_socket_generator())

    # One distinct-value stream per hardware channel, merged together.
    channel_source = []
    for ch in range(NR_CHANNELS):
        channel_source.append(
            Observable.from_(_pull_from_channel(ch))
            .distinct_until_changed())  # We only want the values that change
    hwc_source = Observable.merge(channel_source)

    merged_source = Observable.merge(hwc_source, udp_source) \
        .map(lambda s: json.loads(s)) \
        .publish()

    # udp-tagged messages drive the controller; hwc-tagged ones are broadcast.
    hwc_sink = merged_source.filter(lambda x: x[0].startswith('udp')) \
        .subscribe(on_next=lambda x: _write_to_controller(x[2], x[3]),
                   on_error=lambda e: print(e))
    udp_sink = merged_source.filter(lambda x: x[0].startswith('hwc')) \
        .subscribe(on_next=lambda x: _broadcast_state(x, broadcast_socket),
                   on_error=lambda e: print(e))
    db_sink = merged_source.subscribe(
        on_next=lambda x: _write_to_db(redis_db, x[2], x[3]),
        on_error=lambda e: print(e))
    merged_source.connect()
def class_thirteen():
    """Print count, total length, average length, and concatenation of the words."""
    words = Observable.from_(["alpha", "Beta", "Gamma", "Delta", "Epsilon"])
    Observable.from_([
        words.count(),
        words.map(lambda w: len(w)).sum(),
        words.map(lambda w: len(w)).average(),
        words.reduce(lambda left, right: left + right),
    ]).merge_all().subscribe(print)
def test_mapper(self):
    """Command dicts map through default_command_mapper and the server answers."""
    result = []
    Observable.from_([
        {'cmd': 'call', 'resource_name': 'number', 'method': '__sub__', 'args': [1]},
        {'cmd': 'call', 'resource_name': 'number', 'method': '__add__', 'args': [5]},
    ]).map(default_command_mapper) \
        .flat_map(self.reactive_server) \
        .subscribe(result.append)
    self.assertEqual(result, [9, 15])
def __sense_environment(self):
    """Poll every sensor into working memory, then mirror facts onto world state."""
    Observable.from_(self.sensors) \
        .subscribe(lambda sensor: self.working_memory.append(
            Fact(sensor=sensor.name, data=sensor.exec(), binding=sensor.binding)))
    Observable.from_(self.working_memory) \
        .subscribe(lambda fact: setattr(self.world_state, fact.binding,
                                        fact.data.response))
def __sense_environment(self):
    """Poll every sensor into working memory, then mirror fact outputs onto world state."""
    Observable.from_(self.sensors) \
        .subscribe(lambda sensor: self.working_memory.append(
            Fact(sensor=sensor.name, data=sensor.exec(), binding=sensor.binding)))
    Observable.from_(self.working_memory) \
        .subscribe(lambda fact: setattr(self.world_state, fact.binding,
                                        fact.data.output))
def on_next(self, img_lines):
    """Draw the detected lines on top of the image and display the result."""
    img, lines = img_lines
    green = (0, 255, 0)
    Observable.from_(lines).subscribe(
        lambda line: cv2.line(img, line.a, line.b, green),  # on_next
        lambda error: print(error),                         # on_error
        lambda: self.display(img),                          # on_completed
    )
def common_interest_count(user):
    """Rank all other users by how many interests they share with *user*.

    Emits (name, count) pairs, most interests in common first.
    """
    def score(other):
        return common_interests_between(user, other) \
            .count() \
            .map(lambda ct: (other["name"], ct))

    return Observable.from_(users) \
        .filter(lambda other: other["id"] != user["id"]) \
        .flat_map(score) \
        .to_list() \
        .map(lambda pairs: sorted(pairs, key=lambda t: t[1], reverse=True)) \
        .flat_map(lambda pairs: Observable.from_(pairs))
def words_from_file(file_name):
    """Parse, clean, and push the lowercased words of a text file as a stream.

    NOTE(review): the file handle is never closed; it lives with the stream.
    """
    handle = open(file_name)
    return Observable.from_(handle) \
        .flat_map(lambda line: Observable.from_(line.split())) \
        .map(lambda token: re.sub(r'[^\w]', '', token)) \
        .filter(lambda token: token != "") \
        .map(lambda token: token.lower())
def word_in_file(fn):
    """Build a {word: count} dict for every whitespace-separated word in *fn*.

    NOTE(review): the file handle is never closed; it lives with the stream.
    """
    handle = open(fn)
    return Observable.from_(handle) \
        .flat_map(lambda line: Observable.from_(line.split())) \
        .map(lambda w: w.lower()) \
        .group_by(lambda word: word) \
        .map(lambda group: group.count().map(lambda n: (group.key, n))) \
        .merge_all() \
        .to_dict(lambda pair: pair[0], lambda pair: pair[1])
def test_json_mapper(self):
    """JSON command strings decode, map, and round-trip through the server."""
    result = []
    Observable.from_([
        '{"cmd": "call", "resource_name": "number", "method": "__sub__", "args": [1]}',
        '{"cmd": "call", "resource_name": "number", "method": "__add__", "args": [5]}',
    ]).map(json.loads) \
        .map(default_command_mapper) \
        .flat_map(self.reactive_server) \
        .subscribe(result.append)
    self.assertEqual(result, [9, 15])
def __init__(self, filename):
    """Open *filename* and expose its lowercased, cleaned words as self.source.

    NOTE(review): the file handle is never closed explicitly.
    """
    super().__init__()
    self.disposable = None
    self.filename = filename
    handle = open(filename, 'r')
    self.source = Observable.from_(handle) \
        .map(lambda line: Observable.from_(line.split())) \
        .concat_all() \
        .map(lambda word: re.sub(r'[^\w\s]', '', word)) \
        .map(lambda word: word.lower())
def GetInfo(pre_time):
    """Process one half-hour window of cleaned_data rows, grouped by column 1."""
    end_time = pre_time + 1800
    time_format = '%Y-%m-%d %H:%M:%S'
    str_pre_time = datetime.datetime.fromtimestamp(pre_time).strftime(time_format)
    str_end_time = datetime.datetime.fromtimestamp(end_time).strftime(time_format)
    select_cleaned_data_sql = 'select * from cleaned_data where time >= ? and time < ?;'
    info = c.execute(select_cleaned_data_sql, (str_pre_time, str_end_time)).fetchall()
    # Group the rows by their second column (presumably a station number;
    # confirm schema) and hand each group to the per-sno handler.
    Observable.from_(info) \
        .group_by(lambda item: item[1]) \
        .subscribe(lambda obs: deal_with_by_sno(str_pre_time, obs))
def github_one():
    """Register an extension method under an alias and call it both ways."""
    from rx.internal import extensionmethod

    @extensionmethod(Observable, alias="merp")
    def herp(self):
        return self.map(lambda x: (x, "herp"))

    Observable.from_(["alpha", "Beta", "Gamma", "Delta", "Epsilon"]) \
        .herp() \
        .merp() \
        .subscribe(print)
def on_next(self, value):
    """Fan out every entry of the domain under *value* to a SomeObserver3.

    Each emission is a (processor, value, key, where, db, return) tuple.
    """
    downstream = SomeObserver3(self.root, self.domain, self.resultset)
    entries = self.domain[value]
    Observable.from_(entries) \
        .map(lambda x: (entries[x]['processor'], value, x,
                        entries[x]['where'], entries[x]['db'],
                        entries[x]['return'])) \
        .subscribe(downstream)
def unpack_node(node):
    """Flatten a grid node's browser capability lists, tagging every capability
    with its Appium and hub base URLs."""
    appium_netloc = self._get_base_url(node['id'])
    hub_netloc = self._get_base_url(hub_url)
    malform_dto = node['protocols']['web_driver']['browsers']['']
    # The '' browser entry carries stray name/version keys; drop them first.
    if 'name' in malform_dto:
        del malform_dto['name']
    if 'version' in malform_dto:
        del malform_dto['version']
    cap_lists = malform_dto.values()  # list of cap_lists
    return Observable.from_(cap_lists) \
        .flat_map(lambda cap_list: Observable.from_(cap_list)) \
        .map(lambda cap: {**cap,
                          'appium_url': appium_netloc,
                          'hub_url': hub_netloc})
def github_two():
    """Extension method that merges a stream with itself *times* extra times."""
    from rx.internal import extensionmethod

    @extensionmethod(Observable)
    def duplicate(self, times=1):
        merged = self
        for _ in range(times):
            merged = merged.merge(self)
        return merged

    Observable.from_(["alpha", "Beta", "Gamma", "Delta", "Epsilon"]) \
        .duplicate(3) \
        .subscribe(print)
def main():
    """
    Will connect to a local flighgear via telnet and power up the engines.
    Works best with aircraft c172p.
    """
    fg = RxFlightGear(FlightGear())
    starter = Subject()
    fg.starter(starter)
    # Marble diagram: starter held on for ~11 ticks, then released.
    fg.starter(Observable.from_marbles("0-1----------0|")
               .map(lambda x: bool(int(x))))
    fg.flaps(Observable.from_([0.5]))
    fg.rudder(Observable.from_([0.1]))
    fg.throttle(Observable.from_([1.0]))
def testspeed(num_processes=6, eigruns=6):
    """Benchmark longrun_eig and primes_between: process pool vs. memoized serial.

    Bug fix: the original immediately reassigned ``num_processes = 6`` inside
    the body, silently discarding the caller's argument.
    """
    adim = 300
    arrays = [np.random.rand(adim * adim).reshape(adim, adim)
              for _ in range(eigruns)]

    print("longrun_eig not memoized:")
    now_time = datetime.utcnow()
    with ProcessPoolExecutor(num_processes) as executor:
        Observable.from_(arrays) \
            .flat_map(lambda s: executor.submit(longrun_eig, s)) \
            .subscribe(print)
    print("time taken {}".format(datetime.utcnow() - now_time))
    print()

    print("longrun_eig memoized:")
    now_time = datetime.utcnow()
    Observable.from_(arrays) \
        .map(longrun_eig) \
        .subscribe(print)
    print("time taken {}".format(datetime.utcnow() - now_time))
    print()

    test_ranges = [(random.randint(1, 5000), random.randint(5001, 15000))
                   for _ in range(eigruns)]

    print("primes_between not memoized:")
    now_time = datetime.utcnow()
    with ProcessPoolExecutor(num_processes) as executor:
        Observable.from_(test_ranges) \
            .flat_map(lambda x: executor.submit(primes_between, *x)) \
            .subscribe(print)
    print("time taken {}".format(datetime.utcnow() - now_time))
    print()

    print("primes_between memoized:")
    now_time = datetime.utcnow()
    Observable.from_(test_ranges) \
        .map(lambda x: primes_between(*x)) \
        .subscribe(print)
    print("time taken {}".format(datetime.utcnow() - now_time))
def walk(top, recursive):
    """Build a DirItem for *top*: an Observable of file paths plus, when
    *recursive*, one of nested DirItems for each subdirectory.

    NOTE(review): os.walk already descends the whole tree; combined with the
    explicit recursion below, subdirectories are visited repeatedly — confirm
    whether os.listdir-style single-level scanning was intended.
    """
    dirnames = []
    filenames = []
    for path, dirs, files in os.walk(top):
        filenames.extend(os.path.join(path, name) for name in files)
        if recursive is True:
            dirnames.extend(walk(os.path.join(path, name), recursive)
                            for name in dirs)
    return DirItem(top=top,
                   directories=Observable.from_(dirnames),
                   files=Observable.from_(filenames))
def test_distinct_until_change():
    '''
    Test that we can specify some part that should be distinct
    not the whole message
    :return:
    '''
    keyCode = 'keyCode'
    codes = [
        {'a': 1, keyCode: 38},   # up
        {'a': 2, keyCode: 38},   # up
        {'a': 3, keyCode: 40},   # down
        {'a': 4, keyCode: 40},   # down
        {'a': 5, keyCode: 37},   # left
        {'a': 6, keyCode: 39},   # right
        {'a': 7, keyCode: 37},   # left
        {'a': 8, keyCode: 39},   # right
        {'a': 9, keyCode: 66},   # b
        {'a': 10, keyCode: 65},  # a
    ]
    # Only consecutive duplicates of the keyCode field are suppressed.
    Observable.from_(codes).distinct_until_changed(
        comparer=lambda x, y: x[keyCode] == y[keyCode]).subscribe(print)
def run(self):
    """Accept TCP clients forever and greet each with the AMI banner."""
    self.socket.listen(5)

    def clients_iter():
        # Yield (conn, addr) pairs until accept() raises (e.g. socket closed).
        # The bare except is kept from the original: any failure ends the stream.
        try:
            while True:
                yield self.socket.accept()
        except:
            pass

    def send_start(client):
        return client[0].send(b'Asterisk Call Manager/6.6.6\r\n\r\n')

    Observable.from_(clients_iter()) \
        .subscribe(send_start)
def audio_encoder(sources):
    """Wire the audio-encoder service: config file -> encoder + HTTP server.

    Returns a Sink bundling the encoder, file, and httpd request streams.
    """
    # Parse configuration: read the --config CLI argument, then request a read
    # of the file it names; the parsed config drives everything below.
    read_config_file = (sources.argv.argv.skip(1).let(
        argparse.argparse,
        parser=Observable.just(
            argparse.Parser(description="audio encode server")),
        arguments=Observable.from_([
            argparse.ArgumentDef(
                name='--config',
                help="Path of the server configuration file")
        ])).filter(lambda i: i.key == 'config').map(
            lambda i: file.Read(id='config', path=i.value)))
    config = sources.file.response.let(parse_config)

    # Transcode request handling: initialize the encoder from config, then map
    # each matched HTTP request to an EncodeMp3 command.
    encode_init = (config.map(
        lambda i: encoder.Initialize(storage_path=i.encode.storage_path)))
    encode_request = (
        sources.httpd.route.filter(lambda i: i.id == 'flac_transcode').
        flat_map(lambda i: i.request).map(lambda i: encoder.EncodeMp3(
            id=i.context, data=i.data, key=i.match_info['key'])))
    encoder_request = Observable.merge(encode_init, encode_request)

    # http server: set up, register the transcode route, and start listening
    # at the configured host/port; answer each finished encode with "ok".
    http_init = (config.flat_map(lambda i: Observable.from_([
        httpd.Initialize(request_max_size=0),
        httpd.AddRoute(
            methods=['POST'],
            path='/api/transcode/v1/flac/{key:[a-zA-Z0-9-\._]*}',
            id='flac_transcode',
        ),
        httpd.StartServer(host=i.server.http.host, port=i.server.http.port),
    ])))
    http_response = (sources.encoder.response.map(lambda i: httpd.Response(
        data='ok'.encode('utf-8'),
        context=i.id,
    )))
    http = Observable.merge(http_init, http_response)

    # merge sink requests
    file_requests = read_config_file
    return Sink(
        encoder=encoder.Sink(request=encoder_request),
        file=file.Sink(request=file_requests),
        httpd=httpd.Sink(control=http),
    )
def get_result_sets(self, q):
    """Parse query *q*, fan out per-domain fetches, then join the two result sets.

    Bug fix: the original subscripted ``dict.keys()`` directly
    (``self.resultset.keys()[0]``), which raises TypeError on Python 3;
    the views are now materialized with ``list()`` first.
    """
    self.jl = json.loads(q)
    o1 = SomeObserver(self.root, self.domain)
    Observable.from_(self.jl["from"].keys()) \
        .map(lambda x: {"domain": get_domain(self.jl["from"][x]),
                        "key": x,
                        "db": self.jl["from"][x],
                        "where": self.jl["where"],
                        "return": self.jl["return"]}) \
        .subscribe(o1)

    o2 = SomeObserver2(self.root, self.domain, self.resultset)
    Observable.from_(self.domain) \
        .subscribe(o2)

    # dict.keys() is not subscriptable on Python 3; materialize first.
    self.asterixdb_exec3(alias=list(self.resultset.keys())[0])
    print(self.resultset)

    # Re-read the keys: asterixdb_exec3 may have added its own alias entry.
    aliases = list(self.resultset.keys())
    key1 = asterixconfig['where'].split('=')[0].strip()
    key2 = asterixconfig['where'].split('=')[1].strip()
    colKeyIndex1 = self.resultset[aliases[0]]['cols'].index(key1)
    colKeyIndex2 = self.resultset[aliases[1]]['cols'].index(key2)

    # Nested-loop equi-join of the two result sets on key1 == key2.
    j_result = {"rows": []}
    for row1 in self.resultset[aliases[0]]['rows']:
        for row2 in self.resultset[aliases[1]]['rows']:
            if row1[colKeyIndex1] == row2[colKeyIndex2]:
                j_result['rows'].append(
                    tuple([x for x in row1] + [x for x in row2]))
    return j_result
def go():
    """Drive the scaled white-noise stream through an async generator."""
    def noise():
        # Misnomer kept from the original 'uniform': these are Gaussian samples.
        while True:
            sleep(0.5)
            yield gauss(0., 1.)

    gen_white_noise = Observable.from_(noise(), scheduler=AsyncIOScheduler())

    def trf_shift(shift):
        return lambda value: value + shift

    def trf_scale(scale):
        return lambda value: scale * value

    white_noise_scaled = gen_white_noise.map(trf_scale(5.0)).map(trf_shift(10.0))
    white_noise_scaled.subscribe(LoggingObserver('white_noise_scaled'))
    gen_white_noise.subscribe(LoggingObserver('white_noise'))

    gen = white_noise_scaled.to_async_generator()
    # Wish we could write something like:
    # ys = (x for x in yield from gen())
    while True:
        x = yield from gen()
        if x is None:
            break
def projection(x, i):
    """Map one element through *selector* and wrap the result as an Observable.

    Iterable results become element streams; anything else is treated as a
    future. Bug fix: ``collections.Iterable`` was removed in Python 3.10 —
    use ``collections.abc.Iterable`` instead (identical semantics; the alias
    pointed at the abc class all along).
    """
    import collections.abc
    selector_result = selector(x, i)
    if isinstance(selector_result, collections.abc.Iterable):
        result = Observable.from_(selector_result)
    else:
        result = Observable.from_future(selector_result)
    return result
def test_uses_the_subscription_schema_for_subscriptions():
    """execute() must pick the subscription root type for a subscription op."""
    from rx import Observable

    doc = 'query Q { a } subscription S { a }'

    class Data(object):
        a = 'b'
        c = 'd'

    ast = parse(doc)
    Q = GraphQLObjectType('Q', {'a': GraphQLField(GraphQLString)})
    S = GraphQLObjectType('S', {
        'a': GraphQLField(
            GraphQLString,
            resolver=lambda root, info: Observable.from_(['b'])),
    })
    result = execute(GraphQLSchema(Q, subscription=S), ast, Data(),
                     operation_name='S', allow_subscriptions=True)
    assert isinstance(result, Observable)

    received = []
    result.subscribe(received.append)
    first = received[0]
    assert not first.errors
    assert first.data == {'a': 'b'}
def test_accepts_type_definition_with_sync_subscribe_function():
    """A subscription field with a synchronous resolver still yields a stream."""
    SubscriptionType = GraphQLObjectType(
        name='Subscription',
        fields=OrderedDict([
            ('importantEmail', GraphQLField(
                EmailEventType,
                resolver=lambda *_: Observable.from_([None]))),
        ])
    )
    test_schema = GraphQLSchema(
        query=QueryType,
        subscription=SubscriptionType
    )
    stream = Subject()
    send_important_email, subscription = create_subscription(
        stream, test_schema)
    email = Email(
        from_='*****@*****.**',
        subject='Alright',
        message='Tests are good',
        unread=True,
    )
    received = []
    subscription.subscribe(received.append)
    send_important_email(email)
    assert received  # [0].data == {'importantEmail': None}
def test_double_subscribe_to_iterable(self):
    """A finite-iterable source can be subscribed twice; the second run re-emits."""
    iterable_finite = [1, 2, 3]
    scheduler = TestScheduler()
    obs = Observable.from_(iterable_finite)

    # First subscription fires at the scheduler's default start time (200).
    results = scheduler.start(lambda: obs)
    results.messages.assert_equal(
        on_next(200, 1),
        on_next(200, 2),
        on_next(200, 3),
        on_completed(200))

    # Second subscription replays the sequence at the scheduler's current time.
    results = scheduler.start(lambda: obs)
    results.messages.assert_equal(
        on_next(1001, 1),
        on_next(1001, 2),
        on_next(1001, 3),
        on_completed(1001))
def __init__(self, words, trie=None):
    """Set up the acrostic iterator plus the phrase graph used during walks."""
    super(AcrosticIter, self).__init__(words, trie)
    self._solution_len = len(words)
    self._source = Observable.from_(self)
    self.subscribe = self._source.subscribe
    # Since we can assume there will be 1 letter phrases for every letter
    # go ahead and create n Meta objects to hold phrases from n->m at [n][m].
    self._phrase_graph = [{} for _ in range(self._solution_len)]
    self._walks = []
    self._walk_cache = {}
    for i in range(self._solution_len):
        try:
            self._walks.append(self._trie.walk(self._words[i:]))
        except IndexError:
            pass
def reactive():
    """Push each stripped query through two Bringers, writing results to .jl files."""
    bringer_file_pair = (
        (Bringer('us'), open('us.jl', 'w')),
        (Bringer('them'), open('them.jl', 'w')),
    )
    emitter = Emitter()
    with open('queries') as query_file:
        # readlines() is eager, so the source data outlives the with-block.
        queries = Observable.from_(query_file.readlines()) \
            .map(str.strip) \
            .subscribe_on(NewThreadScheduler())
        for bringer, out in bringer_file_pair:
            queries \
                .map(bringer.bring) \
                .map(emitter.emit) \
                .map(lambda s: s + '\n') \
                .subscribe(out.write, on_completed=partial(logger.info, 'Done'))
        a = queries.subscribe(partial(logger.info, 'Pushed %r'))
    # suspend main thread while the worker threads drain
    logger.info("Connecting")
    logger.info("Done Connecting")
    sleep(3)
def test_uses_the_subscription_schema_for_subscriptions():
    """Subscription operations must resolve against the subscription root type."""
    from rx import Observable

    doc = 'query Q { a } subscription S { a }'

    class Data(object):
        a = 'b'
        c = 'd'

    ast = parse(doc)
    Q = GraphQLObjectType('Q', {
        'a': GraphQLField(GraphQLString),
    })
    S = GraphQLObjectType('S', {
        'a': GraphQLField(
            GraphQLString,
            resolver=lambda root, info: Observable.from_(['b'])),
    })
    result = execute(GraphQLSchema(Q, subscription=S), ast, Data(),
                     operation_name='S', allow_subscriptions=True)
    assert isinstance(result, Observable)

    collected = []
    result.subscribe(collected.append)
    outcome = collected[0]
    assert not outcome.errors
    assert outcome.data == {'a': 'b'}
def go_rxpy():
    """Demo: log a Gaussian white-noise stream, raw and scaled."""
    import rx
    from rx import Observable, Observer
    from rx.concurrency import Scheduler, AsyncIOScheduler
    from rx.subjects import Subject

    class LoggingObserver(Observer):
        def __init__(self, name):
            Observer.__init__(self)
            self._name = name

        def on_next(self, x):
            logging.info('<%s>received: %s', self._name, x)

        def on_error(self, e):
            logging.info('<%s>error: %s', self._name, e)

        def on_completed(self):
            logging.info('<%s>sequence completed', self._name)

    def uniform():
        # Misnomer kept from the original: these are Gaussian samples.
        while True:
            sleep(0.5)
            yield gauss(0., 1.)

    gen_white_noise = Observable.from_(uniform())

    def trf_shift(shift):
        return lambda value: value + shift

    def trf_scale(scale):
        return lambda value: scale * value

    white_noise_scaled = gen_white_noise.map(trf_scale(5.0)).map(trf_shift(10.0))
    white_noise_scaled.subscribe(LoggingObserver('white_noise_scaled'))
    gen_white_noise.subscribe(LoggingObserver('white_noise'))
#!/usr/bin/env python3
"""Pretty-print every Telegram update whose message text is exactly "f"."""
import json

from rx import Observable

import APIReaderTelegram as Telegram


def is_message_f(element):
    return element['message']['text'] == "f"


def pretty_print(element):
    print(json.dumps(element, indent=4))


messages = Observable.from_(Telegram.get_iterable())
messages.filter(is_message_f).subscribe(pretty_print)
from rx import Observable

# Merge interleaves both finite integer streams into a single subscription.
obs1 = Observable.from_([1, 2, 3])
obs2 = Observable.from_([10, 11, 12])
Observable.merge(obs1, obs2) \
    .subscribe(lambda s: print(s))
from rx import Observable

# Concat emits every element of the first stream, then the second: 1 2 3 4.
observable = Observable.from_([1, 2])
other_observable = Observable.from_([3, 4])
Observable.concat(observable, other_observable) \
    .subscribe(on_next=lambda n: print(n))
from rx import Observable, Observer
from rx.internal import extensionmethod
from rx.subjects import Subject

in_ = '23432/XX428/X218-'


def dump(s):
    print()
    s.subscribe(print, print, print)


# Bowling-style char codes: digits keep their value, '-' is 0, 'X' is 10.
CHAR2NUM = {str(i): i for i in range(1, 9)}
CHAR2NUM['-'] = 0
CHAR2NUM['X'] = 10

s1 = Observable.from_(in_)
s1.window  # NOTE(review): no-op attribute access kept from the original
# '/' (spare) resolves to whatever completes the previous roll to 10.
s2 = s1.scan(lambda prev, x: 10 - prev if x == '/' else CHAR2NUM[x], 0)
s3 = s2.start_with(0).buffer_with_count(4, 1)
s4 = s3.map(lambda q: dict(s=q[0] + q[1],
                           spare=q[1] + q[2],
                           strike=q[1] + q[2] + q[2]))
#!/usr/bin/env python3 ''' Attaching a Subject to an active stream is blocking ''' from rx import Observable from rx.subjects import Subject subject = Subject() source = Observable.from_(range(10000)) # Attaching Subject to the Original Oberservable # Blocking source.subscribe(subject) # Subscribing to an attached subject doesn't work until # the stream to which the subject is attached finishes, # if ever sub_subject1 = subject.subscribe( lambda v : print("Value published to observer 1: {0}".format(v)), lambda e : print("Error! {0}".format(e)), lambda : print("Completed!") ) # => Completed! sub_subject2 = subject.subscribe( lambda v : print("Value published to observer 2: {0}".format(v)), lambda e : print("Error! {0}".format(e)),
def retweet_test(element): if not original_tweet_has_less_than_50_retweets(element): print('=> Retweet:', json.dumps(element['retweeted_status']['retweet_count'], indent=4), "retweets") # All elements should have less than 2 emojis def emoji_test(element): if tweet_has_more_than_two_emojis(element): print("=> Emoji:", count_emoji(element['text'])) # All elements should have hashtags shorter that 12 characters def hashtag_test(element): if hashtag_longer_than_twelve_letters(element): print('=> Hashtag:', json.dumps(element['entities']['hashtags'], indent=4)) if __name__ == "__main__": stream = Observable.from_(Twitter.get_iterable()) tweets = stream.filter(is_tweet) # Keep only retweets retweeted_tweets = tweets.filter(is_retweet) # Keep only tweets in japanese in_japanese_tweets = tweets.filter(is_japanese_tweet) # Keep all tweets in spanish that contain a hashtag spanish_hashtags = tweets.filter(is_spanish_tweet).filter(has_hashtags) threads = [ Thread(target=process_stream, args=(in_japanese_tweets, emoji_test)), Thread(target=process_stream, args=(spanish_hashtags, hashtag_test)), Thread(target=process_stream, args=(retweeted_tweets, retweet_test))
def Stream(iterable):
    """Thin alias: wrap any iterable as an rx Observable."""
    return Observable.from_(iterable)
from dumper import Dumper
from rx import Observable, Observer
from rx.internal import extensionmethod
from rx.subjects import Subject

in_ = '23432/XX428/X21X71'

# Expand each 'X' into two synthetic values, pair up the stream, and dump
# the pair index for the first ten buffers.
Observable.from_(in_) \
    .flat_map(lambda q: Observable.range(1, 2) if q == 'X' else Observable.just(q)) \
    .buffer_with_count(2) \
    .map(lambda x, i: i) \
    .take(10) \
    .subscribe(Dumper('s'))
'''
See rx-stream-pacing

Two ^C become necessary to stop this script - why?
'''
from rx import Observable

import APIReaderTwitter as Twitter

try:
    import json
except ImportError:
    import simplejson as json


def pretty_print(element):
    print(json.dumps(element, indent=4))


def is_delete(element):
    return not "delete" in element


# Generate an interval sequece, firing once each second
tick = Observable.interval(1000)

# Publish an event from Twitter each tick as a minimum.
# If the twitter stream is empty it will just wait for an event to come.
source = Observable.from_(Twitter.get_iterable()).zip(
    tick, lambda t, _: t)

# Print each element in intervals, waits a minimum of 1s between events
source.filter(is_delete).subscribe(pretty_print)
""" We can chain observables to form new observables to subscribe on. This is useful when transforming incoming data and we may want to perform some pre-processing before we output it to a subscriber/observer. This chaining of operations creates new observables that we can subscribe to and call the same on_next, on_completed and on_error callbacks """ from rx import Observable, Observer greek_words = ["Alpha", "Beta", "Gamma", "Delta", "Epsilon"] source = Observable.from_(greek_words) # we map the lengths of each word and get the lengts and create a new observable with a stream of lengths lengths = source.map(lambda s : len(s)) # now we filter and get the lengths that are greater than or equal to 5 filtered = lengths.filter(lambda i: i >= 5) # now we can subscribe to the observable and call on_next on our filtered data filtered.subscribe(lambda value : print("Received {}".format(value))) print("This will chain the operators on the observable and make this code more readable, eliminating intermidiary variables") source = Observable.from_(greek_words).map(lambda s : len(s)).filter(lambda i : i >= 5).subscribe(lambda value : print("Received {}".format(value)))
from threading import Thread

from rx import Observable
from APIReaderSmartvel import APIReaderSmartvel

events = Observable.from_(APIReaderSmartvel().get_iterable())

# Verify that all the following regions have events
REGIONS = ("Barcelona", "Málaga", "Palma de Mallorca")


# Filters
def has_place(element):
    return "place" in element["event"]


# Bug fix: the original lambda captured *region* late, so every dict value
# tested against the LAST region only. Binding it as a default argument
# freezes each region at definition time.
is_in_region = {
    region: (lambda element, region=region:
             element["event"]["place"]["region"]["name"] == region)
    for region in REGIONS
}


# Test
def is_not_empty(a_stream):
    a_stream.is_empty().subscribe(fail_if_empty)


# Test helper (just a console reporter)
def fail_if_empty(empty):
    if empty:
        print("stream should not be empty!")
    else:
        print("good, stream is not empty")
def print_len3(x):
    print('`{}` is at least 3 characters long.'.format(x))


if DEBUG:
    with Section('Reactive (sort of functional) programming via RxPY'):
        print_h2('Basic Observable')
        # Python 2 idioms (list + list for range, xrange, map for side
        # effects) are kept as-is; this module targets py2.
        xs = Observable.from_iterable(range(10) + [None])
        d = xs.subscribe(MyObserver())

        observable_iterable = Observable.from_iterable(xrange(100))
        logwriter = observable_iterable.subscribe(LogWriterObserver())
        print(logwriter)

        print_h2('Observable from_')
        xs = Observable.from_(range(10))
        gobbledygook = Observable.from_(list(string.punctuation))
        letters = Observable.from_(list(string.ascii_uppercase))
        merged = xs.merge(letters, gobbledygook).subscribe(_print)

        print_h2('Subjects')
        stream = TestSubject()
        stream.on_next(1)
        stream.on_next(2)
        d = stream.subscribe(_print)
        map(stream.on_next, range(5))
        stream.on_next(3)
        map(stream.on_next, range(5))
        stream.on_next(4)
        d.dispose()
        # Subclassed version prints, but the subscription object `d` does not
def create():
    # Factory for the scheduler test: emit the finite enumerable on *scheduler*.
    return Observable.from_(enumerable_finite, scheduler=scheduler)
def mapper(filename):
    """Read *filename* as CSV and return an Observable of its rows.

    Bug fix: the original returned a lazy Observable over a csv.reader whose
    underlying file the ``with`` block had already closed, so subscribing
    raised ValueError. Rows are now materialized before the file closes.
    """
    with open(filename) as f:
        rows = list(csv.reader(f))
    return Observable.from_(rows)
import csv

from rx import Observable


def mapper(filename):
    """Return an Observable of CSV rows.

    Bug fix: rows are read eagerly inside the ``with`` block; the original
    returned a lazy reader over a file that was already closed on return,
    so subscribing raised ValueError.
    """
    with open(filename) as f:
        rows = list(csv.reader(f))
    return Observable.from_(rows)


Observable.from_(["filename.csv"]).map(mapper).concat_all().subscribe(print)


def to_file(filename):
    """Wrap an open file in an Observable whose disposal closes it."""
    f = open(filename)
    return Observable.using(
        lambda: Disposable(lambda: f.close()),
        lambda d: Observable.just(f)
    )


def to_reader(f):
    """Turn an open file object into a csv.reader."""
    return csv.reader(f)


def print_rows(reader):
    """Print every row of a csv.reader."""
    for row in reader:
        print(row)


Observable.from_(["filename.csv", "filename2.csv"]) \
    .flat_map(to_file) \
    .map(to_reader) \
    .subscribe(print_rows)
class MyObserver(Observer): def on_next(self, x): print("Got: %s" % x) def on_error(self, e): print("Got error: %s" % e) def on_completed(self): print("Sequence completed") xs = Observable.from_iterable(range(10)) d = xs.subscribe(MyObserver()) print d xs = Observable.from_([1,2,3,4,5,6]) ys = xs.to_blocking() zs = (x*x for x in ys if x > 3) for x in zs: print(x) xs = Observable.from_([1,2,3,4,5,6]) ys = xs[1:-1] xs = Observable.from_(range(10)) d = xs.subscribe(MyObserver()) #Filtering a sequence xs = Observable.from_(range(10)) d = xs.filter( lambda x: x % 2
""" Here we use an Observable.from_ and lambdas in the subscribe of the observer, this means we do not have to implement our own observer and we can simply use lambdas to call the on_next, on_error and on_completed callbacks on the subscriber """ from rx import Observable, Observer greek_words = ["Alpha", "Beta", "Gamma", "Delta", "Epsilon"] source = Observable.from_(greek_words) source.subscribe( on_next=lambda value : print("Received {}".format(value)), on_error=lambda error : print("Error encounted {}".format(error)), on_completed=lambda :print("Done") ) print("We can even eliminate the on_<> callbacks in the subscriber and simply use the on_next callback, not recommended in production") source.subscribe(lambda value : print("Recieved {}".format(value)))
# !/usr/bin/env python
# coding=utf-8
from rx import Observable

source = Observable.from_(["Alpha", "Beta", "Gamma", "Delta", "Epsilon"])

# Map to word lengths, keep those >= 5, then print each survivor.
lengths = source.map(lambda s: len(s))
filtered = lengths.filter(lambda i: i >= 5)
filtered.subscribe(lambda value: print("Received {0}".format(value)))


def my_abs(x):
    """Absolute value with an explicit operand-type check."""
    if not isinstance(x, (int, float)):
        raise TypeError('bad operand type')
    return x if x >= 0 else -x


print(my_abs(-1))

from rx import Observable
from random import randint

three_emissions = Observable.range(1, 3)