Example #1
 def scan(self):
     self.is_armed = not self.bluetooth.is_there_friendly_devices_nearby()
     self.blueStream.on_next(self.is_armed)
     # TODO: decouple sms alert
     if self.is_armed:
         Observable.just(True).subscribe(SmsObserver('system armed'))
     else:
         Observable.just(True).subscribe(SmsObserver('system disarmed'))
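One way to address the TODO above, sketched as a minimal, self-contained example (print stands in for SmsObserver, and armed_stream for self.blueStream): scan only pushes the armed state into the stream, and a single subscription turns state changes into alert messages.

from rx.subjects import Subject

armed_stream = Subject()  # stand-in for self.blueStream

armed_stream \
    .distinct_until_changed() \
    .map(lambda armed: 'system armed' if armed else 'system disarmed') \
    .subscribe(on_next=print)  # stand-in for Observable.just(True).subscribe(SmsObserver(text))

armed_stream.on_next(True)   # -> system armed
armed_stream.on_next(True)   # duplicate state, suppressed by distinct_until_changed
armed_stream.on_next(False)  # -> system disarmed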
Example #2
    def test_paused_with_immediate_unpause(self):
        subscription = [None]

        scheduler = TestScheduler()

        results = scheduler.create_observer()

        xs = scheduler.create_hot_observable(
            on_next(150, 1),
            on_next(210, 2),
            on_completed(500)
        )

        controller = Observable.just(True)

        pausable_buffered = xs.pausable_buffered(controller)

        def action1(scheduler, state):
            subscription[0] = pausable_buffered.subscribe(results)
        scheduler.schedule_absolute(200, action1)

        scheduler.start()

        results.messages.assert_equal(
            on_next(210, 2),
            on_completed(500)
        )
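In this test the source is hot, so the value fired at tick 150 is lost: the subscription only starts at tick 200. The Observable.just(True) controller unpauses the buffered stream as soon as it is subscribed, which is why the value at 210 and the completion at 500 reach the observer exactly as asserted.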
Example #3
def main():

    # Specify image file (image #2 cooperates better)
    FILE = 'res/box-2.jpg'

    # Stream the image as a whole
    originalObservable = Observable.just(cv2.imread(FILE))

    # Manipulate the image to bring out the box
    imageObservable = (
        originalObservable
        .map(lambda img: cv2.bilateralFilter(img, 9, 75, 75))       # bilateral blur (denoise)
        .map(lambda img: cv2.Canny(img, 100, 100, apertureSize=3))  # detect the edges
    )

    # Detect any lines and represent them with [startPoint, endPoint] in list [lines]
    lines = (
        imageObservable
        # transform the stream into multiple numpy vectors (each line represented by [[x1, y1, x2, y2]])
        .flat_map(lambda img: cv2.HoughLinesP(img, 1, pi/180, 50, minLineLength=70, maxLineGap=40))
        .map(lambda wrapped_line: wrapped_line[0])                  # flatten line array into [x1, y1, x2, y2]
        .map(lambda lv, _: [Line((lv[0], lv[1]), (lv[2], lv[3]))])  # pack line vector into [Line((x1, y1), (x2, y2))]
        .reduce(lambda accumulated, line: accumulated + line, [])   # reduce all lines into one [line, line, ..]
    )

    # For debugging/visualization purposes only
    # http://reactivex.io/documentation/operators/zip.html
    lines.zip(originalObservable, lambda lines, image: (image, lines)).subscribe(ImageObserver())
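The ImageObserver used above is not shown in this example; a hypothetical sketch of what it might look like (assuming Line keeps its two endpoint tuples as .start and .end) draws the detected lines on the original image and displays the result:

import cv2
from rx import Observer

class ImageObserver(Observer):
    def on_next(self, value):
        image, lines = value
        for line in lines:
            # assumes Line exposes its two endpoint tuples as .start and .end
            cv2.line(image, line.start, line.end, (0, 0, 255), 2)
        cv2.imshow('detected lines', image)
        cv2.waitKey(0)

    def on_error(self, error):
        print('error:', error)

    def on_completed(self):
        cv2.destroyAllWindows()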
Example #4
 def watch(self):
     # TODO: simplify this with reactive programming
     while True:
         if self.is_armed:
             time.sleep(parameters.main_interval_between_scan_while_armed)
         else:
             time.sleep(parameters.main_interval_between_scan_while_disarmed)
         will_arm = not self.bluetooth.is_there_friendly_devices_nearby()
         if not self.is_armed:
             if will_arm:
                 Observable.just(True).subscribe(SmsObserver('system armed'))
                 self.is_armed = True
                 self.blueStream.on_next(True)
         else:
             if not will_arm:
                 Observable.just(True).subscribe(SmsObserver('system disarmed'))
                 self.is_armed = False
                 self.blueStream.on_next(False)
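A hedged, self-contained sketch of the reactive version hinted at by the TODO above (friendly_devices_nearby stands in for the bluetooth check, print for SmsObserver, and the fixed 5-second period ignores the separate armed/disarmed intervals): poll on an interval, derive the armed state, and only react when it changes.

from rx import Observable

def friendly_devices_nearby():
    return False  # placeholder for self.bluetooth.is_there_friendly_devices_nearby()

Observable.interval(5000) \
    .map(lambda _: not friendly_devices_nearby()) \
    .distinct_until_changed() \
    .map(lambda armed: 'system armed' if armed else 'system disarmed') \
    .subscribe(on_next=print)
# (keep the main thread alive, e.g. with time.sleep in a loop, for the interval to fire)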
Example #5
def message():
    data = parse_qs(request.get_data(False, True, False))

    # parse_qs returns every value as a single-element list; unwrap it
    data = {x: data[x][0] for x in data}
    token = data['token']

    print("Credentials are - \n" + str(credentials))
    print("Token received is " + token)
    print("Stored token   is " + credentials['slack_token'])

    # Verify it came from slack
    if token != credentials['slack_token']:
        return Response(json.dumps({'text': 'Invalid'}), status=403, mimetype='application/json')
    else:
        message_data = data['text']
        Observable.just(message_data, new_thread_scheduler)\
            .map(lambda s: process_command(s))\
            .subscribe(BotObserver())

    resp = Response(None, status=200, mimetype='application/json')
    return resp
Example #6
def read_request(link):
    f = urlopen(link)
    return Observable.from_(f) \
        .map(lambda s: s.decode("gbk").strip())


print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"))

codes = [
    "usr_aapl", "usr_fb", "usr_goog", "usr_baba", "usr_ge", "usr_tsla",
    "usr_atvi", "usr_hpq"
]

source = Observable.from_(codes)\
    .map(lambda s: "http://hq.sinajs.cn/list={0}".format(s))\
    .flat_map(lambda s:
        Observable.just(s).subscribe_on(pool_scheduler).flat_map(lambda t: read_request(t))
    )\
    .map(lambda s: s.split('"')[1]) \
    .filter(lambda l: l != "") \
    .map(lambda s: s.split(","))\
    .map(lambda s: "股票:{0}           价格:{1}".format(s[0], s[1]))

source.subscribe(
    on_next=lambda i: print("{0} {1}".format(current_thread().name, i)),
    on_error=lambda e: print(e),
    on_completed=lambda: print(
        "PROCESS 1 Done!",
        datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")))

print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"))
Example #7
def deepspeech_server(sources):
    argv = sources.argv.argv
    stt = sources.httpd.route
    stt_response = sources.deepspeech.text.share()
    ds_logs = sources.deepspeech.log
    config_data = sources.file.response

    http_ds_error, route_ds_error = make_error_router()

    args = argparse.argparse(
        argv=argv.skip(1).subscribe_on(aio_scheduler),
        parser=Observable.just(
            argparse.Parser(description="deepspeech server")),
        arguments=Observable.from_([
            argparse.ArgumentDef(name='--config',
                                 help="Path of the server configuration file")
        ]))

    config_file = (args.filter(lambda i: i.key == 'config').map(
        lambda i: file.Read(id='config', path=i.value)))
    config = parse_config(config_data).subscribe_on(aio_scheduler)

    logs_config = (config.flat_map(lambda i: Observable.from_(i.log.level).map(
        lambda i: logging.SetLevel(logger=i.logger, level=i.level)).concat(
            Observable.just(logging.SetLevelDone()))))

    logs = Observable.merge(logs_config, ds_logs)
    log_ready = sources.logging.response.take(1)

    ds_stt = (stt.flat_map(lambda i: i.request).map(
        lambda i: deepspeech.SpeechToText(data=i.data, context=i.context)))

    ds_arg = (
        # config is hot; combine_latest keeps its last value
        # until logging is initialized
        log_ready.combine_latest(
            config, lambda _, i: i).map(lambda i: deepspeech.Initialize(
                model=i.deepspeech.model,
                alphabet=i.deepspeech.alphabet,
                lm=i.deepspeech.lm,
                trie=i.deepspeech.trie,
                features=deepspeech.FeaturesParameters(
                    n_features=i.deepspeech.features.n_features,
                    n_context=i.deepspeech.features.n_context,
                    beam_width=i.deepspeech.features.beam_width,
                    lm_alpha=i.deepspeech.features.lm_alpha,
                    lm_beta=i.deepspeech.features.lm_beta,
                ) if i.deepspeech.features is not None else None)))
    ds = ds_stt.merge(ds_arg)

    http_init = (config.flat_map(lambda i: Observable.from_([
        httpd.Initialize(request_max_size=i.server.http.request_max_size),
        httpd.AddRoute(
            methods=['POST'],
            path='/api/stt',
            id='stt',
        ),
        httpd.StartServer(host=i.server.http.host, port=i.server.http.port),
    ])))

    http_response = (stt_response.let(
        route_ds_error,
        error_map=lambda e: httpd.Response(
            data="Speech to text error".encode('utf-8'),
            context=e.args[0].context,
            status=500)).map(lambda i: httpd.Response(
                data=i.text.encode('utf-8'),
                context=i.context,
            )))

    http = Observable.merge(http_init, http_response, http_ds_error)

    return DeepspeechSink(file=file.Sink(request=config_file),
                          logging=logging.Sink(request=logs),
                          deepspeech=deepspeech.Sink(speech=ds),
                          httpd=httpd.Sink(control=http))
Example #8
from rx import Observable

number = Observable.just(1)
number.subscribe(on_next=lambda i: print("item: {}".format(i)),
                 on_error=lambda e: print("error: {}".format(e)),
                 on_completed=lambda: print("completed"))
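Running this prints the single value followed by the completion notification:

item: 1
completed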
Example #9
import asyncio
from rx import Observable


stream = Observable.just("Hello, world!")

async def hello_world():
    n = await stream
    print(n)

loop = asyncio.get_event_loop()
# Blocking call which returns when the hello_world() coroutine is done
loop.run_until_complete(hello_world())
loop.close()
Example #10
from rx import Observable, Observer

# Using Observable.range()
letters = Observable.range(1, 10)
letters.subscribe(lambda value: print(value))

# Using Observable.just()
greeting = Observable.just("Hello World!")
greeting.subscribe(lambda value: print(value))
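Note that Observable.range(1, 10) takes a start value and a count, so despite the variable name the first subscription prints the ten integers 1 through 10.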
Example #11
def getIntervalOb(time):
    return Observable.interval(time).switch_map(
        lambda i: Observable.just(i).subscribe_on(pool_scheduler))
Example #12
File: b3.py Project: miphip/bowling
from dumper import Dumper
from rx import Observable, Observer
from rx.internal import extensionmethod
from rx.subjects import Subject

in_ = '23432/XX428/X21X71'



Observable.from_(in_) \
    .flat_map(lambda q: Observable.range(1, 2) if q == 'X' else Observable.just(q)) \
    .buffer_with_count(2) \
    .map(lambda x, i: i) \
    .take(10) \
    .subscribe(Dumper('s'))
Example #13
def read_last_line_from_file(filename):
    with open(filename) as file:
        lines = file.readlines()
        last_line = lines[-1]
        return Observable.just(last_line)
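A hedged usage sketch (the file name here is hypothetical): the returned observable emits the last line as a single item.

read_last_line_from_file('server.log') \
    .map(str.rstrip) \
    .subscribe(on_next=lambda line: print('last line:', line))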
Example #14
def extract_features(sources):
    aio_scheduler = AsyncIOScheduler()
    file_response = sources.file.response.share()
    config_sink = config.read_configuration(
        config.Source(file_response=file_response,
                      argv=sources.argv.argv.subscribe_on(aio_scheduler)))
    configuration = config_sink.configuration.share()

    walk_adapter = walk.adapter(sources.walk.response)
    #file_adapter = file.adapter(sources.media_file.response)
    #write_feature_request, write_feature_file = router.make_crossroad_router(file_response)
    media_file_request, feature_file_request, process_path = path_processor.make_path_processor(
        sources.media_file.response, sources.feature_file.response)
    random_cross_request, cross_random = router.make_crossroad_router(
        sources.random.response)

    features = (
        configuration.flat_map(
            lambda configuration: walk_adapter.api.walk(configuration.dataset.
                                                        voxceleb2_path)
            # extract features from files
            .let(
                process_path,
                configuration=configuration,
                #file_adapter=file_adapter,
            )
            # create sets
            .reduce(lambda acc, i: acc + [{
                'file': i,
                'label': label_from_path(i),
                'set': set_from_path(i),
            }],
                    seed=[])
            # todo: shuffle
            .map(train_test_split).flat_map(
                lambda dataset: Observable.just(dataset['test']).map(pair_set)
                # shuffle apn pairs
                .map(lambda i: random.Shuffle(id='dev_test_set', data=i)).let(
                    cross_random).filter(lambda i: i.id == 'dev_test_set').
                map(lambda i: i.data).map(lambda i: test_dev_split(
                    i, configuration.dataset.dev_set_utterance_count,
                    configuration.dataset.test_set_utterance_count)).map(
                        lambda i: {
                            'train': dataset['train'],
                            'dev': i['dev'],
                            'test': i['test'],
                        }))).share())

    # save dataset json file
    write_dataset_request = (features.map(json.dumps).with_latest_from(
        configuration, lambda dataset, configuration: file.Write(
            id='write_dataset',
            path=configuration.dataset.path,
            data=dataset,
            mode='w')).share())

    # random
    random_request = Observable.concat(
        configuration.map(lambda i: random.SetSeed(value=i.random_seed)),
        random_cross_request)

    logs = features
    exit = sources.dataset_file.response.ignore_elements()

    return Sink(file=file.Sink(request=config_sink.file_request),
                dataset_file=file.Sink(request=write_dataset_request),
                media_file=file.Sink(request=media_file_request),
                feature_file=file.Sink(request=feature_file_request),
                logging=logging.Sink(request=logs),
                walk=walk.Sink(request=walk_adapter.sink),
                stop=stop.Sink(control=exit),
                random=random.Sink(request=random_request))
Example #15
def to_file(filename):
    f = open(filename)
    return Observable.using(lambda: Disposable(lambda: f.close()),
                            lambda d: Observable.just(f))
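A hedged usage sketch ('example.txt' is hypothetical): Observable.using ties the Disposable to the subscription, so the file handle is closed when the subscriber disposes.

from rx import Observable

to_file('example.txt') \
    .flat_map(lambda f: Observable.from_(f)) \
    .map(str.rstrip) \
    .subscribe(on_next=print, on_completed=lambda: print('done'))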
Example #16
    def _do_work(self, task):
        self.logger.info('Starting {}...'.format(task))

        return Observable.just(task).map(
            lambda task: self.post_process_imp.putTaskToPostgreSQL(task)
        ).catch_exception(lambda error: self._logErrorAndResume(error, task))
Example #17
 def publish_tasks(self, time_arr, task_arr):
     if self.task_producer is None:
         self.task_producer = Observable.from_(time_arr) \
             .flat_map(lambda i: Observable.timer(i * 1000)
                       .switch_map(lambda i: Observable.just(i).subscribe_on(pool_scheduler))) \
             .zip(Observable.from_(task_arr), lambda x, y: y) \
             .publish() \
             .ref_count()
     return self.task_producer
Example #18
    def process(path, configuration):
        '''Compute features for all files in path.

        This lettable operator processes all files present in the path
        observable. It reads each file, processes it, and writes the result.
        The processing is multithreaded via a thread pool. The resulting
        observable is still scheduled on the asyncio event loop.
        '''
        aio_ts_scheduler = AsyncIOScheduler(threadsafe=True)
        aio_scheduler = AsyncIOScheduler()
        thread_scheduler = ThreadPoolScheduler(
            max_workers=configuration.data_preparation.cpu_core_count)

        # prepare file routing : read media and write features
        media_file_response = file_response
        feature_write_request, write_feature_file = router.make_crossroad_router(
            feature_response)
        media_read_request, read_media_file = router.make_crossroad_router(
            media_file_response)

        media_read_request \
            .flat_map(lambda i: Observable.just(i, scheduler=thread_scheduler)) \
            .subscribe(sink_request)

        feature_write_request.subscribe(feature_request)

        # feature engineering
        return (
            path
            #.do_action(TraceObserver(prefix='write1', trace_next_payload=False))
            .flat_map(
                lambda i: i.files
                .map(lambda path: file.Read(
                    id='read_media',
                    path=path,
                    mode='rb',
                ))
                .let(read_media_file)
                .filter(lambda i: i.id == 'read_media')
                .flat_map(lambda media: media.data
                    #.do_action(lambda i: print('write20-{}'.format(threading.get_ident()))) \
                    #.do_action(TraceObserver(prefix='write20', trace_next_payload=False))
                    .let(process_audio, configuration=configuration.features)

                    .map(lambda i: data_to_feature(i, media.path, configuration))
                    #.do_action(TraceObserver(prefix='write21-{}'.format(threading.get_ident()), trace_next_payload=False))
                    # .do_action(lambda i: print(i.path))
                )
                #)
                #.do_action(lambda i: print('write2-{}'.format(threading.get_ident()))) \
                #.do_action(TraceObserver(prefix='write2', trace_next_payload=False))
            )
            # write feature file to disk
            .map(lambda i: file.Write(
               id='write_feature',
               path=i.path, data=i.data,
               mode='wb', mkdirs=True))
            .let(write_feature_file)
            .filter(lambda i: i.id == 'write_feature')
            .map(lambda i: i.path)

            #.do_action(lambda i: print('write2-2-{}'.format(threading.get_ident()))) \
            #.do_action(TraceObserver(prefix='write2-2', trace_next_payload=False))
            .observe_on(aio_ts_scheduler)
            #.map(lambda i: "/foo/bar/i/4.mp3")
            #.do_action(lambda i: print('write3-{}'.format(threading.get_ident())))
            #.do_action(TraceObserver(prefix='write3', trace_next_payload=False))
        )
Example #19
from modules.door_listener import DoorListener
from modules.light import LightOnObserver
from modules.light import LightOffObserver
from modules.siren_client import SirenClientObserver
from modules.sms import SmsObserver
from modules.pirs import Pirs
from modules.blue import Scanner

import threading

from rx import Observable

import parameters

door_listener = DoorListener()
pirs = Pirs()
scanner = Scanner()

# check that everything is ok
Observable.just(True).subscribe(SmsObserver('alarm started'))
Observable.just(True).subscribe(LightOnObserver())
Observable.timer(2000).subscribe(LightOffObserver())
# raises error: Camera component couldn't be enabled: Out of resources (other than memory)
# Observable.just(True).subscribe(CameraObserver())  # TODO take an initial photo on startup
scanner.scan()


def main():
    # start bluetooth scanning in the background
    new_bluetooth_thread = threading.Thread(target=scanner.watch)
    new_bluetooth_thread.daemon = True  # stop if the program exits
    new_bluetooth_thread.start()

    # start listening door tag
    new_doortag_thread = threading.Thread(target=door_listener.listen)
Example #20
import asyncio
import aiohttp
from rx import Observable
from rx.concurrency import AsyncIOScheduler

loop = asyncio.get_event_loop()
loop.set_debug(True)
scheduler = AsyncIOScheduler(loop)

def request(method, url, **kw):
    future = asyncio.ensure_future(aiohttp.request(method, url, **kw))
    return Observable.from_future(future)

ob = Observable.just(1, scheduler=scheduler)

ob10 = ob.\
    map(lambda x: request('GET', 'https://httpbin.org/')).\
    flat_map(lambda x: x)

ob10.subscribe(print)
ob.subscribe(print)

loop.run_forever()
Example #21
 def catch_error(error):
     exe_context.errors.append(error)
     return Observable.just(None)
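A hedged, self-contained sketch of how such a handler is typically wired up (the errors list stands in for exe_context.errors): catch_exception records the failure and keeps the stream alive by emitting a placeholder None.

from rx import Observable

errors = []

def catch_error(error):
    errors.append(error)
    return Observable.just(None)

Observable.just(1) \
    .map(lambda _: 1 / 0) \
    .catch_exception(catch_error) \
    .subscribe(lambda i: print('got:', i))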
Example #22
 def test_for_each_argument_checking(self):
     some = Observable.just(42).to_blocking()
     self.assertRaises(TypeError, lambda: Observable(None).to_blocking().for_each(lambda x: x))
     self.assertRaises(TypeError, lambda: some.for_each(lambda: None))
Example #23
def to_file(filename):
    f = open(filename)
    return Observable.using(
        lambda: Disposable(lambda: f.close()),
        lambda d: Observable.just(f)
    )
Example #24
import multiprocessing
import random
import time
from threading import current_thread

from rx import Observable
from rx.concurrency import ThreadPoolScheduler


def intense_calculation(value):
    # sleep for a random short duration between 0.5 and 2.0 seconds to simulate a long-running calculation
    time.sleep(random.randint(5, 20) * .1)
    return value

# calculate the number of CPUs plus one, then create a ThreadPoolScheduler with that many threads
optimal_thread_count = multiprocessing.cpu_count() + 1
pool_scheduler = ThreadPoolScheduler(optimal_thread_count)


Observable.of("Alpha", "Beta", "Gamma", "Delta", "Epsilon") \
    .flat_map(lambda s: Observable.just(s).subscribe_on(pool_scheduler).map(lambda s: intense_calculation(s))) \
    .subscribe(on_next=lambda s: print("PROCESS 1: {0} {1}".format(current_thread().name, s)),
               on_error=lambda e: print(e),
               on_completed=lambda: print("PROCESS 1 done!"))
Example #25
def class_five():
    Observable.just("Hello world").subscribe(on_next=lambda s: print(s))
Example #26
def commentObservable(obs):
    return Observable.just("/*").concat(obs.map(lambda line: " * " + line)).concat(Observable.just(" */"))
Example #27
def get_weather(location):
    with requests.session() as session:
        endpoint = '{}/{},{}'.format(URI, location[0], location[1])
        return Observable.just(session, scheduler=Scheduler.current_thread) \
            .map(lambda x: x.get(endpoint)) \
            .interval(1000)
Example #28
def audio_encoder(sources):
    # Parse configuration
    read_config_file = (
        sources.argv.argv.skip(1)
        .let(argparse.argparse,
            parser=Observable.just(
                argparse.Parser(description="audio encode server")),
            arguments=Observable.from_([
                argparse.ArgumentDef(
                    name='--config', help="Path of the server configuration file")
            ]))
        .filter(lambda i: i.key == 'config')
        .map(lambda i: file.Read(id='config', path=i.value))
    )
    config = sources.file.response.let(parse_config)

    # Transcode request handling
    encode_init = (
        config
        .map(lambda i: encoder.Initialize(storage_path=i.encode.storage_path))
    )

    encode_request = (
        sources.httpd.route
        .filter(lambda i: i.id == 'flac_transcode')
        .flat_map(lambda i: i.request)
        .do_action(lambda i: print("[{}]http req: {}".format(datetime.datetime.now(), threading.get_ident())))
        #.observe_on(encode_scheduler)
        .flat_map(lambda i: Observable.just(i, encode_scheduler))
        .do_action(lambda i: print("[{}]encode req: {}".format(datetime.datetime.now(), threading.get_ident())))
        .map(lambda i: encoder.EncodeMp3(
            id=i.context,
            data=i.data,
            key=i.match_info['key']))
    )
    encoder_request = Observable.merge(encode_init, encode_request)

    # store encoded file
    store_requests = (
        sources.encoder.response
        .do_action(lambda i: print("[{}]encode res: {}".format(datetime.datetime.now(), threading.get_ident())))
        .observe_on(s3_scheduler)
        .do_action(lambda i: print("[{}]s3 req: {}".format(datetime.datetime.now(), threading.get_ident())))
        .map(lambda i: s3.UploadObject(
            key=i.key + '.flac',
            data=i.data,
            id=i.id,
        ))
    )

    # acknowledge http request
    http_response = (
        sources.s3.response
        .do_action(lambda i: print("[{}]s3 res: {}".format(datetime.datetime.now(), threading.get_ident())))
        .do_action(lambda i: print("httpd res: {}".format(threading.get_ident())))
        .map(lambda i: httpd.Response(
            data='ok'.encode('utf-8'),
            context=i.id,
        ))
    )

    # http server
    http_init = (
        config
        .flat_map(lambda i: Observable.from_([
            httpd.Initialize(request_max_size=0),
            httpd.AddRoute(
                methods=['POST'],
                path='/api/transcode/v1/flac/{key:[a-zA-Z0-9-\._]*}',
                id='flac_transcode',
            ),
            httpd.StartServer(
                host=i.server.http.host,
                port=i.server.http.port),
        ]))
    )
    http = Observable.merge(http_init, http_response)

    # s3 database
    s3_init = (
        config
        .map(lambda i: s3.Configure(
            access_key=i.s3.access_key,
            secret_key=i.s3.secret_key,
            bucket=i.s3.bucket,
            endpoint_url=i.s3.endpoint_url,
            region_name=i.s3.region_name,
        ))
    )

    # merge sink requests
    file_requests = read_config_file
    s3_requests = Observable.merge(s3_init, store_requests)

    return Sink(
        encoder=encoder.Sink(request=encoder_request),
        s3=s3.Sink(request=s3_requests),
        file=file.Sink(request=file_requests),
        httpd=httpd.Sink(control=http),
    )
Example #29
from __future__ import print_function

from rx import Observable
from rx.concurrency import ThreadPoolScheduler
from threading import current_thread
import multiprocessing, time, random

def intense_calculation(value):
    # sleep for a random short duration between 0.5 and 2.0 seconds to simulate a long-running calculation
    time.sleep(random.randint(5,20) * .1)
    return value

# calculate the number of CPUs and add 1, then create a ThreadPoolScheduler with that number of threads
optimal_thread_count = multiprocessing.cpu_count() + 1
pool_scheduler = ThreadPoolScheduler(optimal_thread_count)

# Create Process 1
Observable.from_(["Alpha","Beta","Gamma","Delta","Epsilon", "Alpha","Beta","Gamma","Delta","Epsilon"]) \
    .flat_map(lambda s: Observable.just(s).subscribe_on(pool_scheduler).map(lambda s: intense_calculation(s))) \
    .subscribe(on_next=lambda s: print("PROCESS 1: {0} {1}".format(current_thread().name, s)),
               on_error=lambda e: print(e),
               on_completed=lambda: print("PROCESS 1 done!"))
input("Press any key to exit\n")
Example #30
from rx import Observable
from rx.concurrency import ThreadPoolScheduler

scheduler = ThreadPoolScheduler()

xs = Observable.range(1, 5).flat_map(lambda x: Observable.just(x, scheduler=scheduler), mapper)
Example #31
from __future__ import print_function

from rx import Observable, Observer
from rx.concurrency import ThreadPoolScheduler
from threading import current_thread
import multiprocessing, time, random

optimal_thread_count = multiprocessing.cpu_count() + 1
pool_scheduler = ThreadPoolScheduler(optimal_thread_count)

#         .switch_map(lambda i: Observable.from_(running_tasks).subscribe_on(pool_scheduler).map(lambda s: print_task_st(s))) \

a = Observable.interval(1000).switch_map(lambda i: Observable.just(
    i).subscribe_on(pool_scheduler)).publish().ref_count()

a.subscribe(
    lambda i: print("first observer {0}, {1}".format(i,
                                                     current_thread().name)))
time.sleep(5)
a.subscribe(
    lambda i: print("second observer {0}, {1}".format(i,
                                                      current_thread().name)))

while True:
    pass
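The final while True: pass busy-wait only keeps the main thread alive so the interval subscription keeps emitting on the pool threads; a time.sleep loop would serve the same purpose with far less CPU.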
Example #32
 def catch_error(error):
     subscriber_exe_context.errors.append(error)
     return Observable.just(None)
Example #33
import logging
import select
import sys

from rx import Observable

logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)

logger = logging.getLogger(__name__)


def file_source(file):
    return open(file, 'r')


def watch(handle):
    return select.select([handle], [], [], 5)


def print_f(value):
    print(value)


def handle_select(value):
    if ([], [], []) == value:
        logger.info('Nothing happening...')
    else:
        return value[0][0].readline()


Observable.just(sys.stdin) \
    .map(watch) \
    .map(handle_select) \
    .filter(bool) \
    .repeat() \
    .subscribe(on_next=logger.info)
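Because repeat() resubscribes the whole chain, Observable.just(sys.stdin) is emitted again after each pass, so the select/readline pipeline becomes an endless polling loop: lines typed on stdin are logged via on_next, while 'Nothing happening...' is logged whenever the five-second select times out (filter(bool) drops the None returned in that case).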