def test_time_interval_default_scheduler(self):
        import datetime
        import time
        xs = rx.of(1, 2).pipe(
            ops.time_interval(),
            ops.pluck_attr('interval'),
        )

        l = []
        d = xs.subscribe(l.append)
        time.sleep(0.1)
        self.assertEqual(len(l), 2)
        for el in l:
            self.assertIsInstance(el, datetime.timedelta)
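A minimal standalone version of the same pipeline, assuming RxPY 3, that prints each emitted interval instead of asserting on it:

import datetime

import rx
from rx import operators as ops

rx.of(1, 2).pipe(
    ops.time_interval(),           # wrap each item with the time elapsed since the previous one
    ops.pluck_attr('interval'),    # keep only the timedelta part
).subscribe(lambda delta: print(isinstance(delta, datetime.timedelta), delta))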
Example n. 2
    def _export_year(geometry, year_start, year_end, export_description, year_dir):
        stack = _create_stack(geometry, year_start, year_end)
        if not stack.bandNames().size().getInfo():
            logging.info('No data between {} and {}'.format(year_start, year_end))
            return of({
                'exported': 1,
                'downloaded': 1,
                'downloaded_bytes': 0,
                'processed': 1
            })
        initial_progress = of({
            'exported': 0,
            'stack_bytes': 0,
            'dates_bytes': 0,
            'downloaded': 0,
            'processed': 0
        })

        def aggregate_downloaded_bytes(p):
            return {
                'exported': p['exported'],
                'downloaded': p['downloaded'],
                'downloaded_bytes': p['stack_bytes'] + p['dates_bytes'],
                'processed': p['processed']
            }

        return concat(
            initial_progress,
            merge(
                _export_and_download_stack(stack, export_description, year_dir),
                _export_and_download_dates(stack, export_description, year_dir)
            ),
            _process_year(year_dir),
            of({'processed': 1})
        ).pipe(
            scan(lambda acc, p: {**acc, **p}, {}),
            map(aggregate_downloaded_bytes)
        )
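The scan/map stage above is what folds the per-step progress dicts into one running status. A minimal sketch of that accumulation with placeholder values (the keys mirror the ones used above):

from rx import concat, merge, of, operators as ops

concat(
    of({'exported': 0, 'downloaded': 0, 'stack_bytes': 0, 'dates_bytes': 0}),
    merge(of({'stack_bytes': 10}), of({'dates_bytes': 5})),
    of({'exported': 1}),
).pipe(
    ops.scan(lambda acc, p: {**acc, **p}, {}),  # fold each partial update into the running state
    ops.map(lambda p: {**p, 'downloaded_bytes': p['stack_bytes'] + p['dates_bytes']}),
).subscribe(print)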
Example n. 3
    def do_retry(source, tries, exception):
        print(
            'retry_with_backoff(tries={}, retries={}, description={}, exception={})'
            .format(tries, retries, description, exception))
        if tries <= retries:
            logging.warning(
                'retry_with_backoff(tries={}, retries={}, exception={}, description={})'
                .format(tries, retries, exception, description))
            return of(None).pipe(
                delay(backoff(tries), TimeoutScheduler.singleton()),
                flat_map(source),
                catch(handler=lambda e, src: do_retry(src, tries + 1, e)))
        else:
            return throw(exception)
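Here `retries`, `description` and `backoff` come from the enclosing retry_with_backoff() closure, which is not shown. A self-contained sketch of the same retry-with-backoff idea, assuming RxPY 3 (the retry count and backoff policy below are made up for illustration):

import logging

from rx import of, throw, operators as ops
from rx.scheduler import TimeoutScheduler

retries = 3
backoff = lambda tries: 2 ** tries  # hypothetical policy, in seconds

def retry_with_backoff(source):
    def do_retry(src, tries, exception):
        if tries > retries:
            return throw(exception)
        logging.warning('retrying (%d/%d) after error: %s', tries, retries, exception)
        return of(None).pipe(
            ops.delay(backoff(tries), scheduler=TimeoutScheduler.singleton()),
            ops.flat_map(lambda _: src),
            ops.catch(handler=lambda e, s: do_retry(s, tries + 1, e)))

    return source.pipe(ops.catch(handler=lambda e, src: do_retry(src, 1, e)))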
Example n. 4
def demo_group_by():
    '''group items by id, projecting each item to its name'''
    a = rx.of(
        {'id': 1, 'name': 'aaa'},
        {'id': 2, 'name': 'bbb'},
        {'id': 1, 'name': 'aaa'},
        {'id': 1, 'name': 'aaa'},
        {'id': 2, 'name': 'aaa'},
    )

    a.pipe(
        ops.group_by(lambda x: x['id'], lambda x: x['name'], subject_mapper=lambda: rx.subject.ReplaySubject()),
        ops.to_iterable(),
    ).subscribe(print)
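Each group produced by ops.group_by is itself an observable. A minimal sketch, assuming RxPY 3, that collects every group into a (key, names) pair instead of relying on the custom subject_mapper:

import rx
from rx import operators as ops

rx.of(
    {'id': 1, 'name': 'aaa'},
    {'id': 2, 'name': 'bbb'},
    {'id': 1, 'name': 'ccc'},
).pipe(
    ops.group_by(lambda x: x['id'], lambda x: x['name']),
    ops.flat_map(lambda grp: grp.pipe(
        ops.to_list(),
        ops.map(lambda names: (grp.key, names)))),
).subscribe(print)   # (1, ['aaa', 'ccc']) and (2, ['bbb'])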
Example n. 5
    def _set_band_names():
        band_names_stream = (of(band_names) if band_names
                             else get_band_names(credentials, image))
        return concat(
            progress(
                default_message='Setting band names...',
                message_key='tasks.retrieve.image_to_sepal.setting_band_names'
            ),
            band_names_stream.pipe(
                flat_map(lambda names: set_band_names(
                    band_names=names,
                    files=[destination_path + '/*.tif', destination_path + '/*.vrt']
                )),
                flat_map(lambda _: empty())
            )
        )
Example n. 6
    def _export_and_download_stack(stack, export_description, year_dir):
        stack_drive_description = 'stack_' + export_description
        stack_drive_folder = '{}-{}'.format(stack_drive_description, str(uuid.uuid4()))
        stack_drive_path = '{}/{}'.format(drive_folder_path, stack_drive_folder)
        create_stack_drive_folder = create_folder_with_path(credentials, stack_drive_path).pipe(
            flat_map(lambda _: empty())
        )
        export_stack = _export_stack(stack, stack_drive_description, stack_drive_folder).pipe(
            flat_map(lambda _: empty())
        )
        download_stack_from_drive = _download_from_drive(
            path=stack_drive_path,
            destination=year_dir
        ).pipe(
            map(lambda p: {'stack_bytes': p.downloaded_bytes})
        )
        return concat(
            create_stack_drive_folder,
            export_stack,
            of({'exported': 1}),
            download_stack_from_drive,
            of({'downloaded': 1}),
        )
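The flat_map(lambda _: empty()) calls above run an observable purely for its side effect and completion, so that concat can emit a marker once it finishes. A minimal sketch of that pattern with a stand-in side effect:

from rx import concat, empty, of, operators as ops

side_effect = of('create folder').pipe(
    ops.do_action(print),              # stand-in for the real Drive call
    ops.flat_map(lambda _: empty()),   # swallow the value, keep only completion
)
concat(side_effect, of({'exported': 1})).subscribe(print)
# prints 'create folder' (side effect), then {'exported': 1}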
Example n. 7
    def on_mouse_over(self) -> Observable:
        position = MouseInput.input(self).observe("position")
        local_pos = position.pipe(ops.map(lambda p: p - self.offset))

        return self.on_mouse_move.pipe(
            ops.map(lambda e: e.position),
            ops.map(lambda p: rx.concat(
                rx.of(p),
                rx.never().pipe(
                    ops.take_until(
                        local_pos.pipe(
                            ops.filter(lambda l: not self.bounds.contains(l)))
                    )))), ops.exclusive(),
            ops.map(lambda p: MouseOverEvent(self, p)))
Example n. 8
def main():
    loop = asyncio.get_event_loop()
    io_scheduler = AsyncIOThreadSafeScheduler(loop=loop)
    scheduler = ThreadPoolScheduler(multiprocessing.cpu_count())

    semaphore = Subject()

    semaphore_stream = semaphore.pipe(
        ops.flat_map(lambda _: rx.of(True).pipe(
            ops.delay(ARGS.block_time, scheduler=scheduler),
            ops.start_with(False))), ops.start_with(True))

    video_stream_observable = rx.using(
        lambda: VideoStreamDisposable(),
        lambda d: rx.from_iterable(video_stream_iterable(d.cap)))

    gated_video_stream = video_stream_observable.pipe(
        ops.subscribe_on(scheduler),
        ops.sample(1 / ARGS.fps),  # sample frames based on fps
        ops.combine_latest(semaphore_stream),
        ops.filter(lambda tup: tup[1]),  # proceed only if semaphore allows
        ops.map(lambda tup: tup[0])  # take only frame
    )

    disposable = gated_video_stream.pipe(
        ops.filter(has_face),  # filter frames without faces
        ops.map(lambda frame: Image.fromarray(
            cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))),  # map frame to PIL image
        ops.map(lambda img: img.resize(
            (640, 360))),  # resize image (inference will be faster)
        ops.observe_on(io_scheduler),
        ops.map(lambda img: ImageFacesPair(img, analyse_frame(img))
                ),  # analyse frame for faces
        ops.filter(lambda img_faces_pair: any([
            face.top_prediction.confidence > ARGS.threshold
            for face in img_faces_pair.faces
        ])),  # proceed only if there is a known face in the frame
        ops.throttle_first(1),
        ops.flat_map(unlock_request),  # unlock the door
        ops.do_action(
            on_next=lambda _: semaphore.on_next(True)
        )  # trigger semaphore which will block stream for "block-seconds" seconds (doors are unlocked for that long after unlock request)
    ).subscribe(on_error=lambda e: logger.exception(e))

    try:
        loop.run_forever()
    except Exception as e:
        logger.exception(e)
        logger.info("Smart lock face recognition engine shutdown")
        disposable.dispose()
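The semaphore gating above pairs every frame with the latest gate value and drops frames while the gate is closed. A minimal sketch of that gating, with plain Subjects standing in for the camera and semaphore streams:

import rx
from rx import operators as ops
from rx.subject import Subject

frames = Subject()
gate = Subject()

frames.pipe(
    ops.combine_latest(gate),
    ops.filter(lambda pair: pair[1]),   # proceed only while the gate is open
    ops.map(lambda pair: pair[0]),      # keep only the frame
).subscribe(print)

gate.on_next(True)
frames.on_next('frame1')   # printed
gate.on_next(False)
frames.on_next('frame2')   # dropped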
Example n. 9
def collect_pos(sender, **kwargs):
    results = []
    source = rx.of(*kwargs['rs'])
    pos_list: List[Text] = kwargs['data']

    source.pipe(
        filter_pos(pos_list),
        to_token(),
    ).subscribe(
        on_next=lambda value: results.append(value),
        on_error=lambda e: logger.error(e),
        on_completed=lambda: logger.debug("done."),
    )
    return results
Example n. 10
def enqueue(
        queue: WorkQueue,
        action: Callable = None,
        description: str = None,
        retries: int = None
):
    return of(True).pipe(
        operators.enqueue(
            queue=queue,
            mapper=lambda _: action(),
            description=description,
            retries=retries
        )
    )
Example n. 11
def execute(
        credentials,
        action: Callable,
        retries: int = 5,
        description: str = None
) -> Observable:
    return of(True).pipe(
        operators.execute(
            credentials,
            mapper=lambda _: action(),
            retries=retries,
            description=description
        )
    )
Example n. 12
    def next_token(self, token):
        if token == b"(":
            self.stack.append([])
            return rx.empty()
        elif token == b")":
            part = self.stack.pop()
            if self.stack:
                self.stack[-1].append(part)
                return rx.empty()
            else:
                return rx.of(part)
        else:
            self.stack[-1].append(token)
            return rx.empty()
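next_token above is a method of a tokenizer class (not shown) that initialises self.stack = []. A hypothetical driver, with Parser as an illustrative name for that class, shows how flat_map turns the token stream into a stream of completed top-level expressions:

import rx
from rx import operators as ops

parser = Parser()                      # hypothetical owner of next_token and self.stack
rx.of(b'(', b'add', b'(', b'1', b'2', b')', b')').pipe(
    ops.flat_map(parser.next_token),   # nothing is emitted until the outer ')' closes
).subscribe(print)                     # [b'add', [b'1', b'2']]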
Example n. 13
def get_source(sents, lang, domain_type=None) -> Observable:
    from sagas.nlu.ruleset_procs import cached_chunks, get_main_domains
    from sagas.conf.conf import cf
    import rx

    engine = cf.engine(lang)
    if domain_type is None:
        domain_type, domains = get_main_domains(sents, lang, engine)
    else:
        chunks = cached_chunks(sents, lang, engine)
        domains = chunks[domain_type]
    table_rs = []
    for ds in domains:
        flat_table(ds, '', table_rs)
    return rx.of(*table_rs)
Example n. 14
    def retrieve_knowledge_edge(
        self, config: Callable[[KnowledgeEdgeReq], None]
    ) -> Observable[KnowledgeEdgeRes]:
        knowledge_req: Callable[[KnowledgeReq],
                                None] = lambda x: x.edges(config)
        retrieval_req: Callable[[RetrievalReq],
                                None] = lambda x: x.knowledge(knowledge_req)
        query_req: Callable[[QueryReq],
                            None] = lambda x: x.retrieve(retrieval_req)
        query_res: Callable[
            [QueryRes],
            KnowledgeEdgeRes] = lambda x: x.retrieve.knowledge.edges

        observable = rx.of(self.client.query(GaiaRequest.query(query_req)))
        return flat_mapQ(observable, query_res)
Example n. 15
    def perceive_data(
            self,
            impulse: PerceiveDataImpulse) -> Observable[PerceivedImpulse]:
        perceive_data_req: Callable[
            [PerceptionReq],
            None] = lambda x: x.perceive_data(impulse, lambda e: e.id())
        mutation_req: Callable[[MutationReq],
                               None] = lambda x: x.perceive(perceive_data_req)
        mutation_res: Callable[
            [MutationRes],
            PerceivedImpulse] = lambda x: x.perceive.perceive_data

        observable = rx.of(
            self.client.mutation(GaiaRequest.mutation(mutation_req)))
        return mapM(observable, mutation_res)
Example n. 16
    def preserve_create_fulfilments(
        self, impulses: List[CreateFulfilmentImpulse]
    ) -> Observable[CreatedFulfilmentImpulse]:
        fulfilment_req = lambda x: x.id()
        create_req: Callable[[PreservationReq], None] = lambda x: x.create(
            lambda e: e.fulfilments(impulses, fulfilment_req))
        mutation_req: Callable[[MutationReq],
                               None] = lambda x: x.preserve(create_req)
        mutation_res: Callable[
            [MutationRes],
            CreatedFulfilmentImpulse] = lambda x: x.preserve.create.fulfilments

        observable = rx.of(
            self.client.mutation(GaiaRequest.mutation(mutation_req)))
        return flat_mapM(observable, mutation_res)
Example n. 17
    def preserve_delete_prompts(
        self, impulses: List[DeletePromptImpulse]
    ) -> Observable[DeletedPromptImpulse]:
        prompt_req = lambda x: x.id()
        delete_prompts: Callable[[PreservationReq], None] = lambda x: x.delete(
            lambda e: e.prompts(impulses, prompt_req))
        mutation_req: Callable[[MutationReq],
                               None] = lambda x: x.preserve(delete_prompts)
        mutation_res: Callable[
            [MutationRes],
            DeletedPromptImpulse] = lambda x: x.preserve.delete.prompts

        observable = rx.of(
            self.client.mutation(GaiaRequest.mutation(mutation_req)))
        return flat_mapM(observable, mutation_res)
Example n. 18
    def retrieve_behaviour(
            self, config: Callable[[BehaviourReq],
                                   None]) -> Observable[BehaviourRes]:
        behaviour_req: Callable[[BehaviourReq],
                                None] = lambda x: x.behaviours(config)
        retrieval_req: Callable[[RetrievalReq],
                                None] = lambda x: x.knowledge(behaviour_req)
        query_req: Callable[[QueryReq],
                            None] = lambda x: x.retrieve(retrieval_req)
        query_res: Callable[
            [QueryRes],
            BehaviourRes] = lambda x: x.retrieve.knowledge.behaviours

        observable = rx.of(self.client.query(GaiaRequest.query(query_req)))
        return flat_mapQ(observable, query_res)
Example n. 19
    def retrieve_fulfilments(
            self, config: Callable[[FulfilmentReq],
                                   None]) -> Observable[FulfilmentRes]:
        fulfilment_req: Callable[[FulfilmentReq],
                                 None] = lambda x: x.fulfilments(config)
        retrieval_req: Callable[[RetrievalReq],
                                None] = lambda x: x.knowledge(fulfilment_req)
        query_req: Callable[[QueryReq],
                            None] = lambda x: x.retrieve(retrieval_req)
        query_res: Callable[
            [QueryRes],
            FulfilmentRes] = lambda x: x.retrieve.knowledge.fulfilments

        observable = rx.of(self.client.query(GaiaRequest.query(query_req)))
        return flat_mapQ(observable, query_res)
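In the snippets above, rx.of(self.client.query(...)) evaluates the client call eagerly, while the observable is being built, not when it is subscribed. A minimal sketch of the difference, with a hypothetical slow_call standing in for the client:

import rx

def slow_call():
    print('called')
    return 42

eager = rx.of(slow_call())                             # 'called' is printed right here
lazy = rx.defer(lambda scheduler: rx.of(slow_call()))  # nothing runs yet
lazy.subscribe(print)                                  # 'called', then 42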
Example n. 20
    def __init__(self,
                 size: Dimension,
                 toolkit: FixtureToolkit,
                 look_and_feel: Optional[LookAndFeel] = None,
                 font_options: Optional[FontOptions] = None,
                 window_manager: Optional[WindowManager] = None,
                 error_handler: Optional[ErrorHandler] = None) -> None:
        if size is None:
            raise ValueError("Argument 'size' is required.")

        # noinspection PyTypeChecker
        self.window_size = rx.of(size)

        super().__init__(toolkit, look_and_feel, font_options, window_manager,
                         error_handler)
Example n. 21
    def preserve_update_intents(
        self, impulses: List[UpdateIntentImpulse]
    ) -> Observable[UpdatedIntentImpulse]:
        intent_req = lambda x: x.id()
        update_intents: Callable[[PreservationReq], None] = lambda x: x.update(
            lambda e: e.intents(impulses, intent_req))
        mutation_req: Callable[[MutationReq],
                               None] = lambda x: x.preserve(update_intents)
        mutation_res: Callable[
            [MutationRes],
            UpdatedIntentImpulse] = lambda x: x.preserve.update.intents

        observable = rx.of(
            self.client.mutation(GaiaRequest.mutation(mutation_req)))
        return flat_mapM(observable, mutation_res)
Example n. 22
def file_by_path(path: str) -> Observable:
    root_stream = of({'id': 'root', 'path': '/'})

    def find_with_parent(parent_stream, name):
        return parent_stream.pipe(
            flat_map(lambda parent: list_folder(parent, name_filter=name)),
            map(lambda files: files[0] if len(files) else None),
            flat_map(lambda file: of(file) if file else throw(
                Exception('File {} does not exist.'.format(path)))))

    return from_list(path.split('/')).pipe(
        filter(lambda name: name and name.strip()),  # Allows double // and trailing /
        reduce(find_with_parent, root_stream),
        flat_map(lambda file_stream: file_stream.pipe(map(lambda file: file))),
        first())
Example n. 23
def getCategoryAndDependencies(selectedJobId):
    return rx.of(selectedJobId).pipe(
        ops.flat_map(
            lambda selectedJobId: getCategory(selectedJobId)
        ),
        ops.flat_map(
            lambda category: getTestruns(category)
        ),
        ops.flat_map(
            lambda testruns: rx.from_(testruns)
        ),
        ops.map(
            lambda testrun: getTestrunDependencies(testrun)
        ),
        ops.merge(max_concurrent=1),
    )
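ops.merge(max_concurrent=1) flattens the observable of observables produced by ops.map while subscribing to only one inner sequence at a time, which preserves order. A minimal sketch with synchronous stand-in sources:

import rx
from rx import operators as ops

rx.of(1, 2, 3).pipe(
    ops.map(lambda n: rx.of(n * 10, n * 10 + 1)),  # one inner observable per item
    ops.merge(max_concurrent=1),                   # flatten sequentially, in order
).subscribe(print)   # 10, 11, 20, 21, 30, 31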
Example n. 24
def main(sources):
    ticks_stream = rx.interval(timedelta(seconds=1)).pipe(
        map(lambda n: n + 1), take(10))
    responses_stream = sources["http"].pipe(flat_map(lambda s: s), )
    queries = rx.of(
        {
            "url": "https://jsonplaceholder.typicode.com/todos/1"
        },
        {
            "url": "https://error123123123.co.uk"
        },
    ).pipe(delay(timedelta(seconds=2)))
    return {
        "log": rx.merge(queries, responses_stream, ticks_stream),
        "http": queries
    }
Example n. 25
    def timestampOperator(self):
        source = interval(1).pipe(
            op.map(lambda second: self.intense_calculation(second)),
            op.timestamp()
        )
        
        source2 = of('Alpha', 'Beta', 'Gamma', 'Delta', 'Epsilon').pipe(
            op.map(lambda second: self.intense_calculation(second)),
            op.timestamp()
        )

        result = source.pipe(
            op.merge(source2)
        )

        result.subscribe(lambda item: print('"Subscribe Timestamp" output: {}'.format(item)))
Example n. 26
    def preserve_delete_behaviours(
        self, impulses: List[DeleteBehaviourImpulse]
    ) -> Observable[DeletedBehaviourImpulse]:
        behaviour_req = lambda x: x.id()
        delete_behaviours: Callable[
            [PreservationReq], None] = lambda x: x.delete(
                lambda e: e.behaviours(impulses, behaviour_req))
        mutation_req: Callable[[MutationReq],
                               None] = lambda x: x.preserve(delete_behaviours)
        mutation_res: Callable[
            [MutationRes],
            DeletedBehaviourImpulse] = lambda x: x.preserve.delete.behaviours

        observable = rx.of(
            self.client.mutation(GaiaRequest.mutation(mutation_req)))
        return flat_mapM(observable, mutation_res)
Example n. 27
    def complexMerge(self):
        # This is infinite
        source = interval(1).pipe(
            op.map(lambda item: item * 10),
            op.map(lambda second: self.intense_calculation(second)))

        source2 = of('Alpha', 'Beta', 'Gamma', 'Delta', 'Epsilon').pipe(
            op.map(lambda second: self.intense_calculation(second)))

        result = source.pipe(op.merge(source2))

        result.subscribe(on_next=lambda item: print(
            '"Subscribe Complex Merge" output: {}'.format(item)),
                         on_error=lambda err: print('Error: {}'.format(err)))

        input('Press any key to exit\n')
Example n. 28
def pos_proc(sender, **kwargs):
    results = []

    source = rx.of(*kwargs['rs'])
    cond: pred_cond = kwargs['data']
    logger.debug(f"pred pos: {cond}")

    source.pipe(
        filter_path(cond.part),
        ops.filter(lambda t: t.upos.lower() in cond.cond),
        to_token(cond.cond),
    ).subscribe(
        on_next=lambda value: results.append({**value}),
        on_error=lambda e: logger.error(e),
    )

    return results
Example n. 29
    def __init__(self, context: Context, visible: bool = True) -> None:
        super().__init__(context, visible)

        # noinspection PyTypeChecker
        self.hover = rx.merge(
            self.on_mouse_over.pipe(ops.map(lambda _: True)),
            self.on_mouse_out.pipe(ops.map(lambda _: False))).pipe(ops.start_with(False))

        mouse = MouseInput.input(self)

        # noinspection PyTypeChecker
        self.active = self.on_mouse_down.pipe(
            ops.filter(lambda e: e.button == MouseButton.LEFT),
            ops.map(lambda _: rx.concat(rx.of(True), mouse.on_button_release(MouseButton.LEFT).pipe(
                ops.take(1),
                ops.map(lambda _: False)))),
            ops.exclusive(),
            ops.start_with(False))
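The `active` stream above turns to True on a left press and back to False on the matching release, ignoring overlapping presses via ops.exclusive(). A minimal sketch of that toggle, with plain Subjects standing in for the mouse streams:

import rx
from rx import operators as ops
from rx.subject import Subject

press = Subject()
release = Subject()

active = press.pipe(
    ops.map(lambda _: rx.concat(
        rx.of(True),
        release.pipe(ops.take(1), ops.map(lambda _: False)))),
    ops.exclusive(),         # ignore new presses until the current inner stream completes
    ops.start_with(False),
)
active.subscribe(print)      # False
press.on_next('down')        # True
release.on_next('up')        # False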
Example n. 30
def cat_proc(sender, **kwargs):
    from sagas.nlu.utils import predicate
    from sagas.nlu.translator import trans_axis

    results = []

    source = rx.of(*kwargs['rs'])
    lang = kwargs['lang']
    cond: pred_cond = kwargs['data']
    logger.debug(f"pred pos: {cond}")

    kind = cond.cond
    logger.debug(f"lang: {lang}, cond: {cond}")
    source.pipe(
        filter_path(cond.part),
        ops.map(lambda t: to_obj({
            'word':
            t.text if t.upos.lower() in ['adj'] else t.lemma,
            **t
        })),
        ops.map(lambda t: to_obj({
            'trans': trans_axis(t.word, lang, t.upos),
            **t
        })),
        ops.filter(lambda t: predicate(kind, t.trans, 'en', '*')),
        ops.map(
            lambda t: {
                'path': t.path,
                'word': t.word,
                'trans': t.trans,
                'cat': kind,
                'value': kind,
                'pos': t.upos.lower()
            }),
    ).subscribe(
        on_next=lambda value: results.append({**value}),
        on_error=lambda e: logger.error(e),
    )
    logger.debug(f"result: {results}")
    return results
Example n. 31
    def _watch_door_health(self, building_map):
        doors: List[Door] = []
        if building_map:
            for level in building_map.levels:
                level: Level
                for door in level.doors:
                    doors.append(door)

        def to_door_health(data: Tuple[str, bool]):
            id_ = data[0]
            has_heartbeat = data[1]
            if has_heartbeat:
                return ttm.DoorHealth(id_=id_,
                                      health_status=ttm.HealthStatus.HEALTHY)
            return ttm.DoorHealth(
                id_=id_,
                health_status=ttm.HealthStatus.DEAD,
                health_message="heartbeat failed",
            )

        keys = [x.name for x in doors]
        initial_values: Sequence[Tuple[str, Any]] = [(k, None) for k in keys]
        obs = rx.merge(
            rx.of(*initial_values),
            self.rmf.door_states.pipe(ops.map(lambda x: (x.door_name, x))),
        )

        door_mode_health = obs.pipe(ops.map(self._door_mode_to_health))

        heartbeat_health = obs.pipe(
            self._watch_heartbeat(lambda x: x[0]),
            ops.map(to_door_health),
        )

        sub = heartbeat_health.pipe(
            self._combine_most_critical(door_mode_health), ).subscribe(
                self.rmf.door_health.on_next, scheduler=self.scheduler)
        self._building_watchers.append(sub)
Example n. 32
    def create():
        return rx.of(1, 2, 3, 4, 5)
Example n. 33
    def test_of_empty(self):
        results = []

        rx.of().subscribe(results.append)

        assert(len(results) == 0)
Example n. 34
    def create():
        return rx.of(scheduler=scheduler)
Example n. 35
    def test_of(self):
        results = []

        rx.of(1, 2, 3, 4, 5).subscribe(results.append)

        assert(str([1, 2, 3, 4, 5]) == str(results))
Example n. 36
    def test_run_of(self):
        result = rx.of(1, 2, 3).run()
        assert result == 3