Example #1
    def test_error(self):
        numbers = Subject()
        windows = Subject()

        expected_numbers = []
        expected_error = None
        actual_numbers = []
        actual_error = None

        def on_next(i):
            actual_numbers.append(i)

        def on_error(e):
            nonlocal actual_error
            actual_error = e

        numbers.pipe(ops.buffer(windows)).subscribe(on_next=on_next,
                                                    on_error=on_error)

        numbers.on_next(1)
        numbers.on_next(2)
        numbers.on_error(ValueError())

        self.assertIsInstance(actual_error, ValueError)
        self.assertEqual(expected_numbers, actual_numbers)
Example #2
    def test_nominal(self):
        numbers = Subject()
        windows = Subject()

        expected_numbers = [[1, 2], [3, 4, 5]]
        expected_error = None
        actual_numbers = []
        actual_error = None

        def on_next(i):
            actual_numbers.append(i)

        def on_error(e):
            nonlocal actual_error
            actual_error = e

        numbers.pipe(ops.buffer(windows)).subscribe(on_next=on_next,
                                                    on_error=on_error)

        numbers.on_next(1)
        numbers.on_next(2)
        windows.on_next(True)
        numbers.on_next(3)
        numbers.on_next(4)
        numbers.on_next(5)
        windows.on_next(True)

        self.assertEqual(None, actual_error)
        self.assertEqual(expected_numbers, actual_numbers)
Example #3
class CustomOrderManager(OrderManager):
    """A sample order manager for implementing your own custom strategy"""
    def __init__(self):
        super().__init__()
        self.context = {'exchange': self.exchange}
        self.orderbook_stream = Subject()
        self.orderbook_stream.pipe(
            check_position_limits(), process_buy_orders(),
            process_sell_orders()).subscribe(self.flush_orders)

    def flush_orders(self, context):
        buy_orders = context.get('buy_orders', [])
        sell_orders = context.get('sell_orders', [])
        try:
            self.converge_orders(buy_orders, sell_orders)
        except Exception as e:
            logger.exception(e)

    def place_orders(self):
        orderbook = pd.DataFrame(self.exchange.bitmex.market_depth())
        orderbook = orderbook.sort_values(
            'price', ascending=False).reset_index(drop=True)
        self.context['orderbook'] = orderbook
        self.orderbook_stream.on_next(self.context)
Example #4
    def test_no_subscribe(self):
        source = Subject()
        source.pipe(
            trace_observable(prefix='foo',
                             trace_subscribe=False,
                             date=datetime.datetime(year=2018, month=8,
                                                    day=3))).subscribe()
        source.on_next('bar')
        self.assertEqual('2018-08-03 00:00:00:foo - on_next: bar',
                         self.out.getvalue().strip())
Example #5
class MyData:
    p1 = Value('Prop 1')
    p2 = Value('Prop 2')

    def __init__(self):
        self._subject = Subject()
        self.subscribe = self._subject.subscribe

    def observe(self, prop, listener):
        self._subject.pipe(rxops.filter(
            lambda item: item[1] == prop)).subscribe(listener)
Example #6
    def test_base_on_completed(self):
        source = Subject()
        source.pipe(
            trace_observable(prefix='foo',
                             date=datetime.datetime(year=2018, month=8,
                                                    day=3))).subscribe()
        source.on_completed()
        self.assertEqual(
            '2018-08-03 00:00:00:foo - on_subscribe\n'
            '2018-08-03 00:00:00:foo - on_completed\n'
            '2018-08-03 00:00:00:foo - dispose',
            self.out.getvalue().strip())
Example #7
        def schedule_emit_next_until(until: subject.Subject):
            stop_emitting = False

            def _action(sch: rx.typing.Scheduler, state=None):
                emit_next()

            def until_on_next(v):
                nonlocal stop_emitting
                stop_emitting = True

            until.pipe(operators.take_until(_stop)).subscribe(until_on_next,
                                                              scheduler=sch)

            if not stop_emitting:
                sch.schedule(_action)
Example #8
def reactive_frame(initial: Optional[pd.DataFrame] = None,
                   mutate=False) -> Tuple[Subject, rx.Observable]:
    """
    Creates a pair of observables, a subject that allows generating a stream of data, and
    a DataFrame accumulator computed from the data. This can operate in either mutable (where the
    data frame is modified rather than concatenated with each new point) or immutable (default) operation.

    In concurrent applications, you should be wary of setting `mutable=True`, unless you are okay
    with the data being updated behind your back. That being said, this option is more performant as pandas
    does not need to make a new copy of the frame with every push of data onto the stream.

    Args:
        initial (pd.DataFrame): Initial data frame which can be used to populate the types and column names.
        mutate (bool): Whether to modify or concat (make new copy) new data onto the accumulated DataFrame.

    Returns:
        A tuple of an rx.Subject and an rx.Observable providing the raw value
        and accumulated value streams respectively.
    """
    subject = Subject()

    def append_to_frame(old_frame: pd.DataFrame, new_item: Dict[str, any]):
        if mutate:
            old_frame.loc[len(old_frame)] = new_item
            return old_frame

        return pd.concat([old_frame, pd.DataFrame([new_item])],
                         ignore_index=True)

    accumulated = subject.pipe(ops.scan(append_to_frame, initial))

    return subject, accumulated
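A minimal usage sketch for reactive_frame above, assuming the surrounding module's imports (pandas as pd); the column names and pushed rows are made up for illustration:

import pandas as pd

# Seed the accumulator with an empty frame that fixes the column names.
prices, accumulated = reactive_frame(pd.DataFrame(columns=['symbol', 'price']))

# Print the accumulated DataFrame every time a new row arrives.
accumulated.subscribe(lambda frame: print(frame, end='\n\n'))

prices.on_next({'symbol': 'ABC', 'price': 10.0})
prices.on_next({'symbol': 'XYZ', 'price': 20.5})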
Example #9
def one_channel(para, need_end):
    '''A shared Value must be passed in like this so the loop can exit'''
    print(f'Start processing channel {para}')
    # Define the processing pipeline with rx + pure functions
    in_stream = Subject()

    out_stream = in_stream.pipe(
        # Simulate a simple transformation
        ops.map(lambda data: len(data)),
        #ops.filter(lambda record_zoc1: record_zoc1 is not None)
    )

    # Subscribe to handle the final output
    out_stream.subscribe(print)

    # Start the actual loop
    while True:
        # Use the shared value to decide when to stop
        #print('in child process ', need_end.value)
        if need_end.value:
            break
        # Simulate producing data
        time.sleep(1)
        data = '哈哈哈'
        # Push onto in_stream for processing
        in_stream.on_next(data)

    print('Child process finished')
Example #10
class WSHandler(WebSocketHandler):
    def open(self):
        print("WebSocket opened")

        # A Subject is both an observable and an observer, so we can subscribe
        # to it and also feed it (on_next) with new values
        self.subject = Subject()

        # Now we put on our magic glasses and project the stream of bytes into
        query = self.subject.pipe(
            # 1. a stream of keycodes
            ops.map(lambda obj: obj["keycode"]),
            # 2. a stream of windows (10 ints long)
            ops.window_with_count(10, 1),
            # 3. a stream of booleans, True or False
            ops.flat_map(lambda win: win.pipe(ops.sequence_equal(codes))),
            # 4. a stream of Trues
            ops.filter(lambda equal: equal))
        # 5. we then subscribe to the Trues and signal "Konami!" if we see any
        query.subscribe(lambda x: self.write_message("Konami!"))

    def on_message(self, message):
        obj = json_decode(message)
        self.subject.on_next(obj)

    def on_close(self):
        print("WebSocket closed")
Example #11
def test_parent_before_source():
    source = Subject()
    parent = Subject()

    actual_value = None

    def on_next(i):
        nonlocal actual_value
        actual_value = i

    disposable = parent.pipe(rxsci.ops.with_latest_from(source)).subscribe(
        on_next=on_next,
        on_error=lambda e: print(e),
    )

    parent.on_next(1)
    assert actual_value is None
    source.on_next('a')
    assert actual_value is None
    parent.on_next(2)
    assert actual_value == (2, 'a')
    parent.on_next(3)
    assert actual_value == (3, 'a')
    actual_value = None
    source.on_next('b')
    assert actual_value is None
    parent.on_next(4)
    assert actual_value == (4, 'b')
Example #12
class Socket:
    def execute_this_method(self):
        self.stream = Subject()
        url = URL

        # Subscribe before the blocking run_forever() call; pipe() expects
        # operators, so the scheduler is applied via ops.observe_on()
        # (rx.operators) rather than being passed to pipe() directly.
        react = self.stream.pipe(ops.observe_on(NewThreadScheduler()))
        subscription = react.subscribe()

        websocket.enableTrace(True)
        ws = websocket.WebSocketApp(
            url,
            on_message=self.on_message,
            on_error=self.on_error,
        )
        ws.run_forever()

        return subscription

    def on_message(self, message):
        data = []
        json_data = json.loads(message)
        if json_data["phase"] != "flavor text":
            if json_data["phase"] == "morning":
                data = json_data["text"]["@value"]
            elif json_data["phase"] == "result":
                data = json_data["avatar"]["name"]
        self.stream.on_next(data)

    def on_error(self, error):
        print(error)
Example #13
class WSHandler(WebSocketHandler):
    def open(self):
        print("WebSocket opened")

        self.stream = Subject()

        searcher = self.stream.pipe(ops.map(lambda x: x["term"]),
                                    ops.filter(lambda text: len(text) > 2),
                                    ops.debounce(0.750),
                                    ops.distinct_until_changed(),
                                    ops.flat_map_latest(search_wikipedia))

        def send_response(x):
            self.write_message(x.body)

        def on_error(ex):
            print(ex)

        searcher.subscribe(send_response, on_error, scheduler=scheduler)

    def on_message(self, message):
        obj = json_decode(message)
        self.stream.on_next(obj)

    def on_close(self):
        print("WebSocket closed")
Example #14
class WSHandler(WebSocketHandler):
    def open(self):
        scheduler = AsyncIOScheduler(asyncio.get_event_loop())

        print("WebSocket opened")

        # A Subject is both an observable and observer, so we can both subscribe
        # to it and also feed (send) it with new values
        self.subject = Subject()

        # Get all distinct key up events from the input and only fire if long enough and distinct
        searcher = self.subject.pipe(
            ops.map(lambda x: x["term"]),
            ops.filter(lambda text: len(text) > 2),  # Only if the text is longer than 2 characters
            ops.debounce(0.750),                     # Pause for 750ms
            ops.distinct_until_changed(),            # Only if the value has changed
            ops.flat_map_latest(search_wikipedia)
        )

        def send_response(x):
            self.write_message(x.body)

        def on_error(ex):
            print(ex)

        searcher.subscribe(send_response, on_error, scheduler=scheduler)

    def on_message(self, message):
        obj = json_decode(message)
        self.subject.on_next(obj)

    def on_close(self):
        print("WebSocket closed")
Example #15
def _new_files(file_handler):
    subject = Subject()

    def on_created(event):
        subject.on_next(event.src_path)

    file_handler.on_created = on_created
    return subject.pipe(operators.flat_map(_convert_file_if_necessary))
Example #16
class __InferenceEngine(EdgeThing):
    def __init__(self, properties_str: str,
                 inference: Callable[[str, object], Tuple[str, object]],
                 tag_groups: List[str], thing_cls: List[str]):
        super().__init__(properties_str=properties_str,
                         tag_groups=tag_groups,
                         thing_cls=thing_cls)
        self.__frame_data_class = class_from_thing_input(
            self.dr, self.thing, 'VideoFrameData')
        self.__frame_subject = Subject()
        self.__listener = FrameListener(self.__frame_subject,
                                        self.__frame_data_class)
        self.__inference_fn = inference
        self.__frame_subject.pipe(
            ops.map(lambda s: self.__inference_fn(s[0], s[1]))).subscribe(
                self._write_inference)

    @property
    def sink(self) -> Callable[[str, object], None]:
        def no_op(flow_id, data):
            log.warning('No sink configured.')
            pass

        return no_op

    def _write_inference(self, obj: Tuple[str, object]) -> None:
        self.sink(obj[0], obj[1])

    def run(self) -> None:
        """
        The main loop of the Thing, when it exits the lifecycle of the Thing is done.
        A listener is attached to the VideoFrame input and as the frames are received they a passed through
        the TensorFlow Object Detection API to transform the frame into a set of Regions of Interest (or
        Detection Boxes) that have a classification of what is in them.
        The result is packaged into a DetectionBox and written to the Data River.
        It exists when the `terminate` flage is set on the Thing by calling __exit__
        """
        dispatcher = Dispatcher()
        self.thing.add_listener(self.__listener, 'VideoFrameData', dispatcher)

        while not self.terminate:
            try:
                dispatcher.process_events(1000)
            except Exception:
                continue
Example #17
    def test_grouped_sample(self):
        subject = Subject()

        result = []
        scheduler = HistoricalScheduler()
        subject.pipe(grouped_sample(lambda x: x[0],
                                    10)).subscribe(result.append,
                                                   scheduler=scheduler)

        subject.on_next(("key", "first"))
        subject.on_next(("key_2", "first_2"))
        scheduler.advance_by(5)
        subject.on_next(("key", "second"))
        scheduler.advance_by(5)

        # check that only the latest of each key is returned
        self.assertEqual(len(result), 2)
        self.assertEqual(result[0][1], "second")
        self.assertEqual(result[1][1], "first_2")
Example #18
class SubjectMessageBus(typing.MessageBus, Disposable):
    def dispose(self) -> None:
        self.subject.dispose()

    def __init__(self, scheduler: Scheduler = None):
        self.subject = Subject()
        if scheduler is None:
            self.msgs = self.subject.pipe(as_observable())
        else:
            self.msgs = self.subject.pipe(subscribe_on(scheduler),
                                          as_observable())

    async def publish(self, msg):
        self.subject.on_next(msg)
        return

    @property
    def messages(self) -> rx.Observable:
        return self.msgs
Example #19
class DataSource(Process):
    '''Cannot be run this way for now?
        TypeError: cannot pickle '_thread.RLock' object
        PermissionError: [WinError 5] Access denied.
    '''
    def __init__(self, para):
        super(DataSource, self).__init__()
        self.para = para
        self.need_end = True
        self.is_done = False

        # Define the processing pipeline with rx + pure functions
        self.in_stream = Subject()

        self.out_stream = self.in_stream.pipe(
            # Simulate a simple transformation
            ops.map(lambda data: len(data)),
            #ops.filter(lambda record_zoc1: record_zoc1 is not None)
        )

        # Subscribe to handle the final output
        self.out_stream.subscribe(print)

    def run(self):
        '''Start producing data according to para'''
        print(f'Start processing channel {self.para}')
        while True:
            print('need_end in child process:', self.need_end)
            if not self.need_end:
                break
            # Simulate producing data
            time.sleep(1)
            data = '哈哈哈'
            # Push onto in_stream
            self.in_stream.on_next(data)
        print('Exited the loop')
        # Post-processing is done, safe to exit
        self.is_done = True

    def close(self):
        print('Child process about to finish')
        # Tell run() to stop
        self.need_end = False
        # Wait for run() to finish
        while not self.is_done:
            time.sleep(0.01)
        # Done
        print('Child process finished')
        super(DataSource, self).close()

    def end(self):
        print('Child process about to finish')
        # Tell run() to stop
        self.need_end = False
Example #20
def run() -> None:
    """Update store, create streams and run the main loop."""

    midi_stream = Subject()
    store.update('mappings', import_mappings())
    set_io_ports(midi_stream)

    midi_stream.pipe(
        ops.map(lambda x: process_midi(x)),
        ops.map(lambda x: get_translations(x)),
        ops.flat_map(lambda x: x),
        ops.map(lambda x: translate_and_send(x)),
        ops.do_action(lambda x: log(x)),
    ).subscribe(on_error=lambda x: print(f'ERROR: {x}'))

    # send initial bank to reset controller
    set_bank(1, initial=True)

    while True:
        time.sleep(1)
Example #21
def handle_source():
    subject = Subject()
    asyncio.create_task(sse_observable(subject))

    base_transform_pipe = subject.pipe(
        op.map(lambda data: {
            field: data.get(field)
            for field in USER_CONTRIBUTES_REQUIRED_FIELDS
        }), op.map(transform_timestamp))
    base_transform_pipe.subscribe(store_user_contributes)
    base_transform_pipe.subscribe(lambda data: logger.info(
        f"User: {data['user']} \nEdited article: {data['title']}"))
Example #22
class StateManagement(metaclass=Singleton):
    def __init__(self):
        self.addSegmentConfigSrc = Subject()
        self.addSegmentConfigSink = self.addSegmentConfigSrc.pipe(
            ops.map(lambda _: Repo().addSegment()))
        self.removeSegmentConfigSrc = Subject()
        self.removeSegmentConfigSink = self.removeSegmentConfigSrc.pipe()
        self.removeSegmentConfigSrc.subscribe(
            lambda key: Repo().removeSegment(key))

        self.updateSegmentSrc = Subject()
        self.updateSegmentSink = self.updateSegmentSrc.pipe(
            ops.map(Repo().updateSegment))
        # self.updateSegmentSink.subscribe(lambda _: _a)

        self.generateSegmentsSrc = Subject()
        self.retriveKnobTendonModels = self.generateSegmentsSrc.pipe(
            ops.map(Repo().generateSegments))
        self.retriveKnobTendonModels.subscribe(lambda _: None)

        self.updateTensionsSrc = Subject()
        self.updateTensionsSrc.subscribe(Repo().updateTensions)

        self.computeTensionsSrc = Subject()
        self.computeTensionsSink = self.computeTensionsSrc.pipe(
            ops.map(lambda _: Repo().computeTensions()))

        self.graphResizeUpdateSrc = Subject()
        self.graphResizeUpdateSink = self.graphResizeUpdateSrc.pipe(
            ops.debounce(2.0))

    def __del__(self):
        for v in self.__dict__.values():
            if isinstance(v, Subject):
                v.dispose()
Example #23
    def enqueue(self,
                observable: Observable,
                group: str = 'default-group',
                retries: int = 0,
                description: str = None) -> Observable:
        def log_status(status):
            logging.debug(
                str({
                    'WorkQueue': str(self),
                    'group': group,
                    status: description
                }))

        log_status('ENQUEUED')
        output = Subject()
        errors = Subject()
        output_finalized = Subject()

        def handle_error(e, _):
            log_status('FAILED')
            errors.on_next(e)
            return empty()

        def set_output_finalized():
            output_finalized.on_next(True)

        work = of(True).pipe(
            do_action(lambda _: log_status('STARTED')),
            take_until(output_finalized),
            flat_map(lambda _: observable.pipe(
                map(lambda value: of({
                    'value': value,
                    'output': output
                })),
                retry_with_backoff(
                    retries=retries,
                    description='{}.enqueue(group={}, description={})'.format(
                        self, group, description)),
                catch(handler=handle_error), take_until(output_finalized),
                take_until_disposed())),
            concat(of(of({
                'completed': True,
                'output': output
            }))), finally_action(lambda: log_status('COMPLETED')))

        self._queue.on_next({'work': work, 'group': group})

        return output.pipe(observe_on(self.request_scheduler),
                           throw_when(errors),
                           take_while(lambda r: not r.get('completed')),
                           map(lambda r: r.get('value')),
                           finally_action(set_output_finalized))
Example #24
def main():
    loop = asyncio.get_event_loop()
    io_scheduler = AsyncIOThreadSafeScheduler(loop=loop)
    scheduler = ThreadPoolScheduler(multiprocessing.cpu_count())

    semaphore = Subject()

    semaphore_stream = semaphore.pipe(
        ops.flat_map(lambda _: rx.of(True).pipe(
            ops.delay(ARGS.block_time, scheduler=scheduler),
            ops.start_with(False))), ops.start_with(True))

    video_stream_observable = rx.using(
        lambda: VideoStreamDisposable(),
        lambda d: rx.from_iterable(video_stream_iterable(d.cap)))

    gated_video_stream = video_stream_observable.pipe(
        ops.subscribe_on(scheduler),
        ops.sample(1 / ARGS.fps),  # sample frames based on fps
        ops.combine_latest(semaphore_stream),
        ops.filter(lambda tup: tup[1]),  # proceed only if semaphore allows
        ops.map(lambda tup: tup[0])  # take only frame
    )

    disposable = gated_video_stream.pipe(
        ops.filter(has_face),  # filter frames without faces
        ops.map(lambda frame: Image.fromarray(
            cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))),  # map frame to PIL image
        ops.map(lambda img: img.resize(
            (640, 360))),  # resize image (inference will be faster)
        ops.observe_on(io_scheduler),
        ops.map(lambda img: ImageFacesPair(img, analyse_frame(img))
                ),  # analyse frame for faces
        ops.filter(lambda img_faces_pair: any([
            face.top_prediction.confidence > ARGS.threshold
            for face in img_faces_pair.faces
        ])),  # proceed only if there is a known face in the frame
        ops.throttle_first(1),
        ops.flat_map(unlock_request),  # unlock the door
        ops.do_action(
            on_next=lambda _: semaphore.on_next(True)
        )  # trigger semaphore which will block stream for "block-seconds" seconds (doors are unlocked for that long after unlock request)
    ).subscribe(on_error=lambda e: logger.exception(e))

    try:
        loop.run_forever()
    except Exception as e:
        logger.exception(e)
        logger.info("Smart lock face recognition engine shutdown")
        disposable.dispose()
Example #25
async def main(loop):
    scheduler = AsyncIOScheduler(loop)
    finder = WikipediaFinder(loop)
    stream = Subject()

    def task(term):
        t = loop.create_task(finder.search(term))
        return rx.from_future(t)

    def pretty(result):
        parsed = json.loads(result)
        print(json.dumps(parsed, sort_keys=True, indent=2))

    stream.pipe(
        ops.debounce(0.750),
        ops.distinct(),
        ops.flat_map_latest(task)
    ).subscribe(pretty, scheduler=scheduler)

    def reader():
        line = sys.stdin.readline().strip()
        stream.on_next(line)

    loop.add_reader(sys.stdin.fileno(), reader)
Example #26
class MyData2:

    def __init__(self):
        self._subject = Subject()
        self.p1 = 'Prop 1'
        self.p2 = 'Prop 2'

    def __setattr__(self, name, value):
        if hasattr(self, '_subject') and getattr(self, name, None) != value:
            self._subject.on_next((self, name, value))
        super().__setattr__(name, value)

    def observe(self, name, *args, **kwargs):
        return self._subject.pipe(rxops.filter(lambda item: item[1] == name)).subscribe(*args, **kwargs)

    def subscribe(self, *args, **kwargs):
        return self._subject.subscribe(*args, **kwargs)
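A short usage sketch for MyData2 (the same observe pattern as Example #5); the attribute names come from the class above:

data = MyData2()

# React only to changes of the p1 attribute.
data.observe('p1', lambda item: print('p1 changed to', item[2]))

data.p1 = 'new value'  # triggers the listener
data.p2 = 'other'      # filtered out: different attribute name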
Example #27
class BufferedObserver():
    def __init__(self, func):
        self.values = Subject()
        self.boundaries = Subject()
        self.grouped_values = self.values.pipe(
            op.buffer(self.boundaries)
        )

        self.sub = self.grouped_values.subscribe(func)

    def deliver_values(self):
        self.boundaries.on_next(True)

    def __call__(self, value):
        self.values.on_next(value)

    def dispose(self):
        self.sub.dispose()
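A quick usage sketch for BufferedObserver above, assuming op is the rx.operators alias used in the class:

buffered = BufferedObserver(lambda batch: print('flushed:', batch))

buffered(1)
buffered(2)
buffered(3)
buffered.deliver_values()  # prints "flushed: [1, 2, 3]"

buffered(4)
buffered.deliver_values()  # prints "flushed: [4]"

buffered.dispose()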
Example #28
class BlenderKeyInput(KeyInput, ReactiveObject, EventLoopAware):
    pressed: RV[Set[int]] = rv.new_view()

    def __init__(self, context: BlenderContext) -> None:
        super().__init__(context)

        self._activeInputs = Subject()

        # noinspection PyTypeChecker
        self.pressed = self._activeInputs.pipe(
            ops.start_with({}), ops.map(lambda s: set(s.keys())))

    def process(self) -> None:
        self._activeInputs.on_next(keyboard.activeInputs)

    def dispose(self) -> None:
        super().dispose()

        self.execute_safely(self._activeInputs.dispose)
Example #29
def test_dispose_dead_letter():
    source = Subject()
    actual_result = []
    actual_error = []
    errors_result = []
    errors_error = []

    errors, route_errors = rs.error.create_error_router()

    data = source.pipe(
        rs.ops.multiplex(rx.pipe(
            rs.ops.map(lambda i: 1 / i),
            route_errors(),
        )))

    errors_disposable = errors.subscribe(on_next=errors_result.append,
                                         on_error=errors_error.append)
    disposable = data.subscribe(on_next=actual_result.append,
                                on_error=actual_error.append)

    source.on_next(1)
    source.on_next(0)
    source.on_next(2)
    source.on_next(0)

    assert errors_error == []
    assert len(errors_result) == 2
    assert type(errors_result[0]) is ZeroDivisionError
    assert type(errors_result[1]) is ZeroDivisionError
    assert actual_error == []
    assert actual_result == [1.0, 0.5]

    errors_disposable.dispose()
    source.on_next(1)
    source.on_next(0)

    assert len(actual_error) == 1
    assert type(actual_error[0]) is ZeroDivisionError
    assert actual_result == [1.0, 0.5, 1.0]
Example #30
class Processor(object):
    def __init__(self):  # type: (Processor) -> None
        # Editable params
        self.data_resolution = 250
        self.blink_threshold = 500
        self.recorded_data = []
        self.is_recording = False
        self._is_open = True
        self._sample_frequency = 512

        # Disposal handler
        self.subscriptions = []

        # Observers and Subjects
        self.data = Subject()
        self.subscriptions.append(self.data)

        # Hidden params
        self._raw_data_batch = []
        self._save_path = ''

    @staticmethod
    def _init_thread(target, args=()):  # type: (Any, Union[Tuple, Any]) -> None
        threading.Thread(target=target, args=args).start()

    def add_data(self, raw_data):  # type: (Processor, int) -> None
        self._raw_data_batch.append(raw_data)
        if len(self._raw_data_batch) >= self.data_resolution and self._is_open:
            self._init_thread(target=self._fft)

    def set_sampling_rate(self, fs):
        self._sample_frequency = fs

    def _fft(self):  # type: (Processor) -> None
        temp_data_batch = self._raw_data_batch.copy()
        self._raw_data_batch = []
        batch_size = len(temp_data_batch)
        if batch_size != 0 and (
                self.blink_threshold > np.amax(temp_data_batch) or -self.blink_threshold < np.amin(temp_data_batch)):
            x_fft = np.fft.rfftfreq(batch_size, 2 * (1 / self._sample_frequency))[2:50]
            y_fft = np.absolute(np.real(np.fft.rfft(temp_data_batch)))[2:50]
            self.data.on_next(np.array([x_fft, y_fft]))

    def record(self, path='./processor_data', recording_length=10):
        if not self.is_recording:
            self._save_path = os.path.realpath(path)
            self.recorded_data = []
            self.is_recording = True
            self.data.pipe(take_until_with_time(recording_length)).subscribe(
                on_next=lambda values: self.recorded_data.append(values),
                on_error=lambda e: print(e),
                on_completed=self._save
            )
        else:
            print('Already recording...')

    def _save(self):
        np.save(self._save_path, self.recorded_data)
        self.is_recording = False
        print('Recording Complete')

    def close(self):
        self._is_open = False  # stop add_data from scheduling new FFT batches
        sleep(1.5)
        for subscription in self.subscriptions:
            try:
                subscription.dispose()
            except DisposedException:
                pass
        print('Processor Closed!')
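Finally, a small usage sketch for Processor, assuming numpy is importable as np; the sample values and count are arbitrary:

import numpy as np

processor = Processor()
processor.set_sampling_rate(512)

# Print the shape of every FFT batch published on the `data` subject.
processor.data.subscribe(lambda batch: print('FFT batch shape:', batch.shape))

# Push enough raw samples to cross data_resolution (250) and trigger one FFT.
for sample in np.random.randint(-100, 100, size=256):
    processor.add_data(int(sample))

processor.close()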