Example 1
    def test_handles_interval_breaks(self):
        LAYOUT = "int32_3"
        LENGTH = 1000
        my_pipe = LocalPipe(LAYOUT, name="pipe")
        test_data1 = helpers.create_data(LAYOUT, length=LENGTH)
        test_data2 = helpers.create_data(LAYOUT, length=LENGTH)
        my_pipe.write_nowait(test_data1)
        my_pipe.close_interval_nowait()
        my_pipe.write_nowait(test_data2)

        async def reader():
            # read the first interval
            read_data = await my_pipe.read()
            self.assertTrue(my_pipe.end_of_interval)
            my_pipe.consume(len(read_data) - 20)  # leave the last 20 rows unconsumed
            np.testing.assert_array_equal(test_data1, read_data)

            # read the second interval: the 20 unconsumed rows are prepended
            read_data = await my_pipe.read()
            self.assertFalse(my_pipe.end_of_interval)
            self.assertEqual(len(read_data), len(test_data2) + 20)
            my_pipe.consume(len(read_data))
            np.testing.assert_array_equal(test_data2, read_data[20:])

        loop = asyncio.get_event_loop()
        loop.run_until_complete(reader())
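
A note on the pattern above: rows left unconsumed by consume() are prepended to the next read, and end_of_interval describes the data just returned. Below is a minimal consumer sketch assembled only from the calls these tests exercise (read(), consume(), end_of_interval); the empty-pipe exception name varies across examples (pipes.EmptyPipe in Example 2, EmptyPipeError in Example 20), so the sketch takes it as a parameter instead of assuming one.

import numpy as np

async def drain_by_interval(pipe, on_interval, empty_exc):
    # sketch: accumulate rows until the pipe marks an interval
    # boundary, then hand the whole interval to the callback
    chunks = []
    while True:
        try:
            data = await pipe.read()
        except empty_exc:
            break
        boundary = pipe.end_of_interval  # refers to the data just read
        chunks.append(data.copy())       # copy before consuming
        pipe.consume(len(data))
        if boundary and chunks:
            on_interval(np.hstack(chunks))
            chunks = []
    if chunks:  # flush the final interval
        on_interval(np.hstack(chunks))
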
Example 2
    async def test_subscribes_to_data(self):
        db: Session = self.app["db"]
        my_stream = db.query(DataStream).filter_by(name="stream1").one()
        blk1 = helpers.create_data(my_stream.layout)
        blk2 = helpers.create_data(my_stream.layout, length=50)
        my_pipe = pipes.LocalPipe(my_stream.layout)
        my_pipe.write_nowait(blk1)
        my_pipe.close_interval_nowait()
        my_pipe.write_nowait(blk2)
        my_pipe.close_nowait()
        self.supervisor.subscription_pipe = my_pipe
        async with self.client.get("/data", params={"id": my_stream.id,
                                                    "subscribe": '1'}) as resp:
            pipe = pipes.InputPipe(stream=my_stream, reader=resp.content)
            rx_blk1 = await pipe.read()
            pipe.consume(len(rx_blk1))
            np.testing.assert_array_equal(blk1, rx_blk1)
            self.assertTrue(pipe.end_of_interval)

            rx_blk2 = await pipe.read()
            pipe.consume(len(rx_blk2))
            np.testing.assert_array_equal(blk2, rx_blk2)
            with self.assertRaises(pipes.EmptyPipe):
                await pipe.read()
        self.assertEqual(self.supervisor.unsubscribe_calls, 1)
Example 3
 async def test_insert(self):
     await self.store.initialize([self.stream1])
     nrows = 300
     data = helpers.create_data(layout="int8_3", length=nrows)
     # first insert
     await self.store.insert(self.stream1, data['timestamp'][0],
                             data['timestamp'][-1] + 1, data)
     self.assertEqual(self.fake_nilmdb.streams["/joule/1"].rows, nrows)
     # another insert
     data = helpers.create_data(layout="int8_3", start=data['timestamp'][-1] + 1,
                                length=nrows)
     await self.store.insert(self.stream1, data['timestamp'][0],
                             data['timestamp'][-1] + 1, data)
     self.assertEqual(self.fake_nilmdb.streams["/joule/1"].rows, nrows * 2)
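
Every example depends on helpers.create_data. The sketch below is a plausible reconstruction for illustration, not the project's actual helper: judging from the tests, the layout string (e.g. 'int8_3') selects the element datatype and column count, and the result is a structured array with a 'timestamp' field, which is why the tests can slice it, hstack it, and index data['timestamp']. The 1000 us default step is a guess based on the comments in Example 22.

import numpy as np

def create_data_sketch(layout: str, length: int = 100, start: int = 0, step: int = 1000):
    # 'int8_3' -> element dtype int8, 3 data columns (assumed encoding)
    dtype_name, n_elem = layout.rsplit('_', 1)
    n_elem = int(n_elem)
    dtype = np.dtype([('timestamp', '<i8'), ('data', dtype_name, (n_elem,))])
    sdata = np.zeros(length, dtype=dtype)
    # evenly spaced microsecond timestamps
    sdata['timestamp'] = start + step * np.arange(length, dtype='i8')
    # arbitrary payload values, cast to the element datatype
    sdata['data'] = (np.random.rand(length, n_elem) * 100).astype(dtype_name)
    return sdata
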
Example 4
 async def test_propogates_intervals_to_decimations(self):
     self.stream1.decimate = True
     source = QueueReader()
     pipe = pipes.InputPipe(stream=self.stream1, reader=source)
     # insert the following broken chunks of data
     # |----15*8----|-15-|-15-|-15-|-15-|  ==> raw (180 samples)
     # |-----30-----|-3--|--3-|--3-|--3-|  ==> x4  (42 samples)
     # |-----7------|                      ==> x16 (7 samples)
     # |-----1------|                      ==> x64 (1 sample)
     n_chunks = 12
     for i in range(n_chunks):
         data = helpers.create_data(layout="int8_3", length=15, start=i * 1e6, step=1)
         await source.put(data.tobytes())
         if i > 6:  # breaks in the 2nd half
             await source.put(pipes.interval_token("int8_3").tobytes())
     task = await self.store.spawn_inserter(self.stream1, pipe, insert_period=0)
     await task
     # should have raw, x4, x16, x64, x256
     self.assertEqual(len(self.fake_nilmdb.streams), 5)
     self.assertEqual(self.fake_nilmdb.streams["/joule/1"].rows, n_chunks * 15)
     # x4 level should be missing data due to interval breaks
     self.assertEqual(self.fake_nilmdb.streams["/joule/1~decim-4"].rows, 42)
     # x16 level should have 7 samples (only from the first part)
     self.assertEqual(self.fake_nilmdb.streams["/joule/1~decim-16"].rows, 7)
     # x64 level should have 1 sample
     self.assertEqual(self.fake_nilmdb.streams["/joule/1~decim-64"].rows, 1)
     # x256 level should be empty
     self.assertEqual(self.fake_nilmdb.streams["/joule/1~decim-256"].rows, 0)
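
The asserted row counts follow from the chunk pattern: breaks after chunks 7 through 11 leave one 120-sample interval plus four 15-sample intervals, and each decimation level appears to keep floor(n/4) of the rows from the level below within each contiguous interval. A sketch of the arithmetic (an assumption about the decimator, checked against the assertions above):

# one 120-sample interval followed by four 15-sample intervals
intervals = [8 * 15] + [15] * 4
x4 = sum(n // 4 for n in intervals)      # 30 + 4*3 = 42
x16 = sum(n // 16 for n in intervals)    # 7 + 0    = 7
x64 = sum(n // 64 for n in intervals)    # 1 + 0    = 1
x256 = sum(n // 256 for n in intervals)  # 0
assert (x4, x16, x64, x256) == (42, 7, 1, 0)
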
Example 5
    async def test_inserter_clean(self):
        self.stream1.datatype = DataStream.DATATYPE.UINT16
        self.stream1.keep_us = 24 * 60 * 60 * 1e6  # 1 day
        self.stream1.decimate = True

        source = QueueReader(delay=0.1)
        await source.put(helpers.create_data(layout="uint16_3").tobytes())
        pipe = pipes.InputPipe(stream=self.stream1, reader=source)
        self.store.cleanup_period = 0
        task = await self.store.spawn_inserter(self.stream1, pipe, insert_period=0)
        await task

        self.assertTrue(len(self.fake_nilmdb.remove_calls) > 0)
        # make sure decimations have been removed too
        removed_paths = [x['path'] for x in self.fake_nilmdb.remove_calls]
        self.assertTrue('/joule/1' in removed_paths)
        self.assertTrue('/joule/1~decim-4' in removed_paths)
        self.assertTrue('/joule/1~decim-16' in removed_paths)
        # make sure nilmdb cleanup executed with correct parameters
        params = self.fake_nilmdb.remove_calls[-1]
        self.assertEqual(int(params['start']), 0)
        expected = int(time.time() * 1e6) - self.stream1.keep_us
        actual = int(params['end'])
        # should be within 0.1 second
        np.testing.assert_almost_equal(expected / 1e6, actual / 1e6, decimal=1)
Example 6
    def test_when_destination_is_invalid(self):
        server = FakeJoule()
        # create the source stream
        src = DataStream(id=0,
                         name="source",
                         keep_us=100,
                         datatype=DataStream.DATATYPE.FLOAT32)
        src.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]

        # source has 4 rows of data
        src_data = helpers.create_data(src.layout, length=4)
        src_info = StreamInfo(int(src_data['timestamp'][0]),
                              int(src_data['timestamp'][-1]), len(src_data))

        server.add_stream('/test/source', src, src_info, np.ndarray([]))
        self.start_server(server)
        runner = CliRunner()
        result = runner.invoke(main,
                               ['data', 'copy', '/test/source', 'badpath'])
        self.assertTrue('Error' in result.output
                        and 'destination' in result.output)
        self.assertEqual(result.exit_code, 1)
        self.stop_server()
Example 7
def create_source_data(server, no_intervals=False):
    # create the source stream
    src = DataStream(id=0,
                     name="source",
                     keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [
        Element(name="e%d" % x,
                index=x,
                display_type=Element.DISPLAYTYPE.CONTINUOUS) for x in range(3)
    ]

    # source has 100 rows of data, split into four intervals unless no_intervals is set
    src_data = helpers.create_data(src.layout,
                                   length=100,
                                   start=1548353881 * 1e6,
                                   step=1e6)

    ts = src_data['timestamp']

    if no_intervals:
        intervals = []
        src_info = StreamInfo(None, None, 0)

    else:
        intervals = [[ts[0], ts[24]], [ts[25], ts[49]], [ts[50], ts[74]],
                     [ts[75], ts[99]]]
        src_info = StreamInfo(intervals[0][0], intervals[-1][1], len(src_data))

    server.add_stream('/test/source', src, src_info, src_data, intervals)
    return intervals
Example 8
 async def extract(self,
                   stream: DataStream,
                   start: Optional[int],
                   end: Optional[int],
                   callback: Callable[[np.ndarray, str, bool], Coroutine],
                   max_rows: Optional[int] = None,
                   decimation_level=1):
     if self.no_data:
         return  # do not call the callback func
     if self.raise_data_error:
         raise DataError("nilmdb error")
     if self.raise_decimation_error:
         raise InsufficientDecimationError("insufficient decimation")
     if decimation_level is not None and decimation_level > 1:
         layout = stream.decimated_layout
     else:
         decimation_level = 1
         layout = stream.layout
     for i in range(self.nintervals):
         for x in range(self.nchunks):
             await callback(helpers.create_data(layout, length=25), layout,
                            decimation_level)
         if i < (self.nintervals - 1):
             await callback(pipes.interval_token(layout), layout,
                            decimation_level)
Example 9
 def test_reader_requires_single_output(self):
     module = SimpleReader()
     data = helpers.create_data(self.stream.layout, length=10)
     pipe_arg = json.dumps(
         json.dumps({
             "outputs": {
                 'first': {
                     'fd': 0,
                     'id': None,
                     'layout': self.stream.layout
                 },
                 'second': {
                     'fd': 1,
                     'id': None,
                     'layout': self.stream.layout
                 }
             },
             "inputs": {}
         }))
     args = argparse.Namespace(pipes=pipe_arg,
                               module_config="unset",
                               socket="unset",
                               url='http://localhost:8080',
                               node="",
                               api_socket="",
                               mock_data=data)
     # run the reader module
     with self.assertLogs(level="ERROR") as logs:
         module.start(args)
     all_logs = ' '.join(logs.output).lower()
     self.assertTrue('output' in all_logs)
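
The double json.dumps above is easy to misread: the pipes argument travels as a JSON string whose payload is itself JSON, presumably so the whole configuration survives as a single command-line value. A self-contained round-trip sketch:

import json

pipe_arg = json.dumps(json.dumps({"outputs": {"first": {"fd": 0}}, "inputs": {}}))
config = json.loads(json.loads(pipe_arg))  # decode twice to undo the double encoding
assert config["inputs"] == {} and "first" in config["outputs"]
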
Example 10
def create_source_data(server):
    # create the source stream
    src = DataStream(id=0,
                     name="source",
                     keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [
        Element(name="e%d" % x,
                index=x,
                display_type=Element.DISPLAYTYPE.CONTINUOUS) for x in range(3)
    ]

    # source has 100 rows of data in four intervals between [0, 100]
    src_data = helpers.create_data(src.layout, length=100, start=0, step=1)
    # insert the intervals
    pipe_data = np.hstack(
        (src_data[:25], pipes.interval_token(src.layout), src_data[25:50],
         pipes.interval_token(src.layout), src_data[50:75],
         pipes.interval_token(src.layout), src_data[75:]))
    ts = src_data['timestamp']
    intervals = [[ts[0], ts[24]], [ts[25], ts[49]], [ts[50], ts[74]],
                 [ts[75], ts[99]]]
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]), len(src_data))
    server.add_stream('/test/source', src, src_info, pipe_data, intervals)
    return src_data
Example 11
    def test_confirms_data_removal(self):
        # if there is existing data in the target, confirm removal
        server = FakeJoule()
        # create the destination stream
        src = DataStream(id=0,
                         name="dest",
                         keep_us=100,
                         datatype=DataStream.DATATYPE.FLOAT32)
        src.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]
        # the stream holds the last 500 of 1000 rows; the file has the first 750
        src_data = helpers.create_data(src.layout, length=1000)
        # File:       |--------|
        # DataStream:      |--------|
        src_info = StreamInfo(int(src_data['timestamp'][500]),
                              int(src_data['timestamp'][-1]), 500)
        server.add_stream('/test/dest', src, src_info, src_data[500:])
        self.start_server(server)
        runner = CliRunner()
        with tempfile.NamedTemporaryFile() as data_file:
            write_hd5_data(data_file, src_data[:750])
            result = runner.invoke(
                main, ['data', 'ingest', '--file', data_file.name], input='N')
            _print_result_on_error(result)
            self.assertEqual(result.exit_code, 0)
            self.assertIn('Cancelled', result.output)

        self.stop_server()
Example 12
 def test_ingests_data_to_empty_existing_stream(self):
     server = FakeJoule()
     # create the existing destination stream
     src = DataStream(id=0,
                      name="existing",
                      keep_us=100,
                      datatype=DataStream.DATATYPE.FLOAT32)
     src.elements = [
         Element(name="e%d" % x,
                 index=x,
                 display_type=Element.DISPLAYTYPE.CONTINUOUS)
         for x in range(3)
     ]
     src_data = helpers.create_data(src.layout, length=22000)
     src_info = StreamInfo(0, 0, 0, 0)
     server.add_stream('/test/existing', src, src_info, None)
     self.start_server(server)
     runner = CliRunner()
     with tempfile.NamedTemporaryFile() as data_file:
         write_hd5_data(data_file, src_data)
         result = runner.invoke(main, [
             'data', 'ingest', '--file', data_file.name, '--stream',
             '/test/existing'
         ])
         _print_result_on_error(result)
         self.assertEqual(result.exit_code, 0)
     db_obj = self.msgs.get()
     np.testing.assert_array_equal(src_data, db_obj.data)
     # uses the stream parameter instead of the hd5 attrs
     self.assertEqual(db_obj.stream.name, 'existing')
     self.stop_server()
Example 13
    def test_creates_stream_if_necessary(self):
        server = FakeJoule()
        # create the source stream
        src = DataStream(id=0,
                         name="source",
                         keep_us=100,
                         datatype=DataStream.DATATYPE.FLOAT32)
        src.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]

        # source has 4 rows of data
        src_data = helpers.create_data(src.layout, length=4)
        src_info = StreamInfo(int(src_data['timestamp'][0]),
                              int(src_data['timestamp'][-1]), len(src_data))
        server.add_stream('/test/source', src, src_info, src_data,
                          [[src_info.start, src_info.end]])

        self.start_server(server)
        runner = CliRunner()
        result = runner.invoke(
            main, ['data', 'copy', '/test/source', '/test/destination'])
        _print_result_on_error(result)
        self.assertEqual(result.exit_code, 0)
        while self.msgs.empty():
            time.sleep(0.1)
            print("waiting...")
        mock_entry = self.msgs.get()
        np.testing.assert_array_equal(src_data, mock_entry.data)
        self.stop_server()
Example 14
 def test_sends_data_to_subscribers(self):
     LAYOUT = "float32_2"
     (fd_r, fd_w) = os.pipe()
     loop = asyncio.get_event_loop()
     output_cb = CoroutineMock()
     input_cb = CoroutineMock()
     subscriber_cb = CoroutineMock()
     npipe_out = OutputPipe(layout=LAYOUT, writer_factory=writer_factory(fd_w),
                            close_cb=output_cb)
     subscriber = LocalPipe(layout=LAYOUT, close_cb=subscriber_cb)
     npipe_out.subscribe(subscriber)
     test_data = helpers.create_data(LAYOUT)
     loop.run_until_complete(npipe_out.write(test_data))
     # data should be available on the InputPipe side
     npipe_in = InputPipe(layout=LAYOUT, reader_factory=reader_factory(fd_r),
                          close_cb=input_cb)
     rx_data = loop.run_until_complete(npipe_in.read())
     np.testing.assert_array_equal(test_data, rx_data)
     # data should also be available on the Subscriber output
     rx_data = subscriber.read_nowait()
     np.testing.assert_array_equal(test_data, rx_data)
     loop.run_until_complete(asyncio.gather(npipe_in.close(),
                                            npipe_out.close()))
     # subscriber should be closed
     self.assertTrue(subscriber.closed)
     # make sure all of the callbacks have been executed
     self.assertEqual(output_cb.call_count, 1)
     self.assertEqual(input_cb.call_count, 1)
     self.assertEqual(subscriber_cb.call_count, 1)
Example 15
    def test_raises_consume_errors(self):
        LAYOUT = "float32_2"
        LENGTH = 100
        (fd_r, fd_w) = os.pipe()
        loop = asyncio.get_event_loop()
        npipe_in = InputPipe(layout=LAYOUT, reader_factory=reader_factory(fd_r))
        npipe_out = OutputPipe(layout=LAYOUT, writer_factory=writer_factory(fd_w))
        test_data = helpers.create_data(LAYOUT, length=LENGTH)

        loop.run_until_complete(npipe_out.write(test_data))
        read_data = loop.run_until_complete(npipe_in.read())
        # can't consume more than was read
        with self.assertRaises(PipeError) as e:
            npipe_in.consume(len(read_data)+1)
        self.assertTrue('consume' in str(e.exception))
        # can't consume less than zero
        with self.assertRaises(PipeError) as e:
            npipe_in.consume(-1)
        self.assertTrue('negative' in str(e.exception))
        # fine to consume zero rows
        npipe_in.consume(0)

        # close the pipes
        loop.run_until_complete(asyncio.gather(npipe_in.close(),
                                               npipe_out.close()))
Example 16
    def test_different_format_writes(self):
        LAYOUT = "int8_2"
        loop = asyncio.get_event_loop()
        my_pipe = LocalPipe(LAYOUT, name="testpipe")

        test_data = helpers.create_data(LAYOUT, length=4, step=1, start=106)

        async def write():
            # write unstructured numpy arrays
            await my_pipe.write(np.array([[1, 1, 1]]))
            await my_pipe.write(np.array([[2, 2, 2], [3, 3, 3]]))

            # logs empty writes
            with self.assertLogs(level="INFO") as logs:
                await my_pipe.write(np.array([[]]))

            # errors on invalid write types
            bad_data = [[100, 1, 2], 'invalid', 4, None, np.array([4, 8])]
            for data in bad_data:
                with self.assertRaises(PipeError):
                    await my_pipe.write(data)

            # write structured numpy arrays
            await my_pipe.write(test_data)

        loop.run_until_complete(write())
        result = my_pipe.read_nowait(flatten=True)
        np.testing.assert_array_equal(result[:3],
                                      [[1, 1, 1], [2, 2, 2], [3, 3, 3]])
        my_pipe.consume(3)
        result = my_pipe.read_nowait()
        np.testing.assert_array_equal(result, test_data)
Example 17
    def test_runs_webserver(self):
        module = InterfaceReader()
        data = helpers.create_data(self.stream.layout, length=10)
        port = unused_port()
        args = argparse.Namespace(pipes="unset",
                                  module_config="unset",
                                  socket="unset",
                                  port=port,
                                  host="127.0.0.1",
                                  url='http://localhost:8080',
                                  node="",
                                  api_socket="",
                                  mock_data=data)

        def get_page():
            time.sleep(0.5)
            resp = requests.get('http://localhost:%d' % port)
            self.assertEqual(resp.content.decode('utf8'), 'Hello World')

        getter = threading.Thread(target=get_page)
        getter.start()
        f = io.StringIO()
        with redirect_stdout(f):
            module.start(args)
        getter.join()
Example 18
    def test_datatype_mismatch(self):
        #  the datatype of the file and target stream must match
        server = FakeJoule()
        # create the destination stream
        src = DataStream(id=0,
                         name="dest",
                         keep_us=100,
                         datatype=DataStream.DATATYPE.UINT16)
        src.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]
        # the file data is int16 but the stream expects uint16
        file_data = helpers.create_data('int16_3')
        src_info = StreamInfo(0, 0, 0, 0)
        server.add_stream('/test/dest', src, src_info, None)
        self.start_server(server)
        runner = CliRunner()
        with tempfile.NamedTemporaryFile() as data_file:
            write_hd5_data(data_file, file_data)
            result = runner.invoke(
                main, ['data', 'ingest', '--file', data_file.name])
            self.assertIn("datatype", result.output)
            self.assertNotEqual(result.exit_code, 0)

        self.stop_server()
Example 19
    async def _test_row_count(self):
        test_data = helpers.create_data(layout=self.test_stream.layout,
                                        length=10000)
        test_stream = DataStream(
            id=95,
            name="stream1",
            datatype=DataStream.DATATYPE.FLOAT32,
            keep_us=DataStream.KEEP_ALL,
            decimate=True,
            elements=[Element(name="e%d" % x) for x in range(3)])
        pipe = pipes.LocalPipe(test_stream.layout)
        task = await self.store.spawn_inserter(test_stream, pipe)
        await pipe.write(test_data)
        await pipe.close()
        await task
        conn: asyncpg.Connection = await asyncpg.connect(self.db_url)
        # test to make sure nrows is within 10% of actual value
        # Test: [start, end]
        nrows = await psql_helpers.get_row_count(conn, test_stream, None, None)
        self.assertGreater(nrows, len(test_data) * 0.9)
        # Test: [ts,end]
        nrows = await psql_helpers.get_row_count(
            conn, test_stream, test_data[len(test_data) // 2][0], None)
        self.assertLess(abs(nrows - len(test_data) // 2), 0.1 * len(test_data))
        # Test: [start, ts]
        nrows = await psql_helpers.get_row_count(
            conn, test_stream, None, test_data[len(test_data) // 3][0])
        self.assertLess(abs(nrows - len(test_data) // 3), 0.1 * len(test_data))

        # Test: [ts, ts]
        nrows = await psql_helpers.get_row_count(
            conn, test_stream, test_data[2 * len(test_data) // 6][0],
            test_data[3 * len(test_data) // 6][0])
        self.assertLess(abs(nrows - len(test_data) // 6), 0.1 * len(test_data))

        # Test: [ts, ts] (no data)
        nrows = await psql_helpers.get_row_count(
            conn, test_stream, test_data['timestamp'][0] - 100,
            test_data['timestamp'][0] - 50)
        self.assertEqual(0, nrows)

        # Test row count for stream with no data tables
        empty_stream = DataStream(
            id=96,
            name="empty",
            datatype=DataStream.DATATYPE.FLOAT64,
            keep_us=100,
            decimate=True,
            elements=[Element(name="e%d" % x) for x in range(8)])
        nrows = await psql_helpers.get_row_count(conn, empty_stream, None,
                                                 None)
        self.assertEqual(0, nrows)
        nrows = await psql_helpers.get_row_count(
            conn, empty_stream, test_data[len(test_data) // 2][0], None)
        self.assertEqual(0, nrows)
        nrows = await psql_helpers.get_row_count(
            conn, test_stream, test_data['timestamp'][0] - 100,
            test_data['timestamp'][0] - 50)
        self.assertEqual(0, nrows)
Example 20
    def test_read_data_must_be_consumed(self):
        # data read from the pipe must be consumed; unconsumed rows come back on the next read
        LAYOUT = "float32_2"
        LENGTH = 500
        my_pipe = LocalPipe(LAYOUT)
        chunk1 = helpers.create_data(LAYOUT, length=LENGTH)
        chunk2 = helpers.create_data(LAYOUT, length=LENGTH)
        chunk3 = helpers.create_data(LAYOUT, length=LENGTH)

        my_pipe.write_nowait(chunk1)

        async def reader():
            await my_pipe.read()
            my_pipe.consume(0)
            # consuming nothing: the same data comes back on the next read
            # add a second copy of the test data
            await my_pipe.write(chunk2)
            rx_data = await my_pipe.read()
            # two copies of the data now
            np.testing.assert_array_equal(chunk1, rx_data[:len(chunk1)])
            np.testing.assert_array_equal(chunk2, rx_data[len(chunk1):])
            # write another copy but consume the first
            my_pipe.consume(len(chunk1))
            await my_pipe.write(chunk3)
            rx_data = await my_pipe.read()
            # two copies of the data now
            np.testing.assert_array_equal(chunk2, rx_data[:len(chunk2)])
            np.testing.assert_array_equal(chunk3, rx_data[len(chunk2):])
            my_pipe.consume(len(chunk2))
            await my_pipe.close()
            # now a read should return immediately with the unconsumed data
            rx_data = await my_pipe.read()
            np.testing.assert_array_equal(chunk3, rx_data)
            # the pipe should be empty but still return the old data
            rx_data = await my_pipe.read()
            np.testing.assert_array_equal(chunk3, rx_data)
            # only after consuming the remaining data does it raise an exception
            my_pipe.consume(len(rx_data))
            # now that everything has been consumed, the next read raises
            with self.assertRaises(EmptyPipeError):
                await my_pipe.read()
            # the pipe should be empty
            self.assertTrue(my_pipe.is_empty())

        asyncio.run(reader())
Example 21
 async def test_insert_error_on_overlapping_data(self):
     # when the nilmdb server rejects the data (e.g. overlapping timestamps)
     await self.store.initialize([self.stream1])
     self.fake_nilmdb.streams['/joule/1'] = FakeStream(
         layout=self.stream1.layout, start=0, end=500, rows=500)
     data = helpers.create_data(layout="int8_3", start=25, step=1, length=20)
     with self.assertRaises(DataError) as e:
         await self.store.insert(self.stream1, 25, 1000, data)
     self.assertTrue('overlaps' in str(e.exception))
Example 22
    async def _test_consolidate(self):
        # intervals less than max_gap microseconds apart are consolidated
        # data: 955 samples spaced at 1000us
        test_stream = DataStream(
            id=1,
            name="stream1",
            datatype=DataStream.DATATYPE.FLOAT32,
            keep_us=DataStream.KEEP_ALL,
            decimate=True,
            elements=[Element(name="e%d" % x) for x in range(3)])
        pipe = pipes.LocalPipe(test_stream.layout)
        nrows = 955
        orig_data = helpers.create_data(layout=test_stream.layout,
                                        length=nrows)
        chunks = [
            orig_data[:300], orig_data[305:400], orig_data[402:700],
            orig_data[800:850], orig_data[852:]
        ]
        # data: |++++++|  |+++++++++|    |++++++|    |++++|  |++++|
        #               ^--5000 us    ^--2000 us   |        ^--- 2000 us
        #                                          `---0.1 sec (retained)
        task = await self.store.spawn_inserter(test_stream, pipe)
        for chunk in chunks:
            await pipe.write(chunk)
            await pipe.close_interval()
        await pipe.close()
        await task

        # extract data
        rx_chunks = []

        async def callback(rx_data, layout, factor):
            if rx_data[0] != pipes.interval_token(layout):
                rx_chunks.append(rx_data)

        await self.store.consolidate(test_stream,
                                     start=None,
                                     end=None,
                                     max_gap=6e3)
        await self.store.extract(test_stream,
                                 start=None,
                                 end=None,
                                 callback=callback)

        # only two intervals should remain: the first three chunks merge, as do the last two
        np.testing.assert_array_equal(rx_chunks[0], np.hstack(chunks[:3]))
        np.testing.assert_array_equal(rx_chunks[1], np.hstack(chunks[3:]))
        self.assertEqual(len(rx_chunks), 2)
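
The two surviving intervals follow from the gap sizes. A sketch of the arithmetic, assuming rows spaced 1000 us apart as the comments above indicate:

# rows skipped between consecutive chunks: [300:305], [400:402], [700:800], [850:852]
gaps_rows = [305 - 300, 402 - 400, 800 - 700, 852 - 850]
gaps_us = [g * 1000 for g in gaps_rows]  # [5000, 2000, 100000, 2000]
merged = [g <= 6000 for g in gaps_us]    # gaps under max_gap=6e3 are consolidated
assert merged.count(False) + 1 == 2      # one surviving break -> two intervals
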
Example 23
    async def test_write_data(self):
        db: Session = self.app["db"]
        store: MockStore = self.app['data-store']
        stream: DataStream = db.query(DataStream).filter_by(name="stream1").one()
        data = helpers.create_data(stream.layout)
        resp: aiohttp.ClientResponse = await \
            self.client.post("/data", params={"path": "/folder1/stream1"},
                             data=data.tobytes())
        self.assertEqual(resp.status, 200)
        self.assertTrue(store.inserted_data)

        # can write stream by id as well
        store.inserted_data = False
        data = helpers.create_data(stream.layout)
        resp: aiohttp.ClientResponse = await \
            self.client.post("/data", params={"id": stream.id},
                             data=data.tobytes())
        self.assertEqual(resp.status, 200)
        self.assertTrue(store.inserted_data)
Example 24
 def test_raises_dtype_errors(self):
     pipe = Pipe(layout="uint8_4")
     # data for a different stream type
     data1 = helpers.create_data("float32_3")
     # invalid array structure (must be 2D)
     data2 = np.ones((3, 3, 3))
     # invalid structured array
     data3 = np.array([('timestamp', 1, 2, 3), ('bad', 1, 2, 3)])
     for data in [data1, data2, data3]:
         with self.assertRaises(PipeError):
             _ = pipe._apply_dtype(data)
Example 25
 async def test_inserter_data_error(self):
     # when the stream has invalid data (e.g. bad timestamps)
     self.stream1.datatype = DataStream.DATATYPE.UINT16
     source = QueueReader()
     pipe = pipes.InputPipe(stream=self.stream1, reader=source)
     task = await self.store.spawn_inserter(self.stream1, pipe, insert_period=0)
     await source.put(helpers.create_data(layout="uint16_3").tobytes())
     # when nilmdb server generates an error
     self.fake_nilmdb.generate_error_on_path("/joule/1", 400, "bad data")
     with self.assertRaises(DataError):
         await task
Example 26
 async def test_invalid_writes_propoage_data_error(self):
     db: Session = self.app["db"]
     store: MockStore = self.app['data-store']
     store.raise_data_error = True
     stream: DataStream = db.query(DataStream).filter_by(
         name="stream1").one()
     data = helpers.create_data(stream.layout)
     resp: aiohttp.ClientResponse = await \
         self.client.post("/data", params={"path": "/folder1/stream1"},
                          data=data.tobytes())
     self.assertEqual(resp.status, 400)
     self.assertIn('test error', await resp.text())
Example 27
def create_source_data(server):
    # create the source stream
    src = DataStream(id=0, name="source", keep_us=100, datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x, display_type=Element.DISPLAYTYPE.CONTINUOUS) for x in range(3)]

    # source has 100 rows of data
    src_data = helpers.create_data(src.layout, length=100, start=0, step=1)
    ts = src_data['timestamp']
    intervals = [[ts[0], ts[99]]]
    src_info = StreamInfo(int(src_data['timestamp'][0]), int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, src_data, intervals)
    return src_data
Example 28
def create_source_data(server, is_destination=False):
    # create the source stream
    src = DataStream(id=0,
                     name="source",
                     keep_us=100,
                     datatype=DataStream.DATATYPE.UINT8,
                     is_destination=is_destination)
    src.elements = [
        Element(name="e%d" % x,
                index=x,
                display_type=Element.DISPLAYTYPE.CONTINUOUS) for x in range(3)
    ]

    # source has 100 rows of data
    src_data = np.hstack(
        (helpers.create_data(src.layout), interval_token(src.layout),
         helpers.create_data(src.layout)))
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]), len(src_data))
    server.add_stream('/test/source', src, src_info, src_data,
                      [[src_info.start, src_info.end]])
    return src_data
Example 29
    def test_when_server_returns_error_code(self):
        server = FakeJoule()
        # create the source stream
        src = DataStream(id=0,
                         name="source",
                         keep_us=100,
                         datatype=DataStream.DATATYPE.FLOAT32)
        src.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]
        # source has 200 rows of data between [0, 200] in two intervals
        src_data = np.hstack((helpers.create_data(src.decimated_layout,
                                                  start=0,
                                                  length=100,
                                                  step=1),
                              pipes.interval_token(src.decimated_layout),
                              helpers.create_data(src.decimated_layout,
                                                  start=100,
                                                  length=100,
                                                  step=1)))

        src_info = StreamInfo(int(src_data['timestamp'][0]),
                              int(src_data['timestamp'][-1]), len(src_data))
        server.add_stream('/test/source', src, src_info, src_data)

        server.response = "test error"
        server.http_code = 500
        server.stub_data_read = True
        self.start_server(server)
        runner = CliRunner()

        with self.assertLogs(level=logging.ERROR):
            runner.invoke(main,
                          ['data', 'read', '/test/source', '--start', 'now'])

        self.stop_server()
Example 30
 async def test_retries_when_nilmdb_is_not_available(self):
     # when nilmdb server is not available the inserter should retry
     self.stream1.datatype = DataStream.DATATYPE.UINT16
     source = QueueReader()
     await self.fake_nilmdb.stop()
     await source.put(helpers.create_data(layout="uint16_3").tobytes())
     pipe = pipes.InputPipe(stream=self.stream1, reader=source)
     with self.assertLogs(level="WARNING") as logs:
         task = await self.store.spawn_inserter(self.stream1, pipe, retry_interval=0.05)
         await asyncio.sleep(0.1)
         task.cancel()
         await task
     self.assertTrue("retrying request" in ''.join(logs.output))