Example 1
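A writer coroutine that pushes test_data through a pipe in 4-row chunks, closes the interval to flush buffered data, then writes one more block past the interval break and flushes the cache.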
async def writer():
    for block in helpers.to_chunks(test_data, 4):
        await asyncio.sleep(0.1)
        await my_pipe.write(block)
    # closing the interval should flush the data
    await my_pipe.close_interval()
    # add a dummy section after the interval break
    await my_pipe.write(np.ones((35, 3)))
    await my_pipe.flush_cache()
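helpers.to_chunks is used throughout these examples but never shown. A minimal sketch of what such a helper might look like (a hypothetical stand-in, not the project's actual implementation):

import numpy as np

def to_chunks(data: np.ndarray, chunk_size: int):
    # hypothetical stand-in for helpers.to_chunks; the real helper may differ
    # yields successive chunk_size-row slices of the array
    for start in range(0, len(data), chunk_size):
        yield data[start:start + chunk_size]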
Example 2
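Inserts uint16 data through an InputPipe with decimation disabled and verifies that the fake NilmDB backend ends up with a single stream containing every row.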
    async def test_nondecimating_inserter(self):
        self.stream1.decimate = False
        self.stream1.datatype = DataStream.DATATYPE.UINT16
        source = QueueReader()
        pipe = pipes.InputPipe(stream=self.stream1, reader=source)
        nrows = 896
        data = helpers.create_data(layout="uint16_3", length=nrows)
        task = await self.store.spawn_inserter(self.stream1, pipe, insert_period=0)
        # feed the inserter in 300-row chunks, then wait for it to drain
        for chunk in helpers.to_chunks(data, 300):
            await source.put(chunk.tobytes())
        await task

        # with decimation disabled, only the raw stream should exist
        self.assertEqual(self.fake_nilmdb.streams["/joule/1"].rows, nrows)
        self.assertEqual(len(self.fake_nilmdb.streams), 1)
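helpers.create_data is also assumed here. Judging from its usage (layout strings like "uint16_3", and a 'data' field accessed in Example 8), it builds a timestamped structured array. A rough, hypothetical sketch; the field names and timestamp spacing are guesses:

import time
import numpy as np

def create_data(layout: str, length: int = 100) -> np.ndarray:
    # hypothetical stand-in for helpers.create_data; the real helper may differ
    dtype_name, n_elem = layout.split('_')
    n_elem = int(n_elem)
    dtype = np.dtype([('timestamp', '<i8'), ('data', dtype_name, (n_elem,))])
    arr = np.zeros(length, dtype=dtype)
    start = int(time.time() * 1e6)
    arr['timestamp'] = start + np.arange(length) * 1000  # 1 ms spacing (guess)
    arr['data'] = (np.random.rand(length, n_elem) * 100).astype(dtype_name)
    return arr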
Example 3
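The same insert path with decimation enabled: besides the raw stream, the backend should hold decimated streams at levels 4, 16, 64, and 256, each with floor(nrows / factor) rows.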
    async def test_decimating_inserter(self):
        self.stream1.decimate = True
        source = QueueReader()
        pipe = pipes.InputPipe(stream=self.stream1, reader=source)
        nrows = 955
        data = helpers.create_data(layout="int8_3", length=nrows)
        task = await self.store.spawn_inserter(self.stream1, pipe)
        for chunk in helpers.to_chunks(data, 300):
            await source.put(chunk.tobytes())
        await task
        self.assertEqual(len(self.fake_nilmdb.streams), 6)
        self.assertEqual(self.fake_nilmdb.streams["/joule/1"].rows, nrows)
        # each decimation level holds floor(nrows / factor) rows
        self.assertEqual(self.fake_nilmdb.streams["/joule/1~decim-4"].rows,
                         np.floor(nrows / 4))
        self.assertEqual(self.fake_nilmdb.streams["/joule/1~decim-16"].rows,
                         np.floor(nrows / 16))
        self.assertEqual(self.fake_nilmdb.streams["/joule/1~decim-64"].rows,
                         np.floor(nrows / 64))
        self.assertEqual(self.fake_nilmdb.streams["/joule/1~decim-256"].rows,
                         np.floor(nrows / 256))
Example 4
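Writes data in 300-row chunks separated by interval breaks, extracts everything back, and checks that interval tokens mark the boundaries.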
    async def _test_extract_data_with_intervals(self):
        test_stream = DataStream(
            id=1,
            name="stream1",
            datatype=DataStream.DATATYPE.FLOAT32,
            keep_us=DataStream.KEEP_ALL,
            decimate=True,
            elements=[Element(name="e%d" % x) for x in range(3)])
        pipe = pipes.LocalPipe(test_stream.layout)
        nrows = 955
        data = helpers.create_data(layout=test_stream.layout, length=nrows)
        task = await self.store.spawn_inserter(test_stream, pipe)
        for chunk in helpers.to_chunks(data, 300):
            await pipe.write(chunk)
            await pipe.close_interval()
        await pipe.close()
        await task

        # extract data
        extracted_data = []

        async def callback(rx_data, layout, factor):
            self.assertEqual(layout, test_stream.layout)
            self.assertEqual(factor, 1)
            extracted_data.append(rx_data)

        await self.store.extract(test_stream,
                                 start=None,
                                 end=None,
                                 callback=callback)
        extracted_data = np.hstack(extracted_data)
        # check for interval boundaries: each interval token occupies one
        # row, so successive boundaries shift by one (300, 601, 902)
        np.testing.assert_array_equal(extracted_data[300],
                                      pipes.interval_token(test_stream.layout))
        np.testing.assert_array_equal(extracted_data[601],
                                      pipes.interval_token(test_stream.layout))
        np.testing.assert_array_equal(extracted_data[902],
                                      pipes.interval_token(test_stream.layout))
Example 5
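An aiohttp handler that mocks a NilmDB stream/extract endpoint: unknown paths get a 404 JSON body, count queries get the row count as plain text, and everything else gets chunked binary data.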
    async def stream_extract(self, request: web.Request):
        self.extract_calls.append(request.query)
        try:
            stream = self.streams[request.query["path"]]
        except KeyError:
            return web.json_response(
                {
                    "status": "404 Not Found",
                    "message": "No such stream: %s" % request.query["path"],
                    "traceback": ""
                },
                status=404)

        if "count" in request.query:
            return web.Response(text=str(stream.rows))
        # return chunked data
        data = helpers.create_data(stream.layout, length=stream.rows)
        resp = web.StreamResponse()
        resp.enable_chunked_encoding()
        await resp.prepare(request)
        for chunk in helpers.to_chunks(data, 300):
            await resp.write(chunk.tobytes())
        return resp
Example 6
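A paced writer: 270-row chunks with a short sleep after each write.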
async def writer():
    for block in helpers.to_chunks(test_data, 270):
        await npipe_out.write(block)
        await asyncio.sleep(0.01)
Example 7
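A variation that sleeps before each write and closes the pipe once the data is exhausted.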
async def writer():
    for block in helpers.to_chunks(test_data, 270):
        await asyncio.sleep(0.1)
        await my_pipe.write(block)
    await my_pipe.close()
Example 8
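An end-to-end insert/extract test: for every datatype and element count it creates a stream, inserts data, checks the PostgreSQL tables and column types created in the data schema, round-trips the raw data, and compares level-64 decimated output against the mean/min/max of the first 64 raw rows.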
    async def _test_basic_insert_extract(self):
        stream_id = 990
        self.store.extract_block_size = 500
        psql_types = [
            'double precision', 'real', 'bigint', 'integer', 'smallint'
        ]
        datatypes = [
            DataStream.DATATYPE.FLOAT64, DataStream.DATATYPE.FLOAT32,
            DataStream.DATATYPE.INT64, DataStream.DATATYPE.INT32,
            DataStream.DATATYPE.INT16
        ]
        conn: asyncpg.Connection = await asyncpg.connect(self.db_url)
        for datatype, psql_type in zip(datatypes, psql_types):
            for n_elements in range(1, 5):
                test_stream = DataStream(id=stream_id,
                                         name="stream1",
                                         datatype=datatype,
                                         keep_us=DataStream.KEEP_ALL,
                                         elements=[
                                             Element(name="e%d" % x)
                                             for x in range(n_elements)
                                         ])
                test_stream.decimate = True
                source = QueueReader()
                pipe = pipes.InputPipe(stream=test_stream, reader=source)
                nrows = 955
                data = helpers.create_data(layout=test_stream.layout,
                                           length=nrows)
                task = await self.store.spawn_inserter(test_stream, pipe)
                for chunk in helpers.to_chunks(data, 300):
                    await source.put(chunk.tobytes())
                await task

                # make sure the correct tables have been created
                records = await conn.fetch(
                    '''SELECT table_name FROM information_schema.tables
                       WHERE table_schema='data';''')
                tables = list(itertools.chain(*records))
                for table in [
                        'stream%d' % stream_id,
                        'stream%d_intervals' % stream_id
                ]:
                    self.assertIn(table, tables)

                # check the column data types
                records = await conn.fetch(
                    '''SELECT column_name, data_type FROM information_schema.columns
                       WHERE table_name='stream%d' AND table_schema='data';'''
                    % stream_id)
                (names, types) = zip(*records)
                expected_elements = ['time'] + [
                    'elem%d' % x for x in range(n_elements)
                ]
                self.assertCountEqual(names, expected_elements)
                expected_psql_types = tuple(
                    ['timestamp without time zone'] +
                    [psql_type for x in range(n_elements)])
                self.assertEqual(types, expected_psql_types)
                self.assertEqual(len(records), n_elements + 1)

                # extract raw data
                extracted_data = []

                async def callback(rx_data, layout, factor):
                    self.assertEqual(layout, test_stream.layout)
                    self.assertEqual(factor, 1)
                    extracted_data.append(rx_data)

                await self.store.extract(test_stream,
                                         start=None,
                                         end=None,
                                         callback=callback)
                extracted_data = np.hstack(extracted_data)
                np.testing.assert_array_equal(extracted_data, data)

                # compute the expected first level-64 decimated row
                # (mean/min/max over the first 64 raw rows)
                level = 64
                data_mean = np.mean(extracted_data['data'][:level], axis=0)
                data_max = np.max(extracted_data['data'][:level], axis=0)
                data_min = np.min(extracted_data['data'][:level], axis=0)

                # extract decimated data
                async def d_callback(rx_data, layout, factor):
                    self.assertEqual(layout, test_stream.decimated_layout)
                    self.assertEqual(factor, level)
                    extracted_data.append(rx_data)

                extracted_data = []
                await self.store.extract(test_stream,
                                         decimation_level=level,
                                         start=None,
                                         end=None,
                                         callback=d_callback)
                extracted_data = np.hstack(extracted_data)
                expected = np.hstack((data_mean, data_min, data_max))
                np.testing.assert_array_almost_equal(expected,
                                                     extracted_data['data'][0])
                stream_id += 1
        await conn.close()