Example 1
    def test_has_json_representation(self):
        # create an input stream
        src = DataStream(id=0,
                         name="source",
                         keep_us=100,
                         datatype=DataStream.DATATYPE.FLOAT32)
        src.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]
        # create an output stream
        dest = DataStream(id=1,
                          name="dest",
                          keep_us=100,
                          datatype=DataStream.DATATYPE.UINT16)
        dest.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.EVENT) for x in range(5)
        ]
        m = module.from_config(self.config)
        m.inputs = {'input': src}
        m.outputs = dict(output=dest)
        result = m.to_json()
        # make sure basic attributes are in the output
        self.assertEqual(result['name'], 'test')
        self.assertEqual(result['exec_cmd'], '/bin/runit.sh')
        # make sure inputs are included (name: id)
        self.assertEqual(result['inputs']['input'], 0)
        self.assertEqual(result['outputs']['output'], 1)
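
From these assertions, m.to_json() evidently serializes the module to a plain dictionary. A sketch of the inferred shape; only the asserted keys are certain, anything else is an assumption:

    # inferred shape of the serialized module (asserted keys only)
    expected = {
        "name": "test",
        "exec_cmd": "/bin/runit.sh",
        "inputs": {"input": 0},     # stream name -> stream id
        "outputs": {"output": 1},
    }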
Example 2
    async def _test_row_count(self):
        test_data = helpers.create_data(layout=self.test_stream.layout,
                                        length=10000)
        test_stream = DataStream(
            id=95,
            name="stream1",
            datatype=DataStream.DATATYPE.FLOAT32,
            keep_us=DataStream.KEEP_ALL,
            decimate=True,
            elements=[Element(name="e%d" % x) for x in range(3)])
        pipe = pipes.LocalPipe(test_stream.layout)
        task = await self.store.spawn_inserter(test_stream, pipe)
        await pipe.write(test_data)
        await pipe.close()
        await task
        conn: asyncpg.Connection = await asyncpg.connect(self.db_url)
        # test to make sure nrows is within 10% of actual value
        # Test: [start, end]
        nrows = await psql_helpers.get_row_count(conn, test_stream, None, None)
        self.assertGreater(nrows, len(test_data) * 0.9)
        # Test: [ts,end]
        nrows = await psql_helpers.get_row_count(
            conn, test_stream, test_data[len(test_data) // 2][0], None)
        self.assertLess(abs(nrows - len(test_data) // 2), 0.1 * len(test_data))
        # Test: [start, ts]
        nrows = await psql_helpers.get_row_count(
            conn, test_stream, None, test_data[len(test_data) // 3][0])
        self.assertLess(abs(nrows - len(test_data) // 3), 0.1 * len(test_data))

        # Test: [ts, ts]
        nrows = await psql_helpers.get_row_count(
            conn, test_stream, test_data[2 * len(test_data) // 6][0],
            test_data[3 * len(test_data) // 6][0])
        self.assertLess(abs(nrows - len(test_data) // 6), 0.1 * len(test_data))

        # Test: [ts, ts] (no data)
        nrows = await psql_helpers.get_row_count(
            conn, test_stream, test_data['timestamp'][0] - 100,
            test_data['timestamp'][0] - 50)
        self.assertEqual(0, nrows)

        # Test row count for stream with no data tables
        empty_stream = DataStream(
            id=96,
            name="empty",
            datatype=DataStream.DATATYPE.FLOAT64,
            keep_us=100,
            decimate=True,
            elements=[Element(name="e%d" % x) for x in range(8)])
        nrows = await psql_helpers.get_row_count(conn, empty_stream, None,
                                                 None)
        self.assertEqual(0, nrows)
        nrows = await psql_helpers.get_row_count(
            conn, empty_stream, test_data[len(test_data) // 2][0], None)
        self.assertEqual(0, nrows)
        nrows = await psql_helpers.get_row_count(
            conn, empty_stream, test_data['timestamp'][0] - 100,
            test_data['timestamp'][0] - 50)
        self.assertEqual(0, nrows)
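
The 10% tolerance in these assertions implies get_row_count returns an estimate rather than an exact COUNT(*). As a point of reference, here is a minimal sketch of one common estimation technique using PostgreSQL planner statistics; the helper name and the pg_class approach are assumptions, not the actual psql_helpers implementation:

    import asyncpg

    async def estimated_row_count(conn: asyncpg.Connection, table: str) -> int:
        # hypothetical estimator: read the planner's row estimate from the
        # catalog instead of scanning the table with COUNT(*)
        row = await conn.fetchrow(
            "SELECT reltuples::bigint AS estimate FROM pg_class WHERE relname = $1",
            table)
        return 0 if row is None else row['estimate']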
Example 3
    async def _test_info(self):
        # create another stream
        empty_stream = DataStream(
            id=103,
            name="empty stream",
            datatype=DataStream.DATATYPE.INT32,
            keep_us=DataStream.KEEP_ALL,
            decimate=True,
            elements=[Element(name="e%d" % x) for x in range(8)])
        stream2 = DataStream(
            id=104,
            name="stream2",
            datatype=DataStream.DATATYPE.INT32,
            keep_us=DataStream.KEEP_ALL,
            decimate=True,
            elements=[Element(name="e%d" % x) for x in range(8)])
        pipe = pipes.LocalPipe(stream2.layout)
        test_data = helpers.create_data(layout=stream2.layout, length=800)
        task = await self.store.spawn_inserter(stream2, pipe)
        await pipe.write(test_data)
        await pipe.close()
        await task
        records = await self.store.info(
            [self.test_stream, stream2, empty_stream])
        # check stream1
        info = records[self.test_stream.id]
        self.assertEqual(info.start, self.test_data['timestamp'][0])
        self.assertEqual(info.end, self.test_data['timestamp'][-1])
        self.assertEqual(info.total_time, info.end - info.start)
        # rows are approximate
        self.assertLess(abs(len(self.test_data) - info.rows),
                        len(self.test_data) * 0.1)
        self.assertGreater(info.bytes, 0)

        # check stream2
        info = records[stream2.id]
        self.assertEqual(info.start, test_data['timestamp'][0])
        self.assertEqual(info.end, test_data['timestamp'][-1])
        self.assertEqual(info.total_time, info.end - info.start)
        self.assertLess(abs(len(test_data) - info.rows), len(test_data) * 0.1)
        self.assertGreater(info.bytes, 0)

        # check the empty stream
        info = records[empty_stream.id]
        self.assertEqual(info.start, None)
        self.assertEqual(info.end, None)
        self.assertEqual(info.total_time, 0)
        self.assertEqual(info.rows, 0)
        self.assertEqual(info.bytes, 0)
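
Taken together, the assertions show the shape of the records returned by store.info(). A sketch of an equivalent record, with field names inferred from the assertions only (the actual class may differ):

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class InfoRecord:  # hypothetical stand-in for the store's info record
        start: Optional[int]   # first timestamp in us, None for empty streams
        end: Optional[int]     # last timestamp in us, None for empty streams
        total_time: int        # end - start, 0 for empty streams
        rows: int              # approximate row count
        bytes: int             # storage size, 0 for empty streams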
Example 4
    async def setUp(self):
        self.fake_nilmdb = FakeNilmdb()
        url = await self.fake_nilmdb.start()

        # use a 0 insert period for test execution
        self.store = NilmdbStore(url, 0, 60)

        # make a couple example streams
        # stream1 int8_3
        self.stream1 = DataStream(id=1, name="stream1", datatype=DataStream.DATATYPE.INT8,
                                  elements=[Element(name="e%d" % x) for x in range(3)])

        # stream2 uint16_4
        self.stream2 = DataStream(id=2, name="stream2", datatype=DataStream.DATATYPE.UINT16,
                                  elements=[Element(name="e%d" % x) for x in range(4)])
Example 5
    def test_copies_all_data(self):
        server = FakeJoule()
        # create the source and destination streams
        src_data = create_source_data(server)
        # dest is empty
        dest = DataStream(id=1,
                          name="dest",
                          keep_us=100,
                          datatype=DataStream.DATATYPE.FLOAT32)
        dest.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]
        server.add_stream('/test/destination', dest, StreamInfo(None, None, 0),
                          None)

        self.start_server(server)
        runner = CliRunner()
        result = runner.invoke(
            main, ['data', 'copy', '/test/source', '/test/destination'])
        _print_result_on_error(result)
        self.assertEqual(result.exit_code, 0)
        while self.msgs.empty():
            time.sleep(0.1)
            print("waiting...")
        mock_entry = self.msgs.get()
        np.testing.assert_array_equal(src_data, mock_entry.data)
        # self.assertEqual(len(mock_entry.intervals), 3)
        self.stop_server()
Example 6
    def test_creates_stream_if_necessary(self):
        server = FakeJoule()
        # create the source stream
        src = DataStream(id=0,
                         name="source",
                         keep_us=100,
                         datatype=DataStream.DATATYPE.FLOAT32)
        src.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]

        # source has 4 rows of data
        src_data = helpers.create_data(src.layout, length=4)
        src_info = StreamInfo(int(src_data['timestamp'][0]),
                              int(src_data['timestamp'][-1]), len(src_data))
        server.add_stream('/test/source', src, src_info, src_data,
                          [[src_info.start, src_info.end]])

        self.start_server(server)
        runner = CliRunner()
        result = runner.invoke(
            main, ['data', 'copy', '/test/source', '/test/destination'])
        _print_result_on_error(result)
        self.assertEqual(result.exit_code, 0)
        while self.msgs.empty():
            time.sleep(0.1)
            print("waiting...")
        mock_entry = self.msgs.get()
        np.testing.assert_array_equal(src_data, mock_entry.data)
        self.stop_server()
Example 7
    def setUp(self):
        self.loop = asyncio.new_event_loop()
        self.loop.set_debug(True)
        logging.getLogger('asyncio').setLevel(logging.DEBUG)
        asyncio.set_event_loop(self.loop)
        # generic float32_4 streams
        streams = [DataStream(name="str%d" % n, datatype=DataStream.DATATYPE.FLOAT32,
                              id=n, elements=[Element(name="e%d" % j, index=j,
                                                      display_type=Element.DISPLAYTYPE.CONTINUOUS) for j in range(3)])
                   for n in range(5)]  # the 5th stream is not produced
        self.streams = streams
        # [producer1] --<str0>--,-------------,--<str0,str2>--[consumer1]
        #                       +---[module]--+
        # [producer2] --<str1>--`             `--<str2,str3>--[consumer2]

        self.module = Module(name="module", exec_cmd="/bin/true",
                             description="test module",
                             is_app=False, uuid=123)
        self.module.inputs = {"input1": streams[0], "input2": streams[1]}
        self.module.outputs = {"output1": streams[2], "output2": streams[3]}
        self.module.log_size = LOG_SIZE
        self.worker = Worker(self.module)
        m_producers = [Module(name="producer1", exec_cmd="/bin/runit.sh"),
                       Module(name="producer2", exec_cmd="/bin/runit.sh")]
        m_producers[0].outputs = {"output": streams[0]}
        m_producers[1].outputs = {"output": streams[1]}
        self.producers: List[Worker] = [Worker(m) for m in m_producers]
        m_consumers = [Module(name="consumer1", exec_cmd="/bin/runit.sh"),
                       Module(name="consumer2", exec_cmd="/bin/runit.sh")]
        m_consumers[0].inputs = {"input1": streams[0], "input2": streams[2]}
        m_consumers[1].inputs = {"input1": streams[2], "input2": streams[3]}
        self.consumers: List[Worker] = [Worker(m) for m in m_consumers]
        self.supervisor = Supervisor(self.producers + self.consumers, [], None)
Example 8
def create_source_data(server, no_intervals=False):
    # create the source stream
    src = DataStream(id=0,
                     name="source",
                     keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [
        Element(name="e%d" % x,
                index=x,
                display_type=Element.DISPLAYTYPE.CONTINUOUS) for x in range(3)
    ]

    # source has 100 rows of data in four intervals
    src_data = helpers.create_data(src.layout,
                                   length=100,
                                   start=1548353881 * 1e6,
                                   step=1e6)

    ts = src_data['timestamp']

    if no_intervals:
        intervals = []
        src_info = StreamInfo(None, None, 0)

    else:
        intervals = [[ts[0], ts[24]], [ts[25], ts[49]], [ts[50], ts[74]],
                     [ts[75], ts[99]]]
        src_info = StreamInfo(intervals[0][0], intervals[-1][1], len(src_data))

    server.add_stream('/test/source', src, src_info, src_data, intervals)
    return intervals
Example 9
    def test_creates_nilmdb_stream_if_necessary(self):
        source_server = FakeJoule()
        nilmdb_msgs = multiprocessing.Queue()
        dest_url = self._start_nilmdb(nilmdb_msgs)
        # create the source stream and an empty destination entry on the source server
        src_data = create_source_data(source_server)

        dest_stream = DataStream(id=1, name="dest", keep_us=100, datatype=DataStream.DATATYPE.FLOAT32)
        dest_stream.elements = [Element(name="e%d" % x, index=x, display_type=Element.DISPLAYTYPE.CONTINUOUS) for x in
                                range(3)]
        source_server.add_stream('/test/destination', dest_stream, StreamInfo(None, None, 0), None)

        self.start_server(source_server)
        runner = CliRunner()
        result = runner.invoke(main, ['data', 'copy',
                                      '-d', dest_url, '/test/source', '/test/destination'], catch_exceptions=False)
        _print_result_on_error(result)
        # expect a stream create call
        nilmdb_call = nilmdb_msgs.get()
        self.assertEqual('stream_create', nilmdb_call['action'])
        self.assertEqual({'path': '/test/destination', 'layout': 'float32_3'}, nilmdb_call['params'])
        # expect a metadata call
        nilmdb_call = nilmdb_msgs.get()
        self.assertEqual('set_metadata', nilmdb_call['action'])
        self.assertEqual('/test/destination', nilmdb_call['params']['path'])
        self.assertEqual('config_key__', list(json.loads(nilmdb_call['params']['data']).keys())[0])
        # expect data transfer call
        nilmdb_call = nilmdb_msgs.get()
        self.assertEqual('stream_insert', nilmdb_call['action'])
        data = nilmdb_call['data']
        np.testing.assert_array_equal(src_data, data)
        self.assertEqual(0, result.exit_code)
        self._stop_nilmdb()
        self.stop_server()
        del nilmdb_msgs
Example 10
    def test_copies_data_to_nilmdb(self):

        source_server = FakeJoule()
        nilmdb_msgs = multiprocessing.Queue()
        dest_url = self._start_nilmdb(nilmdb_msgs)
        # create the source and destination streams
        src_data = create_source_data(source_server)

        dest_stream = DataStream(id=1, name="dest", keep_us=100, datatype=DataStream.DATATYPE.FLOAT32)
        dest_stream.elements = [Element(name="e%d" % x, index=x, display_type=Element.DISPLAYTYPE.CONTINUOUS) for x in
                                range(3)]
        source_server.add_stream('/test/destination', dest_stream, StreamInfo(None, None, 0), None)

        self.start_server(source_server)
        runner = CliRunner()
        result = runner.invoke(main, ['data', 'copy',
                                      '-d', dest_url, '/test/source', '/existing/float32_3'],
                               input='y\n', catch_exceptions=True)
        _print_result_on_error(result)
        # expect data transfer call
        nilmdb_call = nilmdb_msgs.get()
        self.assertEqual('stream_insert', nilmdb_call['action'])
        data = nilmdb_call['data']
        np.testing.assert_array_equal(src_data, data)
        self.assertEqual(0, result.exit_code)
        self._stop_nilmdb()
        self.stop_server()
        del nilmdb_msgs
Example 11
    def test_when_destination_is_invalid(self):
        server = FakeJoule()
        # create the source stream
        src = DataStream(id=0,
                         name="source",
                         keep_us=100,
                         datatype=DataStream.DATATYPE.FLOAT32)
        src.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]

        # source has 4 rows of data
        src_data = helpers.create_data(src.layout, length=4)
        src_info = StreamInfo(int(src_data['timestamp'][0]),
                              int(src_data['timestamp'][-1]), len(src_data))

        server.add_stream('/test/source', src, src_info, np.ndarray([]))
        self.start_server(server)
        runner = CliRunner()
        result = runner.invoke(main,
                               ['data', 'copy', '/test/source', 'badpath'])
        self.assertTrue('Error' in result.output
                        and 'destination' in result.output)
        self.assertEqual(result.exit_code, 1)
        self.stop_server()
Example 12
    def test_when_server_returns_error_code(self):
        server = FakeJoule()
        error_msg = "test error"
        error_code = 500
        server.response = error_msg
        server.stub_stream_destroy = True
        server.http_code = error_code
        # actually create a stream so the stubbed API call is the delete one
        src = DataStream(id=0,
                         name="source",
                         keep_us=100,
                         datatype=DataStream.DATATYPE.FLOAT32)
        src.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]
        src_info = StreamInfo(0, 0, 0)
        server.add_stream('/folder/stream', src, src_info, None, [])

        self.start_server(server)
        runner = CliRunner()
        result = runner.invoke(main, ['stream', 'delete', '/folder/stream'],
                               input='y\n')
        self.assertTrue('%d' % error_code in result.output)
        self.assertTrue(error_msg in result.output)
        self.assertEqual(result.exit_code, 1)
        self.stop_server()
Example 13
    def test_datatype_mismatch(self):
        #  the datatype of the file and target stream must match
        server = FakeJoule()
        # create the destination stream
        src = DataStream(id=0,
                         name="dest",
                         keep_us=100,
                         datatype=DataStream.DATATYPE.UINT16)
        src.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]
        # the file contains int16_3 data, which will not match the uint16_3 stream
        file_data = helpers.create_data('int16_3')
        src_info = StreamInfo(0, 0, 0, 0)
        server.add_stream('/test/dest', src, src_info, None)
        self.start_server(server)
        runner = CliRunner()
        with tempfile.NamedTemporaryFile() as data_file:
            write_hd5_data(data_file, file_data)
            result = runner.invoke(
                main, ['data', 'ingest', '--file', data_file.name])
            self.assertIn("datatype", result.output)
            self.assertNotEqual(result.exit_code, 0)

        self.stop_server()
Example 14
    async def test_stream_create(self):
        db: Session = self.app["db"]
        new_stream = DataStream(name="test", datatype=DataStream.DATATYPE.FLOAT32)
        new_stream.elements = [Element(name="e%d" % j, index=j,
                                       display_type=Element.DISPLAYTYPE.CONTINUOUS) for j in range(3)]
        payload = {
            "dest_path": "/deep/new folder",
            "stream": new_stream.to_json()
        }
        resp = await self.client.post("/stream.json", json=payload)

        self.assertEqual(resp.status, 200)
        # check the stream was created correctly
        created_stream: DataStream = db.query(DataStream).filter_by(name="test").one()
        self.assertEqual(len(created_stream.elements), len(new_stream.elements))
        self.assertEqual(created_stream.folder.name, "new folder")

        # can create by dest_id as well
        folder1: Folder = db.query(Folder).filter_by(name="folder1").one()
        new_stream.name = "test2"
        payload = {
            "dest_id": folder1.id,
            "stream": new_stream.to_json()
        }
        resp = await self.client.post("/stream.json", json=payload)

        self.assertEqual(resp.status, 200)
        # check the stream was created correctly
        created_stream: DataStream = db.query(DataStream).filter_by(name="test2").one()
        self.assertEqual(len(created_stream.elements), len(new_stream.elements))
        self.assertEqual(created_stream.folder.name, "folder1")
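
For context, the same endpoint could be exercised outside the test harness with any HTTP client. A minimal aiohttp sketch; the base URL is a placeholder:

    import aiohttp

    async def create_stream_via_api(base_url: str, payload: dict) -> int:
        # hypothetical client call mirroring the POST in the test above
        async with aiohttp.ClientSession() as session:
            async with session.post(base_url + "/stream.json", json=payload) as resp:
                return resp.status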
Example 15
    def test_confirms_data_removal(self):
        # if there is existing data in the target, confirm removal
        server = FakeJoule()
        # create the destination stream
        src = DataStream(id=0,
                         name="dest",
                         keep_us=100,
                         datatype=DataStream.DATATYPE.FLOAT32)
        src.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]
        # source has 1000 rows of data
        src_data = helpers.create_data(src.layout, length=1000)
        # File:    |----|
        # DataStream:     |-----|
        src_info = StreamInfo(int(src_data['timestamp'][500]),
                              int(src_data['timestamp'][-1]), 500)
        server.add_stream('/test/dest', src, src_info, src_data[500:])
        self.start_server(server)
        runner = CliRunner()
        with tempfile.NamedTemporaryFile() as data_file:
            write_hd5_data(data_file, src_data[:750])
            result = runner.invoke(
                main, ['data', 'ingest', '--file', data_file.name], input='N')
            _print_result_on_error(result)
            self.assertEqual(result.exit_code, 0)
            self.assertIn('Cancelled', result.output)

        self.stop_server()
Example 16
    def test_does_not_copy_existing_data(self):
        server = FakeJoule()
        # create the source and destination streams
        src_data = create_source_data(server)
        # dest has the same intervals as source so nothing is copied
        ts = src_data['timestamp']
        intervals = server.streams['/test/source'].intervals

        dest = DataStream(id=1,
                          name="dest",
                          keep_us=100,
                          datatype=DataStream.DATATYPE.FLOAT32)
        dest.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]
        server.add_stream('/test/destination', dest,
                          StreamInfo(int(ts[0]), int(ts[-1]), len(ts)),
                          src_data, intervals)
        self.start_server(server)
        runner = CliRunner()
        result = runner.invoke(main, [
            'data', 'copy', '--start',
            str(ts[0]), '--end',
            str(ts[-1]), '/test/source', '/test/destination'
        ])
        _print_result_on_error(result)
        self.assertEqual(result.exit_code, 0)
        # only the annotation requests were made (two per interval: src and dest)
        self.assertEqual(self.msgs.qsize(), len(intervals) * 2)
        self.stop_server()
Example 17
    def test_ingests_data_to_empty_existing_stream(self):
        server = FakeJoule()
        # create the source stream
        src = DataStream(id=0,
                         name="existing",
                         keep_us=100,
                         datatype=DataStream.DATATYPE.FLOAT32)
        src.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]
        src_data = helpers.create_data(src.layout, length=22000)
        src_info = StreamInfo(0, 0, 0, 0)
        server.add_stream('/test/existing', src, src_info, None)
        self.start_server(server)
        runner = CliRunner()
        with tempfile.NamedTemporaryFile() as data_file:
            write_hd5_data(data_file, src_data)
            result = runner.invoke(main, [
                'data', 'ingest', '--file', data_file.name, '--stream',
                '/test/existing'
            ])
            _print_result_on_error(result)
            self.assertEqual(result.exit_code, 0)
        db_obj = self.msgs.get()
        np.testing.assert_array_equal(src_data, db_obj.data)
        # uses the stream parameter instead of the hd5 attrs
        self.assertEqual(db_obj.stream.name, 'existing')
        self.stop_server()
Example 18
def create_source_data(server):
    # create the source stream
    src = DataStream(id=0,
                     name="source",
                     keep_us=100,
                     datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [
        Element(name="e%d" % x,
                index=x,
                display_type=Element.DISPLAYTYPE.CONTINUOUS) for x in range(3)
    ]

    # source has 100 rows of data in four intervals between [0, 100]
    src_data = helpers.create_data(src.layout, length=100, start=0, step=1)
    # insert the intervals
    pipe_data = np.hstack(
        (src_data[:25], pipes.interval_token(src.layout), src_data[25:50],
         pipes.interval_token(src.layout), src_data[50:75],
         pipes.interval_token(src.layout), src_data[75:]))
    ts = src_data['timestamp']
    intervals = [[ts[0], ts[24]], [ts[25], ts[49]], [ts[50], ts[74]],
                 [ts[75], ts[99]]]
    src_info = StreamInfo(int(src_data['timestamp'][0]),
                          int(src_data['timestamp'][-1]), len(src_data))
    server.add_stream('/test/source', src, src_info, pipe_data, intervals)
    return src_data
Example 19
    def test_warn_on_different_elements(self):
        server = FakeJoule()
        create_source_data(server)
        dest = DataStream(id=1,
                          name="dest",
                          keep_us=100,
                          datatype=DataStream.DATATYPE.FLOAT32)
        dest.elements = [
            Element(name="different%d" % x,
                    index=x,
                    units='other',
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]
        server.add_stream('/test/destination', dest, StreamInfo(None, None, 0),
                          None)
        self.start_server(server)
        runner = CliRunner()
        # does not copy without confirmation
        runner.invoke(main,
                      ['data', 'copy', '/test/source', '/test/destination'])
        self.assertTrue(self.msgs.empty())
        # copies with confirmation
        loop = asyncio.new_event_loop()
        loop.set_debug(True)
        asyncio.set_event_loop(loop)
        result = runner.invoke(
            main, ['data', 'copy', '/test/source', '/test/destination'],
            input='y\n')
        mock_entry = self.msgs.get()
        self.assertTrue(len(mock_entry.data) > 0)
        self.assertEqual(result.exit_code, 0)
        self.stop_server()
Example 20
    async def _test_consolidate(self):
        # intervals less than max_gap us apart are consolidated
        # data: 100 samples spaced at 1000us
        test_stream = DataStream(
            id=1,
            name="stream1",
            datatype=DataStream.DATATYPE.FLOAT32,
            keep_us=DataStream.KEEP_ALL,
            decimate=True,
            elements=[Element(name="e%d" % x) for x in range(3)])
        pipe = pipes.LocalPipe(test_stream.layout)
        nrows = 955
        orig_data = helpers.create_data(layout=test_stream.layout,
                                        length=nrows)
        chunks = [
            orig_data[:300], orig_data[305:400], orig_data[402:700],
            orig_data[800:850], orig_data[852:]
        ]
        # data: |++++++|  |+++++++++|    |++++++|    |++++|  |++++|
        #               ^--5000 us    ^--2000 us   |        ^--- 2000 us
        #                                          `---0.1 sec (retained)
        task = await self.store.spawn_inserter(test_stream, pipe)
        for chunk in chunks:
            await pipe.write(chunk)
            await pipe.close_interval()
        await pipe.close()
        await task

        # extract data
        extracted_data = []

        rx_chunks = []

        async def callback(rx_data, layout, factor):
            if rx_data[0] != pipes.interval_token(layout):
                rx_chunks.append(rx_data)

        await self.store.consolidate(test_stream,
                                     start=None,
                                     end=None,
                                     max_gap=6e3)
        await self.store.extract(test_stream,
                                 start=None,
                                 end=None,
                                 callback=callback)

        # only two intervals should remain: the first three chunks merge, as do the last two
        np.testing.assert_array_equal(rx_chunks[0], np.hstack(chunks[:3]))
        np.testing.assert_array_equal(rx_chunks[1], np.hstack(chunks[3:]))
        self.assertEqual(len(rx_chunks), 2)
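
The arithmetic behind the expected result, assuming samples are spaced at roughly 1000 us as the comments state: the chunk boundaries leave gaps of about 5000 us (rows 300-304), 2000 us (rows 400-401), 0.1 s (rows 700-799), and 2000 us (rows 850-851), and only gaps at or below max_gap are merged:

    gaps_us = [5_000, 2_000, 100_000, 2_000]
    max_gap = 6_000
    merged = [gap <= max_gap for gap in gaps_us]  # [True, True, False, True]
    # only the 0.1 second gap survives, so exactly two intervals remain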
Example 21
    def test_relationships(self):
        # root
        #  -folder1
        #     -stream11 (4 elements)
        #  -folder2
        #     -stream21 (4 elements)
        #  -stream1 (1 element)
        #  -stream2 (1 element)

        stream11 = DataStream(name="stream11",
                              datatype=DataStream.DATATYPE.FLOAT32)
        stream11.elements = [
            Element(name="e%d" % x, display_type=Element.DISPLAYTYPE.DISCRETE)
            for x in range(4)
        ]
        folder1 = Folder(name="folder1")
        folder1.data_streams.append(stream11)

        stream21 = DataStream(name="stream21",
                              datatype=DataStream.DATATYPE.UINT8)
        stream21.elements = [
            Element(name="e%d" % x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(4)
        ]
        folder2 = Folder(name="folder2")
        folder2.data_streams.append(stream21)

        stream1 = DataStream(name="stream1", datatype=DataStream.DATATYPE.INT8)
        stream1.elements.append(Element(name="e0"))

        stream2 = DataStream(name="stream2",
                             datatype=DataStream.DATATYPE.UINT64)
        stream2.elements.append(Element(name="e0"))

        root = Folder(name="root")
        root.children = [folder1, folder2]
        root.data_streams = [stream1, stream2]
        # check downward navigation
        self.assertEqual(len(root.children[0].data_streams[0].elements), 4)
        # check upward navigation
        e = stream11.elements[-1]
        self.assertEqual(e.stream.folder.parent.name, 'root')
Example 22
    def setUp(self):
        super().setUp()
        # module output is a float32_3 stream
        self.stream = DataStream(
            name="output",
            datatype=DataStream.DATATYPE.FLOAT32,
            elements=[
                Element(name="e%d" % j,
                        index=j,
                        display_type=Element.DISPLAYTYPE.CONTINUOUS)
                for j in range(3)
            ])
Example 23
def create_source_data(server):
    # create the source stream
    src = DataStream(id=0, name="source", keep_us=100, datatype=DataStream.DATATYPE.FLOAT32)
    src.elements = [Element(name="e%d" % x, index=x, display_type=Element.DISPLAYTYPE.CONTINUOUS) for x in range(3)]

    # source has 100 rows of data
    src_data = helpers.create_data(src.layout, length=100, start=0, step=1)
    ts = src_data['timestamp']
    intervals = [[ts[0], ts[99]]]
    src_info = StreamInfo(int(src_data['timestamp'][0]), int(src_data['timestamp'][-1]),
                          len(src_data))
    server.add_stream('/test/source', src, src_info, src_data, intervals)
    return src_data
Example 24
def create_stream(name, layout, id=0) -> DataStream:
    (ltype, lcount, dtype) = parse_layout(layout)
    datatype = DataStream.DATATYPE[ltype.upper()]

    return DataStream(name=name,
                      datatype=datatype,
                      id=id,
                      elements=[
                          Element(name="e%d" % j,
                                  index=j,
                                  display_type=Element.DISPLAYTYPE.CONTINUOUS)
                          for j in range(lcount)
                      ])
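
A minimal usage sketch, assuming parse_layout accepts the "<datatype>_<element count>" layout strings used throughout these tests (e.g. "float32_3"):

    # hypothetical call: a float32 stream with three continuous elements
    stream = create_stream("temperature", "float32_3", id=7)
    assert len(stream.elements) == 3
    assert stream.datatype == DataStream.DATATYPE.FLOAT32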
Example 25
def run(pipe_config: str, db: Session) -> DataStream:
    # check for a remote stream config
    (pipe_config, node_name) = strip_remote_config(pipe_config)
    local = node_name is None
    # separate the configuration pieces
    (path, name, inline_config) = parse_pipe_config(pipe_config)
    name = data_stream.validate_name(name)
    # parse the inline configuration
    (datatype, element_names) = parse_inline_config(inline_config)
    # if the stream is local, check for it in the database
    if local:
        my_folder = folder.find(path, db, create=True)
        # check if the stream exists in the database
        existing_stream: DataStream = db.query(DataStream). \
            filter_by(folder=my_folder, name=name). \
            one_or_none()
        if existing_stream is not None:
            if len(inline_config) > 0:
                _validate_config_match(existing_stream, datatype,
                                       element_names)
            return existing_stream
    else:  # make sure the remote node is a follower
        if db.query(Follower).filter_by(name=node_name).one_or_none() is None:
            raise ConfigurationError("Remote node [%s] is not a follower" %
                                     node_name)

    # if the stream doesn't exist or it's remote, it *must* have inline configuration
    if len(inline_config) == 0:
        if local:
            msg = "add inline config or *.conf file for stream [%s]" % pipe_config
        else:
            msg = "remote streams must have inline config"
        raise ConfigurationError(msg)

    # build the stream from inline config
    my_stream = data_stream.DataStream(name=name, datatype=datatype)
    for i, e in enumerate(element_names):
        my_stream.elements.append(Element(name=e, index=i))
    if local:
        my_folder.data_streams.append(my_stream)
        db.add(my_stream)
    else:
        my_stream.set_remote(node_name, path + '/' + my_stream.name)
    return my_stream
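
For reference, a sketch of the pipe_config strings this function parses; the inline section supplies the datatype and element names that parse_inline_config returns, but treat the exact grammar as an assumption:

    # hypothetical local pipe config: folder path, stream name, and an
    # inline "<datatype>[<element names>]" section
    stream = run("/sensors/temps:float32[e0, e1, e2]", db)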
Example 26
    async def test_runner(self):
        tests = [
            self._test_basic_insert_extract,
            self._test_extract_data_with_intervals,
            self._test_extract_decimated_data, self._test_db_info,
            self._test_info, self._test_intervals, self._test_remove,
            self._test_destroy, self._test_row_count,
            self._test_actions_on_empty_streams, self._test_consolidate,
            self._test_consolidate_with_time_bounds
        ]
        for test in tests:
            conn: asyncpg.Connection = await asyncpg.connect(self.db_url)
            await conn.execute("DROP SCHEMA IF EXISTS data CASCADE")
            await conn.execute("CREATE SCHEMA data")
            await conn.execute("GRANT ALL ON SCHEMA data TO public")

            self.store = TimescaleStore(
                self.db_url,
                0,
                60,
            )
            await self.store.initialize([])
            # make a sample stream with data
            self.test_stream = DataStream(
                id=100,
                name="stream1",
                datatype=DataStream.DATATYPE.FLOAT32,
                keep_us=DataStream.KEEP_ALL,
                decimate=True,
                elements=[Element(name="e%d" % x) for x in range(3)])
            pipe = pipes.LocalPipe(self.test_stream.layout)
            self.test_data = helpers.create_data(
                layout=self.test_stream.layout, length=1005)
            task = self.store.spawn_inserter(self.test_stream, pipe)
            await pipe.write(self.test_data)
            await pipe.close()
            runner = await task
            await runner
            await conn.close()
            # await self.store.initialize([])
            await test()
            # simulate the nose2 test output
            sys.stdout.write('o')
            await self.store.close()
            sys.stdout.flush()
Example 27
    def test_errors_on_invalid_update(self):
        # value types must be correct
        e = Element(name="test")
        with self.assertRaises(ConfigurationError):
            e.update_attributes({
                "name": "new name",
                "default_min": 'invalid',
            })
        with self.assertRaises(ConfigurationError):
            e.update_attributes({
                "name": "new name",
                "offset": '',
            })
        # default_min < default_max
        with self.assertRaises(ConfigurationError) as error:
            e.update_attributes({
                "name": "new name",
                "default_min": 100,
                "default_max": 10
            })
        self.assertTrue('default_min' in str(error.exception))
Example 28
    def test_reads_selected_elements_to_file(self):
        server = FakeJoule()
        # create the source stream
        src = DataStream(id=0,
                         name="source",
                         keep_us=100,
                         datatype=DataStream.DATATYPE.UINT16)
        src.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]
        # source has 100 rows of data between [0, 100]
        src_data = helpers.create_data(src.layout)
        src_info = StreamInfo(int(src_data['timestamp'][0]),
                              int(src_data['timestamp'][-1]), len(src_data))
        server.add_stream('/test/source', src, src_info, src_data)

        self.start_server(server)
        runner = CliRunner()
        # add in some extra parameters to make sure they are parsed
        with tempfile.NamedTemporaryFile() as data_file:
            result = runner.invoke(main, [
                'data', 'read', '/test/source', '--start', '0', '--end',
                '1 hour ago', '-i', '0,2', '--file', data_file.name
            ])

            _print_result_on_error(result)
            self.assertEqual(result.exit_code, 0)
            h5_file = h5py.File(data_file.name, 'r')
            self.assertEqual(src_data['data'].dtype, h5_file['data'].dtype)
            self.assertEqual(h5_file['timestamp'].dtype, np.dtype('i8'))

            np.testing.assert_array_almost_equal(h5_file['data'],
                                                 src_data['data'][:, [0, 2]])
            np.testing.assert_array_almost_equal(
                h5_file['timestamp'], src_data['timestamp'][:, None])

            h5_file.close()

        self.stop_server()
Example 29
    def test_copies_new_data(self):
        server = FakeJoule()
        # create the source and destination streams
        src_data = create_source_data(server)
        # dest has half the data
        dest = DataStream(id=1,
                          name="dest",
                          keep_us=100,
                          datatype=DataStream.DATATYPE.FLOAT32)
        dest.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(3)
        ]
        # destination is missing the first interval, but with --new only data
        # after the destination's last timestamp is copied
        dest_interval = server.streams['/test/source'].intervals[1]
        dest_data = np.copy(src_data[dest_interval[0]:dest_interval[1]])
        server.add_stream(
            '/test/destination', dest,
            StreamInfo(int(dest_interval[0]), int(dest_interval[1]),
                       len(dest_data)), dest_data, [dest_interval])
        self.start_server(server)
        runner = CliRunner()
        result = runner.invoke(
            main,
            ['data', 'copy', '--new', '/test/source', '/test/destination'])
        print(result.output)
        _print_result_on_error(result)
        self.assertEqual(result.exit_code, 0)
        while self.msgs.empty():
            time.sleep(0.1)
            print("waiting...")
        while not self.msgs.empty():
            msg = self.msgs.get()
            if type(msg) is MockDbEntry:
                print(msg)
        self.stop_server()
Example 30
    def test_incompatible_layouts(self):
        server = FakeJoule()
        create_source_data(server)
        dest = DataStream(id=1,
                          name="dest",
                          keep_us=100,
                          datatype=DataStream.DATATYPE.FLOAT32)
        dest.elements = [
            Element(name="e%d" % x,
                    index=x,
                    display_type=Element.DISPLAYTYPE.CONTINUOUS)
            for x in range(5)
        ]
        server.add_stream('/test/destination', dest, StreamInfo(None, None, 0),
                          None)
        self.start_server(server)
        runner = CliRunner()
        result = runner.invoke(
            main, ['data', 'copy', '/test/source', '/test/destination'])
        self.assertTrue('not compatible' in result.output)
        self.assertEqual(result.exit_code, 1)
        self.stop_server()