def test_write_read_direct_with_offset(self):
    """Read a direct data slice that starts at a nonzero sample offset."""
    file_obj = self._create_file(0, 2)
    reader = DataReader().open(file_obj)
    reader.raw_processor.suppress_mode = 'off'
    samples = reader.data_get(5, 10, 1)
    expected = np.arange(10, 20, 2)
    np.testing.assert_allclose(expected, samples[:, 0]['mean'])
def test_write_read_direct_with_sample_overscan_before(self):
    """Read direct data when the request begins before the stored samples."""
    # the created file holds samples 120 to 250 (not 126 to 252)
    file_obj = self._create_file(1, 3)
    reader = DataReader().open(file_obj)
    reader.raw_processor.suppress_mode = 'off'
    samples = reader.data_get(0, 140, 1)
    expected = np.arange(252, 532, 2)
    np.testing.assert_allclose(expected, samples[:, 0]['mean'])
def test_large_file_from_usb(self):
    """Verify sample range and reduction length on a large USB-sourced file."""
    requested = 2000000 * 2
    file_obj, total_samples = self._create_large_file(requested)
    reader = DataReader().open(file_obj)
    self.assertEqual([0, total_samples], reader.sample_id_range)
    reduction = reader.get_reduction()
    self.assertEqual(total_samples // 20000, len(reduction))
def test_write_read_stats_over_samples_offset(self):
    """Statistics over 10-sample windows starting at a nonzero offset."""
    file_obj = self._create_file(0, 2)
    reader = DataReader()
    reader.raw_processor.suppress_mode = 'off'
    reader.open(file_obj)
    stats = reader.data_get(7, 50, 10)
    np.testing.assert_allclose(np.arange(23, 90, 20), stats[:, 0]['mean'])
def test_write_read_direct(self):
    """Round-trip: write a file, then read back every sample directly."""
    file_obj = self._create_file(0, 2)
    reader = DataReader().open(file_obj)
    self.assertEqual([0, 252], reader.sample_id_range)
    reader.raw_processor.suppress_mode = 'off'
    samples = reader.data_get(0, 252, 1)
    np.testing.assert_allclose(np.arange(0, 252 * 2, 2), samples[:, 0]['mean'])
def test_single_sample(self):
    """Statistics over a single sample must match that sample's raw value."""
    file_obj = self.create_sinusoid_file(2000000, 400000)
    reader = DataReader().open(file_obj)
    stats = reader.statistics_get(20, 20, units='samples')
    raw_mean = reader.raw(20, 21)[2][0, 0]
    np.testing.assert_allclose(
        stats['signals']['current']['statistics']['μ'], raw_mean, rtol=0.0005)
def test_write_read_get_reduction(self):
    """Round-trip a file and verify the reduction means."""
    file_obj = self._create_file(0, 32)
    file_reader = datafile.DataFileReader(file_obj)
    file_reader.pretty_print()
    file_obj.seek(0)
    reader = DataReader().open(file_obj)
    reduction = reader.get_reduction(0, 4000)
    np.testing.assert_allclose(np.arange(999, 7000, 2000), reduction[:, 0]['mean'])
def test_single_sample(self):
    """Single-sample statistics must equal the value from samples_get."""
    file_obj = self.create_sinusoid_file(2000000, 400000)
    reader = DataReader().open(file_obj)
    stats = reader.statistics_get(20, 20, units='samples')
    samples = reader.samples_get(20, 21, units='samples', fields=['current'])
    expected = samples['signals']['current']['value'][0]
    np.testing.assert_allclose(
        stats['signals']['current']['µ']['value'], expected, rtol=0.0005)
def test_samples_get(self):
    """samples_get returns the expected structure, units, and sample count."""
    file_obj = self.create_sinusoid_file(2.0, 2000000, 100000)
    reader = DataReader().open(file_obj)
    result = reader.samples_get(0, 1000, units='samples', fields=['current'])
    self.assertIn('time', result)
    self.assertIn('signals', result)
    self.assertIn('current', result['signals'])
    self.assertIn('value', result['signals']['current'])
    self.assertEqual(result['signals']['current']['units'], 'A')
    current = result['signals']['current']['value']
    self.assertEqual(1000, len(current))
def open(self):
    """Open the recording and initialize the view span from its sample range."""
    self.view = self
    self.reader = DataReader().open(self._filename)
    frequency = self.reader.sampling_frequency
    id_range = self.reader.sample_id_range
    x_lim = [sample_id / frequency for sample_id in id_range]
    self.span = span.Span(x_lim, 1 / frequency, 100)
    self.x_range, self.samples_per, self.x = self.span.conform_discrete(x_lim)
    self._cache = None  # invalidate any previously cached data
    log.info('RecordingViewerDevice.open: %s => %s, %s',
             id_range, self.x_range, self.samples_per)
def _open(self):
    """Open the data reader, then start asynchronous annotation loading.

    The annotation loader runs on a QThreadPool worker and reports
    completion through its ``finished`` signal.
    """
    self._reader = DataReader()
    if self._current_ranging_format is not None:
        # apply the configured current-range suppression mode before opening
        self._reader.raw_processor.suppress_mode = self._current_ranging_format
    self._reader.open(self._filename)  # todo progress bar updates
    if hasattr(self._filename, 'read'):
        # file-like stream (no filesystem path): skip annotation loading
        return
    self._loader = AnnotationLoader(self._parent, self._filename, self._cmdp)
    self._loader.signals.finished.connect(self._on_annotations_loaded)
    self._threadpool = QtCore.QThreadPool()
    self._threadpool.start(self._loader)
    self._log.info('RecordingViewerDevice.open')
def test_user_data(self):
    """User data supplied at record time is returned by the reader."""
    expect = ['hello', {'there': 'world'}]
    buffer = io.BytesIO()
    recorder = DataRecorder(buffer, user_data=expect)
    recorder.close()
    buffer.seek(0)
    reader = DataReader().open(buffer)
    self.assertEqual(reader.user_data, expect)
def on_cmd(args):
    """Summarize a recording, optionally exporting and/or plotting it.

    :param args: the parsed command-line arguments.
    :return: 0 on success.
    """
    reader = DataReader().open(args.filename)
    print(reader.summary_string())
    start = args.start
    stop = args.stop
    if stop < 0:
        # negative stop is relative to the end of the recording
        stop = reader.sample_id_range[1] + 1 + stop
    if args.export is not None:
        current, voltage = reader.get_calibrated(start, stop)
        combined = np.hstack((current.reshape((-1, 1)), voltage.reshape((-1, 1))))
        if args.export.endswith('npy'):
            np.save(args.export, combined)
        else:
            np.savetxt(args.export, combined, fmt='%.5g', delimiter=',')
    if args.plot:
        import matplotlib.pyplot as plt
        reduction = reader.get_reduction(start, stop)
        seconds_per = reader.config['samples_per_reduction'] / reader.config['sampling_frequency']
        x = np.arange(len(reduction)) * seconds_per
        fig = plt.figure()
        for idx in range(3):
            ax = fig.add_subplot(3, 1, idx + 1)
            ax.plot(x, reduction[:, idx, 0], color='blue')
            ax.plot(x, reduction[:, idx, 2], color='red')
            ax.plot(x, reduction[:, idx, 3], color='red')
        plt.show()
        plt.close(fig)
    reader.close()
    return 0
def test_statistics_get(self):
    """statistics_get mean matches the numpy mean over the same samples."""
    file_obj = self.create_sinusoid_file(2000000, 400000)
    reader = DataReader().open(file_obj)
    t_start, t_stop = 0.066780, 0.069004
    k_start, k_stop = reader.normalize_time_arguments(t_start, t_stop, units='seconds')
    ranges = [
        (0, 1000),          # trivial, direct
        (0, 20000),         # trivial, single reduction
        (100000, 101000),   # offset, direct
        (100000, 120000),   # offset, ex
        (99000, 120000),
        (100000, 121000),
        (99000, 121000),
        (k_start, k_stop),
    ]
    for k_start, k_stop in ranges:
        stats = reader.statistics_get(k_start, k_stop, units='samples')
        samples = reader.samples_get(k_start, k_stop, units='samples', fields=['current'])
        expected = np.mean(samples['signals']['current']['value'])
        np.testing.assert_allclose(
            stats['signals']['current']['µ']['value'], expected, rtol=0.0005)
    reader.close()
def test_create_single(self):
    """Verify metadata, time conversion, and direct reads on a simple file."""
    file_obj = self._create_file_insert(0, 2, 2)
    reader = DataReader()
    reader.raw_processor.suppress_mode = 'off'
    reader.open(file_obj)
    self.assertEqual([0, 252], reader.sample_id_range)
    self.assertEqual(1000, reader.output_sampling_frequency)
    self.assertEqual(0.05, reader.reduction_frequency)
    self.assertEqual(0.252, reader.duration)
    self.assertEqual(0.0, reader.sample_id_to_time(0))
    self.assertEqual(0, reader.time_to_sample_id(0))
    self.assertEqual(0.2, reader.sample_id_to_time(200))
    self.assertEqual(200, reader.time_to_sample_id(0.2))
    samples = reader.data_get(6, 10, 1)
    np.testing.assert_allclose(np.arange(12, 20, 2), samples[:, 0]['mean'])
def test_user_footer_data(self):
    """Header and footer user data round-trip through record and read."""
    header_data = ['hello', {'there': 'world'}]
    footer_data = ['goodbye', {'for': 'now'}]
    buffer = io.BytesIO()
    recorder = DataRecorder(buffer, user_data=header_data)
    recorder.close(footer_user_data=footer_data)
    buffer.seek(0)
    reader = DataReader().open(buffer)
    self.assertEqual(reader.user_data, header_data)
    self.assertEqual(reader.footer_user_data, footer_data)
def test_empty_file(self):
    """An empty recording yields safe defaults from every reader accessor."""
    buffer = io.BytesIO()
    recorder = DataRecorder(buffer)
    recorder.close()
    buffer.seek(0)
    reader = DataReader().open(buffer)
    self.assertEqual([0, 0], reader.sample_id_range)
    self.assertEqual(1.0, reader.sampling_frequency)
    self.assertEqual(1.0, reader.input_sampling_frequency)
    self.assertEqual(1.0, reader.output_sampling_frequency)
    self.assertEqual(1.0, reader.reduction_frequency)
    self.assertEqual(0.0, reader.duration)
    self.assertEqual(0, reader.voltage_range)
    self.assertEqual(0, len(reader.get_reduction(0, 0)))
    self.assertEqual(0, len(reader.data_get(0, 0)))
    self.assertEqual(0, reader.time_to_sample_id(0.0))
    self.assertEqual(0.0, reader.sample_id_to_time(0))
    self.assertIsNone(reader.samples_get(0, 0))
    reader.close()
def test_normalize_time_arguments(self):
    """normalize_time_arguments handles samples, seconds, and negatives."""
    file_obj = self._create_file(0, 2)
    reader = DataReader().open(file_obj)
    self.assertEqual(reader.normalize_time_arguments(0, 1), (0, 1))
    self.assertEqual(reader.normalize_time_arguments(0.0, 0.01, 'seconds'), (0, 10))
    self.assertEqual(reader.normalize_time_arguments(-10, -1), (242, 251))
    self.assertEqual(reader.normalize_time_arguments(-0.01, -0.005, 'seconds'), (0, 252))
def test_time_conversion(self):
    """Sample-id <-> time conversions agree with the file metadata."""
    file_obj = self._create_file(0, 2)
    reader = DataReader().open(file_obj)
    self.assertEqual([0, 252], reader.sample_id_range)
    self.assertEqual(1000, reader.output_sampling_frequency)
    self.assertEqual(1.0, reader.reduction_frequency)
    self.assertEqual(0.252, reader.duration)
    self.assertEqual(0.0, reader.sample_id_to_time(0))
    self.assertEqual(0, reader.time_to_sample_id(0))
    self.assertEqual(0.2, reader.sample_id_to_time(200))
    self.assertEqual(200, reader.time_to_sample_id(0.2))
def test_truncated(self):
    """A reader must open a file whose recorder was never closed."""
    stream_buffer = StreamBuffer(400.0, [10], 1000.0)
    stream_buffer.suppress_mode = 'off'
    buffer = io.BytesIO()
    recorder = DataRecorder(buffer)
    recorder.stream_notify(stream_buffer)
    count = 16
    for idx in range(0, 160, count):
        packet = usb_packet_factory(idx, count)
        stream_buffer.insert(packet)
        stream_buffer.process()
        recorder.stream_notify(stream_buffer)
    # note: recorder intentionally left unclosed to produce a truncated file
    buffer.seek(0)
    reader = DataReader().open(buffer)
def load_current_range(filename):
    """Load the current_range signal from a recording.

    :param filename: the recording path.
    :return: the current_range sample values, or 1 if the file is too big.
    """
    reader = DataReader().open(filename)
    try:
        print(reader.summary_string())
        r_start, r_stop = reader.sample_id_range
        if r_stop - r_start > MAX_SAMPLES:
            print('file too big')
            return 1
        samples = reader.samples_get(fields=['current_range'])
        return samples['signals']['current_range']['value']
    finally:
        reader.close()
def test_cache_test(self):
    """Statistics agree with raw means across varied step sizes and offsets."""
    sample_rate = 2000000
    sample_count = sample_rate * 2
    file_obj = self.create_sinusoid_file(sample_rate, sample_count)
    reader = DataReader().open(file_obj)
    for step_size in [1111, 2000, 11111, 20000]:
        for offset in range(0, sample_count - step_size, step_size):
            reader.raw_processor.reset()
            stats = reader.statistics_get(offset, offset + step_size, units='samples')
            _, _, raw_data = reader.raw(offset, offset + step_size)
            expected = np.mean(raw_data[:, 0])
            np.testing.assert_allclose(
                stats['signals']['current']['statistics']['μ'], expected, rtol=0.0005)
    reader.close()
def run():
    """Copy a recording from infile to outfile, one second per block.

    :return: 0 on success.
    """
    args = get_parser().parse_args()
    reader = DataReader()
    reader.open(args.infile)
    s_min, s_max = reader.sample_id_range
    sample_count = s_max - s_min
    writer = DataRecorder(args.outfile, reader.calibration, reader.user_data)
    block_size = int(reader.sampling_frequency)
    print(f'samples={sample_count}, fs={reader.sampling_frequency}')
    block_count = (sample_count + block_size - 1) // block_size  # ceil division
    for block_idx in range(block_count):
        offset = block_idx * block_size
        offset_next = min(offset + block_size, sample_count)
        samples = reader.samples_get(offset, offset_next, 'samples')
        writer.insert(samples)
        progress(block_idx, block_count - 1)
    reader.close()
    writer.close()
    return 0
def _open(self):
    """Create and open the data reader for the configured filename."""
    self._reader = DataReader()
    suppress = self._current_ranging_format
    if suppress is not None:
        self._reader.raw_processor.suppress_mode = suppress
    # todo: progress bar updates
    self._reader.open(self._filename)
    self._log.info('RecordingViewerDevice.open')
class RecordingViewerDevice:
    """A user-interface-compatible device that displays previous recorded data

    :param filename: The filename path to the pre-recorded data.

    All reader access is serialized through a command queue serviced by a
    dedicated worker thread (see :meth:`run`).
    """

    def __init__(self, filename, current_ranging_format=None):
        if isinstance(filename, str) and not os.path.isfile(filename):
            raise IOError('file not found')
        self._filename = filename
        self._current_ranging_format = current_ranging_format
        self._reader = None
        self._views = []
        self._coalesce = {}  # coalesced commands keyed by (view, cmd, source_id)
        self._thread = None
        self._cmd_queue = queue.Queue()  # tuples of (command, args, callback)
        self._response_queue = queue.Queue()
        self._quit = False
        self._log = logging.getLogger(__name__)

    def __str__(self):
        return os.path.basename(self._filename)

    @property
    def sampling_frequency(self):
        # None until _open has created the reader
        if self._reader is None:
            return None
        return self._reader.sampling_frequency

    @property
    def calibration(self):
        # None until _open has created the reader
        if self._reader is None:
            return None
        return self._reader.calibration

    @property
    def voltage_range(self):
        return self._reader.voltage_range

    def _cmd_process(self, cmd, view, args, cbk):
        """Dispatch one queued command on the worker thread.

        :param cmd: the command name string.
        :param view: the target view (when the command is view-scoped).
        :param args: the command arguments.
        :param cbk: optional callable invoked with the command's result.
        """
        rv = None
        try:
            # self._log.debug('_cmd_process %s - start', cmd)
            if cmd == 'refresh':
                view._refresh_requested = True
            elif cmd == 'on_x_change':
                rv = view._on_x_change(*args)
            elif cmd == 'samples_get':
                rv = view._samples_get(**args)
            elif cmd == 'statistics_get':
                rv = view._statistics_get(**args)
            elif cmd == 'statistics_get_multiple':
                rv = view._statistics_get_multiple(**args)
            elif cmd == 'view_factory':
                self._views.append(args)
                rv = args
            elif cmd == 'view_close':
                if args in self._views:
                    self._views.remove(args)
            elif cmd == 'open':
                rv = self._open()
            elif cmd == 'close':
                rv = self._close()
            elif cmd == 'ping':
                rv = args
            else:
                self._log.warning('unsupported command %s', cmd)
        except:
            self._log.exception('While running command')
        if callable(cbk):
            try:
                cbk(rv)
            except:
                self._log.exception('in callback')

    def run(self):
        """Worker thread loop: service commands until :meth:`_close` sets _quit.

        Commands carrying a 'source_id' are coalesced (only the most recent
        survives) and flushed when the queue goes idle; pending view
        refreshes are also serviced on idle.
        """
        cmd_count = 0
        timeout = 1.0
        self._log.info('RecordingViewerDevice.start')
        while not self._quit:
            try:
                cmd, view, args, cbk = self._cmd_queue.get(timeout=timeout)
            except queue.Empty:
                # idle: flush coalesced commands and pending view refreshes
                timeout = 1.0
                for value in self._coalesce.values():
                    self._cmd_process(*value)
                self._coalesce.clear()
                for view in self._views:
                    if view._refresh_requested:
                        view._update()
                cmd_count = 0
                continue
            cmd_count += 1
            timeout = 0.0  # drain queue quickly while commands keep arriving
            try:
                source_id = args.pop('source_id')
            except:
                source_id = None
            if source_id is not None:
                key = f'{view}_{cmd}_{source_id}'  # keep most recent only
                self._coalesce[key] = (cmd, view, args, cbk)
            else:
                self._cmd_process(cmd, view, args, cbk)
        self._log.info('RecordingViewerDevice.run done')

    def _post(self, command, view=None, args=None, cbk=None):
        """Queue a command for the worker thread (fire and forget)."""
        if self._thread is None:
            self._log.info('RecordingViewerDevice._post(%s) when thread not running', command)
        else:
            self._cmd_queue.put((command, view, args, cbk))

    def _post_block(self, command, view=None, args=None, timeout=None):
        """Queue a command and block until its result arrives.

        :param timeout: seconds to wait; defaults to TIMEOUT.
        :return: the command's result.
        :raise IOError: if the worker thread is not running, hung, or the
            command produced an exception.
        """
        timeout = TIMEOUT if timeout is None else float(timeout)
        # self._log.debug('_post_block %s start', command)
        while not self._response_queue.empty():
            # drain stale responses from a previous (timed-out) command
            self._log.warning('response queue not empty')
            try:
                self._response_queue.get(timeout=0.0)
            except queue.Empty:
                pass
        if self._thread is None:
            raise IOError('View thread not running')
        self._post(command, view, args, lambda rv_=None: self._response_queue.put(rv_))
        try:
            rv = self._response_queue.get(timeout=timeout)
        except queue.Empty as ex:
            # worker did not respond in time: force a shutdown
            self._log.error('RecordingViewerDevice thread hung: %s - FORCE CLOSE', command)
            self._post('close', None, None)
            self._thread.join(timeout=TIMEOUT)
            self._thread = None
            rv = ex
        except Exception as ex:
            rv = ex
        if isinstance(rv, Exception):
            raise IOError(rv)
        # self._log.debug('_post_block %s done', command)
        return rv

    def _open(self):
        """Worker-thread handler: create and open the data reader."""
        self._reader = DataReader()
        if self._current_ranging_format is not None:
            self._reader.raw_processor.suppress_mode = self._current_ranging_format
        self._reader.open(self._filename)  # todo progress bar updates
        self._log.info('RecordingViewerDevice.open')

    def _close(self):
        """Worker-thread handler: close the reader and stop the run loop."""
        if self._reader is not None:
            self._reader.close()
            self._reader = None
        self._quit = True

    def view_factory(self):
        """Create and register a new :class:`RecordingView` for this device."""
        view = RecordingView(self)
        return self._post_block('view_factory', None, view)

    def open(self, event_callback_fn=None):
        """Start the worker thread and open the recording.

        :param event_callback_fn: accepted for interface compatibility; unused.
        """
        self.close()  # ensure any previous session is fully torn down
        self._log.info('open')
        self._thread = threading.Thread(name='view', target=self.run)
        self._thread.start()
        self._post_block('open')

    def close(self):
        """Stop the worker thread and release the reader (idempotent)."""
        if self._thread is not None:
            self._log.info('close')
            try:
                self._post_block('close')
            except Exception:
                self._log.exception('while attempting to close')
            self._thread.join(timeout=TIMEOUT)
            self._thread = None
def _open(self):
    """Open the data reader on the configured file."""
    # todo: progress bar updates
    reader = DataReader()
    self._reader = reader.open(self._filename)
    self._log.info('RecordingViewerDevice.open')
def test_write_read_get_reduction_offset(self):
    """Reduction reads starting at a nonzero sample offset."""
    file_obj = self._create_file(0, 32)
    reader = DataReader().open(file_obj)
    reduction = reader.get_reduction(1000, 4000)
    np.testing.assert_allclose([2999, 4999, 6999], reduction[:, 0]['mean'])
def test_write_read_reduction_indirect(self):
    """data_get spanning multiple reductions computes per-window means."""
    file_obj = self._create_file(0, 32)
    reader = DataReader().open(file_obj)
    samples = reader.data_get(0, 4000, 2000)
    np.testing.assert_allclose([1999, 5999], samples[:, 0]['mean'])
def test_write_read_stats_over_samples(self):
    """get() over 5-sample windows yields the expected per-window values."""
    file_obj = self._create_file(0, 2)
    reader = DataReader().open(file_obj)
    stats = reader.get(0, 50, 5)
    np.testing.assert_allclose(np.arange(4, 100, 10), stats[:, 0, 0])
def test_user_data_none_when_not_provided(self):
    """user_data is None when the recorder was given none."""
    file_obj = self._create_file(0, 2)
    reader = DataReader().open(file_obj)
    self.assertEqual(reader.user_data, None)