def create_sinusoid_file(self, sample_rate, samples):
    cal = Calibration()
    cal.current_offset[:7] = -3000
    cal.current_gain[:7] = [1e-3, 1e-4, 1e-5, 1e-6, 1e-7, 1e-8, 1e-9]
    cal.voltage_offset[:2] = -3000
    cal.voltage_gain[:2] = [1e-3, 1e-4]
    cal.data = cal.save(bytes([0] * 32))
    fh = io.BytesIO()
    d = DataRecorder(fh, calibration=cal)
    stream_buffer = StreamBuffer(1.0, [100], sample_rate)
    stream_buffer.calibration_set(cal.current_offset, cal.current_gain,
                                  cal.voltage_offset, cal.voltage_gain)
    d.stream_notify(stream_buffer)
    data = self.create_sinusoid_data(sample_rate, samples)

    chunk_size = (sample_rate // 2) * 2
    for i in range(0, 2 * samples, chunk_size):
        stream_buffer.insert_raw(data[i:(i + chunk_size)])
        stream_buffer.process()
        d.stream_notify(stream_buffer)

    d.close()
    fh.seek(0)
    return fh
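# Hypothetical sketch of the create_sinusoid_data helper used above; the real
# implementation is defined elsewhere in the test class. It assumes the raw
# stream format expected by insert_raw() is interleaved uint16
# current/voltage entries (two per sample), which matches the 2 * samples
# indexing in create_sinusoid_file above, and that numpy is imported as np.
def create_sinusoid_data(self, sample_rate, samples):
    x = np.arange(samples, dtype=np.float64)
    current = (np.sin(2 * np.pi * 1000.0 * x / sample_rate) + 1.0) * 1000.0
    voltage = (np.cos(2 * np.pi * 1000.0 * x / sample_rate) + 1.0) * 1000.0
    raw = np.empty(2 * samples, dtype=np.uint16)
    raw[0::2] = current.astype(np.uint16)  # even entries: current
    raw[1::2] = voltage.astype(np.uint16)  # odd entries: voltage
    return raw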
def create_sinusoid_file(self, file_duration, input_sample_rate, output_sample_rate,
                         stream_buffer_duration=None, chunk_size=None):
    stream_buffer_duration = 1.0 if stream_buffer_duration is None else float(stream_buffer_duration)
    min_duration = 400000 / output_sample_rate
    stream_buffer_duration = max(stream_buffer_duration, min_duration)
    chunk_size = 1024 if chunk_size is None else int(chunk_size)
    cal = Calibration()
    cal.current_offset[:7] = -3000
    cal.current_gain[:7] = [1e-3, 1e-4, 1e-5, 1e-6, 1e-7, 1e-8, 1e-9]
    cal.voltage_offset[:2] = -3000
    cal.voltage_gain[:2] = [1e-3, 1e-4]
    cal.data = cal.save(bytes([0] * 32))
    fh = io.BytesIO()
    d = DataRecorder(fh, calibration=cal)
    buffer = DownsamplingStreamBuffer(stream_buffer_duration, [100],
                                      input_sample_rate, output_sample_rate)
    buffer.calibration_set(cal.current_offset, cal.current_gain,
                           cal.voltage_offset, cal.voltage_gain)
    d.stream_notify(buffer)
    input_samples = int(file_duration * input_sample_rate)
    data = self.create_sinusoid_data(input_sample_rate, input_samples)

    i = 0
    while i < input_samples:
        i_next = min(i + chunk_size, input_samples)
        buffer.insert_raw(data[i:i_next])
        buffer.process()
        d.stream_notify(buffer)
        i = i_next

    d.close()
    fh.seek(0)
    return fh
def _create_large_file(self, samples=None):
    """Create a large file.

    :param samples: The total number of samples, which will be rounded
        up to a full burst of USB packets.
    :return: (BytesIO file, samples)
    """
    sample_rate = 2000000
    packets_per_burst = 128
    bursts = int(np.ceil(samples / (SAMPLES_PER_PACKET * packets_per_burst)))
    stream_buffer = StreamBuffer(1.0, [100], sample_rate)
    samples_total = SAMPLES_PER_PACKET * packets_per_burst * bursts
    fh = io.BytesIO()
    d = DataRecorder(fh)
    d.stream_notify(stream_buffer)

    for burst_index in range(bursts):
        packet_index = burst_index * packets_per_burst
        frames = usb_packet_factory_signal(packet_index, count=packets_per_burst,
                                           samples_total=samples_total)
        stream_buffer.insert(frames)
        stream_buffer.process()
        d.stream_notify(stream_buffer)

    d.close()
    fh.seek(0)
    # dfr = datafile.DataFileReader(fh)
    # dfr.pretty_print()
    # fh.seek(0)
    return fh, samples_total
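# Worked example of the burst rounding above, assuming SAMPLES_PER_PACKET is
# 126 (512-byte USB packet: 8-byte header + 504 bytes at 4 bytes per sample);
# verify against the stream_buffer module in use:
#   samples         = 1_000_000
#   burst size      = 126 * 128 = 16_128 samples
#   bursts          = ceil(1_000_000 / 16_128) = 63
#   samples_total   = 63 * 16_128 = 1_016_064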
def _export_jls(self):
    view = self._view
    cfg = self._cfg
    sampling_frequency = view.sampling_frequency
    sample_step_size = sampling_frequency
    stream_buffer = StreamBuffer(sampling_frequency * 2, [])
    data_recorder = DataRecorder(cfg['filename'],
                                 calibration=view.calibration.data,
                                 sampling_frequency=sampling_frequency)
    data_recorder.process(stream_buffer)
    try:
        idx_start = cfg['sample_id_start']
        idx_stop = cfg['sample_id_stop']
        idx_range = idx_stop - idx_start
        idx = idx_start
        self.sigProgress.emit(0)
        while not self._stop and idx < idx_stop:
            log.info('export_jls iteration')
            idx_next = idx + sample_step_size
            if idx_next > idx_stop:
                idx_next = idx_stop
            data = view.raw_get(idx, idx_next)
            log.info('export_jls (%d, %d) -> %d', idx, idx_next, len(data))
            stream_buffer.insert_raw(data)
            stream_buffer.process()
            data_recorder.process(stream_buffer)
            idx = idx_next
            self.sigProgress.emit(int(1000 * (idx - idx_start) / idx_range))
    finally:
        data_recorder.close()
def _create_file_insert(self, packet_index, count_incr, count_total):
    stream_buffer = StreamBuffer(10.0, [10], 1000.0)
    stream_buffer.suppress_mode = 'off'
    if packet_index > 0:
        data = usb_packet_factory(0, packet_index - 1)
        stream_buffer.insert(data)
        stream_buffer.process()
    fh = io.BytesIO()
    d = DataRecorder(fh)
    sample_id = stream_buffer.sample_id_range[1]

    for _ in range(0, count_total, count_incr):
        data = usb_packet_factory(packet_index, count_incr)
        stream_buffer.insert(data)
        stream_buffer.process()
        sample_id_next = stream_buffer.sample_id_range[1]
        d.insert(stream_buffer.samples_get(sample_id, sample_id_next))
        sample_id = sample_id_next
        packet_index += count_incr

    d.close()
    fh.seek(0)
    # dfr = datafile.DataFileReader(fh)
    # dfr.pretty_print()
    # fh.seek(0)
    return fh
def run():
    quit_ = False

    def do_quit(*args, **kwargs):
        nonlocal quit_
        quit_ = 'quit from SIGINT'

    def on_stop(event, message):
        nonlocal quit_
        quit_ = 'quit from stop duration'

    if len(sys.argv) != 2:
        print('usage: python3 capture_jls.py [filename]')
        return 1
    filename = sys.argv[1]

    signal.signal(signal.SIGINT, do_quit)
    device = scan_require_one(config='auto')
    with device:
        recorder = DataRecorder(filename, calibration=device.calibration)
        try:
            device.stream_process_register(recorder)
            data = device.start(stop_fn=on_stop)
            print('Capturing data: type CTRL-C to stop')
            while not quit_:
                time.sleep(0.01)
        finally:
            recorder.close()
    return 0
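# Example invocation of the capture script above, taken from its own usage
# message: capture streams to the given file until CTRL-C (SIGINT) or until
# the device's stop callback fires.
#
#     python3 capture_jls.py capture.jls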
def test_user_data(self):
    expect = ['hello', {'there': 'world'}]
    fh = io.BytesIO()
    d = DataRecorder(fh, user_data=expect)
    d.close()
    fh.seek(0)
    r = DataReader().open(fh)
    self.assertEqual(r.user_data, expect)
def _export_jls(self, data):
    data_recorder = DataRecorder(self._filename,
                                 calibration=data.calibration.data)
    try:
        for block in data:
            log.info('export_jls iteration')
            data_recorder.insert(block)
    finally:
        data_recorder.close()
def test_user_footer_data(self):
    header_data = ['hello', {'there': 'world'}]
    footer_data = ['goodbye', {'for': 'now'}]
    fh = io.BytesIO()
    d = DataRecorder(fh, user_data=header_data)
    d.close(footer_user_data=footer_data)
    fh.seek(0)
    r = DataReader().open(fh)
    self.assertEqual(r.user_data, header_data)
    self.assertEqual(r.footer_user_data, footer_data)
def run():
    quit_ = False

    def do_quit(*args, **kwargs):
        nonlocal quit_
        quit_ = 'quit from SIGINT'

    def on_stop(event, message):
        nonlocal quit_
        quit_ = 'quit from stop'

    args = get_parser().parse_args()
    filename_base, filename_ext = os.path.splitext(args.out)
    signal.signal(signal.SIGINT, do_quit)
    devices = scan(config='auto')
    items = []
    try:
        for device in devices:
            if args.frequency:
                try:
                    device.parameter_set('sampling_frequency', int(args.frequency))
                except Exception:
                    # bad frequency selected, display warning & exit gracefully
                    freqs = [f[2][0] for f in device.parameters('sampling_frequency').options]
                    print(f'Unsupported frequency selected: {args.frequency}')
                    print(f'Supported frequencies = {freqs}')
                    quit_ = True
                    break
            fname = f'{filename_base}_{device.device_serial_number}{filename_ext}'
            device.open()
            recorder = DataRecorder(fname, calibration=device.calibration)
            items.append([device, recorder])
            device.stream_process_register(recorder)

        if not quit_:
            for device, _ in items:
                device.start(stop_fn=on_stop, duration=args.duration)
            print('Capturing data: type CTRL-C to stop')
            while not quit_:
                time.sleep(0.01)
    finally:
        for device, recorder in items:
            try:
                device.stop()
                recorder.close()
                device.close()
            except Exception:
                print('exception during close')
    return 0
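# Hypothetical sketch of the get_parser() helper used by run() above; the
# real parser is defined elsewhere. It assumes only the attributes run()
# actually reads: out, frequency, and duration.
import argparse

def get_parser():
    p = argparse.ArgumentParser(description='Capture data from all connected Joulescopes.')
    p.add_argument('--out', default='capture.jls',
                   help='output filename; the device serial number is appended to the base name')
    p.add_argument('--frequency',
                   help='sampling frequency in Hz (must be supported by the device)')
    p.add_argument('--duration', type=float,
                   help='capture duration in seconds')
    return p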
def run(device, filename, duration=None, contiguous_duration=None):
    quit_ = False

    def do_quit(*args, **kwargs):
        nonlocal quit_
        quit_ = 'quit from SIGINT'

    def on_stop(event, message):
        nonlocal quit_
        quit_ = 'quit from stop duration'

    recorder = None
    signal.signal(signal.SIGINT, do_quit)
    try:
        device.open()
        recorder = DataRecorder(filename,
                                sampling_frequency=device.sampling_frequency,
                                calibration=device.calibration)
        device.stream_process_register(recorder)
        device.start(stop_fn=on_stop, duration=duration,
                     contiguous_duration=contiguous_duration)

        time_last = time.time()
        sample_id_last = 0
        sample_id_incr = 1000000
        sample_id_next = sample_id_last + sample_id_incr
        status_failures = 0
        while not quit_:
            time.sleep(0.01)
            time_now = time.time()
            if time_now - time_last > 1.0:
                s = device.status()
                if s.get('driver', {}).get('return_code', {}).get('value', 1):
                    status_failures += 1
                    if status_failures >= 3:
                        raise RuntimeError(f'status_failures = {status_failures}')
                logging.getLogger().info(s)
                time_last = time_now
            while device.stream_buffer.sample_id_range[-1] >= sample_id_next:
                # todo save
                sample_id_next += sample_id_incr
        device.stop()
    except Exception:
        logging.getLogger().exception('while capturing data')
        print('Data capture failed')
        return 1
    finally:
        if recorder is not None:
            recorder.close()
        device.close()
    print('done capturing data: %s' % quit_)
    return 0
def run(cmd_queue, filehandle, sampling_frequency, calibration):
    r = DataRecorder(filehandle, sampling_frequency, calibration)
    b = StreamBuffer(int(sampling_frequency), [], sampling_frequency)
    b.calibration_set(calibration.current_offset, calibration.current_gain,
                      calibration.voltage_offset, calibration.voltage_gain)
    while True:
        cmd, args = cmd_queue.get()
        if cmd == 'stream_notify':
            raw_data, voltage_range = args
            b.voltage_range = voltage_range
            b.insert_raw(raw_data)
            b.process()
            r.stream_notify(b)
        elif cmd == 'close':
            r.close()
            break
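# Hedged usage sketch for the worker above: a hypothetical parent process
# spawns the recorder and feeds its command queue. Only the command tuples
# ('stream_notify', (raw_data, voltage_range)) and ('close', ...) are taken
# from the worker loop; the process wiring itself is an assumption. A
# filename string is passed instead of an open file object, since file
# handles do not pickle across processes.
import multiprocessing

def record_in_subprocess(filename, sampling_frequency, calibration, chunks):
    cmd_queue = multiprocessing.Queue()
    p = multiprocessing.Process(
        target=run, args=(cmd_queue, filename, sampling_frequency, calibration))
    p.start()
    for raw_data, voltage_range in chunks:
        cmd_queue.put(('stream_notify', (raw_data, voltage_range)))
    cmd_queue.put(('close', None))
    p.join()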
def run(cmd_queue, filehandle, calibration, logging_queue):
    worker_configurer(logging_queue)
    log = logging.getLogger(__name__)
    log.info('DataRecorder process start')
    r = DataRecorder(filehandle, calibration)
    while True:
        cmd, args = cmd_queue.get()
        if cmd == 'stream_notify':
            data, = args
            r.insert(data)
        elif cmd == 'close':
            log.info('DataRecorder closing')
            r.close()
            break
    cmd_queue.put('close')
    log.info('DataRecorder process end')
def _create_file(self, packet_index, count=None):
    stream_buffer = StreamBuffer(401.0, [200, 100], 1000.0)
    stream_buffer.suppress_mode = 'off'
    if packet_index > 0:
        data = usb_packet_factory(0, packet_index - 1)
        stream_buffer.insert(data)
        stream_buffer.process()
    fh = io.BytesIO()
    d = DataRecorder(fh)
    d.stream_notify(stream_buffer)
    data = usb_packet_factory(packet_index, count)
    stream_buffer.insert(data)
    stream_buffer.process()
    d.stream_notify(stream_buffer)
    d.close()
    fh.seek(0)
    return fh
def test_empty_file(self):
    fh = io.BytesIO()
    d = DataRecorder(fh)
    d.close()
    fh.seek(0)
    r = DataReader().open(fh)
    self.assertEqual([0, 0], r.sample_id_range)
    self.assertEqual(1.0, r.sampling_frequency)
    self.assertEqual(1.0, r.input_sampling_frequency)
    self.assertEqual(1.0, r.output_sampling_frequency)
    self.assertEqual(1.0, r.reduction_frequency)
    self.assertEqual(0.0, r.duration)
    self.assertEqual(0, r.voltage_range)
    self.assertEqual(0, len(r.get_reduction(0, 0)))
    self.assertEqual(0, len(r.data_get(0, 0)))
    self.assertEqual(0, r.time_to_sample_id(0.0))
    self.assertEqual(0.0, r.sample_id_to_time(0))
    self.assertIsNone(r.samples_get(0, 0))
    r.close()
def run(cmd_queue, filehandle, calibration, logging_queue):
    worker_configurer(logging_queue)
    log = logging.getLogger(__name__)
    log.info('run start')
    r = DataRecorder(filehandle, calibration)
    while True:
        try:
            cmd, args = cmd_queue.get(timeout=1.0)
            if cmd == 'stream_notify':
                data, = args
                r.insert(data)
            elif cmd == 'close':
                log.info('run closing')
                r.close()
                break
        except Empty:
            pass
        except Exception:
            log.exception('run exception during loop')
    log.info('run end')
def run():
    args = get_parser().parse_args()
    reader = DataReader()
    reader.open(args.infile)
    s_min, s_max = reader.sample_id_range
    sample_count = s_max - s_min
    writer = DataRecorder(args.outfile, reader.calibration, reader.user_data)
    block_size = int(reader.sampling_frequency)
    print(f'samples={sample_count}, fs={reader.sampling_frequency}')

    block_count = (sample_count + block_size - 1) // block_size
    for block in range(block_count):
        offset = block * block_size
        offset_next = offset + block_size
        if offset_next > sample_count:
            offset_next = sample_count
        data = reader.samples_get(offset, offset_next, 'samples')
        writer.insert(data)
        progress(block, block_count - 1)

    reader.close()
    writer.close()
    return 0
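# Hypothetical sketch of the progress() helper called above; the real
# implementation is defined elsewhere. Renders a single-line percentage on
# the console.
import sys

def progress(current, total):
    pct = 100.0 if total <= 0 else 100.0 * current / total
    sys.stdout.write('\r%5.1f%%' % pct)
    sys.stdout.flush()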
def _export_jls(self, data):
    cfg = self._cfg
    sampling_frequency = data.sample_frequency
    stream_buffer = StreamBuffer(sampling_frequency * 2, [],
                                 sampling_frequency=sampling_frequency)
    stream_buffer.calibration_set(data.calibration.current_offset,
                                  data.calibration.current_gain,
                                  data.calibration.voltage_offset,
                                  data.calibration.voltage_gain)
    stream_buffer.voltage_range = data.cmdp['Plugins/#state/voltage_range']
    data_recorder = DataRecorder(cfg['filename'],
                                 calibration=data.calibration.data,
                                 sampling_frequency=sampling_frequency)
    data_recorder.stream_notify(stream_buffer)
    try:
        for block in data:
            log.info('export_jls iteration')
            stream_buffer.insert_raw(block['signals']['raw']['value'])
            stream_buffer.process()
            data_recorder.stream_notify(stream_buffer)
    finally:
        data_recorder.close()
def _create_file(self, packet_index, count=None):
    stream_buffer = StreamBuffer(2000, [10])
    stream_buffer.suppress_mode = 'off'
    if packet_index > 0:
        data = usb_packet_factory(0, packet_index - 1)
        stream_buffer.insert(data)
        stream_buffer.process()
    fh = io.BytesIO()
    d = DataRecorder(fh, sampling_frequency=1000)
    d.process(stream_buffer)
    data = usb_packet_factory(packet_index, count)
    stream_buffer.insert(data)
    stream_buffer.process()
    d.process(stream_buffer)
    d.close()
    fh.seek(0)
    # from joulescope import datafile
    # dfr = datafile.DataFileReader(fh)
    # dfr.pretty_print()
    # fh.seek(0)
    return fh
class Capture:

    def __init__(self, device, args):
        self._device = device
        self._args = args
        self._timestamp = None
        self._record = None
        self._csv = None
        self._count = 0
        self._triggered = False
        self._sample_id_last = None
        self._start_fn = getattr(self, f'_start_{args.start}')
        self._end_fn = getattr(self, f'_end_{args.end}')
        self._current = Signal()
        self._voltage = Signal()
        self._power = Signal()
        self._charge = 0  # in 1e-15 C, use Python int for infinite precision
        self._energy = 0  # in 1e-15 J, use Python int for infinite precision
        self._time_start = None  # [sample, timestr]
        self._time_end = None
        if self._args.csv is not None:
            self._csv = open(self._args.csv, 'wt')
            self._csv.write(f'#{COLUMNS}\n')

    def _construct_record_filename(self):
        time_start = datetime.datetime.utcnow()
        timestamp_str = time_start.strftime('%Y%m%d_%H%M%S')
        return f'{timestamp_str}_{self._count + 1:04d}.jls'

    def start(self, start_id):
        if self._triggered:
            self.stop()
        self._time_start = [start_id, _current_time_str()]
        log.info(f'start {start_id}')
        if self._args.display_trigger:
            print(f'start {start_id}')
        self._current.clear()
        self._voltage.clear()
        self._power.clear()
        self._charge = 0
        self._energy = 0
        if self._args.record:
            filename = self._construct_record_filename()
            self._record = DataRecorder(filename, calibration=self._device.calibration)
        self._triggered = True
        return start_id

    def stop(self, end_id=None):
        if not self._triggered:
            return
        if end_id is None:
            end_id = self._sample_id_last
        self._time_end = [end_id, _current_time_str()]
        if self._record:
            self._record.close()
            self._record = None
        log.info(f'stop {end_id}')
        if self._args.display_trigger:
            print(f'stop {end_id}')
        self._count += 1
        current = self._current.result()
        voltage = self._voltage.result()
        power = self._power.result()
        charge = self._charge / GAIN
        energy = self._energy / GAIN
        r = self._time_start + self._time_end + \
            current + voltage + power + \
            [charge, charge / 3600.0] + [energy, energy / 3600.0]
        results = []
        for x in r:
            if x is None:
                results.append('NAN')
            elif isinstance(x, int):
                results.append(str(x))
            elif isinstance(x, str):
                results.append(x)
            else:
                results.append('%g' % x)
        line = ','.join(results)
        if self._args.display_stats:
            if self._count == 1:
                print(COLUMNS)
            print(line)
        if self._csv is not None:
            self._csv.write(line + '\n')
            self._csv.flush()
        self._triggered = False
        return end_id

    def close(self):
        self.stop()
        if self._csv is not None:
            self._csv.close()

    def _in_level_low(self, stream_buffer, field, start_id, end_id):
        field = FIELD_MAP.get(field, field)
        gpi = stream_buffer.samples_get(start_id, end_id, fields=field)
        if not np.all(gpi):  # found trigger
            return start_id + int(np.argmin(gpi))
        return None

    def _in_level_high(self, stream_buffer, field, start_id, end_id):
        field = FIELD_MAP.get(field, field)
        gpi = stream_buffer.samples_get(start_id, end_id, fields=field)
        if np.any(gpi):  # found trigger
            return start_id + int(np.argmax(gpi))
        return None

    def _in_edge_rising(self, stream_buffer, field, start_id, end_id):
        field = FIELD_MAP.get(field, field)
        if start_id <= 0:
            gpi = stream_buffer.samples_get(start_id, end_id, fields=field)
            if bool(gpi[0]):
                return start_id
        else:
            gpi = stream_buffer.samples_get(start_id, end_id, fields=field)
        gpi = gpi.astype(np.int8)
        d = np.diff(gpi)
        if np.any(d >= 1):  # found trigger
            return start_id + int(np.argmax(d))
        return None

    def _in_edge_falling(self, stream_buffer, field, start_id, end_id):
        field = FIELD_MAP.get(field, field)
        if start_id <= 0:
            gpi = stream_buffer.samples_get(start_id, end_id, fields=field)
            if not bool(gpi[0]):
                return start_id
        else:
            gpi = stream_buffer.samples_get(start_id, end_id, fields=field)
        gpi = gpi.astype(np.int8)
        d = np.diff(gpi)
        if np.any(d <= -1):  # found trigger
            return start_id + int(np.argmin(d))
        return None

    def _start_none(self, stream_buffer, start_id, end_id):
        return start_id

    def _start_low(self, stream_buffer, start_id, end_id):
        field = self._args.start_signal
        return self._in_level_low(stream_buffer, field, start_id, end_id)

    def _start_high(self, stream_buffer, start_id, end_id):
        field = self._args.start_signal
        return self._in_level_high(stream_buffer, field, start_id, end_id)

    def _start_rising(self, stream_buffer, start_id, end_id):
        field = self._args.start_signal
        return self._in_edge_rising(stream_buffer, field, start_id, end_id)

    def _start_falling(self, stream_buffer, start_id, end_id):
        field = self._args.start_signal
        return self._in_edge_falling(stream_buffer, field, start_id, end_id)

    def _start_duration(self, stream_buffer, start_id, end_id):
        if self._time_end is None:
            self._time_end = [start_id, _current_time_str()]
        d = int(self._args.start_duration * stream_buffer.output_sampling_frequency)
        d += self._time_end[0]
        if end_id > d:
            return d
        else:
            return None

    def _add(self, stream_buffer, start_id, end_id):
        data = stream_buffer.samples_get(start_id, end_id)
        i_all = data['signals']['current']['value']
        v_all = data['signals']['voltage']['value']
        p_all = data['signals']['power']['value']
        finite_idx = np.isfinite(i_all)
        i, v, p = i_all[finite_idx], v_all[finite_idx], p_all[finite_idx]
        if len(i) != len(i_all):
            print(f'Ignored {len(i_all) - len(i)} missing samples')
        if len(i):
            self._current.add(i)
            self._voltage.add(v)
            self._power.add(p)
            period = 1.0 / stream_buffer.output_sampling_frequency
            self._charge += int(np.sum(i) * period * GAIN)
            self._energy += int(np.sum(p) * period * GAIN)
        if self._record is not None:
            self._record.insert(data)
        return end_id

    def _end_none(self, stream_buffer, start_id, end_id):
        return None

    def _end_low(self, stream_buffer, start_id, end_id):
        field = self._args.end_signal
        return self._in_level_low(stream_buffer, field, start_id, end_id)

    def _end_high(self, stream_buffer, start_id, end_id):
        field = self._args.end_signal
        return self._in_level_high(stream_buffer, field, start_id, end_id)

    def _end_rising(self, stream_buffer, start_id, end_id):
        field = self._args.end_signal
        return self._in_edge_rising(stream_buffer, field, start_id, end_id)

    def _end_falling(self, stream_buffer, start_id, end_id):
        field = self._args.end_signal
        return self._in_edge_falling(stream_buffer, field, start_id, end_id)

    def _end_duration(self, stream_buffer, start_id, end_id):
        d = int(self._args.capture_duration * stream_buffer.output_sampling_frequency)
        d += self._time_start[0]
        if end_id > d:
            return d
        else:
            return None

    def __call__(self, stream_buffer):
        start_id, end_id = stream_buffer.sample_id_range
        if self._sample_id_last is not None and start_id < self._sample_id_last:
            start_id = self._sample_id_last
        if start_id >= end_id:
            return False  # nothing to process
        while start_id < end_id:
            if not self._triggered:
                log.info(f'process {start_id} {end_id} await')
                trigger_id = self._start_fn(stream_buffer, start_id, end_id)
                if trigger_id is None:
                    start_id = end_id
                else:
                    self.start(trigger_id)
                    start_id = trigger_id + 1
            else:
                log.info(f'process {start_id} {end_id} triggered')
                trigger_id = self._end_fn(stream_buffer, start_id, end_id)
                if trigger_id is None:
                    self._add(stream_buffer, start_id, end_id)
                    start_id = end_id
                else:
                    if start_id + 2 < trigger_id:
                        self._add(stream_buffer, start_id, trigger_id - 1)
                    self.stop(trigger_id)
                    start_id = trigger_id + 1
                    if self._args.count and self._count >= self._args.count:
                        return True
        self._sample_id_last = end_id
        return False
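# Hedged usage sketch for the Capture class above. It assumes a polling loop
# that repeatedly hands the device's stream buffer to the Capture instance;
# __call__ returns True once the requested trigger count has been reached.
# The real script's wiring (and its argument handling) may differ.
import time

def run_capture(device, args):
    capture = Capture(device, args)
    try:
        device.start()
        while not capture(device.stream_buffer):
            time.sleep(0.01)
    finally:
        device.stop()
        capture.close()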
class LoggerDevice:

    def __init__(self, parent, device_str):
        self.is_open = False
        self._parent = weakref.ref(parent)
        self._device_str = device_str
        self._device = None
        self._f_csv = None
        self._jls_recorder = None
        self._last = None  # (all values in csv)
        self._offset = [0.0, 0.0, 0.0]  # [time, charge, energy]
        self._downsample_counter = 0
        self._downsample_state = {
            'µ': np.zeros(3, dtype=np.float64),
            'min': np.zeros(1, dtype=np.float64),
            'max': np.zeros(1, dtype=np.float64),
        }
        self._downsample_state_reset()

    def _downsample_state_reset(self):
        self._downsample_state['µ'][:] = 0.0
        self._downsample_state['min'][:] = FLOAT_MAX
        self._downsample_state['max'][:] = -FLOAT_MAX

    def __str__(self):
        return self._device_str

    @property
    def csv_filename(self):
        sn = self._device_str.split(':')[-1]
        return self._parent()._base_filename + '_' + sn + '.csv'

    def _resume(self):
        fname = self.csv_filename
        if not os.path.isfile(fname):
            return
        sz = os.path.getsize(fname)
        self._last = LAST_INITIALIZE
        with open(fname, 'rt') as f:
            f.seek(max(0, sz - CSV_SEEK))
            for line in f.readlines()[-1::-1]:
                if line.startswith('#'):
                    continue
                self._last = tuple([float(x) for x in line.strip().split(',')])
                self._offset = [0.0, self._last[-2], self._last[-1]]
                return

    def open(self, device):
        if self.is_open:
            return
        if str(device) != str(self):
            raise ValueError('Mismatch device')
        parent = self._parent()
        parent.on_event('DEVICE', 'OPEN ' + self._device_str)
        self._resume()
        self._f_csv = open(self.csv_filename, 'at+')
        f = parent._frequency
        source = parent._source
        device.parameter_set('reduction_frequency', f'{f} Hz')
        jls_sampling_frequency = parent._jls_sampling_frequency
        if jls_sampling_frequency is not None:
            device.parameter_set('sampling_frequency', jls_sampling_frequency)
            device.parameter_set('buffer_duration', 2)
        device.open(event_callback_fn=self.on_event_cbk)
        if jls_sampling_frequency is not None:
            time_str = now_str()
            base_filename = 'jslog_%s_%s.jls' % (time_str, device.device_serial_number)
            filename = os.path.join(BASE_PATH, base_filename)
            self._jls_recorder = DataRecorder(filename, calibration=device.calibration)
            device.stream_process_register(self._jls_recorder)
        info = device.info()
        self._parent().on_event('DEVICE_INFO', json.dumps(info))
        device.statistics_callback_register(self.on_statistics, source)
        device.parameter_set('i_range', 'auto')
        device.parameter_set('v_range', '15V')
        if source == 'stream_buffer' or jls_sampling_frequency is not None:
            device.parameter_set('source', 'raw')
        device.start(stop_fn=self.on_stop)
        self._device = device
        self.is_open = True
        return self

    def stop(self):
        try:
            if self._device is not None:
                self._device.stop()
            if self._jls_recorder is not None:
                self._device.stream_process_unregister(self._jls_recorder)
                self._jls_recorder.close()
                self._jls_recorder = None
        except Exception:
            pass
        charge, energy = self._last[4], self._last[5]
        msg = f'{self._device} : duration={self.duration:.0f}, charge={charge:g}, energy={energy:g}'
        print(msg)
        return msg

    def on_event_cbk(self, event=0, message=''):
        self._parent().on_event_cbk(self, event, message)

    def on_stop(self, event=0, message=''):
        self._parent().on_stop(self, event, message)

    def close(self):
        self._last = None
        self.is_open = False
        if self._device is not None:
            self._device.close()
            self._device = None
        if self._f_csv is not None:
            self._f_csv.close()
            self._f_csv = None

    def write(self, text):
        if self._f_csv is not None:
            self._f_csv.write(text)

    def status(self):
        return self._device.status()

    @property
    def duration(self):
        now = time.time()
        t = now - self._parent()._start_time_s + self._offset[0]
        return t

    def on_statistics(self, data):
        """Process the next Joulescope downsampled 2 Hz data.

        :param data: The Joulescope statistics data.
            See :meth:`joulescope.View.statistics_get` for details.
        """
        # called from the Joulescope device thread
        parent = self._parent()
        if self._last is None:
            self._last = LAST_INITIALIZE
            columns = ['time', 'current', 'voltage', 'power',
                       'charge', 'energy', 'current_min', 'current_max']
            units = [
                's',
                data['signals']['current']['µ']['units'],
                data['signals']['voltage']['µ']['units'],
                data['signals']['power']['µ']['units'],
                data['accumulators']['charge']['units'],
                data['accumulators']['energy']['units'],
                data['signals']['current']['µ']['units'],
                data['signals']['current']['µ']['units'],
            ]
            columns_csv = ','.join(columns)
            units_csv = ','.join(units)
            parent.on_event('PARAM', f'columns={columns_csv}')
            parent.on_event('PARAM', f'units={units_csv}')
            if parent._header in ['simple']:
                self._f_csv.write(f'{columns_csv}\n')
            elif parent._header in ['comment', 'full']:
                self._f_csv.write(f'#= header={columns_csv}\n')
                self._f_csv.write(f'#= units={units_csv}\n')
                self._f_csv.write(f'#= start_time={parent._start_time_s}\n')
                self._f_csv.write(f'#= start_str={parent._time_str}\n')
            self._f_csv.flush()
        t = self.duration
        i = data['signals']['current']['µ']['value']
        v = data['signals']['voltage']['µ']['value']
        p = data['signals']['power']['µ']['value']
        c = data['accumulators']['charge']['value'] + self._offset[1]
        e = data['accumulators']['energy']['value'] + self._offset[2]
        i_min = data['signals']['current']['min']['value']
        i_max = data['signals']['current']['max']['value']
        self._downsample_state['µ'] += [i, v, p]
        self._downsample_state['min'] = np.minimum([i_min], self._downsample_state['min'])
        self._downsample_state['max'] = np.maximum([i_max], self._downsample_state['max'])
        self._downsample_counter += 1
        if self._downsample_counter >= parent._downsample:
            s = self._downsample_state['µ'] / self._downsample_counter
            self._downsample_counter = 0
            self._last = (t, *s, c, e,
                          *self._downsample_state['min'],
                          *self._downsample_state['max'])
            self._downsample_state_reset()
            self._f_csv.write('%.7f,%g,%g,%g,%.4f,%g,%g,%g\n' % self._last)
            self._f_csv.flush()
def test_init_with_filename(self):
    self.assertFalse(os.path.isfile(self._filename1))
    d = DataRecorder(self._filename1)
    self.assertTrue(os.path.isfile(self._filename1))
    d.close()
def test_init_with_file_handle(self):
    fh = io.BytesIO()
    d = DataRecorder(fh)
    d.close()
    self.assertGreater(len(fh.getbuffer()), 0)