def _create_file_insert(self, packet_index, count_incr, count_total):
    """Stream USB packets through a buffer and record them to memory.

    :param packet_index: The first packet index to record.
    :param count_incr: The number of packets inserted per iteration.
    :param count_total: The total number of packets to stream.
    :return: An :class:`io.BytesIO` containing the recorded file,
        rewound to position 0.
    """
    buffer = StreamBuffer(10.0, [10], 1000.0)
    buffer.suppress_mode = 'off'
    if packet_index > 0:
        # Advance the buffer past the packets preceding our window.
        buffer.insert(usb_packet_factory(0, packet_index - 1))
        buffer.process()

    fh = io.BytesIO()
    recorder = DataRecorder(fh)
    sample_start = buffer.sample_id_range[1]
    for _ in range(0, count_total, count_incr):
        buffer.insert(usb_packet_factory(packet_index, count_incr))
        buffer.process()
        sample_end = buffer.sample_id_range[1]
        # Record only the samples produced by this iteration's packets.
        recorder.insert(buffer.samples_get(sample_start, sample_end))
        sample_start = sample_end
        packet_index += count_incr
    recorder.close()
    fh.seek(0)
    return fh
# ---- Example #2 ----
    def _export_jls(self, data):
        """Export each block of *data* to a JLS file at ``self._filename``.

        :param data: An iterable of sample blocks, also providing
            ``data.calibration.data`` for the recorder.

        The recorder is closed even if iteration raises.
        """
        recorder = DataRecorder(self._filename,
                                calibration=data.calibration.data)
        try:
            for block in data:
                log.info('export_jls iteration')
                recorder.insert(block)
        finally:
            recorder.close()
# ---- Example #3 ----
def run(cmd_queue, filehandle, calibration, logging_queue):
    """DataRecorder worker-process entry point.

    Reads ``(cmd, args)`` tuples from *cmd_queue*:
      * ``'stream_notify'``: ``args = (data,)`` — insert data into the recorder.
      * ``'close'``: close the recorder, acknowledge on the queue, and exit.
    """
    worker_configurer(logging_queue)
    log = logging.getLogger(__name__)
    log.info('DataRecorder process start')
    recorder = DataRecorder(filehandle, calibration)
    while True:
        cmd, args = cmd_queue.get()
        if cmd == 'close':
            log.info('DataRecorder closing')
            recorder.close()
            break
        elif cmd == 'stream_notify':
            data, = args
            recorder.insert(data)
    # Acknowledge the close so the parent knows we are done.
    cmd_queue.put('close')
    log.info('DataRecorder process end')
# ---- Example #4 ----
def run(cmd_queue, filehandle, calibration, logging_queue):
    """DataRecorder worker-process entry point with a polling loop.

    Reads ``(cmd, args)`` tuples from *cmd_queue* with a 1 second timeout:
      * ``'stream_notify'``: ``args = (data,)`` — insert data into the recorder.
      * ``'close'``: close the recorder and exit.
    """
    worker_configurer(logging_queue)
    log = logging.getLogger(__name__)
    log.info('run start')
    r = DataRecorder(filehandle, calibration)
    while True:
        try:
            cmd, args = cmd_queue.get(timeout=1.0)
            if cmd == 'stream_notify':
                data, = args
                r.insert(data)
            elif cmd == 'close':
                log.info('run closing')
                r.close()
                break
        except Empty:
            pass  # timeout with no command: keep polling
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt and made the worker unkillable.  Catch only
            # Exception so the process can still be terminated.
            log.exception("run exception during loop")
    log.info('run end')
# ---- Example #5 ----
def run():
    """Copy all samples from the input file to the output file.

    Reads one-second blocks from ``args.infile`` and writes them to
    ``args.outfile``, reporting progress along the way.

    :return: 0 on success (process exit code).
    """
    args = get_parser().parse_args()
    reader = DataReader()
    reader.open(args.infile)
    try:
        s_min, s_max = reader.sample_id_range
        sample_count = s_max - s_min
        writer = DataRecorder(args.outfile, reader.calibration, reader.user_data)
        try:
            block_size = int(reader.sampling_frequency)
            print(f'samples={sample_count}, fs={reader.sampling_frequency}')
            # Ceiling division: the final block may be shorter than block_size.
            block_count = (sample_count + block_size - 1) // block_size
            for block in range(block_count):
                offset = block * block_size
                offset_next = min(offset + block_size, sample_count)
                data = reader.samples_get(offset, offset_next, 'samples')
                writer.insert(data)
                progress(block, block_count - 1)
        finally:
            # Original leaked the writer (and reader) on any exception
            # during the copy loop; guarantee cleanup.
            writer.close()
    finally:
        reader.close()
    return 0
class Capture:
    """Trigger-driven capture of samples from a stream buffer.

    Watches a stream buffer for a configured start condition, accumulates
    current/voltage/power statistics (and optionally a JLS recording) while
    triggered, then stops on the configured end condition and emits one CSV
    statistics row per capture.
    """

    def __init__(self, device, args):
        self._device = device
        self._args = args
        self._timestamp = None
        self._record = None  # active DataRecorder while recording, else None
        self._csv = None
        self._count = 0  # number of completed captures
        self._triggered = False
        self._sample_id_last = None
        # Resolve the configured start/end condition handlers by name,
        # e.g. args.start == 'rising' -> self._start_rising.
        self._start_fn = getattr(self, f'_start_{args.start}')
        self._end_fn = getattr(self, f'_end_{args.end}')
        self._current = Signal()
        self._voltage = Signal()
        self._power = Signal()
        self._charge = 0  # in 1e-15 C, use Python int for infinite precision
        self._energy = 0  # in 1e-15 J, use Python int for infinite precision
        self._time_start = None  # [sample, timestr]
        self._time_end = None

        if self._args.csv is not None:
            self._csv = open(self._args.csv, 'wt')
            self._csv.write(f'#{COLUMNS}\n')

    def _construct_record_filename(self):
        """Return a unique JLS filename from the UTC time and capture count."""
        time_start = datetime.datetime.utcnow()
        timestamp_str = time_start.strftime('%Y%m%d_%H%M%S')
        return f'{timestamp_str}_{self._count + 1:04d}.jls'

    def start(self, start_id):
        """Begin a capture at sample *start_id*, stopping any active one."""
        if self._triggered:
            self.stop()
        self._time_start = [start_id, _current_time_str()]
        log.info(f'start {start_id}')
        if self._args.display_trigger:
            print(f'start {start_id}')
        # Reset all accumulated statistics for the new capture.
        self._current.clear()
        self._voltage.clear()
        self._power.clear()
        self._charge = 0
        self._energy = 0
        if self._args.record:
            filename = self._construct_record_filename()
            self._record = DataRecorder(filename,
                                        calibration=self._device.calibration)
        self._triggered = True
        return start_id

    def stop(self, end_id=None):
        """End the active capture at *end_id* and emit its statistics row.

        No-op when not triggered.  Returns *end_id* (or None if not
        triggered).
        """
        if not self._triggered:
            return
        if end_id is None:
            end_id = self._sample_id_last
        self._time_end = [end_id, _current_time_str()]
        if self._record:
            self._record.close()
            self._record = None
        log.info(f'stop {end_id}')
        if self._args.display_trigger:
            print(f'stop {end_id}')

        self._count += 1
        current = self._current.result()
        voltage = self._voltage.result()
        power = self._power.result()
        charge = self._charge / GAIN
        energy = self._energy / GAIN
        # Row layout: start info, end info, per-signal stats, then
        # charge (C, Ah-equivalent) and energy (J, Wh-equivalent).
        r = self._time_start + self._time_end + \
            current + voltage + power + \
            [charge, charge / 3600.0] + [energy, energy / 3600.0]
        results = []
        for x in r:
            if x is None:
                results.append('NAN')
            elif isinstance(x, int):
                results.append(str(x))
            elif isinstance(x, str):
                results.append(x)
            else:
                results.append('%g' % x)
        line = ','.join(results)

        if self._args.display_stats:
            if self._count == 1:
                print(COLUMNS)
            print(line)
        if self._csv is not None:
            self._csv.write(line + '\n')
            self._csv.flush()
        self._triggered = False
        return end_id

    def close(self):
        """Stop any active capture and close the CSV output file."""
        self.stop()
        if self._csv is not None:
            self._csv.close()

    def _in_level_low(self, stream_buffer, field, start_id, end_id):
        """Return the first sample id in [start_id, end_id) where *field*
        is low, or None if it stays high."""
        field = FIELD_MAP.get(field, field)
        gpi = stream_buffer.samples_get(start_id, end_id, fields=field)
        if not np.all(gpi):  # found trigger
            return start_id + int(np.argmin(gpi))
        return None

    def _in_level_high(self, stream_buffer, field, start_id, end_id):
        """Return the first sample id in [start_id, end_id) where *field*
        is high, or None if it stays low."""
        field = FIELD_MAP.get(field, field)
        gpi = stream_buffer.samples_get(start_id, end_id, fields=field)
        if np.any(gpi):  # found trigger
            return start_id + int(np.argmax(gpi))
        return None

    def _in_edge_rising(self, stream_buffer, field, start_id, end_id):
        """Return the sample id of the first rising edge of *field* in
        [start_id, end_id), or None.  At stream start (start_id <= 0),
        an initially-high signal counts as an edge."""
        field = FIELD_MAP.get(field, field)
        if start_id <= 0:
            gpi = stream_buffer.samples_get(start_id, end_id, fields=field)
            if bool(gpi[0]):
                return start_id
        else:
            gpi = stream_buffer.samples_get(start_id, end_id, fields=field)
        gpi = gpi.astype(np.int8)
        d = np.diff(gpi)  # +1 marks a 0 -> 1 transition
        if np.any(d >= 1):  # found trigger
            return start_id + int(np.argmax(d))
        return None

    def _in_edge_falling(self, stream_buffer, field, start_id, end_id):
        """Return the sample id of the first falling edge of *field* in
        [start_id, end_id), or None.  At stream start (start_id <= 0),
        an initially-low signal counts as an edge."""
        field = FIELD_MAP.get(field, field)
        if start_id <= 0:
            gpi = stream_buffer.samples_get(start_id, end_id, fields=field)
            if not bool(gpi[0]):
                return start_id
        else:
            gpi = stream_buffer.samples_get(start_id, end_id, fields=field)
        gpi = gpi.astype(np.int8)
        d = np.diff(gpi)  # -1 marks a 1 -> 0 transition
        if np.any(d <= -1):  # found trigger
            return start_id + int(np.argmin(d))
        return None

    def _start_none(self, stream_buffer, start_id, end_id):
        # No start condition: trigger immediately.
        return start_id

    def _start_low(self, stream_buffer, start_id, end_id):
        field = self._args.start_signal
        return self._in_level_low(stream_buffer, field, start_id, end_id)

    def _start_high(self, stream_buffer, start_id, end_id):
        field = self._args.start_signal
        return self._in_level_high(stream_buffer, field, start_id, end_id)

    def _start_rising(self, stream_buffer, start_id, end_id):
        field = self._args.start_signal
        return self._in_edge_rising(stream_buffer, field, start_id, end_id)

    def _start_falling(self, stream_buffer, start_id, end_id):
        field = self._args.start_signal
        return self._in_edge_falling(stream_buffer, field, start_id, end_id)

    def _start_duration(self, stream_buffer, start_id, end_id):
        """Trigger args.start_duration seconds after the previous capture
        ended (or after the first sample seen)."""
        if self._time_end is None:
            # First call: anchor the duration to the first observed sample.
            self._time_end = [start_id, _current_time_str()]
        d = int(self._args.start_duration *
                stream_buffer.output_sampling_frequency)
        d += self._time_end[0]
        if end_id > d:
            return d
        else:
            return None

    def _add(self, stream_buffer, start_id, end_id):
        """Accumulate samples [start_id, end_id) into the statistics and
        the active recording (if any).  Non-finite samples are skipped."""
        data = stream_buffer.samples_get(start_id, end_id)
        i_all = data['signals']['current']['value']
        v_all = data['signals']['voltage']['value']
        p_all = data['signals']['power']['value']
        finite_idx = np.isfinite(i_all)
        i, v, p = i_all[finite_idx], v_all[finite_idx], p_all[finite_idx]
        if len(i) != len(i_all):
            print(f'Ignored {len(i_all) - len(i)} missing samples')
        if len(i):
            self._current.add(i)
            self._voltage.add(v)
            self._power.add(p)
            period = 1.0 / stream_buffer.output_sampling_frequency
            # Integrate charge/energy as scaled ints for exact accumulation.
            self._charge += int(np.sum(i) * period * GAIN)
            self._energy += int(np.sum(p) * period * GAIN)
            if self._record is not None:
                self._record.insert(data)
        return end_id

    def _end_none(self, stream_buffer, start_id, end_id):
        # No end condition: capture runs until stop()/close().
        return None

    def _end_low(self, stream_buffer, start_id, end_id):
        field = self._args.end_signal
        return self._in_level_low(stream_buffer, field, start_id, end_id)

    def _end_high(self, stream_buffer, start_id, end_id):
        field = self._args.end_signal
        return self._in_level_high(stream_buffer, field, start_id, end_id)

    def _end_rising(self, stream_buffer, start_id, end_id):
        field = self._args.end_signal
        return self._in_edge_rising(stream_buffer, field, start_id, end_id)

    def _end_falling(self, stream_buffer, start_id, end_id):
        field = self._args.end_signal
        return self._in_edge_falling(stream_buffer, field, start_id, end_id)

    def _end_duration(self, stream_buffer, start_id, end_id):
        """End the capture args.capture_duration seconds after it started."""
        d = int(self._args.capture_duration *
                stream_buffer.output_sampling_frequency)
        d += self._time_start[0]
        if end_id > d:
            return d
        else:
            return None

    def __call__(self, stream_buffer):
        """Process new samples from *stream_buffer*.

        Advances the trigger state machine over samples not yet seen.
        :return: True when args.count captures have completed (caller
            should stop), else False.
        """
        start_id, end_id = stream_buffer.sample_id_range
        if self._sample_id_last is not None and start_id < self._sample_id_last:
            start_id = self._sample_id_last
        if start_id >= end_id:
            return False  # nothing to process

        # NOTE: removed an unused samples_get(fields='current_lsb') call
        # here; its result was never read.
        while start_id < end_id:
            if not self._triggered:
                log.info(f'process {start_id} {end_id} await')
                trigger_id = self._start_fn(stream_buffer, start_id, end_id)
                if trigger_id is None:
                    start_id = end_id
                else:
                    self.start(trigger_id)
                    start_id = trigger_id + 1
            else:
                log.info(f'process {start_id} {end_id} triggered')
                trigger_id = self._end_fn(stream_buffer, start_id, end_id)
                if trigger_id is None:
                    self._add(stream_buffer, start_id, end_id)
                    start_id = end_id
                else:
                    if start_id + 2 < trigger_id:
                        self._add(stream_buffer, start_id, trigger_id - 1)
                    self.stop(trigger_id)
                    start_id = trigger_id + 1
            if self._args.count and self._count >= self._args.count:
                return True
            self._sample_id_last = start_id
        self._sample_id_last = end_id
        return False