def _read_channel_data(self, channel, offset=0, length=None):
    """Read raw data for a single channel, optionally restricted to a
    subrange given by offset and length.
    """
    if offset < 0:
        raise ValueError("offset must be non-negative")
    if length is not None and length < 0:
        raise ValueError("length must be non-negative")
    reader = self._get_reader()
    with Timer(log, "Allocate space for channel"):
        # Allocate space for data
        if length is None:
            num_values = len(channel) - offset
        else:
            num_values = min(length, len(channel) - offset)
        num_values = max(0, num_values)
        channel_data = get_data_receiver(channel, num_values, self._memmap_dir)
    with Timer(log, "Read data for channel"):
        # Now actually read all the data
        for chunk in reader.read_raw_data_for_channel(
                channel.path, offset, length):
            if chunk.data is not None:
                channel_data.append_data(chunk.data)
            if chunk.scaler_data is not None:
                for scaler_id, scaler_data in chunk.scaler_data.items():
                    channel_data.append_scaler_data(scaler_id, scaler_data)
    return channel_data
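# Illustrative usage sketch (the file, group, and channel names below are
# placeholders): _read_channel_data backs partial reads on a file opened in
# streaming mode, assuming the public TdmsFile.open / TdmsChannel.read_data
# entry points delegate here, e.g.:
#
#     with TdmsFile.open("example.tdms") as tdms_file:
#         channel = tdms_file["group name"]["channel name"]
#         # Read 50 values starting at index 100, without loading the rest
#         data = channel.read_data(offset=100, length=50)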
def _read_data(self, tdms_reader):
    """Read raw data for all channels in the file."""
    with Timer(log, "Allocate space"):
        # Allocate space for data
        for group in self.groups():
            for channel in group.channels():
                self._channel_data[channel.path] = get_data_receiver(
                    channel, len(channel), self._memmap_dir)
    with Timer(log, "Read data"):
        # Now actually read all the data
        for chunk in tdms_reader.read_raw_data():
            for (path, data) in chunk.channel_data.items():
                channel_data = self._channel_data[path]
                if data.data is not None:
                    channel_data.append_data(data.data)
                elif data.scaler_data is not None:
                    for scaler_id, scaler_data in data.scaler_data.items():
                        channel_data.append_scaler_data(
                            scaler_id, scaler_data)
    for group in self.groups():
        for channel in group.channels():
            channel_data = self._channel_data[channel.path]
            if channel_data is not None:
                channel._set_raw_data(channel_data)
    self.data_read = True
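# Illustrative usage sketch (file, group, and channel names are placeholders):
# _read_data is the eager path taken when a whole file is loaded at once,
# assuming the public TdmsFile.read entry point delegates here, e.g.:
#
#     tdms_file = TdmsFile.read("example.tdms")
#     channel = tdms_file["group name"]["channel name"]
#     all_data = channel[:]  # raw data was already populated by _read_data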