Beispiel #1
0
 def __init__(self, stream_data_path, timing_data_path):
     """Open the recorded byte stream and load its chunk timing table.

     The timing CSV holds (timestamp, cumulative byte count) rows.  The
     stream file handle is kept open for later reads; the CSV handle is
     closed here (the original version leaked it to the garbage collector).
     """
     self.input_file = open(stream_data_path, "rb")

     # Read the whole timing table up front, closing the CSV promptly.
     with open(timing_data_path) as timing_file:
         self.timings = collections.deque(
             (float(timestamp_str), int(byte_count_str))
             for timestamp_str, byte_count_str in csv.reader(timing_file))

     # Offset that maps the first recorded timestamp onto "now", so
     # playback is paced relative to the moment this object is created.
     # NOTE(review): assumes the timing CSV is non-empty — an empty file
     # would raise IndexError here, as in the original.
     start_time = zephyr.time()
     first_timestamp = self.timings[0][0]
     self.timestamp_correction = start_time - first_timestamp
    def __init__(self, stream_data_path, timing_data_path):
        """Open *stream_data_path* for replay and load the chunk timing CSV.

        Each timing row is (timestamp, cumulative byte count).  The stream
        file handle intentionally stays open for later reads; the CSV
        handle is closed here instead of being leaked.
        """
        self.input_file = open(stream_data_path, "rb")

        with open(timing_data_path) as timing_file:
            self.timings = collections.deque(
                (float(timestamp_str), int(byte_count_str))
                for timestamp_str, byte_count_str in csv.reader(timing_file))

        # Correction that shifts the first recorded timestamp to "now".
        # NOTE(review): an empty timing file raises IndexError here, the
        # same as the original code.
        start_time = zephyr.time()
        self.timestamp_correction = start_time - self.timings[0][0]
Beispiel #3
0
 def estimate_and_correct_timestamp(self, timestamp, key):
     """Shift *timestamp* from the device clock toward the local clock.

     Keeps a sliding window of observed clock offsets per *key* and
     subtracts the window's mean from the raw timestamp.  Returns the
     timestamp unchanged when estimation is disabled module-wide.
     """
     if DISABLE_CLOCK_DIFFERENCE_ESTIMATION:
         return timestamp

     history = self._clock_difference_deques[key]
     # Record how far ahead of the local clock this sample appears to be.
     history.append(timestamp - zephyr.time())

     mean_offset = sum(history) / float(len(history))
     return timestamp - mean_offset
Beispiel #4
0
 def cleanup_if_needed(self):
     """Prune samples/events older than the history window, at most
     once every 5 seconds."""
     now = zephyr.time()
     # Rate-limit: skip unless the last cleanup is more than 5 s old.
     if now - self.last_cleanup_time <= 5.0:
         return

     cutoff = now - self.history_length_seconds
     for history in self._signal_stream_histories.values():
         history.clean_up_samples_before(cutoff)
     for stream in self._event_streams.values():
         stream.clean_up_events_before(cutoff)

     self.last_cleanup_time = now
Beispiel #5
0
 def cleanup_if_needed(self):
     """Discard history older than the configured window; throttled so a
     full sweep runs no more often than every 5 seconds."""
     now = zephyr.time()

     due = self.last_cleanup_time < now - 5.0
     if due:
         limit = now - self.history_length_seconds

         for container in self._signal_stream_histories.values():
             container.clean_up_samples_before(limit)

         for container in self._event_streams.values():
             container.clean_up_events_before(limit)

         self.last_cleanup_time = now
Beispiel #6
0
 def run(self):
     """Poll the collected streams and deliver delayed samples to callbacks.

     Loops until ``terminate_requested`` is set.  Each pass emits every
     not-yet-delivered sample whose per-stream delayed deadline has
     passed, then sleeps briefly to avoid busy-waiting.
     """
     while not self.terminate_requested:
         now = zephyr.time()
         streams = itertools.chain(
             self.signal_collector.iterate_signal_stream_histories(),
             self.signal_collector.iterate_event_streams())

         for name, history in streams:
             # Each stream may lag by its own configured delay.
             horizon = now - self.specific_delays.get(name, self.default_delay)

             start = self.stream_output_positions[name]
             for sample in history.iterate_samples(start, horizon):
                 self.stream_output_positions[name] += 1
                 for callback in self.callbacks:
                     callback(name, sample)

         time.sleep(0.01)
Beispiel #7
0
 def read_byte(self):
     """Return the next byte of the recording, pacing reads so bytes
     appear at (corrected) original chunk timestamps.

     Raises EOFError once the timing table is exhausted.
     """
     if not self.timings:
         raise EOFError("End of file reached")

     timestamp_value, byte_count_value = self.timings[0]
     chunk_deadline = float(timestamp_value) + self.timestamp_correction
     chunk_end_position = int(byte_count_value)

     # Sleep until the moment this chunk was originally produced.
     wait = chunk_deadline - zephyr.time()
     if wait > 0:
         zephyr.sleep(wait)

     byte = self.input_file.read(1)

     # Once the chunk's byte span is consumed, advance to the next chunk.
     if self.input_file.tell() >= chunk_end_position:
         self.timings.popleft()

     return byte
Beispiel #8
0
    def run(self):
        """Main loop: repeatedly flush due samples from every stream to the
        registered callbacks until termination is requested.

        A sample is "due" once the wall clock has advanced past its
        stream-specific delay; delivery position is tracked per stream.
        """
        while not self.terminate_requested:
            self._deliver_due_samples(zephyr.time())
            time.sleep(0.01)

    def _deliver_due_samples(self, now):
        """One polling pass: emit every sample older than its stream's delay."""
        all_streams = itertools.chain(
            self.signal_collector.iterate_signal_stream_histories(),
            self.signal_collector.iterate_event_streams())

        for stream_name, stream_history in all_streams:
            delay = self.specific_delays.get(stream_name, self.default_delay)
            deadline = now - delay

            position = self.stream_output_positions[stream_name]
            for sample in stream_history.iterate_samples(position, deadline):
                self.stream_output_positions[stream_name] += 1
                for callback in self.callbacks:
                    callback(stream_name, sample)
    def read_byte(self):
        """Read one byte from the replayed stream, delaying until the
        corrected timestamp of the byte's chunk has been reached.

        Raises EOFError when no timing entries remain.
        """
        if not self.timings:
            raise EOFError("End of file reached")

        raw_timestamp, raw_byte_count = self.timings[0]
        release_time = float(raw_timestamp) + self.timestamp_correction
        chunk_boundary = int(raw_byte_count)

        # Block until the chunk's original production time.
        remaining = release_time - zephyr.time()
        if remaining > 0:
            zephyr.sleep(remaining)

        result = self.input_file.read(1)

        # Drop the timing entry once we have read past its byte boundary.
        if self.input_file.tell() >= chunk_boundary:
            self.timings.popleft()

        return result