Code example #1
0
 def testGetFile(self):
     """Logger starts on <prefix>_1.data and rolls to _2.data after GetFile."""
     fd, prefix = tempfile.mkstemp()
     os.close(fd)  # mkstemp returns an open fd; close it to avoid a leak.
     logger = data_logger.Logger(prefix)
     # os.path.join with a single argument was a no-op; compare directly.
     self.assertEqual('%s_1.data' % prefix, logger.file_path)
     logger.GetFile(256)  # Force a rollover to the next numbered data file.
     self.assertEqual('%s_2.data' % prefix, logger.file_path)
Code example #2
0
 def testWriteAndReadBack(self):
     """Round-trip: two protos written to disk are read back unchanged."""
     logger = data_logger.Logger(self.file_path)
     for _ in range(2):
         logger.WriteProto(self.point)
     logger.current_file.flush()  # Ensure bytes hit the file before reading.
     file_points = list(logger.ReadProtos())
     self.assertEqual([self.point, self.point], file_points)
Code example #3
0
File: replay_data.py  Project: djhedges/exit_speed
def ReplayLog(filepath, include_sleep=False):
  """Replays logged data; extremely useful for LED testing.

  Args:
    filepath: A string of the path of lap data.
    include_sleep: If True the replay sleeps between points to simulate how
                   the data was processed in real time.

  Returns:
    A exit_speed_main.ExitSpeed instance that has replayed the given data.
  """
  logging.info('Replaying %s', filepath)
  logger = data_logger.Logger(filepath)
  points = list(logger.ReadProtos())
  logging.info('Number of points %d', len(points))
  if include_sleep:
    replay_start = time.time()
    # Shift logged timestamps so the first point lines up with "now".
    time_shift = int(replay_start * 1e9 - points[0].time.ToNanoseconds())
    session_start = None
  else:
    FLAGS.set_default('commit_cycle', 10000)
  es = exit_speed_main.ExitSpeed(live_data=not include_sleep)
  es.point = points[0]
  es.point_queue.put(points[0].SerializeToString())
  # The car name is encoded as the parent directory of the log file.
  es.config['car'] = os.path.split(os.path.dirname(filepath))[1]
  es.InitializeSubProcesses()
  es.AddNewLap()
  for point in points:
    if include_sleep:
      point.time.FromNanoseconds(point.time.ToNanoseconds() + time_shift)
      # 'is None' instead of a falsy check: a 0.0 start time is valid.
      if session_start is None:
        session_start = point.time.ToMilliseconds() / 1000

    es.point = point
    es.ProcessLap()
    if include_sleep:
      # Sleep until wall time catches up with the point's logged time.
      run_delta = time.time() - replay_start
      point_delta = point.time.ToMilliseconds() / 1000 - session_start
      if run_delta < point_delta:
        time.sleep(point_delta - run_delta)

  if not include_sleep:
    time.sleep(1)
    qsize = len(es.timescale.point_queue)
    # Drain the Timescale queue; sleep briefly so the wait does not
    # busy-spin an entire CPU core (the old loop polled with no delay).
    while qsize > 0:
      time.sleep(0.1)
      qsize = len(es.timescale.point_queue)
      logging.log_every_n_seconds(logging.INFO, 'Queue size %s', 2, qsize)
    es.timescale.stop_process_signal.value = True
    # Replaced leftover debug print() calls with a proper log line.
    join_start = time.time()
    es.timescale.process.join(10)
    logging.info('Timescale process join took %.2fs', time.time() - join_start)
  return es
Code example #4
0
    def testMidHeader(self):
        """A stray length header appended after a full record is ignored."""
        logger = data_logger.Logger(self.file_path)
        logger.WriteProto(self.point)
        logger.current_file.flush()
        # Append a bogus length header (value 1) with no payload behind it.
        with open(logger.file_path, 'ab') as temp_file:
            temp_file.write((1).to_bytes(logger.current_proto_len,
                                         data_logger.BYTE_ORDER))

        file_points = list(logger.ReadProtos())
        self.assertEqual([self.point], file_points)
Code example #5
0
 def testReadSplitDataFiles(self):
     """Protos written after a file split are read back from the new file."""
     logger = data_logger.Logger(self.file_path)
     logger.GetFile(256)  # Roll over to the second data file.
     self.assertEqual(2, logger.current_proto_len)
     # Single-argument os.path.join is an identity; compare directly.
     self.assertEqual('%s_2.data' % logger.file_prefix, logger.file_path)
     logger.WriteProto(self.point)
     logger.current_file.flush()
     file_points = list(logger.ReadProtos())
     self.assertEqual([self.point], file_points)
Code example #6
0
    def testShortWrite(self):
        """A truncated final record is dropped; earlier records still parse."""
        logger = data_logger.Logger(self.file_path)
        for _ in range(2):
            logger.WriteProto(self.point)
        logger.current_file.flush()
        # Chop the last byte off the file to simulate an interrupted write.
        with open(logger.file_path, 'rb') as temp_file:
            contents = temp_file.read()
        with open(logger.file_path, 'wb') as temp_file:
            temp_file.write(contents[:-1])

        file_points = list(logger.ReadProtos())
        self.assertEqual([self.point], file_points)
Code example #7
0
def IsFileAlreadySynced(timescale_conn: psycopg2.extensions.connection,
                        filepath: Text) -> bool:
    """Returns True if the given data file does not need syncing.

    A file is considered already synced when it contains no points, when a
    session matching the first point's timestamp already exists in Timescale,
    or when it was recorded at the test parking lot.

    Args:
      timescale_conn: An open psycopg2 connection to the Timescale database.
      filepath: Path to the data file to check.
    """
    logger = data_logger.Logger(filepath)
    first_point = None
    for point in logger.ReadProtos():
        first_point = point
        break  # Only the first point is needed.
    if first_point is None:
        return True  # Data file has no points.
    session_time = first_point.time.ToJsonString()
    cursor = timescale_conn.cursor()
    # BUG FIX: psycopg2's cursor.execute() always returns None, so the old
    # `if cursor.execute(...)` check could never detect an existing session.
    # Execute, then look for a returned row instead.
    cursor.execute(SELECT_SESSION, (session_time,))
    if cursor.fetchone():
        return True
    _, track, _ = tracks.FindClosestTrack(first_point)
    if track.name == 'Test Parking Lot':
        return True  # Skip the test parking lot.  Mostly development files.
    return False
Code example #8
0
 def _InitializeDataLogger(self, point: gps_pb2.Point):
     """Creates the data logger using a file prefix derived from the point."""
     prefix = self.GetLogFilePrefix(point)
     logging.info('Logging data to %s', prefix)
     self.data_logger = data_logger.Logger(prefix)