Code Example #1
  def test_unbiased(self):
    ekf = TestEKF(1e3, [0., 0.])
    dt = np.float64(1. / 100)

    sensor = SimpleSensor(0, 1, 2)
    readings = map(sensor.read, np.arange(1000))

    for reading in readings:
      ekf.update_scalar(reading)
      ekf.predict(dt)

    np.testing.assert_allclose(ekf.state, [[1000.], [100.]], 1e-4)
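
The expected state follows from the data: the sensor reads n at step n, so the reading advances one unit per dt = 0.01 s tick (about 100 units/s), and after the final update with reading 999 plus one predict step the position lands near 999 + 100 * 0.01 = 1000. TestEKF and SimpleSensor come from openpilot's Kalman utilities; the stand-in below is inferred purely from how SimpleSensor is called in these examples (an assumption, not the real implementation), reading it as SimpleSensor(idx, covar, dims) observing state component idx of a dims-dimensional state:

import numpy as np

class SensorReading(object):
    # bundle of one measurement: value, variance R, and observation row H
    def __init__(self, data, covar, obs_model):
        self.data = data
        self.covar = covar
        self.obs_model = obs_model

class SimpleSensor(object):
    def __init__(self, idx, covar, dims):
        self.obs_model = np.zeros((1, dims))
        self.obs_model[0, idx] = 1.   # observe a single state component
        self.covar = covar

    def read(self, data, covar=None):
        # hypothetical: package one measurement, optionally overriding R
        return SensorReading(data, covar if covar is not None else self.covar,
                             self.obs_model)
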
Code Example #2
    def __init__(self, VM, mocked):
        self.VM = VM
        self.mocked = mocked

        self.MP = ModelParser()
        self.tracks = defaultdict(dict)

        self.last_md_ts = 0
        self.last_controls_state_ts = 0

        self.active = 0
        self.steer_angle = 0.
        self.steer_override = False

        # Kalman filter stuff:
        self.ekfv = EKFV1D()
        self.speedSensorV = SimpleSensor(XV, 1, 2)

        # v_ego
        self.v_ego = 0.
        self.v_ego_hist_t = deque([0], maxlen=v_len)
        self.v_ego_hist_v = deque([0], maxlen=v_len)
        self.v_ego_t_aligned = 0.
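
Note: this snippet assumes module-level constants defined elsewhere in radard.py (XV is the EKF's distance-state index, imported alongside EKFV1D). The values below are the ones used in the full listings later on this page:

rate = 20.   # model and radar both run at 20 Hz
v_len = 20   # how many speed samples to keep for time alignment with radar data
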
Code Example #3
  def test_update_scalar(self):
    ekf = TestEKF(1e3, [0.1, 1])
    dt = 1. / 100

    sensor = SimpleSensor(0, 1, 2)
    readings = map(sensor.read, np.arange(100, 300))

    for reading in readings:
      ekf.update_scalar(reading)
      ekf.predict(dt)

    np.testing.assert_allclose(ekf.state, [[300], [100]], 1e-4)
    np.testing.assert_allclose(
      ekf.covar,
      np.asarray([[0.0563, 0.10278], [0.10278, 0.55779]]),
      atol=1e-4)
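
A quick sanity check of the expected state: the readings run from 100 to 299, so the final update sees 299; since readings advance one unit per dt = 0.01 s (about 100 units/s), the trailing predict step extrapolates to 299 + 100 * 0.01 = 300, matching the asserted state [[300], [100]].
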
Code Example #4
  def test_correctness(self):
    dt = 1. / 100
    reading = SimpleSensor(0, 1, 2).read(100)

    ekf = TestEKF(1e3, [0.1, 1])
    fast_ekf = FastEKF1D(dt, 1e3, [0.1, 1])

    ekf.update_scalar(reading)
    fast_ekf.update_scalar(reading)
    self.assertAlmostEqual(ekf.state[0], fast_ekf.state[0])
    self.assertAlmostEqual(ekf.state[1], fast_ekf.state[1])
    self.assertAlmostEqual(ekf.covar[0, 0], fast_ekf.covar[0])
    self.assertAlmostEqual(ekf.covar[0, 1], fast_ekf.covar[2])
    self.assertAlmostEqual(ekf.covar[1, 1], fast_ekf.covar[1])

    ekf.predict(dt)
    fast_ekf.predict(dt)
    self.assertAlmostEqual(ekf.state[0], fast_ekf.state[0])
    self.assertAlmostEqual(ekf.state[1], fast_ekf.state[1])
    self.assertAlmostEqual(ekf.covar[0, 0], fast_ekf.covar[0])
    self.assertAlmostEqual(ekf.covar[0, 1], fast_ekf.covar[2])
    self.assertAlmostEqual(ekf.covar[1, 1], fast_ekf.covar[1])
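
The assertions above pin down FastEKF1D's covariance layout: instead of a full matrix it keeps the symmetric 2x2 covariance as a flat triple, with covar[0] = var(position), covar[1] = var(speed), and covar[2] = their cross-covariance. A round-trip helper inferred from those assertions (a sketch, not part of the library):

import numpy as np

def packed_to_matrix(c):
    # c = [var_x, var_v, cov_xv] -> full symmetric 2x2 covariance
    return np.array([[c[0], c[2]],
                     [c[2], c[1]]])
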
Code Example #5
File: radard.py, Project: wlrnet/chffrplus
def radard_thread(gctx=None):
  set_realtime_priority(2)

  # wait for stats about the car to come in from controls
  cloudlog.info("radard is waiting for CarParams")
  CP = car.CarParams.from_bytes(Params().get("CarParams", block=True))
  mocked = CP.radarName == "mock"
  VM = VehicleModel(CP)
  cloudlog.info("radard got CarParams")

  # import the radar from the fingerprint
  cloudlog.info("radard is importing %s", CP.radarName)
  exec('from selfdrive.radar.'+CP.radarName+'.interface import RadarInterface')

  context = zmq.Context()

  # *** subscribe to features and model from visiond
  poller = zmq.Poller()
  model = messaging.sub_sock(context, service_list['model'].port, conflate=True, poller=poller)
  live100 = messaging.sub_sock(context, service_list['live100'].port, conflate=True, poller=poller)

  PP = PathPlanner()
  RI = RadarInterface()

  last_md_ts = 0
  last_l100_ts = 0

  # *** publish live20 and liveTracks
  live20 = messaging.pub_sock(context, service_list['live20'].port)
  liveTracks = messaging.pub_sock(context, service_list['liveTracks'].port)

  path_x = np.arange(0.0, 140.0, 0.1)    # 140 meters is max

  # Time-alignment
  rate = 20.   # model and radar are both at 20Hz
  tsv = 1./rate
  v_len = 20         # how many speed data points to remember for t alignment with rdr data

  active = 0
  steer_angle = 0.
  steer_override = False

  tracks = defaultdict(dict)

  # Kalman filter stuff:
  ekfv = EKFV1D()
  speedSensorV = SimpleSensor(XV, 1, 2)

  # v_ego
  v_ego = None
  v_ego_array = np.zeros([2, v_len])
  v_ego_t_aligned = 0.

  rk = Ratekeeper(rate, print_delay_threshold=np.inf)
  while 1:
    rr = RI.update()

    ar_pts = {}
    for pt in rr.points:
      ar_pts[pt.trackId] = [pt.dRel + RDR_TO_LDR, pt.yRel, pt.vRel, pt.measured]

    # receive the live100s
    l100 = None
    md = None

    for socket, event in poller.poll(0):
      if socket is live100:
        l100 = messaging.recv_one(socket)
      elif socket is model:
        md = messaging.recv_one(socket)

    if l100 is not None:
      active = l100.live100.active
      v_ego = l100.live100.vEgo
      steer_angle = l100.live100.angleSteers
      steer_override = l100.live100.steerOverride

      v_ego_array = np.append(v_ego_array, [[v_ego], [float(rk.frame)/rate]], 1)
      v_ego_array = v_ego_array[:, 1:]

      last_l100_ts = l100.logMonoTime

    if v_ego is None:
      continue

    if md is not None:
      last_md_ts = md.logMonoTime

    # *** get path prediction from the model ***
    PP.update(v_ego, md)

    # run kalman filter only if prob is high enough
    if PP.lead_prob > 0.7:
      ekfv.update(speedSensorV.read(PP.lead_dist, covar=PP.lead_var))
      ekfv.predict(tsv)
      ar_pts[VISION_POINT] = (float(ekfv.state[XV]), np.polyval(PP.d_poly, float(ekfv.state[XV])),
                              float(ekfv.state[SPEEDV]), False)
    else:
      ekfv.state[XV] = PP.lead_dist
      ekfv.covar = (np.diag([PP.lead_var, ekfv.var_init]))
      ekfv.state[SPEEDV] = 0.
      if VISION_POINT in ar_pts:
        del ar_pts[VISION_POINT]

    # *** compute the likely path_y ***
    if (active and not steer_override) or mocked:
      # use path from model (always when mocking as steering is too noisy)
      path_y = np.polyval(PP.d_poly, path_x)
    else:
      # use path from steer; set angle_offset to 0 since it reports more than just the physical offset
      path_y = calc_lookahead_offset(v_ego, steer_angle, path_x, VM, angle_offset=0)[0]

    # *** remove missing points from meta data ***
    for ids in tracks.keys():
      if ids not in ar_pts:
        tracks.pop(ids, None)

    # *** compute the tracks ***
    for ids in ar_pts:
      # ignore standalone vision point, unless we are mocking the radar
      if ids == VISION_POINT and not mocked:
        continue
      rpt = ar_pts[ids]

      # align v_ego by a fixed time to align it with the radar measurement
      cur_time = float(rk.frame)/rate
      v_ego_t_aligned = np.interp(cur_time - RI.delay, v_ego_array[1], v_ego_array[0])
      d_path = np.sqrt(np.amin((path_x - rpt[0]) ** 2 + (path_y - rpt[1]) ** 2))
      # add sign
      d_path *= np.sign(rpt[1] - np.interp(rpt[0], path_x, path_y))

      # create the track if it doesn't exist or it's a new track
      if ids not in tracks:
        tracks[ids] = Track()
      tracks[ids].update(rpt[0], rpt[1], rpt[2], d_path, v_ego_t_aligned, rpt[3], steer_override)

    # allow the vision model to remove the stationary flag if distance and rel speed roughly match
    if VISION_POINT in ar_pts:
      fused_id = None
      best_score = NO_FUSION_SCORE
      for ids in tracks:
        dist_to_vision = np.sqrt((0.5*(ar_pts[VISION_POINT][0] - tracks[ids].dRel)) ** 2 + (2*(ar_pts[VISION_POINT][1] - tracks[ids].yRel)) ** 2)
        rel_speed_diff = abs(ar_pts[VISION_POINT][2] - tracks[ids].vRel)
        tracks[ids].update_vision_score(dist_to_vision, rel_speed_diff)
        if best_score > tracks[ids].vision_score:
          fused_id = ids
          best_score = tracks[ids].vision_score

      if fused_id is not None:
        tracks[fused_id].vision_cnt += 1
        tracks[fused_id].update_vision_fusion()

    if DEBUG:
      print "NEW CYCLE"
      if VISION_POINT in ar_pts:
        print "vision", ar_pts[VISION_POINT]

    idens = tracks.keys()
    track_pts = np.array([tracks[iden].get_key_for_cluster() for iden in idens])

    # If we have multiple points, cluster them
    if len(track_pts) > 1:
      link = linkage_vector(track_pts, method='centroid')
      cluster_idxs = fcluster(link, 2.5, criterion='distance')
      clusters = [None]*max(cluster_idxs)

      for idx in xrange(len(track_pts)):
        cluster_i = cluster_idxs[idx]-1

        if clusters[cluster_i] is None:
          clusters[cluster_i] = Cluster()
        clusters[cluster_i].add(tracks[idens[idx]])
    elif len(track_pts) == 1:
      # TODO: why do we need this?
      clusters = [Cluster()]
      clusters[0].add(tracks[idens[0]])
    else:
      clusters = []

    if DEBUG:
      for i in clusters:
        print i
    # *** extract the lead car ***
    lead_clusters = [c for c in clusters
                     if c.is_potential_lead(v_ego)]
    lead_clusters.sort(key=lambda x: x.dRel)
    lead_len = len(lead_clusters)

    # *** extract the second lead from the whole set of leads ***
    lead2_clusters = [c for c in lead_clusters
                      if c.is_potential_lead2(lead_clusters)]
    lead2_clusters.sort(key=lambda x: x.dRel)
    lead2_len = len(lead2_clusters)

    # *** publish live20 ***
    dat = messaging.new_message()
    dat.init('live20')
    dat.live20.mdMonoTime = last_md_ts
    dat.live20.canMonoTimes = list(rr.canMonoTimes)
    dat.live20.radarErrors = list(rr.errors)
    dat.live20.l100MonoTime = last_l100_ts
    if lead_len > 0:
      lead_clusters[0].toLive20(dat.live20.leadOne)
      if lead2_len > 0:
        lead2_clusters[0].toLive20(dat.live20.leadTwo)
      else:
        dat.live20.leadTwo.status = False
    else:
      dat.live20.leadOne.status = False

    dat.live20.cumLagMs = -rk.remaining*1000.
    live20.send(dat.to_bytes())

    # *** publish tracks for UI debugging (keep last) ***
    dat = messaging.new_message()
    dat.init('liveTracks', len(tracks))

    for cnt, ids in enumerate(tracks.keys()):
      if DEBUG:
        print "id: %4.0f x:  %4.1f  y: %4.1f  vr: %4.1f d: %4.1f  va: %4.1f  vl: %4.1f  vlk: %4.1f alk: %4.1f  s: %1.0f" % \
          (ids, tracks[ids].dRel, tracks[ids].yRel, tracks[ids].vRel,
           tracks[ids].dPath, tracks[ids].vLat,
           tracks[ids].vLead, tracks[ids].vLeadK,
           tracks[ids].aLeadK,
           tracks[ids].stationary)
      dat.liveTracks[cnt].trackId = ids
      dat.liveTracks[cnt].dRel = float(tracks[ids].dRel)
      dat.liveTracks[cnt].yRel = float(tracks[ids].yRel)
      dat.liveTracks[cnt].vRel = float(tracks[ids].vRel)
      dat.liveTracks[cnt].aRel = float(tracks[ids].aRel)
      dat.liveTracks[cnt].stationary = tracks[ids].stationary
      dat.liveTracks[cnt].oncoming = tracks[ids].oncoming
    liveTracks.send(dat.to_bytes())

    rk.monitor_time()
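
The signed path distance computed above takes its magnitude from the nearest sample of the predicted path and its sign from which side of the path the radar point falls on. A self-contained sketch with a hypothetical point:

import numpy as np

path_x = np.arange(0.0, 140.0, 0.1)
path_y = np.zeros_like(path_x)      # straight-ahead path, for illustration
rpt = (25.0, -1.2)                  # hypothetical (dRel, yRel) radar point

d_path = np.sqrt(np.amin((path_x - rpt[0]) ** 2 + (path_y - rpt[1]) ** 2))
d_path *= np.sign(rpt[1] - np.interp(rpt[0], path_x, path_y))
# -> -1.2: distance to the nearest path sample, negative on one side of the
#    path and positive on the other
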
Code Example #6
def radard_thread(gctx=None):
  #print "===>>> File: controls/radard.py; FUnction: radard_thread"
  set_realtime_priority(1)

  # wait for stats about the car to come in from controls
  cloudlog.info("radard is waiting for CarParams")
  CP = car.CarParams.from_bytes(Params().get("CarParams", block=True))
  cloudlog.info("radard got CarParams")

  # import the radar from the fingerprint
  cloudlog.info("radard is importing %s", CP.radarName)
  exec('from selfdrive.radar.'+CP.radarName+'.interface import RadarInterface')

  context = zmq.Context()

  # *** subscribe to features and model from visiond
  model = messaging.sub_sock(context, service_list['model'].port)
  live100 = messaging.sub_sock(context, service_list['live100'].port)

  PP = PathPlanner()
  RI = RadarInterface()

  last_md_ts = 0
  last_l100_ts = 0

  # *** publish live20 and liveTracks
  live20 = messaging.pub_sock(context, service_list['live20'].port)
  liveTracks = messaging.pub_sock(context, service_list['liveTracks'].port)

  path_x = np.arange(0.0, 140.0, 0.1)    # 140 meters is max

  # Time-alignment
  rate = 20.   # model and radar are both at 20Hz
  tsv = 1./rate
  rdr_delay = 0.10   # radar data delay in s
  v_len = 20         # how many speed data points to remember for t alignment with rdr data

  enabled = 0
  steer_angle = 0.

  tracks = defaultdict(dict)
  
  # Kalman filter stuff: 
  ekfv = EKFV1D()
  speedSensorV = SimpleSensor(XV, 1, 2)

  # v_ego
  v_ego = None
  v_ego_array = np.zeros([2, v_len])
  v_ego_t_aligned = 0.

  rk = Ratekeeper(rate, print_delay_threshold=np.inf)
  while 1:
    rr = RI.update()

    ar_pts = {}
    for pt in rr.points:
      ar_pts[pt.trackId] = [pt.dRel + RDR_TO_LDR, pt.yRel, pt.vRel, pt.aRel, None, False, None]

    # receive the live100s
    l100 = messaging.recv_sock(live100)
    if l100 is not None:
      enabled = l100.live100.enabled
      v_ego = l100.live100.vEgo
      steer_angle = l100.live100.angleSteers

      v_ego_array = np.append(v_ego_array, [[v_ego], [float(rk.frame)/rate]], 1)
      v_ego_array = v_ego_array[:, 1:]

      last_l100_ts = l100.logMonoTime

    if v_ego is None:
      continue

    md = messaging.recv_sock(model)
    #print "============ RADAR Thread"
    #print md
    if md is not None:
      last_md_ts = md.logMonoTime

    # *** get path prediction from the model ***
    PP.update(sec_since_boot(), v_ego, md)

    # run kalman filter only if prob is high enough
    if PP.lead_prob > 0.7:
      ekfv.update(speedSensorV.read(PP.lead_dist, covar=PP.lead_var))
      ekfv.predict(tsv)
      ar_pts[VISION_POINT] = (float(ekfv.state[XV]), np.polyval(PP.d_poly, float(ekfv.state[XV])),
                              float(ekfv.state[SPEEDV]), np.nan, last_md_ts, np.nan, sec_since_boot())
    else:
      ekfv.state[XV] = PP.lead_dist
      ekfv.covar = (np.diag([PP.lead_var, ekfv.var_init]))
      ekfv.state[SPEEDV] = 0.
      if VISION_POINT in ar_pts:
        del ar_pts[VISION_POINT]

    # *** compute the likely path_y ***
    if enabled:    # use path from model path_poly
      path_y = np.polyval(PP.d_poly, path_x)
    else:          # use path from steer, set angle_offset to 0 since calibration does not exactly report the physical offset
      path_y = calc_lookahead_offset(v_ego, steer_angle, path_x, CP, angle_offset=0)[0]

    # *** remove missing points from meta data ***
    for ids in tracks.keys():
      if ids not in ar_pts:
        tracks.pop(ids, None)

    # *** compute the tracks ***
    for ids in ar_pts:
      # ignore the vision point for now
      if ids == VISION_POINT and not VISION_ONLY:
        continue
      elif ids != VISION_POINT and VISION_ONLY:
        continue
      rpt = ar_pts[ids]

      # align v_ego by a fixed time to align it with the radar measurement     
      cur_time = float(rk.frame)/rate
      v_ego_t_aligned = np.interp(cur_time - rdr_delay, v_ego_array[1], v_ego_array[0])
      d_path = np.sqrt(np.amin((path_x - rpt[0]) ** 2 + (path_y - rpt[1]) ** 2))

      # create the track if it doesn't exist or it's a new track
      if ids not in tracks or rpt[5] == 1:
        tracks[ids] = Track()
      tracks[ids].update(rpt[0], rpt[1], rpt[2], d_path, v_ego_t_aligned)

      # allow the vision model to remove the stationary flag if distance and rel speed roughly match
      if VISION_POINT in ar_pts:
        dist_to_vision = np.sqrt((0.5*(ar_pts[VISION_POINT][0] - rpt[0])) ** 2 + (2*(ar_pts[VISION_POINT][1] - rpt[1])) ** 2)
        rel_speed_diff = abs(ar_pts[VISION_POINT][2] - rpt[2])
        tracks[ids].mix_vision(dist_to_vision, rel_speed_diff)

    # publish tracks (debugging)
    dat = messaging.new_message()
    dat.init('liveTracks', len(tracks))
    for cnt, ids in enumerate(tracks.keys()):
      dat.liveTracks[cnt].trackId = ids
      dat.liveTracks[cnt].dRel = float(tracks[ids].dRel)
      dat.liveTracks[cnt].yRel = float(tracks[ids].yRel)
      dat.liveTracks[cnt].vRel = float(tracks[ids].vRel)
      dat.liveTracks[cnt].aRel = float(tracks[ids].aRel)
      dat.liveTracks[cnt].stationary = tracks[ids].stationary
      dat.liveTracks[cnt].oncoming = tracks[ids].oncoming
    liveTracks.send(dat.to_bytes())

    idens = tracks.keys()
    track_pts = np.array([tracks[iden].get_key_for_cluster() for iden in idens])

    # If we have multiple points, cluster them
    if len(track_pts) > 1:
      link = linkage_vector(track_pts, method='centroid')
      cluster_idxs = fcluster(link, 2.5, criterion='distance')
      clusters = [None]*max(cluster_idxs)

      for idx in xrange(len(track_pts)):
        cluster_i = cluster_idxs[idx]-1

        if clusters[cluster_i] is None:
          clusters[cluster_i] = Cluster()
        clusters[cluster_i].add(tracks[idens[idx]])
    elif len(track_pts) == 1:
      # TODO: why do we need this?
      clusters = [Cluster()]
      clusters[0].add(tracks[idens[0]])
    else:
      clusters = []

    # *** extract the lead car ***
    lead_clusters = [c for c in clusters
                     if c.is_potential_lead(v_ego)]
    lead_clusters.sort(key=lambda x: x.dRel)
    lead_len = len(lead_clusters)

    # *** extract the second lead from the whole set of leads ***
    lead2_clusters = [c for c in lead_clusters
                      if c.is_potential_lead2(lead_clusters)]
    lead2_clusters.sort(key=lambda x: x.dRel)
    lead2_len = len(lead2_clusters)

    # *** publish live20 ***
    dat = messaging.new_message()
    dat.init('live20')
    dat.live20.mdMonoTime = last_md_ts
    dat.live20.canMonoTimes = list(rr.canMonoTimes)
    dat.live20.l100MonoTime = last_l100_ts
    if lead_len > 0:
      lead_clusters[0].toLive20(dat.live20.leadOne)
      if lead2_len > 0:
        lead2_clusters[0].toLive20(dat.live20.leadTwo)
      else:
        dat.live20.leadTwo.status = False
    else:
      dat.live20.leadOne.status = False

    dat.live20.cumLagMs = -rk.remaining*1000.
    live20.send(dat.to_bytes())

    rk.monitor_time()
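
The clustering step in these listings pairs fastcluster's linkage_vector with SciPy's fcluster. For illustration, here is a self-contained sketch using SciPy's own linkage instead (assumed interchangeable for this purpose; radard uses fastcluster for speed):

import numpy as np
from scipy.cluster.hierarchy import linkage, fcluster

track_pts = np.array([[10.0,  0.8, 1.0],    # [dRel, yRel * 2, vRel] per track
                      [10.4,  1.0, 1.1],
                      [42.0, -6.0, 0.0]])

link = linkage(track_pts, method='centroid')
cluster_idxs = fcluster(link, 2.5, criterion='distance')  # 1-based labels
# -> e.g. [1, 1, 2]: the two nearby tracks merge, the distant one stands alone
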
Code Example #7
    def update(self, d_rel, y_rel, v_rel, d_path, v_ego_t_aligned):
        if self.initted:
            self.dPathPrev = self.dPath
            self.vLeadPrev = self.vLead
            self.vRelPrev = self.vRel

        # relative values, copy
        self.dRel = d_rel  # LONG_DIST
        self.yRel = y_rel  # -LAT_DIST
        self.vRel = v_rel  # REL_SPEED

        # compute distance to path
        self.dPath = d_path

        # computed velocity and accelerations
        self.vLead = self.vRel + v_ego_t_aligned

        if not self.initted:
            self.aRel = 0.  # nidec gives no information about this
            self.vLat = 0.
            self.aLead = 0.
        else:
            # estimate acceleration
            a_rel_unfilt = (self.vRel - self.vRelPrev) / ts
            a_rel_unfilt = clip(a_rel_unfilt, -10., 10.)
            self.aRel = k_a_lead * a_rel_unfilt + (1 - k_a_lead) * self.aRel

            v_lat_unfilt = (self.dPath - self.dPathPrev) / ts
            self.vLat = k_v_lat * v_lat_unfilt + (1 - k_v_lat) * self.vLat

            a_lead_unfilt = (self.vLead - self.vLeadPrev) / ts
            a_lead_unfilt = clip(a_lead_unfilt, -10., 10.)
            self.aLead = k_a_lead * a_lead_unfilt + (1 - k_a_lead) * self.aLead

        if self.stationary:
            # stationary objects can become non stationary, but not the other way around
            self.stationary = v_ego_t_aligned > v_ego_stationary and abs(
                self.vLead) < v_stationary_thr
        self.oncoming = self.vLead < v_oncoming_thr

        if self.ekf is None:
            self.ekf = FastEKF1D(ts, 1e3, [0.1, 1])
            self.ekf.state[SPEED] = self.vLead
            self.ekf.state[ACCEL] = 0
            self.lead_sensor = SimpleSensor(SPEED, 1, 2)

            self.vLeadK = self.vLead
            self.aLeadK = self.aLead
        else:
            self.ekf.update_scalar(self.lead_sensor.read(self.vLead))
            self.ekf.predict(ts)
            self.vLeadK = float(self.ekf.state[SPEED])
            self.aLeadK = float(self.ekf.state[ACCEL])

        if not self.initted:
            self.cnt = 1
            self.vision_cnt = 0
        else:
            self.cnt += 1

        self.initted = True
        self.vision = False
Code Example #8
class Track(object):
    def __init__(self):
        self.ekf = None
        self.stationary = True
        self.initted = False

    def update(self, d_rel, y_rel, v_rel, d_path, v_ego_t_aligned):
        if self.initted:
            self.dPathPrev = self.dPath
            self.vLeadPrev = self.vLead
            self.vRelPrev = self.vRel

        # relative values, copy
        self.dRel = d_rel  # LONG_DIST
        self.yRel = y_rel  # -LAT_DIST
        self.vRel = v_rel  # REL_SPEED

        # compute distance to path
        self.dPath = d_path

        # computed velocity and accelerations
        self.vLead = self.vRel + v_ego_t_aligned

        if not self.initted:
            self.aRel = 0.  # nidec gives no information about this
            self.vLat = 0.
            self.aLead = 0.
        else:
            # estimate acceleration
            a_rel_unfilt = (self.vRel - self.vRelPrev) / ts
            a_rel_unfilt = clip(a_rel_unfilt, -10., 10.)
            self.aRel = k_a_lead * a_rel_unfilt + (1 - k_a_lead) * self.aRel

            v_lat_unfilt = (self.dPath - self.dPathPrev) / ts
            self.vLat = k_v_lat * v_lat_unfilt + (1 - k_v_lat) * self.vLat

            a_lead_unfilt = (self.vLead - self.vLeadPrev) / ts
            a_lead_unfilt = clip(a_lead_unfilt, -10., 10.)
            self.aLead = k_a_lead * a_lead_unfilt + (1 - k_a_lead) * self.aLead

        if self.stationary:
            # stationary objects can become non stationary, but not the other way around
            self.stationary = v_ego_t_aligned > v_ego_stationary and abs(
                self.vLead) < v_stationary_thr
        self.oncoming = self.vLead < v_oncoming_thr

        if self.ekf is None:
            self.ekf = FastEKF1D(ts, 1e3, [0.1, 1])
            self.ekf.state[SPEED] = self.vLead
            self.ekf.state[ACCEL] = 0
            self.lead_sensor = SimpleSensor(SPEED, 1, 2)

            self.vLeadK = self.vLead
            self.aLeadK = self.aLead
        else:
            self.ekf.update_scalar(self.lead_sensor.read(self.vLead))
            self.ekf.predict(ts)
            self.vLeadK = float(self.ekf.state[SPEED])
            self.aLeadK = float(self.ekf.state[ACCEL])

        if not self.initted:
            self.cnt = 1
            self.vision_cnt = 0
        else:
            self.cnt += 1

        self.initted = True
        self.vision = False

    def mix_vision(self, dist_to_vision, rel_speed_diff):
        # rel speed is very hard to estimate from vision
        if dist_to_vision < 4.0 and rel_speed_diff < 10.:
            # vision point is never stationary
            self.vision_cnt += 1
            # don't trust 1 or 2 fusions until model quality is much better
            if self.vision_cnt >= 3:
                self.vision = True
                self.stationary = False

    def get_key_for_cluster(self):
        # Weigh y higher since radar is inaccurate in this dimension
        return [self.dRel, self.yRel * 2, self.vRel]
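
Track.update leans on one recurring pattern, k * new + (1 - k) * old (with gains k_a_lead and k_v_lat): a first-order low-pass, i.e. an exponential moving average over the raw finite differences. An equivalent helper:

def lowpass(prev, new, k):
    # k in (0, 1]: larger k tracks the raw signal faster but filters less noise
    return k * new + (1 - k) * prev
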
Code Example #9
def radard_thread(gctx=None):
    set_realtime_priority(1)

    context = zmq.Context()

    # *** subscribe to features and model from visiond
    model = messaging.sub_sock(context, service_list['model'].port)
    logcan = messaging.sub_sock(context, service_list['can'].port)
    live100 = messaging.sub_sock(context, service_list['live100'].port)

    PP = PathPlanner(model)

    # *** publish live20 and liveTracks
    live20 = messaging.pub_sock(context, service_list['live20'].port)
    liveTracks = messaging.pub_sock(context, service_list['liveTracks'].port)

    # subscribe to stats about the car
    # TODO: move this to new style packet
    VP = VehicleParams(False)  # same for ILX and civic

    ar_pts = {}
    path_x = np.arange(0.0, 140.0, 0.1)  # 140 meters is max

    # Time-alignment
    rate = 20.  # model and radar are both at 20Hz
    tsv = 1. / rate
    rdr_delay = 0.10  # radar data delay in s
    v_len = 20  # how many speed data points to remember for t alignment with rdr data

    enabled = 0
    steer_angle = 0.

    tracks = defaultdict(dict)

    # Nidec
    cp = _create_radard_can_parser()

    # Kalman filter stuff:
    ekfv = EKFV1D()
    speedSensorV = SimpleSensor(XV, 1, 2)

    # v_ego
    v_ego = None
    v_ego_array = np.zeros([2, v_len])
    v_ego_t_aligned = 0.

    rk = Ratekeeper(rate, print_delay_threshold=np.inf)
    while 1:
        canMonoTimes = []
        can_pub_radar = []
        for a in messaging.drain_sock(logcan, wait_for_one=True):
            canMonoTimes.append(a.logMonoTime)
            can_pub_radar.extend(can_capnp_to_can_list_old(a, [1, 3]))

        # only run on the 0x445 packets, used for timing
        if not any(x[0] == 0x445 for x in can_pub_radar):
            continue

        cp.update_can(can_pub_radar)

        if not cp.can_valid:
            # TODO: handle this
            pass

        ar_pts = nidec_decode(cp, ar_pts)

        # receive the live100s
        l100 = messaging.recv_sock(live100)
        if l100 is not None:
            enabled = l100.live100.enabled
            v_ego = l100.live100.vEgo
            steer_angle = l100.live100.angleSteers

            v_ego_array = np.append(v_ego_array,
                                    [[v_ego], [float(rk.frame) / rate]], 1)
            v_ego_array = v_ego_array[:, 1:]

        if v_ego is None:
            continue

        # *** get path prediction from the model ***
        PP.update(sec_since_boot(), v_ego)

        # run kalman filter only if prob is high enough
        if PP.lead_prob > 0.7:
            ekfv.update(speedSensorV.read(PP.lead_dist, covar=PP.lead_var))
            ekfv.predict(tsv)
            ar_pts[VISION_POINT] = (float(ekfv.state[XV]),
                                    np.polyval(PP.d_poly,
                                               float(ekfv.state[XV])),
                                    float(ekfv.state[SPEEDV]), np.nan,
                                    PP.logMonoTime, np.nan, sec_since_boot())
        else:
            ekfv.state[XV] = PP.lead_dist
            ekfv.covar = (np.diag([PP.lead_var, ekfv.var_init]))
            ekfv.state[SPEEDV] = 0.
            if VISION_POINT in ar_pts:
                del ar_pts[VISION_POINT]

        # *** compute the likely path_y ***
        if enabled:  # use path from model path_poly
            path_y = np.polyval(PP.d_poly, path_x)
        else:  # use path from steer, set angle_offset to 0 since calibration does not exactly report the physical offset
            path_y = calc_lookahead_offset(v_ego,
                                           steer_angle,
                                           path_x,
                                           VP,
                                           angle_offset=0)[0]

        # *** remove missing points from meta data ***
        for ids in tracks.keys():
            if ids not in ar_pts:
                tracks.pop(ids, None)

        # *** compute the tracks ***
        for ids in ar_pts:
            # ignore the vision point for now
            if ids == VISION_POINT:
                continue
            rpt = ar_pts[ids]

            # align v_ego by a fixed time to align it with the radar measurement
            cur_time = float(rk.frame) / rate
            v_ego_t_aligned = np.interp(cur_time - rdr_delay, v_ego_array[1],
                                        v_ego_array[0])
            d_path = np.sqrt(
                np.amin((path_x - rpt[0])**2 + (path_y - rpt[1])**2))

            # create the track
            if ids not in tracks or rpt[5] == 1:
                tracks[ids] = Track()
            tracks[ids].update(rpt[0], rpt[1], rpt[2], d_path, v_ego_t_aligned)

            # allow the vision model to remove the stationary flag if distance and rel speed roughly match
            if VISION_POINT in ar_pts:
                dist_to_vision = np.sqrt(
                    (0.5 * (ar_pts[VISION_POINT][0] - rpt[0]))**2 +
                    (2 * (ar_pts[VISION_POINT][1] - rpt[1]))**2)
                rel_speed_diff = abs(ar_pts[VISION_POINT][2] - rpt[2])
                tracks[ids].mix_vision(dist_to_vision, rel_speed_diff)

        # publish tracks (debugging)
        dat = messaging.new_message()
        dat.init('liveTracks', len(tracks))
        for cnt, ids in enumerate(tracks.keys()):
            dat.liveTracks[cnt].trackId = ids
            dat.liveTracks[cnt].dRel = float(tracks[ids].dRel)
            dat.liveTracks[cnt].yRel = float(tracks[ids].yRel)
            dat.liveTracks[cnt].vRel = float(tracks[ids].vRel)
            dat.liveTracks[cnt].aRel = float(tracks[ids].aRel)
            dat.liveTracks[cnt].stationary = tracks[ids].stationary
            dat.liveTracks[cnt].oncoming = tracks[ids].oncoming
        liveTracks.send(dat.to_bytes())

        idens = tracks.keys()
        track_pts = np.array(
            [tracks[iden].get_key_for_cluster() for iden in idens])

        # If we have multiple points, cluster them
        if len(track_pts) > 1:
            link = linkage_vector(track_pts, method='centroid')
            cluster_idxs = fcluster(link, 2.5, criterion='distance')
            clusters = [None] * max(cluster_idxs)

            for idx in xrange(len(track_pts)):
                cluster_i = cluster_idxs[idx] - 1

                if clusters[cluster_i] is None:
                    clusters[cluster_i] = Cluster()
                clusters[cluster_i].add(tracks[idens[idx]])
        elif len(track_pts) == 1:
            # TODO: why do we need this?
            clusters = [Cluster()]
            clusters[0].add(tracks[idens[0]])
        else:
            clusters = []

        # *** extract the lead car ***
        lead_clusters = [
            c for c in clusters if c.is_potential_lead(v_ego, enabled)
        ]
        lead_clusters.sort(key=lambda x: x.dRel)
        lead_len = len(lead_clusters)

        # *** extract the second lead from the whole set of leads ***
        lead2_clusters = [
            c for c in lead_clusters if c.is_potential_lead2(lead_clusters)
        ]
        lead2_clusters.sort(key=lambda x: x.dRel)
        lead2_len = len(lead2_clusters)

        # *** publish live20 ***
        dat = messaging.new_message()
        dat.init('live20')
        dat.live20.mdMonoTime = PP.logMonoTime
        dat.live20.canMonoTimes = canMonoTimes
        if lead_len > 0:
            lead_clusters[0].toLive20(dat.live20.leadOne)
            if lead2_len > 0:
                lead2_clusters[0].toLive20(dat.live20.leadTwo)
            else:
                dat.live20.leadTwo.status = False
        else:
            dat.live20.leadOne.status = False

        dat.live20.cumLagMs = -rk.remaining * 1000.
        live20.send(dat.to_bytes())

        rk.monitor_time()
Code Example #10
class RadarD(object):
    def __init__(self, VM, mocked):
        self.VM = VM
        self.mocked = mocked

        self.MP = ModelParser()
        self.tracks = defaultdict(dict)

        self.last_md_ts = 0
        self.last_controls_state_ts = 0

        self.active = 0
        self.steer_angle = 0.
        self.steer_override = False

        # Kalman filter stuff:
        self.ekfv = EKFV1D()
        self.speedSensorV = SimpleSensor(XV, 1, 2)

        # v_ego
        self.v_ego = 0.
        self.v_ego_hist_t = deque([0], maxlen=v_len)
        self.v_ego_hist_v = deque([0], maxlen=v_len)
        self.v_ego_t_aligned = 0.

    def update(self, frame, delay, sm, rr):
        ar_pts = {}
        for pt in rr.points:
            ar_pts[pt.trackId] = [
                pt.dRel + RDR_TO_LDR, pt.yRel, pt.vRel, pt.measured
            ]

        if sm.updated['liveParameters']:
            self.VM.update_params(sm['liveParameters'].stiffnessFactor,
                                  sm['liveParameters'].steerRatio)

        if sm.updated['controlsState']:
            self.active = sm['controlsState'].active
            self.v_ego = sm['controlsState'].vEgo
            self.steer_angle = sm['controlsState'].angleSteers
            self.steer_override = sm['controlsState'].steerOverride

            self.v_ego_hist_v.append(self.v_ego)
            self.v_ego_hist_t.append(float(frame) / rate)

            self.last_controls_state_ts = sm.logMonoTime['controlsState']

        if sm.updated['model']:
            self.last_md_ts = sm.logMonoTime['model']
            self.MP.update(self.v_ego, sm['model'])

        # run kalman filter only if prob is high enough
        if self.MP.lead_prob > 0.7:
            reading = self.speedSensorV.read(self.MP.lead_dist,
                                             covar=np.matrix(self.MP.lead_var))
            self.ekfv.update_scalar(reading)
            self.ekfv.predict(DT_MDL)

            # When changing lanes the distance to the lead car can suddenly change,
            # which makes the Kalman filter output large relative acceleration
            if self.mocked and abs(self.MP.lead_dist -
                                   self.ekfv.state[XV]) > 2.0:
                self.ekfv.state[XV] = self.MP.lead_dist
                self.ekfv.covar = (np.diag(
                    [self.MP.lead_var, self.ekfv.var_init]))
                self.ekfv.state[SPEEDV] = 0.

            ar_pts[VISION_POINT] = (float(self.ekfv.state[XV]),
                                    np.polyval(self.MP.d_poly,
                                               float(self.ekfv.state[XV])),
                                    float(self.ekfv.state[SPEEDV]), False)
        else:
            self.ekfv.state[XV] = self.MP.lead_dist
            self.ekfv.covar = (np.diag([self.MP.lead_var, self.ekfv.var_init]))
            self.ekfv.state[SPEEDV] = 0.

            if VISION_POINT in ar_pts:
                del ar_pts[VISION_POINT]

        # *** compute the likely path_y ***
        if (self.active and not self.steer_override) or self.mocked:
            # use path from model (always when mocking as steering is too noisy)
            path_y = np.polyval(self.MP.d_poly, path_x)
        else:
            # use path from steer; set angle_offset to 0 since it reports more than just the physical offset
            path_y = calc_lookahead_offset(
                self.v_ego,
                self.steer_angle,
                path_x,
                self.VM,
                angle_offset=sm['liveParameters'].angleOffsetAverage)[0]

        # *** remove missing points from meta data ***
        for ids in self.tracks.keys():
            if ids not in ar_pts:
                self.tracks.pop(ids, None)

        # *** compute the tracks ***
        for ids in ar_pts:
            # ignore standalone vision point, unless we are mocking the radar
            if ids == VISION_POINT and not self.mocked:
                continue
            rpt = ar_pts[ids]

            # align v_ego by a fixed time to align it with the radar measurement
            cur_time = float(frame) / rate
            self.v_ego_t_aligned = np.interp(cur_time - delay,
                                             self.v_ego_hist_t,
                                             self.v_ego_hist_v)

            d_path = np.sqrt(
                np.amin((path_x - rpt[0])**2 + (path_y - rpt[1])**2))
            # add sign
            d_path *= np.sign(rpt[1] - np.interp(rpt[0], path_x, path_y))

            # create the track if it doesn't exist or it's a new track
            if ids not in self.tracks:
                self.tracks[ids] = Track()
            self.tracks[ids].update(rpt[0], rpt[1], rpt[2], d_path,
                                    self.v_ego_t_aligned, rpt[3],
                                    self.steer_override)

        # allow the vision model to remove the stationary flag if distance and rel speed roughly match
        if VISION_POINT in ar_pts:
            fused_id = None
            best_score = NO_FUSION_SCORE
            for ids in self.tracks:
                dist_to_vision = np.sqrt(
                    (0.5 *
                     (ar_pts[VISION_POINT][0] - self.tracks[ids].dRel))**2 +
                    (2 * (ar_pts[VISION_POINT][1] - self.tracks[ids].yRel))**2)
                rel_speed_diff = abs(ar_pts[VISION_POINT][2] -
                                     self.tracks[ids].vRel)
                self.tracks[ids].update_vision_score(dist_to_vision,
                                                     rel_speed_diff)
                if best_score > self.tracks[ids].vision_score:
                    fused_id = ids
                    best_score = self.tracks[ids].vision_score

            if fused_id is not None:
                self.tracks[fused_id].vision_cnt += 1
                self.tracks[fused_id].update_vision_fusion()

        if DEBUG:
            print("NEW CYCLE")
            if VISION_POINT in ar_pts:
                print("vision", ar_pts[VISION_POINT])

        idens = list(self.tracks.keys())
        track_pts = np.array(
            [self.tracks[iden].get_key_for_cluster() for iden in idens])

        # If we have multiple points, cluster them
        if len(track_pts) > 1:
            cluster_idxs = cluster_points_centroid(track_pts, 2.5)
            clusters = [None] * (max(cluster_idxs) + 1)

            for idx in xrange(len(track_pts)):
                cluster_i = cluster_idxs[idx]
                if clusters[cluster_i] is None:
                    clusters[cluster_i] = Cluster()
                clusters[cluster_i].add(self.tracks[idens[idx]])

        elif len(track_pts) == 1:
            # TODO: why do we need this?
            clusters = [Cluster()]
            clusters[0].add(self.tracks[idens[0]])
        else:
            clusters = []

        if DEBUG:
            for i in clusters:
                print(i)
        # *** extract the lead car ***
        lead_clusters = [
            c for c in clusters if c.is_potential_lead(self.v_ego)
        ]
        lead_clusters.sort(key=lambda x: x.dRel)
        lead_len = len(lead_clusters)

        # *** extract the second lead from the whole set of leads ***
        lead2_clusters = [
            c for c in lead_clusters if c.is_potential_lead2(lead_clusters)
        ]
        lead2_clusters.sort(key=lambda x: x.dRel)
        lead2_len = len(lead2_clusters)

        # *** publish radarState ***
        dat = messaging.new_message()
        dat.init('radarState')
        dat.valid = sm.all_alive_and_valid(service_list=['controlsState'])
        dat.radarState.mdMonoTime = self.last_md_ts
        dat.radarState.canMonoTimes = list(rr.canMonoTimes)
        dat.radarState.radarErrors = list(rr.errors)
        dat.radarState.controlsStateMonoTime = self.last_controls_state_ts
        if lead_len > 0:
            dat.radarState.leadOne = lead_clusters[0].toRadarState()
            if lead2_len > 0:
                dat.radarState.leadTwo = lead2_clusters[0].toRadarState()
            else:
                dat.radarState.leadTwo.status = False
        else:
            dat.radarState.leadOne.status = False

        return dat
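
RadarD.update aligns the ego speed with the delayed radar measurement by interpolating over the (time, speed) history deques. A small worked example of that np.interp call:

import numpy as np

v_ego_hist_t = [0.00, 0.05, 0.10, 0.15]   # sample times, s
v_ego_hist_v = [10.0, 10.2, 10.4, 10.6]   # ego speed samples, m/s
cur_time, delay = 0.15, 0.10

v_ego_t_aligned = np.interp(cur_time - delay, v_ego_hist_t, v_ego_hist_v)
# -> 10.2, the ego speed as it was `delay` seconds before the current frame
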
Code Example #11
def radard_thread(gctx=None):
    set_realtime_priority(2)

    # wait for stats about the car to come in from controls
    cloudlog.info("radard is waiting for CarParams")
    CP = car.CarParams.from_bytes(Params().get("CarParams", block=True))
    mocked = CP.carName == "mock"
    VM = VehicleModel(CP)
    cloudlog.info("radard got CarParams")

    # import the radar from the fingerprint
    cloudlog.info("radard is importing %s", CP.carName)
    RadarInterface = importlib.import_module(
        'selfdrive.car.%s.radar_interface' % CP.carName).RadarInterface
    context = zmq.Context()

    # *** subscribe to features and model from visiond
    poller = zmq.Poller()
    model = messaging.sub_sock(context,
                               service_list['model'].port,
                               conflate=True,
                               poller=poller)
    controls_state_sock = messaging.sub_sock(
        context,
        service_list['controlsState'].port,
        conflate=True,
        poller=poller)
    live_parameters_sock = messaging.sub_sock(
        context,
        service_list['liveParameters'].port,
        conflate=True,
        poller=poller)

    # Default parameters
    live_parameters = messaging.new_message()
    live_parameters.init('liveParameters')
    live_parameters.liveParameters.valid = True
    live_parameters.liveParameters.steerRatio = CP.steerRatio
    live_parameters.liveParameters.stiffnessFactor = 1.0

    MP = ModelParser()
    RI = RadarInterface(CP)

    last_md_ts = 0
    last_controls_state_ts = 0

    # *** publish radarState and liveTracks
    radarState = messaging.pub_sock(context, service_list['radarState'].port)
    liveTracks = messaging.pub_sock(context, service_list['liveTracks'].port)

    path_x = np.arange(0.0, 140.0, 0.1)  # 140 meters is max

    # Time-alignment
    rate = 20.  # model and radar are both at 20Hz
    tsv = 1. / rate
    v_len = 20  # how many speed data points to remember for t alignment with rdr data

    active = 0
    steer_angle = 0.
    steer_override = False

    tracks = defaultdict(dict)

    # Kalman filter stuff:
    ekfv = EKFV1D()
    speedSensorV = SimpleSensor(XV, 1, 2)

    # v_ego
    v_ego = None
    v_ego_hist_t = deque(maxlen=v_len)
    v_ego_hist_v = deque(maxlen=v_len)
    v_ego_t_aligned = 0.

    rk = Ratekeeper(rate, print_delay_threshold=None)
    while 1:
        rr = RI.update()

        ar_pts = {}
        for pt in rr.points:
            ar_pts[pt.trackId] = [
                pt.dRel + RDR_TO_LDR, pt.yRel, pt.vRel, pt.measured
            ]

        # receive the controlsStates
        controls_state = None
        md = None

        for socket, event in poller.poll(0):
            if socket is controls_state_sock:
                controls_state = messaging.recv_one(socket)
            elif socket is model:
                md = messaging.recv_one(socket)
            elif socket is live_parameters_sock:
                live_parameters = messaging.recv_one(socket)
                VM.update_params(
                    live_parameters.liveParameters.stiffnessFactor,
                    live_parameters.liveParameters.steerRatio)

        if controls_state is not None:
            active = controls_state.controlsState.active
            v_ego = controls_state.controlsState.vEgo
            steer_angle = controls_state.controlsState.angleSteers
            steer_override = controls_state.controlsState.steerOverride

            v_ego_hist_v.append(v_ego)
            v_ego_hist_t.append(float(rk.frame) / rate)

            last_controls_state_ts = controls_state.logMonoTime

        if v_ego is None:
            continue

        if md is not None:
            last_md_ts = md.logMonoTime

        # *** get path prediction from the model ***
        MP.update(v_ego, md)

        # run kalman filter only if prob is high enough
        if MP.lead_prob > 0.7:
            reading = speedSensorV.read(MP.lead_dist,
                                        covar=np.matrix(MP.lead_var))
            ekfv.update_scalar(reading)
            ekfv.predict(tsv)

            # When changing lanes the distance to the lead car can suddenly change,
            # which makes the Kalman filter output large relative acceleration
            if mocked and abs(MP.lead_dist - ekfv.state[XV]) > 2.0:
                ekfv.state[XV] = MP.lead_dist
                ekfv.covar = (np.diag([MP.lead_var, ekfv.var_init]))
                ekfv.state[SPEEDV] = 0.

            ar_pts[VISION_POINT] = (float(ekfv.state[XV]),
                                    np.polyval(MP.d_poly,
                                               float(ekfv.state[XV])),
                                    float(ekfv.state[SPEEDV]), False)
        else:
            ekfv.state[XV] = MP.lead_dist
            ekfv.covar = (np.diag([MP.lead_var, ekfv.var_init]))
            ekfv.state[SPEEDV] = 0.

            if VISION_POINT in ar_pts:
                del ar_pts[VISION_POINT]

        # *** compute the likely path_y ***
        if (active and not steer_override) or mocked:
            # use path from model (always when mocking as steering is too noisy)
            path_y = np.polyval(MP.d_poly, path_x)
        else:
            # use path from steer; set angle_offset to 0 since it reports more than just the physical offset
            path_y = calc_lookahead_offset(
                v_ego,
                steer_angle,
                path_x,
                VM,
                angle_offset=live_parameters.liveParameters.angleOffsetAverage
            )[0]

        # *** remove missing points from meta data ***
        for ids in tracks.keys():
            if ids not in ar_pts:
                tracks.pop(ids, None)

        # *** compute the tracks ***
        for ids in ar_pts:
            # ignore standalone vision point, unless we are mocking the radar
            if ids == VISION_POINT and not mocked:
                continue
            rpt = ar_pts[ids]

            # align v_ego by a fixed time to align it with the radar measurement
            cur_time = float(rk.frame) / rate
            v_ego_t_aligned = np.interp(cur_time - RI.delay, v_ego_hist_t,
                                        v_ego_hist_v)

            d_path = np.sqrt(
                np.amin((path_x - rpt[0])**2 + (path_y - rpt[1])**2))
            # add sign
            d_path *= np.sign(rpt[1] - np.interp(rpt[0], path_x, path_y))

            # create the track if it doesn't exist or it's a new track
            if ids not in tracks:
                tracks[ids] = Track()
            tracks[ids].update(rpt[0], rpt[1], rpt[2], d_path, v_ego_t_aligned,
                               rpt[3], steer_override)

        # allow the vision model to remove the stationary flag if distance and rel speed roughly match
        if VISION_POINT in ar_pts:
            fused_id = None
            best_score = NO_FUSION_SCORE
            for ids in tracks:
                dist_to_vision = np.sqrt(
                    (0.5 * (ar_pts[VISION_POINT][0] - tracks[ids].dRel))**2 +
                    (2 * (ar_pts[VISION_POINT][1] - tracks[ids].yRel))**2)
                rel_speed_diff = abs(ar_pts[VISION_POINT][2] -
                                     tracks[ids].vRel)
                tracks[ids].update_vision_score(dist_to_vision, rel_speed_diff)
                if best_score > tracks[ids].vision_score:
                    fused_id = ids
                    best_score = tracks[ids].vision_score

            if fused_id is not None:
                tracks[fused_id].vision_cnt += 1
                tracks[fused_id].update_vision_fusion()

        if DEBUG:
            print("NEW CYCLE")
            if VISION_POINT in ar_pts:
                print("vision", ar_pts[VISION_POINT])

        idens = list(tracks.keys())
        track_pts = np.array(
            [tracks[iden].get_key_for_cluster() for iden in idens])

        # If we have multiple points, cluster them
        if len(track_pts) > 1:
            cluster_idxs = cluster_points_centroid(track_pts, 2.5)
            clusters = [None] * (max(cluster_idxs) + 1)

            for idx in xrange(len(track_pts)):
                cluster_i = cluster_idxs[idx]
                if clusters[cluster_i] is None:
                    clusters[cluster_i] = Cluster()
                clusters[cluster_i].add(tracks[idens[idx]])

        elif len(track_pts) == 1:
            # TODO: why do we need this?
            clusters = [Cluster()]
            clusters[0].add(tracks[idens[0]])
        else:
            clusters = []

        if DEBUG:
            for i in clusters:
                print(i)
        # *** extract the lead car ***
        lead_clusters = [c for c in clusters if c.is_potential_lead(v_ego)]
        lead_clusters.sort(key=lambda x: x.dRel)
        lead_len = len(lead_clusters)

        # *** extract the second lead from the whole set of leads ***
        lead2_clusters = [
            c for c in lead_clusters if c.is_potential_lead2(lead_clusters)
        ]
        lead2_clusters.sort(key=lambda x: x.dRel)
        lead2_len = len(lead2_clusters)

        # *** publish radarState ***
        dat = messaging.new_message()
        dat.init('radarState')
        dat.radarState.mdMonoTime = last_md_ts
        dat.radarState.canMonoTimes = list(rr.canMonoTimes)
        dat.radarState.radarErrors = list(rr.errors)
        dat.radarState.controlsStateMonoTime = last_controls_state_ts
        if lead_len > 0:
            dat.radarState.leadOne = lead_clusters[0].toRadarState()
            if lead2_len > 0:
                dat.radarState.leadTwo = lead2_clusters[0].toRadarState()
            else:
                dat.radarState.leadTwo.status = False
        else:
            dat.radarState.leadOne.status = False

        dat.radarState.cumLagMs = -rk.remaining * 1000.
        radarState.send(dat.to_bytes())

        # *** publish tracks for UI debugging (keep last) ***
        dat = messaging.new_message()
        dat.init('liveTracks', len(tracks))

        for cnt, ids in enumerate(tracks.keys()):
            if DEBUG:
                print("id: %4.0f x:  %4.1f  y: %4.1f  vr: %4.1f d: %4.1f  va: %4.1f  vl: %4.1f  vlk: %4.1f alk: %4.1f  s: %1.0f  v: %1.0f" % \
                  (ids, tracks[ids].dRel, tracks[ids].yRel, tracks[ids].vRel,
                   tracks[ids].dPath, tracks[ids].vLat,
                   tracks[ids].vLead, tracks[ids].vLeadK,
                   tracks[ids].aLeadK,
                   tracks[ids].stationary,
                   tracks[ids].measured))
            dat.liveTracks[cnt] = {
                "trackId": ids,
                "dRel": float(tracks[ids].dRel),
                "yRel": float(tracks[ids].yRel),
                "vRel": float(tracks[ids].vRel),
                "aRel": float(tracks[ids].aRel),
                "stationary": bool(tracks[ids].stationary),
                "oncoming": bool(tracks[ids].oncoming),
            }
        liveTracks.send(dat.to_bytes())

        rk.monitor_time()