Example #1
def main() -> NoReturn:
  clear_apport_folder()  # Clear apport folder on start, otherwise duplicate crashes won't register
  initial_tombstones = set(get_tombstones())

  sentry_sdk.utils.MAX_STRING_LENGTH = 8192
  sentry_sdk.init("https://[email protected]/157615",
                  default_integrations=False, release=get_version())

  dongle_id = Params().get("DongleId", encoding='utf-8')
  sentry_sdk.set_user({"id": dongle_id})
  sentry_sdk.set_tag("dirty", is_dirty())
  sentry_sdk.set_tag("origin", get_origin())
  sentry_sdk.set_tag("branch", get_branch())
  sentry_sdk.set_tag("commit", get_commit())
  sentry_sdk.set_tag("device", HARDWARE.get_device_type())

  while True:
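    # poll for new tombstone/crash files every 5 seconds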
    now_tombstones = set(get_tombstones())

    for fn, _ in (now_tombstones - initial_tombstones):
      try:
        cloudlog.info(f"reporting new tombstone {fn}")
        if fn.endswith(".crash"):
          report_tombstone_apport(fn)
        else:
          report_tombstone_android(fn)
      except Exception:
        cloudlog.exception(f"Error reporting tombstone {fn}")

    initial_tombstones = now_tombstones
    time.sleep(5)
Example #2
def init(project: SentryProject) -> None:
    # forks like to mess with this, so double check
    comma_remote = is_comma_remote() and "commaai" in get_origin(default="")
    if not comma_remote or not is_registered_device() or PC:
        return

    env = "release" if is_tested_branch() else "master"
    dongle_id = Params().get("DongleId", encoding='utf-8')

    integrations = []
    if project == SentryProject.SELFDRIVE:
        integrations.append(ThreadingIntegration(propagate_hub=True))
    else:
        sentry_sdk.utils.MAX_STRING_LENGTH = 8192

    sentry_sdk.init(project.value,
                    default_integrations=False,
                    release=get_version(),
                    integrations=integrations,
                    traces_sample_rate=1.0,
                    environment=env)

    sentry_sdk.set_user({"id": dongle_id})
    sentry_sdk.set_tag("dirty", is_dirty())
    sentry_sdk.set_tag("origin", get_origin())
    sentry_sdk.set_tag("branch", get_branch())
    sentry_sdk.set_tag("commit", get_commit())
    sentry_sdk.set_tag("device", HARDWARE.get_device_type())

    if project == SentryProject.SELFDRIVE:
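        # start a Sentry session (release health tracking) for the selfdrive project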
        sentry_sdk.Hub.current.start_session()
Example #3
def main():
  params = Params()
  dongle_id = params.get("DongleId").decode('utf-8')
  cloudlog.bind_global(dongle_id=dongle_id, version=get_version(), dirty=is_dirty())

  try:
    while True:
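      # keep athenad alive: restart it 5 seconds after it exits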
      cloudlog.info("starting athena daemon")
      proc = Process(name='athenad', target=launcher, args=('selfdrive.athena.athenad', 'athenad'))
      proc.start()
      proc.join()
      cloudlog.event("athenad exited", exitcode=proc.exitcode)
      time.sleep(5)
  except Exception:
    cloudlog.exception("manage_athenad.exception")
  finally:
    params.delete(ATHENA_MGR_PID_PARAM)
Example #4
def manager_init() -> None:
  # update system time from panda
  set_time(cloudlog)

  # save boot log
  #subprocess.call("./bootlog", cwd=os.path.join(BASEDIR, "selfdrive/loggerd"))

  params = Params()
  params.clear_all(ParamKeyType.CLEAR_ON_MANAGER_START)

  default_params: List[Tuple[str, Union[str, bytes]]] = [
    ("CompletedTrainingVersion", "0"),
    ("DisengageOnAccelerator", "0"),
    ("HasAcceptedTerms", "0"),
    ("OpenpilotEnabledToggle", "1"),
    ("IsMetric", "1"),

    # HKG
    ("LateralControl", "TORQUE"),
    ("UseClusterSpeed", "0"),
    ("LongControlEnabled", "0"),
    ("MadModeEnabled", "1"),
    ("IsLdwsCar", "0"),
    ("LaneChangeEnabled", "0"),
    ("AutoLaneChangeEnabled", "0"),

    ("SccSmootherSlowOnCurves", "0"),
    ("SccSmootherSyncGasPressed", "0"),
    ("StockNaviDecelEnabled", "0"),
    ("KeepSteeringTurnSignals", "0"),
    ("HapticFeedbackWhenSpeedCamera", "0"),
    ("DisableOpFcw", "0"),
    ("ShowDebugUI", "0"),
    ("NewRadarInterface", "0"),
  ]
  if not PC:
    default_params.append(("LastUpdateTime", datetime.datetime.utcnow().isoformat().encode('utf8')))

  if params.get_bool("RecordFrontLock"):
    params.put_bool("RecordFront", True)

  if not params.get_bool("DisableRadar_Allow"):
    params.delete("DisableRadar")

  # set unset params
  for k, v in default_params:
    if params.get(k) is None:
      params.put(k, v)

  # is this dashcam?
  if os.getenv("PASSIVE") is not None:
    params.put_bool("Passive", bool(int(os.getenv("PASSIVE", "0"))))

  if params.get("Passive") is None:
    raise Exception("Passive must be set to continue")

  # Create folders needed for msgq
  try:
    os.mkdir("/dev/shm")
  except FileExistsError:
    pass
  except PermissionError:
    print("WARNING: failed to make /dev/shm")

  # set version params
  params.put("Version", get_version())
  params.put("TermsVersion", terms_version)
  params.put("TrainingVersion", training_version)
  params.put("GitCommit", get_commit(default=""))
  params.put("GitBranch", get_short_branch(default=""))
  params.put("GitRemote", get_origin(default=""))

  # set dongle id
  reg_res = register(show_spinner=True)
  if reg_res:
    dongle_id = reg_res
  else:
    serial = params.get("HardwareSerial")
    raise Exception(f"Registration failed for device {serial}")
  os.environ['DONGLE_ID'] = dongle_id  # Needed for swaglog

  if not is_dirty():
    os.environ['CLEAN'] = '1'

  # init logging
  sentry.init(sentry.SentryProject.SELFDRIVE)
  cloudlog.bind_global(dongle_id=dongle_id, version=get_version(), dirty=is_dirty(),
                       device=HARDWARE.get_device_type())
Example #5
def main():
  def get_influxdb_line(measurement: str, value: float, timestamp: datetime, tags: dict):
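    # build one InfluxDB line protocol entry: measurement,tag1=v1,... value=<value> <timestamp in ns>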
    res = f"{measurement}"
    for k, v in tags.items():
      res += f",{k}={str(v)}"
    res += f" value={value} {int(timestamp.timestamp() * 1e9)}\n"
    return res

  # open statistics socket
  ctx = zmq.Context().instance()
  sock = ctx.socket(zmq.PULL)
  sock.bind(STATS_SOCKET)

  # initialize stats directory
  Path(STATS_DIR).mkdir(parents=True, exist_ok=True)

  # initialize tags
  tags = {
    'dongleId': Params().get("DongleId", encoding='utf-8'),
    'started': False,
    'version': get_short_version(),
    'branch': get_short_branch(),
    'dirty': is_dirty(),
    'origin': get_normalized_origin(),
    'deviceType': HARDWARE.get_device_type(),
  }

  # subscribe to deviceState for started state
  sm = SubMaster(['deviceState'])

  last_flush_time = time.monotonic()
  gauges = {}
  while True:
    started_prev = sm['deviceState'].started
    sm.update()

    # Update metrics
    while True:
      try:
        metric = sock.recv_string(zmq.NOBLOCK)
        try:
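          # expected statsd-style payload: "<name>:<value>|<type>"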
          metric_type = metric.split('|')[1]
          metric_name = metric.split(':')[0]
          metric_value = metric.split('|')[0].split(':')[1]

          if metric_type == METRIC_TYPE.GAUGE:
            gauges[metric_name] = metric_value
          else:
            cloudlog.event("unknown metric type", metric_type=metric_type)
        except Exception:
          cloudlog.event("malformed metric", metric=metric)
      except zmq.error.Again:
        break

    # flush when started state changes or after FLUSH_TIME_S
    if (time.monotonic() > last_flush_time + STATS_FLUSH_TIME_S) or (sm['deviceState'].started != started_prev):
      result = ""
      current_time = datetime.utcnow().replace(tzinfo=timezone.utc)
      tags['started'] = sm['deviceState'].started

      for gauge_key in gauges:
        result += get_influxdb_line(f"gauge.{gauge_key}", gauges[gauge_key], current_time, tags)

      # clear intermediate data
      gauges = {}
      last_flush_time = time.monotonic()

      # check that we aren't filling up the drive
      if len(os.listdir(STATS_DIR)) < STATS_DIR_FILE_LIMIT:
        if len(result) > 0:
          stats_path = os.path.join(STATS_DIR, str(int(current_time.timestamp())))
          with atomic_write_in_dir(stats_path) as f:
            f.write(result)
      else:
        cloudlog.error("stats dir full")
Example #6
                add_file_handler(cloudlog)
                cloudlog.error("scons build failed\n" + error_s)

                # Show TextWindow
                spinner.close()
                if not os.getenv("CI"):
                    error_s = "\n \n".join("\n".join(textwrap.wrap(e, 65))
                                           for e in errors)
                    with TextWindow("openpilot failed to build\n \n" +
                                    error_s) as t:
                        t.wait_for_exit()
                exit(1)
        else:
            break

    # enforce max cache size
    cache_files = [f for f in CACHE_DIR.rglob('*') if f.is_file()]
    cache_files.sort(key=lambda f: f.stat().st_mtime)
    cache_size = sum(f.stat().st_size for f in cache_files)
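    # delete the oldest files (by mtime) until the total size fits under MAX_CACHE_SIZE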
    for f in cache_files:
        if cache_size < MAX_CACHE_SIZE:
            break
        cache_size -= f.stat().st_size
        f.unlink()


if __name__ == "__main__" and not PREBUILT:
    spinner = Spinner()
    spinner.update_progress(0, 100)
    build(spinner, is_dirty())
Example #7
def main() -> NoReturn:
    dongle_id = Params().get("DongleId", encoding='utf-8')

    def get_influxdb_line(measurement: str, value: Union[float, Dict[str,
                                                                     float]],
                          timestamp: datetime, tags: dict) -> str:
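        # build one InfluxDB line protocol entry: measurement,tag1=v1,... field1=v1,... <timestamp in ns>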
        res = f"{measurement}"
        for k, v in tags.items():
            res += f",{k}={str(v)}"
        res += " "

        if isinstance(value, float):
            value = {'value': value}

        for k, v in value.items():
            res += f"{k}={v},"

        res += f"dongle_id=\"{dongle_id}\" {int(timestamp.timestamp() * 1e9)}\n"
        return res

    # open statistics socket
    ctx = zmq.Context().instance()
    sock = ctx.socket(zmq.PULL)
    sock.bind(STATS_SOCKET)

    # initialize stats directory
    Path(STATS_DIR).mkdir(parents=True, exist_ok=True)

    # initialize tags
    tags = {
        'started': False,
        'version': get_short_version(),
        'branch': get_short_branch(),
        'dirty': is_dirty(),
        'origin': get_normalized_origin(),
        'deviceType': HARDWARE.get_device_type(),
    }

    # subscribe to deviceState for started state
    sm = SubMaster(['deviceState'])

    idx = 0
    last_flush_time = time.monotonic()
    gauges = {}
    samples: Dict[str, List[float]] = defaultdict(list)
    while True:
        started_prev = sm['deviceState'].started
        sm.update()

        # Update metrics
        while True:
            try:
                metric = sock.recv_string(zmq.NOBLOCK)
                try:
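                    # expected statsd-style payload: "<name>:<value>|<type>"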
                    metric_type = metric.split('|')[1]
                    metric_name = metric.split(':')[0]
                    metric_value = float(metric.split('|')[0].split(':')[1])

                    if metric_type == METRIC_TYPE.GAUGE:
                        gauges[metric_name] = metric_value
                    elif metric_type == METRIC_TYPE.SAMPLE:
                        samples[metric_name].append(metric_value)
                    else:
                        cloudlog.event("unknown metric type",
                                       metric_type=metric_type)
                except Exception:
                    cloudlog.event("malformed metric", metric=metric)
            except zmq.error.Again:
                break

        # flush when started state changes or after FLUSH_TIME_S
        if (time.monotonic() > last_flush_time + STATS_FLUSH_TIME_S) or (
                sm['deviceState'].started != started_prev):
            result = ""
            current_time = datetime.utcnow().replace(tzinfo=timezone.utc)
            tags['started'] = sm['deviceState'].started

            for key, value in gauges.items():
                result += get_influxdb_line(f"gauge.{key}", value,
                                            current_time, tags)

            for key, values in samples.items():
                values.sort()
                sample_count = len(values)
                sample_sum = sum(values)

                stats = {
                    'count': sample_count,
                    'min': values[0],
                    'max': values[-1],
                    'mean': sample_sum / sample_count,
                }
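                # approximate p5/p50/p95 by indexing into the sorted samples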
                for percentile in [0.05, 0.5, 0.95]:
                    value = values[int(round(percentile * (sample_count - 1)))]
                    stats[f"p{int(percentile * 100)}"] = value

                result += get_influxdb_line(f"sample.{key}", stats,
                                            current_time, tags)

            # clear intermediate data
            gauges.clear()
            samples.clear()
            last_flush_time = time.monotonic()

            # check that we aren't filling up the drive
            if len(os.listdir(STATS_DIR)) < STATS_DIR_FILE_LIMIT:
                if len(result) > 0:
                    stats_path = os.path.join(
                        STATS_DIR, f"{current_time.timestamp():.0f}_{idx}")
                    with atomic_write_in_dir(stats_path) as f:
                        f.write(result)
                    idx += 1
            else:
                cloudlog.error("stats dir full")
Example #8
def manager_init():
    # update system time from panda
    set_time(cloudlog)

    # save boot log
    subprocess.call("./bootlog",
                    cwd=os.path.join(BASEDIR, "selfdrive/loggerd"))

    params = Params()
    params.clear_all(ParamKeyType.CLEAR_ON_MANAGER_START)

    default_params = [
        ("CompletedTrainingVersion", "0"),
        ("HasAcceptedTerms", "0"),
        ("OpenpilotEnabledToggle", "1"),
    ]
    if not PC:
        default_params.append(
            ("LastUpdateTime",
             datetime.datetime.utcnow().isoformat().encode('utf8')))

    if params.get_bool("RecordFrontLock"):
        params.put_bool("RecordFront", True)

    if not params.get_bool("DisableRadar_Allow"):
        params.delete("DisableRadar")

    # set unset params
    for k, v in default_params:
        if params.get(k) is None:
            params.put(k, v)

    # is this dashcam?
    if os.getenv("PASSIVE") is not None:
        params.put_bool("Passive", bool(int(os.getenv("PASSIVE"))))

    if params.get("Passive") is None:
        raise Exception("Passive must be set to continue")

    # Create folders needed for msgq
    try:
        os.mkdir("/dev/shm")
    except FileExistsError:
        pass
    except PermissionError:
        print("WARNING: failed to make /dev/shm")

    # set version params
    params.put("Version", get_version())
    params.put("TermsVersion", terms_version)
    params.put("TrainingVersion", training_version)
    params.put("GitCommit", get_commit(default=""))
    params.put("GitBranch", get_short_branch(default=""))
    params.put("GitRemote", get_origin(default=""))

    # set dongle id
    reg_res = register(show_spinner=True)
    if reg_res:
        dongle_id = reg_res
    else:
        serial = params.get("HardwareSerial")
        raise Exception(f"Registration failed for device {serial}")
    os.environ['DONGLE_ID'] = dongle_id  # Needed for swaglog

    if not is_dirty():
        os.environ['CLEAN'] = '1'

    cloudlog.bind_global(dongle_id=dongle_id,
                         version=get_version(),
                         dirty=is_dirty(),
                         device=HARDWARE.get_device_type())

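    # enable crash reporting only on comma remotes, unless disabled via NOLOG/NOCRASH or running on PC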
    if is_comma_remote() and not (os.getenv("NOLOG") or os.getenv("NOCRASH")
                                  or PC):
        crash.init()
    crash.bind_user(id=dongle_id)
    crash.bind_extra(dirty=is_dirty(),
                     origin=get_origin(),
                     branch=get_short_branch(),
                     commit=get_commit(),
                     device=HARDWARE.get_device_type())