Example #1
import paho.mqtt.publish

import dish_common


def loop_body(opts, gstate):
    msgs = []

    def cb_add_item(key, val, category):
        # Queue one value as a (topic, payload, qos, retain) tuple, the form
        # paho.mqtt.publish.multiple() accepts.
        msgs.append(
            ("starlink/dish_{0}/{1}/{2}".format(category, gstate.dish_id, key),
             val, 0, False))

    def cb_add_sequence(key, val, category, _):
        # Flatten a sequence field into a single comma-separated payload.
        msgs.append(
            ("starlink/dish_{0}/{1}/{2}".format(category, gstate.dish_id, key),
             ",".join(str(x) for x in val), 0, False))

    rc = dish_common.get_data(opts, gstate, cb_add_item, cb_add_sequence)

    if msgs:
        try:
            paho.mqtt.publish.multiple(msgs,
                                       client_id=gstate.dish_id,
                                       **opts.mqargs)
            if opts.verbose:
                print("Successfully published to MQTT broker")
        except Exception as e:
            dish_common.conn_error(opts,
                                   "Failed publishing to MQTT broker: %s",
                                   str(e))
            rc = 1

    return rc
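
For context, the surrounding script presumably drives this function in a polling loop. A hypothetical, much-simplified driver is sketched below; the opts/gstate stand-ins only carry the attributes loop_body() itself reads, and a real run would need whatever additional state dish_common.get_data() expects.

import time
from types import SimpleNamespace

# Hypothetical stand-ins for the script's option and state objects; only the
# attributes read directly by loop_body() are filled in. "hostname" is just a
# keyword argument forwarded to paho.mqtt.publish.multiple().
opts = SimpleNamespace(verbose=True, mqargs={"hostname": "localhost"})
gstate = SimpleNamespace(dish_id="unknown_id")

while True:
    rc = loop_body(opts, gstate)  # non-zero indicates a failure
    time.sleep(60)  # hypothetical fixed polling interval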
Example #2
from datetime import datetime

import dish_common

# VERBOSE_FIELD_MAP (a dict mapping field names to human-readable labels) is
# defined elsewhere in the source module.


def loop_body(opts, gstate):
    if opts.verbose:
        csv_data = []
    else:
        csv_data = [datetime.utcnow().replace(microsecond=0).isoformat()]

    def cb_data_add_item(name, val, category):
        if opts.verbose:
            csv_data.append("{0:22} {1}".format(VERBOSE_FIELD_MAP.get(name, name) + ":", val))
        else:
            # special case for get_status failure: this will be the lone item added
            if name == "state" and val == "DISH_UNREACHABLE":
                csv_data.extend(["", "", "", val])
            else:
                csv_data.append(str(val))

    def cb_data_add_sequence(name, val, category, start):
        if opts.verbose:
            csv_data.append("{0:22} {1}".format(
                VERBOSE_FIELD_MAP.get(name, name) + ":", ", ".join(str(subval) for subval in val)))
        else:
            csv_data.extend(str(subval) for subval in val)

    def cb_add_bulk(bulk, count, timestamp, counter):
        if opts.verbose:
            print("Time range (UTC):      {0} -> {1}".format(
                datetime.utcfromtimestamp(timestamp).isoformat(),
                datetime.utcfromtimestamp(timestamp + count).isoformat()))
            for key, val in bulk.items():
                print("{0:22} {1}".format(key + ":", ", ".join(str(subval) for subval in val)))
            if opts.loop_interval > 0.0:
                print()
        else:
            for i in range(count):
                timestamp += 1
                fields = [datetime.utcfromtimestamp(timestamp).isoformat()]
                fields.extend(["" if val[i] is None else str(val[i]) for val in bulk.values()])
                print(",".join(fields))

    rc = dish_common.get_data(opts,
                              gstate,
                              cb_data_add_item,
                              cb_data_add_sequence,
                              add_bulk=cb_add_bulk)

    if opts.verbose:
        if csv_data:
            print("\n".join(csv_data))
            if opts.loop_interval > 0.0:
                print()
    else:
        # skip if only timestamp
        if len(csv_data) > 1:
            print(",".join(csv_data))

    return rc
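
The verbose branch leans on VERBOSE_FIELD_MAP to turn raw field names into padded labels. Purely as an illustration, a map of the assumed shape (the real module defines its own, larger set of entries; unmapped names fall through via .get(name, name)):

# Hypothetical entries, for illustration only.
VERBOSE_FIELD_MAP = {
    "state": "State",
    "pop_ping_latency_ms": "Latency (ms)",
    "downlink_throughput_bps": "Downlink throughput (bps)",
}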
Example #3

import logging
import sqlite3
import time
from itertools import repeat

import dish_common


def loop_body(opts, gstate):
    tables = {"status": {}, "ping_stats": {}, "usage": {}}
    hist_cols = ["time", "id"]
    hist_rows = []

    def cb_add_item(key, val, category):
        tables[category][key] = val

    def cb_add_sequence(key, val, category, start):
        tables[category][key] = ",".join(
            str(subv) if subv is not None else "" for subv in val)

    def cb_add_bulk(bulk, count, timestamp, counter):
        nonlocal hist_cols
        # First bulk callback: extend the column list beyond the initial
        # ("time", "id") pair with the history field names plus "counter".
        if len(hist_cols) == 2:
            hist_cols.extend(bulk.keys())
            hist_cols.append("counter")
        for i in range(count):
            timestamp += 1
            counter += 1
            row = [timestamp, gstate.dish_id]
            row.extend(val[i] for val in bulk.values())
            row.append(counter)
            hist_rows.append(row)

    now = int(time.time())
    rc = dish_common.get_data(opts, gstate, cb_add_item, cb_add_sequence)

    if opts.bulk_mode and not rc:
        # query_counter() is a helper defined elsewhere in the source module;
        # it recovers the previously recorded timestamp/counter pair.
        if gstate.counter is None and not opts.skip_query and opts.bulk_samples < 0:
            gstate.timestamp, gstate.counter = query_counter(opts, gstate)
        rc = dish_common.get_bulk_data(opts, gstate, cb_add_bulk)

    rows_written = 0

    try:
        cur = gstate.sql_conn.cursor()
        for category, fields in tables.items():
            if fields:
                sql = 'INSERT OR REPLACE INTO "{0}" ("time","id",{1}) VALUES ({2})'.format(
                    category,
                    ",".join('"' + x + '"' for x in fields),
                    ",".join(repeat("?", len(fields) + 2)))
                values = [now, gstate.dish_id]
                values.extend(fields.values())
                cur.execute(sql, values)
                rows_written += 1

        if hist_rows:
            sql = 'INSERT OR REPLACE INTO "history" ({0}) VALUES({1})'.format(
                ",".join('"' + x + '"' for x in hist_cols),
                ",".join(repeat("?", len(hist_cols))))
            cur.executemany(sql, hist_rows)
            rows_written += len(hist_rows)

        cur.close()
        gstate.sql_conn.commit()
    except sqlite3.OperationalError as e:
        # These are not necessarily fatal, but there is not much we can do
        # about them here other than discard the data.
        logging.error("Unexpected error from database, discarding data: %s", e)
        rc = 1
    else:
        if opts.verbose:
            print("Rows written to db:", rows_written)

    return rc
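
The INSERT OR REPLACE statements above only behave as intended if each table has a primary key spanning "time" and "id". A minimal schema sketch consistent with that assumption (the real script builds its tables with full column lists derived from the reported field names; the data columns below are placeholders):

import sqlite3

# Assumed minimal schema, for illustration only; the real tables carry one
# column per reported field.
conn = sqlite3.connect("starlink.db")
conn.executescript("""
    CREATE TABLE IF NOT EXISTS "status" (
        "time" INTEGER, "id" TEXT, "state" TEXT,
        PRIMARY KEY ("time", "id"));
    CREATE TABLE IF NOT EXISTS "history" (
        "time" INTEGER, "id" TEXT, "counter" INTEGER,
        PRIMARY KEY ("time", "id"));
""")
conn.commit()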
Example #4
import time

import dish_common

# BULK_MEASUREMENT and FLUSH_LIMIT are module-level constants, and
# sync_timebase() and flush_points() are helper functions, all defined
# elsewhere in the source module.


def loop_body(opts, gstate):
    fields = {"status": {}, "ping_stats": {}}

    def cb_add_item(key, val, category):
        fields[category][key] = val

    def cb_add_sequence(key, val, category, start):
        for i, subval in enumerate(val, start=start):
            fields[category]["{0}_{1}".format(key, i)] = subval

    def cb_add_bulk(bulk, count, timestamp, counter):
        if gstate.start_timestamp is None:
            gstate.start_timestamp = timestamp
            gstate.start_counter = counter
        # Until the timebase has been synced, buffer points separately so
        # sync_timebase() can adjust their timestamps later.
        points = gstate.points if gstate.timebase_synced else gstate.deferred_points
        for i in range(count):
            timestamp += 1
            points.append({
                "measurement": BULK_MEASUREMENT,
                "tags": {
                    "id": gstate.dish_id
                },
                "time": timestamp,
                "fields": {
                    key: val[i]
                    for key, val in bulk.items() if val[i] is not None
                },
            })
        if points:
            # save off counter value for script restart
            points[-1]["fields"]["counter"] = counter + count

    now = time.time()
    rc = dish_common.get_data(opts,
                              gstate,
                              cb_add_item,
                              cb_add_sequence,
                              add_bulk=cb_add_bulk)
    if rc:
        return rc

    for category in fields:
        if fields[category]:
            gstate.points.append({
                "measurement": "spacex.starlink.user_terminal." + category,
                "tags": {
                    "id": gstate.dish_id
                },
                "time": int(now),
                "fields": fields[category],
            })

    # This is done here rather than before the points are processed because,
    # if a prior query failed, some queued points may date from an earlier
    # loop; handling the sync here avoids treating that as a special case.
    if opts.bulk_mode and not gstate.timebase_synced:
        sync_timebase(opts, gstate)

    if opts.verbose:
        print("Data points queued: " + str(len(gstate.points)))

    if len(gstate.points) >= FLUSH_LIMIT:
        return flush_points(opts, gstate)

    return 0
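
flush_points() and sync_timebase() live elsewhere in the module. Since gstate.points are dicts in the influxdb client's point format with epoch-second timestamps, a plausible sketch of the flush step is below, assuming gstate holds an influxdb.InfluxDBClient; this is an assumption for illustration, not the script's actual code.

from influxdb import InfluxDBClient

def flush_points(opts, gstate):
    # Sketch only: assumes gstate.influx_client is an InfluxDBClient and that
    # points carry epoch-second "time" values (hence time_precision="s").
    try:
        gstate.influx_client.write_points(gstate.points, time_precision="s")
        if opts.verbose:
            print("Data points written: " + str(len(gstate.points)))
        gstate.points.clear()
    except Exception as e:
        dish_common.conn_error(opts, "Failed writing to InfluxDB: %s", str(e))
        return 1
    return 0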