Example #1
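    # fill in missing takeoff/landing ICAO codes by looking up the nearest airfield for each logbook entry: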
    numUpdatedRecords = 0

    for row in cur:
        (id, takeoffIcao, takeoffLat, takeoffLon, landingIcao, landingLat,
         landingLon) = row

        if not takeoffIcao and takeoffLat and takeoffLon:
            takeoffIcao = afm.getNearest(takeoffLat, takeoffLon)
            if takeoffIcao:
                print('takeoffIcao:', takeoffIcao)
                strSql = f"UPDATE logbook_entries set takeoff_icao = '{takeoffIcao}' where id = {id}"
                dbt.addStatement(strSql)
                numUpdatedRecords += 1

        if not landingIcao and landingLat and landingLon:
            landingIcao = afm.getNearest(landingLat, landingLon)
            if landingIcao:
                print('landingIcao:', landingIcao)
                strSql = f"UPDATE logbook_entries set landing_icao = '{landingIcao}' where id = {id}"
                dbt.addStatement(strSql)
                numUpdatedRecords += 1

    print('numUpdatedRecords:', numUpdatedRecords)

    while len(dbt.toDoStatements) > 0:
        print('len DB toDoStatements:', len(dbt.toDoStatements))
        sleep(1)
    dbt.stop()

    print('KOHEU.')
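The snippet relies on a DbThread-like helper that collects SQL statements (addStatement, toDoStatements) and executes them on a background thread. The helper itself is not shown above; what follows is only a minimal sketch of the assumed interface -- the class name DbThreadSketch and its internals are illustrative, not the project's actual implementation:

import threading
import time


class DbThreadSketch(threading.Thread):
    """Illustrative stand-in for the DbThread used above: statements are queued
    and drained by a worker thread; the real class presumably executes them
    against the database instead of printing them."""

    def __init__(self):
        super().__init__(daemon=True)
        self.toDoStatements = []    # pending SQL statements
        self.lock = threading.Lock()
        self.doRun = True

    def addStatement(self, sql: str):
        with self.lock:
            self.toDoStatements.append(sql)

    def run(self):
        while self.doRun or self.toDoStatements:
            if self.toDoStatements:
                with self.lock:
                    sql = self.toDoStatements.pop(0)
                print('[SQL]', sql)   # placeholder for real DB execution
            else:
                time.sleep(0.1)

    def stop(self):
        self.doRun = False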
Example #2
class RedisReaper(object):
    RUN_INTERVAL = 5 * 60  # [s]

    REDIS_STALE_INTERVAL_1 = 20 * 60  # [s]
    REDIS_STALE_INTERVAL_2 = 30 * 60  # [s]
    REDIS_TTL_LIMIT = REDIS_RECORD_EXPIRATION - REDIS_STALE_INTERVAL_1
    GS_THRESHOLD = getGroundSpeedThreshold(1, 'L')

    def __init__(self):
        print(
            f"[INFO] RedisReaper(lite) scheduled to run every {self.RUN_INTERVAL}s."
        )

        self.dbt = DbThread(dbConnectionInfo=dbConnectionInfo)
        self.dbt.start()

        self.redis = StrictRedis(**redisConfig)
        self.influx = InfluxDbThread(dbName=INFLUX_DB_NAME,
                                     host=INFLUX_DB_HOST)

        self.airfieldManager = AirfieldManager()

    def doWork(self):
        airborne = []

        keys = self.redis.keys('*status')
        for key in keys:
            key = key.decode('ascii')
            ttl = self.redis.ttl(key)
            value = self.redis.get(key)
            if value:  # entries could have been deleted in the meantime..
                status = int(value.decode('ascii').split(';')[0])
                if status == 1:  # 1 = airborne
                    addr = key.split('-')[0]  # in fact addressTypeStr + addr (e.g. I123456, F123456, O123456, ..)
                    airborne.append(addr)

        numLanded = 0
        for addr in airborne:
            prefix = addr[:1]
            addr = addr[1:]
            addrType = REVERSE_ADDRESS_TYPE.get(prefix, None)
            addrPrefixLong = ADDRESS_TYPE_PREFIX.get(addrType, None)

            if not addrPrefixLong:
                continue

            rs = self.influx.client.query(
                f"SELECT * FROM pos WHERE addr='{addrPrefixLong}{addr}' ORDER BY time DESC LIMIT 1;"
            )
            for res in rs:
                # print('res:', res)
                time = res[0]['time']
                ts = int(datetime.strptime(time, '%Y-%m-%dT%H:%M:%SZ').timestamp())  # UTC ts
                agl = res[0]['agl'] if res[0]['agl'] else 0
                alt = res[0]['alt'] if res[0]['alt'] else 0
                gs = res[0]['gs']
                lat = res[0]['lat']
                lon = res[0]['lon']

                landingSuspected = False
                if 0 < agl < 100 and gs < self.GS_THRESHOLD:
                    landingSuspected = True
                else:
                    lastBeaconAge = datetime.utcnow().timestamp() - ts
                    if lastBeaconAge > self.REDIS_STALE_INTERVAL_2:
                        landingSuspected = True

                if landingSuspected:
                    icaoLocation = self.airfieldManager.getNearest(lat, lon)
                    # if not icaoLocation:  # no outlandings yet..
                    #     continue

                    # print(f"addr: {addr}; dt: {dt / 60:.0f}min ; agl: {agl:.0f}m near {icaoLocation}")

                    # set status as Landed in redis (or delete?):
                    # 0 = on-ground; ts=0 to indicate forced landing:
                    self.redis.set(f"{prefix}{addr}-status", '0;0')
                    self.redis.expire(f"{prefix}{addr}-status", REDIS_RECORD_EXPIRATION)

                    # look-up related takeoff data:
                    logbookItem: LogbookItem = findMostRecentTakeoff(
                        addr, addrType)

                    # create a LANDING logbook_event -> a stored procedure then creates a logbook_entry:
                    flightTime = ts - logbookItem.takeoff_ts
                    if flightTime < 0:
                        flightTime = 0

                    icaoLocationVal = f"'{icaoLocation}'" if icaoLocation else 'null'
                    strSql = f"INSERT INTO logbook_events " \
                             f"(ts, address, address_type, aircraft_type, event, lat, lon, location_icao, flight_time) " \
                             f"VALUES " \
                             f"({ts}, '{addr}', {logbookItem.address_type}, '{logbookItem.aircraft_type}', " \
                             f"'L', {lat:.5f}, {lon:.5f}, {icaoLocationVal}, {flightTime});"

                    # print('strSql:', strSql)

                    self.dbt.addStatement(strSql)

                    numLanded += 1

        if numLanded > 0:
            print(f"[INFO] RedisReaper: cleared {numLanded} stale records")

    def stop(self):
        self.dbt.stop()
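RUN_INTERVAL suggests the reaper is meant to be executed periodically, but the scheduling code is not part of the excerpt. A minimal driver (assumed, not taken from the project) could look like this:

import time

reaper = RedisReaper()
try:
    while True:
        reaper.doWork()
        time.sleep(RedisReaper.RUN_INTERVAL)
except KeyboardInterrupt:
    reaper.stop()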
Example #3
class BeaconProcessor(object):

    redis = StrictRedis(**redisConfig)

    rawQueueOGN = Queue(maxsize=666666666)  # 0 ~ infinite (according to docs).. but apparently not
    rawQueueFLR = Queue(maxsize=666666666)
    rawQueueICA = Queue(maxsize=666666666)
    # rawQueueFLR appears twice so that two workers consume the FLARM queue;
    # one worker's performance on the current CPU is 35k/min
    queues = (rawQueueOGN, rawQueueFLR, rawQueueFLR, rawQueueICA)
    queueIds = ('ogn', 'flarm1', 'flarm2', 'icao1')
    # TODO there shall be separate queues for each worker and traffic shall be split/shaped evenly for every worker of the same kind..

    workers = list()

    def __init__(self):

        # restore unprocessed data from redis:
        numRead = 0
        for key, queue in zip(self.queueIds, self.queues):
            while True:
                item = self.redis.lpop(key)
                if not item:
                    break
                queue.put(item)
                numRead += 1
        print(f"[INFO] Loaded {numRead} raw message(s) from redis.")

        self.dbThread = DbThread(dbConnectionInfo)
        self.dbThread.start()

        self.influxDb = InfluxDbThread(dbName=INFLUX_DB_NAME,
                                       host=INFLUX_DB_HOST)
        self.influxDb.start()

        for id, queue in zip(self.queueIds, self.queues):
            rawWorker = RawWorker(id=id,
                                  dbThread=self.dbThread,
                                  rawQueue=queue,
                                  influxDb=self.influxDb)
            rawWorker.start()
            self.workers.append(rawWorker)

        self.timer = PeriodicTimer(60, self._processStats)
        self.timer.start()

    def stop(self):
        for worker in self.workers:
            worker.stop()

        # store all unprocessed data into redis:
        n = 0
        for key, queue in zip(self.queueIds, self.queues):
            n += queue.qsize()
            for item in list(queue.queue):
                self.redis.rpush(key, item)
        print(f"[INFO] Flushed {n} rawQueueX items into redis.")

        self.dbThread.stop()
        self.influxDb.stop()

        self.timer.stop()

        print('[INFO] BeaconProcessor terminated.')

    startTime = time.time()
    numEnquedTasks = 0

    def _processStats(self):
        now = time.time()
        tDiff = now - self.startTime
        numTasksPerMin = self.numEnquedTasks / tDiff * 60
        numQueuedTasks = (self.rawQueueOGN.qsize() + self.rawQueueFLR.qsize()
                          + self.rawQueueICA.qsize())
        print(
            f"[INFO] Beacon rate: {numTasksPerMin:.0f}/min. {numQueuedTasks} queued."
        )

        traffic = dict()
        for worker in self.workers:
            traffic[worker.id] = worker.numProcessed
            worker.numProcessed = 0

        if not DEBUG and numTasksPerMin >= 10:
            cmd = f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/rate -m '{round(numTasksPerMin)}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/queued -m '{round(numQueuedTasks)}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/ogn -m '{traffic['ogn']}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/flarm -m '{traffic['flarm1'] + traffic['flarm2']}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/icao -m '{traffic['icao1']}';"
            os.system(cmd)

        self.numEnquedTasks = 0
        self.startTime = now

    def enqueueForProcessing(self, raw_message: str):
        prefix = raw_message[:3]
        if prefix == 'OGN':
            self.rawQueueOGN.put(raw_message)
        elif prefix == 'FLR':
            self.rawQueueFLR.put(raw_message)
        elif prefix == 'ICA':
            self.rawQueueICA.put(raw_message)
        else:
            print(f'[WARN] Worker for "{prefix}" not implemented!',
                  raw_message,
                  file=sys.stderr)
            return

        self.numEnquedTasks += 1
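Routing in enqueueForProcessing is driven purely by the first three characters of the raw message. A hypothetical call sequence follows; the message bodies are made-up placeholders, only the 'OGN'/'FLR'/'ICA' prefixes matter:

bp = BeaconProcessor()
bp.enqueueForProcessing("FLRDD1234>APRS,...")   # -> rawQueueFLR
bp.enqueueForProcessing("ICA4B5678>APRS,...")   # -> rawQueueICA
bp.enqueueForProcessing("OGN123456>APRS,...")   # -> rawQueueOGN
bp.enqueueForProcessing("XYZ000000>APRS,...")   # unknown prefix -> warning, not queued
bp.stop()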
Example #4
class BeaconProcessor(object):

    redis = StrictRedis(**redisConfig)

    rawQueueOGN = Queue(maxsize=0)  # 0 ~ infinite (according to docs)
    rawQueueFLR = Queue(maxsize=0)
    rawQueueICA = Queue(maxsize=0)
    queues = (rawQueueOGN, rawQueueFLR, rawQueueICA)
    queueIds = ('ogn', 'flarm', 'icao')

    workers = list()

    def __init__(self):

        # restore unprocessed data from redis:
        numRead = 0
        for key, queue in zip(self.queueIds, self.queues):
            while True:
                item = self.redis.lpop(key)
                if not item:
                    break
                queue.put(item)
                numRead += 1
        print(f"[INFO] Loaded {numRead} raw message(s) from redis.")

        self.dbThread = DbThread(dbConnectionInfo)
        self.dbThread.start()

        self.influxDb = InfluxDbThread(dbName=INFLUX_DB_NAME,
                                       host=INFLUX_DB_HOST)
        self.influxDb.start()

        for id, queue in zip(self.queueIds, self.queues):
            rawWorker = RawWorker(id=id,
                                  dbThread=self.dbThread,
                                  rawQueue=queue,
                                  influxDb=self.influxDb)
            rawWorker.start()
            self.workers.append(rawWorker)

        self.timer = PeriodicTimer(60, self._processStats)
        self.timer.start()

    def stop(self):
        for worker in self.workers:
            worker.stop()

        # store all unprocessed data into redis:
        n = 0
        for key, queue in zip(self.queueIds, self.queues):
            n += queue.qsize()
            for item in list(queue.queue):
                self.redis.rpush(key, item)
        print(f"[INFO] Flushed {n} rawQueueX items into redis.")

        self.dbThread.stop()
        self.influxDb.stop()

        self.timer.stop()

        print('[INFO] BeaconProcessor terminated.')

    startTime = time.time()
    numEnquedTasks = 0

    def _processStats(self):
        now = time.time()
        tDiff = now - self.startTime
        numTasksPerMin = self.numEnquedTasks / tDiff * 60
        numQueuedTasks = (self.rawQueueOGN.qsize() + self.rawQueueFLR.qsize()
                          + self.rawQueueICA.qsize())
        print(
            f"[INFO] Beacon rate: {numTasksPerMin:.0f}/min. {numQueuedTasks} queued."
        )

        traffic = dict()
        for worker in self.workers:
            traffic[worker.id] = worker.numProcessed
            worker.numProcessed = 0

        if not debugMode and numTasksPerMin >= 400:
            cmd = f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/rate -m '{round(numTasksPerMin)}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/queued -m '{round(numQueuedTasks)}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/ogn -m '{traffic['ogn']}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/flarm -m '{traffic['flarm']}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/icao -m '{traffic['icao']}';"
            os.system(cmd)

        self.numEnquedTasks = 0
        self.startTime = now

    def enqueueForProcessing(self, raw_message: str):
        prefix = raw_message[:3]
        if prefix == 'OGN':
            self.rawQueueOGN.put(raw_message)
        elif prefix == 'FLR':
            self.rawQueueFLR.put(raw_message)
        else:  # 'ICA'
            self.rawQueueICA.put(raw_message)

        self.numEnquedTasks += 1
Example #5
class BeaconProcessor(object):

    redis = StrictRedis(**redisConfig)

    rawQueueOGN = Queue()
    rawQueueFLR = Queue()
    rawQueueICA = Queue()
    queues = (rawQueueOGN, rawQueueFLR, rawQueueICA)
    queueKeys = ('rawQueueOGN', 'rawQueueFLR', 'rawQueueICA')

    workers = list()

    def __init__(self):

        # restore unprocessed data from redis:
        numRead = 0
        for key, queue in zip(self.queueKeys, self.queues):
            while True:
                item = self.redis.lpop(key)
                if not item:
                    break
                queue.put(item)
                numRead += 1
        print(f"[INFO] Loaded {numRead} raw message(s) from redis.")

        self.dbThread = DbThread(dbConnectionInfo)
        self.dbThread.start()

        for i, queue in enumerate(self.queues):
            rawWorker = RawWorker(index=i,
                                  dbThread=self.dbThread,
                                  rawQueue=queue)
            rawWorker.start()
            self.workers.append(rawWorker)

    def stop(self):
        for worker in self.workers:
            worker.stop()

        # store all unprocessed data into redis:
        n = 0
        for key, queue in zip(self.queueKeys, self.queues):
            n += queue.qsize()
            for item in list(queue.queue):
                self.redis.rpush(key, item)
        print(f"[INFO] Flushed {n} rawQueueX items into redis.")

        self.dbThread.stop()

        print('[INFO] BeaconProcessor terminated.')

    startTime = time.time()
    numEnquedTasks = 0

    def _printStats(self):
        now = time.time()
        tDiff = now - self.startTime
        if tDiff >= 60:
            numTasksPerMin = self.numEnquedTasks / tDiff * 60
            numQueuedTasks = (self.rawQueueOGN.qsize() + self.rawQueueFLR.qsize()
                              + self.rawQueueICA.qsize())

            print(
                f"Beacon rate: {numTasksPerMin:.0f}/min. {numQueuedTasks} queued."
            )

            self.numEnquedTasks = 0
            self.startTime = now

    def enqueueForProcessing(self, raw_message: str):
        self._printStats()

        prefix = raw_message[:3]
        if prefix == 'OGN':
            self.rawQueueOGN.put(raw_message)
        elif prefix == 'FLR':
            self.rawQueueFLR.put(raw_message)
        else:  # 'ICA'
            self.rawQueueICA.put(raw_message)

        self.numEnquedTasks += 1
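All versions persist unprocessed queue items to redis on shutdown (rpush in stop()) and restore them on the next start (lpop in __init__). The round-trip can be exercised in isolation; the sketch below assumes a locally running redis instance and uses an arbitrary key name:

from queue import Queue
from redis import StrictRedis

r = StrictRedis()

# flush: copy queue contents into a redis list
q = Queue()
q.put(b'raw message 1')
q.put(b'raw message 2')
for item in list(q.queue):
    r.rpush('rawQueueTest', item)

# restore: pop the items back into a fresh queue
q2 = Queue()
while True:
    item = r.lpop('rawQueueTest')
    if not item:
        break
    q2.put(item)

print(q2.qsize())  # -> 2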