Example #1
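Both BeaconProcessor examples are shown without their import header. A plausible reconstruction follows, assuming redis-py, the standard library, and the project layout hinted at by Example #3 (the configuration constants and the db package); the module paths for RawWorker and PeriodicTimer are guesses, and Example #2 reads debugMode instead of DEBUG:

import os
import sys
import time
from queue import Queue

from redis import StrictRedis

# Project-local imports -- the names come from the snippets, the module paths are assumptions:
from configuration import (redisConfig, dbConnectionInfo, DEBUG,
                           INFLUX_DB_NAME, INFLUX_DB_HOST,
                           MQ_HOST, MQ_PORT, MQ_USER, MQ_PASSWORD)
from db.DbThread import DbThread
from db.InfluxDbThread import InfluxDbThread
from periodicTimer import PeriodicTimer   # assumed module path
from rawWorker import RawWorker           # assumed module path
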
class BeaconProcessor(object):

    redis = StrictRedis(**redisConfig)

    rawQueueOGN = Queue(maxsize=666666666)  # 0 ~ infinite (according to docs).. but apparently not
    rawQueueFLR = Queue(maxsize=666666666)
    rawQueueICA = Queue(maxsize=666666666)
    # rawQueueFLR appears twice: the 'flarm1' and 'flarm2' workers consume the same FLARM queue.
    # One worker's performance on the current CPU is 35k/min.
    queues = (rawQueueOGN, rawQueueFLR, rawQueueFLR, rawQueueICA)
    queueIds = ('ogn', 'flarm1', 'flarm2', 'icao1')
    # TODO there shall be separate queues for each worker and traffic shall be split/shaped evenly for every worker of the same kind..

    workers = list()

    def __init__(self):

        # restore unprocessed data from redis:
        numRead = 0
        for key, queue in zip(self.queueIds, self.queues):
            while True:
                item = self.redis.lpop(key)
                if not item:
                    break
                queue.put(item)
                numRead += 1
        print(f"[INFO] Loaded {numRead} raw message(s) from redis.")

        self.dbThread = DbThread(dbConnectionInfo)
        self.dbThread.start()

        self.influxDb = InfluxDbThread(dbName=INFLUX_DB_NAME,
                                       host=INFLUX_DB_HOST)
        self.influxDb.start()

        for id, queue in zip(self.queueIds, self.queues):
            rawWorker = RawWorker(id=id,
                                  dbThread=self.dbThread,
                                  rawQueue=queue,
                                  influxDb=self.influxDb)
            rawWorker.start()
            self.workers.append(rawWorker)

        self.timer = PeriodicTimer(60, self._processStats)
        self.timer.start()

    def stop(self):
        for worker in self.workers:
            worker.stop()

        # store all unprocessed data into redis:
        n = 0
        for key, queue in zip(self.queueIds, self.queues):
            n += queue.qsize()
            for item in list(queue.queue):
                self.redis.rpush(key, item)
        print(f"[INFO] Flushed {n} rawQueueX items into redis.")

        self.dbThread.stop()
        self.influxDb.stop()

        self.timer.stop()

        print('[INFO] BeaconProcessor terminated.')

    startTime = time.time()
    numEnquedTasks = 0

    def _processStats(self):
        now = time.time()
        tDiff = now - self.startTime
        numTasksPerMin = self.numEnquedTasks / tDiff * 60
        numQueuedTasks = (self.rawQueueOGN.qsize() + self.rawQueueFLR.qsize()
                          + self.rawQueueICA.qsize())
        print(f"[INFO] Beacon rate: {numTasksPerMin:.0f}/min. {numQueuedTasks} queued.")

        traffic = dict()
        for worker in self.workers:
            traffic[worker.id] = worker.numProcessed
            worker.numProcessed = 0

        if not DEBUG and numTasksPerMin >= 10:
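            # publish the per-minute stats to MQTT topics using the mosquitto_pub command-line client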
            cmd = f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/rate -m '{round(numTasksPerMin)}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/queued -m '{round(numQueuedTasks)}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/ogn -m '{traffic['ogn']}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/flarm -m '{traffic['flarm1'] + traffic['flarm2']}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/icao -m '{traffic['icao1']}';"
            os.system(cmd)

        self.numEnquedTasks = 0
        self.startTime = now

    def enqueueForProcessing(self, raw_message: str):
        prefix = raw_message[:3]
        if prefix == 'OGN':
            self.rawQueueOGN.put(raw_message)
        elif prefix == 'FLR':
            self.rawQueueFLR.put(raw_message)
        elif prefix == 'ICA':
            self.rawQueueICA.put(raw_message)
        else:
            print(f'[WARN] Worker for "{prefix}" not implemented!',
                  raw_message,
                  file=sys.stderr)
            return

        self.numEnquedTasks += 1
Example #2
class BeaconProcessor(object):

    redis = StrictRedis(**redisConfig)

    rawQueueOGN = Queue(maxsize=0)  # 0 ~ infinite (according to docs)
    rawQueueFLR = Queue(maxsize=0)
    rawQueueICA = Queue(maxsize=0)
    queues = (rawQueueOGN, rawQueueFLR, rawQueueICA)
    queueIds = ('ogn', 'flarm', 'icao')

    workers = list()

    def __init__(self):

        # restore unprocessed data from redis:
        numRead = 0
        for key, queue in zip(self.queueIds, self.queues):
            while True:
                item = self.redis.lpop(key)
                if not item:
                    break
                queue.put(item)
                numRead += 1
        print(f"[INFO] Loaded {numRead} raw message(s) from redis.")

        self.dbThread = DbThread(dbConnectionInfo)
        self.dbThread.start()

        self.influxDb = InfluxDbThread(dbName=INFLUX_DB_NAME,
                                       host=INFLUX_DB_HOST)
        self.influxDb.start()

        for id, queue in zip(self.queueIds, self.queues):
            rawWorker = RawWorker(id=id,
                                  dbThread=self.dbThread,
                                  rawQueue=queue,
                                  influxDb=self.influxDb)
            rawWorker.start()
            self.workers.append(rawWorker)

        self.timer = PeriodicTimer(60, self._processStats)
        self.timer.start()

    def stop(self):
        for worker in self.workers:
            worker.stop()

        # store all unprocessed data into redis:
        n = 0
        for key, queue in zip(self.queueIds, self.queues):
            n += queue.qsize()
            for item in list(queue.queue):
                self.redis.rpush(key, item)
        print(f"[INFO] Flushed {n} rawQueueX items into redis.")

        self.dbThread.stop()
        self.influxDb.stop()

        self.timer.stop()

        print('[INFO] BeaconProcessor terminated.')

    startTime = time.time()
    numEnquedTasks = 0

    def _processStats(self):
        now = time.time()
        tDiff = now - self.startTime
        numTasksPerMin = self.numEnquedTasks / tDiff * 60
        numQueuedTasks = (self.rawQueueOGN.qsize() + self.rawQueueFLR.qsize()
                          + self.rawQueueICA.qsize())
        print(f"[INFO] Beacon rate: {numTasksPerMin:.0f}/min. {numQueuedTasks} queued.")

        traffic = dict()
        for worker in self.workers:
            traffic[worker.id] = worker.numProcessed
            worker.numProcessed = 0

        if not debugMode and numTasksPerMin >= 400:
            cmd = f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/rate -m '{round(numTasksPerMin)}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/queued -m '{round(numQueuedTasks)}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/ogn -m '{traffic['ogn']}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/flarm -m '{traffic['flarm']}'; " \
                  f"mosquitto_pub -h {MQ_HOST} -p {MQ_PORT} -u {MQ_USER} -P {MQ_PASSWORD} -t ognLogbook/icao -m '{traffic['icao']}';"
            os.system(cmd)

        self.numEnquedTasks = 0
        self.startTime = now

    def enqueueForProcessing(self, raw_message: str):
        prefix = raw_message[:3]
        if prefix == 'OGN':
            self.rawQueueOGN.put(raw_message)
        elif prefix == 'FLR':
            self.rawQueueFLR.put(raw_message)
        else:  # 'ICA'
            self.rawQueueICA.put(raw_message)

        self.numEnquedTasks += 1
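
For context, a minimal usage sketch (not part of the original example), assuming the imports above; readAprsLine() is a hypothetical stand-in for whatever APRS client feeds the raw beacon lines:

processor = BeaconProcessor()
try:
    while True:
        rawLine = readAprsLine()   # hypothetical source of 'OGN...'/'FLR...'/'ICA...' strings
        processor.enqueueForProcessing(rawLine)
except KeyboardInterrupt:
    processor.stop()               # flushes anything still queued back into redis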
Example #3
import time

from configuration import INFLUX_DB_NAME, INFLUX_DB_HOST
from db.InfluxDbThread import InfluxDbThread

from experimental.influx_exportData import DUMP_FILEPATH

if __name__ == '__main__':

    influx = InfluxDbThread(dbName=INFLUX_DB_NAME, host=INFLUX_DB_HOST)
    influx.start()

    with open(DUMP_FILEPATH, 'r') as f:
        for line in f:
            # print('line:', line)
            influx.addStatement(line)

    # block until the InfluxDbThread worker has drained its statement queue
    while influx.toDoStatements.qsize() > 0:
        time.sleep(1)  # ~ thread.yield()

    print('KOHEU.')