class PskReporter(Reporter):
    interval = 300

    def getSupportedModes(self):
        return ["FT8", "FT4", "JT9", "JT65", "FST4", "JS8", "Q65"]

    def stop(self):
        self.cancelTimer()
        with self.spotLock:
            self.spots = []

    def __init__(self):
        self.spots = []
        self.spotLock = threading.Lock()
        self.uploader = Uploader()
        self.timer = None
        metrics = Metrics.getSharedInstance()
        self.dupeCounter = CounterMetric()
        metrics.addMetric("pskreporter.duplicates", self.dupeCounter)
        self.spotCounter = CounterMetric()
        metrics.addMetric("pskreporter.spots", self.spotCounter)

    def scheduleNextUpload(self):
        if self.timer:
            return
        delay = PskReporter.interval + random.uniform(0, 30)
        logger.debug("scheduling next pskreporter upload in %f seconds", delay)
        self.timer = threading.Timer(delay, self.upload)
        self.timer.start()

    def spotEquals(self, s1, s2):
        keys = ["callsign", "timestamp", "locator", "mode", "msg"]
        return reduce(and_, map(lambda key: s1[key] == s2[key], keys))

    def spot(self, spot):
        with self.spotLock:
            if any(x for x in self.spots if self.spotEquals(spot, x)):
                # dupe
                self.dupeCounter.inc()
            else:
                self.spotCounter.inc()
                self.spots.append(spot)
        self.scheduleNextUpload()

    def upload(self):
        try:
            with self.spotLock:
                self.timer = None
                spots = self.spots
                self.spots = []
            if spots:
                self.uploader.upload(spots)
        except Exception:
            logger.exception("Failed to upload spots")

    def cancelTimer(self):
        if self.timer:
            self.timer.cancel()

class WsprnetReporter(Reporter):
    def __init__(self):
        # max 100 entries
        self.queue = Queue(100)
        # single worker
        Worker(self.queue).start()
        # metrics
        metrics = Metrics.getSharedInstance()
        self.spotCounter = CounterMetric()
        metrics.addMetric("wsprnet.spots", self.spotCounter)

    def stop(self):
        while not self.queue.empty():
            self.queue.get(timeout=1)
            self.queue.task_done()
        self.queue.put(PoisonPill)

    def spot(self, spot):
        try:
            self.queue.put(spot, block=False)
            self.spotCounter.inc()
        except Full:
            logger.warning("WSPRNet Queue overflow, one spot lost")

    def getSupportedModes(self):
        return ["WSPR", "FST4W"]

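# The reporter above hands spots to a bounded queue that a single background worker
# drains, and shuts the worker down by enqueueing a poison pill. The following is a
# minimal, self-contained sketch of that pattern; PoisonPill and Worker here are
# simplified stand-ins, not the project's own classes.
import threading
from queue import Queue, Full

PoisonPill = object()  # sentinel object telling the worker thread to exit


class Worker(threading.Thread):
    def __init__(self, queue: Queue):
        super().__init__(daemon=True)
        self.queue = queue

    def run(self):
        while True:
            item = self.queue.get()
            try:
                if item is PoisonPill:
                    break
                # in the reporter, this is where the spot would be uploaded
                print("processing", item)
            finally:
                self.queue.task_done()


if __name__ == "__main__":
    queue = Queue(100)  # bounded, so a stalled upload cannot grow memory without limit
    Worker(queue).start()
    try:
        # non-blocking put: a full queue drops the spot instead of stalling the caller
        queue.put({"callsign": "DL1ABC", "mode": "WSPR"}, block=False)
    except Full:
        pass
    queue.put(PoisonPill)  # shut the worker down
    queue.join()  # wait until both enqueued items have been handled
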
def pushDecode(self):
    band = "unknown"
    if self.band is not None:
        band = self.band.getName()

    name = "digiham.decodes.{band}.pocsag".format(band=band)
    metrics = Metrics.getSharedInstance()
    metric = metrics.getMetric(name)
    if metric is None:
        metric = CounterMetric()
        metrics.addMetric(name, metric)
    metric.inc()

def pushDecode(self, band):
    metrics = Metrics.getSharedInstance()
    bandName = "unknown"
    if band is not None:
        bandName = band.getName()

    name = "js8call.decodes.{band}.JS8".format(band=bandName)
    metric = metrics.getMetric(name)
    if metric is None:
        metric = CounterMetric()
        metrics.addMetric(name, metric)
    metric.inc()

def pushDecode(self, mode, band):
    metrics = Metrics.getSharedInstance()
    bandName = "unknown"
    if band is not None:
        bandName = band.getName()
    if mode is None:
        mode = "unknown"

    name = "wsjt.decodes.{band}.{mode}".format(band=bandName, mode=mode)
    metric = metrics.getMetric(name)
    if metric is None:
        metric = CounterMetric()
        metrics.addMetric(name, metric)
    metric.inc()

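# The three pushDecode() variants above share the same lazy "get-or-create" pattern:
# look up a per-band (and per-mode) counter in the metrics registry and create it on
# the first decode. A standalone sketch of that pattern follows; Metrics and
# CounterMetric are simplified stand-ins here, not the project's implementations.
import threading


class CounterMetric:
    def __init__(self):
        self.count = 0
        self.lock = threading.Lock()

    def inc(self):
        with self.lock:
            self.count += 1


class Metrics:
    def __init__(self):
        self.metrics = {}

    def getMetric(self, name):
        return self.metrics.get(name)

    def addMetric(self, name, metric):
        self.metrics[name] = metric


def countDecode(metrics, prefix, bandName, mode):
    # counters are keyed per band and mode, e.g. "wsjt.decodes.20m.FT8"
    name = "{prefix}.decodes.{band}.{mode}".format(
        prefix=prefix, band=bandName or "unknown", mode=mode or "unknown"
    )
    metric = metrics.getMetric(name)
    if metric is None:
        metric = CounterMetric()
        metrics.addMetric(name, metric)
    metric.inc()


if __name__ == "__main__":
    metrics = Metrics()
    countDecode(metrics, "wsjt", "20m", "FT8")
    countDecode(metrics, "wsjt", "20m", "FT8")
    print(metrics.getMetric("wsjt.decodes.20m.FT8").count)  # -> 2
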
class WsjtQueue(Queue):
    sharedInstance = None
    creationLock = threading.Lock()

    @staticmethod
    def getSharedInstance():
        with WsjtQueue.creationLock:
            if WsjtQueue.sharedInstance is None:
                pm = PropertyManager.getSharedInstance()
                WsjtQueue.sharedInstance = WsjtQueue(
                    maxsize=pm["wsjt_queue_length"], workers=pm["wsjt_queue_workers"]
                )
        return WsjtQueue.sharedInstance

    def __init__(self, maxsize, workers):
        super().__init__(maxsize)
        metrics = Metrics.getSharedInstance()
        metrics.addMetric("wsjt.queue.length", DirectMetric(self.qsize))
        self.inCounter = CounterMetric()
        metrics.addMetric("wsjt.queue.in", self.inCounter)
        self.outCounter = CounterMetric()
        metrics.addMetric("wsjt.queue.out", self.outCounter)
        self.overflowCounter = CounterMetric()
        metrics.addMetric("wsjt.queue.overflow", self.overflowCounter)
        self.errorCounter = CounterMetric()
        metrics.addMetric("wsjt.queue.error", self.errorCounter)
        self.workers = [self.newWorker() for _ in range(0, workers)]

    def put(self, item):
        self.inCounter.inc()
        try:
            super(WsjtQueue, self).put(item, block=False)
        except Full:
            self.overflowCounter.inc()
            raise

    def get(self, **kwargs):
        # super.get() is blocking, so it would mess up the stats to inc() first
        out = super(WsjtQueue, self).get(**kwargs)
        self.outCounter.inc()
        return out

    def newWorker(self):
        worker = WsjtQueueWorker(self)
        worker.start()
        return worker

    def onError(self):
        self.errorCounter.inc()

class DecoderQueue(Queue):
    sharedInstance = None
    creationLock = threading.Lock()

    @staticmethod
    def getSharedInstance():
        with DecoderQueue.creationLock:
            if DecoderQueue.sharedInstance is None:
                DecoderQueue.sharedInstance = DecoderQueue()
        return DecoderQueue.sharedInstance

    @staticmethod
    def stopAll():
        with DecoderQueue.creationLock:
            if DecoderQueue.sharedInstance is not None:
                DecoderQueue.sharedInstance.stop()
                DecoderQueue.sharedInstance = None

    def __init__(self):
        pm = Config.get()
        super().__init__(pm["decoding_queue_length"])
        self.workers = []
        self._setWorkers(pm["decoding_queue_workers"])
        self.subscriptions = [
            pm.wireProperty("decoding_queue_length", self._setMaxSize),
            pm.wireProperty("decoding_queue_workers", self._setWorkers),
        ]
        metrics = Metrics.getSharedInstance()
        metrics.addMetric("decoding.queue.length", DirectMetric(self.qsize))
        self.inCounter = CounterMetric()
        metrics.addMetric("decoding.queue.in", self.inCounter)
        self.outCounter = CounterMetric()
        metrics.addMetric("decoding.queue.out", self.outCounter)
        self.overflowCounter = CounterMetric()
        metrics.addMetric("decoding.queue.overflow", self.overflowCounter)
        self.errorCounter = CounterMetric()
        metrics.addMetric("decoding.queue.error", self.errorCounter)

    def _setMaxSize(self, size):
        if self.maxsize == size:
            return
        self.maxsize = size

    def _setWorkers(self, workers):
        # grow or shrink the worker pool to the configured size
        while len(self.workers) > workers:
            logger.debug("stopping one worker")
            self.workers.pop().stop()
        while len(self.workers) < workers:
            logger.debug("starting one worker")
            self.workers.append(self.newWorker())

    def stop(self):
        logger.debug("shutting down the queue")
        # stop listening for configuration changes first, so the pool size stays fixed
        while self.subscriptions:
            self.subscriptions.pop().cancel()
        try:
            # purge all remaining jobs
            while not self.empty():
                job = self.get()
                job.unlink()
                self.task_done()
        except Empty:
            pass
        # put() a PoisonPill for all active workers to shut them down
        for w in self.workers:
            if w.is_alive():
                self.put(PoisonPill)
        self.join()

    def put(self, item, **kwargs):
        self.inCounter.inc()
        try:
            super(DecoderQueue, self).put(item, block=False)
        except Full:
            self.overflowCounter.inc()
            raise

    def get(self, **kwargs):
        # super.get() is blocking, so it would mess up the stats to inc() first
        out = super(DecoderQueue, self).get(**kwargs)
        self.outCounter.inc()
        return out

    def newWorker(self):
        worker = QueueWorker(self)
        worker.start()
        return worker

    def onError(self):
        self.errorCounter.inc()

class PskReporter(Reporter):
    """
    This class implements the reporting interface to send received signals to pskreporter.info.

    It interfaces with pskreporter as documented here: https://pskreporter.info/pskdev.html
    """

    interval = 300

    def getSupportedModes(self):
        """
        Supports all valid MODE and SUBMODE values from the ADIF standard.

        Current version at the time of the last change:
        https://www.adif.org/312/ADIF_312.htm#Mode_Enumeration
        """
        return ["FT8", "FT4", "JT9", "JT65", "FST4", "JS8", "Q65", "WSPR", "FST4W"]

    def stop(self):
        self.cancelTimer()
        with self.spotLock:
            self.spots = []

    def __init__(self):
        self.spots = []
        self.spotLock = threading.Lock()
        self.uploader = Uploader()
        self.timer = None
        metrics = Metrics.getSharedInstance()
        self.dupeCounter = CounterMetric()
        metrics.addMetric("pskreporter.duplicates", self.dupeCounter)
        self.spotCounter = CounterMetric()
        metrics.addMetric("pskreporter.spots", self.spotCounter)

    def scheduleNextUpload(self):
        if self.timer:
            return
        delay = PskReporter.interval + random.uniform(0, 30)
        logger.debug("scheduling next pskreporter upload in %f seconds", delay)
        self.timer = threading.Timer(delay, self.upload)
        self.timer.start()

    def spotEquals(self, s1, s2):
        keys = ["callsign", "timestamp", "locator", "mode", "msg"]
        return reduce(and_, map(lambda key: s1[key] == s2[key], keys))

    def spot(self, spot):
        with self.spotLock:
            if any(x for x in self.spots if self.spotEquals(spot, x)):
                # dupe
                self.dupeCounter.inc()
            else:
                self.spotCounter.inc()
                self.spots.append(spot)
        self.scheduleNextUpload()

    def upload(self):
        try:
            with self.spotLock:
                self.timer = None
                spots = self.spots
                self.spots = []
            if spots:
                self.uploader.upload(spots)
        except Exception:
            logger.exception("Failed to upload spots")

    def cancelTimer(self):
        if self.timer:
            self.timer.cancel()

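# A standalone sketch of the de-duplication check performed by spotEquals() above: two
# spots count as duplicates when callsign, timestamp, locator, mode and message text all
# match. The spot dicts below use only the keys that check compares; any further fields
# the uploader may expect are omitted, and the values are made up for illustration.
from functools import reduce
from operator import and_


def spotEquals(s1, s2):
    keys = ["callsign", "timestamp", "locator", "mode", "msg"]
    return reduce(and_, map(lambda key: s1[key] == s2[key], keys))


if __name__ == "__main__":
    a = {"callsign": "DL1ABC", "timestamp": 1700000000, "locator": "JO62", "mode": "FT8", "msg": "CQ DL1ABC JO62"}
    b = dict(a)  # identical spot -> duplicate, only the dupe counter is incremented
    c = dict(a, timestamp=1700000015)  # same station, later decode -> kept as a new spot
    print(spotEquals(a, b))  # True
    print(spotEquals(a, c))  # False
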
class PskReporter(object):
    sharedInstance = None
    creationLock = threading.Lock()
    interval = 300
    supportedModes = ["FT8", "FT4", "JT9", "JT65", "JS8"]

    @staticmethod
    def getSharedInstance():
        with PskReporter.creationLock:
            if PskReporter.sharedInstance is None:
                if Config.get()["pskreporter_enabled"]:
                    PskReporter.sharedInstance = PskReporter()
                else:
                    PskReporter.sharedInstance = PskReporterDummy()
        return PskReporter.sharedInstance

    @staticmethod
    def stop():
        if PskReporter.sharedInstance:
            PskReporter.sharedInstance.cancelTimer()

    def __init__(self):
        self.spots = []
        self.spotLock = threading.Lock()
        self.uploader = Uploader()
        self.timer = None
        metrics = Metrics.getSharedInstance()
        self.dupeCounter = CounterMetric()
        metrics.addMetric("pskreporter.duplicates", self.dupeCounter)
        self.spotCounter = CounterMetric()
        metrics.addMetric("pskreporter.spots", self.spotCounter)

    def scheduleNextUpload(self):
        if self.timer:
            return
        delay = PskReporter.interval + random.uniform(0, 30)
        logger.debug("scheduling next pskreporter upload in %f seconds", delay)
        self.timer = threading.Timer(delay, self.upload)
        self.timer.start()

    def spotEquals(self, s1, s2):
        keys = ["callsign", "timestamp", "locator", "mode", "msg"]
        return reduce(and_, map(lambda key: s1[key] == s2[key], keys))

    def spot(self, spot):
        if not spot["mode"] in PskReporter.supportedModes:
            return
        with self.spotLock:
            if any(x for x in self.spots if self.spotEquals(spot, x)):
                # dupe
                self.dupeCounter.inc()
            else:
                self.spotCounter.inc()
                self.spots.append(spot)
        self.scheduleNextUpload()

    def upload(self):
        try:
            with self.spotLock:
                spots = self.spots
                self.spots = []
            if spots:
                self.uploader.upload(spots)
        except Exception:
            logger.exception("Failed to upload spots")
        self.timer = None
        self.scheduleNextUpload()

    def cancelTimer(self):
        if self.timer:
            self.timer.cancel()