def test_with_messages(self):
    """cancer() aggregates only current-minute messages per channel.

    A message 180 s in the past is excluded; the two fresh messages are
    summed (1000 + 1) and counted (2) for the channel.
    """
    store = InMemoryStore()
    channel = "channel"
    stale = datetime.datetime.now(datetime.timezone.utc).replace(
        second=0, microsecond=0) - datetime.timedelta(seconds=180)
    fresh = datetime.datetime.now(datetime.timezone.utc).replace(
        second=0, microsecond=0)
    for date, cancer in ((stale, 10), (fresh, 1000), (fresh, 1)):
        store.messages.append(
            {'date': date, 'channel': channel, 'cancer': cancer})
    self.assertEqual(
        store.cancer(),
        [{'channel': channel, 'cancer': 1001, 'messages': 2}])
class MemoryStorage(StorageInterface):
    """Storage backend that keeps live messages in an in-memory store.

    On construction it starts a cron job that archives the store, binds a
    ZeroMQ PUB socket for per-minute summaries and a ZeroMQ REP socket for
    live cancer-level requests, and spawns a daemon thread serving the
    latter.
    """

    def __init__(self):
        super().__init__()
        self._store = InMemoryStore()
        # process and delete self.messages every minute
        # (Cron cadence assumed to be one minute — configured elsewhere; verify)
        self.cron = Cron()
        self.cron.add(call=self._archive)
        self.cron.start()
        # publish a summary of all cancer messages grouped by minute and channel
        self.zmq_context = zmq.Context()
        self.pubsub_socket = self.zmq_context.socket(zmq.PUB)
        summary_socket = Config.get('monitor.socket.cancer_summary')
        self.pubsub_socket.bind(summary_socket)
        logger.info("bound publish socket to %s", summary_socket)
        # respond to live cancer requests
        self.cancer_socket = self.zmq_context.socket(zmq.REP)
        request_socket = Config.get('monitor.socket.cancer_request')
        self.cancer_socket.bind(request_socket)
        logger.info("bound cancer socket to %s", request_socket)
        # TODO: use asyncio
        t = threading.Thread(target=self._handle_cancer_request)
        # daemon so the request loop does not block interpreter shutdown
        t.daemon = True
        t.start()
        logger.info("started handle cancer request thread")

    # adds a record in the in-memory store
    # @memory.write()
    def store(self, channel, cancer):
        """Record a (channel, cancer) observation in the in-memory store."""
        self._store.store(channel, cancer)

    # computes cancer level from the in-memory store
    # @memory.read()
    def cancer(self):
        """Return the current cancer summary computed by the in-memory store."""
        return self._store.cancer()

    # respond to cancer request on a socket
    # @socket.recv()
    # @memory.read()
    # @socket.send()
    def _handle_cancer_request(self):
        """Serve the REP socket forever: any request gets the pickled summary.

        Runs on the daemon thread started in __init__; the request payload
        itself is ignored.
        """
        while True:
            self.cancer_socket.recv()
            cancer = self._store.cancer()
            self.cancer_socket.send_pyobj(cancer)

    # archive live messages from the in-memory store into the persistent store
    # @memory.read()
    # @socket.send()
    def _archive(self):
        """Archive the in-memory store and publish per-minute summaries.

        Called by the cron job. Each (date, channel) record is pickled and
        published on the PUB socket under the b'summary' topic frame.
        """
        history = self._store.archive()
        # publish the summaries on the pubsub socket
        for date, channels in history.items():
            for channel, record in channels.items():
                record = {
                    'date': date,
                    'channel': channel,
                    'cancer': record['cancer'],
                    'messages': record['messages']
                }
                self.pubsub_socket.send_multipart(
                    [b'summary', pickle.dumps(record)])
            logger.info(
                'published leaderboards of round %s with messages from %s channels',
                date, len(channels))
def test_no_messages(self):
    """A freshly created store reports an empty cancer summary."""
    self.assertEqual(InMemoryStore().cancer(), [])
class MemoryStorage(StorageInterface):
    """Storage backend that keeps live messages in an in-memory store.

    On construction it starts a cron job that archives the store, binds a
    ZeroMQ PUB socket for per-minute summaries and a ZeroMQ REP socket for
    live cancer-level requests, and spawns a daemon thread serving the
    latter.
    """

    def __init__(self):
        super().__init__()
        self._store = InMemoryStore()
        # process and delete self.messages every minute
        # (Cron cadence assumed to be one minute — configured elsewhere; verify)
        self.cron = Cron()
        self.cron.add(call=self._archive)
        self.cron.start()
        # publish a summary of all cancer messages grouped by minute and channel
        self.zmq_context = zmq.Context()
        self.pubsub_socket = self.zmq_context.socket(zmq.PUB)
        # fetch each config key once instead of once for bind and once for logging
        summary_socket = Config.get('monitor.socket.cancer_summary')
        self.pubsub_socket.bind(summary_socket)
        logger.info("bound publish socket to %s", summary_socket)
        # respond to live cancer requests
        self.cancer_socket = self.zmq_context.socket(zmq.REP)
        request_socket = Config.get('monitor.socket.cancer_request')
        self.cancer_socket.bind(request_socket)
        logger.info("bound cancer socket to %s", request_socket)
        # TODO: use asyncio
        t = threading.Thread(target=self._handle_cancer_request)
        # daemon so the request loop does not block interpreter shutdown
        t.daemon = True
        t.start()
        logger.info("started handle cancer request thread")

    # adds a record in the in-memory store
    # @memory.write()
    def store(self, channel, cancer):
        """Record a (channel, cancer) observation in the in-memory store."""
        self._store.store(channel, cancer)

    # computes cancer level from the in-memory store
    # @memory.read()
    def cancer(self):
        """Return the current cancer summary computed by the in-memory store."""
        return self._store.cancer()

    # respond to cancer request on a socket
    # @socket.recv()
    # @memory.read()
    # @socket.send()
    def _handle_cancer_request(self):
        """Serve the REP socket forever: any request gets the pickled summary.

        Runs on the daemon thread started in __init__; the request payload
        itself is ignored.
        """
        while True:
            self.cancer_socket.recv()
            cancer = self._store.cancer()
            self.cancer_socket.send_pyobj(cancer)

    # archive live messages from the in-memory store into the persistent store
    # @memory.read()
    # @socket.send()
    def _archive(self):
        """Archive the in-memory store and publish per-minute summaries.

        Called by the cron job. Each (date, channel) record is pickled and
        published on the PUB socket under the b'summary' topic frame.
        """
        history = self._store.archive()
        # publish the summaries on the pubsub socket
        for date, channels in history.items():
            for channel, record in channels.items():
                record = {
                    'date': date,
                    'channel': channel,
                    'cancer': record['cancer'],
                    'messages': record['messages']
                }
                self.pubsub_socket.send_multipart(
                    [b'summary', pickle.dumps(record)])
            logger.info(
                'published leaderboards of round %s with messages from %s channels',
                date, len(channels))