def test_with_messages(self):
        """archive() groups messages older than the current minute by minute
        and channel (summing 'cancer' and counting 'messages'), and leaves
        current-minute messages in the store.
        """
        m = InMemoryStore()

        channel1 = "channel1"
        channel2 = "channel2"
        # Derive both timestamps from a single clock read so they can never
        # be computed on opposite sides of a minute boundary (the original
        # read the clock twice, which made the test flaky around :00).
        now2 = datetime.datetime.now(datetime.timezone.utc).replace(
            second=0, microsecond=0)
        now1 = now2 - datetime.timedelta(seconds=180)

        m.messages.append({'date': now1, 'channel': channel1, 'cancer': 10})
        m.messages.append({'date': now1, 'channel': channel1, 'cancer': 20})
        m.messages.append({'date': now1, 'channel': channel2, 'cancer': 50})
        m.messages.append({'date': now2, 'channel': channel2, 'cancer': 1000})

        expected = {
            now1: {
                channel1: {
                    'cancer': 30,  # 10 + 20
                    'messages': 2
                },
                channel2: {
                    'cancer': 50,
                    'messages': 1
                }
            }
        }

        actual = m.archive()

        self.assertEqual(actual[now1][channel1], expected[now1][channel1])
        self.assertEqual(actual[now1][channel2], expected[now1][channel2])
        # the now2 message belongs to the current minute and must remain live
        self.assertEqual(len(m.messages), 1)
# Beispiel #2 (score: 0) — paste-site separator between unrelated snippets
    def __init__(self):
        """Set up the in-memory store, the archiving cron job, the ZMQ
        sockets, and the request-handling thread.

        Side effects: binds a PUB and a REP socket (addresses read from
        Config), starts a cron job, and starts a daemon thread.
        """
        super().__init__()

        # live messages accumulate here until the cron job archives them
        self._store = InMemoryStore()

        # process and delete self.messages every minute
        self.cron = Cron()
        self.cron.add(call=self._archive)
        self.cron.start()

        # publish a summary of all cancer messages grouped by minute and channel
        self.zmq_context = zmq.Context()
        self.pubsub_socket = self.zmq_context.socket(zmq.PUB)
        summary_socket = Config.get('monitor.socket.cancer_summary')
        self.pubsub_socket.bind(summary_socket)
        logger.info("bound publish socket to %s", summary_socket)

        # respond to live cancer requests
        self.cancer_socket = self.zmq_context.socket(zmq.REP)
        request_socket = Config.get('monitor.socket.cancer_request')
        self.cancer_socket.bind(request_socket)
        logger.info("bound cancer socket to %s", request_socket)

        # TODO: use asyncio
        # daemon thread so it does not block interpreter shutdown
        t = threading.Thread(target=self._handle_cancer_request)
        t.daemon = True
        t.start()
        logger.info("started handle cancer request thread")
    def test_default(self):
        """store() records the channel and the cancer level of a message."""
        store = InMemoryStore()
        channel = "channel"
        cancer = 10

        store.store(channel, cancer)

        message = store.messages.popleft()

        # don't compare the whole dict because we might get the date wrong
        self.assertEqual(message['channel'], channel)
        self.assertEqual(message['cancer'], cancer)
  def __init__(self):
    """Set up the in-memory store, the archiving cron job, the ZMQ sockets,
    and the request-handling thread.

    Side effects: binds a PUB and a REP socket (addresses read from Config),
    starts a cron job, and starts a daemon thread.
    """
    super().__init__()

    # live messages accumulate here until the cron job archives them
    self._store = InMemoryStore()

    # process and delete self.messages every minute
    self.cron = Cron()
    self.cron.add(call=self._archive)
    self.cron.start()

    # publish a summary of all cancer messages grouped by minute and channel
    self.zmq_context = zmq.Context()
    self.pubsub_socket = self.zmq_context.socket(zmq.PUB)
    self.pubsub_socket.bind(Config.get('monitor.socket.cancer_summary'))
    logger.info("bound publish socket to %s", Config.get('monitor.socket.cancer_summary'))

    # respond to live cancer requests
    self.cancer_socket = self.zmq_context.socket(zmq.REP)
    self.cancer_socket.bind(Config.get('monitor.socket.cancer_request'))
    logger.info("bound cancer socket to %s", Config.get('monitor.socket.cancer_request'))

    # TODO: use asyncio
    # daemon thread so it does not block interpreter shutdown
    t = threading.Thread(target=self._handle_cancer_request)
    t.daemon = True
    t.start()
    logger.info("started handle cancer request thread")
    def test_with_messages(self):
        """cancer() sums 'cancer' and counts messages for the current minute
        only; older messages are excluded.
        """
        m = InMemoryStore()

        channel = "channel"
        # Derive both timestamps from a single clock read so they can never
        # be computed on opposite sides of a minute boundary (the original
        # read the clock twice, which made the test flaky around :00).
        now2 = datetime.datetime.now(datetime.timezone.utc).replace(
            second=0, microsecond=0)
        now1 = now2 - datetime.timedelta(seconds=180)

        m.messages.append({'date': now1, 'channel': channel, 'cancer': 10})
        m.messages.append({'date': now2, 'channel': channel, 'cancer': 1000})
        m.messages.append({'date': now2, 'channel': channel, 'cancer': 1})

        # only the two now2 messages count: 1000 + 1
        expected = [{'channel': channel, 'cancer': 1001, 'messages': 2}]

        actual = m.cancer()

        self.assertEqual(actual, expected)
    def test_no_messages(self):
        """cancer() yields an empty list when nothing has been stored."""
        store = InMemoryStore()
        self.assertEqual(store.cancer(), [])
    def test_no_messages(self):
        """archive() yields an empty dict when nothing has been stored."""
        store = InMemoryStore()
        self.assertEqual(store.archive(), {})
# Beispiel #8 (score: 0) — paste-site separator between unrelated snippets
class MemoryStorage(StorageInterface):
    """Storage backend that keeps live cancer messages in memory.

    A cron job periodically archives them (presumably once a minute per the
    comment in __init__; the actual interval is configured inside Cron) and
    publishes per-minute, per-channel summaries on a ZMQ PUB socket, while a
    REP socket answers live cancer requests from a background daemon thread.
    """

    def __init__(self):
        """Create the store, start the archive cron job, bind the PUB and
        REP sockets (addresses read from Config), and start the
        request-handling daemon thread.
        """
        super().__init__()

        # live messages accumulate here until the cron job archives them
        self._store = InMemoryStore()

        # process and delete self.messages every minute
        self.cron = Cron()
        self.cron.add(call=self._archive)
        self.cron.start()

        # publish a summary of all cancer messages grouped by minute and channel
        self.zmq_context = zmq.Context()
        self.pubsub_socket = self.zmq_context.socket(zmq.PUB)
        summary_socket = Config.get('monitor.socket.cancer_summary')
        self.pubsub_socket.bind(summary_socket)
        logger.info("bound publish socket to %s", summary_socket)

        # respond to live cancer requests
        self.cancer_socket = self.zmq_context.socket(zmq.REP)
        request_socket = Config.get('monitor.socket.cancer_request')
        self.cancer_socket.bind(request_socket)
        logger.info("bound cancer socket to %s", request_socket)

        # TODO: use asyncio
        # daemon thread so it does not block interpreter shutdown
        t = threading.Thread(target=self._handle_cancer_request)
        t.daemon = True
        t.start()
        logger.info("started handle cancer request thread")

    # adds a record in the in-memory store
    # @memory.write()
    def store(self, channel, cancer):
        """Record one cancer message for *channel* in the in-memory store."""
        self._store.store(channel, cancer)

    # computes cancer level from the in-memory store
    # @memory.read()
    def cancer(self):
        """Return the current cancer summary computed by the in-memory store."""
        return self._store.cancer()

    # respond to cancer request on a socket
    # @socket.recv()
    # @memory.read()
    # @socket.send()
    def _handle_cancer_request(self):
        """Serve cancer requests forever: block on the REP socket, then
        reply with the current summary as a pickled Python object.
        Runs in the daemon thread started by __init__.
        """
        while True:
            self.cancer_socket.recv()  # request payload is ignored
            cancer = self._store.cancer()
            self.cancer_socket.send_pyobj(cancer)

    # archive live messages from the in-memory store into the persistent store
    # @memory.read()
    # @socket.send()
    def _archive(self):
        """Drain archived messages from the in-memory store and publish one
        summary per channel per archived minute on the PUB socket.
        """
        # history maps date -> channel -> {'cancer': ..., 'messages': ...}
        history = self._store.archive()

        # publish the summaries on the pubsub socket
        for date, channels in history.items():
            for channel, record in channels.items():
                # rebinds the loop variable with a flattened summary dict
                record = {
                    'date': date,
                    'channel': channel,
                    'cancer': record['cancer'],
                    'messages': record['messages']
                }

                # NOTE(review): subscribers unpickle this payload — they must
                # fully trust this publisher (pickle is not safe otherwise)
                self.pubsub_socket.send_multipart(
                    [b'summary', pickle.dumps(record)])

            logger.info(
                'published leaderboards of round %s with messages from %s channels',
                date, len(channels))
class MemoryStorage(StorageInterface):
  """Storage backend that keeps live cancer messages in memory.

  A cron job periodically archives them (presumably once a minute per the
  comment in __init__; the actual interval is configured inside Cron) and
  publishes per-minute, per-channel summaries on a ZMQ PUB socket, while a
  REP socket answers live cancer requests from a background daemon thread.
  """

  def __init__(self):
    """Create the store, start the archive cron job, bind the PUB and REP
    sockets (addresses read from Config), and start the request-handling
    daemon thread.
    """
    super().__init__()

    # live messages accumulate here until the cron job archives them
    self._store = InMemoryStore()

    # process and delete self.messages every minute
    self.cron = Cron()
    self.cron.add(call=self._archive)
    self.cron.start()

    # publish a summary of all cancer messages grouped by minute and channel
    self.zmq_context = zmq.Context()
    self.pubsub_socket = self.zmq_context.socket(zmq.PUB)
    self.pubsub_socket.bind(Config.get('monitor.socket.cancer_summary'))
    logger.info("bound publish socket to %s", Config.get('monitor.socket.cancer_summary'))

    # respond to live cancer requests
    self.cancer_socket = self.zmq_context.socket(zmq.REP)
    self.cancer_socket.bind(Config.get('monitor.socket.cancer_request'))
    logger.info("bound cancer socket to %s", Config.get('monitor.socket.cancer_request'))

    # TODO: use asyncio
    # daemon thread so it does not block interpreter shutdown
    t = threading.Thread(target=self._handle_cancer_request)
    t.daemon = True
    t.start()
    logger.info("started handle cancer request thread")

  # adds a record in the in-memory store
  # @memory.write()
  def store(self, channel, cancer):
    """Record one cancer message for *channel* in the in-memory store."""
    self._store.store(channel, cancer)

  # computes cancer level from the in-memory store
  # @memory.read()
  def cancer(self):
    """Return the current cancer summary computed by the in-memory store."""
    return self._store.cancer()

  # respond to cancer request on a socket
  # @socket.recv()
  # @memory.read()
  # @socket.send()
  def _handle_cancer_request(self):
    """Serve cancer requests forever: block on the REP socket, then reply
    with the current summary as a pickled Python object. Runs in the
    daemon thread started by __init__.
    """
    while True:
      self.cancer_socket.recv()  # request payload is ignored
      cancer = self._store.cancer()
      self.cancer_socket.send_pyobj(cancer)

  # archive live messages from the in-memory store into the persistent store
  # @memory.read()
  # @socket.send()
  def _archive(self):
    """Drain archived messages from the in-memory store and publish one
    summary per channel per archived minute on the PUB socket.
    """
    # history maps date -> channel -> {'cancer': ..., 'messages': ...}
    history = self._store.archive()

    # publish the summaries on the pubsub socket
    for date, channels in history.items():
      for channel, record in channels.items():
        # rebinds the loop variable with a flattened summary dict
        record = {
          'date': date,
          'channel': channel,
          'cancer': record['cancer'],
          'messages': record['messages']
        }

        # NOTE(review): subscribers unpickle this payload — they must fully
        # trust this publisher (pickle is not safe otherwise)
        self.pubsub_socket.send_multipart([b'summary', pickle.dumps(record)])

      logger.info('published leaderboards of round %s with messages from %s channels', date, len(channels))