Example #1
def test_metrics_indexer_worker(producer, metrics_payload, flush_return_value,
                                with_exception):
    producer.produce = MagicMock()
    producer.flush = MagicMock(return_value=flush_return_value)

    metrics_worker = MetricsIndexerWorker(producer=producer)

    # mimic a Kafka message object whose .value() method returns the raw JSON
    mock_message = Mock()
    mock_message.value = MagicMock(return_value=json.dumps(metrics_payload))

    parsed = metrics_worker.process_message(mock_message)
    assert parsed["tags"] == {
        get_int(k): get_int(v)
        for k, v in metrics_payload["tags"].items()
    }
    assert parsed["metric_id"] == get_int(metrics_payload["name"])

    if with_exception:
        # producer.flush() was mocked to report one message still queued,
        # which flush_batch surfaces as "didn't get all the callbacks"
        with pytest.raises(Exception,
                           match="didn't get all the callbacks: 1 left"):
            metrics_worker.flush_batch([parsed])
    else:
        metrics_worker.flush_batch([parsed])
        producer.produce.assert_called_with(
            topic="snuba-metrics",
            key=None,
            value=json.dumps(parsed).encode(),
            on_delivery=metrics_worker.callback,
        )
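Note: the producer fixture and the metrics_payload / flush_return_value / with_exception parameters in Example #1 come from fixture and parametrize setup that is not part of the snippet. A minimal, purely hypothetical sketch of that setup (the payload shape and the parameter values are assumptions, not taken from the original test file):

import pytest

@pytest.mark.parametrize(
    "metrics_payload, flush_return_value, with_exception",
    [
        # flush() returning 0 means every delivery callback fired
        ({"name": "session", "tags": {"environment": "production"}}, 0, False),
        # flush() returning 1 leaves one callback outstanding -> exception
        ({"name": "session", "tags": {"environment": "production"}}, 1, True),
    ],
)
def test_metrics_indexer_worker(producer, metrics_payload, flush_return_value,
                                with_exception):
    ...  # body as in Example #1 above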
Example #2
    def test_metrics_consumer(self):
        ingest_producer = self._get_producer(self.ingest_topic)
        # `payload` is the test metrics message defined outside this snippet
        message = json.dumps(payload).encode()

        # produce message to the dummy ingest-metrics topic
        ingest_producer.produce(self.ingest_topic, message)

        assert ingest_producer.flush() == 0

        options = {
            "max_batch_size": 1,
            "max_batch_time": 5000,
            "group_id": "test-metrics-indexer-consumer",
            "auto_offset_reset": "earliest",
        }
        batching_consumer = get_metrics_consumer(topic=self.ingest_topic,
                                                 **options)

        # couldn't use _run_once() here because .poll() is called with a
        # 1 second timeout, which doesn't seem to be long enough.
        msg = batching_consumer.consumer.poll(5)
        assert msg

        # _handle_message calls worker's process_message
        # and then we flush() to make sure we call flush_batch
        batching_consumer._handle_message(msg)
        batching_consumer._flush()

        # make sure we produced the message during flush_batch
        snuba_producer = batching_consumer.worker._MetricsIndexerWorker__producer
        assert snuba_producer.flush() == 0

        # in order to test that the message we produced to the dummy
        # snuba-metrics topic was the message we expected, we make a
        # dummy consumer to subscribe to the topic
        snuba_metrics_consumer = Consumer({
            "bootstrap.servers": "localhost:9092",
            "group.id": "test-snuba-metrics-consumer",
            "default.topic.config": {
                "auto.offset.reset": "earliest"
            },
        })
        snuba_metrics_consumer.subscribe([self.snuba_topic])

        # once we have the message, we don't need the consumer anymore
        translated_msg = snuba_metrics_consumer.poll(5)
        snuba_metrics_consumer.close()
        assert translated_msg

        # finally test the payload of the translated message
        parsed = json.loads(translated_msg.value(), use_rapid_json=True)
        assert parsed["tags"] == {
            str(get_int(k)): get_int(v)
            for k, v in payload["tags"].items()
        }
        assert parsed["metric_id"] == get_int(payload["name"])
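A few names in Example #2 live outside the snippet: payload (presumably the same ingest-metrics message shape checked in Example #1), the self._get_producer helper, and Consumer, which comes from confluent_kafka. Note also that json here appears to be Sentry's own wrapper rather than the standard library, since the stdlib json.loads has no use_rapid_json argument. A stand-in for the producer helper, assuming a local Kafka broker on localhost:9092, could be as simple as:

from confluent_kafka import Producer

def get_test_producer() -> Producer:
    # hypothetical stand-in for self._get_producer(...); confluent_kafka
    # producers are configured per broker, and the topic is passed on each
    # produce() call, as in the test above
    return Producer({"bootstrap.servers": "localhost:9092"})

producer = get_test_producer()
producer.produce("ingest-metrics", b'{"name": "session", "tags": {}}')
assert producer.flush() == 0  # 0 = nothing left in the internal queue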
Example #3
    def test_reverse_resolve(self) -> None:
        strings = ["test-metric"]
        self.indexer.bulk_record(self.org_id, strings)
        assert self.indexer.reverse_resolve(self.org_id, get_int("test-metric")) == "test-metric"
        assert self.indexer.reverse_resolve(self.org_id, 55555) is None
Example #4
    def test_resolve(self) -> None:
        strings = ["test-metric"]
        self.indexer.bulk_record(self.org_id, strings)
        assert self.indexer.resolve(self.org_id, "test-metric") == get_int("test-metric")
        assert self.indexer.resolve(self.org_id, "bad-value") is None
Example #5
    def test_bulk_record(self) -> None:
        strings = ["test-metric", "test-tag-key", "test-tag-value"]
        results = self.indexer.bulk_record(self.org_id, strings)
        assert results == {s: get_int(s) for s in strings}
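Examples #3-#5 exercise the indexer contract: bulk_record assigns an integer ID to each string for an org, resolve maps a string to its ID (or None if it was never recorded), and reverse_resolve maps an ID back to its string. The get_int helper they compare against is not shown; it presumably just returns the ID the indexer assigned. A minimal in-memory sketch of that contract, as an illustration only and not Sentry's implementation:

from typing import Dict, Optional, Sequence, Tuple

class InMemoryIndexer:
    def __init__(self) -> None:
        self._forward: Dict[Tuple[int, str], int] = {}
        self._reverse: Dict[Tuple[int, int], str] = {}
        self._next_id = 1

    def bulk_record(self, org_id: int, strings: Sequence[str]) -> Dict[str, int]:
        # assign a new ID to each unseen string, reuse existing IDs otherwise
        results: Dict[str, int] = {}
        for s in strings:
            if (org_id, s) not in self._forward:
                self._forward[(org_id, s)] = self._next_id
                self._reverse[(org_id, self._next_id)] = s
                self._next_id += 1
            results[s] = self._forward[(org_id, s)]
        return results

    def resolve(self, org_id: int, string: str) -> Optional[int]:
        return self._forward.get((org_id, string))

    def reverse_resolve(self, org_id: int, id: int) -> Optional[str]:
        return self._reverse.get((org_id, id))

indexer = InMemoryIndexer()
assert indexer.bulk_record(1, ["test-metric"]) == {"test-metric": 1}
assert indexer.resolve(1, "test-metric") == 1
assert indexer.reverse_resolve(1, 1) == "test-metric"
assert indexer.resolve(1, "bad-value") is None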