Example #1
def publish_traces():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("config_file", type=argparse.FileType("r"),
        help="path to a configuration file")
    arg_parser.add_argument("--queue-name", default="main",
        help="name of trace queue / publisher config (default: main)")
    arg_parser.add_argument("--debug", default=False, action="store_true",
        help="enable debug logging")
    arg_parser.add_argument("--app-name", default="main", metavar="NAME",
        help="name of app to load from config_file (default: main)")
    args = arg_parser.parse_args()

    if args.debug:
        level = logging.DEBUG
    else:
        level = logging.WARNING
    logging.basicConfig(level=level)

    config_parser = configparser.RawConfigParser()
    config_parser.read_file(args.config_file)

    publisher_raw_cfg = dict(config_parser.items("trace-publisher:" + args.queue_name))
    publisher_cfg = config.parse_config(publisher_raw_cfg, {
        "zipkin_api_url": config.Endpoint,
        "post_timeout": config.Optional(config.Integer, POST_TIMEOUT_DEFAULT),
        "max_batch_size": config.Optional(config.Integer, MAX_BATCH_SIZE_DEFAULT),
        "retry_limit": config.Optional(config.Integer, RETRY_LIMIT_DEFAULT),
    })
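    # For illustration only: a hypothetical "zipkin_api_url = localhost:9411"
    # entry in the config section parses into an endpoint whose .address is
    # used below, and the Optional() entries fall back to their defaults
    # when the section omits them.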

    trace_queue = MessageQueue(
        "/traces-" + args.queue_name,
        max_messages=MAX_QUEUE_SIZE,
        max_message_size=MAX_SPAN_SIZE,
    )

    # pylint: disable=maybe-no-member
    inner_batch = TraceBatch(max_size=publisher_cfg.max_batch_size)
    batcher = TimeLimitedBatch(inner_batch, MAX_BATCH_AGE)
    metrics_client = metrics_client_from_config(publisher_raw_cfg)
    publisher = ZipkinPublisher(
        publisher_cfg.zipkin_api_url.address,
        metrics_client,
        post_timeout=publisher_cfg.post_timeout,
    )

    while True:
        try:
            message = trace_queue.get(timeout=0.2)
        except TimedOutError:
            message = None

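        # A timed-out get() leaves message as None; adding it still advances
        # the time-limited batch, so a partial batch is flushed once
        # MAX_BATCH_AGE has elapsed even when no new spans arrive.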
        try:
            batcher.add(message)
        except BatchFull:
            serialized = batcher.serialize()
            publisher.publish(serialized)
            batcher.reset()
            batcher.add(message)
Example #2
def main():
    """Run a consumer.

    Two environment variables are expected:

    * CONFIG_URI: A PasteDeploy URI pointing at the configuration for the
      application.
    * QUEUE: The name of the queue to consume (currently one of "events" or
      "errors").

    """
    config_uri = os.environ["CONFIG_URI"]
    config = paste.deploy.loadwsgi.appconfig(config_uri)

    logging.config.fileConfig(config["__file__"])

    queue_name = os.environ["QUEUE"]
    queue = MessageQueue("/" + queue_name,
                         max_messages=MAXIMUM_QUEUE_LENGTH,
                         max_message_size=MAXIMUM_EVENT_SIZE)

    metrics_client = baseplate.make_metrics_client(config)

    topic_name = config["topic." + queue_name]

    producer_options = {
        "codec": CODEC_GZIP,
        "batch_send_every_n": 20,
        "batch_send_every_t": 0.01,  # 10 milliseconds
    }

    while True:
        try:
            kafka_client = KafkaClient(config["kafka_brokers"])
            kafka_producer = SimpleProducer(kafka_client, **producer_options)
        except KafkaError as exc:
            _LOG.warning("could not connect: %s", exc)
            metrics_client.counter("injector.connection_error").increment()
            time.sleep(_RETRY_DELAY)
            continue

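        # Drain the queue, retrying each message until Kafka accepts it;
        # failed sends are retried after _RETRY_DELAY rather than dropped.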
        while True:
            message = queue.get()
            for retry in itertools.count():
                try:
                    kafka_producer.send_messages(topic_name, message)
                except KafkaError as exc:
                    _LOG.warning("failed to send message: %s", exc)
                    metrics_client.counter("injector.error").increment()
                    time.sleep(_RETRY_DELAY)
                else:
                    metrics_client.counter("collected.injector").increment()
                    break
        kafka_producer.stop()
Example #3
# consumer.py
from baseplate.message_queue import MessageQueue

mq = MessageQueue("/baseplate-testing", max_messages=1, max_message_size=1)
# Unless a `timeout` kwarg is passed, this will block until
# we can pop a message from the queue.
message = mq.get()
print("Get Message: %s" % message)
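
A matching producer sketch (mirroring the consumer above; `put` is the
MessageQueue counterpart of `get` and likewise blocks unless a `timeout`
kwarg is passed):

# producer.py
from baseplate.message_queue import MessageQueue

mq = MessageQueue("/baseplate-testing", max_messages=1, max_message_size=1)
# Blocks if the queue already holds max_messages, unless a
# `timeout` kwarg is passed.
mq.put("1")
print("Put Message: 1")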
Example #4
class CollectorFunctionalTests(unittest.TestCase):
    def setUp(self):
        # Create the queues before the application does so that we can
        # override the maximum sizes with values that are safe to use
        # without extra privileges on Linux.
        self.events_queue = MessageQueue(name="/events",
                                         max_messages=10,
                                         max_message_size=8192)
        self.errors_queue = MessageQueue(name="/errors",
                                         max_messages=10,
                                         max_message_size=8192)

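        # Freeze utcnow() so the timestamps asserted in test_batch are
        # deterministic.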
        class MockDatetime(datetime.datetime):
            @classmethod
            def utcnow(cls):
                return datetime.datetime(2015, 11, 17, 12, 34, 56)

        datetime.datetime = MockDatetime

        app = collector.make_app(global_config={},
                                 **{
                                     "key.TestKey1": "dGVzdA==",
                                     "msgq.events": "0xcafe",
                                     "msgq.errors": "0xdecaf",
                                     "allowed_origins": "example.com",
                                     "metrics.namespace": "eventcollector",
                                     "metrics.endpoint": "",
                                 })
        self.test_app = webtest.TestApp(app)

    def tearDown(self):
        self.events_queue.queue.unlink()
        self.events_queue.queue.close()
        self.errors_queue.queue.unlink()
        self.errors_queue.queue.close()

    def test_batch(self):
        self.test_app.post(
            "/v1",
            '[{"event1": "value"}, {"event2": "value"}]',
            headers={
                "Content-Type": "application/json",
                "User-Agent": "TestApp/1.0",
                "Date": "Wed, 25 Nov 2015 06:25:24 GMT",
                "X-Signature": "key=TestKey1, mac=d7aab40b9db8ae0e0b40d98e9c50b2cfc80ca06127b42fbbbdf146752b47a5ed",
            },
            extra_environ={
                "REMOTE_ADDR": "1.2.3.4",
            },
        )

        event1 = self.events_queue.get(timeout=0)
        self.assertEqual(
            event1,
            '{"ip": "1.2.3.4", "event": {"event1": "value"}, "time": "2015-11-17T12:34:56"}'
        )
        event2 = self.events_queue.get(timeout=0)
        self.assertEqual(
            event2,
            '{"ip": "1.2.3.4", "event": {"event2": "value"}, "time": "2015-11-17T12:34:56"}'
        )

        with self.assertRaises(TimedOutError):
            self.errors_queue.get(timeout=0)

    def test_cors(self):
        response = self.test_app.options(
            "/v1",
            headers={
                "Origin": "http://example.com",
                "Access-Control-Request-Method": "POST",
                "Access-Control-Request-Headers": "X-Signature",
            },
        )

        self.assertEqual(response.status_code, 204)
Example #5
def publish_events():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("config_file",
                            type=argparse.FileType("r"),
                            help="path to a configuration file")
    arg_parser.add_argument(
        "--queue-name",
        default="main",
        help="name of event queue / publisher config (default: main)",
    )
    arg_parser.add_argument("--debug",
                            default=False,
                            action="store_true",
                            help="enable debug logging")
    args = arg_parser.parse_args()

    if args.debug:
        level = logging.DEBUG
    else:
        level = logging.WARNING
    logging.basicConfig(level=level)

    config_parser = configparser.RawConfigParser()
    config_parser.read_file(args.config_file)
    raw_config = dict(config_parser.items("event-publisher:" + args.queue_name))
    cfg = config.parse_config(
        raw_config,
        {
            "collector": {
                "hostname": config.String,
                "version": config.Optional(config.Integer, default=1),
            },
            "key": {
                "name": config.String,
                "secret": config.Base64
            },
        },
    )
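    # For illustration only, a hypothetical config section this spec accepts:
    #
    #   [event-publisher:main]
    #   collector.hostname = collector.example.com
    #   key.name = TestKey1
    #   key.secret = dGVzdA==
    #
    # parse_config exposes the dotted keys as attributes, e.g. cfg.key.secret
    # holds the base64-decoded secret.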

    metrics_client = metrics_client_from_config(raw_config)

    event_queue = MessageQueue("/events-" + args.queue_name,
                               max_messages=MAX_QUEUE_SIZE,
                               max_message_size=MAX_EVENT_SIZE)

    # pylint: disable=maybe-no-member
    serializer = SERIALIZER_BY_VERSION[cfg.collector.version]()
    batcher = TimeLimitedBatch(serializer, MAX_BATCH_AGE)
    publisher = BatchPublisher(metrics_client, cfg)

    while True:
        try:
            message = event_queue.get(timeout=0.2)
        except TimedOutError:
            message = None

        try:
            batcher.add(message)
        except BatchFull:
            serialized = batcher.serialize()
            publisher.publish(serialized)
            batcher.reset()
            batcher.add(message)