Example #1
def make_app(global_config, **settings):
    """Paste entry point: return a configured WSGI application."""

    config = Configurator(settings=settings)

    keystore = {}
    for setting, value in settings.items():
        key_prefix = "key."
        if setting.startswith(key_prefix):
            key_name = setting[len(key_prefix):]
            key_secret = base64.b64decode(value)
            keystore[key_name] = key_secret

    allowed_origins = [
        x.strip() for x in settings["allowed_origins"].split(",") if x.strip()
    ]

    metrics_client = baseplate.make_metrics_client(settings)
    event_queue = MessageQueue("/events",
                               max_messages=MAXIMUM_QUEUE_LENGTH,
                               max_message_size=MAXIMUM_EVENT_SIZE)
    error_queue = MessageQueue("/errors",
                               max_messages=MAXIMUM_QUEUE_LENGTH,
                               max_message_size=MAXIMUM_EVENT_SIZE)
    collector = EventCollector(keystore, metrics_client, event_queue,
                               error_queue, allowed_origins)
    config.add_route("v1", "/v1", request_method="POST")
    config.add_route("v1_options", "/v1", request_method="OPTIONS")
    config.add_view(collector.process_request, route_name="v1")
    config.add_view(collector.check_cors, route_name="v1_options")
    config.add_route("health", "/health")
    config.add_view(health_check, route_name="health", renderer="json")

    return config.make_wsgi_app()
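
As a usage sketch, a minimal settings dict for make_app might look like the one below. It mirrors the functional tests later in this collection; every value here is a placeholder, and "dGVzdA==" is just base64 for b"test".

# Hypothetical settings: "key.<name>" values are base64-decoded into the
# keystore, and "allowed_origins" is split on commas.
settings = {
    "key.TestKey1": "dGVzdA==",
    "allowed_origins": "example.com, example.org",
    "metrics.namespace": "eventcollector",
    "metrics.endpoint": "",
}
app = make_app(global_config={}, **settings)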
Example #2
    def setUp(self):
        # We create the queues before the application code can, so that we
        # can override the max sizes with values that are safe to use without
        # extra privileges on Linux.
        self.events_queue = MessageQueue(name="/events",
                                         max_messages=10,
                                         max_message_size=8192)
        self.errors_queue = MessageQueue(name="/errors",
                                         max_messages=10,
                                         max_message_size=8192)

        class MockDatetime(datetime.datetime):
            @classmethod
            def utcnow(cls):
                return datetime.datetime(2015, 11, 17, 12, 34, 56)

        datetime.datetime = MockDatetime

        app = collector.make_app(global_config={},
                                 **{
                                     "key.TestKey1": "dGVzdA==",
                                     "msgq.events": "0xcafe",
                                     "msgq.errors": "0xdecaf",
                                     "allowed_origins": "example.com",
                                     "metrics.namespace": "eventcollector",
                                     "metrics.endpoint": "",
                                 })
        self.test_app = webtest.TestApp(app)
Example #3
class EventQueue(ContextFactory):
    """A queue to transfer events to the publisher."""
    def __init__(self, name):
        self.queue = MessageQueue(
            "/events-" + name,
            max_messages=MAX_QUEUE_SIZE,
            max_message_size=MAX_EVENT_SIZE,
        )

    def put(self, event):
        """Add an event to the queue.

        The queue is local to the server this code is run on. The event
        publisher on the server will take these events and send them to the
        collector.

        :param baseplate.events.Event event: The event to send.
        :raises: :py:exc:`EventTooLargeError` The serialized event is too large.
        :raises: :py:exc:`EventQueueFullError` The queue is full. Events are
            not being published fast enough.

        """
        serialized = event.serialize()
        if len(serialized) > MAX_EVENT_SIZE:
            raise EventTooLargeError(len(serialized))

        try:
            self.queue.put(serialized, timeout=0)
        except TimedOutError:
            raise EventQueueFullError

    def make_object_for_context(self, name, server_span):
        return self
Example #4
class SidecarRecorder:
    """Interface for recording spans to a POSIX message queue.

    The SidecarRecorder serializes spans to a string representation before
    adding them to the queue.
    """
    def __init__(self, queue_name):
        self.queue = MessageQueue(
            "/traces-" + queue_name,
            max_messages=MAX_SIDECAR_QUEUE_SIZE,
            max_message_size=MAX_SIDECAR_MESSAGE_SIZE,
        )

    def send(self, span):
        # Don't raise exceptions from here. This is called in the
        # request/response path and should finish cleanly.
        serialized_str = json.dumps(span._serialize())
        if len(serialized_str) > MAX_SIDECAR_MESSAGE_SIZE:
            logger.error(
                "Trace too big. Traces published to %s are not allowed to be larger "
                "than %d bytes. Received trace is %d bytes. This can be caused by "
                "an excess amount of tags or a large amount of child spans.",
                self.queue.queue.name,
                MAX_SIDECAR_MESSAGE_SIZE,
                len(serialized_str),
            )
            return
        try:
            self.queue.put(serialized_str, timeout=0)
        except TimedOutError:
            logger.error("Trace queue %s is full. Is trace sidecar healthy?",
                         self.queue.queue.name)
Example #5
class EventQueue(ContextFactory):
    """A queue to transfer events to the publisher."""

    def __init__(self, name):
        self.queue = MessageQueue(
            "/events-" + name,
            max_messages=MAX_QUEUE_SIZE,
            max_message_size=MAX_EVENT_SIZE,
        )

    def put(self, event):
        """Add an event to the queue.

        The queue is local to the server this code is run on. The event
        publisher on the server will take these events and send them to the
        collector.

        :param baseplate.events.Event event: The event to send.
        :raises: :py:exc:`EventTooLargeError` The serialized event is too large.
        :raises: :py:exc:`EventQueueFullError` The queue is full. Events are
            not being published fast enough.

        """
        serialized = event.serialize()
        if len(serialized) > MAX_EVENT_SIZE:
            raise EventTooLargeError(len(serialized))

        try:
            self.queue.put(serialized, timeout=0)
        except TimedOutError:
            raise EventQueueFullError

    def make_object_for_context(self, name, root_span):
        return self
Example #6
    def __init__(self, name, event_serializer=serialize_v1_event):
        self.queue = MessageQueue(
            "/events-" + name,
            max_messages=MAX_QUEUE_SIZE,
            max_message_size=MAX_EVENT_SIZE,
        )
        self.serialize_event = event_serializer
Example #7
def publish_traces():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("config_file", type=argparse.FileType("r"),
        help="path to a configuration file")
    arg_parser.add_argument("--queue-name", default="main",
        help="name of trace queue / publisher config (default: main)")
    arg_parser.add_argument("--debug", default=False, action="store_true",
        help="enable debug logging")
    arg_parser.add_argument("--app-name", default="main", metavar="NAME",
        help="name of app to load from config_file (default: main)")
    args = arg_parser.parse_args()

    if args.debug:
        level = logging.DEBUG
    else:
        level = logging.WARNING
    logging.basicConfig(level=level)

    config_parser = configparser.RawConfigParser()
    config_parser.read_file(args.config_file)

    publisher_raw_cfg = dict(config_parser.items("trace-publisher:" + args.queue_name))
    publisher_cfg = config.parse_config(publisher_raw_cfg, {
        "zipkin_api_url": config.Endpoint,
        "post_timeout": config.Optional(config.Integer, POST_TIMEOUT_DEFAULT),
        "max_batch_size": config.Optional(config.Integer, MAX_BATCH_SIZE_DEFAULT),
        "retry_limit": config.Optional(config.Integer, RETRY_LIMIT_DEFAULT),
    })

    trace_queue = MessageQueue(
        "/traces-" + args.queue_name,
        max_messages=MAX_QUEUE_SIZE,
        max_message_size=MAX_SPAN_SIZE,
    )

    # pylint: disable=maybe-no-member
    inner_batch = TraceBatch(max_size=publisher_cfg.max_batch_size)
    batcher = TimeLimitedBatch(inner_batch, MAX_BATCH_AGE)
    metrics_client = metrics_client_from_config(publisher_raw_cfg)
    publisher = ZipkinPublisher(
        publisher_cfg.zipkin_api_url.address,
        metrics_client,
        post_timeout=publisher_cfg.post_timeout,
    )

    while True:
        try:
            message = trace_queue.get(timeout=0.2)
        except TimedOutError:
            # No span arrived in time; pass None through so the time-limited
            # batch can still flush once it grows older than MAX_BATCH_AGE.
            message = None

        try:
            batcher.add(message)
        except BatchFull:
            serialized = batcher.serialize()
            publisher.publish(serialized)
            batcher.reset()
            batcher.add(message)
Example #8
def main():
    """Run a consumer.

    Two environment variables are expected:

    * CONFIG_URI: A PasteDeploy URI pointing at the configuration for the
      application.
    * QUEUE: The name of the queue to consume (currently one of "events" or
      "errors").

    """
    config_uri = os.environ["CONFIG_URI"]
    config = paste.deploy.loadwsgi.appconfig(config_uri)

    logging.config.fileConfig(config["__file__"])

    queue_name = os.environ["QUEUE"]
    queue = MessageQueue("/" + queue_name,
                         max_messages=MAXIMUM_QUEUE_LENGTH,
                         max_message_size=MAXIMUM_EVENT_SIZE)

    metrics_client = baseplate.make_metrics_client(config)

    topic_name = config["topic." + queue_name]

    producer_options = {
        "codec": CODEC_GZIP,
        "batch_send_every_n": 20,
        "batch_send_every_t": 0.01,  # 10 milliseconds
    }

    while True:
        try:
            kafka_client = KafkaClient(config["kafka_brokers"])
            kafka_producer = SimpleProducer(kafka_client, **producer_options)
        except KafkaError as exc:
            _LOG.warning("could not connect: %s", exc)
            metrics_client.counter("injector.connection_error").increment()
            time.sleep(_RETRY_DELAY)
            continue

        while True:
            message = queue.get()
            # Retry forever: the message has already been pulled off the
            # queue, so it must not be dropped.
            for _ in itertools.count():
                try:
                    kafka_producer.send_messages(topic_name, message)
                except KafkaError as exc:
                    _LOG.warning("failed to send message: %s", exc)
                    metrics_client.counter("injector.error").increment()
                    time.sleep(_RETRY_DELAY)
                else:
                    metrics_client.counter("collected.injector").increment()
                    break
        kafka_producer.stop()
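
As a launch sketch, the consumer reads all of its configuration from the two environment variables documented in the docstring; both values below are placeholders.

import os

os.environ["CONFIG_URI"] = "config:/path/to/collector.ini"  # PasteDeploy URI (placeholder)
os.environ["QUEUE"] = "events"  # or "errors"
main()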
Example #10
    def test_put_zero_timeout(self):
        message_queue = MessageQueue(self.qname, max_messages=1, max_message_size=1)

        with contextlib.closing(message_queue) as mq:
            mq.put(b"x", timeout=0)
            message = mq.get()
            self.assertEqual(message, b"x")
Example #11
    def test_put_full_zero_timeout(self):
        message_queue = MessageQueue(self.qname, max_messages=1, max_message_size=1)

        with contextlib.closing(message_queue) as mq:
            mq.put(b"1", timeout=0)

            with self.assertRaises(TimedOutError):
                mq.put(b"2", timeout=0)
Example #12
    def test_create_queue(self):
        message_queue = MessageQueue(self.qname,
                                     max_messages=1,
                                     max_message_size=1)

        with contextlib.closing(message_queue) as mq:
            self.assertEqual(mq.queue.max_messages, 1)
            self.assertEqual(mq.queue.max_message_size, 1)
Example #13
class EventQueue(ContextFactory):
    """A queue to transfer events to the publisher.

    :param str name: The name of the event queue to send to. This specifies
        which publisher should send the events, which can be useful for
        routing to different event pipelines (prod/test/v2, etc.).
    :param callable event_serializer: A callable that takes an event object
        and returns serialized bytes ready to send on the wire. See below for
        options.

    """

    def __init__(self, name, event_serializer=serialize_v1_event):
        self.queue = MessageQueue(
            "/events-" + name,
            max_messages=MAX_QUEUE_SIZE,
            max_message_size=MAX_EVENT_SIZE,
        )
        self.serialize_event = event_serializer

    def put(self, event):
        """Add an event to the queue.

        The queue is local to the server this code is run on. The event
        publisher on the server will take these events and send them to the
        collector.

        :param event: The event to send. The type of event object passed in
            depends on the selected ``event_serializer``.
        :raises: :py:exc:`EventTooLargeError` The serialized event is too large.
        :raises: :py:exc:`EventQueueFullError` The queue is full. Events are
            not being published fast enough.

        """
        serialized = self.serialize_event(event)
        if len(serialized) > MAX_EVENT_SIZE:
            raise EventTooLargeError(len(serialized))

        try:
            self.queue.put(serialized, timeout=0)
        except TimedOutError:
            raise EventQueueFullError

    def make_object_for_context(self, name, server_span):
        return self
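
A short usage sketch for the put() contract documented above; "event" and "logger" are assumed to exist in the calling code, and the queue name is arbitrary.

queue = EventQueue("production")
try:
    queue.put(event)
except EventTooLargeError:
    logger.warning("dropping event: serialized form exceeds MAX_EVENT_SIZE")
except EventQueueFullError:
    logger.warning("event queue full: the publisher may be down or too slow")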
Example #14
    def test_get_timeout(self):
        message_queue = MessageQueue(self.qname, max_messages=1, max_message_size=1)

        with contextlib.closing(message_queue) as mq:
            start = time.time()
            with self.assertRaises(TimedOutError):
                mq.get(timeout=0.1)
            elapsed = time.time() - start
            self.assertAlmostEqual(elapsed, 0.1, places=2)
Example #15
    def setUp(self):
        # We create the queues before the application code can, so that we
        # can override the max sizes with values that are safe to use without
        # extra privileges on Linux.
        self.events_queue = MessageQueue(name="/events",
            max_messages=10, max_message_size=8192)
        self.errors_queue = MessageQueue(name="/errors",
            max_messages=10, max_message_size=8192)

        class MockDatetime(datetime.datetime):
            @classmethod
            def utcnow(cls):
                return datetime.datetime(2015, 11, 17, 12, 34, 56)
        datetime.datetime = MockDatetime

        app = collector.make_app(global_config={}, **{
            "key.TestKey1": "dGVzdA==",
            "msgq.events": "0xcafe",
            "msgq.errors": "0xdecaf",
            "allowed_origins": "example.com",
            "metrics.namespace": "eventcollector",
            "metrics.endpoint": "",
        })
        self.test_app = webtest.TestApp(app)
Example #16
# consumer.py
from baseplate.message_queue import MessageQueue

mq = MessageQueue("/test4", max_messages=5, max_message_size=3)
# Unless a `timeout` kwarg is passed, this will block until
# we can pop a message from the queue.

while True:
    message = mq.get()
    print("Get Message: %s" % message)
Example #17
    def __init__(self, name):
        self.queue = MessageQueue(
            "/events-" + name,
            max_messages=MAX_QUEUE_SIZE,
            max_message_size=MAX_EVENT_SIZE,
        )
Example #18
    def __init__(self, queue_name):
        self.queue = MessageQueue(
            "/traces-" + queue_name,
            max_messages=MAX_SIDECAR_QUEUE_SIZE,
            max_message_size=MAX_SIDECAR_MESSAGE_SIZE,
        )
Example #19
class CollectorFunctionalTests(unittest.TestCase):
    def setUp(self):
        # We create the queues before the application code can, so that we
        # can override the max sizes with values that are safe to use without
        # extra privileges on Linux.
        self.events_queue = MessageQueue(name="/events",
            max_messages=10, max_message_size=8192)
        self.errors_queue = MessageQueue(name="/errors",
            max_messages=10, max_message_size=8192)

        class MockDatetime(datetime.datetime):
            @classmethod
            def utcnow(cls):
                return datetime.datetime(2015, 11, 17, 12, 34, 56)
        datetime.datetime = MockDatetime

        app = collector.make_app(global_config={}, **{
            "key.TestKey1": "dGVzdA==",
            "msgq.events": "0xcafe",
            "msgq.errors": "0xdecaf",
            "allowed_origins": "example.com",
            "metrics.namespace": "eventcollector",
            "metrics.endpoint": "",
        })
        self.test_app = webtest.TestApp(app)

    def tearDown(self):
        self.events_queue.queue.unlink()
        self.events_queue.queue.close()
        self.errors_queue.queue.unlink()
        self.errors_queue.queue.close()

    def test_batch(self):
        self.test_app.post("/v1",
            '[{"event1": "value"}, {"event2": "value"}]',
            headers={
                "Content-Type": "application/json",
                "User-Agent": "TestApp/1.0",
                "Date": "Wed, 25 Nov 2015 06:25:24 GMT",
                "X-Signature": "key=TestKey1, mac=d7aab40b9db8ae0e0b40d98e9c50b2cfc80ca06127b42fbbbdf146752b47a5ed",
            },
            extra_environ={
                "REMOTE_ADDR": "1.2.3.4",
            },
        )

        event1 = self.events_queue.get(timeout=0)
        self.assertEqual(event1, '{"ip": "1.2.3.4", "event": {"event1": "value"}, "time": "2015-11-17T12:34:56"}')
        event2 = self.events_queue.get(timeout=0)
        self.assertEqual(event2, '{"ip": "1.2.3.4", "event": {"event2": "value"}, "time": "2015-11-17T12:34:56"}')

        with self.assertRaises(TimedOutError):
            self.errors_queue.get(timeout=0)

    def test_cors(self):
        response = self.test_app.options("/v1", headers={
            "Origin": "http://example.com",
            "Access-Control-Request-Method": "POST",
            "Access-Control-Request-Headers": "X-Signature",
        })

        self.assertEqual(response.status_code, 204)
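
The "without extra privileges" comment in setUp refers to the Linux per-user limits on POSIX message queues; creating a queue whose sizes exceed them fails for unprivileged processes. A Linux-only sketch for inspecting those limits:

# Read the kernel's per-queue limits that the test sizes stay under.
with open("/proc/sys/fs/mqueue/msg_max") as f:
    print("msg_max:", f.read().strip())
with open("/proc/sys/fs/mqueue/msgsize_max") as f:
    print("msgsize_max:", f.read().strip())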
Example #21
# producer.py
from baseplate.message_queue import MessageQueue

# If the queue doesn't already exist, we'll create it.
mq = MessageQueue("/baseplate-testing", max_messages=1, max_message_size=1)
message = "1"
mq.put(message)
print("Put Message: %s" % message)
Example #22
# consumer.py
from baseplate.message_queue import MessageQueue

mq = MessageQueue("/baseplate-testing", max_messages=1, max_message_size=1)
# Unless a `timeout` kwarg is passed, this will block until
# we can pop a message from the queue.
message = mq.get()
print("Get Message: %s" % message)
Example #23
# producer.py
import time

from baseplate.message_queue import MessageQueue

# If the queue doesn't already exist, we'll create it.
mq = MessageQueue("/test4", max_messages=5, max_message_size=3)

i = 1

while True:
    message = str(i)
    mq.put(message)
    print("Put Message: %s" % message)
    i += 1
    #time.sleep(1)
Example #24
def main():
    """Run a consumer.

    Two environment variables are expected:

    * CONFIG_URI: A PasteDeploy URI pointing at the configuration for the
      application.
    * QUEUE: The name of the queue to consume (currently one of "events" or
      "errors").

    """
    config_uri = os.environ["CONFIG_URI"]
    config = paste.deploy.loadwsgi.appconfig(config_uri)

    logging.config.fileConfig(config["__file__"])

    queue_name = os.environ["QUEUE"]
    queue = MessageQueue(
        "/" + queue_name,
        max_messages=MAXIMUM_QUEUE_LENGTH[queue_name],
        max_message_size=MAXIMUM_MESSAGE_SIZE[queue_name],
    )

    metrics_client = baseplate.make_metrics_client(config)

    topic_name = config["topic." + queue_name]

    # Details at http://kafka-python.readthedocs.org/en/1.0.2/apidoc/KafkaProducer.html
    producer_options = {
        "compression_type": "gzip",
        "batch_size": 20,
        "linger_ms": 10,
        "retries": int(config["kafka_retries"]),
        "retry_backoff_ms": _RETRY_DELAY_SECS * 1000,
    }

    def producer_error_cb(msg, queue):
        def requeue_msg(exc):
            _LOG.warning("failed to send message=%s due to error=%s", msg, exc)
            metrics_client.counter("injector.error").increment()
            queue.put(msg)

        return requeue_msg

    def producer_success_cb(success_val):
        metrics_client.counter("collected.injector").increment()

    while True:
        try:
            kafka_brokers = [
                broker.strip() for broker in config['kafka_brokers'].split(',')
            ]
            kafka_producer = KafkaProducer(bootstrap_servers=kafka_brokers,
                                           **producer_options)
        except KafkaError as exc:
            _LOG.warning("could not connect: %s", exc)
            metrics_client.counter("injector.connection_error").increment()
            time.sleep(_RETRY_DELAY_SECS)
            continue

        process_queue(queue,
                      topic_name,
                      kafka_producer,
                      producer_success_cb,
                      producer_error_cb,
                      metrics_client=metrics_client)

        kafka_producer.close()  # kafka-python's KafkaProducer exposes close(), not stop()
Example #25
class CollectorFunctionalTests(unittest.TestCase):
    def setUp(self):
        # We create the queues before the application code can, so that we
        # can override the max sizes with values that are safe to use without
        # extra privileges on Linux.
        self.events_queue = MessageQueue(name="/events",
                                         max_messages=10,
                                         max_message_size=8192)
        self.errors_queue = MessageQueue(name="/errors",
                                         max_messages=10,
                                         max_message_size=8192)

        class MockDatetime(datetime.datetime):
            @classmethod
            def utcnow(cls):
                return datetime.datetime(2015, 11, 17, 12, 34, 56)

        datetime.datetime = MockDatetime

        app = collector.make_app(global_config={},
                                 **{
                                     "key.TestKey1": "dGVzdA==",
                                     "msgq.events": "0xcafe",
                                     "msgq.errors": "0xdecaf",
                                     "allowed_origins": "example.com",
                                     "metrics.namespace": "eventcollector",
                                     "metrics.endpoint": "",
                                 })
        self.test_app = webtest.TestApp(app)

    def tearDown(self):
        self.events_queue.queue.unlink()
        self.events_queue.queue.close()
        self.errors_queue.queue.unlink()
        self.errors_queue.queue.close()

    def test_batch(self):
        self.test_app.post(
            "/v1",
            '[{"event1": "value"}, {"event2": "value"}]',
            headers={
                "Content-Type": "application/json",
                "User-Agent": "TestApp/1.0",
                "Date": "Wed, 25 Nov 2015 06:25:24 GMT",
                "X-Signature": "key=TestKey1, mac=d7aab40b9db8ae0e0b40d98e9c50b2cfc80ca06127b42fbbbdf146752b47a5ed",
            },
            extra_environ={
                "REMOTE_ADDR": "1.2.3.4",
            },
        )

        event1 = self.events_queue.get(timeout=0)
        self.assertEqual(
            event1,
            '{"ip": "1.2.3.4", "event": {"event1": "value"}, "time": "2015-11-17T12:34:56"}'
        )
        event2 = self.events_queue.get(timeout=0)
        self.assertEqual(
            event2,
            '{"ip": "1.2.3.4", "event": {"event2": "value"}, "time": "2015-11-17T12:34:56"}'
        )

        with self.assertRaises(TimedOutError):
            self.errors_queue.get(timeout=0)

    def test_cors(self):
        response = self.test_app.options("/v1", headers={
            "Origin": "http://example.com",
            "Access-Control-Request-Method": "POST",
            "Access-Control-Request-Headers": "X-Signature",
        })

        self.assertEqual(response.status_code, 204)
Example #26
def publish_events():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("config_file",
                            type=argparse.FileType("r"),
                            help="path to a configuration file")
    arg_parser.add_argument(
        "--queue-name",
        default="main",
        help="name of event queue / publisher config (default: main)",
    )
    arg_parser.add_argument("--debug",
                            default=False,
                            action="store_true",
                            help="enable debug logging")
    args = arg_parser.parse_args()

    if args.debug:
        level = logging.DEBUG
    else:
        level = logging.WARNING
    logging.basicConfig(level=level)

    config_parser = configparser.RawConfigParser()
    config_parser.read_file(args.config_file)
    raw_config = dict(config_parser.items("event-publisher:" +
                                          args.queue_name))
    cfg = config.parse_config(
        raw_config,
        {
            "collector": {
                "hostname": config.String,
                "version": config.Optional(config.Integer, default=1),
            },
            "key": {
                "name": config.String,
                "secret": config.Base64
            },
        },
    )

    metrics_client = metrics_client_from_config(raw_config)

    event_queue = MessageQueue("/events-" + args.queue_name,
                               max_messages=MAX_QUEUE_SIZE,
                               max_message_size=MAX_EVENT_SIZE)

    # pylint: disable=maybe-no-member
    serializer = SERIALIZER_BY_VERSION[cfg.collector.version]()
    batcher = TimeLimitedBatch(serializer, MAX_BATCH_AGE)
    publisher = BatchPublisher(metrics_client, cfg)

    while True:
        try:
            message = event_queue.get(timeout=0.2)
        except TimedOutError:
            # No event arrived in time; pass None through so the time-limited
            # batch can still flush once it grows older than MAX_BATCH_AGE.
            message = None

        try:
            batcher.add(message)
        except BatchFull:
            serialized = batcher.serialize()
            publisher.publish(serialized)
            batcher.reset()
            batcher.add(message)