Example #1
def make_app(global_config, **settings):
    """Paste entry point: return a configured WSGI application."""

    config = Configurator(settings=settings)

    keystore = {}
    for setting, value in settings.items():
        key_prefix = "key."
        if setting.startswith(key_prefix):
            key_name = setting[len(key_prefix):]
            key_secret = base64.b64decode(value)
            keystore[key_name] = key_secret

    allowed_origins = [
        x.strip() for x in settings["allowed_origins"].split(",") if x.strip()
    ]

    metrics_client = baseplate.make_metrics_client(settings)
    event_queue = MessageQueue("/events",
                               max_messages=MAXIMUM_QUEUE_LENGTH,
                               max_message_size=MAXIMUM_EVENT_SIZE)
    error_queue = MessageQueue("/errors",
                               max_messages=MAXIMUM_QUEUE_LENGTH,
                               max_message_size=MAXIMUM_EVENT_SIZE)
    collector = EventCollector(keystore, metrics_client, event_queue,
                               error_queue, allowed_origins)
    config.add_route("v1", "/v1", request_method="POST")
    config.add_route("v1_options", "/v1", request_method="OPTIONS")
    config.add_view(collector.process_request, route_name="v1")
    config.add_view(collector.check_cors, route_name="v1_options")
    config.add_route("health", "/health")
    config.add_view(health_check, route_name="health", renderer="json")

    return config.make_wsgi_app()
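The "key." prefix handling above strips the prefix from each setting name and base64-decodes its value. As a standalone sketch (using the same TestKey1 setting that Example #2 passes in), the loop is equivalent to:

import base64

settings = {"key.TestKey1": "dGVzdA==", "allowed_origins": "example.com"}
keystore = {
    name[len("key."):]: base64.b64decode(value)
    for name, value in settings.items()
    if name.startswith("key.")
}
# keystore == {"TestKey1": b"test"}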
Example #2
    def setUp(self):
        # we create the queues before the actual code can so that we can
        # override the max sizes to use these numbers which are safe to use
        # without extra privileges on linux
        self.events_queue = MessageQueue(name="/events",
                                         max_messages=10,
                                         max_message_size=8192)
        self.errors_queue = MessageQueue(name="/errors",
                                         max_messages=10,
                                         max_message_size=8192)

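        # Pin utcnow() to a fixed instant so timestamps in the test are deterministic.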
        class MockDatetime(datetime.datetime):
            @classmethod
            def utcnow(cls):
                return datetime.datetime(2015, 11, 17, 12, 34, 56)

        datetime.datetime = MockDatetime

        app = collector.make_app(global_config={},
                                 **{
                                     "key.TestKey1": "dGVzdA==",
                                     "msgq.events": "0xcafe",
                                     "msgq.errors": "0xdecaf",
                                     "allowed_origins": "example.com",
                                     "metrics.namespace": "eventcollector",
                                     "metrics.endpoint": "",
                                 })
        self.test_app = webtest.TestApp(app)
Example #3
 def __init__(self, name, event_serializer=serialize_v1_event):
     self.queue = MessageQueue(
         "/events-" + name,
         max_messages=MAX_QUEUE_SIZE,
         max_message_size=MAX_EVENT_SIZE,
     )
     self.serialize_event = event_serializer
Example #4
    def test_put_zero_timeout(self):
        message_queue = MessageQueue(self.qname, max_messages=1, max_message_size=1)

        with contextlib.closing(message_queue) as mq:
            mq.put(b"x", timeout=0)
            message = mq.get()
            self.assertEqual(message, b"x")
Example #5
    def test_put_full_zero_timeout(self):
        message_queue = MessageQueue(self.qname, max_messages=1, max_message_size=1)

        with contextlib.closing(message_queue) as mq:
            mq.put(b"1", timeout=0)

            with self.assertRaises(TimedOutError):
                mq.put(b"2", timeout=0)
Example #6
    def test_create_queue(self):
        message_queue = MessageQueue(self.qname,
                                     max_messages=1,
                                     max_message_size=1)

        with contextlib.closing(message_queue) as mq:
            self.assertEqual(mq.queue.max_messages, 1)
            self.assertEqual(mq.queue.max_message_size, 1)
Example #7
def publish_traces():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("config_file", type=argparse.FileType("r"),
        help="path to a configuration file")
    arg_parser.add_argument("--queue-name", default="main",
        help="name of trace queue / publisher config (default: main)")
    arg_parser.add_argument("--debug", default=False, action="store_true",
        help="enable debug logging")
    arg_parser.add_argument("--app-name", default="main", metavar="NAME",
        help="name of app to load from config_file (default: main)")
    args = arg_parser.parse_args()

    if args.debug:
        level = logging.DEBUG
    else:
        level = logging.WARNING
    logging.basicConfig(level=level)

    config_parser = configparser.RawConfigParser()
    config_parser.read_file(args.config_file)

    publisher_raw_cfg = dict(config_parser.items("trace-publisher:" + args.queue_name))
    publisher_cfg = config.parse_config(publisher_raw_cfg, {
        "zipkin_api_url": config.Endpoint,
        "post_timeout": config.Optional(config.Integer, POST_TIMEOUT_DEFAULT),
        "max_batch_size": config.Optional(config.Integer, MAX_BATCH_SIZE_DEFAULT),
        "retry_limit": config.Optional(config.Integer, RETRY_LIMIT_DEFAULT),
    })

    trace_queue = MessageQueue(
        "/traces-" + args.queue_name,
        max_messages=MAX_QUEUE_SIZE,
        max_message_size=MAX_SPAN_SIZE,
    )

    # pylint: disable=maybe-no-member
    inner_batch = TraceBatch(max_size=publisher_cfg.max_batch_size)
    batcher = TimeLimitedBatch(inner_batch, MAX_BATCH_AGE)
    metrics_client = metrics_client_from_config(publisher_raw_cfg)
    publisher = ZipkinPublisher(
        publisher_cfg.zipkin_api_url.address,
        metrics_client,
        post_timeout=publisher_cfg.post_timeout,
    )

    while True:
        try:
            message = trace_queue.get(timeout=0.2)
        except TimedOutError:
            message = None

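        # message is None after a timeout; adding it anyway gives the
        # time-limited batch a chance to raise BatchFull once MAX_BATCH_AGE
        # has elapsed, so aged batches still get flushed.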
        try:
            batcher.add(message)
        except BatchFull:
            serialized = batcher.serialize()
            publisher.publish(serialized)
            batcher.reset()
            batcher.add(message)
Example #8
    def test_get_timeout(self):
        message_queue = MessageQueue(self.qname, max_messages=1, max_message_size=1)

        with contextlib.closing(message_queue) as mq:
            start = time.time()
            with self.assertRaises(TimedOutError):
                mq.get(timeout=0.1)
            elapsed = time.time() - start
            self.assertAlmostEqual(elapsed, 0.1, places=2)
Example #9
def main():
    """Run a consumer.

    Two environment variables are expected:

    * CONFIG_URI: A PasteDeploy URI pointing at the configuration for the
      application.
    * QUEUE: The name of the queue to consume (currently one of "events" or
      "errors").

    """
    config_uri = os.environ["CONFIG_URI"]
    config = paste.deploy.loadwsgi.appconfig(config_uri)

    logging.config.fileConfig(config["__file__"])

    queue_name = os.environ["QUEUE"]
    queue = MessageQueue("/" + queue_name,
                         max_messages=MAXIMUM_QUEUE_LENGTH,
                         max_message_size=MAXIMUM_EVENT_SIZE)

    metrics_client = baseplate.make_metrics_client(config)

    topic_name = config["topic." + queue_name]

    producer_options = {
        "codec": CODEC_GZIP,
        "batch_send_every_n": 20,
        "batch_send_every_t": 0.01,  # 10 milliseconds
    }

    while True:
        try:
            kafka_client = KafkaClient(config["kafka_brokers"])
            kafka_producer = SimpleProducer(kafka_client, **producer_options)
        except KafkaError as exc:
            _LOG.warning("could not connect: %s", exc)
            metrics_client.counter("injector.connection_error").increment()
            time.sleep(_RETRY_DELAY)
            continue

        while True:
            message = queue.get()
            for retry in itertools.count():
                try:
                    kafka_producer.send_messages(topic_name, message)
                except KafkaError as exc:
                    _LOG.warning("failed to send message: %s", exc)
                    metrics_client.counter("injector.error").increment()
                    time.sleep(_RETRY_DELAY)
                else:
                    metrics_client.counter("collected.injector").increment()
                    break
        kafka_producer.stop()
Example #10
 def __init__(self, queue_name):
     self.queue = MessageQueue(
         "/traces-" + queue_name,
         max_messages=MAX_SIDECAR_QUEUE_SIZE,
         max_message_size=MAX_SIDECAR_MESSAGE_SIZE,
     )
Example #11
 def __init__(self, name):
     self.queue = MessageQueue(
         "/events-" + name,
         max_messages=MAX_QUEUE_SIZE,
         max_message_size=MAX_EVENT_SIZE,
     )
Example #12
# consumer.py
from baseplate.message_queue import MessageQueue

mq = MessageQueue("/baseplate-testing", max_messages=1, max_message_size=1)
# Unless a `timeout` kwarg is passed, this will block until
# we can pop a message from the queue.
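# With a timeout, e.g. mq.get(timeout=0.1), TimedOutError is raised
# instead if nothing arrives in time (see Example #8).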
message = mq.get()
print("Get Message: %s" % message)
Example #13
# producer.py
import time

from baseplate.message_queue import MessageQueue

# If the queue doesn't already exist, we'll create it.
mq = MessageQueue("/test4", max_messages=5, max_message_size=3)

i = 1

while True:
    message = str(i)
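    # The queue allows max_message_size=3, so puts will start failing once
    # str(i) exceeds three bytes (i >= 1000).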
    mq.put(message)
    print("Put Message: %s" % message)
    i += 1
    #time.sleep(1)
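POSIX message queues persist in the kernel until removed, so "/test4" outlives the script above. A minimal cleanup sketch, assuming the close()/unlink() methods of baseplate's posix_ipc-backed MessageQueue:

# cleanup.py
from baseplate.message_queue import MessageQueue

mq = MessageQueue("/test4", max_messages=5, max_message_size=3)
mq.unlink()  # remove the queue's name from the system
mq.close()   # release this process's handle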
Example #14
def main():
    """Run a consumer.

    Two environment variables are expected:

    * CONFIG_URI: A PasteDeploy URI pointing at the configuration for the
      application.
    * QUEUE: The name of the queue to consume (currently one of "events" or
      "errors").

    """
    config_uri = os.environ["CONFIG_URI"]
    config = paste.deploy.loadwsgi.appconfig(config_uri)

    logging.config.fileConfig(config["__file__"])

    queue_name = os.environ["QUEUE"]
    queue = MessageQueue(
        "/" + queue_name,
        max_messages=MAXIMUM_QUEUE_LENGTH[queue_name],
        max_message_size=MAXIMUM_MESSAGE_SIZE[queue_name],
    )

    metrics_client = baseplate.make_metrics_client(config)

    topic_name = config["topic." + queue_name]

    # Details at http://kafka-python.readthedocs.org/en/1.0.2/apidoc/KafkaProducer.html
    producer_options = {
        "compression_type": "gzip",
        "batch_size": 20,
        "linger_ms": 10,
        "retries": int(config["kafka_retries"]),
        "retry_backoff_ms": _RETRY_DELAY_SECS * 1000,
    }

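    # Build a per-message errback: log the failure, count it, and requeue
    # the message so it is not lost.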
    def producer_error_cb(msg, queue):
        def requeue_msg(exc):
            _LOG.warning("failed to send message=%s due to error=%s", msg, exc)
            metrics_client.counter("injector.error").increment()
            queue.put(msg)

        return requeue_msg

    def producer_success_cb(success_val):
        metrics_client.counter("collected.injector").increment()

    while True:
        try:
            kafka_brokers = [
                broker.strip() for broker in config['kafka_brokers'].split(',')
            ]
            kafka_producer = KafkaProducer(bootstrap_servers=kafka_brokers,
                                           **producer_options)
        except KafkaError as exc:
            _LOG.warning("could not connect: %s", exc)
            metrics_client.counter("injector.connection_error").increment()
            time.sleep(_RETRY_DELAY_SECS)
            continue

        process_queue(queue,
                      topic_name,
                      kafka_producer,
                      producer_success_cb,
                      producer_error_cb,
                      metrics_client=metrics_client)

        kafka_producer.close()
Example #15
def publish_events():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("config_file",
                            type=argparse.FileType("r"),
                            help="path to a configuration file")
    arg_parser.add_argument(
        "--queue-name",
        default="main",
        help="name of event queue / publisher config (default: main)",
    )
    arg_parser.add_argument("--debug",
                            default=False,
                            action="store_true",
                            help="enable debug logging")
    args = arg_parser.parse_args()

    if args.debug:
        level = logging.DEBUG
    else:
        level = logging.WARNING
    logging.basicConfig(level=level)

    config_parser = configparser.RawConfigParser()
    config_parser.read_file(args.config_file)
    raw_config = dict(config_parser.items("event-publisher:" +
                                          args.queue_name))
    cfg = config.parse_config(
        raw_config,
        {
            "collector": {
                "hostname": config.String,
                "version": config.Optional(config.Integer, default=1),
            },
            "key": {
                "name": config.String,
                "secret": config.Base64
            },
        },
    )

    metrics_client = metrics_client_from_config(raw_config)

    event_queue = MessageQueue("/events-" + args.queue_name,
                               max_messages=MAX_QUEUE_SIZE,
                               max_message_size=MAX_EVENT_SIZE)

    # pylint: disable=maybe-no-member
    serializer = SERIALIZER_BY_VERSION[cfg.collector.version]()
    batcher = TimeLimitedBatch(serializer, MAX_BATCH_AGE)
    publisher = BatchPublisher(metrics_client, cfg)

    while True:
        try:
            message = event_queue.get(timeout=0.2)
        except TimedOutError:
            message = None

        try:
            batcher.add(message)
        except BatchFull:
            serialized = batcher.serialize()
            publisher.publish(serialized)
            batcher.reset()
            batcher.add(message)