def test_message_processor_integration(self):
    """
    A counter message pushed through a router backed by a real
    MessageProcessor ends up recorded as a counter metric.
    """
    processor = MessageProcessor()
    Router(processor, "").process("gorets:1|c")
    self.assertEqual(1, len(processor.counter_metrics))
class TestUDPRedirect(TxTestCase):
    """
    Integration test for the ``redirect_udp`` routing rule: a processed
    message must be forwarded verbatim to the configured UDP endpoint.
    """

    def setUp(self):
        self.service = MultiService()
        self.received = []

        class Collect(DatagramProtocol):
            # 'cself' is the protocol instance; the closed-over 'self'
            # is the test case, so datagrams are handed to the test's
            # got_data hook.
            def datagramReceived(cself, data, host_port):
                self.got_data(data)

        # Port 0 lets the OS choose a free UDP port for the collector.
        self.port = reactor.listenUDP(0, Collect())
        self.processor = TestMessageProcessor()
        self.router = Router(
            self.processor,
            r"any => redirect_udp 127.0.0.1 %s" % (self.port.getHost().port,),
            service=self.service)
        self.service.startService()
        return self.router.ready

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.service.stopService()
        # stopListening() returns a Deferred; yield it so the reactor is
        # fully cleaned up before the next test (avoids dirty-reactor
        # errors in trial).  The original dropped this Deferred.
        yield self.port.stopListening()

    def test_redirect(self):
        """
        A message processed by the router is redirected to the
        configured UDP address and received unchanged.
        """
        message = "gorets:1|c"
        d = defer.Deferred()

        def got_data(data):
            self.assertEqual(data, message)
            d.callback(True)
        self.got_data = got_data
        self.router.process(message)
        return d
def setUp(self):
    """Start a router that redirects everything to a local UDP collector."""
    self.service = MultiService()
    self.received = []
    test_case = self

    class Collect(DatagramProtocol):
        # Hand every received datagram to the enclosing test case.
        def datagramReceived(proto_self, data, host_port):
            test_case.got_data(data)

    # Bind to port 0 so the OS picks any free UDP port.
    self.port = reactor.listenUDP(0, Collect())
    self.processor = TestMessageProcessor()
    listen_port = self.port.getHost().port
    rules_config = r"any => redirect_udp 127.0.0.1 %s" % (listen_port,)
    self.router = Router(self.processor, rules_config, service=self.service)
    self.service.startService()
    return self.router.ready
def setUp(self):
    """Create a recording processor and a router with no rules."""
    recording_processor = TestMessageProcessor()
    self.processor = recording_processor
    self.router = Router(recording_processor, "")
class RouteMessagesTest(TestCase):
    """
    Exercise the routing rule language (drop, path_like, not, rewrite,
    set_metric_type) against a recording message processor.
    """

    def setUp(self):
        self.processor = TestMessageProcessor()
        self.router = Router(self.processor, "")

    def update_rules(self, rules_config):
        """Rebuild the router's rule set from *rules_config*."""
        self.router.rules = self.router.build_rules(rules_config)

    def test_message_processor_integration(self):
        """
        A message gets routed to a real MessageProcessor and is recorded
        as a counter metric.
        """
        real_processor = MessageProcessor()
        Router(real_processor, "").process("gorets:1|c")
        self.assertEqual(1, len(real_processor.counter_metrics))

    def test_receive_counter(self):
        """With no rules configured, a counter reaches the processor."""
        self.router.process("gorets:1|c")
        self.assertEqual(1, len(self.processor.messages))

    def test_any_and_drop(self):
        """
        The drop rule combined with "any" discards every message.
        """
        self.update_rules("any => drop")
        self.router.process("gorets:1|c")
        self.assertEqual(0, len(self.processor.messages))

    def test_metric_path_like(self):
        """
        path_like matches metric paths by glob expression.
        """
        self.update_rules("path_like goret* => drop")
        for message in ("gorets:1|c", "gorets:1|d", "goret:1|d",
                        "nomatch:1|d"):
            self.router.process(message)
        # Only the non-matching path survives.
        self.assertEqual(1, len(self.processor.messages))
        self.assertEqual("nomatch", self.processor.messages[0][2])

    def test_receive_two_rules_no_match(self):
        """
        A message matching none of several configured rules is still
        processed normally.
        """
        self.update_rules(
            "path_like goret* => drop\npath_like glork* => drop\n")
        self.router.process("nomatch:1|c")
        self.assertEqual(1, len(self.processor.messages))

    def test_not(self):
        """
        "not" inverts the path_like match, so everything else is dropped.
        """
        self.update_rules("not path_like goret* => drop")
        self.router.process("gorets:1|c")
        self.router.process("nomatch:1|d")
        self.assertEqual(1, len(self.processor.messages))
        self.assertEqual("gorets", self.processor.messages[0][2])

    def test_rewrite(self):
        """
        All messages are processed, but only matching paths are rewritten.
        """
        self.update_rules(r"any => rewrite (gorets) glork.\1")
        self.router.process("gorets:1|c")
        self.router.process("nomatch:1|d")
        self.assertEqual(2, len(self.processor.messages))
        self.assertEqual("glork.gorets", self.processor.messages[0][2])
        self.assertEqual("nomatch", self.processor.messages[1][2])

    def test_rewrite_and_dup(self):
        """
        With the dup flag, the original message is kept alongside the
        rewritten copy.
        """
        self.update_rules(r"any => rewrite (gorets) glork.\1 dup")
        self.router.process("gorets:1|c")
        self.router.process("nomatch:1|d")
        expected_paths = ("gorets", "glork.gorets", "nomatch")
        self.assertEqual(len(expected_paths), len(self.processor.messages))
        for index, path in enumerate(expected_paths):
            self.assertEqual(path, self.processor.messages[index][2])

    def test_rewrite_and_no_dup(self):
        """
        With no-dup, the original message is not duplicated — only the
        rewritten form is kept.
        """
        self.update_rules(r"any => rewrite (gorets) glork.\1 no-dup")
        self.router.process("gorets:1|c")
        self.router.process("nomatch:1|d")
        self.assertEqual(2, len(self.processor.messages))
        self.assertEqual("glork.gorets", self.processor.messages[0][2])
        self.assertEqual("nomatch", self.processor.messages[1][2])

    def test_set_metric_type(self):
        """
        set_metric_type replaces the metric type in place.
        """
        self.update_rules(r"any => set_metric_type d")
        self.router.process("gorets:1|c")
        first = self.processor.messages[0]
        self.assertEqual("d", first[1])
        self.assertEqual("gorets", first[2])

    def test_set_metric_type_dup(self):
        """
        With dup, both the original and the retyped message are kept,
        original first.
        """
        self.update_rules(r"any => set_metric_type d dup")
        self.router.process("gorets:1|c")
        for index, metric_type in enumerate(("c", "d")):
            self.assertEqual(metric_type, self.processor.messages[index][1])
            self.assertEqual("gorets", self.processor.messages[index][2])

    def test_set_metric_type_no_dup(self):
        """
        With no-dup, only the retyped message is kept.
        """
        self.update_rules(r"any => set_metric_type d no-dup")
        self.router.process("gorets:1|c")
        first = self.processor.messages[0]
        self.assertEqual("d", first[1])
        self.assertEqual("gorets", first[2])
def createService(options):
    """Create a txStatsD service.

    Assembles the full service hierarchy — message processor, router,
    carbon client, StatsD UDP/TCP listeners, reporting and httpinfo —
    under a single MultiService named "statsd".

    :param options: a mapping of configuration keys (e.g. "prefix",
        "routing", "listen-port") to values.  Note: the
        "carbon-cache-*" entries are mutated in place when empty.
    :return: the root MultiService, ready to be started.
    """
    # Imported here rather than at module level, presumably to keep
    # carbon an optional dependency — TODO confirm.
    from carbon.routers import ConsistentHashingRouter
    from carbon.client import CarbonClientManager
    from carbon.conf import settings

    settings.MAX_QUEUE_SIZE = options["max-queue-size"]
    settings.MAX_DATAPOINTS_PER_MESSAGE = options["max-datapoints-per-message"]

    root_service = MultiService()
    root_service.setName("statsd")

    prefix = options["prefix"]
    if prefix is None:
        prefix = "statsd"

    # Default the instance name to the local hostname.
    instance_name = options["instance-name"]
    if not instance_name:
        instance_name = platform.node()

    # initialize plugins
    plugin_metrics = []
    for plugin in getPlugins(IMetricFactory):
        plugin.configure(options)
        plugin_metrics.append(plugin)

    processor = None
    if options["dump-mode"]:
        # LoggingMessageProcessor supersedes
        # any other processor class in "dump-mode"
        # NOTE(review): guards against clobbering an existing log.info;
        # this assert is stripped under python -O, so the monkeypatch
        # would then proceed unconditionally.
        assert not hasattr(log, 'info')
        log.info = log.msg  # for compatibility with LMP logger interface
        processor = functools.partial(LoggingMessageProcessor, logger=log)

    if options["statsd-compliance"]:
        # etsy-statsd-compatible mode: plain MessageProcessor + Metrics.
        processor = (processor or MessageProcessor)(plugins=plugin_metrics)
        input_router = Router(processor, options['routing'], root_service)
        connection = InternalClient(input_router)
        metrics = Metrics(connection)
    else:
        # Extended mode: prefixed, configurable processor.
        processor = (processor or ConfigurableMessageProcessor)(
            message_prefix=prefix,
            internal_metrics_prefix=prefix + "." + instance_name + ".",
            plugins=plugin_metrics)
        input_router = Router(processor, options['routing'], root_service)
        connection = InternalClient(input_router)
        metrics = ExtendedMetrics(connection)

    # Fall back to a single local carbon-cache when none is configured.
    # These append into the options' existing lists (mutating them).
    if not options["carbon-cache-host"]:
        options["carbon-cache-host"].append("127.0.0.1")
    if not options["carbon-cache-port"]:
        options["carbon-cache-port"].append(2004)
    if not options["carbon-cache-name"]:
        options["carbon-cache-name"].append(None)

    reporting = ReportingService(instance_name)
    reporting.setServiceParent(root_service)

    # flush-interval is presumably in milliseconds; `/ 1000` converts to
    # seconds (integer division under Python 2 — TODO confirm intended).
    reporting.schedule(report_client_manager_stats,
                       options["flush-interval"] / 1000,
                       metrics.gauge)

    if options["report"] is not None:
        from txstatsd import process
        from twisted.internet import reactor
        # Always report reactor stats when any reporting is enabled.
        reporting.schedule(
            process.report_reactor_stats(reactor), 60, metrics.gauge)
        reports = [name.strip() for name in options["report"].split(",")]
        for report_name in reports:
            if report_name == "reactor":
                inspector = ReactorInspectorService(reactor, metrics,
                                                    loop_time=0.05)
                inspector.setServiceParent(root_service)
            # Look up e.g. process.NET_STATS for report_name "net";
            # unknown names silently yield no reporters.
            for reporter in getattr(process, "%s_STATS" %
                                    report_name.upper(), ()):
                reporting.schedule(reporter, 60, metrics.gauge)

    # XXX Make this configurable.
    router = ConsistentHashingRouter()
    carbon_client = CarbonClientManager(router)
    carbon_client.setServiceParent(root_service)

    # Start one carbon client per configured (host, port, name) triple.
    for host, port, name in zip(options["carbon-cache-host"],
                                options["carbon-cache-port"],
                                options["carbon-cache-name"]):
        carbon_client.startClient((host, port, name))

    statsd_service = StatsDService(carbon_client, input_router,
                                   options["flush-interval"])
    statsd_service.setServiceParent(root_service)

    statsd_server_protocol = StatsDServerProtocol(
        input_router,
        monitor_message=options["monitor-message"],
        monitor_response=options["monitor-response"])

    listener = UDPServer(options["listen-port"], statsd_server_protocol)
    listener.setServiceParent(root_service)

    # TCP listener is optional; only created when a port is configured.
    if options["listen-tcp-port"] is not None:
        statsd_tcp_server_factory = StatsDTCPServerFactory(
            input_router,
            monitor_message=options["monitor-message"],
            monitor_response=options["monitor-response"])

        listener = TCPServer(options["listen-tcp-port"],
                             statsd_tcp_server_factory)
        listener.setServiceParent(root_service)

    httpinfo_service = httpinfo.makeService(options, processor, statsd_service)
    httpinfo_service.setServiceParent(root_service)

    return root_service