Example 1
def createAggregatorService(config):
    from carbon.aggregator import receiver
    from carbon.aggregator.rules import RuleManager
    from carbon.routers import ConsistentHashingRouter
    from carbon.client import CarbonClientManager
    from carbon.rewrite import RewriteRuleManager
    from carbon.exceptions import CarbonConfigException
    from carbon.conf import settings
    from carbon import events, util
    from os.path import exists

    root_service = createBaseService(config)

    # Configure application components
    router = ConsistentHashingRouter(
        settings.REPLICATION_FACTOR,
        diverse_replicas=settings.DIVERSE_REPLICAS)
    client_manager = CarbonClientManager(router)
    client_manager.setServiceParent(root_service)

    events.metricReceived.addHandler(receiver.process)
    events.metricGenerated.addHandler(client_manager.sendDatapoint)

    RuleManager.read_from(settings["aggregation-rules"])
    if exists(settings["rewrite-rules"]):
        RewriteRuleManager.read_from(settings["rewrite-rules"])

    if not settings.DESTINATIONS:
        raise CarbonConfigException(
            "Required setting DESTINATIONS is missing from carbon.conf")

    for destination in util.parseDestinations(settings.DESTINATIONS):
        client_manager.startClient(destination)

    return root_service
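
Each entry produced by util.parseDestinations and handed to client_manager.startClient is a (host, port, instance) tuple, as the test cases and the txStatsD example further down also show. A minimal stand-in parser (an illustrative sketch, not carbon's own implementation) for DESTINATIONS strings of the form host:port[:instance] could look like this:

# Illustrative sketch only: turn "host:port[:instance]" strings into the
# (host, port, instance) tuples that CarbonClientManager.startClient() expects.
def parse_destinations_sketch(destination_strings):
    destinations = []
    for entry in destination_strings:
        parts = entry.strip().split(':')
        host = parts[0]
        port = int(parts[1])
        instance = parts[2] if len(parts) > 2 else None
        destinations.append((host, port, instance))
    return destinations

# parse_destinations_sketch(["127.0.0.1:2004:a", "10.0.0.2:2004"])
# -> [('127.0.0.1', 2004, 'a'), ('10.0.0.2', 2004, None)]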
Example 2
def createRelayService(config):
    from carbon.routers import RelayRulesRouter, ConsistentHashingRouter, AggregatedConsistentHashingRouter
    from carbon.client import CarbonClientManager
    from carbon.exceptions import CarbonConfigException
    from carbon.conf import settings
    from carbon import events, util

    root_service = createBaseService(config)

    # Configure application components
    if settings.RELAY_METHOD == 'rules':
        router = RelayRulesRouter(settings["relay-rules"])
    elif settings.RELAY_METHOD == 'consistent-hashing':
        router = ConsistentHashingRouter(
            settings.REPLICATION_FACTOR,
            diverse_replicas=settings.DIVERSE_REPLICAS)
    elif settings.RELAY_METHOD == 'aggregated-consistent-hashing':
        from carbon.aggregator.rules import RuleManager
        RuleManager.read_from(settings["aggregation-rules"])
        router = AggregatedConsistentHashingRouter(RuleManager,
                                                   settings.REPLICATION_FACTOR)
    else:
        # Fail fast on an unrecognized RELAY_METHOD instead of letting the
        # unbound `router` raise a NameError below.
        raise CarbonConfigException(
            "Invalid relay method: %s" % settings.RELAY_METHOD)

    client_manager = CarbonClientManager(router)
    client_manager.setServiceParent(root_service)

    events.metricReceived.addHandler(client_manager.sendDatapoint)
    events.metricGenerated.addHandler(client_manager.sendDatapoint)

    if not settings.DESTINATIONS:
        raise CarbonConfigException(
            "Required setting DESTINATIONS is missing from carbon.conf")

    for destination in util.parseDestinations(settings.DESTINATIONS):
        client_manager.startClient(destination)

    return root_service
Example 3
def createAggregatorService(config):
    from carbon.aggregator import receiver
    from carbon.aggregator.rules import RuleManager
    from carbon.routers import ConsistentHashingRouter
    from carbon.client import CarbonClientManager
    from carbon.rewrite import RewriteRuleManager
    from carbon.exceptions import CarbonConfigException
    from carbon.conf import settings
    from carbon import events, util
    from os.path import exists

    root_service = createBaseService(config)

    # Configure application components
    router = ConsistentHashingRouter()
    client_manager = CarbonClientManager(router)
    client_manager.setServiceParent(root_service)

    events.metricReceived.addHandler(receiver.process)
    events.metricGenerated.addHandler(client_manager.sendDatapoint)

    RuleManager.read_from(settings["aggregation-rules"])
    if exists(settings["rewrite-rules"]):
        RewriteRuleManager.read_from(settings["rewrite-rules"])

    if not settings.DESTINATIONS:
        raise CarbonConfigException(
            "Required setting DESTINATIONS is missing from carbon.conf")

    for destination in util.parseDestinations(settings.DESTINATIONS):
        client_manager.startClient(destination)

    return root_service
Example 4
def createRelayService(config):
    from carbon.routers import RelayRulesRouter, ConsistentHashingRouter, AggregatedConsistentHashingRouter
    from carbon.client import CarbonClientManager
    from carbon.exceptions import CarbonConfigException
    from carbon.conf import settings
    from carbon import events, util

    root_service = createBaseService(config)

    # Configure application components
    if settings.RELAY_METHOD == 'rules':
      router = RelayRulesRouter(settings["relay-rules"])
    elif settings.RELAY_METHOD == 'consistent-hashing':
      router = ConsistentHashingRouter(settings.REPLICATION_FACTOR)
    elif settings.RELAY_METHOD == 'aggregated-consistent-hashing':
      from carbon.aggregator.rules import RuleManager
      RuleManager.read_from(settings["aggregation-rules"])
      router = AggregatedConsistentHashingRouter(RuleManager, settings.REPLICATION_FACTOR)
    else:
      # Fail fast on an unrecognized RELAY_METHOD instead of hitting a NameError below.
      raise CarbonConfigException("Invalid relay method: %s" % settings.RELAY_METHOD)

    client_manager = CarbonClientManager(router)
    client_manager.setServiceParent(root_service)

    events.metricReceived.addHandler(client_manager.sendDatapoint)
    events.metricGenerated.addHandler(client_manager.sendDatapoint)
    events.specialMetricReceived.addHandler(client_manager.sendHighPriorityDatapoint)
    events.specialMetricGenerated.addHandler(client_manager.sendHighPriorityDatapoint)

    if not settings.DESTINATIONS:
      raise CarbonConfigException("Required setting DESTINATIONS is missing from carbon.conf")

    for destination in util.parseDestinations(settings.DESTINATIONS):
      client_manager.startClient(destination)

    return root_service
Example 5
if options.routing == 'consistent-hashing':
    router = ConsistentHashingRouter(options.replication,
                                     diverse_replicas=options.diverse_replicas)
elif options.routing == 'relay':
    if exists(options.relayrules):
        router = RelayRulesRouter(options.relayrules)
    else:
        print("relay rules file %s does not exist" % options.relayrules)
        raise SystemExit(1)

client_manager = CarbonClientManager(router)
reactor.callWhenRunning(client_manager.startService)

if options.keyfunc:
    router.setKeyFunctionFromModule(options.keyfunc)

firstConnectAttempts = [
    client_manager.startClient(dest) for dest in destinations
]
firstConnectsAttempted = defer.DeferredList(firstConnectAttempts)


class StdinMetricsReader(LineReceiver):
    delimiter = '\n'

    def lineReceived(self, line):
        # log.msg("[DEBUG] lineReceived(): %s" % line)
        try:
            (metric, value, timestamp) = line.split()
            datapoint = (float(timestamp), float(value))
            assert datapoint[1] == datapoint[1]  # filter out NaNs
            client_manager.sendDatapoint(metric, datapoint)
        except ValueError:
            log.err(None, 'Dropping invalid line: %s' % line)
Example 6
class CarbonClientManagerTest(TestCase):
    timeout = 1.0

    def setUp(self):
        self.router_mock = Mock(spec=DatapointRouter)
        self.factory_mock = Mock(spec=CarbonClientFactory)
        self.factory_patch = patch("carbon.client.CarbonClientFactory", new=self.factory_mock)
        self.factory_patch.start()
        self.client_mgr = CarbonClientManager(self.router_mock)

    def tearDown(self):
        self.factory_patch.stop()

    @patch("signal.signal", new=Mock())
    def test_start_service_installs_sig_ignore(self, signal_mock):
        from signal import SIGHUP, SIG_IGN

        self.client_mgr.startService()
        signal_mock.assert_called_once_with(SIGHUP, SIG_IGN)

    def test_start_service_starts_factory_connect(self):
        factory_mock = Mock(spec=CarbonClientFactory)
        factory_mock.started = False
        self.client_mgr.client_factories[("127.0.0.1", 2003, "a")] = factory_mock
        self.client_mgr.startService()
        factory_mock.startConnecting.assert_called_once_with()

    def test_stop_service_waits_for_clients_to_disconnect(self):
        dest = ("127.0.0.1", 2003, "a")
        self.client_mgr.startService()
        self.client_mgr.startClient(dest)

        disconnect_deferred = Deferred()
        reactor.callLater(0.1, disconnect_deferred.callback, 0)
        self.factory_mock.return_value.disconnect.return_value = disconnect_deferred
        return self.client_mgr.stopService()

    def test_start_client_instantiates_client_factory(self):
        dest = ("127.0.0.1", 2003, "a")
        self.client_mgr.startClient(dest)
        self.factory_mock.assert_called_once_with(dest)

    def test_start_client_ignores_duplicate(self):
        dest = ("127.0.0.1", 2003, "a")
        self.client_mgr.startClient(dest)
        self.client_mgr.startClient(dest)
        self.factory_mock.assert_called_once_with(dest)

    def test_start_client_starts_factory_if_running(self):
        dest = ("127.0.0.1", 2003, "a")
        self.client_mgr.startService()
        self.client_mgr.startClient(dest)
        self.factory_mock.return_value.startConnecting.assert_called_once_with()

    def test_start_client_adds_destination_to_router(self):
        dest = ("127.0.0.1", 2003, "a")
        self.client_mgr.startClient(dest)
        self.router_mock.addDestination.assert_called_once_with(dest)

    def test_stop_client_removes_destination_from_router(self):
        dest = ("127.0.0.1", 2003, "a")
        self.client_mgr.startClient(dest)
        self.client_mgr.stopClient(dest)
        self.router_mock.removeDestination.assert_called_once_with(dest)
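
The tests replace the router with Mock(spec=DatapointRouter), so the only router behaviour exercised here is the addDestination/removeDestination bookkeeping that CarbonClientManager drives. A hand-rolled router compatible with those calls might look like the sketch below; the getDestinations method name and the round-robin policy are assumptions for illustration, not carbon's ConsistentHashingRouter:

# Sketch of a minimal router exposing the calls exercised by the tests above.
# getDestinations() is an assumed lookup hook, shown only for illustration.
class RoundRobinRouterSketch(object):
    def __init__(self):
        self.destinations = []
        self._next = 0

    def addDestination(self, destination):
        # destination is a (host, port, instance) tuple
        if destination not in self.destinations:
            self.destinations.append(destination)

    def removeDestination(self, destination):
        if destination in self.destinations:
            self.destinations.remove(destination)

    def getDestinations(self, metric):
        # Hand each metric to the next destination in round-robin order.
        if not self.destinations:
            return
        dest = self.destinations[self._next % len(self.destinations)]
        self._next += 1
        yield dest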
Example 7
def createService(options):
    """Create a txStatsD service."""
    from carbon.routers import ConsistentHashingRouter
    from carbon.client import CarbonClientManager
    from carbon.conf import settings

    settings.MAX_QUEUE_SIZE = options["max-queue-size"]
    settings.MAX_DATAPOINTS_PER_MESSAGE = options["max-datapoints-per-message"]

    root_service = MultiService()
    root_service.setName("statsd")

    prefix = options["prefix"]
    if prefix is None:
        prefix = "statsd"

    instance_name = options["instance-name"]
    if not instance_name:
        instance_name = platform.node()

    # initialize plugins
    plugin_metrics = []
    for plugin in getPlugins(IMetricFactory):
        plugin.configure(options)
        plugin_metrics.append(plugin)

    processor = None
    if options["dump-mode"]:
        # LoggingMessageProcessor supersedes
        #  any other processor class in "dump-mode"
        assert not hasattr(log, 'info')
        log.info = log.msg  # for compatibility with LMP logger interface
        processor = functools.partial(LoggingMessageProcessor, logger=log)

    if options["statsd-compliance"]:
        processor = (processor or MessageProcessor)(plugins=plugin_metrics)
        input_router = Router(processor, options['routing'], root_service)
        connection = InternalClient(input_router)
        metrics = Metrics(connection)
    else:
        processor = (processor or ConfigurableMessageProcessor)(
            message_prefix=prefix,
            internal_metrics_prefix=prefix + "." + instance_name + ".",
            plugins=plugin_metrics)
        input_router = Router(processor, options['routing'], root_service)
        connection = InternalClient(input_router)
        metrics = ExtendedMetrics(connection)

    if not options["carbon-cache-host"]:
        options["carbon-cache-host"].append("127.0.0.1")
    if not options["carbon-cache-port"]:
        options["carbon-cache-port"].append(2004)
    if not options["carbon-cache-name"]:
        options["carbon-cache-name"].append(None)

    reporting = ReportingService(instance_name)
    reporting.setServiceParent(root_service)

    reporting.schedule(report_client_manager_stats,
                       options["flush-interval"] / 1000, metrics.gauge)

    if options["report"] is not None:
        from txstatsd import process
        from twisted.internet import reactor

        reporting.schedule(process.report_reactor_stats(reactor), 60,
                           metrics.gauge)
        reports = [name.strip() for name in options["report"].split(",")]
        for report_name in reports:
            if report_name == "reactor":
                inspector = ReactorInspectorService(reactor,
                                                    metrics,
                                                    loop_time=0.05)
                inspector.setServiceParent(root_service)

            for reporter in getattr(process, "%s_STATS" % report_name.upper(),
                                    ()):
                reporting.schedule(reporter, 60, metrics.gauge)

    # XXX Make this configurable.
    router = ConsistentHashingRouter()
    carbon_client = CarbonClientManager(router)
    carbon_client.setServiceParent(root_service)

    for host, port, name in zip(options["carbon-cache-host"],
                                options["carbon-cache-port"],
                                options["carbon-cache-name"]):
        carbon_client.startClient((host, port, name))

    statsd_service = StatsDService(carbon_client, input_router,
                                   options["flush-interval"])
    statsd_service.setServiceParent(root_service)

    statsd_server_protocol = StatsDServerProtocol(
        input_router,
        monitor_message=options["monitor-message"],
        monitor_response=options["monitor-response"])

    listener = UDPServer(options["listen-port"], statsd_server_protocol)
    listener.setServiceParent(root_service)

    if options["listen-tcp-port"] is not None:
        statsd_tcp_server_factory = StatsDTCPServerFactory(
            input_router,
            monitor_message=options["monitor-message"],
            monitor_response=options["monitor-response"])

        listener = TCPServer(options["listen-tcp-port"],
                             statsd_tcp_server_factory)
        listener.setServiceParent(root_service)

    httpinfo_service = httpinfo.makeService(options, processor, statsd_service)
    httpinfo_service.setServiceParent(root_service)

    return root_service
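
The three carbon-cache-* option lists above are parallel: position i of host, port and name together describe one cache backend, and the zip() loop turns them into the (host, port, name) tuples passed to carbon_client.startClient. For example:

# How the parallel option lists pair up into destination tuples (illustrative values).
hosts = ["10.0.0.1", "10.0.0.2"]
ports = [2004, 2004]
names = ["a", "b"]
destinations = list(zip(hosts, ports, names))
# -> [('10.0.0.1', 2004, 'a'), ('10.0.0.2', 2004, 'b')]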
Example 8
def createService(options):
    """Create a txStatsD service."""
    from carbon.routers import ConsistentHashingRouter
    from carbon.client import CarbonClientManager
    from carbon.conf import settings

    settings.MAX_QUEUE_SIZE = options["max-queue-size"]
    settings.MAX_DATAPOINTS_PER_MESSAGE = options["max-datapoints-per-message"]

    root_service = MultiService()
    root_service.setName("statsd")

    prefix = options["prefix"]
    if prefix is None:
        prefix = "statsd"

    instance_name = options["instance-name"]
    if not instance_name:
        instance_name = platform.node()

    # initialize plugins
    plugin_metrics = []
    for plugin in getPlugins(IMetricFactory):
        plugin.configure(options)
        plugin_metrics.append(plugin)

    processor = None
    if options["dump-mode"]:
        # LoggingMessageProcessor supersedes
        #  any other processor class in "dump-mode"
        assert not hasattr(log, 'info')
        log.info = log.msg  # for compatibility with LMP logger interface
        processor = functools.partial(LoggingMessageProcessor, logger=log)

    if options["statsd-compliance"]:
        processor = (processor or MessageProcessor)(plugins=plugin_metrics)
        input_router = Router(processor, options['routing'], root_service)
        connection = InternalClient(input_router)
        metrics = Metrics(connection)
    else:
        processor = (processor or ConfigurableMessageProcessor)(
            message_prefix=prefix,
            internal_metrics_prefix=prefix + "." + instance_name + ".",
            plugins=plugin_metrics)
        input_router = Router(processor, options['routing'], root_service)
        connection = InternalClient(input_router)
        metrics = ExtendedMetrics(connection)

    if not options["carbon-cache-host"]:
        options["carbon-cache-host"].append("127.0.0.1")
    if not options["carbon-cache-port"]:
        options["carbon-cache-port"].append(2004)
    if not options["carbon-cache-name"]:
        options["carbon-cache-name"].append(None)

    reporting = ReportingService(instance_name)
    reporting.setServiceParent(root_service)

    reporting.schedule(report_client_manager_stats,
                       options["flush-interval"] / 1000,
                       metrics.gauge)

    if options["report"] is not None:
        from txstatsd import process
        from twisted.internet import reactor

        reporting.schedule(
            process.report_reactor_stats(reactor), 60, metrics.gauge)
        reports = [name.strip() for name in options["report"].split(",")]
        for report_name in reports:
            if report_name == "reactor":
                inspector = ReactorInspectorService(reactor, metrics,
                                                    loop_time=0.05)
                inspector.setServiceParent(root_service)

            for reporter in getattr(process, "%s_STATS" %
                                    report_name.upper(), ()):
                reporting.schedule(reporter, 60, metrics.gauge)

    # XXX Make this configurable.
    router = ConsistentHashingRouter()
    carbon_client = CarbonClientManager(router)
    carbon_client.setServiceParent(root_service)

    for host, port, name in zip(options["carbon-cache-host"],
                                options["carbon-cache-port"],
                                options["carbon-cache-name"]):
        carbon_client.startClient((host, port, name))

    statsd_service = StatsDService(carbon_client, input_router,
                                   options["flush-interval"])
    statsd_service.setServiceParent(root_service)

    statsd_server_protocol = StatsDServerProtocol(
        input_router,
        monitor_message=options["monitor-message"],
        monitor_response=options["monitor-response"])

    listener = UDPServer(options["listen-port"], statsd_server_protocol)
    listener.setServiceParent(root_service)

    if options["listen-tcp-port"] is not None:
        statsd_tcp_server_factory = StatsDTCPServerFactory(
            input_router,
            monitor_message=options["monitor-message"],
            monitor_response=options["monitor-response"])

        listener = TCPServer(options["listen-tcp-port"],
                             statsd_tcp_server_factory)
        listener.setServiceParent(root_service)

    httpinfo_service = httpinfo.makeService(options, processor, statsd_service)
    httpinfo_service.setServiceParent(root_service)

    return root_service
Example 9
if options.routing == 'consistent-hashing':
  router = ConsistentHashingRouter(options.replication, diverse_replicas=options.diverse_replicas)
elif options.routing == 'relay':
  if exists(options.relayrules):
    router = RelayRulesRouter(options.relayrules)
  else:
    print("relay rules file %s does not exist" % options.relayrules)
    raise SystemExit(1)

client_manager = CarbonClientManager(router)
reactor.callWhenRunning(client_manager.startService)

if options.keyfunc:
  router.setKeyFunctionFromModule(options.keyfunc)

firstConnectAttempts = [client_manager.startClient(dest) for dest in destinations]
firstConnectsAttempted = defer.DeferredList(firstConnectAttempts)


class StdinMetricsReader(LineReceiver):
  delimiter = '\n'

  def lineReceived(self, line):
    # log.msg("[DEBUG] lineReceived(): %s" % line)
    try:
      (metric, value, timestamp) = line.split()
      datapoint = (float(timestamp), float(value))
      assert datapoint[1] == datapoint[1]  # filter out NaNs
      client_manager.sendDatapoint(metric, datapoint)
    except ValueError:
      log.err(None, 'Dropping invalid line: %s' % line)
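
StdinMetricsReader accepts one datapoint per line as three whitespace-separated fields, metric value timestamp, exactly what lineReceived splits apart before calling client_manager.sendDatapoint(metric, (timestamp, value)). A quick way to produce well-formed input (a sketch of the expected line format, not part of carbon-client itself):

# Sketch: emit lines in the "metric value timestamp" format the reader expects.
import time

now = int(time.time())
for metric, value in [("servers.web01.load", 0.42), ("servers.web01.mem_free", 81234.0)]:
    print("%s %s %d" % (metric, value, now))
# Each such line, read from stdin, becomes
# client_manager.sendDatapoint(metric, (timestamp, value)).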
Example 10
class CarbonClientManagerTest(TestCase):
  timeout = 1.0

  def setUp(self):
    self.router_mock = Mock(spec=DatapointRouter)
    self.factory_mock = Mock(spec=CarbonPickleClientFactory)
    self.client_mgr = CarbonClientManager(self.router_mock)
    self.client_mgr.createFactory = lambda dest: self.factory_mock(dest, self.router_mock)

  def test_start_service_installs_sig_ignore(self):
    from signal import SIGHUP, SIG_IGN

    with patch('signal.signal', new=Mock()) as signal_mock:
      self.client_mgr.startService()
      signal_mock.assert_called_once_with(SIGHUP, SIG_IGN)

  def test_start_service_starts_factory_connect(self):
    factory_mock = Mock(spec=CarbonPickleClientFactory)
    factory_mock.started = False
    self.client_mgr.client_factories[('127.0.0.1', 2003, 'a')] = factory_mock
    self.client_mgr.startService()
    factory_mock.startConnecting.assert_called_once_with()

  def test_stop_service_waits_for_clients_to_disconnect(self):
    dest = ('127.0.0.1', 2003, 'a')
    self.client_mgr.startService()
    self.client_mgr.startClient(dest)

    disconnect_deferred = Deferred()
    reactor.callLater(0.1, disconnect_deferred.callback, 0)
    self.factory_mock.return_value.disconnect.return_value = disconnect_deferred
    return self.client_mgr.stopService()

  def test_start_client_instantiates_client_factory(self):
    dest = ('127.0.0.1', 2003, 'a')
    self.client_mgr.startClient(dest)
    self.factory_mock.assert_called_once_with(dest, self.router_mock)

  def test_start_client_ignores_duplicate(self):
    dest = ('127.0.0.1', 2003, 'a')
    self.client_mgr.startClient(dest)
    self.client_mgr.startClient(dest)
    self.factory_mock.assert_called_once_with(dest, self.router_mock)

  def test_start_client_starts_factory_if_running(self):
    dest = ('127.0.0.1', 2003, 'a')
    self.client_mgr.startService()
    self.client_mgr.startClient(dest)
    self.factory_mock.return_value.startConnecting.assert_called_once_with()

  def test_start_client_adds_destination_to_router(self):
    dest = ('127.0.0.1', 2003, 'a')
    self.client_mgr.startClient(dest)
    self.router_mock.addDestination.assert_called_once_with(dest)

  def test_stop_client_removes_destination_from_router(self):
    dest = ('127.0.0.1', 2003, 'a')
    self.client_mgr.startClient(dest)
    self.client_mgr.stopClient(dest)
    self.router_mock.removeDestination.assert_called_once_with(dest)
Example 11
if options.routing == 'consistent-hashing':
  router = ConsistentHashingRouter(options.replication, diverse_replicas=options.diverse_replicas)
elif options.routing == 'relay':
  if exists(options.relayrules):
    router = RelayRulesRouter(options.relayrules)
  else:
    print "relay rules file %s does not exist" % options.relayrules
    raise SystemExit(1)

client_manager = CarbonClientManager(router)
reactor.callWhenRunning(client_manager.startService)

if options.keyfunc:
  router.setKeyFunctionFromModule(options.keyfunc)

firstConnectAttempts = [client_manager.startClient(dest) for dest in destinations]
firstConnectsAttempted = defer.DeferredList(firstConnectAttempts)


class StdinMetricsReader(LineReceiver):
  delimiter = '\n'

  def lineReceived(self, line):
    #log.msg("[DEBUG] lineReceived(): %s" % line)
    try:
      (metric, value, timestamp) = line.split()
      datapoint = (float(timestamp), float(value))
      assert datapoint[1] == datapoint[1] # filter out NaNs
      client_manager.sendDatapoint(metric, datapoint)
    except ValueError:
      log.err(None, 'Dropping invalid line: %s' % line)