Code Example #1
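A Cluster-style constructor that supports several carbon versions: it inspects the signature of ConsistentHashingRouter.__init__ to decide whether to pass the replication factor directly or wrap it in a settings object, and only passes a custom hash_type when ConsistentHashRing accepts one.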
    def __init__(self, config, cluster='main'):
        # Support multiple versions of carbon, the API changed in 0.10.
        args = inspect.getargspec(ConsistentHashingRouter.__init__).args
        if 'replication_factor' in args:
            r = ConsistentHashingRouter(config.replication_factor(cluster))
        else:
            class Settings(object):
                REPLICATION_FACTOR = config.replication_factor(cluster)
                DIVERSE_REPLICAS = False
                ROUTER_HASH_TYPE = None
            r = ConsistentHashingRouter(Settings())

        # 'hash_type' was added only in carbon 1.0.2 or master
        args = inspect.getargspec(ConsistentHashRing.__init__).args
        if 'hash_type' in args:
            r.ring = ConsistentHashRing(nodes=[],
                                        hash_type=config.hashing_type(cluster))

        self.ring = r

        try:
            dest_list = config.destinations(cluster)
            self.destinations = util.parseDestinations(dest_list)
        except ValueError as e:
            raise SystemExit("Unable to parse destinations!" + str(e))

        for d in self.destinations:
            self.ring.addDestination(d)
Code Example #2
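A simpler constructor for carbon versions whose ConsistentHashingRouter takes the replication factor directly; the configured destinations are parsed and added to the ring.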
    def __init__(self, config, cluster='main'):
        self.ring = ConsistentHashingRouter(config.replication_factor(cluster))

        try:
            dest_list = config.destinations(cluster)
            self.destinations = util.parseDestinations(dest_list)
        except ValueError as e:
            raise SystemExit("Unable to parse destinations!" + str(e))

        for d in self.destinations:
            self.ring.addDestination(d)
Code Example #3
File: cluster.py Project: unbrice/carbonate
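The complete Cluster class: the constructor builds the ring and registers the destinations, and getDestinations() delegates metric lookups to the router. A minimal standalone sketch of this pattern follows the example.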
class Cluster():
    def __init__(self, config, cluster='main'):
        self.ring = ConsistentHashingRouter(config.replication_factor(cluster))

        try:
            dest_list = config.destinations(cluster)
            self.destinations = util.parseDestinations(dest_list)
        except ValueError as e:
            raise SystemExit("Unable to parse destinations!" + str(e))

        for d in self.destinations:
            self.ring.addDestination(d)

    def getDestinations(self, metric):
        return self.ring.getDestinations(metric)
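The Cluster examples above share one small pattern: build the router, register destinations, and ask which nodes own a metric. The following is a minimal sketch of that pattern (not taken from any of the projects above), assuming an older carbon API in which ConsistentHashingRouter accepts replication_factor directly, as in Example #3; the hosts and the metric name are placeholders.

from carbon.routers import ConsistentHashingRouter
from carbon.util import parseDestinations

# Build the hash ring with the desired replication factor
# (older carbon API: the factor is passed directly).
router = ConsistentHashingRouter(replication_factor=2)

# Placeholder destinations in the "host:port[:instance]" form
# that parseDestinations expects.
for destination in parseDestinations(["10.0.0.1:2004:a", "10.0.0.2:2004:b"]):
    router.addDestination(destination)

# getDestinations yields the (host, port, instance) tuples that own the metric.
for node in router.getDestinations("servers.web01.cpu.user"):
    print(node)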
Code Example #4
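A relay service factory that picks a router based on RELAY_METHOD ('rules', 'consistent-hashing', or 'aggregated-consistent-hashing'), attaches it to a CarbonClientManager, and starts a client for every destination listed in carbon.conf.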
def createRelayService(config):
    from carbon.routers import RelayRulesRouter, ConsistentHashingRouter, AggregatedConsistentHashingRouter
    from carbon.client import CarbonClientManager
    from carbon.conf import settings
    from carbon import events

    root_service = createBaseService(config)

    # Configure application components
    if settings.RELAY_METHOD == 'rules':
        router = RelayRulesRouter(settings["relay-rules"])
    elif settings.RELAY_METHOD == 'consistent-hashing':
        router = ConsistentHashingRouter(
            settings.REPLICATION_FACTOR,
            diverse_replicas=settings.DIVERSE_REPLICAS)
    elif settings.RELAY_METHOD == 'aggregated-consistent-hashing':
        from carbon.aggregator.rules import RuleManager
        RuleManager.read_from(settings["aggregation-rules"])
        router = AggregatedConsistentHashingRouter(RuleManager,
                                                   settings.REPLICATION_FACTOR)

    client_manager = CarbonClientManager(router)
    client_manager.setServiceParent(root_service)

    events.metricReceived.addHandler(client_manager.sendDatapoint)
    events.metricGenerated.addHandler(client_manager.sendDatapoint)

    if not settings.DESTINATIONS:
        raise CarbonConfigException(
            "Required setting DESTINATIONS is missing from carbon.conf")

    for destination in util.parseDestinations(settings.DESTINATIONS):
        client_manager.startClient(destination)

    return root_service
Code Example #5
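An aggregator service factory: received metrics go through the aggregation receiver, generated datapoints are sent to the destinations via a ConsistentHashingRouter, and aggregation and optional rewrite rules are loaded from their configuration files.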
def createAggregatorService(config):
    from carbon.aggregator import receiver
    from carbon.aggregator.rules import RuleManager
    from carbon.routers import ConsistentHashingRouter
    from carbon.client import CarbonClientManager
    from carbon.rewrite import RewriteRuleManager
    from carbon.conf import settings
    from carbon import events

    root_service = createBaseService(config)

    # Configure application components
    router = ConsistentHashingRouter(
        settings.REPLICATION_FACTOR,
        diverse_replicas=settings.DIVERSE_REPLICAS)
    client_manager = CarbonClientManager(router)
    client_manager.setServiceParent(root_service)

    events.metricReceived.addHandler(receiver.process)
    events.metricGenerated.addHandler(client_manager.sendDatapoint)

    RuleManager.read_from(settings["aggregation-rules"])
    if exists(settings["rewrite-rules"]):
        RewriteRuleManager.read_from(settings["rewrite-rules"])

    if not settings.DESTINATIONS:
        raise CarbonConfigException(
            "Required setting DESTINATIONS is missing from carbon.conf")

    for destination in util.parseDestinations(settings.DESTINATIONS):
        client_manager.startClient(destination)

    return root_service
Code Example #6
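A relay processor setup similar to Example #4, but it stores the CarbonClientManager in module-level state and also passes diverse_replicas to the aggregated-consistent-hashing router.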
def setupRelayProcessor(root_service, settings):
    from carbon.routers import AggregatedConsistentHashingRouter, \
        ConsistentHashingRouter, RelayRulesRouter
    from carbon.client import CarbonClientManager

    if settings.RELAY_METHOD == 'consistent-hashing':
        router = ConsistentHashingRouter(
            settings.REPLICATION_FACTOR,
            diverse_replicas=settings.DIVERSE_REPLICAS)
    elif settings.RELAY_METHOD == 'aggregated-consistent-hashing':
        from carbon.aggregator.rules import RuleManager
        aggregation_rules_path = settings["aggregation-rules"]
        RuleManager.read_from(aggregation_rules_path)
        router = AggregatedConsistentHashingRouter(
            RuleManager,
            settings.REPLICATION_FACTOR,
            diverse_replicas=settings.DIVERSE_REPLICAS)
    elif settings.RELAY_METHOD == 'rules':
        router = RelayRulesRouter(settings["relay-rules"])

    state.client_manager = CarbonClientManager(router)
    state.client_manager.setServiceParent(root_service)

    for destination in util.parseDestinations(settings.DESTINATIONS):
        state.client_manager.startClient(destination)
Code Example #7
File: cluster.py Project: yunstanford/carbonate
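A variant of Example #1 that only handles the replication-factor API difference between carbon versions; it does not configure a custom hash type.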
    def __init__(self, config, cluster='main'):
        # Support multiple versions of carbon, the API changed in 0.10.
        args = inspect.getargspec(ConsistentHashingRouter.__init__).args
        if 'replication_factor' in args:
            ring = ConsistentHashingRouter(config.replication_factor(cluster))
        else:

            class Settings(object):
                REPLICATION_FACTOR = config.replication_factor(cluster)
                DIVERSE_REPLICAS = False

            ring = ConsistentHashingRouter(Settings())

        self.ring = ring

        try:
            dest_list = config.destinations(cluster)
            self.destinations = util.parseDestinations(dest_list)
        except ValueError as e:
            raise SystemExit("Unable to parse destinations!" + str(e))

        for d in self.destinations:
            self.ring.addDestination(d)
Code Example #8
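A constructor that also supports the 'aggregated-consistent-hashing' relay method, loading aggregation rules when they are provided, in addition to the version handling shown in Example #1.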
    def __init__(self, config, cluster='main', aggregation_rules=None):
        relay_method = config.relay_method(cluster=cluster)
        if relay_method == "consistent-hashing":
            # Support multiple versions of carbon, the API changed in 0.10.
            args = inspect.getargspec(ConsistentHashingRouter.__init__).args
            if 'replication_factor' in args:
                r = ConsistentHashingRouter(config.replication_factor(cluster))
            else:

                class Settings(object):
                    REPLICATION_FACTOR = config.replication_factor(cluster)
                    DIVERSE_REPLICAS = False

                r = ConsistentHashingRouter(Settings())

            # 'hash_type' was added only in carbon 1.0.2 or master
            args = inspect.getargspec(ConsistentHashRing.__init__).args
            if 'hash_type' in args:
                r.ring = ConsistentHashRing(
                    nodes=[], hash_type=config.hashing_type(cluster))
        elif relay_method == "aggregated-consistent-hashing":
            if aggregation_rules:
                RuleManager.read_from(aggregation_rules)
            r = AggregatedConsistentHashingRouter(
                RuleManager, config.replication_factor(cluster))

        self.ring = r

        try:
            dest_list = config.destinations(cluster)
            self.destinations = util.parseDestinations(dest_list)
        except ValueError as e:
            raise SystemExit("Unable to parse destinations!" + str(e))

        for d in self.destinations:
            self.ring.addDestination(d)
Code Example #9
File: carbon-client.py Project: pavvyb/proj3rdyear
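A command-line client: destinations are parsed from 'host:port[:instance]' arguments, and the routing option selects either a ConsistentHashingRouter (with diverse_replicas) or a RelayRulesRouter.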
    parts = arg.split(':', 2)
    host = parts[0]
    port = int(parts[1])
    if len(parts) > 2:
        instance = parts[2]
    else:
        instance = None
    destinations.append((host, port, instance))

if options.debug:
    log.logToStdout()
    log.setDebugEnabled(True)
    defer.setDebugging(True)

if options.routing == 'consistent-hashing':
    router = ConsistentHashingRouter(options.replication,
                                     diverse_replicas=options.diverse_replicas)
elif options.routing == 'relay':
    if exists(options.relayrules):
        router = RelayRulesRouter(options.relayrules)
    else:
        print("relay rules file %s does not exist" % options.relayrules)
        raise SystemExit(1)

client_manager = CarbonClientManager(router)
reactor.callWhenRunning(client_manager.startService)

if options.keyfunc:
    router.setKeyFunctionFromModule(options.keyfunc)

firstConnectAttempts = [
    client_manager.startClient(dest) for dest in destinations
Code Example #10
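A txStatsD service factory; among other components, it creates a ConsistentHashingRouter and a CarbonClientManager to ship the aggregated datapoints to one or more carbon caches.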
def createService(options):
    """Create a txStatsD service."""
    from carbon.routers import ConsistentHashingRouter
    from carbon.client import CarbonClientManager
    from carbon.conf import settings

    settings.MAX_QUEUE_SIZE = options["max-queue-size"]
    settings.MAX_DATAPOINTS_PER_MESSAGE = options["max-datapoints-per-message"]

    root_service = MultiService()
    root_service.setName("statsd")

    prefix = options["prefix"]
    if prefix is None:
        prefix = "statsd"

    instance_name = options["instance-name"]
    if not instance_name:
        instance_name = platform.node()

    # initialize plugins
    plugin_metrics = []
    for plugin in getPlugins(IMetricFactory):
        plugin.configure(options)
        plugin_metrics.append(plugin)

    processor = None
    if options["dump-mode"]:
        # LoggingMessageProcessor supersedes
        #  any other processor class in "dump-mode"
        assert not hasattr(log, 'info')
        log.info = log.msg  # for compatibility with LMP logger interface
        processor = functools.partial(LoggingMessageProcessor, logger=log)

    if options["statsd-compliance"]:
        processor = (processor or MessageProcessor)(plugins=plugin_metrics)
        input_router = Router(processor, options['routing'], root_service)
        connection = InternalClient(input_router)
        metrics = Metrics(connection)
    else:
        processor = (processor or ConfigurableMessageProcessor)(
            message_prefix=prefix,
            internal_metrics_prefix=prefix + "." + instance_name + ".",
            plugins=plugin_metrics)
        input_router = Router(processor, options['routing'], root_service)
        connection = InternalClient(input_router)
        metrics = ExtendedMetrics(connection)

    if not options["carbon-cache-host"]:
        options["carbon-cache-host"].append("127.0.0.1")
    if not options["carbon-cache-port"]:
        options["carbon-cache-port"].append(2004)
    if not options["carbon-cache-name"]:
        options["carbon-cache-name"].append(None)

    reporting = ReportingService(instance_name)
    reporting.setServiceParent(root_service)

    reporting.schedule(report_client_manager_stats,
                       options["flush-interval"] / 1000, metrics.gauge)

    if options["report"] is not None:
        from txstatsd import process
        from twisted.internet import reactor

        reporting.schedule(process.report_reactor_stats(reactor), 60,
                           metrics.gauge)
        reports = [name.strip() for name in options["report"].split(",")]
        for report_name in reports:
            if report_name == "reactor":
                inspector = ReactorInspectorService(reactor,
                                                    metrics,
                                                    loop_time=0.05)
                inspector.setServiceParent(root_service)

            for reporter in getattr(process, "%s_STATS" % report_name.upper(),
                                    ()):
                reporting.schedule(reporter, 60, metrics.gauge)

    # XXX Make this configurable.
    router = ConsistentHashingRouter()
    carbon_client = CarbonClientManager(router)
    carbon_client.setServiceParent(root_service)

    for host, port, name in zip(options["carbon-cache-host"],
                                options["carbon-cache-port"],
                                options["carbon-cache-name"]):
        carbon_client.startClient((host, port, name))

    statsd_service = StatsDService(carbon_client, input_router,
                                   options["flush-interval"])
    statsd_service.setServiceParent(root_service)

    statsd_server_protocol = StatsDServerProtocol(
        input_router,
        monitor_message=options["monitor-message"],
        monitor_response=options["monitor-response"])

    listener = UDPServer(options["listen-port"], statsd_server_protocol)
    listener.setServiceParent(root_service)

    if options["listen-tcp-port"] is not None:
        statsd_tcp_server_factory = StatsDTCPServerFactory(
            input_router,
            monitor_message=options["monitor-message"],
            monitor_response=options["monitor-response"])

        listener = TCPServer(options["listen-tcp-port"],
                             statsd_tcp_server_factory)
        listener.setServiceParent(root_service)

    httpinfo_service = httpinfo.makeService(options, processor, statsd_service)
    httpinfo_service.setServiceParent(root_service)

    return root_service
Code Example #11
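A small diagnostic script: it reads DESTINATIONS and REPLICATION_FACTOR from the [relay] section of carbon.conf, rebuilds the hash ring, and prints the routes for a given metric key.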
        metric_key = arg

# Check required key        
if not metric_key: 
    print('Usage: python graphite-router.py -k <metric key>')
    sys.exit(2)

## Settings
# Absolute path to the Graphite Data Directory
DATA_DIR = join(ROOT_DIR, 'storage/whisper')

# Parse config
settings.readFrom(join(ROOT_DIR, 'conf/carbon.conf'), 'relay')

# Read in destinations from config
destinations = util.parseDestinations(settings.DESTINATIONS)

# Setup Router
router = ConsistentHashingRouter(settings.REPLICATION_FACTOR)
 
for destination in destinations:
    router.addDestination(destination)

# Echo routes
print('routes for ' + metric_key) 
routes = router.getDestinations(metric_key)
for route in routes:
    print(route)


Code Example #12
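A maintenance script that rebuilds the hash ring from carbon.conf, remembers which destinations belong to the local target node, and then walks the whisper data directory to process orphaned whisper files.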
    if response.lower() != 'y':
        sys.exit()
    

## Settings
# Absolute path to the Graphite Data Directory
DATA_DIR = join(ROOT_DIR, 'storage/whisper')

# Parse config
settings.readFrom(join(ROOT_DIR, 'conf/carbon.conf'), 'relay')

# Read in destinations from config
destinations = util.parseDestinations(settings.DESTINATIONS)

# Setup Router
router = ConsistentHashingRouter(settings.REPLICATION_FACTOR)
 
for destination in destinations:
    if destination[0] == target_node:
        local_destinations.append(destination) 
    
    router.addDestination(destination)

 
# Walk Data dir and process orphaned whisper files 
for dirname, dirnames, filenames in os.walk(DATA_DIR):
    #if dirname.startswith(join(DATA_DIR, settings.CARBON_METRIC_PREFIX)):
    #    continue
    for filename in filenames:
        pathname = os.path.join(dirname, filename)
        basename, ext = os.path.splitext(filename)
Code Example #13
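An older variant of the command-line client in Example #9 that passes only the replication factor to the router (no diverse_replicas option).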
    parts = arg.split(':', 2)
    host = parts[0]
    port = int(parts[1])
    if len(parts) > 2:
        instance = parts[2]
    else:
        instance = None
    destinations.append((host, port, instance))

if options.debug:
    log.logToStdout()
    log.setDebugEnabled(True)
    defer.setDebugging(True)

if options.routing == 'consistent-hashing':
    router = ConsistentHashingRouter(options.replication)
elif options.routing == 'relay':
    if exists(options.relayrules):
        router = RelayRulesRouter(options.relayrules)
    else:
        print "relay rules file %s does not exist" % options.relayrules
        raise SystemExit(1)

client_manager = CarbonClientManager(router)
reactor.callWhenRunning(client_manager.startService)

if options.keyfunc:
    router.setKeyFunctionFromModule(options.keyfunc)

firstConnectAttempts = [
    client_manager.startClient(dest) for dest in destinations
Code Example #14
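The same command-line client with two-space indentation; this excerpt also includes the DeferredList that gathers the initial connection attempts.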
  parts = arg.split(':', 2)
  host = parts[0]
  port = int(parts[1])
  if len(parts) > 2:
    instance = parts[2]
  else:
    instance = None
  destinations.append((host, port, instance))

if options.debug:
  log.logToStdout()
  log.setDebugEnabled(True)
  defer.setDebugging(True)

if options.routing == 'consistent-hashing':
  router = ConsistentHashingRouter(options.replication, diverse_replicas=options.diverse_replicas)
elif options.routing == 'relay':
  if exists(options.relayrules):
    router = RelayRulesRouter(options.relayrules)
  else:
    print("relay rules file %s does not exist" % options.relayrules)
    raise SystemExit(1)

client_manager = CarbonClientManager(router)
reactor.callWhenRunning(client_manager.startService)

if options.keyfunc:
  router.setKeyFunctionFromModule(options.keyfunc)

firstConnectAttempts = [client_manager.startClient(dest) for dest in destinations]
firstConnectsAttempted = defer.DeferredList(firstConnectAttempts)
Code Example #15
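Another variant of the command-line client; here the router is also given a hash_type taken from the command-line options.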
    parts = arg.split(':', 2)
    host = parts[0]
    port = int(parts[1])
    if len(parts) > 2:
        instance = parts[2]
    else:
        instance = None
    destinations.append((host, port, instance))

if options.debug:
    log.logToStdout()
    log.setDebugEnabled(True)
    defer.setDebugging(True)

if options.routing == 'consistent-hashing':
    router = ConsistentHashingRouter(options.replication,
                                     hash_type=options.hashtype)
elif options.routing == 'relay':
    if exists(options.relayrules):
        router = RelayRulesRouter(options.relayrules)
    else:
        print "relay rules file %s does not exist" % options.relayrules
        raise SystemExit(1)

client_manager = CarbonClientManager(router)
reactor.callWhenRunning(client_manager.startService)

if options.keyfunc:
    router.setKeyFunctionFromModule(options.keyfunc)

firstConnectAttempts = [
    client_manager.startClient(dest) for dest in destinations
Code Example #16
File: snippet.py Project: szabo92/gistable
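A scan script from a gist: 32 shard destinations are registered with a replication factor of 2, and the whisper directory tree is walked, skipping files that are not .wsp.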
# 32 shards, 01-32.
REPLICATION_FACTOR = 2
WHISPER_PATH = '/opt/graphite/storage/whisper'
DESTINATIONS = [
    "vnode-01:3001:01", "vnode-02:3002:02", "vnode-03:3003:03", "vnode-04:3004:04",
    "vnode-05:3005:05", "vnode-06:3006:06", "vnode-07:3007:07", "vnode-08:3008:08",
    "vnode-09:3009:09", "vnode-10:3010:10", "vnode-11:3011:11", "vnode-12:3012:12",
    "vnode-13:3013:13", "vnode-14:3014:14", "vnode-15:3015:15", "vnode-16:3016:16",
    "vnode-17:3017:17", "vnode-18:3018:18", "vnode-19:3019:19", "vnode-20:3020:20",
    "vnode-21:3021:21", "vnode-22:3022:22", "vnode-23:3023:23", "vnode-24:3024:24",
    "vnode-25:3025:25", "vnode-26:3026:26", "vnode-27:3027:27", "vnode-28:3028:28",
    "vnode-29:3029:29", "vnode-30:3030:30", "vnode-31:3031:31", "vnode-32:3032:32"
]

# use the ConsistentHashingRouter from carbon-relay
router = ConsistentHashingRouter(replication_factor=REPLICATION_FACTOR)
for dest in DESTINATIONS:
    router.addDestination(dest.split(':'))

file_map = {}
count = 0

print "Beginning scan of: %s" % WHISPER_PATH
for dirname, dirnames, filenames in os.walk(WHISPER_PATH):
    for filename in filenames:
        pathname = os.path.join(dirname, filename)
        rel_pathname = os.path.relpath(pathname, WHISPER_PATH)
        basename, ext = os.path.splitext(filename)
        if '.wsp' != ext:
            print('skipping %s' % pathname)