Example #1
def reloadAggregationSchemas():
    global agg_schemas
    try:
        agg_schemas = loadAggregationSchemas()
    except:
        log.msg("Failed to reload aggregation schemas")
        log.err()
Example #2
def reloadAggregationSchemas():
  global agg_schemas
  try:
    agg_schemas = loadAggregationSchemas()
  except:
    log.msg("Failed to reload aggregation schemas")
    log.err()
Example #3
def reloadAggregationSchemas():
  global AGGREGATION_SCHEMAS
  try:
    AGGREGATION_SCHEMAS = loadAggregationSchemas()
  except Exception:
    log.msg("Failed to reload aggregation SCHEMAS")
    log.err()
Example #4
def reloadAggregationSchemas():
    global AGGREGATION_SCHEMAS
    try:
        AGGREGATION_SCHEMAS = loadAggregationSchemas()
    except Exception:
        log.msg("Failed to reload aggregation SCHEMAS")
        log.err()
Example #5
def reloadAggregationSchemas(first_run=False):
  global agg_schemas
  try:
    agg_schemas = loadAggregationSchemas()
  except Exception as e:
    if first_run:
      raise e
    log.msg("Failed to reload aggregation schemas")
    log.err()
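A reload function like the ones above is only useful if something calls it periodically. A minimal sketch of wiring it up with Twisted's LoopingCall (the 60-second interval and the surrounding service context are assumptions for illustration, not values taken from carbon's defaults):

from twisted.internet.task import LoopingCall

# Hypothetical wiring: re-read storage-aggregation.conf once a minute so
# config edits take effect without restarting the daemon.
aggregation_reload_task = LoopingCall(reloadAggregationSchemas)
aggregation_reload_task.start(60, now=False)  # don't fire immediately at startup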
Example #6
 def test_loadAggregationSchemas_return_schemas(self):
     from carbon.storage import loadAggregationSchemas, PatternSchema
     schema_list = loadAggregationSchemas()
     self.assertEquals(len(schema_list), 5)
     expected = [
         PatternSchema('min', '\.min$', (0.1, 'min')),
         PatternSchema('max', '\.max$', (0.1, 'max')),
         PatternSchema('sum', '\.count$', (0, 'sum')),
         PatternSchema('default_average', '.*', (0.5, 'average'))
     ]
     for schema, expected_schema in zip(schema_list[:-1], expected):
         self.assertEquals(schema.name, expected_schema.name)
         self.assertEquals(schema.pattern, expected_schema.pattern)
         self.assertEquals(schema.archives, expected_schema.archives)
Example #7
 def test_loadAggregationSchemas_return_schemas(self):
     from carbon.storage import loadAggregationSchemas, PatternSchema
     schema_list = loadAggregationSchemas()
     self.assertEquals(len(schema_list), 5)
     expected = [
         PatternSchema('min', '\.min$', (0.1, 'min')),
         PatternSchema('max', '\.max$', (0.1, 'max')),
         PatternSchema('sum', '\.count$', (0, 'sum')),
         PatternSchema('default_average', '.*', (0.5, 'average'))
     ]
     for schema, expected_schema in zip(schema_list[:-1], expected):
         self.assertEquals(schema.name, expected_schema.name)
         self.assertEquals(schema.pattern, expected_schema.pattern)
         self.assertEquals(schema.archives, expected_schema.archives)
Example #8
    def test_loadAggregationSchemas_return_schemas(self):
        from carbon.storage import loadAggregationSchemas, PatternSchema

        schema_list = loadAggregationSchemas()
        self.assertEquals(len(schema_list), 5)
        expected = [
            PatternSchema("min", "\.min$", (0.1, "min")),
            PatternSchema("max", "\.max$", (0.1, "max")),
            PatternSchema("sum", "\.count$", (0, "sum")),
            PatternSchema("default_average", ".*", (0.5, "average")),
        ]
        for schema, expected_schema in zip(schema_list[:-1], expected):
            self.assertEquals(schema.name, expected_schema.name)
            self.assertEquals(schema.pattern, expected_schema.pattern)
            self.assertEquals(schema.archives, expected_schema.archives)
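For context, the four pattern schemas this test expects correspond to a storage-aggregation.conf along the following lines. This is a reconstruction from the expected PatternSchema entries above, not a copy of the test fixture; loadAggregationSchemas() then appends the default aggregation schema as the fifth entry (see the tests below that check the last schema).

[min]
pattern = \.min$
xFilesFactor = 0.1
aggregationMethod = min

[max]
pattern = \.max$
xFilesFactor = 0.1
aggregationMethod = max

[sum]
pattern = \.count$
xFilesFactor = 0
aggregationMethod = sum

[default_average]
pattern = .*
xFilesFactor = 0.5
aggregationMethod = average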
Example #9
    def __init__(self, args):
        # Injecting the Carbon Lib Path if needed
        if args.carbonlibdir is not None:
            sys.path.insert(0, args.carbonlibdir)

        try:
            from carbon import conf
            from carbon.conf import settings
            # set carbon config dir
            settings.CONF_DIR = args.carbonconfigdir
            from carbon.storage import loadStorageSchemas, loadAggregationSchemas, DefaultSchema, PatternSchema
        except ImportError as e:
            raise SystemExit('[ERROR] Can\'t find the carbon module, try using --carbonlibdir to explicitly include the path')

        self.DefaultSchema = DefaultSchema
        self.PatternSchema = PatternSchema

        # load schemas from storage-schemas.conf and storage-aggregation.conf using carbon module
        self.storage_schemas = loadStorageSchemas()
        self.aggregation_schemas = loadAggregationSchemas()
Example #10
 def test_loadAggregationSchema_return_the_default_schema_last(self):
     from carbon.storage import loadAggregationSchemas, defaultAggregation
     schema_list = loadAggregationSchemas()
     last_schema = schema_list[-1]
     self.assertEquals(last_schema, defaultAggregation)
Example #11
try:
    from carbon import conf
    from carbon.conf import settings
except ImportError:
    raise SystemExit('[ERROR] Can\'t find the carbon module, try using --carbonlib to explicitly include the path')

# carbon.conf does not find the config files on its own, so point it at them explicitly
settings.CONF_DIR = configPath
settings.LOCAL_DATA_DIR = storagePath

# import these once we have the settings figured out
from carbon.storage import loadStorageSchemas, loadAggregationSchemas

# Load the Defined Schemas from our config files
schemas = loadStorageSchemas()
agg_schemas = loadAggregationSchemas()

# check to see if a metric needs to be resized based on the current config
def processMetric(fullPath, schemas, agg_schemas):
    """
        method to process a given metric, and resize it if necessary

        Parameters:
            fullPath    - full path to the metric whisper file
            schemas     - carbon storage schemas loaded from config
            agg_schemas - carbon storage aggregation schemas loaded from config

    """
    schema_config_args = ''
    schema_file_args   = ''
    rebuild = False
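The rest of processMetric is not shown here. The lookup it builds on is straightforward: walk the loaded schema lists and take the first entry whose pattern matches the dotted metric name. A minimal sketch of that lookup, assuming the schema objects expose the test() matcher from carbon.storage and that the whisper file path has already been converted to a dotted metric name (the helper name is hypothetical):

def matchSchemas(metric, schemas, agg_schemas):
    """Return the first storage schema and aggregation schema whose
    pattern matches the given metric name."""
    storage_schema = next((s for s in schemas if s.test(metric)), None)
    agg_schema = next((s for s in agg_schemas if s.test(metric)), None)
    return storage_schema, agg_schema

# e.g. matchSchemas('servers.web01.cpu.max', schemas, agg_schemas)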
Example #12
from carbon.storage import loadStorageSchemas, loadAggregationSchemas
from carbon.conf import settings
from carbon import log, events, instrumentation
from carbon.util import TokenBucket

from twisted.internet import reactor
from twisted.internet.task import LoopingCall
from twisted.application.service import Service

try:
    import signal
except ImportError:
    log.msg("Couldn't import signal module")

SCHEMAS = loadStorageSchemas()
AGGREGATION_SCHEMAS = loadAggregationSchemas()
CACHE_SIZE_LOW_WATERMARK = settings.MAX_CACHE_SIZE * 0.95

# Initialize token buckets so that we can enforce rate limits on creates and
# updates if the config wants them.
CREATE_BUCKET = None
UPDATE_BUCKET = None
if settings.MAX_CREATES_PER_MINUTE != float('inf'):
    capacity = settings.MAX_CREATES_PER_MINUTE
    fill_rate = float(settings.MAX_CREATES_PER_MINUTE) / 60
    CREATE_BUCKET = TokenBucket(capacity, fill_rate)

if settings.MAX_UPDATES_PER_SECOND != float('inf'):
    capacity = settings.MAX_UPDATES_PER_SECOND
    fill_rate = settings.MAX_UPDATES_PER_SECOND
    UPDATE_BUCKET = TokenBucket(capacity, fill_rate)
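The buckets themselves are only consulted later, when the writer decides whether it may create a new whisper file or flush an update. A minimal sketch of that check, assuming carbon.util's TokenBucket exposes a drain(cost) method that returns whether a token could be taken (treat the method name as an assumption if it differs in your carbon version):

def mayCreateNewMetric():
    # No create limit configured: CREATE_BUCKET stays None and creates
    # are always allowed.
    if CREATE_BUCKET is None:
        return True
    # drain(1) consumes one token if available and returns False when the
    # bucket is empty, deferring the create until it refills.
    return CREATE_BUCKET.drain(1)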
Example #13
 def test_loadAggregationSchema_return_the_default_schema_last(self):
     from carbon.storage import loadAggregationSchemas, defaultAggregation
     schema_list = loadAggregationSchemas()
     last_schema = schema_list[-1]
     self.assertEquals(last_schema, defaultAggregation)
Example #14
from carbon.storage import loadStorageSchemas, loadAggregationSchemas
from carbon.conf import settings
from carbon import log, events, instrumentation
from carbon.util import TokenBucket

from twisted.internet import reactor
from twisted.internet.task import LoopingCall
from twisted.application.service import Service

try:
    import signal
except ImportError:
    log.msg("Couldn't import signal module")


SCHEMAS = loadStorageSchemas()
AGGREGATION_SCHEMAS = loadAggregationSchemas()
CACHE_SIZE_LOW_WATERMARK = settings.MAX_CACHE_SIZE * 0.95


# Initialize token buckets so that we can enforce rate limits on creates and
# updates if the config wants them.
CREATE_BUCKET = None
UPDATE_BUCKET = None
if settings.MAX_CREATES_PER_MINUTE != float('inf'):
  capacity = settings.MAX_CREATES_PER_MINUTE
  fill_rate = float(settings.MAX_CREATES_PER_MINUTE) / 60
  CREATE_BUCKET = TokenBucket(capacity, fill_rate)

if settings.MAX_UPDATES_PER_SECOND != float('inf'):
  capacity = settings.MAX_UPDATES_PER_SECOND
  fill_rate = settings.MAX_UPDATES_PER_SECOND
  UPDATE_BUCKET = TokenBucket(capacity, fill_rate)
Example #15
def reloadAggregationSchemas():
  global AGGREGATION_SCHEMAS
  try:
    AGGREGATION_SCHEMAS = loadAggregationSchemas()
  except Exception as e:
    log.msg("Failed to reload aggregation SCHEMAS: %s" % (e))
Example #16
def reloadAggregationSchemas():
  global AGGREGATION_SCHEMAS
  try:
    AGGREGATION_SCHEMAS = loadAggregationSchemas()
  except Exception as e:
    log.msg("Failed to reload aggregation SCHEMAS: %s" % (e))
Example #17
import whisper
from carbon import state
from carbon.cache import MetricCache
from carbon.storage import getFilesystemPath, loadStorageSchemas,\
    loadAggregationSchemas
from carbon.conf import settings
from carbon import log, events, instrumentation

from twisted.internet import reactor
from twisted.internet.task import LoopingCall
from twisted.application.service import Service

lastCreateInterval = 0
createCount = 0
schemas = loadStorageSchemas()
agg_schemas = loadAggregationSchemas()
CACHE_SIZE_LOW_WATERMARK = settings.MAX_CACHE_SIZE * 0.95


def optimalWriteOrder():
    """Generates metrics with the most cached values first and applies a soft
  rate limit on new metrics"""
    global lastCreateInterval
    global createCount
    metrics = MetricCache.counts()

    t = time.time()
    metrics.sort(key=lambda item: item[1],
                 reverse=True)  # by queue size, descending
    log.debug("Sorted %d cache queues in %.6f seconds" %
              (len(metrics), time.time() - t))
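The snippet cuts off after the sort. Conceptually the generator then walks the sorted list, pops each metric's queued datapoints out of the cache, and yields them to the writer so that the fullest queues are flushed first. A small self-contained illustration of that ordering with a plain dict standing in for MetricCache (not the original function body):

# Plain-dict stand-in for MetricCache: metric name -> list of (timestamp, value)
cache = {
    'servers.web01.load': [(1, 0.4), (2, 0.5), (3, 0.6)],
    'servers.web01.cpu':  [(1, 12.0)],
    'servers.web02.load': [(1, 0.1), (2, 0.2)],
}

def optimal_write_order(cache):
    # Sort by queue size, descending, then drain each queue in that order.
    by_size = sorted(((m, len(points)) for m, points in cache.items()),
                     key=lambda item: item[1], reverse=True)
    for metric, _size in by_size:
        yield metric, cache.pop(metric)

for metric, datapoints in optimal_write_order(dict(cache)):
    print(metric, len(datapoints))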