def __init__(self, config=None):
    """
    Create a new instance of the InfluxdbeHandler
    """
    # Initialize Handler
    Handler.__init__(self, config)

    if not InfluxDBClient:
        self.log.error('influxdb.client.InfluxDBClient import failed. '
                       'Handler disabled')
        # Fix: actually disable the handler and stop. The original only
        # logged and fell through, so _connect() below crashed with an
        # unusable client (sibling InfluxDB handlers disable + return).
        self.enabled = False
        return

    # Initialize Options
    # Config values arrive as strings, hence the "True" comparison.
    if self.config['ssl'] == "True":
        self.ssl = True
    else:
        self.ssl = False
    self.hostname = self.config['hostname']
    self.port = int(self.config['port'])
    self.username = self.config['username']
    self.password = self.config['password']
    self.database = self.config['database']
    self.batch_size = int(self.config['batch_size'])
    self.batch_count = 0
    self.time_precision = self.config['time_precision']

    # Initialize Data
    self.batch = {}
    self.influx = None

    # Connect
    self._connect()
def __init__(self, config=None):
    """
    Create a new instance of cloudwatchHandler class
    """
    # Initialize Handler
    Handler.__init__(self, config)

    # Fix: guard the optional boto dependency before using it (the
    # improved sibling cloudwatch handler does this); otherwise the
    # metadata call below raises when boto failed to import.
    if not boto:
        self.log.error(
            "CloudWatch: Boto is not installed, please install boto.")
        return

    # Initialize Data
    self.connection = None

    # Initialize Options
    self.region = self.config['region']

    # Fix: the original indexed ['instance-id'] unconditionally, which
    # raises KeyError when not running on EC2 / metadata unreachable.
    instance_metadata = boto.utils.get_instance_metadata()
    if 'instance-id' not in instance_metadata:
        self.log.error('CloudWatch: Failed to load instance metadata')
        return
    self.instance_id = instance_metadata['instance-id']
    self.log.debug("Setting InstanceID: " + self.instance_id)

    # Keys allowed inside a config [section]
    self.valid_config = ('region', 'collector', 'metric', 'namespace',
                         "name", "unit")

    # Build one rule dict per config Section, warning on unknown keys.
    self.rules = []
    for key_name, section in self.config.items():
        if section.__class__ is Section:
            keys = section.keys()
            rules = {}
            for key in keys:
                if key not in self.valid_config:
                    self.log.warning("invalid key %s in section %s",
                                     key, section.name)
                else:
                    rules[key] = section[key]
            self.rules.append(rules)

    # Create CloudWatch Connection
    self._bind()
def __init__(self, config=None):
    """
    Create a new instance of the LibratoHandler class
    """
    Handler.__init__(self, config)
    logging.debug("Initialized Librato handler.")

    # librato is optional; without it the handler stays inert
    if librato is None:
        logging.error("Failed to load librato module")
        return

    # API connection and queue options
    self.api = librato.connect(self.config['user'], self.config['apikey'])
    self.queue = []
    self.queue_max_age = int(self.config['queue_max_age'])
    self.queue_max_size = int(self.config['queue_max_size'])
    self.queue_max_interval = int(self.config['queue_max_interval'])
    self.queue_max_timestamp = int(time.time() + self.queue_max_interval)
    self.disable_force_flush = bool(self.config['disable_force_flush'])

    # A bare string in the config arrives as a scalar; normalize to a list
    filters = self.config['include_filters']
    if isinstance(filters, basestring):
        filters = [filters]
    self.include_reg = re.compile(r'(?:%s)' % '|'.join(filters))
def __init__(self, config=None):
    """Initialize the pubsub handler: check deps, read options, build client."""
    Handler.__init__(self, config)

    # Both Google client libraries are required
    if discovery is None:
        logging.error("Failed to load apiclient.discovery")
        return
    elif GoogleCredentials is None:
        logging.error("Failed to load "
                      "oauth2client.client.GoogleCredentials")
        return

    # Options
    self.topic = self.config['topic']
    self.scopes = self.config['scopes']
    self.retries = int(self.config['retries'])
    self.batch = self.config['batch']
    self.batch_size = int(self.config['batch_size'])
    self.metrics = []

    # Parse "key:value" tag entries into a dict
    self.tags = dict(entry.split(':') for entry in self.config['tags'])

    # Build the pubsub client with application-default credentials
    credentials = GoogleCredentials.get_application_default()
    if credentials.create_scoped_required():
        credentials = credentials.create_scoped(self.scopes)
    self.client = discovery.build('pubsub', 'v1', credentials=credentials)
def __init__(self, config=None):
    """
    Create a new instance of the ObservabilityHandler class
    """
    Handler.__init__(self, config)
    logging.debug("Initialized Observability handler.")

    # Both OAuth libraries are required; bail out early when missing
    if oauth2 is None:
        logging.error("Failed to load oauthlib module")
        return
    if requests_oauthlib is None:
        logging.error("Failed to load requests_oauthlib module")
        return

    # Credentials and queue options
    self.client_key = self.config['client_key']
    self.client_secret = self.config['client_secret']
    self.floor_seconds = int(self.config['floor_seconds'])
    self.queue = []
    self.queue_max_age = int(self.config['queue_max_age'])

    # OAuth2 session; the token is fetched lazily elsewhere
    oauth_client = oauth2.BackendApplicationClient(client_id=self.client_key)
    self.session = requests_oauthlib.OAuth2Session(client=oauth_client)
    self.token = None
def __init__(self, config=None):
    """
    Create a new instance of the LibratoHandler class
    """
    Handler.__init__(self, config)
    logging.debug("Initialized Librato handler.")

    # librato is optional; without it the handler stays inert
    if librato is None:
        logging.error("Failed to load librato module")
        return

    # Open the API queue and read batching options
    connection = librato.connect(self.config['user'], self.config['apikey'])
    self.queue = connection.new_queue()
    self.queue_max_size = int(self.config['queue_max_size'])
    self.queue_max_interval = int(self.config['queue_max_interval'])
    self.queue_max_timestamp = int(time.time() + self.queue_max_interval)
    self.current_n_measurements = 0

    # Accept a bare string as a one-element filter list
    filters = self.config['include_filters']
    if isinstance(filters, basestring):
        filters = [filters]
    self.include_reg = re.compile(r'(?:%s)' % '|'.join(filters))
def __init__(self, config=None):
    """
    Create a new instance of rmqHandler class
    """
    Handler.__init__(self, config)

    # Connection state, populated by _bind()
    self.connection = None
    self.channel = None

    # Options (all optional, with defaults)
    self.server = self.config.get('server', '127.0.0.1')
    self.port = int(self.config.get('port', 5672))
    self.topic_exchange = self.config.get('topic_exchange', 'diamond')
    self.vhost = self.config.get('vhost', '')
    self.user = self.config.get('user', 'guest')
    self.password = self.config.get('password', 'guest')
    self.routing_key = self.config.get('routing_key', 'metric')
    self.custom_routing_key = self.config.get('custom_routing_key',
                                              'diamond')

    # pika is optional; without it the handler is disabled
    if not pika:
        self.log.error('pika import failed. Handler disabled')
        return

    # Declare the topic exchange; a down broker is logged, not fatal
    try:
        self._bind()
    except pika.exceptions.AMQPConnectionError:
        self.log.error('Failed to bind to rabbitMQ topic exchange')
def __init__(self, config=None):
    """
    Create a new instance of the SensuHandler class
    """
    Handler.__init__(self, config)

    # Socket is created by _connect()
    self.socket = None

    # Transport options
    self.proto = self.config['proto'].lower().strip()
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.timeout = int(self.config['timeout'])
    self.keepalive = bool(self.config['keepalive'])
    self.keepaliveinterval = int(self.config['keepaliveinterval'])

    # Batching / backlog options
    self.batch_size = int(self.config['batch'])
    self.max_backlog_multiplier = int(
        self.config['max_backlog_multiplier'])
    self.trim_backlog_multiplier = int(
        self.config['trim_backlog_multiplier'])
    self.metrics = []

    self._connect()
def __init__(self, config=None):
    """
    Create a new instance of the StatsdHandler class
    """
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")

    # statsd is optional; disable the handler when missing
    if not statsd:
        self.log.error('statsd import failed. Handler disabled')
        self.enabled = False
        return

    # Warn about the legacy python-statsd client
    if not hasattr(statsd, 'StatsClient'):
        self.log.warn('python-statsd support is deprecated '
                      'and will be removed in the future. '
                      'Please use https://pypi.python.org/pypi/statsd/')

    # Options and per-metric state
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.batch_size = int(self.config['batch'])
    self.metrics = []
    self.old_values = {}

    self._connect()
def __init__(self, config=None):
    """
    Create a new instance of the GraphiteHandler class
    """
    Handler.__init__(self, config)

    # Socket is created lazily by _connect()
    self.socket = None

    # Transport options
    self.proto = self.config['proto'].lower().strip()
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.timeout = float(self.config['timeout'])
    self.keepalive = bool(self.config['keepalive'])
    self.keepaliveinterval = int(self.config['keepaliveinterval'])
    self.flow_info = self.config['flow_info']
    self.scope_id = self.config['scope_id']

    # Batching / backlog options
    self.batch_size = int(self.config['batch'])
    self.max_backlog_multiplier = int(
        self.config['max_backlog_multiplier'])
    self.trim_backlog_multiplier = int(
        self.config['trim_backlog_multiplier'])
    self.metrics = []

    # Reconnect bookkeeping; -1 means "never connected yet"
    self.reconnect_interval = int(self.config['reconnect_interval'])
    self.last_connect_timestamp = -1

    self._connect()
def __init__(self, config=None):
    """
    Create a new instance of the HostedGraphiteHandler class
    """
    Handler.__init__(self, config)

    # API key that identifies the Hosted Graphite account
    self.key = self.config['apikey'].lower().strip()

    # Delegate actual transport to a wrapped GraphiteHandler
    self.graphite = GraphiteHandler(self.config)
def __init__(self, config=None):
    """Initialize the SignalFx handler: batching options and auth token."""
    Handler.__init__(self, config)
    self.metrics = []
    self.batch_size = int(self.config['batch'])
    self.url = self.config['url']
    self.auth_token = self.config['auth_token']
    # Fix: cast to int like every other interval option here — config
    # values arrive as strings, and the sibling SignalFx handler already
    # casts batch_max_interval (presumably it feeds timeout arithmetic
    # in resetBatchTimeout() — confirm against that method).
    self.batch_max_interval = int(self.config['batch_max_interval'])
    self.resetBatchTimeout()
    if self.auth_token == "":
        # NOTE(review): this message is misleading — the condition is an
        # empty auth token, not a module import failure.
        logging.error("Failed to load Signalfx module")
        return
def __init__(self, config=None):
    """
    New instance of LogentriesDiamondHandler class
    """
    Handler.__init__(self, config)
    self.log_token = self.config.get('log_token', None)
    self.queue_size = int(self.config['queue_size'])
    self.queue = deque([])
    if self.log_token is None:
        # Fix: raise with a message instead of a bare `raise Exception`
        # so the misconfiguration is diagnosable from the traceback.
        # Still plain Exception, so existing callers are unaffected.
        raise Exception('LogentriesDiamondHandler: log_token is required')
def __init__(self, config=None):
    """
    initialize Netuitive api and populate agent host metadata
    """

    # netuitive is optional; disable the handler when missing
    if not netuitive:
        self.log.error('netuitive import failed. Handler disabled')
        self.enabled = False
        return

    try:
        Handler.__init__(self, config)
        logging.debug("initialize Netuitive handler")
        self.version = self._get_version()
        self.api = netuitive.Client(self.config['url'],
                                    self.config['api_key'],
                                    self.version)
        self.element = netuitive.Element(
            location=self.config.get('location'))

        # Batching / backlog options
        self.batch_size = int(self.config['batch'])
        self.max_backlog_multiplier = int(
            self.config['max_backlog_multiplier'])
        self.trim_backlog_multiplier = int(
            self.config['trim_backlog_multiplier'])

        # Populate host metadata on the element
        self._add_sys_meta()
        self._add_aws_meta()
        self._add_docker_meta()
        self._add_azure_meta()
        self._add_config_tags()
        self._add_config_relations()
        self._add_collectors()
        self.flush_time = 0

        # A missing key just means the FQN feature is off.
        # (Unused exception variable `e` removed from the except.)
        try:
            self.config['write_metric_fqns'] = str_to_bool(
                self.config['write_metric_fqns'])
        except KeyError:
            self.log.warning('write_metric_fqns missing from the config')
            self.config['write_metric_fqns'] = False

        if self.config['write_metric_fqns']:
            self.metric_fqns_path = self.config['metric_fqns_path']
            # Truncate the FQN file at startup
            truncate_fqn_file = open(self.metric_fqns_path, "w")
            truncate_fqn_file.close()

        logging.debug(self.config)
    except Exception as e:
        # Fix: the outer `try:` had no matching except clause, which is a
        # SyntaxError as written. Mirror the sibling Netuitive handler:
        # log the failure and keep the agent running.
        logging.exception('NetuitiveHandler: init - %s', str(e))
def __init__(self, config=None): """ Create a new instance of the InfluxdbeHandler """ # Initialize Handler Handler.__init__(self, config) # Initialize Options if self.config['ssl'] == "True": self.ssl = True else: self.ssl = False self.hostname = self.config['hostname'] self.port = int(self.config['port']) self.username = self.config['username'] self.password = self.config['password'] self.database = self.config['database'] self.batch_size = int(self.config['batch_size']) self.metric_max_cache = int(self.config['cache_size']) self.batch_count = 0 self.time_precision = self.config['time_precision'] self.timeout = self.config['timeout'] self.influxdb_version = self.config['influxdb_version'] self.using_0_8 = False if self.influxdb_version in ['0.8', '.8']: if not InfluxDB08Client: self.log.error( 'influxdb.influxdb08.client.InfluxDBClient import failed. ' 'Handler disabled') self.enabled = False return else: self.client = InfluxDB08Client self.using_0_8 = True else: if not InfluxDBClient: self.log.error('influxdb.client.InfluxDBClient import failed. ' 'Handler disabled') self.enabled = False return else: self.client = InfluxDBClient # Initialize Data self.batch = {} self.influx = None self.batch_timestamp = time.time() self.time_multiplier = 1 # Connect self._connect()
def __init__(self, config=None):
    """
    Create a new instance of the StatsdHandler class
    """
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")

    # Options; batch defaults to 1 (no batching)
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.batch_size = int(self.config.get('batch', 1))

    # Per-metric state used elsewhere in the handler
    self.old_values = {}
    self.queues = {}
def __init__(self, config=None):
    """
    Create a new instance of the StatsdHandler class
    """
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")

    # Destination address
    self.host = self.config['host']
    self.port = int(self.config['port'])

    # Open the connection immediately
    self._connect()
def __init__(self, config=None):
    """
    Create a new instance of the LibratoHandler class
    """
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")

    # Initialize Options
    # Fix: the original read self.conf['user'] / self.conf['apikey'],
    # but the attribute set by Handler.__init__ is `config` (every
    # sibling handler uses self.config) — this raised AttributeError
    # at startup.
    api = librato.connect(self.config['user'], self.config['apikey'])
    self.queue = api.new_queue()
    self.batch_size = 300
    self.current_n_measurements = 0
def __init__(self, config=None):
    """
    @type config: configobj.ConfigObj
    """
    Handler.__init__(self, config)

    # Fix: guard the optional raven dependency (the sibling Sentry
    # handler does exactly this) instead of crashing below when the
    # import failed.
    if not raven:
        return

    # init sentry/raven
    self.sentry_log_handler = raven.handlers.logging.SentryHandler(
        self.config['dsn'])
    self.raven_logger = logging.getLogger(self.__class__.__name__)
    self.raven_logger.addHandler(self.sentry_log_handler)
    self.configure_sentry_errors()
    self.rules = self.compile_rules()
    self.hostname = get_hostname(self.config)
    if not len(self.rules):
        self.log.warning("No rules, this graphite handler is unused")
def __init__(self, config=None):
    """Initialize the handler and build a bernhard (Riemann) client."""
    # Initialize Handler
    Handler.__init__(self, config)

    # Fix: check the optional bernhard dependency before using it
    # (mirrors the sibling bernhard handler); otherwise the class
    # lookups below fail when the import did not succeed.
    if bernhard is None:
        logging.error("Failed to load bernhard module")
        return

    # Initialize options; transport defaults to tcp
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.transport = self.config.get('transport', 'tcp')

    # Initialize client over the chosen transport
    if self.transport == 'tcp':
        transportCls = bernhard.TCPTransport
    else:
        transportCls = bernhard.UDPTransport
    self.client = bernhard.Client(self.host, self.port, transportCls)
def __init__(self, config=None):
    """
    Create a new instance of the MultiGraphitePickleHandler class
    """
    Handler.__init__(self, config)
    self.handlers = []

    # Fan out: one GraphitePickleHandler per configured host, each with
    # its own deep-copied config pointing at a single host.
    for host in self.config['host']:
        per_host_config = deepcopy(self.config)
        per_host_config['host'] = host
        self.handlers.append(GraphitePickleHandler(per_host_config))
def __init__(self, config=None):
    """
    Create a new instance of the LibratoHandler class
    """
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")

    # Account credentials and API endpoint
    self.user = self.config['user']
    self.apikey = self.config['apikey']
    self.url = 'https://metrics-api.librato.com/v1/metrics'
    self.batch_size = 300

    # Pending measurements, bucketed the way the Librato API expects
    self.batch = {'counters': [], 'gauges': []}
def __init__(self, config=None):
    """
    New instance of DatadogHandler class
    """
    Handler.__init__(self, config)
    logging.debug("Initialized Datadog handler.")

    # dogapi is optional; without it the handler stays inert
    if dogapi is None:
        logging.error("Failed to load dogapi module.")
        return

    # Shared dogapi HTTP client, keyed by the configured API key
    self.api = dogapi.dog_http_api
    self.api.api_key = self.config.get('api_key', '')

    # Outgoing metric queue
    self.queue_size = self.config.get('queue_size', 1)
    self.queue = deque([])
def __init__(self, config=None):
    """
    Create a new instance of the StatsdHandler class
    """
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")

    # Options
    self.host = self.config["host"]
    self.port = int(self.config["port"])
    self.batch_size = int(self.config["batch"])

    # Pending metrics and previously seen values
    self.metrics = []
    self.old_values = {}

    self._connect()
def __init__(self, config=None): """ Create a new instance of cloudwatchHandler class """ # Initialize Handler Handler.__init__(self, config) if not boto: self.log.error( "CloudWatch: Boto is not installed, please install boto.") return # Initialize Data self.connection = None # Initialize Options self.region = self.config['region'] instance_metadata = boto.utils.get_instance_metadata() if 'instance-id' in instance_metadata: self.instance_id = instance_metadata['instance-id'] self.log.debug("Setting InstanceId: " + self.instance_id) else: self.instance_id = None self.log.error('CloudWatch: Failed to load instance metadata') self.valid_config = ('region', 'collector', 'metric', 'namespace', 'name', 'unit', 'collect_by_instance', 'collect_without_dimension') self.rules = [] for key_name, section in self.config.items(): if section.__class__ is Section: keys = section.keys() rules = self.get_default_rule_config() for key in keys: if key not in self.valid_config: self.log.warning("invalid key %s in section %s", key, section.name) else: rules[key] = section[key] self.rules.append(rules) # Create CloudWatch Connection self._bind()
def __init__(self, config=None):
    """Initialize the handler: batch/auth options and metric filters."""
    Handler.__init__(self, config)
    self.metrics = []
    self.filter_metrics = self.config["filter_metrics_regex"]
    self.batch_size = int(self.config['batch'])
    self.url = self.config['url']
    self.auth_token = self.config['auth_token']
    self.batch_max_interval = int(self.config['batch_max_interval'])
    self.resetBatchTimeout()

    # Each filter is "collector:metric_regex"; pre-compile the regexes
    self._compiled_filters = [
        (collector, re.compile(metric))
        for collector, metric in (f.split(":") for f in self.filter_metrics)
    ]

    if self.auth_token == "":
        logging.error("Failed to load Signalfx module")
        return
def __init__(self, config=None):
    """
    Create a new instance of the TSDBHandler class
    """
    Handler.__init__(self, config)

    # Socket is opened by _connect()
    self.socket = None

    # Connection options
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.timeout = int(self.config['timeout'])

    self._connect()
def __init__(self, config=None):
    """
    Create a new instance of the GraphiteHandler class
    """
    Handler.__init__(self, config)

    # Socket is opened by _connect()
    self.socket = None

    # Connection options
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.timeout = int(self.config['timeout'])

    self._connect()
def __init__(self, config=None):
    """
    @type config: configobj.ConfigObj
    """
    Handler.__init__(self, config)

    # raven is optional; without it this handler is a no-op
    if not raven:
        return

    # Wire a Sentry logging handler onto a logger named after this class
    self.sentry_log_handler = raven.handlers.logging.SentryHandler(
        self.config['dsn'])
    self.raven_logger = logging.getLogger(self.__class__.__name__)
    self.raven_logger.addHandler(self.sentry_log_handler)
    self.configure_sentry_errors()

    # Compile routing rules; warn when none matched the config
    self.rules = self.compile_rules()
    self.hostname = get_hostname(self.config)
    if not len(self.rules):
        self.log.warning("No rules, this graphite handler is unused")
def __init__(self, config):
    """
    Create a new instance of the ArchiveHandler class
    """
    Handler.__init__(self, config)

    # Dedicated 'archive' logger that records raw metric lines
    self.archive = logging.getLogger('archive')
    self.archive.setLevel(logging.DEBUG)

    # Messages are written verbatim, one metric per line
    log_formatter = logging.Formatter('%(message)s')

    # Rotate the archive at midnight, keeping config['days'] backups
    file_handler = logging.handlers.TimedRotatingFileHandler(
        self.config['log_file'], 'midnight', 1,
        backupCount=int(self.config['days']))
    file_handler.setFormatter(log_formatter)
    file_handler.setLevel(logging.DEBUG)
    self.archive.addHandler(file_handler)
def __init__(self, config=None):
    """
    Create a new instance of rmqHandler class
    """
    # Initialize Handler
    Handler.__init__(self, config)

    # Initialize Data
    self.connection = None
    self.channel = None

    # Initialize Options
    self.server = self.config['server']
    self.rmq_exchange = self.config['rmq_exchange']

    # Create rabbitMQ pub socket and bind.
    # Fix: a broker that is down raised AMQPConnectionError out of
    # __init__; log and continue like the sibling rmq handlers do.
    try:
        self._bind()
    except pika.exceptions.AMQPConnectionError:
        self.log.error('Failed to bind to rabbitMQ pub socket')
def __init__(self, config=None):
    """
    Create a new instance of zmqHandler class
    """
    Handler.__init__(self, config)

    # ZMQ context/socket are created by _bind()
    self.context = None
    self.socket = None

    # Options
    self.port = int(self.config['port'])

    # Create ZMQ pub socket and bind
    self._bind()
def __init__(self, config=None):
    """
    Create a new instance of zmqHandler class
    """
    # Initialize Handler
    # Fix: normalized the non-PEP8 spacing of the original
    # (`__init__( self, config=None )`, `int( ... )`) to match the
    # otherwise-identical sibling zmq handler; no behavior change.
    Handler.__init__(self, config)

    # Initialize Data
    self.context = None
    self.socket = None

    # Initialize Options
    self.port = int(self.config['port'])

    # Create ZMQ pub socket and bind
    self._bind()
def __init__(self, config=None):
    """Initialize the SignalFx handler: batching, auth, include filters."""
    Handler.__init__(self, config)
    self.metrics = []
    self.batch_size = int(self.config['batch'])
    self.url = self.config['url']
    self.auth_token = self.config['auth_token']
    # Fix: cast to int — the raw config value is a string; the sibling
    # SignalFx handler already casts batch_max_interval before it is
    # used by resetBatchTimeout().
    self.batch_max_interval = int(self.config['batch_max_interval'])
    self.resetBatchTimeout()

    # If a user leaves off the ending comma, cast to a array for them
    include_filters = self.config['include_filters']
    if isinstance(include_filters, basestring):
        include_filters = [include_filters]
    self.include_reg = re.compile(r'(?:%s)' % '|'.join(include_filters))

    if self.auth_token == "":
        # NOTE(review): misleading message — this is an empty auth
        # token, not a module load failure.
        logging.error("Failed to load Signalfx module")
        return
def __init__(self, config=None):
    """
    Create a new instance of the LibratoHandler class
    """
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")

    # Open the API queue; options fall back to inline defaults
    connection = librato.connect(self.config['user'],
                                 self.config['apikey'])
    self.queue = connection.new_queue()
    self.queue_max_size = int(self.config.get('queue_max_size', 300))
    self.queue_max_interval = int(
        self.config.get('queue_max_interval', 60))
    self.queue_max_timestamp = int(time.time() + self.queue_max_interval)
    self.current_n_measurements = 0

    # Default filter matches everything
    self.include_reg = re.compile(r'(?:%s)' % '|'.join(
        self.config.get('include_filters', ['^.*'])))
def __init__(self, config=None): """ Create a new instance of cloudwatchHandler class """ # Initialize Handler Handler.__init__(self, config) if not boto: self.log.error( "CloudWatch: Boto is not installed, please install boto.") return # Initialize Data self.connection = None # Initialize Options self.region = self.config['region'] instances = boto.utils.get_instance_metadata() if 'instance-id' not in instances: self.log.error('CloudWatch: Failed to load instance metadata') return self.instance_id = instances['instance-id'] self.log.debug("Setting InstanceID: " + self.instance_id) self.valid_config = ('region', 'collector', 'metric', 'namespace', 'name', 'unit') self.rules = [] for key_name, section in self.config.items(): if section.__class__ is Section: keys = section.keys() rules = {} for key in keys: if key not in self.valid_config: self.log.warning("invalid key %s in section %s", key, section.name) else: rules[key] = section[key] self.rules.append(rules) # Create CloudWatch Connection self._bind()
def __init__(self, config=None):
    """
    Create a new instance of the InfluxdbeHandler
    """
    # Initialize Handler
    Handler.__init__(self, config)

    # Initialize Options
    # Config values arrive as strings, hence the "True" comparison
    if self.config['ssl'] == "True":
        self.ssl = True
    else:
        self.ssl = False
    self.hostname = self.config['hostname']
    self.port = int(self.config['port'])
    self.username = self.config['username']
    self.password = self.config['password']
    self.database = self.config['database']
    self.batch_size = int(self.config['batch_size'])
    self.metric_max_cache = int(self.config['cache_size'])
    self.batch_count = 0
    self.time_precision = self.config['time_precision']
    self.influxdb_version = self.config['influxdb_version']

    # Initialize Data
    self.batch = {}
    self.influx = None
    self.batch_timestamp = time.time()
    self.time_multiplier = 1

    # Require only the client that matches the configured version.
    # Fix: the original's `elif not InfluxDBClient` also disabled the
    # handler when version == '0.8' and the 0.8 client WAS available
    # but the modern client was not, even though the modern client is
    # never needed in that configuration (compare the sibling handler
    # that selects the client per version).
    if self.influxdb_version == '0.8':
        if not InfluxDB08Client:
            self.log.error(
                'influxdb.influxdb08.client.InfluxDBClient import failed. '
                'Handler disabled')
            self.enabled = False
            return
    elif not InfluxDBClient:
        self.log.error('influxdb.client.InfluxDBClient import failed. '
                       'Handler disabled')
        self.enabled = False
        return

    # Connect
    self._connect()
def __init__(self, config=None):
    """
    Create a new instance of the GmetricHandler class
    """
    # Initialize Handler
    Handler.__init__(self, config)

    # Fix: gmetric is an optional dependency — mirror the sibling
    # Gmetric handler's guard instead of crashing at the constructor
    # call below when the import failed.
    if gmetric is None:
        logging.error("Failed to load gmetric module")
        return

    # Initialize Data
    self.socket = None

    # Initialize Options; protocol falls back to udp when unset
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.protocol = self.config['protocol']
    if not self.protocol:
        self.protocol = 'udp'

    # Initialize
    self.gmetric = gmetric.Gmetric(self.host, self.port, self.protocol)
def __init__(self, config=None): """ Create a new instance of the TSDBHandler class """ # Initialize Handler Handler.__init__(self, config) # Initialize Options # host self.host = str(self.config['host']) self.port = int(self.config['port']) self.timeout = int(self.config['timeout']) # Authorization self.user = str(self.config['user']) self.password = str(self.config['password']) # data self.batch = int(self.config['batch']) self.compression = int(self.config['compression']) # prefix if self.config['prefix'] != "": self.prefix = str(self.config['prefix']) + '.' else: self.prefix = "" # tags self.tags = [] pattern = re.compile(r'([a-zA-Z0-9]+)=([a-zA-Z0-9]+)') for (key, value) in re.findall(pattern, str(self.config['tags'])): self.tags.append([key, value]) # headers self.httpheader = {} self.httpheader["Content-Type"] = "application/json" # Authorization if self.user != "": self.httpheader["Authorization"] = "Basic " +\ base64.encodestring('%s:%s' % (self.user, self.password))[:-1] # compression if self.compression >= 1: self.httpheader["Content-Encoding"] = "gzip" self.entrys = [] self.skipAggregates = self.config['skipAggregates'] self.cleanMetrics = self.config['cleanMetrics']
def __init__(self, config=None):
    """Set up the handler and build a bernhard (Riemann) client."""
    Handler.__init__(self, config)

    # bernhard is optional; without it the handler stays inert
    if bernhard is None:
        logging.error("Failed to load bernhard module")
        return

    # Connection options
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.transport = self.config['transport']

    # Build the client over the configured transport
    if self.transport == 'tcp':
        transportCls = bernhard.TCPTransport
    else:
        transportCls = bernhard.UDPTransport
    self.client = bernhard.Client(self.host, self.port, transportCls)
def __init__(self, config=None):
    """
    Create a new instance of the TSDBHandler class
    """
    Handler.__init__(self, config)

    # Socket is opened by _connect()
    self.socket = None

    # Options
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.timeout = int(self.config['timeout'])
    self.metric_format = str(self.config['format'])
    self.tags = str(self.config['tags'])

    self._connect()
def __init__(self, config=None):
    """Set up the handler and connect a riemann_client Client."""
    Handler.__init__(self, config)

    # riemann_client is optional; without it the handler stays inert
    if riemann_client is None:
        logging.error("Failed to load riemann_client module")
        return

    # Options
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.transport = self.config['transport']

    # Replace the transport *name* with a concrete transport instance
    if self.transport == 'tcp':
        self.transport = TCPTransport(self.host, self.port)
    else:
        self.transport = UDPTransport(self.host, self.port)
    self.client = Client(self.transport)
    self._connect()
def __init__(self, config=None):
    """
    Create a new instance of the StatsdHandler class
    """
    # Initialize Handler
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")

    if not statsd:
        self.log.error('statsd import failed. Handler disabled')
        # Fix: actually disable the handler and stop — the original
        # only logged and fell through to _connect(), which crashed
        # without the statsd module (sibling statsd handler disables).
        self.enabled = False
        return

    # Initialize Options
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.batch_size = int(self.config['batch'])
    self.metrics = []
    self.old_values = {}

    # Connect
    self._connect()
def __init__(self, config=None):
    """
    Create a new instance of aggmonHandler class
    """
    Handler.__init__(self, config)

    # zmq is optional; disable the handler when missing
    if not zmq:
        self.log.error('zmq import failed. Handler disabled')
        self.enabled = False
        return

    # ZMQ context/socket are created later
    self.context = None
    self.socket = None

    # Options
    self.collector = self.config['collector']
def __init__(self, config=None):
    """
    Create a new instance of the MySQLHandler class
    """
    Handler.__init__(self, config)

    # Connection options
    self.hostname = self.config['hostname']
    self.port = int(self.config['port'])
    self.username = self.config['username']
    self.password = self.config['password']
    self.database = self.config['database']

    # Target table and column names
    self.table = self.config['table']
    self.col_time = self.config['col_time']
    self.col_metric = self.config['col_metric']
    self.col_value = self.config['col_value']

    self._connect()
def __init__(self, config):
    """
    Create a new instance of the ArchiveHandler class
    """
    Handler.__init__(self, config)

    # Dedicated 'archive' logger that records raw metric lines
    self.archive = logging.getLogger('archive')
    self.archive.setLevel(logging.DEBUG)

    # Verbatim message format
    log_formatter = logging.Formatter('%(message)s')

    # Nightly rotation, keeping config['days'] backups
    file_handler = logging.handlers.TimedRotatingFileHandler(
        self.config['log_file'], 'midnight', 1,
        backupCount=int(self.config['days']))
    file_handler.setFormatter(log_formatter)
    file_handler.setLevel(logging.DEBUG)
    self.archive.addHandler(file_handler)
def __init__(self, config=None):
    """
    Create a new instance of zmqHandler class
    """
    # Initialize Handler
    Handler.__init__(self, config)

    if not zmq:
        self.log.error('zmq import failed. Handler disabled')
        # Fix: disable and return — the original only logged and fell
        # through to _bind(), which crashed without zmq (the aggmon
        # handler guards exactly this way).
        self.enabled = False
        return

    # Initialize Data
    self.context = None
    self.socket = None

    # Initialize Options
    self.port = int(self.config['port'])

    # Create ZMQ pub socket and bind
    self._bind()
def __init__(self, config=None):
    """
    Create a new instance of rmqHandler class
    """
    Handler.__init__(self, config)

    # Connection state, populated by _bind()
    self.connection = None
    self.channel = None

    # Options
    self.server = self.config['server']
    self.rmq_exchange = self.config['rmq_exchange']

    # Declare the exchange; a down broker is logged, not fatal
    try:
        self._bind()
    except pika.exceptions.AMQPConnectionError:
        self.log.error('Failed to bind to rabbitMQ pub socket')
def __init__(self, config=None): """ Create a new instance of rmqHandler class """ # Initialize Handler Handler.__init__(self, config) if pika is None: self.log.error('pika import failed. Handler disabled') self.enabled = False return # Initialize Data self.connections = {} self.channels = {} self.reconnect_interval = 1 # Initialize Options tmp_rmq_server = self.config['rmq_server'] if type(tmp_rmq_server) is list: self.rmq_server = tmp_rmq_server else: self.rmq_server = [tmp_rmq_server] self.rmq_port = 5672 self.rmq_exchange = self.config['rmq_exchange'] self.rmq_user = None self.rmq_password = None self.rmq_vhost = '/' self.rmq_exchange_type = 'fanout' self.rmq_durable = True self.rmq_heartbeat_interval = 300 self.get_config() # Create rabbitMQ pub socket and bind try: self._bind_all() except pika.exceptions.AMQPConnectionError: self.log.error('Failed to bind to rabbitMQ pub socket')
def __init__(self, config=None):
    """
    initialize Netuitive api and populate agent host metadata
    """

    # netuitive is optional; disable the handler when missing
    if not netuitive:
        self.log.error('netuitive import failed. Handler disabled')
        self.enabled = False
        return

    # Any failure during setup is logged rather than propagated, so a
    # bad handler config cannot take the whole agent down.
    try:
        Handler.__init__(self, config)
        logging.debug("initialize Netuitive handler")
        self.version = self._get_version()
        self.api = netuitive.Client(self.config['url'],
                                    self.config['api_key'], self.version)
        self.element = netuitive.Element(
            location=self.config.get('location'))
        # Batching / backlog options
        self.batch_size = int(self.config['batch'])
        self.max_backlog_multiplier = int(
            self.config['max_backlog_multiplier'])
        self.trim_backlog_multiplier = int(
            self.config['trim_backlog_multiplier'])
        # Populate host metadata on the element
        self._add_sys_meta()
        self._add_aws_meta()
        self._add_docker_meta()
        self._add_config_tags()
        self._add_config_relations()
        logging.debug(self.config)
    except Exception as e:
        logging.exception('NetuitiveHandler: init - %s', str(e))
def __init__(self, config=None):
    """
    Create a new instance of the LibratoHandler class
    """
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")

    # Open the API queue; options fall back to inline defaults
    connection = librato.connect(self.config['user'],
                                 self.config['apikey'])
    self.queue = connection.new_queue()
    self.queue_max_size = int(self.config.get('queue_max_size', 300))
    self.queue_max_interval = int(
        self.config.get('queue_max_interval', 60))
    self.queue_max_timestamp = int(time.time() + self.queue_max_interval)
    self.current_n_measurements = 0

    # A bare string filter becomes a one-element list
    filters = self.config.get('include_filters', ['^.*'])
    if isinstance(filters, basestring):
        filters = [filters]
    self.include_reg = re.compile(r'(?:%s)' % '|'.join(filters))
def __init__(self, config):
    """
    Create a new instance of the ArchiveHandler class
    """
    Handler.__init__(self, config)

    # Dedicated archive logger; propagation is configurable
    self.archive = logging.getLogger('archive')
    self.archive.setLevel(logging.DEBUG)
    self.archive.propagate = self.config['propagate']

    # Verbatim message format
    log_formatter = logging.Formatter('%(message)s')

    # Time-based rotation, fully driven by config
    file_handler = logging.handlers.TimedRotatingFileHandler(
        filename=self.config['log_file'],
        when=self.config['when'],
        interval=int(self.config['rollover_interval']),
        backupCount=int(self.config['days']),
        encoding=self.config['encoding'])
    file_handler.setFormatter(log_formatter)
    file_handler.setLevel(logging.DEBUG)
    self.archive.addHandler(file_handler)
def __init__(self, config=None):
    """
    Create a new instance of the GmetricHandler class
    """
    # Initialize Handler
    Handler.__init__(self, config)

    # Bail out early when the gmetric library is unavailable.
    if gmetric is None:
        logging.error("Failed to load gmetric module")
        return

    # Initialize Data
    self.socket = None

    # Initialize Options ('udp' is the fallback when protocol is unset)
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.protocol = self.config['protocol'] or 'udp'

    # Initialize the ganglia client itself
    self.gmetric = gmetric.Gmetric(self.host, self.port, self.protocol)
def __init__(self, config=None):
    """
    Create a new instance of the OpenTSDBHandler class
    """
    # Initialize Handler
    Handler.__init__(self, config)

    # Initialize Options
    self.timeout = int(self.config['timeout'])
    self.batchsize = int(self.config['batchsize'])

    def as_list(value):
        # Config values may arrive as a bare string; normalise to a list.
        return [value] if isinstance(value, basestring) else value

    servers = as_list(self.config['servers'])

    # Static tags: "key=value" entries become a dict; entries whose '='
    # is missing or at position 0 are dropped.
    self.tags = dict(tuple(t.split("="))
                     for t in as_list(self.config['tags'])
                     if t.find("=") > 0)

    # Compiled patterns used to pull tags out of metric names.
    self.tagsinmetric = [re.compile(p)
                         for p in as_list(self.config['tagsinmetric'])]

    self.session = requests.Session()
    self.endpoints = ["http://%s/api/put" % h for h in servers]
    # Select one at random to be the main server
    self.mainep = random.randrange(len(self.endpoints))
    self.batch = []
def __init__(self, config=None):
    """
    Create a new instance of the GraphiteHandler class
    """
    # Initialize Handler
    Handler.__init__(self, config)

    # Initialize Data
    self.socket = None
    self.metrics = []

    # Initialize Options (each with a sensible default when unset)
    cfg = self.config
    self.proto = cfg.get('proto', 'tcp').lower().strip()
    self.host = cfg['host']
    self.port = int(cfg.get('port', 2003))
    self.timeout = int(cfg.get('timeout', 15))
    self.batch_size = int(cfg.get('batch', 1))
    self.max_backlog_multiplier = int(cfg.get('max_backlog_multiplier', 5))
    self.trim_backlog_multiplier = int(cfg.get('trim_backlog_multiplier', 4))

    # Connect
    self._connect()
def __init__(self):
    """
    Create a new handler instance with an empty output buffer.
    """
    Handler.__init__(self)
    # Accumulates rendered output between flushes.
    self.content = ''
def __init__(self, config=None):
    """
    Create a new instance of the MQTTHandler class

    Reads connection options from self.config, sets up an (optionally
    TLS-secured) mosquitto client, registers a last-will message and
    connects to the broker.
    """
    # Initialize Handler
    Handler.__init__(self, config)

    # Initialize Data
    self.mqttc = None
    self.hostname = get_hostname(self.config)
    self.client_id = "%s_%s" % (self.hostname, os.getpid())

    # Initialize Options
    self.host = self.config.get('host', 'localhost')
    self.port = 0
    self.qos = int(self.config.get('qos', 0))
    self.prefix = self.config.get('prefix', "")
    self.tls = self.config.get('tls', False)

    # NOTE(review): the truthy->0 / falsy->1 mapping below looks inverted
    # but is preserved as-is; confirm the intended semantics of the
    # 'timestamp' option before changing it.
    self.timestamp = 0
    try:
        self.timestamp = self.config['timestamp']
        if not self.timestamp:
            self.timestamp = 1
        else:
            self.timestamp = 0
    except KeyError:
        # BUG FIX: was a bare except; only the missing-key case is expected
        # here. Option absent from config: default to 1.
        self.timestamp = 1

    # Initialize
    self.mqttc = mosquitto.Mosquitto(self.client_id, clean_session=True)
    if not self.tls:
        self.port = int(self.config.get('port', 1883))
    else:
        # Set up TLS if requested
        self.port = int(self.config.get('port', 8883))
        self.cafile = self.config.get('cafile', None)
        self.certfile = self.config.get('certfile', None)
        self.keyfile = self.config.get('keyfile', None)
        if (self.cafile is None or
                self.certfile is None or
                self.keyfile is None):
            self.log.error("MQTTHandler: TLS configuration missing.")
            return
        try:
            # BUG FIX: tls_version previously passed the magic int 3; use
            # the named ssl constant (same underlying value) for clarity.
            self.mqttc.tls_set(self.cafile,
                               certfile=self.certfile,
                               keyfile=self.keyfile,
                               cert_reqs=ssl.CERT_REQUIRED,
                               tls_version=ssl.PROTOCOL_TLSv1,
                               ciphers=None)
        except Exception:
            # Narrowed from a bare except: still best-effort, but no
            # longer swallows SystemExit/KeyboardInterrupt.
            self.log.error("MQTTHandler: Cannot set up TLS " +
                           "configuration. Files missing?")

    # Last-will message so subscribers can detect an unclean disconnect.
    self.mqttc.will_set("clients/diamond/%s" % (self.hostname),
                        payload="Adios!", qos=0, retain=False)
    self.mqttc.connect(self.host, self.port, 60)
    self.mqttc.on_disconnect = self._disconnect
def __init__(self, config=None):
    """
    Create a new handler: an empty metric buffer plus batching options.
    """
    Handler.__init__(self, config)
    # Metrics accumulated since the last flush.
    self.metrics = []
    # Number of metrics to collect before sending a batch.
    self.batch_size = int(self.config['batch'])
    # Destination endpoint; None when not configured.
    self.url = self.config.get('url')
def __init__(self, config=None, queue=None, log=None):
    """
    Create a new queue-backed handler instance.
    """
    # Initialize Handler
    Handler.__init__(self, config=config, log=log)
    # Shared queue that metrics are handed off to.
    self.queue = queue
    # Local buffer of metrics.
    self.metrics = []
def __init__(self, config=None): """ Create a new instance of the InfluxdbeHandler """ # Initialize Handler Handler.__init__(self, config) # Initialize Options if self.config['ssl'] == "True": self.ssl = True else: self.ssl = False self.hostname = self.config['hostname'] self.port = int(self.config['port']) self.username = self.config['username'] self.password = self.config['password'] self.database = self.config['database'] self.batch_size = int(self.config['batch_size']) self.metric_max_cache = int(self.config['cache_size']) self.batch_count = 0 self.time_precision = self.config['time_precision'] self.timeout = int(self.config['timeout']) self.retries = int(self.config['retries']) self.influxdb_version = self.config['influxdb_version'] self.tags = self.config['tags'] self.reconnect = int(self.config['reconnect_interval']) try: self.dimensions = json.loads(self.config['dimensions']) except Exception: self._throttle_error( "InfluxDBHandler ERROR - Invalid dimensions JSON in config") sys.exit(1) self.merge_delimiter = self.config['merge_delimiter'] self.blacklisted = self.config['blacklisted'] self.blacklisted_prefix = self.config['blacklisted_prefix'] self.using_0_8 = False if self.influxdb_version in ['0.8', '.8']: if not InfluxDB08Client: self.log.error( 'influxdb.influxdb08.client.InfluxDBClient import failed. ' 'Handler disabled') self.enabled = False return else: self.client = InfluxDB08Client self.using_0_8 = True else: if not InfluxDBClient: self.log.error('influxdb.client.InfluxDBClient import failed. ' 'Handler disabled') self.enabled = False return else: self.client = InfluxDBClient # Initialize Data self.batch = {} self.influx = None self.batch_timestamp = time.time() self.time_multiplier = 1 # Set send_count for reconnect self.send_count = 0 # Connect self._connect()