Beispiel #1
0
def _handle_config_changes():
    """Re-read the modinput configuration from splunkd.

    Shells out to ``$SPLUNK_HOME/bin/splunk cmd splunkd
    print-modinput-config vnx_data_loader`` and parses the stdout.

    Returns the parsed config list, or None when the command reported an
    error on stderr or parsing failed. On success, re-applies the log
    level from the first stanza (defaulting to "INFO").
    """
    cmd = (_splunk_cli, "cmd", "splunkd",
           "print-modinput-config", "vnx_data_loader")
    try:
        output = Popen(cmd, stdout=PIPE, stderr=PIPE).communicate()
        if output[1]:
            # Anything on stderr is treated as a fatal config-fetch failure.
            # Lazy %s args: the message is only formatted if actually logged.
            _logger.error("Failed to get modinput config: %s", output[1])
            return None

        configs = conf.parse_configs(output[0])
    except Exception:
        # Original code bound the exception but never logged it, losing the
        # traceback; logger.exception records the full stack for diagnosis.
        _logger.exception("Failed to parse configs")
        return None
    else:
        if configs:
            conf.setup_logging(LOG_FILE, configs[0].get("loglevel", "INFO"))
        return configs
Beispiel #2
0
def run():
    """Entry point: apply the configured log level, then run the loader.

    Rebinds the module-level ``_logger`` when a non-default level is
    configured, wires the config-change handler, and blocks in the
    global data loader's run loop.
    """
    global _logger

    configs = get_config(_logger)

    # Only rebuild the logger when a non-default level was requested.
    if configs:
        level = configs[0]["loglevel"]
        if level != "INFO":
            _logger = conf.setup_logging(LOG_FILE, level)

    loader = dl.GlobalDataLoader.get_data_loader(configs)
    loader.register_config_change_handler(_handle_config_changes)
    loader.run()
Beispiel #3
0
import json
from itertools import izip
import traceback
import threading
import Queue
import socket

from rauth import OAuth1Service
from rauth import OAuth1Session

import configure


_LOGGER = configure.setup_logging("cubesensor")


class CubeSensorServer(object):
    def __init__(self, ip, port, consumer_key, consumer_secret):
        """Bind a listening TCP socket on (ip, port) and prepare — but do
        not start — the request-handling worker thread.
        """
        # OAuth credentials, held for later use by the rauth session/service.
        self.consumer_key = consumer_key
        self.consumer_secret = consumer_secret
        # SO_REUSEADDR lets the server rebind immediately after a restart
        # instead of waiting out the previous socket's TIME_WAIT.
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.sock.bind((ip, port))
        self.sock.listen(10)  # backlog of up to 10 pending connections
        # Worker thread servicing requests; created here, started elsewhere
        # (presumably by start() — its body is not visible in this chunk).
        self.thr = threading.Thread(target=self._do_request)
        self.state = "new"  # lifecycle flag, checked by start()
        self.clients = {}   # NOTE(review): client registry — key type not evident here
        self.wakeup_q = Queue.Queue()  # queue used to signal/wake the worker

    def start(self):
        if self.state == "new":
Beispiel #4
0
#!/usr/bin/python


import multiprocessing
import threading
import Queue
import logging
import os
import os.path as op

import configure as conf
_logger = conf.setup_logging("data_loader")

import thread_pool as tp
import job_scheduler as js
import job_factory as jf
import timer_queue as tq


def get_supported_metric_types(platform):
    """Module-level convenience wrapper: delegate the metric-type lookup
    for *platform* to the job factory.
    """
    factory = jf.JobFactory
    return factory.get_supported_metric_types(platform)


class DataLoader(object):
    # Coordinates event collection: owns the queues and a worker thread pool.

    def __init__(self, configs):
        """Set up event/wakeup queues and a thread pool sized from *configs*."""
        # Queue of collected events awaiting processing.
        self.event_queue = Queue.Queue()
        # Queue used to wake/interrupt the loader (e.g. for shutdown).
        self.wakeup_queue = Queue.Queue()
        # Helpers defined elsewhere in this class — bodies not visible here;
        # presumably they read queue/pool settings out of the config stanzas.
        self._set_event_queue(configs)
        pool_size = self._get_pool_size(configs)
        self.thr_pool = tp.ThreadPool(pool_size)
Beispiel #5
0
#!/usr/bin/python


import sys
import os
import os.path as op
import logging
from subprocess import Popen, PIPE

import data_loader as dl
import configure as conf


LOG_FILE = "ta_vnx"
_logger = conf.setup_logging(LOG_FILE)
_splunk_cli = op.join(os.environ["SPLUNK_HOME"], "bin", "splunk")


def do_scheme():
    print """
    <scheme>
    <title>EMC TA</title>
    <description>EMC TA</description>
    <use_external_validation>true</use_external_validation>
    <streaming_mode>xml</streaming_mode>
    <use_single_instance>true</use_single_instance>
    <endpoint>
      <args>
        <arg name="name">
          <title>Unique stanza name for differentiation</title>
        </arg>