Example #1
import threading
from subprocess import Popen, PIPE
import time, datetime
import shlex
import uuid
import json
import collections
import urllib2, urllib
from os.path import expanduser

from reporting.parsers import MatchParser, SplitParser, DummyParser, JsonGrepParser
from reporting.utilities import getLogger, get_hostname, init_object
from reporting.exceptions import PluginInitialisationError, RemoteServerError
from reporting.crontab import CronEvent

log = getLogger(__name__)

class IDataSource(object):
    """Interface for anything that can supply data to a collector."""
    def get_data(self, **kwargs):
        assert 0, "This method must be defined."

class CommandRunner(IDataSource):
    """Data source that runs a shell command and returns its stdout."""
    def __init__(self, cmd):
        self.__cmd = cmd
    def get_data(self, **kwargs):
        log.debug("running cmd %s" % self.__cmd)
        # split the command safely, run it and capture standard output
        process = Popen(shlex.split(self.__cmd), stdout=PIPE)
        pipe = process.stdout
        output = ''.join(pipe.readlines()).strip()
        process.wait()
        return output
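
# A minimal usage sketch, assuming a command such as `uptime` is on the PATH;
# the command string is arbitrary and only for illustration. get_data() splits
# it with shlex, runs it and returns the stripped stdout.
if __name__ == '__main__':
    runner = CommandRunner('uptime')
    print(runner.get_data())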
Example #2
import getopt
from getopt import GetoptError

from daemon import Daemon
from reporting.utilities import getLogger, excepthook, get_log_level, set_global, init_object
from reporting.__version__ import version
from reporting.outputs import KafkaHTTPOutput, BufferOutput, FileOutput, BufferThread, AWSOutput
from reporting.pusher import Pusher
from reporting.collectors import Collector
from reporting.tailer import Tailer
from reporting.admin import AsyncServer
from reporting.exceptions import AsyncServerException
from __builtin__ import True

log = getLogger("producer")


class ProducerDaemon(Daemon):
    def __init__(self,
                 pidfile,
                 socketfile,
                 config,
                 stdin='/dev/null',
                 stdout='/dev/null',
                 stderr='/dev/null'):
        Daemon.__init__(self, pidfile, stdin, stdout, stderr)
        self.__running = True
        self.__socket_file = socketfile
        self.config = config
        self.__outputs = {}
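
# A launch sketch, not taken from the project: it assumes the Daemon base
# class follows the usual start()/run() recipe and that `config` is an
# already-parsed dict. Option letters and default paths are hypothetical.
def main(argv):
    pidfile, socketfile, config = '/tmp/producer.pid', '/tmp/producer.sock', {}
    try:
        opts, _ = getopt.getopt(argv, 'p:s:')
    except GetoptError as e:
        log.error(e)
        return 2
    for opt, value in opts:
        if opt == '-p':
            pidfile = value
        elif opt == '-s':
            socketfile = value
    daemon = ProducerDaemon(pidfile, socketfile, config)
    daemon.start()  # assumed to daemonise and then invoke run()
    return 0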
Example #3
#!/usr/bin/env python

# pylint: disable=broad-except

from reporting.parsers import IParser
from reporting.collectors import IDataSource
from reporting.utilities import getLogger, list_to_dict, get_hostname
from reporting.outputs import IOutput
import json
import time
import os

log = getLogger(__name__)

class MomLogParser(IParser):
    """Parses a semicolon-delimited PBS mom log line into a dict."""
    def parse(self, data):
        # fields are separated by ";"; lower-case and strip each one
        tokens = [x.lower().strip() for x in data.split(";")]
        data = {}  # reuse the name for the result record
        # tokens[0] is the log timestamp in "%m/%d/%Y %H:%M:%S" form
        data["timestamp"] = int(time.mktime(time.strptime(tokens[0], "%m/%d/%Y %H:%M:%S")))
        data["hostname"] = get_hostname()

        # tokens[3] says whether the entry concerns the server or a job
        event_type = tokens[3]
        data['event_type'] = event_type

        if event_type == "svr":
            data['svr_type'] = tokens[4]
            data['event_description'] = tokens[5]
        elif event_type == "job":
            data['jobid'] = tokens[4]
            data['event_description'] = tokens[5]
            if data['jobid'] == "tmomfinalizejob3":
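
# Illustration of the tokenising step above on a made-up mom log line; the
# line content is hypothetical and only shows which index holds which field.
sample = "05/12/2015 10:15:30;0008;pbs_mom;Job;1234.server;Exit_status=0"
tokens = [x.lower().strip() for x in sample.split(";")]
# tokens[0] -> "05/12/2015 10:15:30"   (parsed with time.strptime above)
# tokens[3] -> "job"                   (event_type)
# tokens[4] -> "1234.server"           (jobid)
# tokens[5] -> "exit_status=0"         (event_description)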
Example #4
import getopt
from getopt import GetoptError

from daemon import Daemon
from reporting.utilities import getLogger, excepthook, get_log_level, set_global, init_object
from reporting.__version__ import version
from reporting.outputs import KafkaHTTPOutput, BufferOutput, FileOutput, BufferThread
from reporting.pusher import Pusher
from reporting.collectors import Collector
from reporting.tailer import Tailer
from reporting.admin import AsyncServer
from reporting.exceptions import AsyncServerException
from __builtin__ import True

log = getLogger("producer")

class ProducerDaemon(Daemon):
    def __init__(self, pidfile, socketfile, config, stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
        Daemon.__init__(self, pidfile, stdin, stdout, stderr)
        self.__running = True
        self.__socket_file = socketfile
        self.config = config
        self.__outputs = {}
        self.__pusher_pid = -1
        self.__tailer = None
        self.__buffer_thread = None
        self.__collectors = []
        self.__asyncServer = AsyncServer(self)

    def __sigTERMhandler(self, signum, frame):
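
# Sketch of how a SIGTERM handler like the one above is normally registered;
# this is the generic stdlib pattern, not code lifted from the project, and
# the handler body here is a stand-in.
import signal

def _example_sigterm_handler(signum, frame):
    log.info("caught signal %d, shutting down" % signum)

signal.signal(signal.SIGTERM, _example_sigterm_handler)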