Example #1
    def __init__(self, time, sample=None, interruptcatcher=None):
        # Logger already setup by config, just get an instance
        logobj = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        if sample is None:
            adapter = EventgenAdapter(logobj, {
                'module': 'Timer',
                'sample': 'null'
            })
        else:
            adapter = EventgenAdapter(logobj, {
                'module': 'Timer',
                'sample': sample.name
            })
        self.logger = adapter

        globals()['c'] = Config()

        self.logger.debug('Initializing timer for %s' %
                          (sample.name if sample is not None else "None"))

        self.time = time
        self.stopping = False
        self.interruptcatcher = interruptcatcher
        self.countdown = 0

        self.sample = sample
        if self.sample is not None:
            self.rater = c.getPlugin('rater.' + self.sample.rater)(self.sample)
        threading.Thread.__init__(self)
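
Every example below pulls EventgenAdapter from eventgenconfig to stamp log records with the originating module and sample. Its implementation is not shown here; a minimal sketch of what such an adapter usually looks like, assuming it is a logging.LoggerAdapter whose process() prepends the context from the extra dict (the class name and message format below are illustrative, not eventgen's actual code):

import logging

class ContextAdapter(logging.LoggerAdapter):
    # Hypothetical stand-in for eventgenconfig.EventgenAdapter: prefix each
    # message with the module/sample context supplied at construction time.
    def process(self, msg, kwargs):
        prefix = "module='%s' sample='%s'" % (self.extra['module'],
                                              self.extra['sample'])
        return '%s %s' % (prefix, msg), kwargs

logging.basicConfig(level=logging.DEBUG)
log = ContextAdapter(logging.getLogger('eventgen'),
                     {'module': 'Timer', 'sample': 'null'})
log.debug('Initializing timer')
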
Example #2
    def __init__(self, sample):
        GeneratorPlugin.__init__(self, sample)

        # Logger already setup by config, just get an instance
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {'module': 'WeblogGenerator', 'sample': sample.name})
        globals()['logger'] = adapter

        from eventgenconfig import Config
        globals()['c'] = Config()

        with open('tests/perf/weblog/external_ips.sample') as f:
            self.external_ips = [x.strip() for x in f.readlines()]
        self.external_ips_len = len(self.external_ips)

        with open('tests/perf/weblog/webhosts.sample') as f:
            self.webhosts = [x.strip() for x in f.readlines()]
        self.webhosts_len = len(self.webhosts)

        with open('tests/perf/weblog/useragents.sample') as f:
            self.useragents = [x.strip() for x in f.readlines()]
        self.useragents_len = len(self.useragents)

        with open('tests/perf/weblog/webserverstatus.sample') as f:
            self.webserverstatus = [x.strip() for x in f.readlines()]
        self.webserverstatus_len = len(self.webserverstatus)
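
The gen() method that turns these lists into events is not part of this snippet. Purely as an illustration of how such pre-loaded samples are typically combined, a rough sketch that formats an Apache-style access log line (the field layout is invented for this example, not taken from the plugin):

import datetime
import random

def make_weblog_line(external_ips, webhosts, useragents, statuses):
    # Illustrative only: sample one value from each pre-loaded list and
    # format something resembling an Apache combined log record.
    ts = datetime.datetime.now().strftime('%d/%b/%Y:%H:%M:%S')
    return '%s - - [%s] "GET / HTTP/1.1" %s %d "http://%s/" "%s"' % (
        random.choice(external_ips), ts, random.choice(statuses),
        random.randint(200, 20000), random.choice(webhosts),
        random.choice(useragents))
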
Example #3
    def __init__(self, sample):
        OutputPlugin.__init__(self, sample)

        # Logger already setup by config, just get an instance
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {'module': 'SplunkStreamOutputPlugin', 'sample': sample.name})
        globals()['logger'] = adapter

        from eventgenconfig import Config
        globals()['c'] = Config()

        self._splunkUrl, self._splunkMethod, self._splunkHost, self._splunkPort = c.getSplunkUrl(self._sample)
        self._splunkUser = self._sample.splunkUser
        self._splunkPass = self._sample.splunkPass
            
        if self._sample.sessionKey is None:
            try:
                myhttp = httplib2.Http(disable_ssl_certificate_validation=True)
                logger.debugv("Getting session key from '%s' with user '%s' and pass '%s'" % (self._splunkUrl + '/services/auth/login', self._splunkUser, self._splunkPass))
                response = myhttp.request(self._splunkUrl + '/services/auth/login', 'POST',
                                            headers = {}, body=urllib.urlencode({'username': self._splunkUser, 
                                                                                'password': self._splunkPass}))[1]
                self._sample.sessionKey = minidom.parseString(response).getElementsByTagName('sessionKey')[0].childNodes[0].nodeValue
                logger.debug("Got new session for splunkstream, sessionKey '%s'" % self._sample.sessionKey)
            except Exception:
                logger.error("Error getting session key for non-SPLUNK_EMBEDDED for sample '%s'.  Credentials are missing or wrong" % self._sample.name)
                raise IOError("Error getting session key for non-SPLUNK_EMBEDDED for sample '%s'.  Credentials are missing or wrong" % self._sample.name)
                
        logger.debug("Retrieved session key '%s' for Splunk session for sample %s'" % (self._sample.sessionKey, self._sample.name))   
Example #4
    def __init__(self, sample):
        OutputPlugin.__init__(self, sample)

        #disable any "requests" warnings
        requests.packages.urllib3.disable_warnings()
        #Setup loggers from the root eventgen
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {
            'module': 'BattlecatOutputPlugin',
            'sample': sample.name
        })
        globals()['logger'] = adapter

        from eventgenconfig import Config
        globals()['c'] = Config()

        #Bind passed in samples to the outputter.
        if not hasattr(sample, 'battlecatServers'):
            logger.error(
                'outputMode battlecat but battlecatServers not specified for sample %s'
                % self._sample.name)
            raise ValueError(
                'outputMode battlecat but battlecatServers not specified for sample %s'
                % self._sample.name)
        self.battlecatServers = sample.battlecatServers
        logger.debug("Setting up the connection pool for %s in %s" %
                     (self._sample.name, self._app))
        self.createConnections()
        logger.debug("Pool created.")
Example #5
    def __init__(self, sample):
        self.__plugins = {}

        # Logger already setup by config, just get an instance
        logobj = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logobj, {
            'module': 'Output',
            'sample': sample.name
        })
        globals()['logger'] = adapter

        from eventgenconfig import Config
        globals()['c'] = Config()
        self._app = sample.app
        self._sample = sample
        self._outputMode = sample.outputMode

        self._queue = deque([])
        self._workers = []

        if self._sample.maxQueueLength == 0:
            self.MAXQUEUELENGTH = c.getPlugin(self._sample.name).MAXQUEUELENGTH
        else:
            self.MAXQUEUELENGTH = self._sample.maxQueueLength
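
MAXQUEUELENGTH controls how many events this Output instance buffers before they are handed to the configured output plugin; the flush path itself is not part of this snippet. A minimal sketch of that buffer-then-flush pattern, assuming a callable does the actual sending (names here are hypothetical):

from collections import deque

class BufferedOutput(object):
    # Illustrative buffering pattern: queue events, flush in batches.
    def __init__(self, flush_fn, max_queue_length=10):
        self._flush_fn = flush_fn
        self._max_queue_length = max_queue_length
        self._queue = deque()

    def send(self, event):
        self._queue.append(event)
        if len(self._queue) >= self._max_queue_length:
            self.flush()

    def flush(self):
        if self._queue:
            self._flush_fn(list(self._queue))
            self._queue.clear()
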
Example #6
    def __init__(self, sample):
        OutputPlugin.__init__(self, sample)

        #disable any "requests" warnings
        requests.packages.urllib3.disable_warnings()
        # set default output mode to round robin
        #Setup loggers from the root eventgen
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {'module': 'HTTPEventOutputPlugin', 'sample': sample.name})
        globals()['logger'] = adapter

        from eventgenconfig import Config
        globals()['c'] = Config()

        #Bind passed in samples to the outputter.
        logger.debug("Outputmode: %s" % sample.httpeventOutputMode)
        self.lastsourcetype = None
        try:
            if not hasattr(sample, 'httpeventServers'):
                logger.error('outputMode httpevent but httpeventServers not specified for sample %s' % self._sample.name)
                raise NoServers('outputMode httpevent but httpeventServers not specified for sample %s' % self._sample.name)
            self.httpeventoutputmode = sample.httpeventOutputMode if hasattr(sample, 'httpeventOutputMode') and sample.httpeventOutputMode else 'roundrobin'
            self.httpeventmaxsize = sample.httpeventMaxPayloadSize if hasattr(sample, 'httpeventMaxPayloadSize') and sample.httpeventMaxPayloadSize else 10000
            logger.debug("Currentmax size: %s " % self.httpeventmaxsize)
            self.httpeventServers = sample.httpeventServers
            logger.debug("Setting up the connection pool for %s in %s" % (self._sample.name, self._app))
            self.createConnections()
            logger.debug("Pool created.")
            logger.debug("Finished init of httpevent plugin.")
        except Exception as e:
            logger.exception(e)
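
httpeventMaxPayloadSize caps how much data goes into a single POST to the HTTP event endpoint; the batching itself happens later in the plugin and is not shown here. A generic sketch of the packing technique (not the plugin's actual code):

def batch_events(serialized_events, max_payload_size=10000):
    # Greedily pack serialized events into payloads no larger than the cap.
    batches, current, current_size = [], [], 0
    for event in serialized_events:
        if current and current_size + len(event) > max_payload_size:
            batches.append(''.join(current))
            current, current_size = [], 0
        current.append(event)
        current_size += len(event)
    if current:
        batches.append(''.join(current))
    return batches
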
Example #7
    def __init__(self, sample):
        OutputPlugin.__init__(self, sample)

        # Logger already setup by config, just get an instance
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {
            'module': 'FileOutputPlugin',
            'sample': sample.name
        })
        globals()['logger'] = adapter

        from eventgenconfig import Config
        globals()['c'] = Config()

        if sample.fileName is None:
            logger.error(
                'outputMode file but file not specified for sample %s' %
                self._sample.name)
            raise ValueError(
                'outputMode file but file not specified for sample %s' %
                self._sample.name)

        self._file = sample.pathParser(sample.fileName)
        self._fileMaxBytes = sample.fileMaxBytes
        self._fileBackupFiles = sample.fileBackupFiles

        self._fileHandle = open(self._file, 'a')
        self._fileLength = os.stat(self._file).st_size
        logger.debug("Configured to log to '%s' with maxBytes '%s' with backupCount '%s'" % \
                        (self._file, self._fileMaxBytes, self._fileBackupFiles))
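
fileMaxBytes and fileBackupFiles imply size-based rotation along the lines of logging.handlers.RotatingFileHandler, but the rollover code is not included in this snippet. A minimal sketch of that technique, assuming the plugin rolls the file over once the tracked length exceeds the limit (function name and return convention are illustrative):

import os

def rotate_if_needed(path, handle, length, max_bytes, backup_count):
    # Roll file -> file.1 -> file.2 ... once max_bytes has been written.
    if length < max_bytes:
        return handle, length
    handle.close()
    for i in range(backup_count - 1, 0, -1):
        src, dst = '%s.%d' % (path, i), '%s.%d' % (path, i + 1)
        if os.path.exists(src):
            if os.path.exists(dst):
                os.remove(dst)
            os.rename(src, dst)
    if backup_count > 0 and os.path.exists(path):
        if os.path.exists(path + '.1'):
            os.remove(path + '.1')
        os.rename(path, path + '.1')
    return open(path, 'a'), 0
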
Example #8
    def __init__(self, sample):
        GeneratorPlugin.__init__(self, sample)

        self._sample = sample

        # Logger already setup by config, just get an instance
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {'module': 'ReplayGenerator', 'sample': sample.name})
        globals()['logger'] = adapter

        from eventgenconfig import Config
        globals()['c'] = Config()

        self._currentevent = 0
        self._timeSinceSleep = datetime.timedelta()
        self._times = [ ]

        s = self._sample

        # Load sample from a file, using cache if possible, from superclass GeneratorPlugin
        s.loadSample()
        self._rpevents = s.sampleDict
        self._currentevent = 0

        # 8/18/15 CS Because this is not a queueable plugin, we can in a threadsafe way modify these data structures at init
        # Iterate through events and remove any events which do not match a configured timestamp,
        # log it and then continue on
        for e in self._rpevents:
            try:
                s.getTSFromEvent(e[s.timeField])
            except ValueError:
                self._rpevents = [x for x in self._rpevents if x['_raw'] != e['_raw']]

        # Quick check to see if we're sorted in time order, if not reverse
        if len(self._rpevents) > 1:
            ts1 = s.getTSFromEvent(self._rpevents[0][s.timeField])
            ts2 = s.getTSFromEvent(self._rpevents[1][s.timeField])
            td = ts2 - ts1
            x = 2
            # Make sure we're not all zero
            while td.days == 0 and td.seconds == 0 and td.microseconds == 0 and x < len(self._rpevents):
                ts2 = s.getTSFromEvent(self._rpevents[x][s.timeField])
                td = ts2 - ts1
                x += 1

            self.logger.debug("Testing timestamps ts1: %s ts2: %s" % (ts1.strftime('%Y-%m-%d %H:%M:%S'), ts2.strftime('%Y-%m-%d %H:%M:%S')))

            if td.days < 0:
                self.logger.debug("Timestamp order seems to be reverse chronological, reversing")
                self._rpevents.reverse()

        try:
            self.setupBackfill()
        except ValueError as e:
            self.logger.error("Exception during backfill for sample '%s': '%s'" % (s.name, str(e)))
Example #9
    def __init__(self, sample):
        GeneratorPlugin.__init__(self, sample)

        # Logger already setup by config, just get an instance
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {'module': 'PerDayVolumeGenerator', 'sample': sample.name})
        globals()['logger'] = adapter

        from eventgenconfig import Config
        globals()['c'] = Config()
Example #10
    def __init__(self, sample):
        # Logger already setup by config, just get an instance
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {'module': 'PerDayVolume', 'sample': sample.name})
        self.logger = adapter

        from eventgenconfig import Config
        globals()['c'] = Config()

        self.logger.debug('Starting PerDayVolumeRater for %s' % (sample.name if sample is not None else "None"))

        self._sample = sample
Example #11
    def __init__(self, sample):
        OutputPlugin.__init__(self, sample)

        # Logger already setup by config, just get an instance
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {'module': 'SpoolOutputPlugin', 'sample': sample.name})
        globals()['logger'] = adapter

        from eventgenconfig import Config
        globals()['c'] = Config()

        self._spoolDir = sample.pathParser(sample.spoolDir)
        self._spoolFile = sample.spoolFile
Example #12
    def __init__(self, sample=None):

        # Logger already setup by config, just get an instance
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        if sample is None:
            name = "None"
        else:
            name = sample.name
        adapter = EventgenAdapter(logger, {'module': 'Token', 'sample': name})
        globals()['logger'] = adapter

        self._earliestTime = (None, None)
        self._latestTime = (None, None)
Example #13
    def __init__(self, depth, threading):
        # Logger already setup by config, just get an instance
        logobj = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logobj, {
            'module': 'Queue',
            'sample': 'null'
        })
        self.logger = adapter

        # logger.info("Creating Queue of depth %d, threading %s" % (depth, threading))
        if threading == 'thread':
            self.q = PQueue.Queue(depth)
        else:
            self.q = multiprocessing.Manager().Queue(depth)

        self.depth = depth
Example #14
    def __init__(self, name):
        # 9/2/15 CS Can't make logger an attribute of the object like we do in other classes
        # because it borks deepcopy of the sample object
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {'module': 'Sample', 'sample': name})
        globals()['logger'] = adapter
        
        self.name = name
        self.tokens = [ ]
        self._lockedSettings = [ ]

        self.backfilldone = False
        
        # Import config
        from eventgenconfig import Config
        globals()['c'] = Config()
Example #15
    def __init__(self, num, q1, q2, stop):
        from eventgenconfig import Config

        # Logger already setup by config, just get an instance
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {
            'module': 'GeneratorRealWorker',
            'sample': 'null'
        })
        globals()['logger'] = adapter

        globals()['c'] = Config()

        self.stopping = False
        self.working = False

        self._pluginCache = {}

        self.num = num
        c.generatorQueue = q1
        c.outputQueue = q2
        self.stop = stop

        # 10/9/15 CS Prime plugin cache to avoid concurrency bugs when creating local copies of samples
        time.sleep(random.randint(0, 100) / 1000.0)
        logger.debug("Priming plugin cache for GeneratorWorker%d" % num)
        with c.copyLock:
            while c.pluginsStarting.value() > 0:
                logger.debug(
                    "Waiting for exclusive lock to start for GeneratorWorker%d"
                    % num)
                time.sleep(random.randint(0, 100) / 1000.0)

            c.pluginsStarting.increment()
            for sample in c.samples:
                plugin = c.getPlugin('generator.' + sample.generator, sample)
                if plugin.queueable:
                    p = plugin(sample)
                    self._pluginCache[sample.name] = p

            c.pluginsStarting.decrement()
            c.pluginsStarted.increment()
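
pluginsStarting and pluginsStarted act as shared counters with increment()/decrement()/value() methods; they live in the config module and their implementation is not shown here. One plausible shape for such a counter built on multiprocessing primitives (a hypothetical stand-in, not eventgen's actual class):

import multiprocessing

class SharedCounter(object):
    # Hypothetical process-safe counter with the increment/decrement/value
    # interface the worker above relies on.
    def __init__(self, initial=0):
        self._value = multiprocessing.Value('i', initial)
        self._lock = multiprocessing.Lock()

    def increment(self):
        with self._lock:
            self._value.value += 1

    def decrement(self):
        with self._lock:
            self._value.value -= 1

    def value(self):
        with self._lock:
            return self._value.value
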
Example #16
    def __init__(self, num):
        from eventgenconfig import Config

        # Logger already setup by config, just get an instance
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {
            'module': 'OutputRealWorker',
            'sample': 'null'
        })
        globals()['logger'] = adapter

        globals()['c'] = Config()

        self.stopping = False

        logger.debug("Starting OutputWorker %d" % num)

        self.num = num
Example #17
 def __init__(self, sample):
     # Logger already setup by config, just get an instance
     logger = logging.getLogger('eventgen')
     from eventgenconfig import EventgenAdapter
     adapter = EventgenAdapter(logger, {'module': 'GeneratorPlugin', 'sample': sample.name})
     self.logger = adapter
     
     from eventgenconfig import Config
     globals()['c'] = Config()
     
     # # 2/10/14 CS Make a threadsafe copy of all of the samples for us to work on
     # with c.copyLock:
     #     # 10/9/15 CS Moving this to inside the lock, in theory, there should only be one thread
     #     # trying to start at once, going to try to ensure this is the case and hoping for no deadlocks
     #     while c.pluginsStarting.value() > 0:
     #         self.logger.debug("Waiting for exclusive lock to start for GeneratorPlugin '%s'" % sample.name)
     #         time.sleep(0.1)
         
     #     c.pluginsStarting.increment()
     self.logger.debug("GeneratorPlugin being initialized for sample '%s'" % sample.name)
     
     self._out = Output(sample)
     
     # # 9/6/15 Don't do any work until all the timers have started
     # while c.timersStarted.value() < len(c.sampleTimers):
     #     self.logger.debug("Not all timers started, sleeping for GeneratorPlugin '%s'" % sample.name)
     #     time.sleep(1.0)
     
     self._samples = { }
     for s in c.samples:
         news = copy.copy(s)
         news.tokens = [ copy.copy(t) for t in s.tokens ]
         for setting in c._jsonSettings:
             if setting in s.__dict__:
                 setattr(news, setting, getattr(s, setting))
         self._samples[news.name] = news
          
     # self._samples = dict((s.name, copy.deepcopy(s)) for s in c.samples)
     self._sample = sample
Example #18
    def __init__(self, sample):
        self._app = sample.app
        self._sample = sample
        self._outputMode = sample.outputMode

        # Logger already setup by config, just get an instance
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {
            'module': 'OutputPlugin',
            'sample': sample.name
        })
        self.logger = adapter

        from eventgenconfig import Config
        globals()['c'] = Config()

        self.logger.debug(
            "Starting OutputPlugin for sample '%s' with output '%s'" %
            (self._sample.name, self._sample.outputMode))

        self._queue = deque([])
Example #19
        if sys.argv[1] == "--scheme":
            do_scheme()
            sys.exit(0)

    c = Config()
    # Logger is setup by Config, just have to get an instance
    logobj = logging.getLogger('eventgen')
    logobj.propagate = False  # Prevent the log messages from being duplicated in the python.log file
    logobj.setLevel(logging.INFO)
    formatter = logging.Formatter('%(levelname)s %(message)s')
    streamHandler = logging.StreamHandler(sys.stderr)
    streamHandler.setFormatter(formatter)
    logobj.handlers = []
    logobj.addHandler(streamHandler)
    from eventgenconfig import EventgenAdapter
    adapter = EventgenAdapter(logobj, {'sample': 'null', 'module': 'main'})
    logger = adapter

    logobj.info('Starting eventgen')

    # Start the stream, only once for the whole program
    print '<stream>\n'

    splunkconf = get_config()
    # logger.debug("Splunkconf: %s" % pprint.pformat(splunkconf))
    if 'session_key' in splunkconf:
        c.makeSplunkEmbedded(sessionKey=splunkconf['session_key'])
    else:
        raise ValueError('sessionKey missing from Splunk stdin config')

    c.parse()
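
Since the script prints an opening <stream> element, the events generated afterwards are expected to follow Splunk's modular input streaming XML, one <event> element per event, with </stream> emitted at shutdown. A rough sketch of writing a single event in that format (the helper name and optional fields are illustrative):

from xml.sax.saxutils import escape

def emit_event(raw, index=None, sourcetype=None):
    # Write one event in Splunk modular input stream XML to stdout.
    parts = ['<event>']
    if index:
        parts.append('<index>%s</index>' % escape(index))
    if sourcetype:
        parts.append('<sourcetype>%s</sourcetype>' % escape(sourcetype))
    parts.append('<data>%s</data>' % escape(raw))
    parts.append('</event>')
    print(''.join(parts))
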
Example #20
    def __init__(self, sample):
        GeneratorPlugin.__init__(self, sample)

        # Logger already setup by config, just get an instance
        logger = logging.getLogger('eventgen')
        from eventgenconfig import EventgenAdapter
        adapter = EventgenAdapter(logger, {
            'module': 'WindbagGenerator',
            'sample': sample.name
        })
        globals()['logger'] = adapter

        from eventgenconfig import Config
        globals()['c'] = Config()

        # Pull customers into a dictionary
        fh = open(
            os.path.join(
                os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                'samples', 'customer_master.sample'), 'r')
        # fh = open('../samples/customer_master.sample', 'r')
        self.customers = []
        csvReader = csv.DictReader(fh)
        for line in csvReader:
            newline = dict(
                (k, line[k])
                for k in ('Address', 'Age', 'Sex', 'accountNumber',
                          'customerCity', 'customerMDN', 'customerState',
                          'customerZip', 'firstName', 'lastName'))
            newline['address'] = newline['Address']
            del newline['Address']
            newline['age'] = newline['Age']
            del newline['Age']
            newline['sex'] = newline['Sex']
            del newline['Sex']
            newline['city'] = newline['customerCity']
            del newline['customerCity']
            newline['phone'] = newline['customerMDN']
            del newline['customerMDN']
            newline['state'] = newline['customerState']
            del newline['customerState']
            newline['zip'] = newline['customerZip']
            del newline['customerZip']
            self.customers.append(newline)
        fh.close()

        # Bring items into a dictionary
        fh = open(
            os.path.join(
                os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                'samples', 'items.sample'), 'r')
        self.items = []
        csvReader = csv.reader(fh)
        for line in csvReader:
            self.items.append({
                'category': line[0],
                'itemid': line[1],
                'description': line[2],
                'price': float(line[3])
            })
        fh.close()

        self.transType = [
            'purchase', 'purchase', 'purchase', 'purchase', 'purchase',
            'purchase', 'sale'
        ]
        self.characterType = [
            'Milk Maid', 'Masked Mouse', 'Curd Cobbler', 'Whey Warrior',
            'Fermented Friar'
        ]
        self.regions = [
            'Gorgonzolia', 'Camemberalot', 'Jarlsberg', 'Swiss Alps',
            'Limburgerland'
        ]
        self.servers = []
        for a in ['ace', 'bubbles', 'cupcake', 'dash']:
            for b in xrange(0, random.randint(1, 12)):
                self.servers.append('%s.%s.woc.com' % (a, b))

        self.typeRate = {'purchase': 1.0, 'sale': 0.2}
        self.maxItems = 12
        self.tps = 5.0

        self.customerslen = len(self.customers)
        self.itemslen = len(self.items)
        self.transtypelen = len(self.transType)
        self.charactertypelen = len(self.characterType)
        self.serverslen = len(self.servers)
        self.regionslen = len(self.regions)
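
How these lists are finally combined into events happens in the generator's gen() method, which is not part of this snippet. Purely as an illustration of the sampling step, a sketch that draws random pieces from the loaded data (the event layout here is invented, not the plugin's actual output):

import random

def make_transaction(customers, items, trans_types, servers, max_items=12):
    # Illustrative only: pick a random customer, basket and server.
    customer = random.choice(customers)
    basket = random.sample(items, random.randint(1, min(max_items, len(items))))
    return {
        'transType': random.choice(trans_types),
        'server': random.choice(servers),
        'accountNumber': customer['accountNumber'],
        'items': [item['itemid'] for item in basket],
        'total': sum(item['price'] for item in basket),
    }
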