def __init__(self, sample, output_counter=None):
    """Set up round-robin output across multiple files for this sample.

    Derives one file name per requested output file by inserting an index
    before the extension, opens every file for append, and records each
    file's current size.

    Raises:
        ValueError: if the sample has no fileName configured.
    """
    OutputPlugin.__init__(self, sample, output_counter)

    if sample.fileName is None:
        logger.error(
            "outputMode file but file not specified for sample %s"
            % self._sample.name)
        raise ValueError(
            "outputMode file but file not specified for sample %s"
            % self._sample.name)

    self._file = sample.pathParser(sample.fileName)
    self._fileMaxBytes = sample.fileMaxBytes
    self._fileBackupFiles = sample.fileBackupFiles
    self._fileFiles = sample.fileFiles

    def new_name(name, i):
        # Insert the index before the last extension: "a.log" -> "a_0.log";
        # a name without a dot just gets "_<i>" appended.
        base, dot, ext = name.rpartition(".")
        if not dot:
            return "{}_{}".format(name, i)
        return "{}_{}.{}".format(base, i, ext)

    self._multifiles = [
        new_name(self._file, idx) for idx in range(int(self._fileFiles))
    ]
    self._fileHandles = [open(path, "a") for path in self._multifiles]
    self._fileLengths = [os.stat(path).st_size for path in self._multifiles]
def __init__(self, sample, output_counter=None):
    """Bind this sample to a per-sample syslog logger.

    Destination host/port default to 127.0.0.1:1514 when not set on the
    sample. The underlying handler is attached only once per logger name.
    """
    add_header = getattr(sample, 'syslogAddHeader', False)
    OutputPlugin.__init__(self, sample, output_counter)

    if hasattr(sample, 'syslogDestinationHost') and sample.syslogDestinationHost:
        self._syslogDestinationHost = sample.syslogDestinationHost
    else:
        self._syslogDestinationHost = '127.0.0.1'
    if hasattr(sample, 'syslogDestinationPort') and sample.syslogDestinationPort:
        self._syslogDestinationPort = sample.syslogDestinationPort
    else:
        self._syslogDestinationPort = 1514

    logger_name = 'syslog' + sample.name
    self._l = logging.getLogger(logger_name)
    if add_header:
        self._l.addFilter(HostFilter(host=sample.host))
    self._l.setLevel(logging.INFO)

    global loggerInitialized
    # This class is instantiated at least once each interval. Named loggers
    # are process-wide singletons, so attach the syslog handler only the
    # first time this logger name is seen instead of every interval.
    if logger_name not in loggerInitialized:
        handler = logging.handlers.SysLogHandler(
            address=(self._syslogDestinationHost,
                     int(self._syslogDestinationPort)))
        if add_header:
            handler.setFormatter(
                logging.Formatter(fmt='%(asctime)s %(host)s %(message)s',
                                  datefmt='%b %d %H:%M:%S'))
        self._l.addHandler(handler)
        loggerInitialized[logger_name] = True
def __init__(self, sample, output_counter=None):
    """Prepare an AWS S3 uploader for this sample's generated events."""
    # Override maxQueueLength to EventPerKey so that each flush
    # will generate one aws key
    if sample.awsS3EventPerKey:
        sample.maxQueueLength = sample.awsS3EventPerKey
    OutputPlugin.__init__(self, sample, output_counter)

    if not boto_imported:
        logger.error("There is no boto3 or botocore library available")
        return

    # disable any "requests" warnings
    requests.packages.urllib3.disable_warnings()

    # Bind passed in samples to the outputter. Each option falls back to
    # its default when the attribute is missing or falsy on the sample.
    def _opt(attr, default):
        value = getattr(sample, attr, None)
        return value if value else default

    self.awsS3compressiontype = _opt("awsS3CompressionType", None)
    self.awsS3eventtype = _opt("awsS3EventType", "syslog")
    self.awsS3objectprefix = _opt("awsS3ObjectPrefix", "")
    self.awsS3objectsuffix = _opt("awsS3ObjectSuffix", "")
    self.awsS3bucketname = sample.awsS3BucketName

    logger.debug("Setting up the connection pool for %s in %s"
                 % (self._sample.name, self._app))
    self._client = None
    self._createConnections(sample)
    logger.debug("Finished init of awsS3 plugin.")
def __init__(self, sample, output_counter=None):
    """Collect SCS endpoint/credential settings and start REST workers."""
    OutputPlugin.__init__(self, sample, output_counter)

    # Documentation recommends 20KB to 200KB. Going with 150KB.
    self.scsHttpPayloadMax = 150000

    # Pull optional connection settings straight off the sample.
    for attr, default in (("scsEndPoint", None),
                          ("scsAccessToken", None),
                          ("scsClientId", ''),
                          ("scsClientSecret", '')):
        setattr(self, attr, getattr(self._sample, attr, default))

    # By default, retry num is 0
    self.scsRetryNum = int(getattr(self._sample, 'scsRetryNum', 0))

    self._setup_REST_workers()
def __init__(self, sample, output_counter=None):
    """Create a TCP socket aimed at the configured destination."""
    OutputPlugin.__init__(self, sample, output_counter)

    host = getattr(sample, 'tcpDestinationHost', None)
    self._tcpDestinationHost = host if host else '127.0.0.1'
    # NOTE(review): the default port is kept as the string '3333' to match
    # the original behavior — confirm downstream code converts it to int.
    port = getattr(sample, 'tcpDestinationPort', None)
    self._tcpDestinationPort = port if port else '3333'

    import socket  # Import socket module
    self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def __init__(self, sample, output_counter=None):
    """Create a UDP socket aimed at the configured destination."""
    OutputPlugin.__init__(self, sample, output_counter)

    host = getattr(sample, "udpDestinationHost", None)
    self._udpDestinationHost = host if host else "127.0.0.1"
    # NOTE(review): the default port is kept as the string "3333" to match
    # the original behavior — confirm downstream code converts it to int.
    port = getattr(sample, "udpDestinationPort", None)
    self._udpDestinationPort = port if port else "3333"

    import socket  # Import socket module
    self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def __init__(self, sample, output_counter=None):
    """Establish a Splunk session (URL + session key) for streaming events.

    Resolves the Splunk endpoint via the global Config and, when the sample
    carries no session key, authenticates against /services/auth/login to
    obtain one. SSL certificate verification is deliberately disabled.

    Raises:
        IOError: if a session key cannot be obtained (missing/wrong creds).
    """
    OutputPlugin.__init__(self, sample, output_counter)
    from splunk_eventgen.lib.eventgenconfig import Config
    globals()['c'] = Config()
    self._splunkUrl, self._splunkMethod, self._splunkHost, self._splunkPort = c.getSplunkUrl(
        self._sample)  # noqa
    self._splunkUser = self._sample.splunkUser
    self._splunkPass = self._sample.splunkPass
    # Cancel SSL verification
    import ssl
    ssl._create_default_https_context = ssl._create_unverified_context
    if not self._sample.sessionKey:
        try:
            myhttp = httplib2.Http(disable_ssl_certificate_validation=True)
            logger.debug(
                "Getting session key from '%s' with user '%s' and pass '%s'"
                % (self._splunkUrl + '/services/auth/login',
                   self._splunkUser, self._splunkPass))
            response = myhttp.request(
                self._splunkUrl + '/services/auth/login', 'POST',
                headers={},
                body=urllib.parse.urlencode({
                    'username': self._splunkUser,
                    'password': self._splunkPass
                }))[1]
            self._sample.sessionKey = minidom.parseString(
                response).getElementsByTagName(
                    'sessionKey')[0].childNodes[0].nodeValue
            logger.debug(
                "Got new session for splunkstream, sessionKey '%s'"
                % self._sample.sessionKey)
        except Exception as e:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # still propagate; fixed the "EMBEEDED" typo and the missing
            # space before "Credentials" in the raised message.
            msg = ("Error getting session key for non-SPLUNK_EMBEDDED for sample '%s'."
                   % self._sample.name + " Credentials are missing or wrong")
            logger.error(msg)
            raise IOError(msg) from e
    logger.debug(
        "Retrieved session key '%s' for Splunk session for sample %s'"
        % (self._sample.sessionKey, self._sample.name))
def updateConfig(self, config):
    """Merge httpevent settings from global config and sample, then build the pool.

    Any failure during setup is logged (with traceback) and swallowed,
    matching the plugin's best-effort initialization contract.
    """
    OutputPlugin.updateConfig(self, config)
    try:
        # Fall back to the sample's server list when the global config does
        # not carry one; with neither, this output mode cannot function.
        if not hasattr(self.config, 'httpeventServers'):
            if hasattr(self._sample, 'httpeventServers'):
                self.config.httpeventServers = self._sample.httpeventServers
            else:
                msg = ('outputMode %s but httpeventServers not specified for sample %s'
                       % (self.name, self._sample.name))
                logger.error(msg)
                raise NoServers(msg)

        # set default output mode to round robin
        if getattr(self.config, 'httpeventOutputMode', None):
            self.httpeventoutputmode = config.httpeventOutputMode
        elif getattr(self._sample, 'httpeventOutputMode', None):
            self.httpeventoutputmode = self._sample.httpeventOutputMode
        else:
            self.httpeventoutputmode = 'roundrobin'

        if getattr(self.config, 'httpeventMaxPayloadSize', None):
            self.httpeventmaxsize = self.config.httpeventMaxPayloadSize
        elif getattr(self._sample, 'httpeventMaxPayloadSize', None):
            self.httpeventmaxsize = self._sample.httpeventMaxPayloadSize
        else:
            self.httpeventmaxsize = 10000
        logger.debug("Currentmax size: %s " % self.httpeventmaxsize)

        # Server list may arrive as a JSON string or an already-parsed object.
        if isinstance(config.httpeventServers, str):
            self.httpeventServers = json.loads(config.httpeventServers)
        else:
            self.httpeventServers = config.httpeventServers

        logger.debug("Setting up the connection pool for %s in %s"
                     % (self._sample.name, self._app))
        self.createConnections()
        logger.debug("Pool created.")
        logger.debug("Finished init of %s plugin." % self.name)
    except Exception as e:
        logger.exception(str(e))
def __init__(self, sample, output_counter=None):
    """Open the sample's output file for appending and record its size.

    Raises:
        ValueError: if the sample has no fileName configured.
    """
    OutputPlugin.__init__(self, sample, output_counter)

    if sample.fileName is None:
        msg = ("outputMode file but file not specified for sample %s"
               % self._sample.name)
        logger.error(msg)
        raise ValueError(msg)

    self._file = sample.pathParser(sample.fileName)
    self._fileMaxBytes = sample.fileMaxBytes
    self._fileBackupFiles = sample.fileBackupFiles

    self._fileHandle = open(self._file, "a")
    self._fileLength = os.stat(self._file).st_size
    logger.debug(
        "Configured to log to '%s' with maxBytes '%s' with backupCount '%s'"
        % (self._file, self._fileMaxBytes, self._fileBackupFiles))
def __init__(self, sample, output_counter=None):
    """Resolve the sample's spool directory and file into a full path."""
    OutputPlugin.__init__(self, sample, output_counter)
    self._spoolDir = sample.pathParser(sample.spoolDir)
    self._spoolFile = sample.spoolFile
    # os.path.join is platform-correct and avoids a doubled separator when
    # spoolDir already ends with one (was: _spoolDir + os.sep + _spoolFile).
    self.spoolPath = os.path.join(self._spoolDir, self._spoolFile)
def __init__(self, sample, output_counter=None):
    """Initialize the plugin; all state is handled by the OutputPlugin base."""
    OutputPlugin.__init__(self, sample, output_counter)
def __init__(self, sample, output_counter=None):
    """Initialize the plugin and mark that no output has been emitted yet.

    ``firsttime`` is presumably consumed by this plugin's output/flush path
    for one-time setup — TODO confirm outside this view.
    """
    OutputPlugin.__init__(self, sample, output_counter)
    self.firsttime = True