def runStartQuery(self, plugin_source_type, rules):
        cVal = "NA"
        if self.__myDataBaseCursor is None:
            return cVal
        try:
            if plugin_source_type != "db2":
                sql = rules['start_query']['query']
                self.logdebug(Lazyformat("Running Start query: {}", sql))
                self.__myDataBaseCursor.execute(sql)
                rows = self.__myDataBaseCursor.fetchone()
                if not rows:
                    self.logwarn("Initial query empty, please double-check")
                    return cVal
                cVal = str((rows[0]))

                sql = rules['query']['query']
            elif plugin_source_type == "db2":
                sql = rules['start_query']['query']
                self.logdebug(Lazyformat("Start query: {}", sql))
                result = ibm_db.exec_immediate(self.__objDBConn, sql)
                dictionary = ibm_db.fetch_both(result)
                if not dictionary:
                    self.logwarn("Initial query empty, please double-check")
                    return cVal
                cVal = str((dictionary[0]))
                self.loginfo("Connection closed")
            if cVal is None or cVal == "None" or len(cVal) == 0:
                cVal = "NA"
        except Exception, e:
            cVal = "NA"
            self.logerror(Lazyformat("Error running the start query: {}", e))
        return cVal
 def stop(self):
     self.logdebug(Lazyformat("Scheduling plugin stop"))
     self.stop_processing = True
     try:
         self.join(1)
     except RuntimeError:
         self.logwarn(Lazyformat("Stopping thread that likely hasn't started"))
Example #3
    def getLastRecordTimeString(self):
        """
        Gets the last record time
            # To get the last record by time: "Select TimeWritten from Win32_NTLogEvent Where Logfile = 'Application' and TimeWritten >\"20110803103502.000000-000\""
        """
        last_record_file = ParserWMI.LAST_RECORD_FILE_TMP % (self.__hostname,
                                                             self.__section)

        self.__last_record_time = ""
        if os.path.exists(last_record_file):
            file = open(last_record_file, 'r')
            data = file.readline()
            self.logdebug(Lazyformat("Last record time: {}", data.rstrip()))
            self.__last_record_time = data.rstrip()
            file.close()
            if self.__last_record_time != "":
                return
        #cmd_run = "wmic -U %s%%%s //%s  \"Select TimeWritten from Win32_NTLogEvent Where Logfile = '%s'\"  | sort | head -n 1 | tr \"|\" \" \" |awk '{print$3;}'" % (self.__username, self.__password, self.__hostname, self.__section)
        cmd_run = "wmic -U %s%%%s //%s  \"Select TimeWritten from Win32_NTLogEvent Where Logfile = '%s'\" | grep %s | tr \"\|\" \" \" |awk '{print$3;}'  | sort -r | head -n 1" % (
            self.__username, self.__password, self.__hostname, self.__section,
            self.__section)
        status, output = commands.getstatusoutput(cmd_run)
        if status != 0 or output == "":
            self.logwarn(
                Lazyformat(
                    "[GET_LAST_RECORD] An error occurred while trying to get logs from: {}, section: {}",
                    self.__hostname, self.__section))
            self.__last_record_time = ""
        else:
            self.logdebug(
                Lazyformat("[GET_LAST_RECORD] Last record time: {}", output))
            self.__last_record_time = output
            self.updateLastRecordTimeString()
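
The docstring above shows the WQL filter used to fetch only records newer than the bookmarked TimeWritten value. A minimal sketch of composing such a query from the persisted timestamp; the helper name is hypothetical and not part of ParserWMI:

# Hypothetical helper, for illustration only.
def build_incremental_wql(logfile, last_record_time):
    wql = "Select TimeWritten from Win32_NTLogEvent Where Logfile = '%s'" % logfile
    if last_record_time:
        # fetch only events newer than the bookmarked timestamp
        wql += " and TimeWritten > \"%s\"" % last_record_time
    return wql

# build_incremental_wql('Application', '20110803103502.000000-000') reproduces
# the query quoted in the docstring above.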
Example #4
 def __init__(self, conf, plugin, conn, hostname, username, password):
     self.__conf = conf
     self.__plugin = plugin
     self.__rules = []  # list of RuleMatch objects
     self.__conn = conn
     self.__hostname = hostname
     self.__username = username
     self.__password = password.strip()
     self.__section = self.__plugin.get("config", "section")
     self.__last_record_time = ""
     if self.__section == "":
         # search the command string to find the section
         rules = self.__plugin.rules()
         cmd_str = rules['cmd']['cmd']
         for sec in ParserWMI.VALID_SECTIONS:
             if cmd_str.find(sec) >= 0:
                 self.__section = sec
                 self.logwarn(
                     Lazyformat(
                         "The section was not found in [config]. Section deduced: {}",
                         self.__section))
                 break
         if self.__section == "":
             self.__section = "Security"
             self.logwarn(
                 Lazyformat(
                     "The section was not found in [config]. It can't be deduced: applying default value: {}",
                     self.__section))
     self.__pluginID = self.__plugin.get("DEFAULT", "plugin_id")
     self.__stop_processing = False
     self.__sectionExists = False
     Detector.__init__(self, conf, plugin, conn)
    def process(self):
        self.loginfo(Lazyformat("Starting process ParserElastic"))
        try:
            es = ElasticDetector(self.elastic_url, plugin_name=self.name, store_index=self.store_index,
                                 verify_certs=self.verify_certs,credentials=self.elastic_credentials)
        except Exception as ex:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.logerror(Lazyformat("process[{}]:{} {}".format(exc_tb.tb_lineno, exc_type, ex)))
            return

        while not self.stop_processing:
            for rule in self.rules:
                try:
                    es.rule_name = rule.name
                    es.plugin_sid = rule.plugin_sid
                    timestamp = es.get_last_timestamp()
                    self.loginfo(Lazyformat("Getting last documents since {}".format(timestamp)))
                    documents = es.get_matches_since(data_index=rule.data_index, timestamp=timestamp, query=rule.query)
                    for doc in documents:
                        wdoc = DotAccessibleDict(doc['_source'])
                        self.loginfo(wdoc)
                        group = self._fetch(wdoc, rule.fields)
                        self.generate(group, rule)
                    es.insert_timestamp()
                except Exception, ex:
                    self.logerror(Lazyformat("Elasticsearch operation to {} failed: {}", self.elastic_url, ex))
                sleep(float(self.sleep_time))
Example #6
 def process(self):
     self.loadRules()
     ftp_host = self._plugin.get("config", "ftp_server")
     ftp_port = self._plugin.get("config", "ftp_port")
     ftp_user = self._plugin.get("config", "ftp_user")
     ftp_pswd = self._plugin.get("config", "ftp_user_password")
     sleep_time = self._plugin.get("config", "tSleep")
     remote_file =  self._plugin.get("config", "remote_filename")
     while not self.stop_processing:
         # connect to the ftp server and retrieve the file
         try:
             ftp_conn = FTP(ftp_host)
             filename = "/tmp/file_%s"% time()
             file_tmp = open(filename,'wb')
             ftp_conn.login(ftp_user,ftp_pswd)
             cmd = "RETR " + remote_file
             self.loginfo(Lazyformat("FTP cmd: {}", cmd))
             ftp_conn.retrbinary(cmd,file_tmp.write)
             file_tmp.close()
             #read the file
             file_tmp = open(filename)
             for line in file_tmp:
                 self.tryMatchRule(line)
                 sleep(0.001)
             file_tmp.close()
             os.remove(filename)
             
         except Exception,e:
             self.logerror(Lazyformat("FTP connection to {} failed: {}", ftp_host, e))
         sleep(float(sleep_time))
Example #7
    def check_file_path(self, location):
        can_read = True
        if self._plugin.has_option("config", "create_file"):
            create_file = self._plugin.getboolean("config", "create_file")
        else:
            create_file = False

        if not os.path.exists(location) and create_file:
            if not os.path.exists(os.path.dirname(location)):
                self.logwarn(
                    Lazyformat("Creating the {} directory...",
                               os.path.dirname(location)))
                os.makedirs(os.path.dirname(location), 0755)

            self.logwarn(
                Lazyformat("The {} file is missing. Creating a new one...",
                           location))
            fd = open(location, 'w')
            fd.close()

        # open file
        fd = None
        try:
            #check if file exist.
            if os.path.isfile(location):
                fd = open(location, 'r')
            else:
                self.logwarn(Lazyformat("File: {} does not exist!", location))
                can_read = False

        except IOError, e:
            self.logerror(
                Lazyformat("Failed to read the file {}: {}", location, e))
            can_read = False
Example #8
 def process(self):
     """Starts to process events!
     """
     number_of_lines = 0
     # Check if we have to create the location file
     self.check_file_path(self.__location)
     while not self.__shutdown_event.is_set() and not os.path.isfile(
             self.__location):
         time.sleep(1)
     tail = TailFollowBookmark(filename=self.__location,
                               track=True,
                               encoding=self.__plugin_configuration.get(
                                   'config', 'encoding'))
     self.loginfo(Lazyformat("Reading from {}", self.__location))
     while not self.__shutdown_event.is_set():
         try:
             # stop processing tails if requested
             if self.__shutdown_event.is_set():
                 break
             for line in tail:
                 try:
                     json_event = json.loads(line)
                     event = Event()
                     if 'plugin_sid' not in json_event:
                         event[
                             'plugin_sid'] = self.__plugin_configuration.get(
                                 "DEFAULT", 'plugin_sid'
                             ) if self.__plugin_configuration.has_option(
                                 "DEFAULT", 'plugin_sid') else 1
                     event['plugin_id'] = self.__plugin_configuration.get(
                         "DEFAULT", 'plugin_id')
                     for key, value in json_event.iteritems():
                         event_key = key
                         if self.__plugin_configuration.has_section(
                                 "mapping"):
                             if self.__plugin_configuration.has_option(
                                     "mapping", key):
                                 event_key = self.__plugin_configuration.get(
                                     "mapping", key)
                                 if event_key == "date":
                                     value = normalize_date(value)
                         event[event_key] = value
                     event['log'] = json.dumps(json_event)
                     self.send_message(event)
                     number_of_lines += 1
                 except Exception as exp:
                     print "CRG %s" % exp
                     self.logwarn(Lazyformat("Invalid Json event: {}",
                                             line))
             # Added small sleep to avoid the excessive cpu usage
             time.sleep(0.01)
         except Exception, e:
             self.logerror(Lazyformat("Processing failed: {}", e))
Example #9
 def existsLogForSeciton(self):
     """
     Checks whether the specified section exists.        
     """
     returnValue = False
     """
     Example query output:
     CLASS: Win32_NTEventlogFile
     LogfileName|Name
     Application|C:\WINDOWS\system32\config\AppEvent.Evt
     Security|C:\WINDOWS\System32\config\SecEvent.Evt
     System|C:\WINDOWS\system32\config\SysEvent.Evt
     """
     self.__plugin.get("config", "section")
     query = ParserWMI.CMD_CHECK_SECTION % (
         self.__username, self.__password, self.__hostname, self.__section)
     status, output = commands.getstatusoutput(query)
     if status != 0 or output == "":
         self.logwarn(
             Lazyformat(
                 "An error occurred while trying to get logs from: {} - status:{} --output: {}",
                 self.__hostname, status, output))
     else:
         returnValue = True
     return returnValue
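
The docstring above quotes the pipe-delimited output of the wmic Win32_NTEventlogFile query. As a small, purely illustrative companion (the helper is not part of ParserWMI), this is how that output could be reduced to the list of available log sections:

# Hypothetical helper, for illustration only.
def parse_logfile_names(output):
    names = []
    for line in output.splitlines():
        # skip the CLASS banner and the LogfileName|Name header row
        if line.startswith("CLASS:") or line.startswith("LogfileName|"):
            continue
        if "|" in line:
            names.append(line.split("|", 1)[0])
    return names

# Fed the example output from the docstring, it returns
# ['Application', 'Security', 'System'].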
Example #10
    def __init__(self, conf, plugin, conn, idm=False):
        Detector.__init__(self, conf, plugin, conn)
        self._conf = conf
        self._plugin = plugin
        self.rules = []  # list of RuleMatch objects
        self.conn = conn
        self.__myDataBaseCursor = None
        self.__objDBConn = None
        self.__tries = 0
        self.stop_processing = False
        self._databasetype = self._plugin.get("config", "source_type")
        self._canrun = True
        self.__idm = True if self._plugin.get("config",
                                              "idm") == "true" else False
        self.loginfo(
            Lazyformat("IDM is {}", "enabled" if self.__idm else "disabled"))

        if self._databasetype == "db2" and db2notloaded:
            self.loginfo("You need python ibm_db module installed")
            self._canrun = False
        elif self._databasetype == "mysql" and mysqlnotloaded:
            self.loginfo("You need python mysqldb module installed")
            self._canrun = False
            self.stop()
        elif self._databasetype == "oracle" and oraclenotloaded:
            self.loginfo("You need python cx_Oracle module installed")
            self._canrun = False
        elif self._databasetype == "mssql" and mssqlnotloaded:
            self.loginfo("You need python pymssql module installed")
            self._canrun = False
Example #11
    def checkTimeZone(self):
        if self._timezone in all_timezones:
            used_tzone = self._timezone
            self.logdebug(
                Lazyformat("Using custom plugin tzone data: {}", used_tzone))
        elif self._agenttimezone in all_timezones:
            used_tzone = self._agenttimezone
            self.loginfo(
                Lazyformat(
                    "Failed to find the {} timezone in database. Falling back to agent timezone: {}",
                    self._timezone, used_tzone))
        else:
            self.set_system_tzone()
            used_tzone = self.systemtzone

        self._EventTimeZone = used_tzone
Example #12
    def parse(self, data):
        doc = xml.dom.minidom.parseString(data)
        alertlist = doc.getElementsByTagName('sd:evIdsAlert')

        alert_obj_list = []

        for alert in alertlist:
            sig = alert.getElementsByTagName('sd:signature')[0]
            self.logdebug("SDEE Parsing Alert")
            #Plugin sid
            sid = sig.attributes['id'].nodeValue

            desc = sig.attributes['description'].nodeValue

            participants = alert.getElementsByTagName('sd:participants')[0]

            if not participants.hasChildNodes():
                self.logdebug("Ignoring SDEE alert. Possible TCP/UDP/ARP DoS")
                continue

            attacker = participants.getElementsByTagName('sd:attacker')[0]
            #Src addr
            attAddr = attacker.getElementsByTagName('sd:addr')[0].firstChild.data

            #Src port
            try:
                attPort = attacker.getElementsByTagName('sd:port')[0].firstChild.data

            except:
                attPort = 0

            for dst in alert.getElementsByTagName('sd:target'):
                data1 = self.sanitize(alert.toxml())
                self.logdebug(Lazyformat("SDEE: {}", data1))
                #Dst Address
                dstAddr = dst.getElementsByTagName('sd:addr')[0].firstChild.data

                #Dst Port
                try:
                    dstPort = dst.getElementsByTagName('sd:port')[0].firstChild.data

                except:
                    dstPort = 0

                self.logdebug(Lazyformat("{}:{}, {}:{}, {}:{}", sid, desc, attAddr, attPort, dstAddr, dstPort))
                self.generate(sid, attAddr, attPort, dstAddr, dstPort, data1)
Example #13
 def updateLastRecordTimeString(self):
     last_record_file = ParserWMI.LAST_RECORD_FILE_TMP % (self.__hostname,
                                                          self.__section)
     thefile = open(last_record_file, 'w')
     thefile.write(self.__last_record_time + "\n")
     self.logdebug(
         Lazyformat("Updating last_record_file: {}",
                    self.__last_record_time))
     thefile.close()
 def generate(self, groups, rule):
     self.logwarn(Lazyformat(groups))
     event = Event()
     for key, value in rule.original_rule.iteritems():
         if key not in self.SKIP_RULE_FIELD:
             data = self._plugin.get_replace_array_value(value.encode('utf-8'), groups)
             if data is not None:
                 event[key] = data
     if event is not None:
         self.send_message(event)
 def _parse_rules(self):
     try:
         rules = self._plugin.rules()
         for rule_name in rules:
             rule = rules[rule_name]
             el_rule = ElasticRules(rule_name, rule)
             self.rules.append(el_rule)
     except Exception as ex:
         exc_type, exc_obj, exc_tb = sys.exc_info()
         self.logerror(Lazyformat("_parse_rules[{}]:{} {}".format(exc_tb.tb_lineno, exc_type, ex.message)))
Example #16
    def _exclude_event(self, event):

        if self._plugin.has_option("config", "exclude_sids"):
            exclude_sids = self._plugin.get("config", "exclude_sids")
            if event["plugin_sid"] in Config.split_sids(exclude_sids):
                self.logdebug(
                    Lazyformat(
                        "Excluding event with plugin_id={} and plugin_sid={}",
                        event["plugin_id"], event["plugin_sid"]))
                return True

        return False
Example #17
 def tryConnectDB(self):
     connected = False
     while not connected and self.__tries < MAX_TRIES_DB_CONNECT:
         time.sleep(10)
         connected = self.openDataBaseCursor(
             self._plugin.get("config", "source_type"))
         if not connected:
             self.loginfo(
                 Lazyformat(
                     "Failed to establish the DB connection, retrying in 10 seconds...try: {}",
                     self.__tries))
         self.__tries += 1
     else:
         if connected:
             self.loginfo(
                 Lazyformat("DB connection established after {} tries",
                            self.__tries))
         if self.__tries >= MAX_TRIES_DB_CONNECT:
             self.loginfo("Max connection attempts reached")
         self.__tries = 0
     return connected
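
tryConnectDB() relies on Python's while/else: the else block runs once the loop condition turns false, and would be skipped only if the loop exited via break (which never happens here). A tiny illustration of that construct, unrelated to the plugin's own code:

tries = 0
connected = False
while not connected and tries < 3:
    tries += 1
    connected = (tries == 2)      # pretend the second attempt succeeds
else:
    # runs exactly once, after the condition becomes false
    print("loop finished: connected=%s after %d tries" % (connected, tries))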
    def process(self):
        self._dir = self._plugin.get("config", "directory")
        self._linklayer = self._plugin.get("config", "linklayer")
        self._unified_version = int(
            self._plugin.get("config", "unified_version"))

        if self._linklayer in ['ethernet', 'cookedlinux']:
            if os.path.isdir(self._dir):
                self._prefix = self._plugin.get("config", "prefix")

                if self._prefix != "":
                    snort = ParserSnort(linklayer=self._linklayer,
                                        unified_version=self._unified_version)
                    snort.init_log_dir(self._dir, self._prefix)

                    while 1:
                        # get next snort event (blocking)
                        ev = snort.get_snort_event()
                        # create the Snort event
                        event = Snort()
                        event["event_type"] = Snort.EVENT_TYPE

                        if event['interface'] is None:
                            event["interface"] = self._plugin.get(
                                "config", "interface")

                        (event["unziplen"], event["gzipdata"]) = ev.strgzip()

                        if event['plugin_id'] is None:
                            event['plugin_id'] = self._plugin.get(
                                "config", "plugin_id")

                        if event['type'] is None:
                            event['type'] = self._plugin.get("config", "type")

                        if ev.isIPV6():
                            event['src_ip'] = ev.sip
                            event['dst_ip'] = ev.dip
                            event['ipv6'] = "1"
                            self.send_message(event)
                        else:
                            self.send_message(event)

                else:
                    self.logerror(
                        Lazyformat("Bad config parameter: prefix ({})",
                                   self._prefix))
                    sys.exit(-1)

            else:
                self.logerror(
                    Lazyformat("Bad config parameter: directory ({})",
                               self._dir))
                sys.exit(-1)

        else:
            self.logerror("Unknown link layer")
            sys.exit(-1)
    def __init__(self, conf, plugin, conn):
        self._conf = conf  # config.cfg info
        self._plugin = plugin  # plugins/X.cfg info
        self.rules = []  # list of ElasticRules objects
        self.conn = conn
        self.stop_processing = False
        self.sleep_time = 10

        Detector.__init__(self, conf, plugin, conn)
        # Initialize values with config
        self._plugin_config()
        self._parse_rules()
        self.loginfo(Lazyformat("Init ParserElastic"))
Example #20
 def connectMysql(self):
     host = self._plugin.get("config", "source_ip")
     user = self._plugin.get("config", "user")
     passwd = self._plugin.get("config", "password")
     db = self._plugin.get("config", "db")
     try:
         self.__objDBConn = MySQLdb.connect(host=host,
                                            user=user,
                                            passwd=passwd,
                                            db=db)
     except Exception, e:
         self.logerror(Lazyformat("DB connection failed: {}", e))
         return None
Example #21
 def tryMatchRule(self,line):
     rule_matched = False
     for rule in self.rules:
         rule.feed(line)
         if rule.match() and not rule_matched:
             self.logdebug(Lazyformat("Matching rule: [{}] -> {}", rule.name, line))
             event = rule.generate_event()
             self.resetAllrules()
             # send the event as appropriate
             if event is not None:
                 self.send_message(event)
             # one rule matched, no need to check more
             rule_matched = True
             break
Example #22
    def getLastRecord(self):
        """
        Gets the last record.
        """

        last_record = 0
        query = ParserWMI.CMD_GET_LAST_RECORD % (
            self.__username, self.__password, self.__hostname, self.__section)
        status, output = commands.getstatusoutput(query)
        if status != 0:
            self.logwarn(
                Lazyformat(
                    "[GET_LAST_RECORD] An error occurred while trying to get logs from: {}, section: {}",
                    self.__hostname, self.__section))
        elif output == "":
            last_record = 0
        else:
            last_record = output
        return last_record
Example #23
    def _getLocalIP(self):
        if self.override_sensor:
            self.loginfo("override_sensor detected")

        if self._conf.has_section("plugin-defaults"):
            mylocalip = self._conf.get("plugin-defaults", "sensor")
            return mylocalip

        hostname, aliaslist, ipaddrlist = socket.gethostbyname_ex(
            socket.gethostname())
        for ip in ipaddrlist:
            if not ip.startswith("127"):
                return ip
        # In this case we try to parse the output of ip a
        lines = commands.getoutput(
            "ip a | grep inet | grep -v inet6 | awk '{print $2}'| grep -v \"127.0.0.1\" | awk -F '/' '{print $1}'"
        ).split("\n")
        if len(lines) > 0:
            self.loginfo(Lazyformat("Using sensor ip: {}", lines[0]))
            return lines[0]
Example #24
    def __init__(self, conf, plugin, conn):

        self._conf = conf
        self._plugin = plugin
        self.os_hash = {}
        self.conn = conn
        self.consolidation = EventConsolidation(self._conf)
        self.systemtzone = None
        self.__plugin_id = self._plugin.get("DEFAULT", "plugin_id")
        self.__plugin_name = self._plugin.get("config", "name")

        if "tzone" in self._plugin.hitems("DEFAULT"):
            self._timezone = self._plugin.get("DEFAULT", "tzone")
        else:
            self._timezone = self._conf.get("plugin-defaults", "tzone")

        self._sensorID = None
        if "sensor_id" in self._conf.hitems("plugin-defaults"):
            self._sensorID = self._conf.get("plugin-defaults", "sensor_id")

        self.loginfo(
            Lazyformat("Starting plugin with the following tzone: {}",
                       self._timezone))
        threading.Thread.__init__(self)
        self._agenttimezone = self._conf.get("plugin-defaults", "tzone")
        self._EventTimeZone = None
        if self._conf.has_option("plugin-defaults", "override_sensor"):
            self.override_sensor = self._conf.getboolean(
                "plugin-defaults", "override_sensor")
        else:
            self.override_sensor = False

        # 2011-02-01 17:00:16
        self.patterndate = re.compile('(\d{10})')
        self.patternISO_date = re.compile(
            '(?P<year>\d+)[\s-](?P<month>\d+)[\s-](?P<day>\d+)\s+(?P<hour>\d+):(?P<minute>\d+):(?P<second>\d+)'
        )
        self.set_system_tzone()
        self.checkTimeZone()
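
As a quick, self-contained check of what the patternISO_date expression above captures, here it is applied to the sample timestamp quoted in the comment:

import re

patternISO_date = re.compile(
    r'(?P<year>\d+)[\s-](?P<month>\d+)[\s-](?P<day>\d+)\s+(?P<hour>\d+):(?P<minute>\d+):(?P<second>\d+)'
)
m = patternISO_date.search("2011-02-01 17:00:16")
assert m.groupdict() == {'year': '2011', 'month': '02', 'day': '01',
                         'hour': '17', 'minute': '00', 'second': '16'}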
Example #25
    def process(self):
        self.__notifier = pyinotify.ThreadedNotifier(
            self.__watchdog, FileEventHandler(self.addTail))
        try:
            self.__bookmark_dir = self._plugin.get("config", "bookmark_dir")
        except:
            self.__bookmark_dir = ""
        mask = pyinotify.IN_CREATE

        #check if the plugin has rlocation
        locations = []
        rlocationvalue = self._plugin.get("config", "rlocation")
        if rlocationvalue != "":
            files = glob.glob(rlocationvalue)
            for f in files:
                self.logdebug(Lazyformat("Adding location: {}", f))
                locations.append(f)
        else:
            locations = self._plugin.get("config", "location")
            locations = locations.split(',')
        self.__notifier.start()
        # first check if file exists
        for location in locations:
            if self.check_file_path(location):
                self.__locations.append(location)
            else:
                self.__monitorLocations.append(location)
                dir = os.path.dirname(location)
                #                if not self.__watchdog.watches.has_key(dir): #--version python-pyinotify 0.8.9
                self.__watchdog.add_watch(dir, mask, rec=True)

        # compile the list of regexp
        unsorted_rules = self._plugin.rules()
        keys = unsorted_rules.keys()
        keys.sort()
        for key in keys:
            item = unsorted_rules[key]
            self.rules.append(RuleMatch(key, item, self._plugin))

        # Move to the end of file
        # fd.seek(0, 2)

        for location in self.__locations:
            self.__tails.append(
                TailFollowBookmark(location, 1, self.__bookmark_dir,
                                   self._plugin.get('config', 'encoding')))

        while not self.stop_processing:
            # is plugin enabled?
            if not self._plugin.getboolean("config", "enable"):

                # wait until plugin is enabled
                while not self._plugin.getboolean("config", "enable"):
                    time.sleep(1)

                # plugin is now enabled, skip events generated on
                # 'disable' state, so move to the end of file

            self._thresholding()

            for tail in self.__tails:
                # stop processing tails if requested
                if self.stop_processing:
                    break

                for line in tail:
                    matches = 0
                    rules = 0

                    # stop processing lines if requested
                    if self.stop_processing:
                        break
                    rule_matched = False
                    for rule in self.rules:
                        rules += 1
                        rule.feed(line)

                        if rule.match() and not rule_matched:
                            matches += 1
                            self.logdebug(
                                Lazyformat("Match rule: [{}] -> {}", rule.name,
                                           line))
                            event = rule.generate_event()
                            self.resetAllrules()
                            # send the event as appropriate
                            if event is not None:
                                self.send_message(event)

                            # one rule matched, no need to check more
                            rule_matched = True
                            break
            #Added small sleep to avoid the excessive cpu usage
            time.sleep(0.01)

        for tail in self.__tails:
            tail.close()
        self.logdebug("Processing completed.")
Example #26
    def process(self):
        self.loginfo("Started SDEE Collector")
        if self.__hostname:
            self.host = self.__hostname
        else:
            self.host = self._plugin.get("config", "source_ip")
        if self.__username:
            self.username = self.__username
        else:
            self.username = self._plugin.get("config", "user")
        if self.__password:
            self.password = self.__password
        else:
            self.password = self._plugin.get("config", "password")
        self.sleepField = self._plugin.get("config", "sleep")
        self.plugin_id = self._plugin.get("DEFAULT", "plugin_id")

        sdee = SDEE(user=self.username,password=self.password,host=self.host,method='https', force='yes')
        try:
            sdee.open()
            self.loginfo(Lazyformat("SDEE subscriberId: {}", sdee._subscriptionid))
            f = open(self.sIdFile, 'w')
            f.write("%s\n" % sdee._subscriptionid)
            f.close()

        except:
            self.logerror(Lazyformat("Failed to open SDEE connection to device {}", self.host))
            self.loginfo("SDEE: Trying to close last session")
            try:
                f = open(self.sIdFile, 'r')
            except IOError:
                self.logerror("SDEE: Cannot read subscriber ID")
                return
            subs = f.readline()

            try:
                sdee = SDEE(user=self.username,password=self.password,host=self.host,method='https', force='yes')
                sdee._subscriptionid = subs
                sdee.close()

            except:
                self.logerror("SDEE: losing last session Failed")
                return

            try:
                sdee = SDEE(user=self.username,password=self.password,host=self.host,method='https', force='yes')
                sdee.open()
                self.loginfo(Lazyformat("SDEE subscriberId: {}", sdee._subscriptionid))
                f = open(self.sIdFile, 'w')
                f.write("%s\n" % sdee._subscriptionid)
                f.close()

            except:
                self.logerror("SDEE Failed")
                return

        while 1:
            sdee.get()
            self.loginfo("Requesting SDEE Data...")
            data = sdee.data()
            self.logdebug(data)
            self.parse(data)
            time.sleep(int(self.sleepField))
class ParserElastic(Detector):
    SKIP_RULE_FIELD = {'query', 'fields', 'data_index'}

    def __init__(self, conf, plugin, conn):
        self._conf = conf  # config.cfg info
        self._plugin = plugin  # plugins/X.cfg info
        self.rules = []  # list of ElasticRules objects
        self.conn = conn
        self.stop_processing = False
        self.sleep_time = 10

        Detector.__init__(self, conf, plugin, conn)
        # Initialize values with config
        self._plugin_config()
        self._parse_rules()
        self.loginfo(Lazyformat("Init ParserElastic"))

    def _fetch(self, document, fields):
        group = []
        for field in fields:
            try:
                group.append(document[field])
            except KeyError:
                group.append("")
                self.logwarn("{} doesn't exists in document {}".format(field, document))
        return group

    def _plugin_config(self):
        self.plugin_id = self._plugin.get("DEFAULT", "plugin_id")
        self.name = self._plugin.get("config", "name")
        self.elastic_url = self._plugin.get("config", "elastic_url")
        self.store_index = self._plugin.get("config", "store_index")
        self.verify_certs = self._plugin.getboolean("config", "verify_certs")
        user = self._plugin.get("config", "elastic_user")
        password = self._plugin.get("config", "elastic_password")
        self.elastic_credentials = None
        if user and password:
            self.elastic_credentials = (user, password)

    def _parse_rules(self):
        try:
            rules = self._plugin.rules()
            for rule_name in rules:
                rule = rules[rule_name]
                el_rule = ElasticRules(rule_name, rule)
                self.rules.append(el_rule)
        except Exception as ex:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.logerror(Lazyformat("_parse_rules[{}]:{} {}".format(exc_tb.tb_lineno, exc_type, ex.message)))

    def process(self):
        self.loginfo(Lazyformat("Starting process ParserElastic"))
        try:
            es = ElasticDetector(self.elastic_url, plugin_name=self.name, store_index=self.store_index,
                                 verify_certs=self.verify_certs,credentials=self.elastic_credentials)
        except Exception as ex:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.logerror(Lazyformat("process[{}]:{} {}".format(exc_tb.tb_lineno, exc_type, ex)))
            return

        while not self.stop_processing:
            for rule in self.rules:
                try:
                    es.rule_name = rule.name
                    es.plugin_sid = rule.plugin_sid
                    timestamp = es.get_last_timestamp()
                    self.loginfo(Lazyformat("Getting last documents since {}".format(timestamp)))
                    documents = es.get_matches_since(data_index=rule.data_index, timestamp=timestamp, query=rule.query)
                    for doc in documents:
                        wdoc = DotAccessibleDict(doc['_source'])
                        self.loginfo(wdoc)
                        group = self._fetch(wdoc, rule.fields)
                        self.generate(group, rule)
                    es.insert_timestamp()
                except Exception, ex:
                    self.logerror(Lazyformat("Elasticsearch operation to {} failed: {}", self.elastic_url, ex))
                sleep(float(self.sleep_time))

        self.loginfo(Lazyformat("Exiting process()"))
Example #28
                    self.logerror(
                        "Couldn't connect to database, maximum retries exceeded"
                    )
                    return
            else:
                sql = rules['query']['query']
                sql = sql.replace("$1", str(cVal))
                self.logdebug(sql)
                result = ibm_db.exec_immediate(self.__objDBConn, sql)
                row = ibm_db.fetch_tuple(result)
                ret = []
                while row:
                    self.loginfo(str(row))
                    ret.append(row)
                    row = ibm_db.fetch_tuple(result)
                self.loginfo(Lazyformat("len ret {} y ref {}", len(ret), ref))
                if len(ret) > 0:
                    cVal = ret[len(ret) - 1][ref]
                    for e in ret:
                        self.loginfo(Lazyformat("-.-->{}", e))
                        self.generate(e)

            time.sleep(tSleep)

    def connectMysql(self):
        host = self._plugin.get("config", "source_ip")
        user = self._plugin.get("config", "user")
        passwd = self._plugin.get("config", "password")
        db = self._plugin.get("config", "db")
        try:
            self.__objDBConn = MySQLdb.connect(host=host,
                                               user=user,
                                               passwd=passwd,
                                               db=db)
        except Exception, e:
            self.logerror(Lazyformat("DB connection failed: {}", e))
            return None
Example #29
    def process(self):
        '''
        Process the snort file.
        '''
        self.__setKeepWorkingValue(True)
        keepWorking = self.__getKeepWorkingValue()
        if self.__pluginConfig.get("config", "linklayer") != "ethernet":
            self.logerror(
                "This kind of snort parser only works for 'ethernet' linklayer. Please update the configuration"
            )
            return
        if int(self.__pluginConfig.get("config", "unified_version")) != 2:
            self.logerror(
                "This kind of snort parser only works for 'UNIFIED 2' version. Please update the configuration"
            )
            return
        self.__filePrefix = self.__pluginConfig.get("config", "prefix")
        if self.__filePrefix == "":
            self.logerror("Invalid prefix used")
            return
        self.__logDirectory = self.__pluginConfig.get("config", "directory")
        self.__lookForFiles()
        #return
        SnortUnpack.startPurgeEventsThread()
        last_valid_position = 0
        last_valid_packet_size = 0
        #testconter = 0
        while keepWorking:
            sleep(0.02)  #to avoid excessive cpu usage when no snort events.
            if self.__currentOpenedLogFile_fd == None:
                if len(self.__logfiles) == 0:
                    # There are no files... waiting for one
                    self.__lookForFiles(True)
                    #and wait for some time...
                    sleep(10)
                    continue
                else:
                    #read the file!
                    self.__currentOpenedLogFile_name = self.__logfiles[0]
                    del self.__logfiles[0]
                    self.__timestamp = self.__currentOpenedLogFile_name[
                        self.__currentOpenedLogFile_name.rindex('.') + 1:]
                    try:
                        self.__currentOpenedLogFile_fd = open(
                            self.__currentOpenedLogFile_name, 'r')
                    except IOError:
                        self.logerror(
                            Lazyformat(
                                "Error reading file {}: it no longer exists",
                                self.__currentOpenedLogFile_name))
                    # For unified (version 2) files there is no dedicated file header. The endianess
                    # is always in NETWORK byte order.
            else:
                #there's an opened file..
                self.logdebug(
                    Lazyformat("Processing file: {}",
                               self.__currentOpenedLogFile_name))
                filestat = os.fstat(self.__currentOpenedLogFile_fd.fileno())
                self.__currentOpenedLogFile_size = filestat[stat.ST_SIZE]
                position = self.__currentOpenedLogFile_fd.tell()
                if self.__skipOldEvents:
                    self.logdebug("Event skipping is enabled")
                    self.__do_skipOldEvents()
                    self.__skipOldEvents = False
                position = self.__currentOpenedLogFile_fd.tell()
                if (position + SNORT_FILE_HEADER_SIZE
                    ) <= self.__currentOpenedLogFile_size:
                    data = self.__currentOpenedLogFile_fd.read(
                        SNORT_FILE_HEADER_SIZE)
                    event_type, size = struct.unpack("!II", data)
                else:
                    self.__tryRotate()
                    continue
                position = self.__currentOpenedLogFile_fd.tell()
                #wait until the packet bytes are written by snort
                max_tries = 10  # Max tries until the data should be there
                while ((position + size) >
                       self.__currentOpenedLogFile_size) and max_tries > 0:
                    self.loginfo("waiting until Snort writes the packet data")
                    filestat = os.fstat(
                        self.__currentOpenedLogFile_fd.fileno())
                    self.__currentOpenedLogFile_size = filestat[stat.ST_SIZE]
                    max_tries = max_tries - 1
                    sleep(0.1)

                if (position + size) <= self.__currentOpenedLogFile_size:
                    data = self.__currentOpenedLogFile_fd.read(size)
                    position = self.__currentOpenedLogFile_fd.tell()
                    if self.snort_events_by_type.has_key(event_type):
                        last_valid_position = position - size - SNORT_FILE_HEADER_SIZE
                        last_valid_packet_size = size
                        if event_type == UNIFIED2_EVENT:  #1
                            SnortUnpack.get_UNIFIED2_EVENT(
                                data)  # --Not information
                        elif event_type == UNIFIED2_PACKET:  #2
                            ev = SnortUnpack.get_Serial_Unified2Packet(
                                data, event_type, size)  #           -- done
                            if ev:
                                self.send_message(ev)
                        elif event_type == EVENT_TYPE_EXTRA_DATA:
                            SnortUnpack.get_EVENT_TYPE_EXTRA_DATA(
                                data
                            )  #4                       -not information
                        elif event_type == UNIFIED2_IDS_EVENT:  #7                                        -- done
                            SnortUnpack.get_Serial_Unified2IDSEvent_legacy(
                                data, event_type, size)
                        elif event_type == UNIFIED2_IDS_EVENT_IPV6:  #72                                        -- done
                            SnortUnpack.get_Serial_Unified2IDSEventIPv6_legacy(
                                data)
                        elif event_type == UNIFIED2_IDS_EVENT_MPLS:  #99                        -not information
                            SnortUnpack.get_UNIFIED2_IDS_EVENT_MPLS(data)
                        elif event_type == UNIFIED2_IDS_EVENT_IPV6_MPLS:  #100                        -not information
                            SnortUnpack.get_UNIFIED2_IDS_EVENT_IPV6_MPLS(data)
                        elif event_type == UNIFIED2_IDS_EVENT_VLAN:  #104                                -- done
                            SnortUnpack.get_Unified2IDSEvent(data)
                        elif event_type == UNIFIED2_IDS_EVENT_IPV6_VLAN:  #105                            --done
                            SnortUnpack.get_Unified2IDSEventIPv6(data)
                        elif event_type == UNIFIED2_EXTRA_DATA:  #110
                            SnortUnpack.get_SerialUnified2ExtraData(data)
                        elif event_type == UNIFIED2_IDS_EVENT_NG:  #207                                  NOT YET INFORMATION
                            SnortUnpack.get_Unified2IDSEventNG(data)
                        elif event_type == UNIFIED2_IDS_EVENT_IPV6_NG:  #208 -                           NOT YET INFORMATION
                            SnortUnpack.get_Unified2IDSEventIPv6_NG(data)
                    else:
                        self.logerror(
                            Lazyformat(
                                "Unknown record type: {}, last valid cursor: {}, last valid packet size: {}, current_cursor: {}, theoric packet size: {}",
                                event_type, last_valid_position,
                                last_valid_packet_size, position, size))
                        self.__currentOpenedLogFile_fd.seek(
                            position, os.SEEK_CUR)
                else:
                    self.loginfo(
                        "Snort log file size is smaller than the packet size... waited long enough, trying to rotate"
                    )
                    #Set the current position of file descriptor fd to position pos, modified by how: SEEK_SET or 0 to set the position relative to the beginning of the file; SEEK_CUR or 1 to set it relative to the current position; os.SEEK_END or 2 to set it relative to the end of the file.
                    self.__currentOpenedLogFile_fd.seek(position, os.SEEK_SET)
                    self.__tryRotate()
                #testconter+=1
            keepWorking = self.__getKeepWorkingValue()
        SnortUnpack.stopPurgeEventsThread()
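
The loop above depends on the unified2 record framing: an 8-byte header holding the record type and payload length in network byte order, followed by that many payload bytes. A minimal standalone reader sketch (decoding the payloads is still SnortUnpack's job; the record-type numbers are the ones noted in the comments above, e.g. 2 for UNIFIED2_PACKET):

import struct

SNORT_FILE_HEADER_SIZE = 8      # two unsigned 32-bit fields: type and length

def iter_unified2_records(fd):
    while True:
        header = fd.read(SNORT_FILE_HEADER_SIZE)
        if len(header) < SNORT_FILE_HEADER_SIZE:
            break                           # truncated or partial header: stop
        event_type, size = struct.unpack("!II", header)
        payload = fd.read(size)
        if len(payload) < size:
            break                           # snort has not finished writing yet
        yield event_type, payload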
Example #30
    def process(self):
        tSleep = DEFAULT_SLEEP
        try:
            tSleep = int(self._plugin.get("config", "sleep"))
        except ValueError:
            self.logerror(
                Lazyformat(
                    "sleep should be an integer number...using default value: {}",
                    DEFAULT_SLEEP))
        if not self._canrun:
            self.loginfo("We can't start the process, missing modules")
            return

        self.loginfo("Starting Database plugin")
        rules = self._plugin.rules()
        run_process = False

        if not self.tryConnectDB():
            self.stop()
            return

        cVal = "NA"
        plugin_source_type = self._plugin.get("config", "source_type")
        while cVal == "NA" and not self.stop_processing:
            cVal = self.runStartQuery(plugin_source_type, rules)
            if cVal == "NA":
                self.loginfo(
                    "No data retrieved in the start quere, retrying in 10 seconds"
                )
                time.sleep(10)
            else:
                run_process = True
        ref = int(rules['query']['ref'])
        while run_process and not self.stop_processing:
            if self._plugin.get("config", "source_type") != "db2":
                try:
                    if self.__myDataBaseCursor:
                        self.__myDataBaseCursor.close()
                    if self.__objDBConn:
                        self.__objDBConn.close()
                except Exception, e:
                    self.loginfo(
                        Lazyformat("Failed to close the cursor: {}", e))

                if self.tryConnectDB():
                    sql = rules['query']['query']
                    sql = sql.replace("$1", str(cVal))
                    self.logdebug(sql)
                    try:
                        self.__myDataBaseCursor.execute(sql)
                        ret = self.__myDataBaseCursor.fetchall()
                    except Exception, e:
                        self.logerror(
                            Lazyformat("DB query failed: {} -> {}", sql, e))
                        time.sleep(1)
                        continue
                    try:
                        if len(ret) > 0:
                            #We have to think about event order when processing
                            cVal = ret[len(ret) - 1][ref]
                            for e in ret:
                                self.generate(e)
                    except Exception, e:
                        self.logerror(
                            Lazyformat("Error building the event: {}", e))
                        time.sleep(tSleep)