Example #1
    def __init__(self, config):
        """
        ToMBL tool. Used to push data to MBL using the Splunk Web API.
        :param config: configuration file
        """
        super().__init__(config, [AlertAction.get('All')])
        self._logger = logging.getLogger("LQMT.ToolName.{0}".format(
            self.getName()))
        self.alerts = {}
        self.sourcetype_lexicon = ['block', 'spearphish', 'malware']

        self._parser = FlexTransformParser(
            {'mbl': 'resources/sampleConfigurations/MBL.cfg'})
        for file_type, source_config in self._config.source_configs.items():
            self._parser.add_parser(file_type, source_config)

        # Create the Splunk API handler once, after the source parsers are registered
        self.splunk_handler = ApiHandler(
            self._config.host,
            self._config.port,
            self._config.username,
            self._config.password,
            cert_check=self._config.cert_check,
            source=self._config.source,
            index=self._config.index,
        )
Example #2
    def __init__(self, config):
        """
        ToFlexText tool. Used to reformat CTI data in a user-configured manner.

        :param config: configuration file
        """
        super().__init__(config, [AlertAction.get('All')])
        self._logger = logging.getLogger("LQMT.FlexText.{0}".format(self.getName()))

        # initialize parser with a dict created with variables from flextext configuration
        self._parser = FlexTransformParser({'CSV': self._config.flext_config})
        for file_type, source_config in self._config.source_configs.items():
            self._parser.add_parser(file_type, source_config)

        self._file_obj = None
        self._processed = []
Example #3
    def setUp(self):
        sysconf = SystemConfig()
        self.sys_config = sysconf.getConfig()
        self.sys_config = self.sys_config['parsers']
        self.flext = FlexTransformParser()

        # Add parsers for the different data formats
        self.flext.add_parser('LQMTools', 'resources/sampleConfigurations/lqmtools.cfg')
        self.flext.add_parser('Cfm13Alert', 'resources/sampleConfigurations/cfm13.cfg')
        self.flext.add_parser('Cfm20Alert', 'resources/sampleConfigurations/cfm20alert.cfg')
        self.flext.add_parser('stixtlp', 'resources/sampleConfigurations/stix_tlp.cfg')
        self.flext.add_parser('STIX', 'resources/sampleConfigurations/stix_tlp.cfg')

        # Shared timestamp so that all meta files use the same time, which makes validation easier
        self.time = str(time.time()).split('.')[0]

        # Parse all the data ahead of the tests
        self.cfm13_parsed_data = self.flext.parse(io.StringIO(CFM13ALERT), self.getMeta("Cfm13Alert"))
        self.cfm20_parsed_data = self.flext.parse(io.StringIO(CFM20ALERT), self.getMeta("Cfm20Alert"))
        self.stix_parsed_data = self.flext.parse(io.StringIO(STIX), self.getMeta("STIX"))
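Note: the getMeta helper called above is not part of this snippet; the full test class later in this listing defines it. A minimal stand-in mirroring that helper might look like the following (the standalone name get_meta is ours; the field values are taken from the full test class):

import time
import uuid

def get_meta(payload_format):
    """Minimal stand-in for the test class's getMeta helper shown later in this listing."""
    return {
        "PayloadFormat": payload_format,  # e.g. "Cfm13Alert", "Cfm20Alert", "STIX"
        "SendingSite": "ANL",
        "PayloadType": "Alert",
        "UploadID": str(uuid.uuid4()).upper(),
        "FileName": "TestAlert",
        "SentTimestamp": str(time.time()).split('.')[0],
    }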
Example #4
class ToFlexText(Tool):
    def __init__(self, config):
        """
        ToFlexText tool. Used to reformat CTI data in a user-configured manner.

        :param config: configuration file
        """
        super().__init__(config, [AlertAction.get('All')])
        self._logger = logging.getLogger("LQMT.FlexText.{0}".format(self.getName()))

        # initialize parser with a dict created with variables from flextext configuration
        self._parser = FlexTransformParser({'CSV': self._config.flext_config})
        for file_type, source_config in self._config.source_configs.items():
            self._parser.add_parser(file_type, source_config)

        self._file_obj = None
        self._processed = []

    def initialize(self):
        super().initialize()

    def process(self, datafile, meta):
        """
        Process function. Handles the processing of data for the tool. Does so by calling the FlexText parser

        :param datafile: String that contains the path to the alert being processed.
        :param meta: meta data of the datafile. Used to assign correct parser
        """
        if self._file_obj is None and not self.openfile():
            self.disable()
        if self.isEnabled():
            if datafile not in self._processed:
                # Use flextext parser to parse datafile.
                self._parser.parseflextext(datafile, meta, self._file_obj, self._config.config_to_str())
                self._processed.append(datafile)

    def openfile(self):
        """
        Creates and opens the file specified in the user configuration, specifically the file_destination variable.
        The file object created here is then passed to the parser.
        """
        try:
            file = self._config.file_destination
            file_dir = os.path.dirname(file)
            if not os.path.exists(file_dir):
                os.makedirs(file_dir, 0o755, True)
            self._file_obj = open(file, 'a', newline='')
            self.writeheader()
            return True
        except Exception as e:
            self._logger.error("Unable to open csv file: {0}".format(self._config.file_destination))
            self._logger.error(e)
            return False

    def writeheader(self):
        """
        When inserting the header using FlexT, the header gets repeated every time a file is processed due to the way
        the config is currently sent over. This (hacky) solution writes the header to the file before any configuration
        data is sent over to FlexT. After the header is written, the header_line value is set to False so that the
        header isn't rewritten when the configuration is passed to FlexT.
        """

        if self._config.header_line:
            quote_style = None
            if self._config.quote_style.lower() == 'none':
                quote_style = csv.QUOTE_NONE
            elif self._config.quote_style.lower() == 'nonnumeric':
                quote_style = csv.QUOTE_NONNUMERIC
            elif self._config.quote_style.lower() == 'all':
                quote_style = csv.QUOTE_ALL
            elif self._config.quote_style.lower() == 'minimal':
                quote_style = csv.QUOTE_MINIMAL

            csv.register_dialect('flext',
                                 delimiter=self._config.delimiter,
                                 quotechar=self._config.quote_char,
                                 escapechar=bytes(self._config.escape_char, "utf-8").decode("unicode_escape"),
                                 doublequote=self._config.double_quote,
                                 lineterminator='\r\n',
                                 quoting=quote_style
                                 )

            writer = csv.DictWriter(self._file_obj, self._config.fields, dialect='flext')

            writer.writeheader()
            self._config.header_line = False

    def commit(self):
        pass

    def cleanup(self):
        if self._file_obj is not None:
            self._file_obj.close()
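As a reference for the writeheader() logic above, here is a minimal standalone sketch of the same dialect registration and one-time header write. The field names, delimiter, escape character, and quoting style are illustrative stand-ins for the user configuration, not values taken from the project:

import csv
import io

# Illustrative stand-ins for the FlexText user configuration
fields = ['indicator', 'indicatorType', 'action']
quote_style_name = 'minimal'  # one of: none, nonnumeric, all, minimal

# Map the configured quoting name onto the csv module's constants
quote_styles = {
    'none': csv.QUOTE_NONE,
    'nonnumeric': csv.QUOTE_NONNUMERIC,
    'all': csv.QUOTE_ALL,
    'minimal': csv.QUOTE_MINIMAL,
}

csv.register_dialect(
    'flext_demo',
    delimiter=',',
    quotechar='"',
    escapechar='\\',
    doublequote=True,
    lineterminator='\r\n',
    quoting=quote_styles[quote_style_name],
)

out = io.StringIO()
writer = csv.DictWriter(out, fields, dialect='flext_demo')
writer.writeheader()           # the header row is written exactly once
print(out.getvalue())          # indicator,indicatorType,action

Writing the header directly like this, before any configuration data reaches FlexT, is what allows header_line to be flipped to False so the header is not repeated for every processed file.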
Example #5
class ToMBL(Tool):
    def __init__(self, config):
        """
        ToMBL tool. Used to push data to MBL using the Splunk Web API.
        :param config: configuration file
        """
        super().__init__(config, [AlertAction.get('All')])
        self._logger = logging.getLogger("LQMT.ToolName.{0}".format(
            self.getName()))
        self.alerts = {}
        self.sourcetype_lexicon = ['block', 'spearphish', 'malware']

        self._parser = FlexTransformParser(
            {'mbl': 'resources/sampleConfigurations/MBL.cfg'})
        for file_type, source_config in self._config.source_configs.items():
            self._parser.add_parser(file_type, source_config)

        # Create the Splunk API handler once, after the source parsers are registered
        self.splunk_handler = ApiHandler(
            self._config.host,
            self._config.port,
            self._config.username,
            self._config.password,
            cert_check=self._config.cert_check,
            source=self._config.source,
            index=self._config.index,
        )

    def initialize(self):
        super().initialize()

    def process(self, alert, meta):
        """
        Process function. Handles the processing of data for the tool. 
        """
        # parse alert using custom parser
        mbl_alert = self._parser.custom_parser(alert, meta['PayloadFormat'],
                                               'mbl')

        if mbl_alert:
            # pop alert data from list
            mbl_alert = mbl_alert.pop()

            # put alert data into a dictionary; the key is generated from a hash of the alert data
            key_hash = self.compute_hash(str(mbl_alert))
            if key_hash not in self.alerts:
                self.alerts[key_hash] = mbl_alert

    def commit(self):
        """
        Commit function where the data is actually transmitted to the Splunk instance.
        """
        for dict_key, alert_data in self.alerts.items():
            sourcetype, message = self.parse_alert_dictionary(alert_data)

            # sourcetype override from user configuration
            if self._config.sourcetype:
                sourcetype = self._config.sourcetype

            # if sourcetype is valid, then send message
            if sourcetype in self.sourcetype_lexicon:
                self.splunk_handler.send_message(message,
                                                 sourcetype=sourcetype)

    def cleanup(self):
        pass

    @staticmethod
    def parse_alert_dictionary(alert_data):
        """
        Method used for extracting the source type and message data from the provided alert data
        :param alert_data: list of dictionaries containing parsed alert data
        :return: returns the sourcetype (str) and the formatted message (str), both derived from alert_data
        """
        message = ""
        sourcetype = ""
        for alert_dictionary in alert_data:
            if 'sourcetype' in alert_dictionary:

                # Extract sourcetype from alert data and delete key from dictionary.
                sourcetype = alert_dictionary['sourcetype'].strip("'")
                del alert_dictionary['sourcetype']

            # format message in key=value format. Replace single quotes with double quotes.
            for key, data in alert_dictionary.items():
                message += "{0}={1} ".format(key, str(data).replace("'", '"'))

        return sourcetype, message

    @staticmethod
    def compute_hash(string):
        """
        Method used for turning an alert string into a hash. Currently being used to sort out duplicate data.
        :param string: alert data as a string
        :return: returns sha256 digest of given string
        """
        return hashlib.sha256(string.encode('utf-8')).hexdigest()
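To make the key=value formatting in parse_alert_dictionary concrete, here is a small usage sketch. The alert contents are hypothetical, and it assumes the ToMBL class above is importable or defined in the current scope:

# Hypothetical parsed alert entry, shaped like the list of dictionaries
# parse_alert_dictionary iterates over; the field names are made up for illustration.
alert_data = [
    {
        'sourcetype': "'spearphish'",
        'indicator': '198.51.100.7',
        'duration': '86400',
    }
]

sourcetype, message = ToMBL.parse_alert_dictionary(alert_data)
print(sourcetype)  # spearphish  (the surrounding single quotes are stripped)
print(message)     # indicator=198.51.100.7 duration=86400  (with a trailing space)

Because 'spearphish' is in sourcetype_lexicon, commit() would forward this message to the Splunk handler unless the user configuration overrides the sourcetype.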
Example #6
class TestParser(TestCase):
    """
    Testing class for the alert parser.
    """

    def setUp(self):
        sysconf = SystemConfig()
        self.sys_config = sysconf.getConfig()
        self.sys_config = self.sys_config['parsers']
        self.flext = FlexTransformParser()

        # Add parsers for the different data formats
        self.flext.add_parser('LQMTools', 'resources/sampleConfigurations/lqmtools.cfg')
        self.flext.add_parser('Cfm13Alert', 'resources/sampleConfigurations/cfm13.cfg')
        self.flext.add_parser('Cfm20Alert', 'resources/sampleConfigurations/cfm20alert.cfg')
        self.flext.add_parser('stixtlp', 'resources/sampleConfigurations/stix_tlp.cfg')
        self.flext.add_parser('STIX', 'resources/sampleConfigurations/stix_tlp.cfg')

        # Shared timestamp so that all meta files use the same time, which makes validation easier
        self.time = str(time.time()).split('.')[0]

        # Parse all the data ahead of the tests
        self.cfm13_parsed_data = self.flext.parse(io.StringIO(CFM13ALERT), self.getMeta("Cfm13Alert"))
        self.cfm20_parsed_data = self.flext.parse(io.StringIO(CFM20ALERT), self.getMeta("Cfm20Alert"))
        self.stix_parsed_data = self.flext.parse(io.StringIO(STIX), self.getMeta("STIX"))

    def getMeta(self, payloadFormat):
        """
        Method for generating metadata. Relatively static for now, but can be expanded to be randomized on each test run

        :param payloadFormat: Expected format of the payload.
        :return: Returns metadata in a dictionary
        """
        meta = {
            "PayloadFormat": payloadFormat,
            "SendingSite": "ANL",
            "PayloadType": "Alert",
            "UploadID": str(uuid.uuid4()).upper(),
            "FileName": "TestAlert",
            "SentTimestamp": self.time,
        }

        return meta

    # CFM13 format tests
    def test_cfm13_content_returned(self):
        self.assertEqual(len(self.cfm13_parsed_data), 1)

    def test_cfm13_indicator(self):
        self.assertEqual(self.cfm13_parsed_data[0]._indicator, "10.10.10.10")

    def test_cfm13_indicator_type(self):
        self.assertEqual(self.cfm13_parsed_data[0]._indicatorType, "IPv4Address")

    def test_cfm13_action(self):
        self.assertEqual(self.cfm13_parsed_data[0]._action1, "Block")

    def test_cfm13_duration(self):
        self.assertEqual(self.cfm13_parsed_data[0]._duration1, "86400")
        self.assertIsNone(self.cfm13_parsed_data[0]._duration2)

    def test_cfm13_sensitivity(self):
        self.assertEqual(self.cfm13_parsed_data[0]._sensitivity, "noSensitivity")

    def test_cfm13_restriction(self):
        self.assertEqual(self.cfm13_parsed_data[0]._restriction, "AMBER")

    # CFM20 format tests
    def test_cfm20_content_returned(self):
        self.assertEqual(len(self.cfm20_parsed_data), 1)

    def test_cfm20_indicator(self):
        self.assertEqual(self.cfm20_parsed_data[0]._indicator, "8675:a289:5:102c::bd8:baac")

    def test_cfm20_indicator_type(self):
        self.assertEqual(self.cfm20_parsed_data[0]._indicatorType, "IPv6Address")

    def test_cfm20_action(self):
        self.assertEqual(self.cfm20_parsed_data[0]._action1, "Block")

    def test_cfm20_duration(self):
        self.assertEqual(self.cfm20_parsed_data[0]._duration1, "86400")
        self.assertIsNone(self.cfm20_parsed_data[0]._duration2)

    def test_cfm20_sensitivity(self):
        self.assertEqual(self.cfm20_parsed_data[0]._sensitivity, "noSensitivity")

    def test_cfm20_restriction(self):
        self.assertIsNone(self.cfm20_parsed_data[0]._restriction)

    # STIX format tests
    def test_stix_content_returned(self):
        self.assertEqual(len(self.stix_parsed_data), 11)

    def test_stix_indicator(self):
        self.assertEqual(self.stix_parsed_data[1]._indicator, "13.13.13.13")
        self.assertEqual(self.stix_parsed_data[9]._indicator, "bad.domain.be/poor/path")

    def test_stix_indicator_type(self):
        self.assertEqual(self.stix_parsed_data[1]._indicatorType, "IPv4Address")
        self.assertEqual(self.stix_parsed_data[5]._indicatorType, "FilePath")

    def test_stix_action(self):
        self.assertEqual(self.stix_parsed_data[1]._action1, "Block")

    def test_stix_duration(self):
        self.assertEqual(self.stix_parsed_data[1]._duration1, "86400")
        self.assertIsNone(self.stix_parsed_data[1]._duration2)

    def test_stix_sensitivity(self):
        self.assertEqual(self.stix_parsed_data[1]._sensitivity, "noSensitivity")

    def test_stix_restriction(self):
        self.assertIsNone(self.stix_parsed_data[1]._restriction)