Example #1
0
File: qradar.py  Project: iaji/ACE-1
    def __init__(self, *args, **kwargs):
        """Initializes the QRadar analysis module from its configuration section.

        Loads the AQL query text, the before/after relative time ranges,
        the event-field -> observable-type mapping, the optional event time
        field settings, and the optional correlation delay.
        """
        super().__init__(*args, **kwargs)

        # the API class we use to communicate with QRadar
        # this can also be a unit testing class (injected via api_class=...)
        self.api_class = kwargs.get('api_class', QRadarAPIClient)

        # load the AQL query for this instance
        with open(abs_path(self.config['aql_path']), 'r') as fp:
            self.aql_query = fp.read()

        # each query can specify its own range, falling back to the
        # global [qradar] settings when not set on this instance
        if 'relative_duration_before' in self.config:
            self.relative_duration_before = create_timedelta(
                self.config['relative_duration_before'])
        else:
            self.relative_duration_before = create_timedelta(
                saq.CONFIG['qradar']['relative_duration_before'])

        if 'relative_duration_after' in self.config:
            self.relative_duration_after = create_timedelta(
                self.config['relative_duration_after'])
        else:
            self.relative_duration_after = create_timedelta(
                saq.CONFIG['qradar']['relative_duration_after'])

        # load the observable mapping for this query
        # NOTE that the keys (event field names) are case sensitive
        self.observable_mapping = {}  # key = event field name, value = observable_type
        for key in self.config.keys():
            if key.startswith('map_'):
                # expected value format: event_field = observable_type
                # maxsplit=1 so an observable type containing '=' cannot
                # break the 2-tuple unpacking (previously maxsplit=2 could
                # produce three parts and raise ValueError)
                try:
                    event_field, observable_type = [
                        _.strip() for _ in self.config[key].split('=', 1)
                    ]
                except ValueError:
                    logging.error(
                        f"invalid observable mapping syntax for {key} in {self}")
                    continue

                if observable_type not in VALID_OBSERVABLE_TYPES:
                    logging.error(
                        f"invalid observable type specified for observable mapping "
                        f"{key} in {self}: {observable_type}")
                    continue

                self.observable_mapping[event_field] = observable_type

        # the configuration can specify what field should be used as the event time
        # by default this is disabled, in which case the observables are non-temporal
        self.time_event_field = self.config.get('time_event_field', None)

        # the format of the time can also be specified in strptime syntax
        # the special value TIMESTAMP indicates a unix timestamp (this is the default)
        # the special value TIMESTAMP_MILLISECONDS indicates a unix timestamp in milliseconds
        self.time_event_field_format = self.config.get(
            'time_event_field_format', 'TIMESTAMP')

        # are we delaying QRadar correlational queries?
        self.correlation_delay = None
        if 'correlation_delay' in saq.CONFIG['qradar']:
            self.correlation_delay = create_timedelta(
                saq.CONFIG['qradar']['correlation_delay'])
Example #2
0
    def create_test_file(self,
                         file_path='.unittest_test_data',
                         file_content=None,
                         root_analysis=None):
        """Creates a test file and returns the path to the newly created file.
           Any file created this way is automatically deleted after the test runs.
           If file_path is relative then the file is created relative to SAQ_HOME.
           If root_analysis is a RootAnalysis object then file_path is created relative to the storage_dir of this analysis.
           If file_content is not None then it is used as the content of the file.
           Otherwise, 1024 random bytes are used."""

        if not os.path.isabs(file_path):
            if root_analysis:
                target_file_path = os.path.join(root_analysis.storage_dir,
                                                file_path)
            else:
                target_file_path = abs_path(file_path)
        else:
            # absolute paths are used as-is
            # (previously this case left target_file_path unassigned,
            # raising UnboundLocalError at the open() below)
            target_file_path = file_path

        # string content is written in text mode; anything else
        # (including the random default) in binary mode
        mode = 'wb'
        if isinstance(file_content, str):
            mode = 'w'

        with open(target_file_path, mode) as fp:
            # "is not None" so an explicitly empty string still creates an
            # empty text file instead of writing random binary data
            if file_content is not None:
                fp.write(file_content)
            else:
                fp.write(secrets.token_bytes(1024))

        # track the file for automatic deletion after the test run
        self.tracked_test_files.append(target_file_path)
        return file_path
Example #3
0
    def load_hunts_from_config(self):
        """Loads the hunts from the configuration settings.
           Returns True if all of the hunts were loaded correctly, False if any errors occurred."""
        all_loaded = True  # flips to False on the first error
        for rule_dir in self.rule_dirs:
            rule_dir = abs_path(rule_dir)
            if not os.path.isdir(rule_dir):
                logging.error(f"rules directory {rule_dir} specified for {self} is not a directory")
                all_loaded = False
                continue

            # load each .ini file found in this rules directory
            logging.debug(f"searching {rule_dir} for hunt configurations")
            for root, dirnames, filenames in os.walk(rule_dir):
                for hunt_config in filenames:
                    if not hunt_config.endswith('.ini'):
                        continue

                    hunt_config = os.path.join(root, hunt_config)
                    hunt = self.hunt_cls()
                    logging.debug(f"loading hunt from {hunt_config}")
                    try:
                        hunt.load_from_ini(hunt_config)
                    except Exception as e:
                        # one broken ini must not prevent the remaining
                        # hunts from loading
                        logging.error(f"unable to load hunt from {hunt_config}: {e}")
                        all_loaded = False
                        continue

                    hunt.type = self.hunt_type
                    logging.info(f"loaded {hunt} from {hunt_config}")
                    self.add_hunt(hunt)

        # remember that we loaded the hunts from the configuration file
        # this is used when we receive the signal to reload the hunts
        self.hunts_loaded_from_config = True
        # previously this returned None despite the documented True/False contract
        return all_loaded
Example #4
0
    def load_query_from_file(self, path):
        """Loads and returns the search query stored in the file at the given path.

        Comments are stripped from the query text when self.strip_comments is set.
        """
        # NOTE: previously this ignored the path argument and always read
        # self.search_query_path
        with open(abs_path(path), 'r') as fp:
            result = fp.read()

        if self.strip_comments:
            result = COMMENT_REGEX.sub('', result)

        return result
Example #5
0
    def __init__(self, *args, **kwargs):
        """Initializes the configuration, wiring MySQL SSL options into SQLAlchemy.

        Setting any of ssl_ca, ssl_cert or ssl_key in the [database_ace]
        config section enables SSL; only the options actually configured
        are passed through to the connection arguments.
        """
        super().__init__(*args, **kwargs)

        # are we using SSL for MySQL connections? (you should be)
        section = saq.CONFIG['database_ace']
        if section.get('ssl_ca', fallback=None) \
        or section.get('ssl_cert', fallback=None) \
        or section.get('ssl_key', fallback=None):
            ssl_options = {}
            # previously ssl_ca was indexed unconditionally here, raising
            # KeyError when only ssl_cert/ssl_key were configured
            if section.get('ssl_ca', fallback=None):
                ssl_options['ca'] = abs_path(section['ssl_ca'])
            if section.get('ssl_cert', fallback=None):
                ssl_options['cert'] = abs_path(section['ssl_cert'])
            if section.get('ssl_key', fallback=None):
                ssl_options['key'] = abs_path(section['ssl_key'])
            self.SQLALCHEMY_DATABASE_OPTIONS['connect_args'][
                'ssl'] = ssl_options
Example #6
0
    def query(self):
        """Returns the query to execute, loading it from disk on first use."""
        # serve the cached copy when we've already loaded it
        if self._query is not None:
            return self._query

        with open(abs_path(self.search_query_path), 'r') as fp:
            query_text = fp.read()

        if self.strip_comments:
            query_text = COMMENT_REGEX.sub('', query_text)

        # cache for subsequent calls
        self._query = query_text
        return self._query
Example #7
0
    def __init__(self, *args, **kwargs):
        """Initializes the Falcon SIEM collector service.

        Reads the collector log path from configuration, restores the last
        processed message offset from the persistence directory, and maps
        event types to their handler methods.
        """
        super().__init__(service_config=saq.CONFIG['service_falcon_collector'],
                         workload_type='falcon',
                         delete_files=True,
                         *args,
                         **kwargs)

        # location of the falcon siem collector output file
        self.siem_collector_log_path = abs_path(
            saq.CONFIG['falcon']['siem_collector_log_path'])

        # every JSON entry seems to contain an "offset" field in the "metadata" section
        # looks like it's basically a unique ID for the message that increments
        # we'll use this to make sure we don't reprocess messages
        self._last_offset = 0
        # persisted offset lives under persistence_dir
        # NOTE(review): persistence_dir is presumably set by super().__init__ — confirm
        self.last_offset_path = os.path.join(self.persistence_dir,
                                             'falcon_last_offset')

        # the open file descriptor for the log file
        self.siem_fp = None
        # the inode of the file when we opened it
        # when this changes it means the log file rotated
        self.siem_inode = None
        # if the file size is SMALLER then the file probably got over-written
        self.siem_fs = None

        # the current JSON buffer
        self.json_buffer = []

        # restore the last processed offset if we have one persisted;
        # a corrupt/unreadable file falls back to 0 (reprocess from start)
        if os.path.exists(self.last_offset_path):
            with open(self.last_offset_path) as fp:
                try:
                    self._last_offset = int(fp.read())
                except Exception as e:
                    logging.error(
                        f"unable to read last offset from {self.last_offset_path}: {e}"
                    )
                    self._last_offset = 0

        # for tool_instance
        self.hostname = socket.getfqdn()

        # map event types to the functions that handle them
        self.event_type_map = {
            EVENT_TYPE_DETECTION: self.process_detection_event,
        }
Example #8
0
File: hunter.py  Project: iaji/ACE-1
    def _list_hunt_ini(self):
        """Returns the list of ini files for hunts in self.rule_dirs."""
        result = []
        for rule_dir in self.rule_dirs:
            rule_dir = abs_path(rule_dir)
            if not os.path.isdir(rule_dir):
                logging.error(
                    f"rules directory {rule_dir} specified for {self} is not a directory"
                )
                continue

            # load each .ini file found in this rules directory
            logging.debug(f"searching {rule_dir} for hunt configurations")
            for root, dirnames, filenames in os.walk(rule_dir):
                for hunt_config in filenames:
                    if not hunt_config.endswith('.ini'):
                        continue

                    result.append(os.path.join(root, hunt_config))

        return result
Example #9
0
    def load_hunts_from_config(self, hunt_filter=lambda hunt: True):
        """Loads the hunts from the configuration settings.
           Returns True if all of the hunts were loaded correctly, False if any errors occurred.
           The hunt_filter parameter defines an optional lambda function that takes the Hunt object
           after it is loaded and returns True if the Hunt should be added, False otherwise.
           This is useful for unit testing."""
        all_loaded = True  # flips to False on the first error
        for rule_dir in self.rule_dirs:
            rule_dir = abs_path(rule_dir)
            if not os.path.isdir(rule_dir):
                logging.error(
                    f"rules directory {rule_dir} specified for {self} is not a directory"
                )
                all_loaded = False
                continue

            # load each .ini file found in this rules directory
            logging.debug(f"searching {rule_dir} for hunt configurations")
            for root, dirnames, filenames in os.walk(rule_dir):
                for hunt_config in filenames:
                    if not hunt_config.endswith('.ini'):
                        continue

                    hunt_config = os.path.join(root, hunt_config)
                    hunt = self.hunt_cls()
                    logging.debug(f"loading hunt from {hunt_config}")
                    try:
                        hunt.load_from_ini(hunt_config)
                    except Exception as e:
                        # one broken ini must not prevent the remaining
                        # hunts from loading
                        logging.error(f"unable to load hunt from {hunt_config}: {e}")
                        all_loaded = False
                        continue

                    hunt.type = self.hunt_type
                    if hunt_filter(hunt):
                        logging.info(f"loaded {hunt} from {hunt_config}")
                        self.add_hunt(hunt)
                    else:
                        logging.debug(
                            f"not loading {hunt} (hunt_filter returned False)")

        # remember that we loaded the hunts from the configuration file
        # this is used when we receive the signal to reload the hunts
        self.hunts_loaded_from_config = True
        # previously this returned None despite the documented True/False contract
        return all_loaded
Example #10
0
 def verify_environment(self):
     """Verifies the required configuration keys and the AQL query file exist."""
     # every required configuration key must be present
     for required_key in ('question', 'summary', 'aql_path'):
         self.verify_config_exists(required_key)

     # the AQL query file itself must exist on disk
     self.verify_path_exists(abs_path(self.config['aql_path']))