def get(self, rule_uuid):
    """Handles GET request to the resource.

    Args:
        rule_uuid: uuid of the sigma rule

    Returns:
        JSON sigma rule
    """
    return_rule = None
    try:
        sigma_rules = ts_sigma_lib.get_all_sigma_rules()
    except ValueError as e:
        logger.error(
            'OS Error, unable to get the path to the Sigma rules',
            exc_info=True)
        abort(HTTP_STATUS_CODE_NOT_FOUND, f'ValueError {e}')

    for rule in sigma_rules:
        if rule is not None and rule_uuid == rule.get('id'):
            return_rule = rule

    if return_rule is None:
        abort(
            HTTP_STATUS_CODE_NOT_FOUND, 'No sigma rule found with this ID.')

    meta = {
        'current_user': current_user.username,
        'rules_count': len(sigma_rules),
    }
    return jsonify({'objects': [return_rule], 'meta': meta})

def get(self, rule_uuid):
    """Handles GET request to the resource.

    Args:
        rule_uuid: uuid of the sigma rule

    Returns:
        JSON sigma rule
    """
    return_rule = None
    try:
        sigma_rules = ts_sigma_lib.get_all_sigma_rules()
    except ValueError:
        logger.error(
            'OS Error, unable to get the path to the Sigma rules',
            exc_info=True)
        abort(
            HTTP_STATUS_CODE_NOT_FOUND,
            'OS Error, unable to get the path to the Sigma rules')

    for rule in sigma_rules:
        if rule is not None and rule_uuid == rule.get('id'):
            return_rule = rule

    if return_rule is None:
        abort(
            HTTP_STATUS_CODE_NOT_FOUND, 'No sigma rule found with this ID.')

    return return_rule

def get(self, rule_uuid):
    """Handles GET request to the resource.

    Args:
        rule_uuid: uuid of the sigma rule

    Returns:
        JSON sigma rule
    """
    return_rule = None
    try:
        sigma_rules = ts_sigma_lib.get_all_sigma_rules()
    except ValueError as e:
        logger.error(
            "OS Error, unable to get the path to the Sigma rules",
            exc_info=True)
        abort(HTTP_STATUS_CODE_NOT_FOUND, f"ValueError {e}")

    for rule in sigma_rules:
        if rule is not None and rule_uuid == rule.get("id"):
            return_rule = rule

    if return_rule is None:
        abort(HTTP_STATUS_CODE_NOT_FOUND, "No sigma rule found with this ID.")

    meta = {
        "current_user": current_user.username,
        "rules_count": len(sigma_rules),
    }
    return jsonify({"objects": [return_rule], "meta": meta})

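# A minimal sketch of how the single-rule resource above could be exercised
# with Flask's test client. The URL path and the authenticated `client`
# fixture are assumptions for illustration; the real Timesketch blueprint may
# register the resource under a different route.
def fetch_sigma_rule(client, rule_uuid):
    """Fetches one Sigma rule by UUID and returns the decoded JSON body."""
    # Assumed route; adjust to match the actual resource registration.
    response = client.get('/api/v1/sigma/rule/{0:s}/'.format(rule_uuid))
    assert response.status_code == 200
    return response.get_json()
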
def get_kwargs():
    """Returns a list of all Sigma rules of Timesketch.

    Returns:
        sigma_rules: All Sigma rules, one dict per rule.
    """
    sigma_rules = [
        {'rule': rule} for rule in ts_sigma_lib.get_all_sigma_rules()
    ]
    return sigma_rules

def get_kwargs():
    """Returns a list of all Sigma rules of Timesketch.

    Returns:
        sigma_rules: All Sigma rules, one dict per rule.
    """
    sigma_rules = []
    for rule in ts_sigma_lib.get_all_sigma_rules():
        sigma_rules.append({"rule": rule})
    return sigma_rules

def get(self):
    """Handles GET request to the resource.

    Returns:
        Dict of sigma rules
    """
    sigma_rules = []
    try:
        sigma_rules = ts_sigma_lib.get_all_sigma_rules()
    except ValueError:
        logger.error(
            'OS Error, unable to get the path to the Sigma rules',
            exc_info=True)
        abort(
            HTTP_STATUS_CODE_NOT_FOUND,
            'OS Error, unable to get the path to the Sigma rules')

    meta = {
        'current_user': current_user.username,
        'rules_count': len(sigma_rules),
    }
    return jsonify({'objects': sigma_rules, 'meta': meta})

def get(self):
    """Handles GET request to the resource.

    Returns:
        Dict of sigma rules
    """
    sigma_rules = []
    try:
        sigma_rules = ts_sigma_lib.get_all_sigma_rules()
    except ValueError as e:
        logger.error(
            "OS Error, unable to get the path to the Sigma rules",
            exc_info=True)
        abort(HTTP_STATUS_CODE_NOT_FOUND, f"Value Error, {e}")

    # TODO: idea for meta: add a list of folders that have been parsed
    meta = {
        "current_user": current_user.username,
        "rules_count": len(sigma_rules),
    }
    return jsonify({"objects": sigma_rules, "meta": meta})

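# A rough illustration of the payload shape produced by the list resource
# above. The rule keys shown ('id', 'file_name', 'es_query', 'tags') mirror
# the keys referenced elsewhere in this code; the concrete values are
# placeholders, not real rules.
EXAMPLE_SIGMA_LIST_RESPONSE = {
    'objects': [
        {
            'id': '5266a592-b793-11ea-b3de-0242ac130004',  # placeholder UUID
            'file_name': 'lnx_susp_zmap',                  # placeholder name
            'es_query': '<query rendered from the Sigma rule>',
            'tags': ['attack.discovery'],                  # placeholder tags
        },
    ],
    'meta': {
        'current_user': 'dev',
        'rules_count': 1,
    },
}
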
def run(self):
    """Entry point for the analyzer.

    Returns:
        String with summary of the analyzer result.
    """
    tags_applied = {}
    sigma_rule_counter = 0
    sigma_rules = ts_sigma_lib.get_all_sigma_rules()
    if sigma_rules is None:
        # Bail out early instead of iterating over None below.
        logger.error('No Sigma rules found. Check SIGMA_RULES_FOLDERS')
        return 'No Sigma rules found. Check SIGMA_RULES_FOLDERS'

    problem_strings = []
    output_strings = []

    for rule in sigma_rules:
        tags_applied[rule.get('file_name')] = 0
        try:
            sigma_rule_counter += 1
            tagged_events_counter = self.run_sigma_rule(
                rule.get('es_query'), rule.get('file_name'),
                tag_list=rule.get('tags'))
            tags_applied[rule.get('file_name')] += tagged_events_counter
            if sigma_rule_counter % 10 == 0:
                logger.debug('Rule {0:d}/{1:d}'.format(
                    sigma_rule_counter, len(sigma_rules)))
        except elasticsearch.TransportError as e:
            logger.error(
                'Timeout executing search for {0:s}: '
                '{1!s}, waiting before retrying'.format(
                    rule.get('file_name'), e), exc_info=True)
            # This is caused by too many ES queries in a short time range.
            # TODO: https://github.com/google/timesketch/issues/1782
            sleep_time = current_app.config.get('SIGMA_TAG_DELAY', 15)
            time.sleep(sleep_time)
            tagged_events_counter = self.run_sigma_rule(
                rule.get('es_query'), rule.get('file_name'),
                tag_list=rule.get('tags'))
            tags_applied[rule.get('file_name')] += tagged_events_counter
        # Wide exception handling since there are multiple exceptions that
        # can be raised by the underlying sigma library.
        except:  # pylint: disable=bare-except
            logger.error(
                'Problem with rule in file {0:s}: '.format(
                    rule.get('file_name')), exc_info=True)
            problem_strings.append('* {0:s}'.format(rule.get('file_name')))
            continue

    total_tagged_events = sum(tags_applied.values())
    output_strings.append('Applied {0:d} tags'.format(total_tagged_events))

    if sigma_rule_counter > 0:
        self.add_sigma_match_view(sigma_rule_counter)

    if len(problem_strings) > 0:
        output_strings.append('Problematic rules:')
        output_strings.extend(problem_strings)

    return '\n'.join(output_strings)

def run(self):
    """Entry point for the analyzer.

    Returns:
        String with summary of the analyzer result.
    """
    tags_applied = {}
    sigma_rule_counter = 0
    sigma_rules = ts_sigma_lib.get_all_sigma_rules()
    if sigma_rules is None:
        # Bail out early instead of iterating over None below.
        logger.error('No Sigma rules found. Check SIGMA_RULES_FOLDERS')
        return 'No Sigma rules found. Check SIGMA_RULES_FOLDERS'

    problem_strings = []
    output_strings = []

    for rule in sigma_rules:
        tags_applied[rule.get('file_name')] = 0
        try:
            sigma_rule_counter += 1
            tagged_events_counter = self.run_sigma_rule(
                rule.get('es_query'), rule.get('file_name'))
            tags_applied[rule.get('file_name')] += tagged_events_counter
        except elasticsearch.TransportError as e:
            logger.error(
                'Timeout executing search for {0:s}: '
                '{1!s} waiting for 10 seconds'.format(
                    rule.get('file_name'), e), exc_info=True)
            # This is caused by too many ES queries in a short time range,
            # thus waiting for 10 seconds before sending the next one.
            time.sleep(10)
        # This except block is deliberately very broad, as one bad rule could
        # otherwise stop the whole analyzer run; problematic rules are
        # collected and reported in the output instead.
        except:  # pylint: disable=bare-except
            logger.error(
                'Problem with rule in file {0:s}: '.format(
                    rule.get('file_name')), exc_info=True)
            problem_strings.append('* {0:s}'.format(rule.get('file_name')))
            continue

    total_tagged_events = sum(tags_applied.values())
    output_strings.append('Applied {0:d} tags'.format(total_tagged_events))
    for tag_name, tagged_events_counter in tags_applied.items():
        output_strings.append('* {0:s}: {1:d}'.format(
            tag_name, tagged_events_counter))

    if sigma_rule_counter > 0:
        view = self.sketch.add_view(
            view_name='Sigma Rule matches',
            analyzer_name=self.NAME,
            query_string='tag:"sigma*"')
        agg_params = {
            'field': 'tag',
            'limit': 20,
            'index': [self.timeline_id],
        }
        agg_obj = self.sketch.add_aggregation(
            name='Top 20 Sigma tags',
            agg_name='field_bucket',
            agg_params=agg_params,
            view_id=view.id,
            chart_type='hbarchart',
            description='Created by the Sigma analyzer')

        story = self.sketch.add_story('Sigma Rule hits')
        story.add_text(utils.SIGMA_STORY_HEADER, skip_if_exists=True)
        story.add_text(
            '## Sigma Analyzer.\n\nThe Sigma '
            'analyzer takes events and matches them with Sigma rules. '
            'In this timeline the analyzer discovered {0:d} '
            'Sigma tags.\n\nThis is a summary of '
            'its findings.'.format(sigma_rule_counter))
        story.add_text(
            'The top 20 most commonly discovered tags were:')
        story.add_aggregation(agg_obj)
        story.add_text(
            'And an overview of all the discovered search terms:')
        story.add_view(view)

    if problem_strings:
        output_strings.append('Problematic rules:')
        output_strings.extend(problem_strings)

    return '\n'.join(output_strings)

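# The two run() variants above delegate the per-rule work to
# self.run_sigma_rule(). Below is a minimal sketch of what such a helper
# might look like, assuming the standard Timesketch analyzer interface
# (self.event_stream(), event.add_tags(), event.commit()); the tag naming is
# illustrative and this is not the implementation shipped with the analyzer.
def run_sigma_rule(self, query, rule_name, tag_list=None):
    """Runs a single Sigma rule query and tags matching events.

    Args:
        query: Elasticsearch query string produced from the Sigma rule.
        rule_name: name of the rule file, used as part of the tag.
        tag_list: optional list of tags taken from the rule itself.

    Returns:
        Number of events that were tagged.
    """
    if not tag_list:
        tag_list = []
    tagged_events_counter = 0
    events = self.event_stream(query_string=query)
    for event in events:
        # Tag with a rule-derived marker plus the rule's own tags.
        event.add_tags(['sigma_{0:s}'.format(rule_name)])
        event.add_tags(tag_list)
        event.commit()
        tagged_events_counter += 1
    return tagged_events_counter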