def __init__(self, pattern: Pattern, data_model_mapper, options, transformers):
    """Build a Guardium query translator for the given STIX pattern.

    The pattern is parsed eagerly: ``parse_expression`` runs during
    construction and its result is stored on ``self.translated``.
    """
    self.dmm = data_model_mapper
    self.pattern = pattern

    # report_params_passed is a JSON object pointing to an array of JSON
    # objects (report_params_array).
    self.report_params_passed = {}
    self.report_params_array = []
    self.report_params_array_size = 0

    # qsearch_params_passed mirrors the same layout for quick-search params.
    self.qsearch_params_passed = {}
    self.qsearch_params_array = []
    self.qsearch_params_array_size = 0

    # NOTE(review): the pattern is parsed before the definition files below
    # are loaded -- confirm parse_expression does not depend on them.
    self.translated = self.parse_expression(pattern)
    self.transformers = transformers

    # Reference data consulted while building queries.
    self.REFERENCE_DATA_TYPES = read_json('reference_data_types4Query', options)
    # Report definitions and the report-parameter mapping.
    self.REPORT_DEF = read_json('guardium_reports_def', options)
    self.REPORT_PARAMS_MAP = read_json('guardium_report_params_map', options)
    # Quick-search definitions and the quick-search parameter mapping.
    self.QSEARCH_DEF = read_json('guardium_qsearch_def', options)
    self.QSEARCH_PARAMS_MAP = read_json('guardium_qsearch_params_map', options)
def __init__(self, pattern: Pattern, data_model_mapper, options, transformers):
    """Build a query translator for the given STIX pattern.

    Parsing is eager: ``parse_expression`` is called here and the result
    kept on ``self.translated``.
    """
    self.dmm = data_model_mapper
    self.pattern = pattern
    self.logger = logger.set_logger(__name__)

    # report_params_passed is a JSON object pointing to an array of JSON
    # objects (report_params_array).
    self.report_params_passed = {}
    self.report_params_array = []
    self.report_params_array_size = 0

    # qsearch_params_passed mirrors the same layout for quick-search params.
    self.qsearch_params_passed = {}
    self.qsearch_params_array = []
    self.qsearch_params_array_size = 0

    # NOTE(review): parsing happens before the definition files below are
    # loaded -- confirm parse_expression does not depend on them.
    self.translated = self.parse_expression(pattern)
    self.transformers = transformers

    # Report definitions and the report-parameter mapping.
    self.REPORT_DEF = read_json('guardium_reports_def', options)
    self.REPORT_PARAMS_MAP = read_json('guardium_report_params_map', options)
    # Quick-search definitions and the quick-search parameter mapping.
    self.QSEARCH_DEF = read_json('guardium_qsearch_def', options)
    self.QSEARCH_PARAMS_MAP = read_json('guardium_qsearch_params_map', options)
def _parse_comparison_expression(self, expression, qualifier=None):
    """Translate one STIX comparison expression into an AQL fragment.

    :param expression: STIX comparison expression node (object_path,
        comparator, value, negated)
    :param qualifier: optional START/STOP qualifier; when present the
        finished query is appended to ``self.qualified_queries`` and an
        empty string is returned
    :return: AQL comparison string, or '' when a qualifier consumed it
    :raises KeyError: when a ``protocols[*]`` value is not in the
        network-protocol map
    """
    # Resolve the STIX object path to field(s) in the target data model.
    stix_object, stix_field = expression.object_path.split(':')
    # Multiple QRadar fields may map to the same STIX object/field.
    mapped_fields_array = self.dmm.map_field(stix_object, stix_field)
    # Resolve the comparison symbol to use in the query string.
    # NOTE(review): _lookup_comparison_operator is called with an explicit
    # self argument -- it appears to be a static method; confirm before
    # changing the call shape.
    comparator = self._lookup_comparison_operator(self, expression.comparator)

    # Special case: LIKE on artifact:payload_bin becomes an AQL TEXT SEARCH.
    if expression.comparator == ComparisonComparators.Like and (
            expression.object_path == 'artifact:payload_bin'):
        return "TEXT SEARCH '{}'".format(expression.value)

    # Special case: wildcard finding name means "any risk finding".
    if stix_object == 'x-ibm-finding' and stix_field == 'name' and expression.value == "*":
        return "devicetype = 18"

    if stix_field == 'protocols[*]':
        map_data = read_json('network_protocol_map', self.options)
        try:
            expression.value = map_data[expression.value.lower()]
        except Exception as protocol_key:
            # Fix: chain the original exception so the underlying lookup
            # failure is not silently discarded from the traceback.
            raise KeyError("Network protocol {} is not supported.".format(
                protocol_key)) from protocol_key
    elif stix_field == 'start' or stix_field == 'end':
        transformer = TimestampToMilliseconds()
        expression.value = transformer.transform(expression.value)

    # Some values are formatted differently based on how they're compared.
    if expression.comparator == ComparisonComparators.Matches:
        # Needs forward slashes.
        value = self._format_match(expression.value)
    elif expression.comparator == ComparisonComparators.In:
        # Should be (x, y, z, ...).
        value = self._format_in(expression.value)
    elif expression.comparator in (ComparisonComparators.Equal,
                                   ComparisonComparators.NotEqual):
        # Should be in single-quotes.
        value = self._format_equality(expression.value)
    elif expression.comparator == ComparisonComparators.Like and not (
            expression.object_path == 'artifact:payload_bin'):
        # '%' -> '*' wildcard, '_' -> '?' single-character wildcard.
        value = self._format_like(expression.value)
    else:
        value = self._escape_value(expression.value)

    # NOTE(review): _parse_mapped_fields also takes an explicit self -- same
    # static-method pattern as above.
    comparison_string = self._parse_mapped_fields(self, expression, value,
                                                  comparator, stix_field,
                                                  mapped_fields_array)
    if len(mapped_fields_array) > 1 and not self._is_reference_value(stix_field):
        # More than one AQL field maps to the STIX attribute, so group the ORs.
        comparison_string = "({})".format(comparison_string)
    if expression.negated:
        comparison_string = self._negate_comparison(comparison_string)
    if qualifier:
        self.qualified_queries.append("{} limit {} {}".format(
            comparison_string, self.result_limit, qualifier))
        return ''
    return "{}".format(comparison_string)
def __init__(self, pattern: Pattern, data_model_mapper, options):
    """Parse the STIX pattern and post-process the split query strings.

    The translated pattern is split on the literal token "split"; each
    piece then has its START/STOP qualifiers rewritten and its timestamps
    reformatted from '2014-04-25T15:51:20Z' to '2014-04-25 15:51:20'.
    """
    self.dmm = data_model_mapper
    self.comparator_lookup = self.dmm.map_comparator()
    self.pattern = pattern
    self.translated = self.parse_expression(pattern)
    self.mapping_network_protocol = read_json('network_protocol_map', options)
    query_split = self.translated.split("split")
    # Fix: logging uses lazy %-style formatting, not str.format braces --
    # the original "Query {}" placeholder was never substituted.
    LOGGER.info("Query %s", query_split)
    if len(query_split) > 1:
        # Remove empty strings and trailing whitespace from each piece.
        query_array = list(map(lambda x: x.rstrip(), list(filter(None, query_split))))
        # Fix: regex literals are raw strings; "\d" is an invalid escape
        # sequence in a plain string literal on newer Python versions.
        start_pattern = r"START((t'\d{4}(-\d{2}){2}T\d{2}(:\d{2}){2}(\.\d+)?Z')|(\s\d{13}\s))"
        query_array = list(map(lambda x: re.sub(start_pattern, self.startreplace, x), query_array))
        stop_pattern = r"STOP((t'\d{4}(-\d{2}){2}T\d{2}(:\d{2}){2}(\.\d+)?Z')|(\s\d{13}\s))"
        query_array = list(map(lambda x: re.sub(stop_pattern, self.stopreplace, x), query_array))
        # Remove a leading AND/OR left over from the split.
        query_array = list(map(lambda x: re.sub(r"^\s(OR|AND)\s", "", x), query_array))
        # Turn the ISO-8601 'T' separator into a space after START/STOP dates.
        t_pattern = r"((?<=START'\d{4}-\d{2}-\d{2})(T))|((?<=STOP'\d{4}-\d{2}-\d{2})(T))"
        query_array = list(map(lambda x: re.sub(t_pattern, " ", x), query_array))
        # Drop the trailing 'Z' after the time component.
        r_pattern = r"((?<=\d{2}:\d{2}:\d{2})(Z))"
        query_array = list(map(lambda x: re.sub(r_pattern, "", x), query_array))
        self.queries = query_array
    else:
        self.queries = query_split
def protocol_lookup(self, value):
    """Look up the protocol number for a protocol name or list of names.

    :param value: str or list, protocol name(s); an object exposing a
        ``values`` attribute is unwrapped first
    :return: str or list of protocol numbers; an unknown scalar name yields
        None, unknown names in a list are dropped
    """
    if hasattr(value, 'values'):
        value = value.values
    protocol_json = read_json(PROTOCOL_LOOKUP_JSON_FILE, self.options)
    if isinstance(value, list):
        return [protocol_json.get(name.lower())
                for name in value
                if name.lower() in protocol_json]
    return protocol_json.get(value.lower())
def __init__(self, pattern: Pattern, data_model_mapper, time_range, options):
    """Initialise translator state for the mapper's dialect, then parse.

    :param time_range: default time window applied to unqualified queries
    """
    self.options = options
    self.dmm = data_model_mapper
    self.pattern = pattern
    self._time_range = time_range
    self.log_type = self.dmm.dialect
    self._log_config_data = read_json(MASTER_CONFIG_FILE, self.options)
    # Fix: dropped the redundant ``True if ... else False`` wrapper around
    # what is already a boolean membership test.
    self._protocol_lookup_needed = self.log_type in ('vpcflow',)
    self.qualified_queries = []
    self.time_range_lst = []
    self._parse_filter = ''
    # Fix: the original assigned ``self._parse_statement = []`` and then
    # immediately rebound it to ``{}``; the list assignment was dead code.
    self._parse_statement = {}
    self.logtype_filter = self._log_config_data[self.log_type].get(
        'logtype_filter')
    self.translated = self.parse_expression(pattern)
def __init__(self, pattern: Pattern, data_model_mapper, time_range, options):
    """Prepare relevance-query state and parse the STIX pattern in place."""
    self.dmm = data_model_mapper
    self.comparator_lookup = self.dmm.map_comparator()
    self.pattern = pattern
    self._time_range = time_range
    self.qualified_queries = []
    self.qualifier_string = ''
    self.search_folder = DEFAULT_SEARCH_FOLDER
    self._master_obj = None
    self._split_master_obj_list = []
    self._relevance_string_list = []
    self._relevance_query_for_split_attr = []
    self._relevance_property_format_string_dict = read_json(
        RELEVANCE_PROPERTY_MAP_JSON, options)
    # Operators used when rendering the time-range qualifier (>= and <=).
    self._time_range_comparator_list = [
        self._lookup_comparison_operator(comparator)
        for comparator in (ComparisonComparators.GreaterThanOrEqual,
                           ComparisonComparators.LessThanOrEqual)
    ]
    self.parse_expression(pattern)
def __init__(self, options, dialect, basepath):
    """Initialise the base mapper and load the shared select-field list.

    Fix: dropped the ``f`` prefix from ``f"select_fields"`` -- the literal
    contains no placeholders, so the f-string was pointless.
    """
    super().__init__(options, dialect, basepath)
    self.select_fields = read_json("select_fields", options)
def __init__(self, options, dialect, basepath, rows=1024):
    """Initialise the dialect-specific mapper.

    :param rows: maximum number of result rows to request (default 1024)
    """
    super().__init__(options, dialect, basepath)
    self.rows = rows
    self.logger = logger.set_logger(__name__)
    # Both mapping files are selected by the dialect name.
    self.map_data = read_json(f"{dialect}_from_stix_map", options)
    self.select_fields = read_json(f"{dialect}_event_fields", options)
def __init__(self, client, options):
    """Store the API client and load the protocol / attribute mappings."""
    self.client = client
    self.logger = logger.set_logger(__name__)
    # Lookup tables used when normalising results.
    self.mapping_protocol = read_json('network_protocol_map', options)
    self.mapping_common_attr = read_json('common_attributes', options)
def fetch_mapping(self, not_used_path, not_used_dialect, not_used_options):
    """Return the from-STIX mappings keyed by data source.

    The path/dialect/options parameters are required by the caller's
    interface but ignored here; ``self.options`` is used instead.

    :return: dict with "binary" and "process" mapping dictionaries
    """
    return {
        "binary": read_json("binary_from_stix_map.json", self.options),
        "process": read_json("process_from_stix_map.json", self.options),
    }
def __init__(self, options=None, dialect=None):
    """Initialise the CIM mapper rooted at this module's directory.

    Fix: replaced the mutable default argument ``options={}`` -- a shared
    dict default is mutated across all instances. ``None`` is normalised
    to a fresh empty dict, preserving the original call behavior.
    """
    if options is None:
        options = {}
    super().__init__(options, dialect, path.dirname(__file__))
    self.select_fields = read_json('cim_select_fields', options)