# Example #1 (earlier revision of the methods below, kept for comparison)
    def __init__(self, connection, configuration, options):
        """Validate the connection/configuration dicts in place.

        :param connection: connection parameters; cleared and refilled with
            the validated values
        :type connection: dict
        :param configuration: configuration parameters; cleared and refilled
            with the validated values
        :type configuration: dict
        :param options: translation/transmission options, stored as-is
        :type options: dict
        """
        self.__async = True
        # stack[-2] is the frame that invoked this constructor; the
        # second-to-last component of that file's path is taken as the
        # connector module name.  NOTE(review): assumes the caller's file
        # lives directly under stix_shifter_modules/<module>/ -- confirm
        # against actual call sites.
        stack = traceback.extract_stack()
        self.__connector_module = stack[-2].filename.split(os.sep)[-2]
        self.__dialect_to_query_translator = {}
        self.__dialect_to_results_translator = {}
        self.__dialects_visible = []
        self.__dialect_default = None
        self.__options = options

        # Connector instances are attached later (elsewhere); start unset.
        self.__results_connector = None
        self.__status_connector = None
        self.__delete_connector = None
        self.__query_connector = None

        if connection:
            validation_obj = {
                'connection': connection,
                'configuration': configuration
            }
            # JSON round-trip deep-copies the dicts so validation works on a
            # copy until the results are written back below.
            validation_obj = json.loads(json.dumps(validation_obj))
            modernize_objects(self.__connector_module, validation_obj)
            validation_obj = param_validator(self.__connector_module,
                                             validation_obj)
            # Replace the caller's dict contents with the validated values.
            connection.clear()
            configuration.clear()
            connection.update(validation_obj['connection'])
            configuration.update(validation_obj['configuration'])
    def __init__(self, connection, configuration, options):
        """Validate connection/configuration/options dicts in place.

        Determines the connector module from the caller's file path,
        verifies the module ships a ``to_stix_map.json``, then (when a
        connection is supplied) runs the parameters through
        ``modernize_objects``/``param_validator`` and writes the validated
        values back into the caller-supplied dicts.

        :param connection: connection parameters; cleared and refilled
        :type connection: dict
        :param configuration: configuration parameters; cleared and refilled
        :type configuration: dict
        :param options: options dict; cleared and refilled from the
            validated connection's ``options`` entry
        :type options: dict
        :raises Exception: if the module's json/ dir lacks to_stix_map.json
        """
        self.__async = True
        # stack[-2] is the frame that invoked this constructor; the
        # second-to-last component of that file's path is taken as the
        # connector module name.  NOTE(review): assumes the caller's file
        # lives directly under stix_shifter_modules/<module>/ -- confirm
        # against actual call sites.
        stack = traceback.extract_stack()
        self.__connector_module = stack[-2].filename.split(os.sep)[-2]
        self.__dialect_to_query_translator = {}
        self.__dialect_to_results_translator = {}
        self.__dialects_all = []
        self.__dialects_active_default = []
        self.__dialect_default = {}
        self.__options = options

        # Connector instances are attached later (elsewhere); start unset.
        self.__results_connector = None
        self.__status_connector = None
        self.__delete_connector = None
        self.__query_connector = None

        # Sanity-check that the module's stix_translation package has a
        # json/ directory containing the to_stix mapping file.
        module_name = self.__connector_module
        module = importlib.import_module("stix_shifter_modules." +
                                         module_name + ".stix_translation")
        json_path = os.path.dirname(module.__file__)
        json_path = os.path.abspath(json_path)
        json_path = os.path.join(json_path, 'json')
        if os.path.isdir(json_path):
            to_stix = os.path.join(json_path, 'to_stix_map.json')
            if not os.path.isfile(to_stix):
                raise Exception(to_stix + ' is not found')

        if connection:
            validation_obj = {
                'connection': connection,
                'configuration': configuration
            }
            # JSON round-trip deep-copies the dicts so validation works on a
            # copy until the results are written back below.
            validation_obj = json.loads(json.dumps(validation_obj))
            modernize_objects(self.__connector_module, validation_obj)
            validation_obj = param_validator(self.__connector_module,
                                             validation_obj)
            # Replace the caller's dict contents with the validated values;
            # options come from the validated connection's nested 'options'.
            connection.clear()
            configuration.clear()
            options.clear()
            connection.update(validation_obj['connection'])
            options.update(validation_obj['connection']['options'])
            configuration.update(validation_obj['configuration'])
    def translate(self,
                  module,
                  translate_type,
                  data_source,
                  data,
                  options=None,
                  recursion_limit=1000):
        """
        Translated queries to a specified format
        :param module: What module to use
        :type module: one of connector modules: 'qradar', 'dummy'
        :param translate_type: translation of a query or result set must be one of: 'parse', 'mapping' 'query', 'results'
        :type translate_type: str
        :param data: the data to translate
        :type data: str
        :param options: translation options { stix_validator: bool }
        :type options: dict
        :param recursion_limit: maximum depth of Python interpreter stack
        :type recursion_limit: int
        :return: translated results
        :rtype: str
        """
        # Avoid the shared-mutable-default pitfall: a dict default object
        # would be reused (and potentially mutated) across calls.
        if options is None:
            options = {}

        module, dialects = process_dialects(module, options)

        try:
            try:
                connector_module = importlib.import_module(
                    "stix_shifter_modules." + module + ".entry_point")
            except Exception as ex:
                raise UnsupportedDataSourceException(
                    "{} is an unsupported data source.".format(module))
            try:
                # BUG FIX: validated_options was previously assigned only
                # when `options` was truthy, leaving it unbound (NameError)
                # at the EntryPoint call below -- and at the
                # 'validate_pattern' / 'time_range' lookups further down --
                # whenever an empty options dict was passed.  Always run the
                # validator (it fills in module defaults), matching the
                # sibling implementation of this method.
                validated_options = param_validator(
                    module, options, 'connection.options')

                entry_point = connector_module.EntryPoint(
                    options=validated_options)
            except Exception as ex:
                track = traceback.format_exc()
                self.logger.error(ex)
                self.logger.error(track)
                raise

            # No explicit dialects requested: use everything the module offers.
            if len(dialects) == 0:
                dialects = entry_point.get_dialects()

            if translate_type == QUERY or translate_type == PARSE:
                # Increase the python recursion limit to allow ANTLR to parse large patterns
                current_recursion_limit = sys.getrecursionlimit()
                if current_recursion_limit < recursion_limit:
                    self.logger.debug(
                        "Changing Python recursion limit from {} to {}".format(
                            current_recursion_limit, recursion_limit))
                    sys.setrecursionlimit(recursion_limit)

                if translate_type == QUERY:
                    # Carbon Black combines the mapping files into one JSON using process and binary keys.
                    # The query constructor has some logic around which of the two are used.
                    if validated_options.get('validate_pattern'):
                        self._validate_pattern(data)
                    queries = []
                    unmapped_stix_collection = []
                    for dialect in dialects:
                        antlr_parsing = generate_query(data)
                        query_translator = entry_point.get_query_translator(
                            dialect)
                        if query_translator and not isinstance(
                                query_translator, EmptyQueryTranslator):
                            # Drop STIX attributes this dialect cannot map;
                            # remember them so an all-unmapped pattern can be
                            # reported as an error below.
                            stripped_parsing = strip_unmapped_attributes(
                                antlr_parsing, query_translator)
                            antlr_parsing = stripped_parsing.get('parsing')
                            unmapped_stix = stripped_parsing.get(
                                'unmapped_stix')
                            if unmapped_stix:
                                unmapped_stix_collection.append(unmapped_stix)
                            if not antlr_parsing:
                                continue
                        translated_queries = entry_point.transform_query(
                            dialect, data, antlr_parsing)

                        # Normalize: a single query string becomes a list.
                        if isinstance(translated_queries, str):
                            translated_queries = [translated_queries]
                        for query in translated_queries:
                            queries.append(query)

                    if not queries:
                        raise DataMappingException("{} {}".format(
                            MAPPING_ERROR, unmapped_stix_collection))

                    return {'queries': queries}
                else:
                    # PARSE: extract pattern elements into a parsed stix object
                    self._validate_pattern(data)
                    antlr_parsing = generate_query(data)
                    # Extract pattern elements into parsed stix object
                    parsed_stix_dictionary = parse_stix(
                        antlr_parsing, validated_options['time_range'])
                    parsed_stix = parsed_stix_dictionary['parsed_stix']
                    start_time = parsed_stix_dictionary['start_time']
                    end_time = parsed_stix_dictionary['end_time']
                    return {
                        'parsed_stix': parsed_stix,
                        'start_time': start_time,
                        'end_time': end_time
                    }

            elif translate_type == RESULTS:
                # Converting data from the datasource to STIX objects
                return entry_point.translate_results(data_source, data)
            elif translate_type == MAPPING:
                # Return the per-dialect mapping definitions.
                mappings = {}
                for dialect in dialects:
                    mapping = entry_point.get_mapping(dialect)
                    mappings[dialect] = mapping
                return mappings
            elif translate_type == SUPPORTED_ATTRIBUTES:
                # Return mapped STIX attributes supported by the data source
                result = {}
                for dialect in dialects:
                    query_translator = entry_point.get_query_translator(
                        dialect)
                    result[dialect] = query_translator.map_data

                return {'supported_attributes': result}
            else:
                raise NotImplementedError('wrong parameter: ' + translate_type)
        except Exception as ex:
            # Top-level boundary: log and convert any failure into a
            # structured error response instead of propagating.
            self.logger.error('Caught exception: ' + str(ex) + " " +
                              str(type(ex)))
            response = dict()
            ErrorResponder.fill_error(response,
                                      message_struct={'exception': ex})
            return response
    def translate(self,
                  module,
                  translate_type,
                  data_source,
                  data,
                  options=None,
                  recursion_limit=1000):
        """
        Translated queries to a specified format
        :param module: What module to use
        :type module: one of connector modules: 'qradar', 'template'
        :param translate_type: translation of a query or result set must be one of: 'parse', 'mapping' 'query', 'results'
        :type translate_type: str
        :param data: the data to translate
        :type data: str
        :param options: translation options { stix_validator: bool }
        :type options: dict
        :param recursion_limit: maximum depth of Python interpreter stack
        :type recursion_limit: int
        :return: translated results
        :rtype: str
        """
        # BUG FIX: the default was a mutable dict literal, shared across all
        # calls and open to cross-call contamination since `options` is read
        # and processed downstream.  Use None and normalize here instead.
        if options is None:
            options = {}

        module, dialects = process_dialects(module, options)
        try:
            try:
                connector_module = importlib.import_module(
                    "stix_shifter_modules." + module + ".entry_point")
            except Exception as ex:
                raise UnsupportedDataSourceException(
                    "{} is an unsupported data source.".format(module))
            try:
                # DIALECTS listing needs no option validation; everything
                # else gets module defaults filled in by the validator.
                if not translate_type == DIALECTS:
                    validated_options = param_validator(
                        module, options, 'connection.options')
                else:
                    validated_options = {}
                entry_point = connector_module.EntryPoint(
                    options=validated_options)
            except Exception as ex:
                track = traceback.format_exc()
                self.logger.error(ex)
                self.logger.debug(track)
                raise

            if translate_type == DIALECTS:
                dialects = entry_point.get_dialects_full()
                return dialects

            if len(dialects) == 0:
                dialects = entry_point.get_dialects()
                language = validated_options['language']
            else:
                # NOTE(review): raw options (not validated_options) are used
                # here, so no default language is applied when dialects were
                # requested explicitly -- confirm this asymmetry is intended.
                language = options.get('language')

            if translate_type == QUERY or translate_type == PARSE:
                # Increase the python recursion limit to allow ANTLR to parse large patterns
                current_recursion_limit = sys.getrecursionlimit()
                if current_recursion_limit < recursion_limit:
                    self.logger.debug(
                        "Changing Python recursion limit from {} to {}".format(
                            current_recursion_limit, recursion_limit))
                    sys.setrecursionlimit(recursion_limit)

                if translate_type == QUERY:
                    # Carbon Black combines the mapping files into one JSON using process and binary keys.
                    # The query constructor has some logic around which of the two are used.
                    queries = []
                    unmapped_stix_collection = []
                    unmapped_operator_collection = []
                    dialects_used = 0
                    for dialect in dialects:
                        query_translator = entry_point.get_query_translator(
                            dialect)
                        # Only translate with dialects matching the requested
                        # query language (or all, when no language was given).
                        if not language or language == query_translator.get_language(
                        ):
                            dialects_used += 1
                            transform_result = entry_point.transform_query(
                                dialect, data)
                            if 'async_call' in transform_result:
                                queries.append(transform_result)
                            else:
                                queries.extend(
                                    transform_result.get('queries', []))
                            unmapped_stix_collection.extend(
                                transform_result.get('unmapped_attributes',
                                                     []))
                            unmapped_operator_collection.extend(
                                transform_result.get('unmapped_operator', []))
                    if not dialects_used:
                        raise UnsupportedLanguageException(language)

                    # De-duplicate before reporting.
                    unmapped_stix_collection = list(
                        set(unmapped_stix_collection))
                    unmapped_operator_collection = list(
                        set(unmapped_operator_collection))
                    if not queries:
                        # Nothing translated: explain which attributes and/or
                        # operators could not be mapped.
                        if unmapped_stix_collection and unmapped_operator_collection:
                            raise DataMappingException(
                                "{} {} and Operators: {} to data source fields"
                                .format(MAPPING_ERROR,
                                        unmapped_stix_collection,
                                        unmapped_operator_collection))
                        elif unmapped_stix_collection:
                            raise DataMappingException(
                                "{} {} to data source fields".format(
                                    ATTRIBUTE_MAPPING_ERROR,
                                    unmapped_stix_collection))
                        elif unmapped_operator_collection:
                            raise DataMappingException(
                                "{} {} to data source fields".format(
                                    OPERATOR_MAPPING_ERROR,
                                    unmapped_operator_collection))
                    return {'queries': queries}
                else:
                    return entry_point.parse_query(data)
            elif translate_type == RESULTS:
                # Converting data from the datasource to STIX objects
                return entry_point.translate_results(data_source, data)
            elif translate_type == MAPPING:
                mappings = entry_point.get_mapping()
                return mappings
            elif translate_type == SUPPORTED_ATTRIBUTES:
                # Return mapped STIX attributes supported by the data source
                result = {}
                for dialect in dialects:
                    query_translator = entry_point.get_query_translator(
                        dialect)
                    result[dialect] = query_translator.map_data
                return {'supported_attributes': result}
            else:
                raise NotImplementedError('wrong parameter: ' + translate_type)
        except Exception as ex:
            # Top-level boundary: log and convert any failure into a
            # structured error response instead of propagating.
            self.logger.error('Caught exception: ' + str(ex) + " " +
                              str(type(ex)))
            self.logger.debug(exception_to_string(ex))
            response = dict()
            ErrorResponder.fill_error(response,
                                      message_struct={'exception': ex},
                                      connector=module)
            return response