class Connector(BaseSyncConnector):
    init_error = None
    logger = logger.set_logger(__name__)

    def __init__(self, connection, configuration):
        """Initialization.
        :param connection: dict, connection dict
        :param configuration: dict, configuration dict"""
        try:
            self.token = Connector.generate_token(connection, configuration)
            configuration['auth']['access_token'] = self.token
            self.api_client = APIClient(connection, configuration)
        except Exception as ex:
            self.init_error = ex

    @staticmethod
    def _handle_errors(response, return_obj):
        """Handle API error responses.
        :param response: response from the API
        :param return_obj: dict, response for the API call with status"""
        response_code = response.code
        response_txt = response.read().decode('utf-8')
        if 200 <= response_code < 300:
            return_obj['success'] = True
            return_obj['data'] = response_txt
            return return_obj
        elif ErrorResponder.is_plain_string(response_txt):
            ErrorResponder.fill_error(return_obj, message=response_txt)
            raise Exception(return_obj)
        elif ErrorResponder.is_json_string(response_txt):
            response_json = json.loads(response_txt)
            ErrorResponder.fill_error(return_obj, response_json, ['reason'])
            raise Exception(return_obj)
        else:
            raise Exception(return_obj)

    def ping_connection(self):
        """Ping the endpoint."""
        return_obj = dict()
        if self.init_error:
            raise self.init_error
        response = self.api_client.ping_box()
        response_code = response.code
        if 200 <= response_code < 300:
            return_obj['success'] = True
        else:
            ErrorResponder.fill_error(return_obj, message='unexpected exception')
        return return_obj

    def delete_query_connection(self, search_id):
        """Build the delete_query_connection response.
        :param search_id: str, search_id"""
        return {"success": True, "search_id": search_id}

    def create_results_connection(self, query, offset, length):
        """Build the response object.
        :param query: str, search_id
        :param offset: int, offset value
        :param length: int, length value"""
        response_txt = None
        return_obj = dict()
        try:
            if self.init_error:
                raise self.init_error
            response = self.api_client.run_search(query, offset, length)
            return_obj = self._handle_errors(response, return_obj)
            response_json = json.loads(return_obj["data"])
            return_obj['data'] = response_json['Results']
            # Customize the output JSON:
            # - Get the 'TableName' attribute from each row of event data.
            # - Create a dictionary with 'TableName' as the key and the remaining
            #   attributes of the event data as the value.
            # - Filter out None and empty values, except for RegistryValueName,
            #   which supports an empty string.
            # - Customize the RegistryValues JSON.
            table_event_data = []
            for event_data in return_obj['data']:
                lookup_table = event_data['TableName']
                event_data.pop('TableName')
                build_data = dict()
                build_data[lookup_table] = {k: v for k, v in event_data.items() if v or k == "RegistryValueName"}
                if lookup_table == "DeviceRegistryEvents":
                    registry_build_data = copy.deepcopy(build_data)
                    registry_build_data[lookup_table]["RegistryValues"] = []
                    registry_value_dict = {}
                    for k, v in build_data[lookup_table].items():
                        if k in ["RegistryValueData", "RegistryValueName", "RegistryValueType"]:
                            registry_value_dict.update({k: v})
                            registry_build_data[lookup_table].pop(k)
                    registry_build_data[lookup_table]["RegistryValues"].append(registry_value_dict)
                    build_data[lookup_table] = registry_build_data[lookup_table]
                build_data[lookup_table]['event_count'] = '1'
                table_event_data.append(build_data)
            return_obj['data'] = table_event_data
            return return_obj
        except Exception as ex:
            if response_txt is not None:
                ErrorResponder.fill_error(return_obj, message='unexpected exception')
                self.logger.error('can not parse response: ' + str(response_txt))
            else:
                raise ex

    @staticmethod
    def generate_token(connection, configuration):
        """Generate the token.
        :param connection: dict, connection dict
        :param configuration: dict, configuration dict"""
        authority_url = ('https://login.windows.net/' + configuration['auth']['tenant'])
        resource = "https://" + str(connection.get('host'))
        try:
            context = adal.AuthenticationContext(
                authority_url,
                validate_authority=configuration['auth']['tenant'] != 'adfs',
            )
            token = context.acquire_token_with_client_credentials(
                resource,
                configuration['auth']['clientId'],
                configuration['auth']['clientSecret'])
            token_value = token['accessToken']
            return token_value
        except Exception as ex:
            return_obj = dict()
            # not every exception carries adal's error_response attribute
            if getattr(ex, 'error_response', None):
                ErrorResponder.fill_error(return_obj, ex.error_response, ['reason'])
                Connector.logger.error("Token generation Failed: " + str(return_obj))
            raise ex
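# Illustrative sketch (not part of the connector): how create_results_connection
# re-shapes a single Advanced Hunting row, per the customization comments above.
# The input row below is hypothetical.
if __name__ == '__main__':
    row = {"TableName": "DeviceNetworkEvents", "DeviceName": "host-1",
           "RemoteIP": "198.51.100.7", "RemotePort": None}
    table = row.pop("TableName")
    # keep truthy values (RegistryValueName may be an empty string)
    shaped = {table: {k: v for k, v in row.items() if v or k == "RegistryValueName"}}
    shaped[table]["event_count"] = "1"
    # -> {'DeviceNetworkEvents': {'DeviceName': 'host-1', 'RemoteIP': '198.51.100.7', 'event_count': '1'}}
    print(shaped)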
class ErrorResponder():
    logger = logger.set_logger(__name__)

    @staticmethod
    def get_struct_item(message_struct, message_path):
        # Path items support two prefix operators when the current item is a list:
        #   '+key=value' keeps only the list items whose field 'key' equals 'value'
        #     (e.g. '+isFailure=True' keeps items whose 'isFailure' field is 'True');
        #   '~field' collects the values of the named field (e.g. 'result') from
        #     every item in the list.
        if message_struct is not None and message_path is not None:
            if isinstance(message_struct, (collections.abc.Mapping, list)):
                struct = message_struct.copy()
                for i in message_path:
                    if (isinstance(struct, collections.abc.Mapping) and i in struct) or \
                            (isinstance(struct, list) and isinstance(i, int) and i < len(struct)):
                        struct = struct[i]
                        if struct is None:
                            break
                    elif i[:1] == '+' and isinstance(struct, list):
                        key, value = i[1:].split('=')
                        filtered_struct = list(
                            filter(lambda item: (key in item and str(item[key]) == str(value)), struct))
                        struct = filtered_struct
                    elif i[:1] == '~' and isinstance(struct, list):
                        key = i[1:]
                        filtered_struct = set()
                        for s in struct:
                            filtered_struct.add(s[key])
                        struct = list(filtered_struct)
                    else:
                        break
                return struct
            else:
                return message_struct

    @staticmethod
    def fill_error(return_object, message_struct=None, message_path=None, message=None, error=None):
        return_object['success'] = False
        error_code = ErrorCode.TRANSMISSION_UNKNOWN
        if message is None:
            message = ''
        struct_item = ErrorResponder.get_struct_item(message_struct, message_path)
        if struct_item is not None:
            if len(message) > 0:
                message += ';'
            if isinstance(struct_item, list):
                struct_item = json.dumps(struct_item)
            message += str(struct_item)
        error_msg = ''
        if error is not None:
            str_error = str(error)
            # TODO: replace with logger + send the stacktrace to the logger
            ErrorResponder.logger.error("error occurred: " + str_error)
            if isinstance(error, SSLError):
                error_code = ErrorCode.TRANSMISSION_AUTH_SSL
                error_msg = 'Wrong certificate: ' + str_error
            elif isinstance(error, ConnectionError):
                error_code = ErrorCode.TRANSMISSION_CONNECT
                error_msg = 'Connection error: ' + str_error
            else:
                error_msg = str(error)
        if len(error_msg) > 0:
            if len(message) > 0:
                message += '; '
            message += error_msg
        if message is not None and len(message) > 0:
            if error_code.value == ErrorCode.TRANSMISSION_UNKNOWN.value:
                if 'uthenticat' in message or 'uthoriz' in message:
                    error_code = ErrorCode.TRANSMISSION_AUTH_CREDENTIALS
                elif 'query_syntax_error' in message:
                    error_code = ErrorCode.TRANSMISSION_QUERY_PARSING_ERROR
            return_object['error'] = str(message)
        ErrorMapperBase.set_error_code(return_object, error_code.value)
        if error_code == ErrorCode.TRANSMISSION_UNKNOWN:
            ErrorResponder.call_module_error_mapper(message_struct, return_object)

    @staticmethod
    def call_module_error_mapper(json_data, return_object):
        caller_path_list = traceback.extract_stack()[-3].filename.split('/')
        if 'stix_translation.py' in caller_path_list[-1]:
            module_path = 'stix_shifter_utils.stix_translation.stix_translation_error_mapper'
        else:
            caller_module_name = caller_path_list[-3:-1]
            module_path = 'stix_shifter_modules.' + caller_module_name[0] + '.' + caller_module_name[1] + '.error_mapper'
            # legacy path resolution, kept for reference:
            # path_start_position = ErrorResponder.rindex(caller_path_list, 'stix-shifter')
            # module_path = 'stix_shifter_modules.' + caller_module_name[0] + '.' + caller_module_name[1] + '.' + caller_module_name[0] + '_error_mapper'
            # module_path = '.'.join(caller_path_list[path_start_position: -1]) + '.' + caller_module_name + '_error_mapper'
        try:
            module = importlib.import_module(module_path)
            if json_data is not None:
                module.ErrorMapper.set_error_code(json_data, return_object)
            else:
                ErrorMapperBase.set_error_code(return_object, module.ErrorMapper.DEFAULT_ERROR)
        except ModuleNotFoundError:
            pass

    @staticmethod
    def rindex(mylist, myvalue):
        return len(mylist) - mylist[::-1].index(myvalue) - 1

    @staticmethod
    def is_plain_string(s):
        return isinstance(s, str) and not s.startswith('<?') and not s.startswith('{')

    @staticmethod
    def is_json_string(s):
        return isinstance(s, str) and s.startswith('{')
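# Illustrative sketch: how a message_path drives get_struct_item. The payload is
# hypothetical; the '+' and '~' operators behave as documented above.
payload = {"jobs": [{"isFailure": True, "result": "timeout"},
                    {"isFailure": False, "result": "ok"}]}
# keep only failed jobs, then collect their 'result' values
failures = ErrorResponder.get_struct_item(payload, ['jobs', '+isFailure=True', '~result'])
# failures == ['timeout']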
import json
import traceback
import os
from pathlib import Path

from stix_shifter_utils.utils.logger import set_logger

logger = set_logger(__name__)

__path_searchable = ['stix_shifter_modules', 'modules']
__default_search_path = ['stix_translation', 'json']


def read_json(filepath, options, search_path=__default_search_path):
    # Read a JSON file that is either passed in with the options or contained in the module.
    # logger.debug('call: read_json: ' + json.dumps(options, indent=4))
    # filepath may be:
    #   'to_stix_map.json'         -> 'to_stix_map' mapping data if present, otherwise the
    #                                 contents of <module>/stix_translation/json/to_stix_map.json
    #   'to_stix_map'              -> 'to_stix_map' mapping data if present, otherwise the
    #                                 contents of <module>/stix_translation/json/to_stix_map.json
    #   '/full/path/somefile.json' -> 'somefile' mapping data if present, otherwise the
    #                                 contents of the /full/path/somefile.json file
    file_name = Path(filepath).name
    file_key = file_name
    trim_str = '.json'
    if file_key.endswith(trim_str):
        file_key = file_key[:-len(trim_str)]
    if 'mapping' in options and file_key in options['mapping']:
        logger.debug('returning options_mapping for: ' + filepath)
        return options['mapping'][file_key]
    if os.path.isfile(filepath):
        file_path = filepath
        logger.debug('returning full_path for: ' + filepath)
    else:
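# Illustrative usage sketch, exercising only the options-mapping branch shown above:
# when options carry a 'mapping' entry whose key matches the file name (minus
# '.json'), read_json returns it without touching the filesystem. The mapping
# content is hypothetical.
options = {'mapping': {'to_stix_map': {'deviceName': {'key': 'host.name'}}}}
assert read_json('to_stix_map.json', options) == options['mapping']['to_stix_map']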
def __init__(self, connection, configuration):
    self.api_client = APIClient(connection, configuration)
    self.logger = logger.set_logger(__name__)
    self.connector = __name__.split('.')[1]
from stix_shifter_utils.stix_translation.src.patterns.pattern_objects import ObservationExpression, ComparisonExpression, \
    ComparisonExpressionOperators, ComparisonComparators, Pattern, \
    CombinedComparisonExpression, CombinedObservationExpression, ObservationOperators
from stix_shifter_utils.stix_translation.src.utils.transformers import TimestampToMilliseconds
from stix_shifter_utils.stix_translation.src.json_to_stix import observable
from stix_shifter_utils.utils import logger
from stix_shifter_utils.utils.file_helper import read_json
import re

logger = logger.set_logger(__name__)

REFERENCE_DATA_TYPES = {"sourceip": ["ipv4", "ipv6", "ipv4_cidr", "ipv6_cidr"],
                        "sourcemac": ["mac"],
                        "destinationip": ["ipv4", "ipv6", "ipv4_cidr", "ipv6_cidr"],
                        "destinationmac": ["mac"],
                        "sourcev6": ["ipv6", "ipv6_cidr"],
                        "destinationv6": ["ipv6", "ipv6_cidr"]}
FILTERING_DATA_TYPES = {"x-qradar:INOFFENSE": "INOFFENSE"}
# raw strings avoid invalid-escape warnings in the regex patterns
START_STOP_STIX_QUALIFIER = r"START((t'\d{4}(-\d{2}){2}T\d{2}(:\d{2}){2}(\.\d+)?Z')|(\s\d{13}\s))STOP"
TIMESTAMP = r"^'\d{4}(-\d{2}){2}T\d{2}(:\d{2}){2}(\.\d+)?Z'$"
TIMESTAMP_MILLISECONDS = r"\.\d+Z$"


class AqlQueryStringPatternTranslator:
    def __init__(self, pattern: Pattern, data_model_mapper, result_limit, options):
        self.options = options
        self.dmm = data_model_mapper
        self.comparator_lookup = self.dmm.map_comparator()
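# Illustrative sketch: the kind of qualifier text START_STOP_STIX_QUALIFIER is meant
# to match -- either quoted ISO timestamps or 13-digit epoch milliseconds. The
# sample string is hypothetical, written in the no-space form the regex expects.
import re
q = "STARTt'2021-01-01T00:00:00.000Z'STOPt'2021-01-02T00:00:00.000Z'"
assert re.search(START_STOP_STIX_QUALIFIER, q)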
def set_logger():
    if PingConnector.logger is None:
        PingConnector.logger = utils_logger.set_logger(__name__)
def __init__(self, client):
    self.client = client
    self.logger = logger.set_logger(__name__)
def __init__(self, options, dialect, basepath, rows=1024):
    super().__init__(options, dialect, basepath)
    self.rows = rows
    self.logger = logger.set_logger(__name__)
    self.map_data = read_json(f"{dialect}_from_stix_map", options)
    self.select_fields = read_json(f"{dialect}_event_fields", options)
def __init__(self, options):
    self.logger = logger.set_logger(__name__)
    self.stix_validator = options.get('stix_validator')
def main():
    """
    Stix-shifter can be called to either translate or transmit.

    In the case of translation, stix-shifter either translates a STIX pattern into a
    datasource query, or converts data source query results into JSON of STIX
    observations. Arguments take the form of...

        "translate" <module> <translate_type (query or results)> <data (STIX pattern or query results)> <options>

    The module and translate_type determine which module and method get executed.
    Option arguments come in as:

        "{
            "mapping": <mapping hash for STIX pattern to datasource and data results to STIX observation objects>,
            "resultSizeLimit": <integer limit number for max results in the data source query>,
            "timeRange": <integer time range for LAST x MINUTES used in the data source query when START STOP qualifiers are absent>
        }"

    In the case of transmission, stix-shifter connects to a datasource to execute
    queries, status updates, and result retrieval. Arguments take the form of...

        "transmit" <module> '{"host": <host IP>, "port": <port>, "cert": <certificate>}', '{"auth": <authentication>}',
        < query <query string>, status <search id>, results <search id> <offset> <length>, ping, is_async >
    """
    # process arguments
    parent_parser = argparse.ArgumentParser(description='stix_shifter')
    parent_subparsers = parent_parser.add_subparsers(dest='command')

    # translate parser
    translate_parser = parent_subparsers.add_parser(
        TRANSLATE, help='Translate a query or result set using a specific translation module')
    # positional arguments
    translate_parser.add_argument('module', help='The translation module to use')
    translate_parser.add_argument('translate_type',
                                  choices=[stix_translation.RESULTS, stix_translation.QUERY, stix_translation.PARSE],
                                  help='The translation action to perform')
    translate_parser.add_argument('data_source', help='STIX identity object representing a datasource')
    translate_parser.add_argument('data', type=str, help='The STIX pattern or JSON results to be translated')
    translate_parser.add_argument('options', nargs='?', help='Options dictionary')
    translate_parser.add_argument('recursion_limit', type=int, nargs='?', help='Maximum depth of Python interpreter stack')
    # optional arguments
    translate_parser.add_argument('-x', '--stix-validator', action='store_true',
                                  help='Run the STIX 2 validator against the translated results')
    translate_parser.add_argument('-d', '--debug', action='store_true', help='Print detail logs for debugging')

    # modules parser
    parent_subparsers.add_parser(MODULES, help='Get modules list')

    # mapping parser
    mapping_parser = parent_subparsers.add_parser(MAPPING, help='Get module mapping')
    # positional arguments
    mapping_parser.add_argument('module', help='The translation module to use')

    # transmit parser
    transmit_parser = parent_subparsers.add_parser(
        TRANSMIT, help='Connect to a datasource and execute a query...')
    # positional arguments
    transmit_parser.add_argument('module', help='Choose which connection module to use')
    transmit_parser.add_argument('connection', type=str, help='Data source connection with host, port, and certificate')
    transmit_parser.add_argument('configuration', type=str, help='Data source authentication')
    transmit_parser.add_argument('-d', '--debug', action='store_true', help='Print detail logs for debugging')

    # operation subparser
    operation_subparser = transmit_parser.add_subparsers(title="operation", dest="operation_command")
    operation_subparser.add_parser(stix_transmission.PING, help="Pings the data source")
    query_operation_parser = operation_subparser.add_parser(stix_transmission.QUERY, help="Executes a query on the data source")
    query_operation_parser.add_argument('query_string', help='native datasource query string')
    query_operation_parser.add_argument('-d', '--debug', action='store_true', help='Print detail logs for debugging')
    results_operation_parser = operation_subparser.add_parser(stix_transmission.RESULTS, help="Fetches the results of the data source query")
    results_operation_parser.add_argument('search_id', help='uuid of executed query')
    results_operation_parser.add_argument('offset', help='offset of results')
    results_operation_parser.add_argument('length', help='length of results')
    results_operation_parser.add_argument('-d', '--debug', action='store_true', help='Print detail logs for debugging')
    resultsstix_operation_parser = operation_subparser.add_parser(stix_transmission.RESULTS_STIX, help="Fetches the results of the data source query, response is translated in STIX")
    resultsstix_operation_parser.add_argument('search_id', help='uuid of executed query')
    resultsstix_operation_parser.add_argument('offset', help='offset of results')
    resultsstix_operation_parser.add_argument('length', help='length of results')
    resultsstix_operation_parser.add_argument('data_source', help='STIX identity object representing a datasource')
    resultsstix_operation_parser.add_argument('-d', '--debug', action='store_true', help='Print detail logs for debugging')
    status_operation_parser = operation_subparser.add_parser(stix_transmission.STATUS, help="Gets the current status of the query")
    status_operation_parser.add_argument('search_id', help='uuid of executed query')
    status_operation_parser.add_argument('-d', '--debug', action='store_true', help='Print detail logs for debugging')
    delete_operation_parser = operation_subparser.add_parser(stix_transmission.DELETE, help="Delete a running query on the data source")
    delete_operation_parser.add_argument('search_id', help='id of query to remove')
    delete_operation_parser.add_argument('-d', '--debug', action='store_true', help='Print detail logs for debugging')
    operation_subparser.add_parser(stix_transmission.IS_ASYNC, help='Checks if the query operation is asynchronous')

    execute_parser = parent_subparsers.add_parser(EXECUTE, help='Translate and fully execute a query')
    # positional arguments
    execute_parser.add_argument('transmission_module', help='Which connection module to use')
    execute_parser.add_argument('module', help='Which translation module to use for translation')
    execute_parser.add_argument('data_source', type=str, help='STIX Identity object for the data source')
    execute_parser.add_argument('connection', type=str, help='Data source connection with host, port, and certificate')
    execute_parser.add_argument('configuration', type=str, help='Data source authentication')
    execute_parser.add_argument('query', type=str, help='Query String')
    execute_parser.add_argument('-d', '--debug', action='store_true', help='Print detail logs for debugging')

    host_parser = parent_subparsers.add_parser(HOST, help='Host a local query service, for testing and development')
    host_parser.add_argument('data_source', type=str, help='STIX Identity object for the data source')
    host_parser.add_argument('host_address', type=str, help='Proxy Host:Port')
    host_parser.add_argument('ssl_cert', type=str, help='SSL certificate filename')
    host_parser.add_argument('ssl_key', type=str, help='SSL key filename')
    host_parser.add_argument('-d', '--debug', action='store_true', help='Print detail logs for debugging')

    args = parent_parser.parse_args()

    help_and_exit = args.command is None

    if 'debug' in args and args.debug:
        utils_logger.init(logging.DEBUG)
    else:
        utils_logger.init(logging.INFO)
    log = utils_logger.set_logger(__name__)

    if 'module' in args:
        args_module_dialects = args.module
        options = {}
        if 'options' in args and args.options:
            options = json.loads(args.options)
        module = process_dialects(args_module_dialects, options)[0]
        try:
            importlib.import_module("stix_shifter_modules." + module + ".entry_point")
        except Exception as ex:
            log.debug(exception_to_string(ex))
            log.error('Module {} not found'.format(module))
            help_and_exit = True

    if help_and_exit:
        parent_parser.print_help(sys.stderr)
        sys.exit(1)
    elif args.command == HOST:
        # Host means to start a local web service for STIX shifter, to use in combination
        # with the proxy data source module. This combination allows one to run and debug
        # their stix-shifter code locally, while interacting with it inside a service
        # provider such as IBM Security Connect.
        app = Flask("stix-shifter")

        @app.route('/transform_query', methods=['POST'])
        def transform_query():
            host = ProxyHost()
            return host.transform_query()

        @app.route('/translate_results', methods=['POST'])
        def translate_results():
            data_source_identity_object = args.data_source
            host = ProxyHost()
            return host.translate_results(data_source_identity_object)

        @app.route('/create_query_connection', methods=['POST'])
        def create_query_connection():
            host = ProxyHost()
            return host.create_query_connection()

        @app.route('/create_status_connection', methods=['POST'])
        def create_status_connection():
            host = ProxyHost()
            return host.create_status_connection()

        @app.route('/create_results_connection', methods=['POST'])
        def create_results_connection():
            host = ProxyHost()
            return host.create_results_connection()

        @app.route('/delete_query_connection', methods=['POST'])
        def delete_query_connection():
            host = ProxyHost()
            return host.delete_query_connection()

        @app.route('/ping', methods=['POST'])
        def ping_connection():
            host = ProxyHost()
            return host.ping_connection()

        @app.route('/is_async', methods=['POST'])
        def is_async():
            host = ProxyHost()
            return host.is_async()

        host_address = args.host_address.split(":")
        app.run(debug=True, port=int(host_address[1]), host=host_address[0],
                ssl_context=(args.ssl_cert, args.ssl_key))

    elif args.command == EXECUTE:
        # Execute means take the STIX SCO pattern as input, execute the query, and return STIX as output
        translation = stix_translation.StixTranslation()
        connection_dict = json.loads(args.connection)
        configuration_dict = json.loads(args.configuration)
        translation_options = copy.deepcopy(connection_dict.get('options', {}))
        translation_options['validate_pattern'] = True
        dsl = translation.translate(args.module, 'query', args.data_source, args.query, translation_options)
        transmission = stix_transmission.StixTransmission(args.transmission_module, connection_dict, configuration_dict)
        results = []
        log.info('Translated Queries: \n' + json.dumps(dsl, indent=4))
        if 'queries' not in dsl:
            exit(1)
        for query in dsl['queries']:
            search_result = transmission.query(query)
            if search_result["success"]:
                search_id = search_result["search_id"]
                if transmission.is_async():
                    time.sleep(1)
                    status = transmission.status(search_id)
                    if status['success']:
                        while status['progress'] < 100 and status['status'] == 'RUNNING':
                            log.debug(status)
                            status = transmission.status(search_id)
                        log.debug(status)
                    else:
                        raise RuntimeError("Fetching status failed")
                result = transmission.results(search_id, 0, 9)
                if result["success"]:
                    log.debug("Search {} results is:\n{}".format(search_id, result["data"]))
                    # Collect all results
                    results += result["data"]
                else:
                    raise RuntimeError("Fetching results failed; see log for details")
            else:
                log.error(str(search_result))
                exit(0)

        # Translate results to STIX
        translation_options = copy.deepcopy(connection_dict.get('options', {}))
        translation_options['validate_pattern'] = True
        result = translation.translate(args.module, 'results', args.data_source, json.dumps(results), translation_options)
        log.info('STIX Results: \n' + json.dumps(result, indent=4, sort_keys=False))
        exit(0)

    elif args.command == TRANSLATE:
        data = args.data
        if not data:
            data_lines = []
            for line in sys.stdin:
                data_lines.append(line)
            data = '\n'.join(data_lines)
        if args.stix_validator:
            options['stix_validator'] = args.stix_validator
        recursion_limit = args.recursion_limit if args.recursion_limit else 1000
        translation = stix_translation.StixTranslation()
        result = translation.translate(args.module, args.translate_type, args.data_source, data,
                                       options=options, recursion_limit=recursion_limit)
    elif args.command == MAPPING:
        translation = stix_translation.StixTranslation()
        result = translation.translate(args.module, stix_translation.MAPPING, None, None, options=options)
    elif args.command == MODULES:
        translation = stix_translation.StixTranslation()
        result = {}
        all_modules = modules_list()
        for m in all_modules:
            result[m] = translation.translate(m, stix_translation.DIALECTS, None, None)
    elif args.command == TRANSMIT:
        result = transmit(args)  # stix_transmission

    print(json.dumps(result, indent=4, sort_keys=False))
    exit(0)
def __init__(self, client, options):
    self.client = client
    self.logger = logger.set_logger(__name__)
    self.mapping_protocol = read_json('network_protocol_map', options)
    self.mapping_common_attr = read_json('common_attributes', options)
class DataSourceObjToStixObj:
    logger = logger.set_logger(__name__)

    def __init__(self, data_source, ds_to_stix_map, transformers, options, callback=None):
        self.identity_id = data_source["id"]
        self.ds_to_stix_map = ds_to_stix_map
        self.transformers = transformers
        self.options = options
        self.callback = callback

        # parse through options
        self.stix_validator = options.get('stix_validator')
        self.cybox_default = options.get('cybox_default', True)

        self.properties = observable.properties
        self.data_source = data_source['name']
        self.ds_key_map = [val for val in self.gen_dict_extract('ds_key', ds_to_stix_map)]

        self.bundle = {
            "type": "bundle",
            "id": "bundle--" + str(uuid.uuid4()),
            "objects": []
        }

        if options.get("stix_2.1"):
            self.spec_version = "2.1"
        else:
            self.spec_version = "2.0"
            self.bundle["spec_version"] = "2.0"
        self.unique_cybox_objects = {}
        self.bundle['objects'] += [data_source]

    @staticmethod
    def _get_value(obj, ds_key, transformer):
        """
        Get the value from the source object, transforming it if specified
        :param obj: the input object we are translating to STIX
        :param ds_key: the property from the input object
        :param transformer: the transform to apply to the property value (can be None)
        :return: the resulting STIX value
        """
        if ds_key not in obj:
            DataSourceObjToStixObj.logger.debug('{} not found in object'.format(ds_key))
            return None
        ret_val = obj[ds_key]
        if ret_val and transformer is not None:
            return transformer.transform(ret_val)
        return ret_val

    @staticmethod
    def _add_property(obj, key, stix_value, group=False):
        """
        Add stix_value to the dictionary based on the input key; the key can be a
        '.'-separated path to an inner object
        :param obj: the dictionary we are adding our key to
        :param key: the key to add
        :param stix_value: the STIX value translated from the input object
        """
        split_key = key.split('.')
        child_obj = obj
        parent_props = split_key[0:-1]
        for prop in parent_props:
            if prop not in child_obj:
                child_obj[prop] = {}
            child_obj = child_obj[prop]
        if split_key[-1] not in child_obj.keys():
            child_obj[split_key[-1]] = stix_value
        elif group is True:
            # Mapping of multiple data fields to a single STIX object field, e.g. network protocols
            if isinstance(child_obj[split_key[-1]], list):
                child_obj[split_key[-1]].extend(stix_value)  # append to the existing list

    def _handle_cybox_key_def(self, key_to_add, observation, stix_value, obj_name_map, obj_name, group=False):
        """
        Handle the translation of the input property to its STIX CybOX property
        :param key_to_add: STIX property key derived from the mapping file
        :param observation: the STIX observation currently being worked on
        :param stix_value: the STIX value translated from the input object
        :param obj_name_map: the mapping of object name to actual object
        :param obj_name: the object name derived from the mapping file
        """
        obj_type, obj_prop = key_to_add.split('.', 1)
        objs_dir = observation['objects']

        if obj_name in obj_name_map:
            # add the property to an existing cybox object
            cybox_obj = objs_dir[obj_name_map[obj_name]]
        else:
            # create a new cybox object
            cybox_obj = {'type': obj_type}
            if self.spec_version == "2.1":
                # TODO: Move this elsewhere?
                cybox_obj["id"] = "{}--{}".format(obj_type, str(uuid.uuid4()))
                observation["objects"][cybox_obj["id"]] = cybox_obj
                # resolves_to_refs lists have been deprecated in favor of relationship objects
                # with a relationship type of resolves-to. See the Domain Name cybox object
                # https://docs.oasis-open.org/cti/stix/v2.1/csprd01/stix-v2.1-csprd01.html#_Toc16070687
                # for an example.
                obj_name_map[obj_name] = cybox_obj["id"]
            else:
                obj_dir_key = str(len(objs_dir))
                objs_dir[obj_dir_key] = cybox_obj
                if obj_name is not None:
                    obj_name_map[obj_name] = obj_dir_key

        self._add_property(cybox_obj, obj_prop, stix_value, group)

    @staticmethod
    def _valid_stix_value(props_map, key, stix_value, unwrap=False):
        """
        Checks that the given STIX value is valid for this STIX property
        :param props_map: the map of STIX properties which contains validation attributes
        :param key: the STIX property name
        :param stix_value: the STIX value translated from the input object
        :param unwrap: unwrapping datasource field value of type list
        :return: whether the STIX value is valid for this STIX property
        :rtype: bool
        """
        if stix_value is None or stix_value == '':
            DataSourceObjToStixObj.logger.debug("Removing invalid value '{}' for {}".format(stix_value, key))
            return False
        elif isinstance(stix_value, list):
            if len(stix_value) == 0:
                DataSourceObjToStixObj.logger.debug("Removing invalid value '{}' for {}".format(stix_value, key))
                return False
        elif key in props_map and 'valid_regex' in props_map[key]:
            pattern = re.compile(props_map[key]['valid_regex'])
            if unwrap and isinstance(stix_value, list):
                for val in stix_value:
                    if not pattern.match(str(val)):
                        return False
            else:
                if not pattern.match(str(stix_value)):
                    return False
        return True

    # get the nested ds_keys in the mapping
    def gen_dict_extract(self, key, var):
        if hasattr(var, 'items'):
            for k, v in var.items():
                if k == key:
                    yield v
                if isinstance(v, dict):
                    for result in self.gen_dict_extract(key, v):
                        yield result
                elif isinstance(v, list):
                    for d in v:
                        for result in self.gen_dict_extract(key, d):
                            yield result

    # update the object key of the mapping
    @staticmethod
    def _update_object_key(ds_map, indx):
        for key, value in ds_map.items():
            if isinstance(value, dict):
                if 'object' in value:
                    value['object'] = str(value['object']) + '_' + str(indx)
            if isinstance(value, list):
                for item in value:
                    if 'object' in item:
                        item['object'] = str(item['object']) + '_' + str(indx)
                    if 'references' in item:
                        item['references'] = str(item['references']) + '_' + str(indx)
        return ds_map

    def _transform(self, object_map, observation, ds_map, ds_key, obj):
        to_map = obj[ds_key]

        if ds_key not in ds_map:
            if self.options.get('unmapped_fallback'):
                if ds_key not in self.ds_key_map:
                    self.logger.info('Unmapped fallback is enabled. Adding {} attribute to the custom object'.format(ds_key))
                    cust_obj = {"key": "x-" + self.data_source.replace("_", "-") + "." + ds_key,
                                "object": "cust_object"}
                    if to_map is None or to_map == '':
                        self.logger.debug("Removing invalid value '{}' for {}".format(to_map, ds_key))
                        return
                    self._handle_cybox_key_def(cust_obj["key"], observation, to_map, object_map, cust_obj["object"])
            else:
                self.logger.debug('{} is not found in map, skipping'.format(ds_key))
            return

        if isinstance(to_map, dict):
            self.logger.debug('{} is complex; descending'.format(to_map))
            # If the object is complex we must descend into the map on both sides
            for key in to_map.keys():
                self._transform(object_map, observation, ds_map[ds_key], key, to_map)
            return

        # if the datasource field is a collection of JSON objects then we need to
        # unwrap it and create multiple objects
        if isinstance(to_map, list):
            self.logger.debug('{} is a list; unwrapping.'.format(to_map))
            for item in to_map:
                if isinstance(item, dict):
                    new_ds_map = self._update_object_key(ds_map[ds_key], to_map.index(item))
                    for field in item.keys():
                        self._transform(object_map, observation, new_ds_map, field, item)

        generic_hash_key = ''

        # get the stix keys that are mapped
        ds_key_def_obj = ds_map[ds_key]
        if isinstance(ds_key_def_obj, list):
            ds_key_def_list = ds_key_def_obj
        else:
            # Use a callback function to run module-specific logic to handle unknown filehash types
            if self.callback:
                try:
                    generic_hash_key = self.callback(obj, ds_key, ds_key_def_obj['key'], self.options)
                except Exception:
                    return
            ds_key_def_list = [ds_key_def_obj]

        for ds_key_def in ds_key_def_list:
            if ds_key_def is None or 'key' not in ds_key_def:
                self.logger.debug('{} is not valid (None, or missing key)'.format(ds_key_def))
                continue

            if generic_hash_key:
                key_to_add = generic_hash_key
            else:
                key_to_add = ds_key_def['key']
            transformer = self.transformers[ds_key_def['transformer']] if 'transformer' in ds_key_def else None

            group = False
            unwrap = False

            # unwrap array of stix values to separate stix objects
            if 'unwrap' in ds_key_def:
                unwrap = True

            if ds_key_def.get('cybox', self.cybox_default):
                object_name = ds_key_def.get('object')
                if 'references' in ds_key_def:
                    references = ds_key_def['references']
                    if isinstance(references, list):
                        stix_value = []
                        for ref in references:
                            if unwrap:
                                pattern = re.compile("{}_[0-9]+".format(ref))
                                for obj_name in object_map:
                                    if pattern.match(obj_name):
                                        val = object_map.get(obj_name)
                                        stix_value.append(val)
                            else:
                                val = object_map.get(ref)
                                if not self._valid_stix_value(self.properties, key_to_add, val):
                                    continue
                                stix_value.append(val)
                        if not stix_value:
                            continue
                    else:
                        if unwrap:
                            stix_value = []
                            pattern = re.compile("{}_[0-9]+".format(references))
                            for obj_name in object_map:
                                if pattern.match(obj_name):
                                    val = object_map.get(obj_name)
                                    stix_value.append(val)
                        else:
                            stix_value = object_map.get(references)
                            if not self._valid_stix_value(self.properties, key_to_add, stix_value):
                                continue
                else:
                    # use the hard-coded value in the mapping
                    if 'value' in ds_key_def:
                        stix_value = ds_key_def['value']
                    else:
                        stix_value = self._get_value(obj, ds_key, transformer)
                    if not self._valid_stix_value(self.properties, key_to_add, stix_value, unwrap):
                        continue

                # Group Values
                if 'group' in ds_key_def:
                    group = True

                if unwrap and 'references' not in ds_key_def and isinstance(stix_value, list):
                    self.logger.debug("Unwrapping {} of {}".format(stix_value, object_name))
                    for i in range(len(stix_value)):
                        obj_i_name = "{}_{}".format(object_name, i + 1)
                        val = stix_value[i]
                        self._handle_cybox_key_def(key_to_add, observation, val, object_map, obj_i_name, group)
                else:
                    self._handle_cybox_key_def(key_to_add, observation, stix_value, object_map, object_name, group)
            else:
                # get the object name defined for custom attributes
                if 'object' in ds_key_def:
                    object_name = ds_key_def.get('object')
                    # use the hard-coded value in the mapping
                    if 'value' in ds_key_def:
                        stix_value = ds_key_def['value']
                    # get the value from the mapped key
                    elif 'ds_key' in ds_key_def:
                        ds_key = ds_key_def['ds_key']
                        stix_value = self._get_value(obj, ds_key, transformer)
                    if not self._valid_stix_value(self.properties, key_to_add, stix_value):
                        continue
                    self._handle_cybox_key_def(key_to_add, observation, stix_value, object_map, object_name, group)
                else:
                    stix_value = self._get_value(obj, ds_key, transformer)
                    if not self._valid_stix_value(self.properties, key_to_add, stix_value):
                        continue
                    self._add_property(observation, key_to_add, stix_value, group)

    # STIX 2.1 helper methods
    def _generate_and_apply_deterministic_id(self, object_id_map, cybox_objects):
        # Generates an ID based on a common namespace and the SCO properties
        # (omitting id and spec_version)
        # TODO: References may need to be included as part of the ID generation
        for key, cybox in cybox_objects.items():
            cybox_type = ""
            # set the id mapping key to the original id
            object_id_map[key] = ""
            cybox_properties = {}
            for property, value in cybox.items():
                if property == "type":
                    cybox_type = value
                if not (property == "id" or re.match(".*_ref$", property)):
                    cybox_properties[property] = value
            unique_id = cybox_type + "--" + str(
                uuid.uuid5(namespace=uuid.UUID(UUID5_NAMESPACE), name=json.dumps(cybox_properties)))
            # set the id mapping value to the new id
            object_id_map[key] = unique_id
            # replace the old id with the new one
            cybox["id"] = unique_id

    def _replace_references(self, object_id_map, cybox_objects):
        for key, cybox in cybox_objects.items():
            # replace refs with the new ids
            for property, value in cybox.items():
                if re.match(".*_ref$", property) and str(value) in object_id_map:
                    cybox[property] = object_id_map[value]
            cybox["spec_version"] = "2.1"

    def _collect_unique_cybox_objects(self, cybox_objects):
        for key, cybox in cybox_objects.items():
            if not cybox["id"] in self.unique_cybox_objects:
                self.unique_cybox_objects[cybox["id"]] = cybox

    def transform(self, obj):
        """
        Transforms the given object into a STIX observation based on the mapping file
        and transform functions
        :param obj: the datasource object that is being converted to stix
        :return: the input object converted to stix valid json
        """
        object_map = {}
        stix_type = 'observed-data'
        ds_map = self.ds_to_stix_map
        now = "{}Z".format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3])
        object_id_map = {}

        observation = {
            'id': stix_type + '--' + str(uuid.uuid4()),
            'type': stix_type,
            'created_by_ref': self.identity_id,
            'created': now,
            'modified': now,
            'objects': {}
        }

        # create normal type objects
        if isinstance(obj, dict):
            for ds_key in obj.keys():
                self._transform(object_map, observation, ds_map, ds_key, obj)
        else:
            self.logger.debug("Not a dict: {}".format(obj))

        # special case:
        # remove an object if a reference attribute object does not contain at least
        # one property other than 'type'
        self._cleanup_references(object_map, observation, ds_map)

        # Add required properties to the observation if they weren't added from the mapping
        if FIRST_OBSERVED_KEY not in observation:
            observation[FIRST_OBSERVED_KEY] = now
        if LAST_OBSERVED_KEY not in observation:
            observation[LAST_OBSERVED_KEY] = now
        if NUMBER_OBSERVED_KEY not in observation:
            observation[NUMBER_OBSERVED_KEY] = 1

        if self.spec_version == "2.1":
            cybox_objects = observation["objects"]
            self._generate_and_apply_deterministic_id(object_id_map, cybox_objects)
            self._replace_references(object_id_map, cybox_objects)
            object_refs = []
            # add cybox references to the observed-data object
            for key, value in object_id_map.items():
                object_refs.append(value)
            observation["object_refs"] = object_refs
            observation["spec_version"] = "2.1"
            self._collect_unique_cybox_objects(cybox_objects)

        # Validate each STIX object
        if self.stix_validator:
            validated_result = validate_instance(observation)
            print_results(validated_result)

        return observation

    def _cleanup_references(self, object_map, observation, ds_map):
        objects = observation.get('objects')
        remove_keys = []
        for obj, values in objects.items():
            rm_keys = list(key for key in values if '_ref' in key)
            rm_keys.append('type')
            obj_keys = list(values.keys())
            if sorted(rm_keys) == sorted(obj_keys):
                self.logger.debug('Reference object does not contain required properties, removing: ' + str(values))
                remove_keys.append(obj)
        for k in remove_keys:
            objects.pop(k)
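# Illustrative sketch of the deterministic-ID idea used above: two SCOs with
# identical properties (ignoring 'id' and *_ref fields) hash to the same uuid5,
# so duplicates collapse naturally. UUID5_NAMESPACE is assumed to be a module-level
# constant; the namespace value below is a stand-in for illustration only.
import json
import uuid
NAMESPACE = '00abedb4-aa42-466c-9c01-fed23315a9b7'  # assumed namespace value
props = {'type': 'ipv4-addr', 'value': '198.51.100.7'}
sco_id = props['type'] + '--' + str(uuid.uuid5(uuid.UUID(NAMESPACE), json.dumps(props)))
# the same props always yield the same sco_id
assert sco_id == props['type'] + '--' + str(uuid.uuid5(uuid.UUID(NAMESPACE), json.dumps(props)))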
class Connector(BaseSyncConnector):
    init_error = None
    logger = logger.set_logger(__name__)
    PROVIDER = 'CrowdStrike'
    IDS_LIMIT = 500

    def __init__(self, connection, configuration):
        """Initialization.
        :param connection: dict, connection dict
        :param configuration: dict, configuration dict"""
        try:
            self.api_client = APIClient(connection, configuration)
            self.result_limit = Connector.get_result_limit(connection)
        except Exception as ex:
            self.init_error = ex

    @staticmethod
    def _handle_errors(response, return_obj):
        """Handle API error responses.
        :param response: response from the API
        :param return_obj: dict, response for the API call with status"""
        response_code = response.code
        response_txt = response.read().decode('utf-8')
        if 200 <= response_code < 300:
            return_obj['success'] = True
            return_obj['data'] = response_txt
            return return_obj
        elif ErrorResponder.is_plain_string(response_txt):
            ErrorResponder.fill_error(return_obj, message=response_txt)
            raise Exception(return_obj)
        elif ErrorResponder.is_json_string(response_txt):
            response_json = json.loads(response_txt)
            ErrorResponder.fill_error(return_obj, response_json, ['reason'])
            raise Exception(return_obj)
        else:
            raise Exception(return_obj)

    def ping_connection(self):
        response_txt = None
        return_obj = {}
        try:
            response = self.api_client.ping_box()
            response_code = response.code
            response_txt = response.read().decode('utf-8')
            if 200 <= response_code < 300:
                return_obj['success'] = True
            elif isinstance(json.loads(response_txt), dict):
                response_error_ping = json.loads(response_txt)
                response_dict = response_error_ping['errors'][0]
                ErrorResponder.fill_error(return_obj, response_dict, ['message'])
            else:
                raise Exception(response_txt)
        except Exception as e:
            if response_txt is not None:
                ErrorResponder.fill_error(return_obj, message='unexpected exception')
                self.logger.error('can not parse response: ' + str(response_txt))
            else:
                raise e
        return return_obj

    def send_info_request_and_handle_errors(self, ids_lst):
        return_obj = dict()
        response = self.api_client.get_detections_info(ids_lst)
        return_obj = self._handle_errors(response, return_obj)
        response_json = json.loads(return_obj["data"])
        return_obj['data'] = response_json['resources']
        return return_obj

    def handle_detection_info_request(self, ids):
        # fetch detection info in batches of at most IDS_LIMIT ids
        ids = [ids[x:x + self.IDS_LIMIT] for x in range(0, len(ids), self.IDS_LIMIT)]
        ids_lst = ids.pop(0)
        return_obj = self.send_info_request_and_handle_errors(ids_lst)
        for ids_lst in ids:
            curr_obj = self.send_info_request_and_handle_errors(ids_lst)
            return_obj['data'].extend(curr_obj['data'])
        return return_obj

    @staticmethod
    def get_result_limit(connection):
        default_result_limit = Connector.IDS_LIMIT
        if 'options' in connection:
            return connection['options'].get('result_limit', default_result_limit)
        return default_result_limit

    @staticmethod
    def _handle_quarantined_files(qua_files_lst, device_data):
        qua_files_event_lst = []
        if qua_files_lst:
            for file_dict in qua_files_lst:
                qua_file_data = dict()
                qua_file_data['display_name'] = file_dict['state']
                qua_file_data['quarantined_file_sha256'] = file_dict['sha256']
                qua_file_data['provider'] = Connector.PROVIDER
                qua_file_data.update(device_data)
                qua_files_event_lst.append(qua_file_data)
        return qua_files_event_lst

    @staticmethod
    def _handle_ioc(ioc_type, ioc_source, ioc_value):
        # ioc_value may contain many values separated by ','.
        # For now, take only the first value.
        ioc_value = ioc_value.split(',')[0]  # TODO: handle the remaining values
        ioc_data = dict()
        file_sources = ['file_read', 'file_write', 'library_load']
        # handle ioc_source = file_read / file_write / library_load
        if ioc_source and ioc_type and ioc_source in file_sources:
            if 'sha256' in ioc_type:
                ioc_data['sha256_ioc'] = ioc_value
            elif 'md5' in ioc_type:
                ioc_data['md5_ioc'] = ioc_value.replace("_", " ")
            ioc_data['display_name'] = ioc_source.replace("_", " ")
        # handle ioc_type = domain
        elif ioc_type and 'domain' in ioc_type:
            ioc_data['domain_ioc'] = ioc_value
        # handle ioc_type = registry_key
        elif ioc_type and 'registry_key' in ioc_type:
            ioc_data['registry_key'] = ioc_value
        return ioc_data

    def create_results_connection(self, query, offset, length):
        """Build the response object.
        :param query: str, search_id
        :param offset: int, offset value
        :param length: int, length value"""
        result_limit = offset + length
        response_txt = None
        ids_obj = dict()
        return_obj = dict()
        table_event_data = []
        try:
            if self.init_error:
                raise self.init_error
            response = self.api_client.get_detections_IDs(query, result_limit)
            self._handle_errors(response, ids_obj)
            response_json = json.loads(ids_obj["data"])
            ids_obj['ids'] = response_json.get('resources')
            if ids_obj['ids']:  # empty when no detections match the filter arg
                return_obj = self.handle_detection_info_request(ids_obj['ids'])
                for event_data in return_obj['data']:
                    device_data = event_data['device']
                    hostinfo_data = event_data['hostinfo']
                    device_data.update(hostinfo_data)  # device & host
                    build_device_data = {k: v for k, v in device_data.items() if v}  # device & host
                    build_data = {k: v for k, v in event_data.items()
                                  if not isinstance(v, dict) and k != 'behaviors'}  # other detection fields
                    build_data.update(build_device_data)
                    quarantined_files = event_data.get('quarantined_files')
                    if quarantined_files:
                        quarantined_files_lst = self._handle_quarantined_files(quarantined_files, build_data)
                        table_event_data.extend(quarantined_files_lst)
                    event_data.pop("quarantined_files", None)
                    for behavior in event_data['behaviors']:
                        ioc_type = behavior.pop("ioc_type", None)
                        ioc_source = behavior.pop("ioc_source", None)
                        ioc_value = behavior.pop("ioc_value", None)
                        ioc_data = self._handle_ioc(ioc_type, ioc_source, ioc_value)
                        build_ioc_data = {k: v for k, v in ioc_data.items() if v}
                        parent_details_data = behavior['parent_details']
                        build_event_data = {k: v for k, v in behavior.items() if v and not isinstance(v, dict)}
                        build_event_data.update(parent_details_data)
                        build_event_data.update(build_data)
                        build_event_data.update(build_ioc_data)
                        # build_event_data['device'] = build_device_data
                        build_event_data.pop('device_id')
                        build_event_data['provider'] = Connector.PROVIDER
                        build_event_data = {k: v for k, v in build_event_data.items() if v != "N/A"}
                        table_event_data.append(build_event_data)
            return_obj['data'] = table_event_data
            if not return_obj.get('success'):
                return_obj['success'] = True
            return return_obj
        except Exception as ex:
            if response_txt is not None:
                ErrorResponder.fill_error(return_obj, message='unexpected exception')
                self.logger.error('can not parse response: ' + str(response_txt))
            else:
                raise ex
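# Illustrative sketch: how handle_detection_info_request batches ids. With
# IDS_LIMIT = 500, 1200 hypothetical ids become chunks of 500, 500, and 200,
# fetched in order and merged into one result list.
ids = list(range(1200))
chunks = [ids[x:x + Connector.IDS_LIMIT] for x in range(0, len(ids), Connector.IDS_LIMIT)]
assert [len(c) for c in chunks] == [500, 500, 200]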
class DataSourceObjToStixObj:
    logger = logger.set_logger(__name__)

    def __init__(self, identity_id, ds_to_stix_map, transformers, options, data_source, callback=None):
        self.identity_id = identity_id
        self.ds_to_stix_map = ds_to_stix_map
        self.transformers = transformers
        self.options = options
        self.callback = callback

        # parse through options
        self.stix_validator = options.get('stix_validator')
        self.cybox_default = options.get('cybox_default', True)

        self.properties = observable.properties
        self.data_source = data_source
        self.ds_key_map = [val for val in self.gen_dict_extract('ds_key', ds_to_stix_map)]

    @staticmethod
    def _get_value(obj, ds_key, transformer):
        """
        Get the value from the source object, transforming it if specified
        :param obj: the input object we are translating to STIX
        :param ds_key: the property from the input object
        :param transformer: the transform to apply to the property value (can be None)
        :return: the resulting STIX value
        """
        if ds_key not in obj:
            DataSourceObjToStixObj.logger.debug('{} not found in object'.format(ds_key))
            return None
        ret_val = obj[ds_key]
        # Is this getting hit with a None-type value?
        if ret_val and transformer is not None:
            return transformer.transform(ret_val)
        return ret_val

    @staticmethod
    def _add_property(obj, key, stix_value, group=False):
        """
        Add stix_value to the dictionary based on the input key; the key can be a
        '.'-separated path to an inner object
        :param obj: the dictionary we are adding our key to
        :param key: the key to add
        :param stix_value: the STIX value translated from the input object
        """
        split_key = key.split('.')
        child_obj = obj
        parent_props = split_key[0:-1]
        for prop in parent_props:
            if prop not in child_obj:
                child_obj[prop] = {}
            child_obj = child_obj[prop]
        if split_key[-1] not in child_obj.keys():
            child_obj[split_key[-1]] = stix_value
        elif group is True:
            # Mapping of multiple data fields to a single STIX object field, e.g. network protocols
            if isinstance(child_obj[split_key[-1]], list):
                child_obj[split_key[-1]].extend(stix_value)  # append to the existing list

    @staticmethod
    def _handle_cybox_key_def(key_to_add, observation, stix_value, obj_name_map, obj_name, group=False):
        """
        Handle the translation of the input property to its STIX CybOX property
        :param key_to_add: STIX property key derived from the mapping file
        :param observation: the STIX observation currently being worked on
        :param stix_value: the STIX value translated from the input object
        :param obj_name_map: the mapping of object name to actual object
        :param obj_name: the object name derived from the mapping file
        """
        obj_type, obj_prop = key_to_add.split('.', 1)
        objs_dir = observation['objects']

        if obj_name in obj_name_map:
            obj = objs_dir[obj_name_map[obj_name]]
        else:
            obj = {'type': obj_type}
            obj_dir_key = str(len(objs_dir))
            objs_dir[obj_dir_key] = obj
            if obj_name is not None:
                obj_name_map[obj_name] = obj_dir_key

        DataSourceObjToStixObj._add_property(obj, obj_prop, stix_value, group)

    @staticmethod
    def _valid_stix_value(props_map, key, stix_value, unwrap=False):
        """
        Checks that the given STIX value is valid for this STIX property
        :param props_map: the map of STIX properties which contains validation attributes
        :param key: the STIX property name
        :param stix_value: the STIX value translated from the input object
        :param unwrap: unwrapping datasource field value of type list
        :return: whether the STIX value is valid for this STIX property
        :rtype: bool
        """
        # Causing a couple of failing tests in MSATP
        if stix_value is None or stix_value == '':
            DataSourceObjToStixObj.logger.debug("Removing invalid value '{}' for {}".format(stix_value, key))
            return False
        elif key in props_map and 'valid_regex' in props_map[key]:
            pattern = re.compile(props_map[key]['valid_regex'])
            if unwrap and isinstance(stix_value, list):
                for val in stix_value:
                    if not pattern.match(str(val)):
                        return False
            else:
                if not pattern.match(str(stix_value)):
                    return False
        return True

    # get the nested ds_keys in the mapping
    def gen_dict_extract(self, key, var):
        if hasattr(var, 'items'):
            for k, v in var.items():
                if k == key:
                    yield v
                if isinstance(v, dict):
                    for result in self.gen_dict_extract(key, v):
                        yield result
                elif isinstance(v, list):
                    for d in v:
                        for result in self.gen_dict_extract(key, d):
                            yield result

    def _transform(self, object_map, observation, ds_map, ds_key, obj):
        to_map = obj[ds_key]

        if ds_key not in ds_map:
            if self.options.get('unmapped_fallback'):
                if ds_key not in self.ds_key_map:
                    self.logger.info('Unmapped fallback is enabled. Adding {} attribute to the custom object'.format(ds_key))
                    cust_obj = {"key": "x-" + self.data_source.replace("_", "-") + "." + ds_key,
                                "object": "cust_object"}
                    DataSourceObjToStixObj._handle_cybox_key_def(
                        cust_obj["key"], observation, to_map, object_map, cust_obj["object"])
            else:
                self.logger.debug('{} is not found in map, skipping'.format(ds_key))
            return

        if isinstance(to_map, dict):
            self.logger.debug('{} is complex; descending'.format(to_map))
            # If the object is complex we must descend into the map on both sides
            for key in to_map.keys():
                self._transform(object_map, observation, ds_map[ds_key], key, to_map)
            return

        generic_hash_key = ''

        # get the stix keys that are mapped
        ds_key_def_obj = ds_map[ds_key]
        if isinstance(ds_key_def_obj, list):
            ds_key_def_list = ds_key_def_obj
        else:
            # Use a callback function to run module-specific logic to handle unknown filehash types
            if self.callback:
                try:
                    generic_hash_key = self.callback(obj, ds_key, ds_key_def_obj['key'], self.options)
                except Exception:
                    return
            ds_key_def_list = [ds_key_def_obj]

        for ds_key_def in ds_key_def_list:
            if ds_key_def is None or 'key' not in ds_key_def:
                self.logger.debug('{} is not valid (None, or missing key)'.format(ds_key_def))
                continue

            if generic_hash_key:
                key_to_add = generic_hash_key
            else:
                key_to_add = ds_key_def['key']
            transformer = self.transformers[ds_key_def['transformer']] if 'transformer' in ds_key_def else None

            group = False
            unwrap = False

            # unwrap array of stix values to separate stix objects
            if 'unwrap' in ds_key_def:
                unwrap = True

            if ds_key_def.get('cybox', self.cybox_default):
                object_name = ds_key_def.get('object')
                if 'references' in ds_key_def:
                    references = ds_key_def['references']
                    if isinstance(references, list):
                        stix_value = []
                        for ref in references:
                            if unwrap:
                                pattern = re.compile("{}_[0-9]+".format(ref))
                                for obj_name in object_map:
                                    if pattern.match(obj_name):
                                        val = object_map.get(obj_name)
                                        stix_value.append(val)
                            else:
                                val = object_map.get(ref)
                                if not DataSourceObjToStixObj._valid_stix_value(self.properties, key_to_add, val):
                                    continue
                                stix_value.append(val)
                        if not stix_value:
                            continue
                    else:
                        if unwrap:
                            stix_value = []
                            pattern = re.compile("{}_[0-9]+".format(references))
                            for obj_name in object_map:
                                if pattern.match(obj_name):
                                    val = object_map.get(obj_name)
                                    stix_value.append(val)
                        else:
                            stix_value = object_map.get(references)
                            if not DataSourceObjToStixObj._valid_stix_value(self.properties, key_to_add, stix_value):
                                continue
                else:
                    # use the hard-coded value in the mapping
                    if 'value' in ds_key_def:
                        stix_value = ds_key_def['value']
                    else:
                        stix_value = DataSourceObjToStixObj._get_value(obj, ds_key, transformer)
                    if not DataSourceObjToStixObj._valid_stix_value(self.properties, key_to_add, stix_value, unwrap):
                        continue

                # Group Values
                if 'group' in ds_key_def:
                    group = True

                if unwrap and 'references' not in ds_key_def and isinstance(stix_value, list):
                    self.logger.debug("Unwrapping {} of {}".format(stix_value, object_name))
                    for i in range(len(stix_value)):
                        obj_i_name = "{}_{}".format(object_name, i + 1)
                        val = stix_value[i]
                        DataSourceObjToStixObj._handle_cybox_key_def(
                            key_to_add, observation, val, object_map, obj_i_name, group)
                else:
                    DataSourceObjToStixObj._handle_cybox_key_def(
                        key_to_add, observation, stix_value, object_map, object_name, group)
            else:
                # get the object name defined for custom attributes
                if 'object' in ds_key_def:
                    object_name = ds_key_def.get('object')
                    # use the hard-coded value in the mapping
                    if 'value' in ds_key_def:
                        stix_value = ds_key_def['value']
                    # get the value from the mapped key
                    elif 'ds_key' in ds_key_def:
                        ds_key = ds_key_def['ds_key']
                        stix_value = DataSourceObjToStixObj._get_value(obj, ds_key, transformer)
                    if not DataSourceObjToStixObj._valid_stix_value(self.properties, key_to_add, stix_value):
                        continue
                    DataSourceObjToStixObj._handle_cybox_key_def(
                        key_to_add, observation, stix_value, object_map, object_name, group)
                else:
                    stix_value = DataSourceObjToStixObj._get_value(obj, ds_key, transformer)
                    if not DataSourceObjToStixObj._valid_stix_value(self.properties, key_to_add, stix_value):
                        continue
                    DataSourceObjToStixObj._add_property(observation, key_to_add, stix_value, group)

    def transform(self, obj):
        """
        Transforms the given object into a STIX observation based on the mapping file
        and transform functions
        :param obj: the datasource object that is being converted to stix
        :return: the input object converted to stix valid json
        """
        NUMBER_OBSERVED_KEY = 'number_observed'
        object_map = {}
        stix_type = 'observed-data'
        ds_map = self.ds_to_stix_map

        observation = {
            'id': stix_type + '--' + str(uuid.uuid4()),
            'type': stix_type,
            'created_by_ref': self.identity_id,
            'created': "{}Z".format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3]),
            'modified': "{}Z".format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3]),
            'objects': {}
        }

        # create normal type objects
        if isinstance(obj, dict):
            for ds_key in obj.keys():
                self._transform(object_map, observation, ds_map, ds_key, obj)
        else:
            self.logger.debug("Not a dict: {}".format(obj))

        # Add required properties to the observation if they weren't added via the mapping
        if self.options.get('unmapped_fallback'):
            if "first_observed" not in observation and "last_observed" not in observation:
                observation['first_observed'] = "{}Z".format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3])
                observation['last_observed'] = "{}Z".format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3])

        # Add required property to the observation if it wasn't added via the mapping
        if NUMBER_OBSERVED_KEY not in observation:
            observation[NUMBER_OBSERVED_KEY] = 1

        # Validate each STIX object
        if self.stix_validator:
            validated_result = validate_instance(observation)
            print_results(validated_result)

        return observation
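# Illustrative sketch of the to-STIX mapping shape that _transform consumes. The
# datasource field names on the left are hypothetical; the attributes ('key',
# 'object', 'references', 'cybox', 'transformer') are the ones the code above
# inspects ('unwrap', 'group', 'value', and 'ds_key' are also recognized).
example_to_stix_map = {
    "src_ip": [
        {"key": "ipv4-addr.value", "object": "src_ip"},
        {"key": "network-traffic.src_ref", "object": "nt", "references": "src_ip"}
    ],
    "event_time": {"key": "first_observed", "cybox": False, "transformer": "EpochToTimestamp"}
}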
from requests.exceptions import SSLError, ConnectionError
from enum import Enum
import importlib
import traceback
import collections.abc
import json

from stix_shifter_utils.utils.error_mapper_base import ErrorMapperBase
from stix_shifter_utils.utils import logger as utils_logger

logger = utils_logger.set_logger(__name__)


class ErrorCode(Enum):
    TRANSLATION_NOTIMPLEMENTED_MODE = 'not_implemented'
    TRANSLATION_MODULE_DEFAULT_ERROR = 'invalid_parameter'
    TRANSLATION_MAPPING_ERROR = 'mapping_error'
    TRANSLATION_STIX_VALIDATION = 'invalid_parameter'
    TRANSLATION_NOTSUPPORTED = 'invalid_parameter'
    TRANSLATION_RESULT = 'mapping_error'
    TRANSLATION_UNKNOWN_DIALOG = 'invalid_parameter'
    TRANSLATION_UNKNOWN_LANGUAGE = 'invalid_parameter'

    TRANSMISSION_UNKNOWN = 'unknown'
    TRANSMISSION_CONNECT = 'service_unavailable'
    TRANSMISSION_AUTH_SSL = 'authentication_fail'
    TRANSMISSION_AUTH_CREDENTIALS = 'authentication_fail'
    TRANSMISSION_MODULE_DEFAULT_ERROR = 'unknown'
    TRANSMISSION_QUERY_PARSING_ERROR = 'invalid_query'
    TRANSMISSION_QUERY_LOGICAL_ERROR = 'invalid_query'
    TRANSMISSION_RESPONSE_EMPTY_RESULT = 'no_results'
def __init__(self, connection, configuration):
    self.logger = logger.set_logger(__name__)
    self.endpoint_start = 'restAPI/'
    self.connection = connection
    self.configuration = configuration
    self.headers = dict()
    self.search_id = None
    self.query = None
    self.authorization = None
    self.credential = None

    # Check whether the connection object contains the following
    username = configuration.get('auth').get("username", None)
    password = configuration.get('auth').get("password", None)
    grant_type = connection.get('options', {}).get("grant_type", 'password')
    client_id = connection.get('options', {}).get("client_id", None)
    client_secret = connection.get('options', {}).get("client_secret", None)

    # It was decided that the authorization will not be sent by UDS
    if (username is None or password is None or grant_type is None
            or client_id is None or client_secret is None):
        self.credential = None
        raise IOError(3001, "Guardium credential not provided in the connection / configuration object")
    else:
        self.credential = {
            "username": username,
            "password": password,
            "grant_type": grant_type,
            "client_id": client_id,
            "client_secret": client_secret
        }

    host_port = connection.get('host') + ':' + str(connection.get('port', ''))
    url_modifier_function = None
    proxy = connection.get('proxy')
    if proxy is not None:
        proxy_url = proxy.get('url')
        proxy_auth = proxy.get('auth')
        if proxy_url is not None and proxy_auth is not None:
            self.headers['proxy'] = proxy_url
            self.headers['proxy-authorization'] = 'Basic ' + proxy_auth
        if proxy.get('x_forward_proxy', None) is not None:
            # the endpoint is appended by 'add_endpoint_to_url_header'
            self.headers['x-forward-url'] = 'https://' + host_port + '/'
            host_port = proxy.get('x_forward_proxy')
            if proxy.get('x_forward_proxy_auth', None) is not None:
                self.headers['x-forward-auth'] = proxy.get('x_forward_proxy_auth')
            self.headers['user-agent'] = 'UDS'
            url_modifier_function = self.add_endpoint_to_url_header

    self.client = RestApiClient(host_port,
                                None,
                                self.headers,
                                url_modifier_function,
                                cert_verify=connection.get('selfSignedCert', True),
                                sni=connection.get('sni', None))
    self.search_timeout = connection['options'].get('timeout')
def __init__(self):
    self.args = []
    self.logger = logger.set_logger(__name__)
def __init__(self, api_client):
    self.api_client = api_client
    self.logger = logger.set_logger(__name__)
from stix_shifter_utils.stix_translation.src.utils.transformers import ValueTransformer
from stix_shifter_utils.utils import logger
import re

LOGGER = logger.set_logger(__name__)


class SecondsToTimeStamp(ValueTransformer):
    """A value transformer for seconds in 00h00m00s format"""

    @staticmethod
    def transform(obj):
        try:
            seconds = int(obj)
            hours, seconds = seconds // 3600, seconds % 3600
            minutes, seconds = seconds // 60, seconds % 60
            return f"{hours:02d}h{minutes:02d}m{seconds:02d}s"
        except ValueError:
            LOGGER.error("Cannot convert epoch value %s to timestamp", obj)
            return None


class ConnStateToDesc(ValueTransformer):
    """A value transformer for the connection state description"""

    @staticmethod
    def transform(obj):
        """CONN::TCP CONN STATE
        The human-readable connection state, which varies for TCP and UDP connections.
        Updated from the Darktrace API documentation."""
        dt_conn_description = {
            "OTH":
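# Usage sketch for the transformer above: 3661 seconds formats as '01h01m01s';
# a non-numeric input logs an error and yields None.
assert SecondsToTimeStamp.transform(3661) == "01h01m01s"
assert SecondsToTimeStamp.transform("abc") is None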
def __init__(self, api_client):
    self.api_client = api_client
    self.logger = logger.set_logger(__name__)
    self.connector = __name__.split('.')[1]
def __init__(self, options, dialect, basepath, rows=1024):
    super().__init__(options, dialect, basepath)
    self.rows = rows
    self.logger = logger.set_logger(__name__)