def test_get_client_dict(self, mock_get, mock_connect, co3_args):
    """Verify get_client honours proxy settings supplied in a plain dict."""
    mock_connect.return_value = {"orgs": [{"id": 204}]}
    mock_get.return_value = {"actions_framework_enabled": True}

    # Connection options with no proxy configuration.
    args = {
        'host': 'resilient_host',
        'email': '*****@*****.**',
        'password': '******',
        'org': 'org',
    }
    rest_client = resilient.get_client(args)
    assert not rest_client.proxies

    # Add proxy settings and confirm they surface on the client.
    args['proxy_host'] = 'proxy_host'
    args['proxy_port'] = 1443
    rest_client = resilient.get_client(args)
    assert rest_client.proxies
    assert rest_client.proxies['https'] == "https://*****:*****@proxy_host:1443/"
def __init__(self, opts, rest_client_helper):
    """ configure the rest_client for the destination resilient
    :param opts: used for Resilient target
    :param rest_client_helper: used for Resilient source
    """
    self.opts = opts

    # A helper is supplied only for the source org; when it is None we
    # connect directly to the destination (target) Resilient org.
    try:
        if rest_client_helper:
            # source resilient
            self.rest_client = rest_client_helper.rest_client()
        else:
            # target resilient
            self.rest_client = resilient.get_client(self.opts)
    except Exception as err:
        raise IntegrationError(str(err))

    # The reference-map store (datatable, sqlite or postgres) is only
    # required when we are the target Resilient org.
    if not rest_client_helper:
        self.dbsync = DBSyncFactory.get_dbsync(
            self.rest_client.org_id,
            self.opts.get("sqlite_sync_file", None),
            self.opts.get("postgresql_connect", None),
            self.opts.get("postgresql_uid", None),
            self.opts.get("postgresql_pwd", None))
        if not self.dbsync:
            raise IntegrationError("Unable to create DBSync object")
def __init__(self, opts):
    """Connect to Resilient and build a catalog of field definitions."""
    self.opts = opts

    # Create SimpleClient and connect
    self.client = resilient.get_client(opts)
    if not self.client:
        raise Exception("Resilient Client is not valid.")

    # If opts restricts the field list, only those fields are included.
    fieldlist = opts.field or []
    self.rows = {}
    self.fields = {}
    for objecttype in OBJECT_TYPES.keys():
        # Seed with a synthetic "id" field definition.
        # NOTE(review): this stores the "id" definition's keys directly as
        # the per-type dict (not nested under an "id" key), so "name",
        # "input_type" and "text" sit alongside real field names — confirm
        # downstream consumers expect this shape.
        self.fields[objecttype] = {
            "name": "id",
            "input_type": "id",
            "text": "id"
        }
        # Fetch the actual defined fields from the REST API.
        thefields = self.client.get("/types/{}/fields".format(objecttype))
        if not thefields:
            raise Exception("Unable to get fields from REST API")
        for thefield in thefields:
            if fieldlist == [] or thefield["name"] in fieldlist:
                self.fields[objecttype][thefield["name"]] = thefield

    # Type catalog, used for ID-to-string conversion.
    self.types = self.client.get("/types")
    if not self.types:
        raise Exception("Unable to get Resilient types")
def main():
    """Dispatch one REST operation per command-line flag."""
    parser = ExampleArgumentParser(config_file=resilient.get_config_file())
    opts = parser.parse_args()

    # Create SimpleClient for a REST connection to the Resilient services
    client = resilient.get_client(opts)

    # Each flag maps to one helper; several may run in a single invocation.
    if opts["create"]:
        create_incident(client, opts["create"], opts["attach"])
    if opts["list"]:
        show_incident_list(client, opts["query"])
    if opts["get"]:
        generic_get(client, opts["get"])
    if opts["post"]:
        generic_post(client, opts["post"][0], opts["post"][1])
    if opts["update"]:
        generic_update(client, opts["update"][0], opts["update"][1])
    if opts["patch"]:
        generic_patch(client, opts["patch"][0], opts["patch"][1])
    if opts["delete"]:
        generic_delete(client, opts["delete"])
    if opts["search"]:
        generic_search(client, opts["search"])
def generate_code(args):
    """generate template code components from functions"""
    parser = AppArgumentParser(config_file=resilient.get_config_file())
    (opts, extra) = parser.parse_known_args()
    client = resilient.get_client(opts)

    if args.cmd == "extract" and args.output:
        # Extract selected customizations into a res file.
        extract_to_res(client, args.exportfile, args.messagedestination,
                       args.function, args.workflow, args.rule, args.field,
                       args.datatable, args.task, args.script,
                       args.artifacttype, args.output, args.zip)
    elif args.reload:
        # Regenerate an existing package in place.
        codegen_reload_package(client, args)
    elif args.package:
        # codegen an installable package
        output_base = os.path.join(os.curdir, args.package)
        codegen_package(client, args.exportfile, args.package,
                        args.messagedestination, args.function, args.workflow,
                        args.rule, args.field, args.datatable, args.task,
                        args.script, args.artifacttype,
                        os.path.expanduser(output_base))
    elif args.function:
        # codegen a component for one or more functions
        if len(args.function) > 1:
            default_name = "functions.py"
        else:
            default_name = "{}.py".format(args.function[0])
        output_dir = os.path.expanduser(opts["componentsdir"] or os.curdir)
        output_file = args.output or default_name
        if not output_file.endswith(".py"):
            output_file = output_file + ".py"
        codegen_functions(client, args.exportfile, args.function,
                          args.workflow, args.rule, args.artifacttype,
                          output_dir, output_file)
def main():
    """Main"""
    # Parse commandline arguments
    parser = FinfoArgumentParser(config_file=resilient.get_config_file())
    opts = parser.parse_args()

    # Connect to Resilient
    client = resilient.get_client(opts)

    # Without a field name, list fields in the requested format and exit.
    if not opts.fieldname:
        if opts.types:
            list_types(client)
        elif opts.field_values:
            list_fields_values(client, opts.field_type)
        elif opts.csv:
            list_fields_csv(client, opts.field_type)
        else:
            list_fields(client, opts.field_type)
        exit(0)

    # Otherwise look up the single field and display its properties.
    field_data = find_field(client, opts.fieldname, opts.field_type)
    if field_data:
        if opts.json:
            print_json(field_data)
        else:
            print_details(field_data)
        exit(0)
    else:
        print(u"Field '{}' was not found.".format(opts.fieldname))
        exit(1)
def create_tab(tab, opts, update_existing=False):
    """
    If allowed by app.config - creates or updates a tab in the UI according to the
    specification passed in the class.
    Can be forbidden to make changes by adding `ui_lock=<true/on>` in app.config
    under integration section, resilient, or "integrations".

    :param tab: Subclass of ui.Tab that has required parameters and describes
        the required layout.
    :param update_existing: Defines the behavior if tab is already present in the
        system. Either simply leave it alone, or go through required elements and
        add those that are missing.
    """
    try:
        # Honour the ui_lock setting before touching anything.
        if not permission_to_edit(tab, opts):
            LOG.info("No permission to edit UI for {}".format(tab.SECTION))
            return
        client = resilient.get_client(opts)
        layout = get_incident_layout(client)

        # If the tab already exists, either reconcile it or leave it alone.
        if tab.exists_in(layout.get("content")):
            if update_existing:
                LOG.info("UI tab for {} already exists. Checking for updates.".format(tab.SECTION))
                return update_tab(client, layout, tab)
            else:
                LOG.info("UI tab for {} already exists. Not updating.".format(tab.SECTION))
                return

        LOG.info("Creating a UI tab for {}".format(tab.SECTION))
        return add_tab_to_layout(client, layout, tab.as_dto())
    except Exception as e:
        # Best-effort: UI customization failures are logged, not raised.
        LOG.error("Failed to create/update tab in the UI for {}".format(tab.SECTION))
        LOG.error(str(e))
def test_get_client_namespace(self, mock_get, mock_connect, co3_args):
    """Verify get_client honours proxy settings supplied via an argparse Namespace."""
    mock_connect.return_value = {"orgs": [{"id": 204}]}
    mock_get.return_value = {"actions_framework_enabled": True}

    parser = ArgumentParser()
    parser.add_argument('--proxy_host')
    parser.add_argument('--proxy_port')
    parser.add_argument('--proxy_user')
    parser.add_argument('--proxy_password')
    parser.add_argument('--email')
    parser.add_argument('--password')
    parser.add_argument('--host')
    parser.add_argument('--org')

    # No proxy arguments: client has no proxies.
    args = parser.parse_args([
        '--email', '*****@*****.**', '--password', 'password',
        '--host', 'resilient_host', '--org', 'org'
    ])
    rest_client = resilient.get_client(args)
    assert not rest_client.proxies

    # Proxy host/port only.
    args = parser.parse_args([
        '--email', '*****@*****.**', '--password', 'password',
        '--host', 'resilient_host', '--org', 'org',
        '--proxy_host', 'proxy_host', '--proxy_port', '1443'
    ])
    rest_client = resilient.get_client(args)
    assert rest_client.proxies
    # NOTE(review): the original source is corrupted here (the expected URL of
    # this assert and the argument list of the next parse_args were fused by
    # redaction) — the reconstruction below is a best guess; TODO confirm
    # against the upstream test.
    assert rest_client.proxies['https'] == "https://proxy_host:1443/"

    # Proxy host/port plus proxy credentials.
    args = parser.parse_args([
        '--email', 'test@example.com', '--password', 'password',
        '--host', 'resilient_host', '--org', 'org',
        '--proxy_host', 'proxy_host', '--proxy_port', '1443',
        '--proxy_user', 'proxy_user', '--proxy_password', 'proxy_password'
    ])
    rest_client = resilient.get_client(args)
    assert rest_client.proxies
    assert rest_client.proxies['https'] == "https://*****:*****@proxy_host:1443/"
def main():
    """ program main """
    config_file = resilient.get_config_file()
    parser = ExampleArgumentParser(config_file)
    opts = parser.parse_args()

    inc_types = opts["itype"]
    inc_queue = opts["queue"]

    # Create SimpleClient for a REST connection to the Resilient services
    client = resilient.get_client(opts)

    # Discovered Date will be set to the current time
    time_now = int(time.time() * 1000)

    try:
        uri = '/incidents'

        # Read the Risk Fabric section out of the same config file.
        rf_config = configparser.ConfigParser()
        rf_config.read(config_file)
        rf_opts = dict(rf_config.items('fn_risk_fabric'))

        action_plans = get_action_plans(rf_opts)
        for ap in action_plans:
            # Only action plans assigned to the requested queue become incidents.
            if 'AssignToQueueName' in ap and ap['AssignToQueueName'] == inc_queue:
                description = ap['Notes'] if 'Notes' in ap else ""
                ActionPlanGUID = ap['ActionPlanGUID']
                properties = {"rf_actionplanguid": ActionPlanGUID}

                # Construct the basic incident DTO that will be posted
                new_incident = {
                    "name": ap['Title'],
                    "description": description,
                    "incident_type_ids": inc_types,
                    "properties": properties,
                    "discovered_date": time_now
                }

                # Create the incident
                incident = client.post(uri, new_incident)
                inc_id = incident["id"]

                # Record the new incident id back on the action plan.
                params = {
                    'ActionPlanGUID': ActionPlanGUID,
                    'Comment': "Created Resilient Incident ID #" + str(inc_id)
                }
                set_action_plan_comment(rf_opts, params)

                print("Created incident {}".format(inc_id))
    except resilient.SimpleHTTPException as ecode:
        print("create failed : {}".format(ecode))
def customize_resilient(args):
    """import customizations to the resilient server"""
    parser = AppArgumentParser(config_file=resilient.get_config_file())
    (opts, extra) = parser.parse_known_args()
    client = resilient.get_client(opts)

    # Collect every registered 'customize' entry point; each yields type
    # definitions that are then applied to the Resilient server.
    entry_points = pkg_resources.iter_entry_points('resilient.circuits.customize')
    do_customize_resilient(client, entry_points, args.yflag, args.install_list)
def main():
    """Main"""
    # Parse the commandline arguments and config file
    config = resilient.get_config_file()
    print("Configuration file: {}".format(config))
    parser = resilient.ArgumentParser(config_file=config)
    opts = parser.parse_args()

    # Create SimpleClient for a REST connection to the Resilient services
    resilient_client = resilient.get_client(opts)

    # Report the list of users and groups
    report_users_and_groups(resilient_client)
def main():
    """main"""
    # Parse the commandline arguments and config file
    config = resilient.get_config_file()
    print("Configuration file: {}".format(config))
    parser = ReportArgumentParser(config_file=config)
    opts = parser.parse_args()

    # Create SimpleClient for a REST connection to the Resilient services
    resilient_client = resilient.get_client(opts)

    # Do the reports
    phases_report(opts, resilient_client)
def main():
    """ program main """
    config_file = resilient.get_config_file()
    parser = ExampleArgumentParser(config_file)
    opts = parser.parse_args()

    inc_types = opts["itype"]
    inc_limit = opts["limit"]

    # Create SimpleClient for a REST connection to the Resilient services
    client = resilient.get_client(opts)

    # Discovered Date will be set to the current time
    time_now = int(time.time() * 1000)

    try:
        uri = '/incidents'

        # Read the Risk Fabric section out of the same config file.
        rf_config = configparser.ConfigParser()
        rf_config.read(config_file)
        rf_opts = dict(rf_config.items('fn_risk_fabric'))

        params = {'Limit': inc_limit}
        result = get_risk_model_instances(rf_opts, params)
        for ap in result['Records']:
            # Construct the basic incident DTO that will be posted
            inc_name = ap['RiskModelName']
            inc_description = ap['Threats'] + ', ' + ap['FocusEntityCaption'] + ', #' + str(ap['ID'])
            new_incident = {
                "name": inc_name,
                "description": inc_description,
                "incident_type_ids": inc_types,
                "discovered_date": time_now
            }

            # Create the incident
            incident = client.post(uri, new_incident)
            inc_id = incident["id"]
            print("Created incident {}".format(inc_id))
    except resilient.SimpleHTTPException as ecode:
        print("create failed : {}".format(ecode))
def main():
    """ program main """
    parser = ExampleArgumentParser(config_file=resilient.get_config_file())
    opts = parser.parse_args()

    inc_name = opts["name"]
    inc_desc = opts["description"]
    inc_types = opts["itype"]

    # Create SimpleClient for a REST connection to the Resilient services
    client = resilient.get_client(opts)

    # Discovered Date will be set to the current time
    time_now = int(time.time() * 1000)

    # Construct the basic incident DTO that will be posted
    new_incident = {
        "name": inc_name,
        "description": inc_desc,
        "incident_type_ids": inc_types,
        "discovered_date": time_now,
        "properties": {}
    }

    # Add the specified values for any custom fields, per the command-line
    # arguments provided. Within the incident JSON structure, the values for
    # custom fields are all contained within a dictionary value named
    # 'properties'.
    for custom in opts["custom"]:
        # Split only on the first '=' so values may themselves contain '='.
        (field_name, field_value) = custom.split("=", 1)
        print("{} = {}".format(field_name, field_value))
        new_incident["properties"][field_name] = field_value

    try:
        uri = '/incidents'
        # Create the incident
        incident = client.post(uri, new_incident)
        inc_id = incident["id"]
        print("Created incident {}".format(inc_id))
    except resilient.SimpleHTTPException as ecode:
        print("create failed : {}".format(ecode))
def selftest_function(opts):
    """ Test connectivity back to SOAR """
    try:
        # A successful get_client call proves the connection settings work.
        rest_client = get_client(opts)
        return {
            "state": "success",
            "reason": None
        }
    except Exception as err:
        return {
            "state": "failure",
            "reason": str(err)
        }
def get_resilient_client(path_config_file=None, ALLOW_UNRECOGNIZED=False):
    """
    Return a SimpleClient for Resilient REST API using configurations
    options from provided path_config_file or from ~/.resilient/app.config

    :param path_config_file: path to the app.config to parse
    :type path_config_file: str
    :param ALLOW_UNRECOGNIZED: bool to specify if AppArgumentParser will allow
        unknown comandline args or not. Default is False
    :type ALLOW_UNRECOGNIZED: bool
    :return: SimpleClient for Resilient REST API
    :rtype: SimpleClient
    """
    # Parse the config first, then hand the resulting options to get_client.
    configs = get_configs(path_config_file=path_config_file,
                          ALLOW_UNRECOGNIZED=ALLOW_UNRECOGNIZED)
    return get_client(configs)
def create_authenticated_client():
    """create_authenticated_client uses the resilient package
    to gather values from a standard app.config file; the configuration file
    used for an Integration Server or App Host App.

    This means all credentials needed to run this module can be kept
    separate and we can also avoid var prompts.

    Note: If your running this module on a host other than localhost,
    that host needs to have an app.config file or you need to copy one over.

    :return: An authenticated rest client to CP4S or Resilient
    :rtype: SimpleClient
    """
    import resilient

    # Create Resilient API Client
    resilient_parser = resilient.ArgumentParser(
        config_file=resilient.get_config_file())
    # parse_known_args returns (namespace, leftovers); we only need the former.
    resilient_opts = resilient_parser.parse_known_args()

    # Instantiate a client using the gathered opts
    return resilient.get_client(resilient_opts[0])
def get_resilient_client(opts):
    """Get a connected instance of SimpleClient for Resilient REST API"""
    global resilient_client
    global connection_opts

    # Snapshot the connection-relevant options; if any of them changed since
    # the cached client was built, the cache is invalidated.
    new_opts = (opts.get("cafile"), opts.get("org"), opts.get("host"),
                opts.get("port"), opts.get("proxy_host"),
                opts.get("proxy_port"), opts.get("proxy_user"),
                opts.get("proxy_password"), opts.get("email"),
                opts.get("api_key_id"))
    if new_opts != connection_opts:
        resilient_client = None
        connection_opts = new_opts

    # Reuse the cached client when still valid; otherwise build a new one.
    if resilient_client:
        return resilient_client
    resilient_client = resilient.get_client(opts)
    return resilient_client
def get_resilient_client(path_config_file=None):
    """
    Return a SimpleClient for Resilient REST API using configurations
    options from provided path_config_file or from ~/.resilient/app.config

    :param path_config_file: Path to app.config file to use
    :return: SimpleClient for Resilient REST API
    :rtype: SimpleClient
    """
    LOG.info("Connecting to Resilient Appliance...")

    # Fall back to the default app.config location when none is given.
    if not path_config_file:
        path_config_file = get_config_file()

    config_parser = ArgumentParser(config_file=path_config_file)
    opts = config_parser.parse_known_args()[0]
    return get_client(opts)
def get_client():
    """Build a Resilient SimpleClient from module-level configuration."""
    opts_dict = {
        'host': SERVER,
        # Only pass a CA bundle when SSL verification is enabled.
        'cafile': os.environ.get('SSL_CERT_FILE') if USE_SSL else 'false',
        'org': ORG_NAME
    }

    # Prefer username/password; fall back to API key credentials.
    if USERNAME and PASSWORD:
        opts_dict.update({'email': USERNAME, 'password': PASSWORD})
    elif API_KEY_ID and API_KEY_SECRET:
        opts_dict.update({
            'api_key_id': API_KEY_ID,
            'api_key_secret': API_KEY_SECRET
        })
    else:
        # return_error reports the problem to the platform (and aborts).
        return_error(
            'Credentials were not provided. Configure either the username and password'
            ' or the API Key and API Secret')

    resilient_client = resilient.get_client(opts=opts_dict)
    return resilient_client
def main():
    """ program main """
    parser = ExampleArgumentParser(config_file=resilient.get_config_file())
    opts = parser.parse_args()

    # Create SimpleClient for a REST connection to the Resilient services
    client = resilient.get_client(opts)

    inc_id = opts["incid"]
    desc = opts["desc"]

    try:
        uri = '/incidents/{}'.format(inc_id)
        incident = client.get(uri)

        # Create a patch object. You need to pass it the base object (the
        # thing being patched). This object contains the old values, which
        # are sent to the server.
        patch = resilient.Patch(incident)
        patch.add_value("description", desc)

        print('''
At this point, we have a copy of the specified incident.  If you want to trigger a conflict
to see what will happen, then you can do so now.

Press the Enter key to continue''')
        input()

        # Apply the patch and overwrite any conflicts.
        client.patch(uri, patch, overwrite_conflict=True)

        # Confirm that our change was applied. This is not something that
        # you'd typically need to do since the patch applied successfully,
        # but this illustrates that the change was applied for the purposes
        # of this example.
        assert desc == client.get(uri)["description"]
    except resilient.SimpleHTTPException as ecode:
        print("patch failed : {}".format(ecode))
def main():
    """ program main """
    parser = ExampleArgumentParser(config_file=resilient.get_config_file())
    opts = parser.parse_args()

    inc_name = opts["name"]
    inc_desc = opts["description"]
    inc_types = opts["itype"]

    # Create SimpleClient for a REST connection to the Resilient services
    client = resilient.get_client(opts)

    # Discovered Date will be set to the current time
    time_now = int(time.time() * 1000)

    # Construct the basic incident DTO that will be posted
    new_incident = {
        "name": inc_name,
        "description": inc_desc,
        "incident_type_ids": inc_types,
        "discovered_date": time_now
    }

    try:
        uri = '/incidents'
        # Create the incident
        incident = client.post(uri, new_incident)
        inc_id = incident["id"]
        print("Created incident {}".format(inc_id))
    except resilient.SimpleHTTPException as ecode:
        print("create failed : {}".format(ecode))
def do_function(self, arg):
    """Execute a function"""
    if not arg:
        print("function command requires a function-name")
        return

    parser = AppArgumentParser(config_file=resilient.get_config_file())
    (opts, more) = parser.parse_known_args()
    client = resilient.get_client(opts)

    # First token is the function name; remaining tokens become parameter values.
    args = iter(shlex.split(arg))
    try:
        function_name = next(args)
        function_def = client.get("/functions/{}?handle_format=names".format(function_name))
        # Map each parameter UUID to its field definition.
        param_defs = dict({fld["uuid"]: fld
                           for fld in client.get("/types/__function/fields?handle_format=names")})

        # Prompt for (or consume from the command line) each declared input.
        function_params = {}
        for param in function_def["view_items"]:
            param_uuid = param["content"]
            param_def = param_defs[param_uuid]
            prompt = "{} ({}, {}): ".format(param_def["name"],
                                            param_def["input_type"],
                                            param_def["tooltip"])
            try:
                arg = next(args)
            except StopIteration:
                arg = None
            function_params[param_def["name"]] = get_input(param_def["input_type"], prompt, arg)

        action_message = {
            "function": {
                "name": function_name
            },
            "inputs": function_params
        }
        message = json.dumps(action_message, indent=2)
        print(message)
        self._submit_action("function", message)
    except Exception as e:
        print(e)
def test_get_client(self, co3_args):
    """Smoke test: get_client returns a truthy client for valid args."""
    rest_client = resilient.get_client(co3_args)
    assert rest_client
def clone(args):
    """Clone a workflow by re-importing a patched copy of the latest export."""
    parser = AppArgumentParser(config_file=resilient.get_config_file())
    (opts, extra) = parser.parse_known_args()
    client = resilient.get_client(opts)

    # Locate the most recent export that includes actions and phases/tasks.
    export_uri = "/configurations/exports/history"
    export_list = client.get(export_uri)["histories"]
    last_date = 0
    last_id = 0
    for export in export_list:
        if export["options"]["actions"] and export["options"]["phases_and_tasks"]:
            if export["date"] > last_date:
                last_date = export["date"]
                last_id = export["id"]
    if last_date == 0:
        LOG.error(
            u"ERROR: No suitable export is available. "
            u"Create an export for code generation. (Administrator Settings -> Organization -> Export)."
        )
        return
    dt = datetime.datetime.utcfromtimestamp(last_date / 1000.0)
    LOG.info(u"Codegen is based on the organization export from {}.".format(dt))

    # Get latest export
    export_uri = "/configurations/exports/{}".format(last_id)
    export_data = client.get(export_uri)

    new_export_data = export_data.copy()
    whitelist_dict_keys = ["incident_types", "fields"]  # Mandatory keys
    for dict_key in new_export_data:
        if dict_key not in whitelist_dict_keys and type(new_export_data[dict_key]) is list:
            # clear the new export data, the stuff we clear isn't necessary for cloning
            new_export_data[dict_key] = []

    # names of workflow a (target) and b (new workflow)
    workflow_names = args.workflow
    if workflow_names:  # if we're importing workflows
        if len(workflow_names) != 2:
            raise Exception(
                "Only specify the original workflow api name and a new workflow api name"
            )

        # Check that 'workflows' are available (v28 onward)
        workflow_defs = export_data.get("workflows")
        if workflow_defs is None:
            raise Exception("Export does not contain workflows")

        original_workflow_api_name = workflow_names[0]
        new_workflow_api_name = workflow_names[1]

        duplicate_check = find_workflow_by_programmatic_name(
            workflow_defs, new_workflow_api_name)
        if duplicate_check is not None:
            raise Exception(
                "Workflow with the api name {} already exists".format(
                    new_workflow_api_name))

        original_workflow = find_workflow_by_programmatic_name(
            workflow_defs, original_workflow_api_name)
        if original_workflow is None:
            raise Exception("Could not find original workflow {}".format(
                original_workflow_api_name))

        # Copy the original workflow and rewrite its identifying attributes
        # so the clone imports as a distinct object.
        new_workflow = original_workflow.copy()
        # Random UUID, not guaranteed to not collide but is extremely
        # extremely extremely unlikely to collide
        new_workflow["uuid"] = str(uuid.uuid4())
        new_workflow["programmatic_name"] = new_workflow_api_name
        new_workflow["export_key"] = new_workflow_api_name
        old_workflow_name = new_workflow["name"]
        new_workflow["name"] = new_workflow_api_name
        new_workflow["content"]["workflow_id"] = new_workflow_api_name
        # Replace both the api name and the display name inside the BPMN XML.
        new_workflow["content"]["xml"] = new_workflow["content"]["xml"].replace(
            original_workflow_api_name, new_workflow_api_name)
        new_workflow["content"]["xml"] = new_workflow["content"]["xml"].replace(
            old_workflow_name, new_workflow_api_name)
        new_export_data["workflows"] = [new_workflow]

    # Import the patched export back into the server.
    uri = "/configurations/imports"
    result = client.post(uri, new_export_data)
    # if this isn't here and the response code is 200 OK, something went really wrong
    import_id = result["id"]
    if result["status"] == "PENDING":
        # Have to confirm changes
        result["status"] = "ACCEPTED"
        uri = "/configurations/imports/{}".format(import_id)
        client.put(uri, result)
        LOG.info("Imported successfully")
    else:
        raise Exception(
            "Could not import because the server did not return an import ID")
def main():
    """ program main """
    parser = ExampleArgumentParser(config_file=resilient.get_config_file())
    opts = parser.parse_args()

    inc_name = opts["name"]
    inc_desc = opts["description"]
    inc_types = opts["itype"]
    attachment = opts["attachment"]

    # Create SimpleClient for a REST connection to the Resilient services
    client = resilient.get_client(opts)

    # Discovered Date will be set to the current time
    time_now = int(time.time() * 1000)

    # Construct SearchExDTO to see if an active incident with the same name
    # already exists.
    searchExDTO = {
        "query": inc_name,
        "filters": {
            "incident": [{
                "conditions": [{
                    "field_name": "name",
                    "method": "equals",
                    "value": inc_name
                }, {
                    "field_name": "plan_status",
                    "method": "in",
                    "value": ["A"]
                }]
            }]
        },
        "types": ["incident"]
    }

    # Construct the basic incident DTO that will be posted
    new_incident = {
        "name": inc_name,
        "description": {
            "format": "html",
            "content": inc_desc
        },
        "incident_type_ids": inc_types,
        "discovered_date": time_now
    }

    try:
        # See if any active incidents exist with the same name
        search = client.search(searchExDTO)
        if (len(search['results']) > 0):
            # Incident already exists in IBM Resilient, not creating it again
            print(searchExDTO)
            sys.exit(1)

        # Create the incident
        uri = '/incidents'
        incident = client.post(uri, new_incident)
        inc_id = incident["id"]

        # Upload the attachment to the new incident and emit the result JSON.
        upload = client.post_attachment(
            '/incidents/{0}/attachments'.format(inc_id), attachment)
        print(json.dumps(upload, indent=4))

        # Normal Exit
        sys.exit(0)
    except resilient.SimpleHTTPException as ecode:
        print("create failed : {}".format(ecode))
def get_resilient_client(opts):
    """Get a connected instance of SimpleClient for Resilient REST API"""
    return resilient.get_client(opts)
def main():
    """ program main """
    parser = ExampleArgumentParser(config_file=resilient.get_config_file())
    opts = parser.parse_args()

    inc_id = opts["incid"]
    itypes = opts["itype"]

    # Create SimpleClient for a REST connection to the Resilient services
    client = resilient.get_client(opts)

    try:
        uri = '/incidents/{}?handle_format=names'.format(inc_id)
        incident = client.get(uri)

        # Create a patch object. You need to pass it the base object (the
        # thing being patched).
        patch = resilient.Patch(incident)

        # The initial patch will contain the change we want to make.
        old_itypes = incident["incident_type_ids"]
        patch.add_value("incident_type_ids", old_itypes + itypes)

        def patch_conflict_handler(response, patch_status, patch):
            # If this gets called then there was a patch conflict, so we need
            # to adjust the patch to include an update. This only gets called
            # if a field we're trying to change has failed. In that case the
            # actual value currently on the server is included in the
            # patch_status object.
            #
            # You can retrieve the current server value using
            # patch_status.get_actual_current_value(field_name). This
            # will return the actual value that exists on the server.
            #
            # In our case, we'll be appending to this value.
            print("patch conflict detected, operation returned: ")
            print(json.dumps(patch_status.to_dict(), indent=4))

            current_value = patch_status.get_actual_current_value("incident_type_ids")
            patch.exchange_conflicting_value(patch_status,
                                             "incident_type_ids",
                                             current_value + itypes)

        print("existing itypes: {}".format(old_itypes))
        print("wanted to add these: {}".format(itypes))

        print('''
At this point, we have a copy of the specified incident.  If you want to trigger a conflict
to see what will happen, then you can do so now.

Press the Enter key to continue''')
        input()

        client.patch_with_callback(uri, patch, patch_conflict_handler)

        # Confirm that our change was applied. This is not something that
        # you'd typically need to do since the patch applied successfully,
        # but this illustrates that the change was applied for the purposes
        # of this example.
        new_itypes = client.get(uri)["incident_type_ids"]

        # Every requested type must now be present on the incident.
        print("itypes after update: {}".format(new_itypes))
        assert set(itypes).issubset(new_itypes)
    except resilient.SimpleHTTPException as ecode:
        print("patch failed : {}".format(ecode))
def _xforce_hybrid_artifacts_attachments_function(self, event, *args, **kwargs):
    """Function: analyze attachments and artifacts containing files or IPs.

    Depending on the workflow input, this either:
      * hashes an incident attachment and queries IBM X-Force for the hash,
      * queries IBM X-Force reputation for an IP artifact, or
      * uploads an artifact's file content to Hybrid Analysis for scanning,
    then yields a FunctionResult summarizing whether the object is malicious.
    """
    # Paths of temp files created during this run; removed in `finally`.
    TEMP_FILES = []

    def get_workflow_status(workflow_instance_id, res_client):
        """Return the status of the current workflow instance."""
        res = res_client.get(
            "/workflow_instances/{0}".format(workflow_instance_id))
        return res['status']

    def get_config_option(option_name, optional=False):
        """Return an app.config option; raise if a mandatory option is unset."""
        option = self.options.get(option_name)
        if option is None and optional is False:
            # BUGFIX: previously interpolated the (None) value `option`
            # instead of the option's name into the message.
            error = "'{0}' is mandatory and is not set in ~/.resilient/app.config file . you must set it value".format(
                option_name)
            raise ValueError(error)
        return option

    def remove_tmp_files(files):
        """Best-effort removal of the temp files created during this run."""
        for tmp_file in files:
            # Tolerate files that were never created or already removed.
            if os.path.isfile(tmp_file):
                os.remove(tmp_file)

    def get_input_workflow(client, incident_id, attachment_id, artifact_id):
        """Fetch attachment/artifact metadata+content and build the request body."""
        # Supported IP format examples: 1.2.3.5, 183.254.152.128
        # BUGFIX: dots are now escaped -- the previous pattern used bare '.'
        # which matches any character, so e.g. "1a2b3c4" validated as an IP.
        re_ip_match_pattern = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$"
        body = {
            "incident_id": incident_id,
            "id": None,
            "type": "",
            "meta_data": None,
            "data": None,
            "ip": None
        }
        if attachment_id:
            body["type"] = "attachment"
            body["id"] = attachment_id
            body["meta_data"] = client.get(
                "/incidents/{0}/attachments/{1}".format(
                    body['incident_id'], body['id']))
            body["data"] = client.get_content(
                "/incidents/{0}/attachments/{1}/contents".format(
                    body['incident_id'], body['id']))
        elif artifact_id:
            body["type"] = "artifact"
            body["id"] = artifact_id
            body["meta_data"] = client.get(
                "/incidents/{0}/artifacts/{1}".format(
                    body["incident_id"], body["id"]))
            # An artifact is either a file attachment or a plain value (an IP).
            if body["meta_data"]["attachment"]:
                body["data"] = client.get_content(
                    "/incidents/{0}/artifacts/{1}/contents".format(
                        body["incident_id"], body["id"]))
            else:
                valid = re.match(re_ip_match_pattern,
                                 body["meta_data"]["value"])
                if valid:
                    body["ip"] = valid.group()
                else:
                    sample_format = "1.2.3.5"
                    raise ValueError(
                        "Enter a valid format ip like this {}".format(
                            sample_format))
        return body

    def generate_header(api_key, api_password):
        """Return base64-encoded 'key:password' for X-Force basic auth."""
        header = api_key + ":" + api_password
        return b64encode(header.encode('utf-8'))

    def calculate_file_hash(path):
        """Return the SHA-256 hex digest of the file at `path`."""
        hasher = hashlib.sha256()
        try:
            # Read-only is sufficient; the `with` block closes the file.
            with open(path, 'rb') as file:
                hasher.update(file.read())
            return hasher.hexdigest()
        except IOError as error:
            # BUGFIX: the original passed `error` as a second print argument
            # instead of formatting it into the message.
            print("an error occured or %s" % error)

    def write_temp_file(data, name=None):
        """Write binary `data` to a temp file and return its path."""
        if name:
            # e.g. /tmp/<attachment_name>
            path = "{0}/{1}".format(tempfile.gettempdir(), name)
        else:
            fd, path = tempfile.mkstemp(suffix="test",
                                        prefix="suspect_attachment")
            # BUGFIX: close the OS-level descriptor returned by mkstemp;
            # the original leaked one fd per call.
            os.close(fd)
        try:
            TEMP_FILES.append(path)
            with open(path, 'wb') as build_file:
                build_file.write(data)
        except IOError as error:
            print("something wrong or %s" % error)
        return path

    def check_response(response, body):
        """Interpret an API response; True means the object looks malicious."""
        if body["type"] == "attachment":
            malicious = None
            if response.status_code == 200:
                print("hash has been submited succssefully")
                malicious = True
            elif response.status_code == 404:
                malicious = False
            else:
                print("status_code : {0}".format(response.status_code),
                      response.json()['error'])
            return malicious
        elif body["ip"]:
            return len(response.json()["malware"]) > 0
        elif body["type"] == "artifact":
            print("init scanning artifact...")
            # BUGFIX: consider every scanner verdict; the original returned
            # after inspecting only the first scanner in the list.
            scanners = response.json().get("scanners", [])
            return any(element["status"] == "malicious"
                       for element in scanners)

    def process_response(response, body):
        """Extract a brief summary dict from the submitted query's response."""
        if body["type"] == "attachment":
            attachment_info_status = dict()
            if check_response(response, body):
                res_json = response.json()
                print(
                    "trying to extract brif info about the hash result ......"
                )
                attachment_info_status['status'] = "malware"
                attachment_info_status['malicious'] = True
                for key, value in res_json["malware"].items():
                    if key == "origins":
                        for nested_key in res_json["malware"][key]:
                            attachment_info_status["family"] = res_json[
                                'malware'][key][nested_key]['family'][0]
                    elif key == "risk":
                        attachment_info_status["risk"] = value
                print("Result obtained successfully.....")
                return attachment_info_status
            else:
                print("Hash is Not regconized as malicious hash.....")
                return {'status': 'clean_attachment', "malicious": False}
        elif body["ip"]:
            ip_info_status = {
                "label": "ip",
                "type": "",
                "malware_info": [],
                "family": ""
            }
            if check_response(response, body):
                res_json = response.json()
                print("trying to extract brif info about the ip reputation")
                malware = res_json['malware']
                ip_info_status["type"] = malware[0]['type']
                ip_info_status['malicious'] = True
                ip_info_status['status'] = "InfectedIP"
                # Limit the summary to at most the first 3 malware records.
                # ROBUSTNESS: the original indexed [0..2] unconditionally and
                # raised IndexError when fewer records were returned.
                for idx in range(min(3, len(malware))):
                    ip_info_status["malware_info"].append({
                        'domain_name': malware[idx]['domain'],
                        'file_path': malware[idx]['filepath'],
                        'lastseen': malware[idx]['lastseen']
                    })
                ip_info_status['family'] = malware[0]['family'][0]
                print("Result obtained successfully.....")
                return ip_info_status
            else:
                print("the given ip is clear and not infected by malware")
                return {
                    "status": "clean_ip",
                    "malicious": False,
                    "label": "ip"
                }
        elif body["type"] == "artifact":
            # Both verdicts report every scanner's status; only the
            # `malicious` flag (and the extra prints) differ, so the two
            # near-identical branches are merged here.
            malicious = bool(check_response(response, body))
            scanners = response.json()["scanners"]
            artifact_info_status = {
                "scanners": [],
                "label": "attachment_suspect",
                "malicious": malicious
            }
            if malicious:
                print("trying extract brif infomartion about the artifact")
            for element in scanners:
                artifact_info_status["scanners"].append({
                    "name": element["name"],
                    "status": element["status"],
                    "progress": element["progress"]
                })
            if malicious:
                print(artifact_info_status)
            return artifact_info_status

    def submit_hash_file(url_api, end_point, file_hash, header, body):
        """Submit a file hash to X-Force and return the processed result."""
        # NOTE(review): verify=False disables TLS certificate validation;
        # consider a cafile option instead of silently trusting any cert.
        response = requests.get("{0}{1}{2}".format(url_api, end_point,
                                                   file_hash),
                                headers=header,
                                verify=False)
        if response is not None:
            return process_response(response, body)
        print(
            "something going wrong could be missing header or expired api or connection failed"
        )

    def submit_artifact_attachment(url_api, end_point, attachment, header,
                                   data, body):
        """Upload an artifact's file to Hybrid Analysis for scanning."""
        try:
            with open(attachment, "rb") as file:
                response = requests.post("{0}{1}".format(url_api, end_point),
                                         headers=header,
                                         data=data,
                                         files={'file': file})
            if response is not None:
                return process_response(response, body)
        except IOError as error:
            raise ValueError("{0}".format(error))

    def is_malicious(result):
        """Return True when a processed result is flagged malicious."""
        return bool(result["malicious"])

    def submit_ip(url_api, end_point, target_ip, header, body):
        """Query X-Force IP reputation and return the processed result."""
        response = requests.get("{0}{1}{2}".format(url_api, end_point,
                                                   target_ip),
                                headers=header,
                                verify=False)
        if response is not None:
            return process_response(response, body)
        print(
            "something going wrong could be missing header or expired api or connection failed"
        )

    try:
        # wf_instance_id of the workflow this Function was called in.
        wf_instance_id = event.message["workflow_instance"][
            "workflow_instance_id"]

        # X-Force / Hybrid Analysis options from the config file.
        # BUGFIX: removed a duplicated xforce_api_key lookup.
        XFORCE_API_URL = get_config_option("xforce_api")
        XFORCE_API_KEY = get_config_option("xforce_api_key")
        XFORCE_API_PASSWORD = get_config_option("xforce_api_password")
        XFORCE_MALWARE_ENDPOINT = get_config_option(
            "xforce_malware_endpoint", optional=True)
        XFORCE_IP_ENDPOINT = get_config_option(
            "xforce_ipReputation_endpoint", optional=True)
        HYBRID_API_URL = get_config_option("hybrid_api")
        HYBRID_SCAN_ENDPOINT = get_config_option("hybrid_scan_endpoint")
        HYBRID_API_KEY = get_config_option("hybrid_api_key")

        # Header for the X-Force API (basic auth).
        header_X = {
            "Content-Type": "application/json",
            "Authorization": "Basic {0}".format(
                generate_header(XFORCE_API_KEY,
                                XFORCE_API_PASSWORD).decode("utf-8"))
        }
        # Header for the Hybrid Analysis API.
        header_H = {
            "api-key": HYBRID_API_KEY,
            "user-agent": "Falcon Sandbox"
        }
        # POST data for the Hybrid Analysis scan endpoint.
        data = {"scan_type": "all"}

        # Function parameters.
        attachment_name = kwargs.get("attachment_name")  # text
        incident_id = kwargs.get("incident_id")  # number
        attachment_id = kwargs.get("attachment_id")  # number
        artifact_id = kwargs.get("artifact_id")  # number

        # Check that the required inputs are defined.
        if incident_id is None:
            raise ValueError("incident_id is required value...")
        if not attachment_id and not artifact_id:
            raise ValueError("attachment_id or artifact_id is required")

        # Init the resilient client.
        parser = resilient.ArgumentParser(
            config_file=resilient.get_config_file())
        opts = parser.parse_args()
        client = resilient.get_client(opts)

        log = logging.getLogger(__name__)
        log.info("attachment_name: %s", attachment_name)
        log.info("incident_id: %s", incident_id)
        log.info("attachment_id: %s", attachment_id)
        log.info("artifact_id: %s", artifact_id)

        yield StatusMessage("starting.......")

        # Get the body we are working on.
        body = get_input_workflow(client, incident_id, attachment_id,
                                  artifact_id)
        # NOTE(review): the workflow status is fetched but never used
        # downstream; kept for behavioral compatibility.
        workflow_status = get_workflow_status(wf_instance_id, client)

        file_hash = None
        query_ip = None
        temp_artifact_file_path = None
        # ROBUSTNESS: `results` was previously unbound when no branch
        # produced a value, raising NameError at the final yield.
        results = None

        if body["type"] == "attachment" and body["data"] is not None:
            # Temp file name is /tmp/<attachment_name>.
            temp_file_path = write_temp_file(body["data"], attachment_name)
            yield StatusMessage("Trying calculating file hash.........")
            file_hash = calculate_file_hash(temp_file_path)
        elif body["ip"]:
            query_ip = body["ip"]
        elif body["type"] == "artifact" and body["data"] is not None:
            yield StatusMessage("Writing artifact attachment.........")
            temp_artifact_file_path = write_temp_file(body["data"])

        if file_hash:
            print("starting submiting file hash.........")
            response = submit_hash_file(XFORCE_API_URL,
                                        XFORCE_MALWARE_ENDPOINT, file_hash,
                                        header_X, body)
            if response:
                print("getting the results successfully.........")
                if is_malicious(response):
                    results = {
                        "status": response["status"],
                        "family": response["family"],
                        "risk": response["risk"],
                        "filename": attachment_name
                    }
                else:
                    results = {
                        "status": response["status"],
                        "filename": attachment_name
                    }
            else:
                yield StatusMessage("file hash not provied well..")
        elif query_ip:
            print("starting submiting ip.....")
            response = submit_ip(XFORCE_API_URL, XFORCE_IP_ENDPOINT,
                                 query_ip, header_X, body)
            if response:
                print("getting the results successfully.........")
                if is_malicious(response):
                    results = {
                        "label": response["label"],
                        "type": response["type"],
                        "status": response["status"],
                        "malware_info": response["malware_info"],
                        "family": response["family"]
                    }
                else:
                    results = {
                        "label": response["label"],
                        "status": response["status"],
                        "ip": query_ip
                    }
            else:
                print("the given ip not provied well...........")
        else:
            print("starting submiting artifact attachment")
            response = submit_artifact_attachment(HYBRID_API_URL,
                                                  HYBRID_SCAN_ENDPOINT,
                                                  temp_artifact_file_path,
                                                  header_H, data, body)
            if response:
                print("getting the results successfully.........")
                if is_malicious(response):
                    results = {
                        "label": response["label"],
                        "scanners": response["scanners"],
                        "status": "danger"
                    }
                else:
                    results = {
                        "label": response["label"],
                        "scanners": response["scanners"],
                        "status": "clean"
                    }

        yield StatusMessage("done...")

        if results is None:
            # Fail loudly (and be caught below) instead of yielding a
            # FunctionResult built from an undefined variable.
            raise ValueError("no scan results were produced")

        # Produce a FunctionResult with the results.
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()
    finally:
        remove_tmp_files(TEMP_FILES)
if FETCH_TIME: try: datetime.strptime(FETCH_TIME, TIME_FORMAT) except ValueError as error: return_error( 'There is something wrong with the fetch date. Error: {}'. format(error)) demisto.results('ok') ''' EXECUTION CODE ''' client = resilient.get_client({ 'email': USERNAME, 'password': PASSWORD, 'host': SERVER, 'cafile': 'true' if USE_SSL else 'false', 'org': ORG_NAME }) # Disable SDK logging warning messages integration_logger = logging.getLogger('resilient') # type: logging.Logger integration_logger.propagate = False LOG('command is %s' % (demisto.command(), )) try: if demisto.command() == 'test-module': # Checks if there is an authenticated session test() elif demisto.command() == 'fetch-incidents': fetch_incidents()