def test_user_agent():
    """The session must always advertise jbxapi.py and its version, and
    additionally any custom integration name supplied by the caller."""
    sandbox = jbxapi.JoeSandbox()
    agent = sandbox.session.headers["User-Agent"]
    assert "jbxapi.py" in agent
    assert jbxapi.__version__ in agent

    sandbox = jbxapi.JoeSandbox(user_agent="My Integration")
    agent = sandbox.session.headers["User-Agent"]
    for expected in ("jbxapi.py", "My Integration", jbxapi.__version__):
        assert expected in agent
def test_api_url_input_methods(monkeypatch):
    """Precedence of the API URL setting: constructor argument beats the
    JBX_API_URL environment variable, which beats the module constant."""
    # lowest precedence: module-level constant
    monkeypatch.setattr("jbxapi.API_URL", "from_script")
    assert jbxapi.JoeSandbox().apiurl == "from_script"

    # environment variable overrides the constant
    monkeypatch.setenv("JBX_API_URL", "from_env")
    assert jbxapi.JoeSandbox().apiurl == "from_env"

    # explicit argument overrides everything else
    assert jbxapi.JoeSandbox(apiurl="from_arg").apiurl == "from_arg"
def selftest_function(opts):
    """
    Test connectivity to the Joe Sandbox API.

    Reads the Joe Sandbox settings from the ``fn_joe_sandbox_analysis``
    section of the app config, builds an optional proxy configuration and
    asks the server whether it is online.

    :param opts: full app options dict.
    :return: ``{"state": "success", "reason": "Server Online"}`` when the
             server is reachable, a ``"failure"`` dict otherwise.
    """
    app_configs = opts.get("fn_joe_sandbox_analysis", {})

    API_KEY = get_config_option("jsb_api_key", app_configs)
    ACCEPT_TAC = str_to_bool(get_config_option("jsb_accept_tac", app_configs))
    HTTP_PROXY = get_config_option("jsb_http_proxy", app_configs, True)
    HTTPS_PROXY = get_config_option("jsb_https_proxy", app_configs, True)

    # NOTE: the API key is a secret credential and must never be logged.
    # (A previous revision wrote it to the log at INFO level.)

    proxies = {}
    try:
        proxies = get_proxies(opts, app_configs)
        # Fall back to the explicit proxy options only when the generic
        # helper returned nothing. Both schemes may be configured at once;
        # previously the HTTPS proxy was dropped whenever an HTTP proxy
        # had already been added.
        if not proxies:
            if HTTP_PROXY:
                proxies["http"] = HTTP_PROXY
            if HTTPS_PROXY:
                proxies["https"] = HTTPS_PROXY
        if not proxies:
            proxies = None
    except Exception:
        # Proxy discovery is best-effort; fall back to a direct connection.
        proxies = None

    joesandbox = jbxapi.JoeSandbox(apikey=API_KEY, accept_tac=ACCEPT_TAC, proxies=proxies)

    if joesandbox.server_online():
        return {"state": "success", "reason": "Server Online"}
    else:
        return {"state": "failure", "reason": "Server Offline"}
def run(self):
    """Entry point of the viper command: validate configuration and session,
    build the API client, then dispatch to the requested sub-command."""
    super(JoeSandbox, self).run()

    # A [joesandbox] configuration section is mandatory.
    if not cfg.joesandbox:
        self.log("error", 'The JoeSandbox module cannot be used unless the configuration is defined.')
        return

    self.joe = jbxapi.JoeSandbox(apiurl=cfg.joesandbox.apiurl,
                                 apikey=cfg.joesandbox.apikey,
                                 accept_tac=cfg.joesandbox.accept_tac,
                                 verify_ssl=cfg.joesandbox.verify,
                                 user_agent="viper")

    if self.args is None:
        return
    if not __sessions__.is_set():
        self.log('error', "No open session.")
        return

    try:
        # First matching flag wins, mirroring the original if/elif chain.
        for selected, action in ((self.args.submit, self.submit),
                                 (self.args.tasks, self.tasks),
                                 (self.args.dropped, self.dropped),
                                 (self.args.clear, self.clear),
                                 (self.args.report, self.report)):
            if selected:
                action()
                break
    except jbxapi.JoeException as e:
        self.log("error", e)
def handler(q=False):
    """MISP expansion handler: resolve a Joe Sandbox submission URL into
    parsed MISP results using the configured import options."""
    if q is False:
        return False
    request = json.loads(q)

    config = request['config']
    apiurl = config.get('apiurl') or 'https://jbxcloud.joesecurity.org/api'
    apikey = config.get('apikey')

    def flag(name):
        # module options arrive as the strings "true"/"false"
        return config.get(name, "false") == "true"

    parser_config = {
        "import_pe": flag('import_pe'),
        "mitre_attack": flag('import_mitre_attack'),
        "import_network_interactions": flag('import_network_interactions'),
        "import_malware_config": flag('import_malware_config'),
        "import_dropped_files": flag('import_dropped_files'),
        "import_registry_activities": flag('import_registry_activities'),
        "import_system_behavior": flag('import_system_behavior'),
        "import_network_behavior": flag('import_network_behavior'),
    }

    if not apikey:
        return {'error': 'No API key provided'}

    url = request['attribute']['value']
    if "/submissions/" not in url:
        return {'error': "The URL does not point to a Joe Sandbox analysis."}
    # The URL has the format https://example.net/submissions/12345
    submission_id = url.rsplit('/', 1)[-1]

    joe = jbxapi.JoeSandbox(apiurl=apiurl, apikey=apikey,
                            user_agent='MISP joesandbox_query')
    try:
        joe_info = joe.submission_info(submission_id)
    except jbxapi.ApiError as e:
        return {'error': str(e)}

    if joe_info["status"] != "finished":
        return {'error': "The analysis has not finished yet."}
    if joe_info['most_relevant_analysis'] is None:
        return {'error': "No analysis belongs to this submission."}

    analysis_webid = joe_info['most_relevant_analysis']['webid']
    joe_parser = JoeParser(parser_config)
    raw_report = joe.analysis_download(analysis_webid, 'jsonfixed')[1]
    joe_parser.parse_data(json.loads(raw_report)['analysis'])
    joe_parser.finalize_results()
    return {'results': joe_parser.results}
def test_accept_tac_input_methods(monkeypatch):
    """Precedence for accept_tac: argument > JBX_ACCEPT_TAC env var > module
    constant. Expected values alternate True/False so each override is
    actually observed rather than masked by the previous level."""
    monkeypatch.setattr("jbxapi.ACCEPT_TAC", True)
    assert jbxapi.JoeSandbox().accept_tac is True

    monkeypatch.setenv("JBX_ACCEPT_TAC", "0")
    assert jbxapi.JoeSandbox().accept_tac is False

    monkeypatch.setenv("JBX_ACCEPT_TAC", "1")
    assert jbxapi.JoeSandbox().accept_tac is True

    assert jbxapi.JoeSandbox(accept_tac=False).accept_tac is False
def connect(self, params):
    """Create the Joe Sandbox API client from the connection parameters."""
    self.logger.info('Connect: Connecting...')
    api_key = params.get('api_key').get('secretKey')
    api_url = params.get('url')
    # An unset URL means "use the library's default endpoint".
    self.api = jbxapi.JoeSandbox(api_key, jbxapi.API_URL if api_url is None else api_url)
    self.logger.info('Connect: Connected successfully')
def __init__(self, apikey, apiurl, accept_tac, timeout=None, verify_ssl=True, retries=3):
    """Initialize the interface to Joe Sandbox API

    :param apikey: Joe Sandbox API key used for authentication.
    :param apiurl: base URL of the API; a falsy value falls back to
        ``jbxapi.API_URL`` (the library's default endpoint).
    :param accept_tac: whether the Joe Sandbox terms and conditions are accepted.
    :param timeout: optional request timeout, passed through to jbxapi.
    :param verify_ssl: verify TLS certificates (default ``True``).
    :param retries: number of retry attempts for failed requests.
    """
    # Initialize the generic sandboxapi base class first.
    sandboxapi.SandboxAPI.__init__(self)
    # The jbxapi client performs the actual HTTP communication.
    self.jbx = jbxapi.JoeSandbox(apikey, apiurl or jbxapi.API_URL, accept_tac, timeout, verify_ssl, retries)
def connect(self, params):
    """Instantiate the Joe Sandbox API client from plugin parameters."""
    self.logger.info("Connect: Connecting...")
    secret = params.get("api_key")
    api_key = secret.get("secretKey")
    api_url = params.get("url")
    if api_url is None:
        # no URL configured -> fall back to the library default
        api_url = jbxapi.API_URL
    self.api = jbxapi.JoeSandbox(api_key, api_url)
    self.logger.info("Connect: Connected successfully")
def handler(q=False):
    """MISP expansion handler: validate the incoming 'link' attribute, fetch
    the referenced Joe Sandbox analysis and convert it into MISP results."""
    if q is False:
        return False
    request = json.loads(q)

    config = request['config']
    apiurl = config.get('apiurl') or 'https://jbxcloud.joesecurity.org/api'
    apikey = config.get('apikey')
    parser_config = {
        "import_pe": config.get('import_pe', "false") == "true",
        "mitre_attack": config.get('import_mitre_attack', "false") == "true",
    }

    if not apikey:
        return {'error': 'No API key provided'}

    attribute = request.get('attribute')
    if not attribute or not check_input_attribute(attribute, requirements=('type', 'value')):
        return {'error': f'{standard_error_message}, {checking_error} that is the link to the Joe Sandbox report.'}
    if attribute['type'] != 'link':
        return {'error': 'Unsupported attribute type.'}

    url = attribute['value']
    if "/submissions/" not in url:
        return {'error': "The URL does not point to a Joe Sandbox analysis."}
    # URLs look like https://example.net/submissions/12345
    submission_id = url.rsplit('/', 1)[-1]

    joe = jbxapi.JoeSandbox(apiurl=apiurl, apikey=apikey,
                            user_agent='MISP joesandbox_query')
    try:
        joe_info = joe.submission_info(submission_id)
    except jbxapi.ApiError as e:
        return {'error': str(e)}

    if joe_info["status"] != "finished":
        return {'error': "The analysis has not finished yet."}
    if joe_info['most_relevant_analysis'] is None:
        return {'error': "No analysis belongs to this submission."}

    analysis_webid = joe_info['most_relevant_analysis']['webid']
    joe_parser = JoeParser(parser_config)
    joe_data = json.loads(joe.analysis_download(analysis_webid, 'jsonfixed')[1])
    joe_parser.parse_data(joe_data['analysis'])
    joe_parser.finalize_results()
    return {'results': joe_parser.results}
def handler(q=False):
    """MISP expansion handler (no parser options): resolve a Joe Sandbox
    submission URL into parsed MISP results."""
    if q is False:
        return False
    request = json.loads(q)

    config = request['config']
    apiurl = config.get('apiurl') or 'https://jbxcloud.joesecurity.org/api'
    apikey = config.get('apikey')
    if not apikey:
        return {'error': 'No API key provided'}

    url = request['attribute']['value']
    if "/submissions/" not in url:
        return {'error': "The URL does not point to a Joe Sandbox analysis."}
    # format: https://example.net/submissions/12345
    submission_id = url.rsplit('/', 1)[-1]

    joe = jbxapi.JoeSandbox(apiurl=apiurl, apikey=apikey,
                            user_agent='MISP joesandbox_query')
    try:
        joe_info = joe.submission_info(submission_id)
    except jbxapi.ApiError as e:
        return {'error': str(e)}

    if joe_info["status"] != "finished":
        return {'error': "The analysis has not finished yet."}
    if joe_info['most_relevant_analysis'] is None:
        return {'error': "No analysis belongs to this submission."}

    analysis_webid = joe_info['most_relevant_analysis']['webid']
    joe_parser = JoeParser()
    raw_report = joe.analysis_download(analysis_webid, 'jsonfixed')[1]
    joe_parser.parse_data(json.loads(raw_report)['analysis'])
    joe_parser.finalize_results()
    return {'results': joe_parser.results}
def main():
    """Poll an IMAP mailbox for unread mail and submit every attachment to
    Joe Sandbox for analysis. Connection settings come from module-level
    constants (SERVER, USERNAME, PASSWORD, API_URL, API_KEY, ACCEPT_TAC)."""
    joe = jbxapi.JoeSandbox(apiurl=API_URL, apikey=API_KEY, accept_tac=ACCEPT_TAC)

    print("Connecting to {0} ...".format(SERVER))
    imap = imaplib.IMAP4_SSL(SERVER)

    print("Logging in as {0} ...".format(USERNAME))
    imap.login(USERNAME, PASSWORD)

    # get message ids
    msg_ids = fetch_message_ids(imap)
    print("Found {0} unread mail(s).".format(len(msg_ids)))

    # extract attachments
    def attachments():
        # Lazily yield (message id, attachment name, content) triples so each
        # message is only downloaded when its attachments are processed.
        for msg_id in msg_ids:
            message = read_message(imap, msg_id)
            for name, content in extract_attachments(message):
                yield msg_id, name, content

    count = 0
    for msg_id, name, content in attachments():
        try:
            data = submit_sample(joe, name, content)
        except:
            # if the submission fails we reset the seen flag
            # (bare except is deliberate here: the flag must be restored even
            # for non-Exception interrupts, and the error is re-raised below)
            unset_seen_flag(imap, msg_id)
            raise
        else:
            count += 1
            print("Submitted {0} to Joe Sandbox with webid: {1}".format(name, ", ".join(data["webids"])))

    print("======================================================")
    if count:
        print("Submitted {0} samples for analysis.".format(count))
    else:
        print("No new attachments found.")
    print("======================================================")
def handler(q=False):
    """MISP submission module: send an attribute (URL/domain, malware-sample
    or attachment) to Joe Sandbox and return the resulting submission link
    as a new MISP 'link' attribute."""
    if q is False:
        return False
    request = json.loads(q)

    apiurl = request["config"].get(
        "apiurl") or "https://jbxcloud.joesecurity.org/api"
    apikey = request["config"].get("apikey")

    # systems: accept whitespace-, comma- or semicolon-separated lists
    systems = request["config"].get("systems") or ""
    systems = [s.strip() for s in re.split(r"[\s,;]", systems) if s.strip()]

    # tags: same separator handling as systems
    tags = request["config"].get("tags") or ""
    tags = [s.strip() for s in re.split(r"[\s,;]", tags) if s.strip()]

    # others
    lia = request["config"].get("localized-internet-country") or ""
    analysis_time = request["config"].get("analysis-time") or 120

    try:
        accept_tac = _parse_bool(request["config"].get("accept-tac"), "accept-tac")
        report_cache = _parse_bool(request["config"].get("report-cache"), "report-cache")
        secondary_results = _parse_bool(
            request["config"].get("secondary-results"), "secondary-results")
        ssl_inspection = _parse_bool(request["config"].get("ssl-inspection"), "ssl-inspection")
    except _ParseError as e:
        return {"error": str(e)}

    # Embed MISP provenance in the sandbox comment field so each analysis can
    # be traced back to the originating event/attribute.
    comments = {
        "source": "misp",
        "event_id": request["event_id"],
        "attribute_id": request["attribute"]["id"],
        "attribute_uuid": request["attribute"]["uuid"],
        "attribute_type": request["attribute"]["type"],
        "attribute_category": request["attribute"]["category"]
    }

    # Submission parameters forwarded to the Joe Sandbox API.
    params = {
        "report-cache": report_cache,
        "systems": systems,
        "secondary-results": secondary_results,
        "localized-internet-country": lia,
        "ssl-inspection": ssl_inspection,
        "analysis-time": analysis_time,
        "comments": json.dumps(comments),
        "tags": tags
    }

    if not apikey:
        return {"error": "No API key provided"}

    joe = jbxapi.JoeSandbox(apiurl=apiurl, apikey=apikey, user_agent="MISP joesandbox_submit", accept_tac=accept_tac)

    try:
        is_url_submission = "url" in request or "domain" in request
        if is_url_submission:
            url = request.get("url") or request.get("domain")
            log.info("Submitting URL: %s", url)
            result = joe.submit_url(url, params=params)
        else:
            attr_type = request['attribute']['type']
            attr_value = request['attribute']['value']
            attr_data = request['attribute']['data']
            if attr_type == "malware-sample":
                # keep only the part before the first '|' as the file name
                filename = attr_value.split("|", 1)[0]
                data = _decode_malware(attr_data, True)
            elif attr_type == "attachment":
                filename = attr_value
                data = _decode_malware(attr_data, False)
            data_fp = io.BytesIO(data)
            log.info("Submitting sample: %s", filename)
            result = joe.submit_sample((filename, data_fp), params=params)
        assert "submission_id" in result
    except jbxapi.JoeException as e:
        log.error("ERROR: %s" % str(e))
        return {"error": str(e)}

    # Build a browser link to the new submission, relative to the API URL.
    link_to_analysis = urljoin(
        apiurl, "../submissions/{}".format(result["submission_id"]))

    attribute = MISPAttribute()
    attribute.from_dict(**{
        'type': 'link',
        'value': link_to_analysis,
        'to_ids': False
    })
    misp_event = MISPEvent()
    misp_event.add_attribute(**attribute)
    event = json.loads(misp_event.to_json())
    # Only return the non-empty sections MISP expects.
    results = {
        key: event[key] for key in ('Attribute', 'Object', 'Tag')
        if (key in event and event[key])
    }
    log.debug(results)
    return {'results': results}
def main(args):
    """Submit a sample, a directory of samples or a URL to the configured
    Joe Sandbox server with the shortest queue, falling back to the next
    server on failure, and optionally wait for and download the reports.

    :param args: ignored; the real arguments are re-parsed from sys.argv.
    """
    # command line interface
    parser = argparse.ArgumentParser(description='Submit samples, directories or URLs to the server with the shortest queue. If the submission fails, the next server is selected, until no servers are left. Uses jbxapi.py. Please set your submission options there.')
    parser.add_argument('path_or_url', metavar="PATH_OR_URL", help='Path to file or directory, or URL.')
    group = parser.add_argument_group("submission mode")
    submission_mode_parser = group.add_mutually_exclusive_group(required=False)
    submission_mode_parser.add_argument('--url', dest="url_mode", action="store_true", help="Analyse the given URL instead of a sample.")
    submission_mode_parser.add_argument('--sample-url', dest="sample_url_mode", action="store_true", help="Download the sample from the given url.")
    parser.add_argument("--comments", default=None, help='comments (optional')
    parser.add_argument("--wait-for-results", "-wait", action="store_true", help='Set this option to let the script wait for the end of the analysis')
    parser.add_argument("--outdir", "-o", help='Directory for saving the xml reports (optional)')
    args = parser.parse_args()

    if args.outdir is not None:
        if not os.path.isdir(args.outdir):
            sys.exit("Output directory does not exist")

    # prepare servers: one client per (url, key) pair and an empty job queue each
    joes = [jbxapi.JoeSandbox(apiurl=url, apikey=key) for url, key in SERVERS]
    job_queues = {joe: [] for joe in joes}

    params = {"comments": args.comments}

    if args.url_mode or args.sample_url_mode:
        success = False
        # Try to submit to best server, if it fails continue until no server is left
        while joes and not success:
            try:
                joe = pick_best_joe(joes)
            except AllServersOfflineError as e:
                print("Failed to fetch any server: ", e, file=sys.stderr)
                break
            success = submit_url(args, joe, job_queues, params)
            if success:
                break
            joes.remove(joe)
            print("Trying to submit to next server")
        if not joes and not success:
            print("No more servers to submit to, submission failed", file=sys.stderr)
    # File or directory submission
    else:
        # if given a directory, collect all files
        if os.path.isdir(args.path_or_url):
            paths = [os.path.join(args.path_or_url, name) for name in os.listdir(args.path_or_url)]
        else:
            paths = [args.path_or_url]

        for path in paths:
            success = False
            joes_clone = list(joes)
            # Try to submit to best server, if it fails continue until no server is left.
            # FIX: the loop must test joes_clone (the per-file working copy we
            # actually pick from and remove from); testing the untouched
            # `joes` list could keep looping after every candidate was used.
            while joes_clone and not success:
                try:
                    joe = pick_best_joe(joes_clone)
                except AllServersOfflineError as e:
                    print("Failed to fetch any server: ", e, file=sys.stderr)
                    break
                name = os.path.basename(path)
                try:
                    with open(path, "rb") as f:
                        data = joe.submit_sample(f, params=params)
                    print("Submitted '{0}' with webid(s): {1} to server: {2}".format(name, ",".join(data["webids"]), joe.apiurl))
                    for webid in data["webids"]:
                        job_queues[joe].append(Submission(name, webid))
                    success = True
                except Exception as e:
                    print("Submitting '{0}' failed: {1}".format(name, e), file=sys.stderr)
                if success:
                    break
                joes_clone.remove(joe)
                print("Trying to submit to next server")
            if not joes_clone and not success:
                print("No more servers to submit to, submission failed", file=sys.stderr)

    def job_count():
        # total number of submissions still awaiting a report
        return sum(len(jobs) for jobs in job_queues.values())

    print("Submitted {0} sample(s).".format(job_count()))

    if not args.wait_for_results:
        return

    print("Waiting for the analyses to finish ...")

    # download reports
    while job_count() > 0:
        new_reports = 0
        for joe, job_queue in job_queues.items():
            # no jobs in queue
            if not job_queue:
                continue
            submission = job_queue[0]
            info = joe.info(submission.webid)
            if info["status"] == "finished":
                job_queue.pop(0)
                new_reports += 1
                handle_finished_analysis(joe, submission, info, args.outdir)
        # sleep if no new reports were found last time
        if not new_reports:
            for i in range(60 * 5):
                print_progress(job_count())
                time.sleep(.2)
def joe():
    """Return a JoeSandbox client built with default settings."""
    client = jbxapi.JoeSandbox()
    return client
def handler(q=False):
    """MISP submission module (basic variant): submit a URL/domain or a
    malware-sample/attachment to Joe Sandbox and return the submission link."""
    if q is False:
        return False
    request = json.loads(q)

    config = request["config"]
    apiurl = config.get("apiurl") or "https://jbxcloud.joesecurity.org/api"
    apikey = config.get("apikey")

    # systems: split on whitespace, commas or semicolons
    raw_systems = config.get("systems") or ""
    systems = [token.strip() for token in re.split(r"[\s,;]", raw_systems) if token.strip()]

    try:
        accept_tac = _parse_bool(config.get("accept-tac"), "accept-tac")
        report_cache = _parse_bool(config.get("report-cache"), "report-cache")
    except _ParseError as e:
        return {"error": str(e)}

    params = {
        "report-cache": report_cache,
        "systems": systems,
    }

    if not apikey:
        return {"error": "No API key provided"}

    joe = jbxapi.JoeSandbox(apiurl=apiurl, apikey=apikey,
                            user_agent="MISP joesandbox_submit",
                            accept_tac=accept_tac)
    try:
        if "url" in request or "domain" in request:
            target = request.get("url") or request.get("domain")
            log.info("Submitting URL: %s", target)
            result = joe.submit_url(target, params=params)
        else:
            if "malware-sample" in request:
                # keep the part before the first '|' as the file name
                filename = request.get("malware-sample").split("|", 1)[0]
                data = _decode_malware(request["data"], True)
            elif "attachment" in request:
                filename = request["attachment"]
                data = _decode_malware(request["data"], False)
            data_fp = io.BytesIO(data)
            log.info("Submitting sample: %s", filename)
            result = joe.submit_sample((filename, data_fp), params=params)
        assert "submission_id" in result
    except jbxapi.JoeException as e:
        return {"error": str(e)}

    # Turn the submission id into a browser link relative to the API URL.
    link_to_analysis = urljoin(apiurl, "../submissions/{}".format(result["submission_id"]))
    return {
        "results": [{
            "types": "link",
            "categories": "External analysis",
            "values": link_to_analysis,
        }]
    }
def _fn_joe_sandbox_analysis_function(self, event, *args, **kwargs):
    """Function: A function that allows an Attachment or Artifact (File/URL) to be analyzed by Joe Sandbox"""

    # Get the workflow_instance_id so we can raise an error if the workflow was terminated by the user
    workflow_instance_id = event.message["workflow_instance"]["workflow_instance_id"]

    # List to store paths of created temp files
    TEMP_FILES = []

    # Dict to reference related mimetype
    MIMETYPES = {"pdf": "application/pdf", "json": "application/json", "html": "text/html"}

    def get_workflow_status(workflow_instance_id, res_client):
        """Function to get the status of the current running workflow"""
        res = res_client.get("/workflow_instances/{0}".format(workflow_instance_id))
        return res["status"]

    def remove_temp_files(files):
        # Delete every temp file created during this run (called from finally).
        for f in files:
            os.remove(f)

    def str_to_bool(str):
        """Convert unicode string to equivalent boolean value. Converts a "true" or "false" string to a boolean value , string is case insensitive."""
        if str.lower() == 'true':
            return True
        elif str.lower() == 'false':
            return False
        else:
            raise ValueError("{} is not a boolean".format(str))

    def get_config_option(option_name, optional=False):
        """Given option_name, checks if it is in app.config. Raises ValueError if a mandatory option is missing"""
        option = self.options.get(option_name)
        if option is None and optional is False:
            err = "'{0}' is mandatory and is not set in ~/.resilient/app.config file. You must set this value to run this function".format(option_name)
            raise ValueError(err)
        else:
            return option

    def get_input_entity(client, incident_id, attachment_id, artifact_id):
        # Regex used to detect a URI inside an artifact's value.
        re_uri_match_pattern = r"""(?:(?:https?|ftp):\/\/|\b(?:[a-z\d]+\.))(?:(?:[^\s()<>]+|\((?:[^\s()<>]+|(?:\([^\s()<>]+\)))?\))+(?:\((?:[^\s()<>]+|(?:\(?:[^\s()<>]+\)))?\)|[^\s`!()\[\]{};:'".,<>?«»“”‘’]))?"""
        # Normalized description of the thing being analyzed (attachment or artifact).
        entity = {"incident_id": incident_id, "id": None, "type": "", "meta_data": None, "data": None}
        if (attachment_id):
            entity["id"] = attachment_id
            entity["type"] = "attachment"
            entity["meta_data"] = client.get("/incidents/{0}/attachments/{1}".format(entity["incident_id"], entity["id"]))
            entity["data"] = client.get_content("/incidents/{0}/attachments/{1}/contents".format(entity["incident_id"], entity["id"]))
        elif (artifact_id):
            entity["id"] = artifact_id
            entity["type"] = "artifact"
            entity["meta_data"] = client.get("/incidents/{0}/artifacts/{1}".format(entity["incident_id"], entity["id"]))
            # handle if artifact has attachment
            if (entity["meta_data"]["attachment"]):
                entity["data"] = client.get_content("/incidents/{0}/artifacts/{1}/contents".format(entity["incident_id"], entity["id"]))
            # else handle if artifact.value contains an URI using RegEx
            else:
                match = re.match(re_uri_match_pattern, entity["meta_data"]["value"])
                if (match):
                    entity["uri"] = match.group()
                else:
                    raise FunctionError("Artifact has no attachment or supported URI")
        else:
            raise ValueError('attachment_id AND artifact_id both None')
        return entity

    def submit_sample(entity):
        # id of the sample that gets returned from Joe Sandbox
        sample_webid = None
        # Handle if entity is an attachment or an artifact (with an attachmet)
        if (entity["type"] == "attachment" or (entity["type"] == "artifact" and entity["data"] != None)):
            # Generate attachment name
            sample_name = None
            if(entity["type"] == "attachment"):
                sample_name = "[{0}_{1}] - {2}".format(entity["meta_data"]["inc_id"], entity["meta_data"]["id"], entity["meta_data"]["name"])
            else:
                sample_name = "[{0}_{1}] - {2}".format(entity["meta_data"]["inc_id"], entity["meta_data"]["id"], entity["meta_data"]["attachment"]["name"])
            # Write to temp file
            path = write_temp_file(entity["data"], sample_name)
            # Submit to Joe Sandbox
            sample_webid = submit_file(joesandbox, path)
        # Else if the artifact.value contains a url
        elif (entity["type"] == "artifact" and entity["uri"] != None):
            sample_webid = submit_uri(joesandbox, entity["uri"])
        return sample_webid

    def fetch_report(joesandbox, sample_webid, ping_delay):
        # Wait before polling again so the API is not hammered in a tight loop.
        time.sleep(ping_delay)
        return get_sample_info(joesandbox, sample_webid)

    def generate_report_name(entity, jsb_report_type, sample_webid):
        # Build a readable report file name based on what was analyzed.
        report_name = None
        if (entity["type"] == "attachment"):
            report_name = "js-report: {0}.{1}".format(entity["meta_data"]["name"], jsb_report_type)
        elif (entity["type"] == "artifact" and entity["data"] != None):
            report_name = "js-report: {0}.{1}".format(entity["meta_data"]["attachment"]["name"], jsb_report_type)
        elif (entity["type"] == "artifact" and entity["uri"] != None):
            parsed_uri = urlparse(entity["uri"])
            if(parsed_uri.hostname):
                report_name = "js-report: {0}.{1}".format(parsed_uri.hostname, jsb_report_type)
            else:
                report_name = "js-report: URL Analysis: {0}.{1}".format(sample_webid, jsb_report_type)
        return report_name

    def write_temp_file(data, name=None):
        # Write data to a temp file (named if possible) and remember the path
        # in TEMP_FILES so it can be cleaned up in the finally block.
        path = None
        if (name):
            path = "{0}/{1}".format(tempfile.gettempdir(), name)
        else:
            tf = tempfile.mkstemp()
            path = tf[1]
        fo = open(path, 'wb')
        TEMP_FILES.append(path)
        fo.write(data)
        fo.close()
        return path

    def submit_file(joesandbox, path):
        # Submit a file on disk; return the first webid of the new analysis.
        f = open(path, "rb")
        sample_response = joesandbox.submit_sample(f)
        f.close()
        return sample_response["webids"][0]

    def submit_uri(joesandbox, uri):
        # Submit a URL pointing at a sample; return the first webid.
        sample_response = joesandbox.submit_sample_url(uri)
        return sample_response["webids"][0]

    def get_sample_info(joesandbox, sample_webid):
        # Fetch the current analysis status/details for a webid.
        return joesandbox.info(sample_webid)

    def should_timeout(ping_timeout, start_time):
        # True once more than ping_timeout seconds have elapsed since start_time.
        returnValue = (time.time() - start_time) > ping_timeout
        return returnValue

    try:
        # Get Joe Sandbox options from app.config file
        API_KEY = get_config_option("jsb_api_key")
        ACCEPT_TAC = str_to_bool(get_config_option("jsb_accept_tac"))
        ANALYSIS_URL = get_config_option("jsb_analysis_url")
        ANALYSIS_REPORT_PING_DELAY = int(get_config_option("jsb_analysis_report_ping_delay"))
        ANALYSIS_REPORT_REQUEST_TIMEOUT = float(get_config_option("jsb_analysis_report_request_timeout"))
        HTTP_PROXY = get_config_option("jsb_http_proxy", True)
        HTTPS_PROXY = get_config_option("jsb_https_proxy", True)

        # Check required inputs are defined
        incident_id = kwargs.get("incident_id")  # number (required)
        if not incident_id:
            raise ValueError("incident_id is required")
        jsb_report_type = kwargs.get("jsb_report_type")["name"]  # select (required)
        if not jsb_report_type:
            raise ValueError("jsb_report_type is required")

        # Get optional inputs, one of these must be defined
        attachment_id = kwargs.get("attachment_id")  # number
        artifact_id = kwargs.get("artifact_id")  # number
        if not attachment_id and not artifact_id:
            raise ValueError("attachment_id or artifact_id is required")

        # Setup proxies parameter if exist in appconfig file
        proxies = {}
        if (HTTP_PROXY):
            proxies["http"] = HTTP_PROXY
        if (HTTPS_PROXY):
            proxies["https"] = HTTPS_PROXY
        if (len(proxies) == 0):
            proxies = None

        # Instansiate new Joe Sandbox object
        joesandbox = jbxapi.JoeSandbox(apikey=API_KEY, accept_tac=ACCEPT_TAC, proxies=proxies)

        # Instansiate new Resilient API object
        client = self.rest_client()

        # Get entity we are dealing with (either attachment or artifact)
        entity = get_input_entity(client, incident_id, attachment_id, artifact_id)

        # Submit the sample and get its related webid
        yield StatusMessage("Submitting sample to Joe Sandbox")
        sample_webid = submit_sample(entity)

        # get the status of the sample
        sample_status = get_sample_info(joesandbox, sample_webid)

        # Get current time in seconds
        start_time = time.time()

        # Generate report name
        report_name = generate_report_name(entity, jsb_report_type, sample_webid)

        yield StatusMessage("{} being analyzed by Joe Sandbox".format(report_name))

        # Keep requesting sample status until the analysis report is ready for download or ANALYSIS_REPORT_REQUEST_TIMEOUT in seconds has passed
        while (sample_status["status"].lower() != "finished"):
            # Check workflow status, if "terminated, raise error"
            workflow_status = get_workflow_status(workflow_instance_id, client)
            if workflow_status == "terminated":
                raise ValueError("Analysis report not fetched. Workflow was Terminated")
            if (should_timeout(ANALYSIS_REPORT_REQUEST_TIMEOUT, start_time)):
                raise ValueError("Timed out trying to get Analysis Report after {0} seconds".format(ANALYSIS_REPORT_REQUEST_TIMEOUT))
            yield StatusMessage("Analysis Status: {0}. Fetch every {1}s".format(sample_status["status"], ANALYSIS_REPORT_PING_DELAY))
            sample_status = fetch_report(joesandbox, sample_webid, ANALYSIS_REPORT_PING_DELAY)

        yield StatusMessage("Analysis Finished. Getting report & attaching to this incident")
        download = joesandbox.download(sample_webid, jsb_report_type)

        # Write temp file of report
        path = write_temp_file(download[1], report_name)

        # POST report as attachment to incident
        jsb_analysis_report = client.post_attachment('/incidents/{}/attachments'.format(incident_id), path, mimetype=MIMETYPES[jsb_report_type])
        yield StatusMessage("Upload of attachment complete")

        results = {
            "analysis_report_name": report_name,
            "analysis_report_id": jsb_analysis_report["id"],
            "analysis_report_url": "{0}/{1}".format(ANALYSIS_URL, sample_webid),
            "analysis_status": sample_status["runs"][0]["detection"]
        }

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()
    finally:
        remove_temp_files(TEMP_FILES)