def analyze_binary(self, md5sum, binary_file_stream):
    """Upload the full binary to TiScale and report the upload outcome.

    Raises AnalysisPermanentError when full-binary submission is disabled,
    and AnalysisTemporaryError (with a retry delay) when the quota is
    reached (HTTP 403) or the upload otherwise fails.
    """
    if not self.submit_full_binaries:
        raise AnalysisPermanentError(message="NOT SUBMITTING FULL BINARIES")

    upload_response = self.tiscale_client.upload_file(
        md5sum=md5sum, binary_file_stream=binary_file_stream)
    status = upload_response.status_code

    # Guard-clause style: handle each outcome and leave.
    if status == 200:
        log.info(
            "File {} uploaded successfully. Response status_code = {}".format(
                md5sum, status))
        return self.make_result(md5sum=md5sum, uploaded=True)

    if status == 403:
        # Provider-side quota exhausted -- back off for a full hour.
        log.info("Quota reached. Response status_code: {}".format(status))
        raise AnalysisTemporaryError(
            message="Quota reached, will retry in 1 hour", retry_in=60 * 60)

    log.info("Unable to upload file. Response status_code: {}".format(status))
    raise AnalysisTemporaryError(
        message="Uploading file failed, will try again in 30 min",
        retry_in=30 * 60)
def analyze_binary(self, md5sum, binary_file_stream):
    """Submit the full binary to VirusTotal and interpret the response.

    Returns a result (via make_result) when the scan succeeded
    (response_code == 1). Raises AnalysisPermanentError when full-binary
    submission is disabled, and AnalysisTemporaryError when the quota is
    reached, the report is still queued, or the response is unrecognized.
    """
    if not self.submit_full_binaries:
        raise AnalysisPermanentError(message="NOT SUBMITTING FULL BINARIES!")

    log.info("Submitting FULL binary %s to VT for analysis" % md5sum)
    try:
        response = self.virustotal_analysis.submit_file(
            resource_hash=md5sum, stream=binary_file_stream)
    except VTAPIQUOTAREACHED:
        # Quota exhausted -- retry in 15 minutes.
        # (FIX: dropped the unused "as vte" binding.)
        raise AnalysisTemporaryError(message="VTAPIQUOTAREACHED",
                                     retry_in=15 * 60)

    response_code = response.get("response_code", -1)
    verbose_msg = response.get("verbose_msg", "")

    # response_code == -2 (or the "queued" message) means the scan was
    # accepted but the report is not ready yet; poll again shortly.
    if response_code == -2 or "Scan request successfully queued" in verbose_msg:
        raise AnalysisTemporaryError(
            message="VirusTotal report not yet ready -> %s" % verbose_msg,
            retry_in=120)
    elif response_code == 1:
        scan_id = response.get("scan_id", None)
        return self.make_result(scan_id=scan_id, result=response)
    else:
        # BUGFIX: format string was "% s" (stray space flag) in the original.
        raise AnalysisTemporaryError(message="Unknown error? %s" % response,
                                     retry_in=120)
def analyze_binary(self, md5sum, binary_file_stream):
    """Scan the binary's bytes against the compiled yara ruleset.

    Returns an AnalysisResult scored via getHighScore() when any rule
    matches, or a zero-score result otherwise. Raises
    AnalysisPermanentError on a yara timeout (60s) and
    AnalysisTemporaryError (retry in 10s) on any other yara error.
    """
    log.debug("%s: in analyze_binary" % md5sum)
    # NOTE(review): reads the whole stream into memory -- assumes samples
    # fit comfortably in RAM.
    data = binary_file_stream.read()
    try:
        start_analyze_time = time.time()
        matches = self.yara_rules.match(data=data, timeout=60)
        end_analyze_time = time.time()
        log.debug("%s: Took %0.3f seconds to analyze the file" %
                  (md5sum, end_analyze_time - start_analyze_time))
    except yara.TimeoutError:
        # A file that can't be scanned in 60s won't succeed on retry.
        raise AnalysisPermanentError(
            message="Analysis timed out after 60 seconds")
    except yara.Error:
        raise AnalysisTemporaryError(message="Yara exception", retry_in=10)
    else:
        if not matches:
            return AnalysisResult(score=0)
        score = self.getHighScore(matches)
        # FIX: the rule-name join was computed twice in the original;
        # build it once and reuse it for both message fields.
        rule_names = ', '.join(match.rule for match in matches)
        return AnalysisResult(
            message="Matched yara rules: %s" % rule_names,
            extended_message="%s" % rule_names,
            analysis_version=1,
            score=score)
def analyze_binary(self, md5sum, binary_file_stream):
    """Submit a binary to FortiSandbox, then fetch its analysis report.

    Returns a result (via make_result) when a report is available.
    Raises AnalysisTemporaryError when submission fails or the report is
    not ready yet (retry in 180s), and AnalysisPermanentError when the
    submission is rejected or report retrieval fails unexpectedly.
    """
    log.info("Submitting {0} to FortiSandbox for analysis".format(
        str(md5sum)))
    try:
        response = self.fortisandbox_analysis.submit_file(
            resource_hash=md5sum, stream=binary_file_stream)
    except Exception as be:
        # BUGFIX: was "except BaseException", which also caught
        # SystemExit/KeyboardInterrupt.
        log.error("EXCEPTION WHEN trying to submit binary: " + str(md5sum))
        log.error(str(be))
        log.error(traceback.format_exc())
        raise AnalysisTemporaryError(message=str(be), retry_in=15 * 60)

    result = response.json().get("result", {})
    response_code = result.get("status", {}).get("message", None)
    if response_code == "OK":
        # FIX: corrected "Sucessfully" typo in the log message.
        log.info("Successfully submitted {0} to FortiSandbox for scanning".
                 format(md5sum))
    else:
        if response_code == "INVALID_SESSION":
            # Session token expired -- drop it so the next call re-authenticates.
            self.fortisandbox_analysis.invalidate_session()
        raise AnalysisPermanentError(
            message="FortiSandbox analysis failed -> %s" % response.json())

    try:
        response = self.fortisandbox_analysis.get_report(
            resource_hash=md5sum)
        log.debug("Fortinet report: " + str(response.json()))
        result = response.json().get("result", {})
        response_code = result.get("status", {}).get("message", None)
        if response_code == "OK":
            log.info("Got analysis report from Fortisandbox for %s" % md5sum)
            return self.make_result(md5=md5sum, result=response.json())
        else:
            log.info(
                "No analysis report from Fortisandbox for %s, try again in 180 seconds"
                % md5sum)
            raise AnalysisTemporaryError(
                message="FortiSandbox analysis failed -> %s" % response_code,
                retry_in=180)
    except AnalysisTemporaryError as ate:
        # Let the retryable path through untouched.
        raise ate
    except Exception:
        # BUGFIX: was a bare "except:". Also fixed "Anlaysis" typo below.
        log.error("Fortisandbox Analysis failed , permanent!")
        log.error(traceback.format_exc())
        raise AnalysisPermanentError(
            message="FortiSandbox Analysis failed -> %s" % response_code)
def analyze_binary(self, md5sum, binary_file_stream):
    """Push the full binary to ReversingLabs.

    Returns a result (via make_result) on a successful upload; raises
    AnalysisPermanentError when full-binary submission is disabled and
    AnalysisTemporaryError (retry in 30 min) when the upload fails.
    """
    if not self.submit_full_binaries:
        raise AnalysisPermanentError(
            "Submitting full binaries is not enabled")

    log.info("Submitting binary {}".format(md5sum))
    if self.rl_analysis.submit_file(md5sum=md5sum,
                                    stream=binary_file_stream):
        return self.make_result(md5=md5sum)

    raise AnalysisTemporaryError(
        "Unable to upload file. md5sum: {}".format(md5sum),
        retry_in=30 * 60)
def query_wildfire(self, md5sum):
    """Query the WildFire verdict API for an md5 and map it to a result.

    Returns None when the verdict is not available yet, an
    AnalysisResult on a final verdict, and raises
    AnalysisTemporaryError/AnalysisPermanentError on HTTP or file errors.
    """
    log.info("Querying wildfire for md5sum %s" % md5sum)
    status_code, content = self._call_wildfire_api(
        "POST", "/publicapi/get/verdict", {'hash': md5sum.lower()})
    if status_code == 404:
        return None  # can't find the binary
    elif status_code != 200:
        log.info("Received unknown HTTP status code %d from WildFire" %
                 status_code)
        log.info("-> response content: %s" % content)
        raise AnalysisTemporaryError(
            "Received unknown HTTP status code %d from WildFire" %
            status_code,
            retry_in=120)

    response = etree.fromstring(content)

    # WildFire verdict codes:
    #    0 Benign, 1 Malware, 2 Grayware,
    # -100 pending, -101 file error, -102 not found, -103 invalid hash
    reported_md5 = response.findtext("./get-verdict-info/md5")
    if reported_md5 is None:
        # BUGFIX: the original called .lower() on findtext()'s result,
        # which raises AttributeError when the element is missing; treat
        # a malformed response as transient and retry.
        raise AnalysisTemporaryError(
            "Malformed WildFire verdict response", retry_in=120)

    if md5sum.lower() == reported_md5.lower():
        # BUGFIX: guard .strip() against a missing <verdict> element too.
        verdict = (response.findtext("./get-verdict-info/verdict") or
                   "").strip()
        if verdict == "-100":
            return None  # waiting for WildFire verdict
        elif verdict == "-102":
            return None  # file not in WildFire yet
        elif verdict.startswith("-"):
            raise AnalysisPermanentError(
                "WildFire could not process file: error %s" % verdict)
        elif verdict == "1":
            return self.generate_malware_result(md5sum, 100)
        elif verdict == "2":
            return self.generate_malware_result(md5sum, 50)
        else:
            return AnalysisResult(score=0)
    # Response is for a different hash; nothing to report (was an
    # implicit fall-through in the original).
    return None
def analyze_binary(self, md5sum, binary_file_stream):
    """Submit the full binary to ReversingLabs and poll for its result.

    Raises AnalysisPermanentError when full-binary submission is
    disabled, and AnalysisTemporaryError (retry in 15 min) on quota or
    unexpected responses; otherwise defers to check_result_for().
    """
    if not self.submit_full_binaries:
        raise AnalysisPermanentError(message="NOT SUBMITTING FULL BINARIES")

    log.info("Submitting FULL binary %s to ReversingLabs for analysis" %
             md5sum)
    try:
        response = self.rl_analysis.submit_file(
            resource_hash=md5sum, stream=binary_file_stream)
    except RLAPIQUOTAREACHED:
        raise AnalysisTemporaryError(message="RLAPIQUOTAREACHED",
                                     retry_in=15 * 60)

    # 200 (already known) and 201 (newly created) both mean the sample
    # was accepted.
    if response.status_code in (200, 201):
        return self.check_result_for(md5sum=md5sum)

    raise AnalysisTemporaryError(message="Unknown error: %s" % str(response),
                                 retry_in=15 * 60)
def analyze_binary(self, md5_hash, binary_file_stream):
    """Submit a sample to VMRay and block until all analysis jobs finish.

    Polls either the submission's "submission_finished" flag (newer
    servers) or each individual job (older servers, where a 404 means
    the job completed). Raises AnalysisTemporaryError on API errors or
    when max_analysis_wait_time is exceeded, AnalysisPermanentError when
    the API reports submission errors; otherwise returns create_result().
    """
    LOGGER.info("Submitting binary with md5 %s to VMRay" % (md5_hash))
    # submit file to VMRay
    try:
        # NOTE(review): base64.encodestring is deprecated and removed in
        # Python 3.9, and on Python 3 it requires bytes -- migrate to
        # base64.encodebytes(md5_hash.encode()) when moving to Python 3.
        result = self.rest_api.call(
            "POST", "/rest/sample/submit",
            params={
                "archive_action": "ignore",
                "sample_file": binary_file_stream,
                "sample_filename_b64enc": base64.encodestring(md5_hash),
                "reanalyze": True,
                "max_jobs": self.max_jobs
            })
    except VMRayRESTAPIError as exc:
        LOGGER.debug("Error submitting sample with md5 %s", md5_hash,
                     exc_info=True)
        raise AnalysisTemporaryError(message="API error: %s" % str(exc),
                                     retry_in=self.retry_wait_time)

    if result.get("errors", None):
        raise AnalysisPermanentError(
            message="API error: %s" %
            str(result["errors"][0].get("error_msg", "")))

    sample_id = result["samples"][0]["sample_id"]
    submission_id = result["submissions"][0]["submission_id"]
    LOGGER.debug("Waiting for submission with ID %u to finish all jobs",
                 submission_id)

    # wait until all analyses have finished
    if "submission_finished" in result["submissions"][0]:
        # Newer servers expose a per-submission finished flag; poll it.
        wait_start = time.time()
        while True:
            time.sleep(self.loop_wait_time)
            if (time.time() - wait_start) > self.max_analysis_wait_time:
                LOGGER.debug(
                    "Timed out waiting for result of submission with ID %u",
                    submission_id)
                raise AnalysisTemporaryError(
                    message=
                    "Timed out waiting for analysis jobs to finish for submission %u"
                    % (submission_id),
                    retry_in=self.retry_wait_time)
            try:
                submission = self.rest_api.call(
                    "GET", "/rest/submission/%u" % (submission_id))
            except Exception:
                # BUGFIX: was a bare "except:", which also swallowed
                # SystemExit/KeyboardInterrupt. Transient fetch failures
                # are retried on the next loop iteration.
                LOGGER.debug("Could not get submission ID %u", submission_id)
                continue
            if submission.get("submission_finished", False):
                break
    else:
        # old method: poll every job; a 404 from the job endpoint means
        # that job has completed and been removed.
        open_jobs = list(result["jobs"])
        wait_start = time.time()
        while len(open_jobs) > 0:
            # check for timeout
            if (time.time() - wait_start) > self.max_analysis_wait_time:
                LOGGER.debug(
                    "Timed out waiting for result of submission with ID %u",
                    submission_id)
                raise AnalysisTemporaryError(
                    message=
                    "Timed out waiting for analysis jobs to finish for submission %u"
                    % (submission_id),
                    retry_in=self.retry_wait_time)
            check_jobs = list(open_jobs)
            open_jobs = []
            for job in check_jobs:
                try:
                    self.rest_api.call("GET",
                                       "/rest/job/%u" % (job["job_id"]))
                except VMRayRESTAPIError as exc:
                    if exc.status_code == 404:
                        # job has finished
                        continue
                # job is still there or server is unreachable
                open_jobs.append(job)
            if len(open_jobs) == 0:
                break
            time.sleep(self.loop_wait_time)

    LOGGER.debug("All jobs for submission with ID %u have finished",
                 submission_id)
    return self.create_result(sample_id, submission_id=submission_id)
def query_checkpoint(self, md5sum):
    """Query the Checkpoint TE cloud API for a verdict on an md5.

    Returns an AnalysisResult on a final verdict, None when the verdict
    is pending/unknown/not found, and raises
    AnalysisTemporaryError/AnalysisPermanentError on API or file errors.
    """
    headers = {
        "Content-Type": "application/json",
        "Authorization": self.api_key,
    }
    payload = json.dumps({
        "request": [{
            "md5": md5sum,
            "features": ["te", "av", "extraction"]
        }]
    })
    log.info("Querying checkpoint for md5sum %s" % md5sum)
    try:
        status_code, content = self._call_checkpoint_api(
            method="POST",
            path="/tecloud/api/v1/file/query",
            headers=headers,
            payload=payload,
            files=None)
        if status_code != 200:
            log.info(
                "Received unknown HTTP status code %d from checkpoint" %
                status_code)
            log.info("-> response content: %s" % content)
            raise AnalysisTemporaryError(
                "Received unknown HTTP status code %d from checkpoint" %
                status_code,
                retry_in=120)
    except AnalysisTemporaryError:
        # BUGFIX: the original's broad "except Exception" caught the
        # status-code error raised just above and re-wrapped it as a
        # connection error; let it propagate unchanged instead.
        raise
    except Exception as e:
        log.info(str(e))
        raise AnalysisTemporaryError(
            "There was an error connecting to Checkpoint %s" % str(e),
            retry_in=120)

    dict_response = json.loads(content)
    try:
        checkpoint_status_code = dict_response.get("response", [])[0].get(
            "te", {}).get("status", {}).get("code", -1)
    except Exception:
        checkpoint_status_code = -1
        log.error("Failed to parse checkpoint response JSON")
        log.error(traceback.format_exc())

    log.info("md5: {0} returned status_code: {1}".format(
        md5sum, checkpoint_status_code))

    if checkpoint_status_code == 1001:
        # FOUND -- map the combined verdict to a score.
        severity = dict_response.get("response", [])[0].get("te", {}).get(
            "combined_verdict", None)
        # BUGFIX: original called severity.lower() without a None check
        # and crashed when "combined_verdict" was absent.
        if severity is None:
            return None
        if severity.lower() == "malicious":
            score = 100
        elif severity.lower() == "benign":
            score = 0
        else:
            # "unknown" or anything unexpected: no verdict yet.
            return None
        log.info("{0} has score of {1}".format(md5sum, score))
        return AnalysisResult(score=score)
    elif checkpoint_status_code == 1003:
        # Pending
        return None
    elif checkpoint_status_code == 1004:
        # 1004 NOT_FOUND -- file is not in checkpoint yet
        return None
    elif checkpoint_status_code == 1005:
        # Out of Quota
        # BUGFIX: this and the branches below *returned* exception
        # instances instead of raising them.
        raise AnalysisTemporaryError("Out of Quota")
    elif checkpoint_status_code == 1006:
        # Partially found
        return None
    elif checkpoint_status_code == 1007:
        # FILE_TYPE_NOT_SUPPORTED
        raise AnalysisPermanentError("Filetype is not supported")
    elif checkpoint_status_code == 1009:
        # Internal Error
        raise AnalysisTemporaryError("Internal Error from Checkpoint")
    elif checkpoint_status_code == 1011:
        # Insufficient resources
        raise AnalysisTemporaryError(
            "Checkpoint reports insufficient resources")
    else:
        return None