def vt_get_report(api_key, observable_name, observable_classification):
    """Query the VirusTotal v2 report endpoint for the given observable.

    Selects the URI and request parameters based on the observable
    classification (domain, ip, url or hash) and returns the parsed JSON.

    Raises:
        AnalyzerRunException: on unsupported classification, request failure,
            or a -1 response code from VirusTotal.
    """
    route_table = {
        "domain": ("domain/report", {"domain": observable_name}),
        "ip": ("ip-address/report", {"ip": observable_name}),
        "url": ("url/report", {"resource": observable_name}),
        "hash": ("file/report", {"resource": observable_name, "allinfo": 1}),
    }
    if observable_classification not in route_table:
        raise AnalyzerRunException(
            "not supported observable type {}. Supported are: hash, ip, domain and url"
            "".format(observable_classification)
        )
    uri, extra_params = route_table[observable_classification]
    params = {"apikey": api_key}
    params.update(extra_params)
    try:
        response = requests.get(vt_base + uri, params=params)
        response.raise_for_status()
    except requests.RequestException as e:
        raise AnalyzerRunException(e)
    result = response.json()
    # VT signals an error with response_code == -1; default 1 means "ok"
    if result.get("response_code", 1) == -1:
        raise AnalyzerRunException("response code -1. result:{}".format(result))
    return result
def _req_with_checks(self, url, files=None, post=False):
    """Perform a GET/POST against ``self.base_url + url`` with error handling.

    HTTP, timeout and generic request failures are logged and converted
    into AnalyzerRunException. Returns the raw response object.
    """
    full_url = self.base_url + url
    try:
        if post:
            r = requests.post(full_url, files=files, headers=self.headers)
        else:
            # only private-tier accounts send auth headers on GET
            get_headers = self.headers if self.private == "private" else {}
            r = requests.get(full_url, files=files, headers=get_headers)
        r.raise_for_status()
    except requests.exceptions.HTTPError as e:
        logger.error(
            f"md5 {self.md5} job {self.job_id} url {url} has http error {str(e)}"
        )
        if post:
            # an HTTP error on submission means the quota ran out
            raise AnalyzerRunException("Monthly quota exceeded!")
        raise AnalyzerRunException(e)
    except requests.exceptions.Timeout as e:
        logger.error(
            f"md5 {self.md5} job {self.job_id} url {url} has timeout error {str(e)}"
        )
        raise AnalyzerRunException(e)
    except requests.exceptions.RequestException as e:
        logger.error(
            f"md5 {self.md5} job {self.job_id} url {url} failed with error {str(e)}"
        )
        raise AnalyzerRunException(e)
    return r
def run(self):
    """Query the service for the configured observable and return its JSON.

    Raises:
        AnalyzerRunException: if no API key is configured, the observable
            type is unsupported, or the request fails.
    """
    if not self.__api_key:
        raise AnalyzerRunException(
            f"no API key retrieved with name: '{self.api_key_name}'"
        )
    obs_clsfn = self.observable_classification
    # url observables are looked up through the "hostname" endpoint
    path_prefixes = {"domain": "domain", "ip": "ip", "url": "hostname"}
    if obs_clsfn not in path_prefixes:
        raise AnalyzerRunException(
            f"not supported observable type {obs_clsfn}."
            " Supported are: ip, domain and url."
        )
    uri = f"{path_prefixes[obs_clsfn]}/{self.observable_name}"
    headers = {
        "Authorization": f"apikey {self.__api_key}",
        "Content-Type": "application/json",
    }
    try:
        response = requests.get(self.base_url + uri, headers=headers)
        response.raise_for_status()
    except requests.RequestException as e:
        raise AnalyzerRunException(e)
    return response.json()
def run(self):
    """Look up passive-DNS records for ``self.domain`` on CIRCL pDNS.

    Credentials must be saved with the template "<user>|<pwd>".
    Datetime fields in the results are normalised to
    "%Y-%m-%d %H:%M:%S" strings so the output is JSON-serialisable.
    """
    if not self.__credentials:
        raise AnalyzerRunException("no credentials retrieved")
    credential_parts = self.__credentials.split("|")
    if len(credential_parts) != 2:
        raise AnalyzerRunException(
            "CIRCL credentials not properly configured."
            "Template to use: '<user>|<pwd>'"
        )
    user, pwd = credential_parts
    pdns = pypdns.PyPDNS(basic_auth=(user, pwd))
    try:
        result = pdns.query(self.domain, timeout=5)
    except pypdns.errors.UnauthorizedError as e:
        raise AnalyzerRunException(
            f"Credentials are not valid: UnauthorizedError: {e}"
        )
    for entry in result:
        for time_key in ("time_first", "time_last"):
            value = entry.get(time_key, None)
            if value and isinstance(value, datetime.datetime):
                entry[time_key] = value.strftime("%Y-%m-%d %H:%M:%S")
    return result
def run(self):
    """Check the observable hash against the Team Cymru Malware Hash Registry.

    MHR is queried through DNS: if ``<hash>.malware.hash.cymru.com``
    resolves, the hash is known to the registry, i.e. reported as malware.
    Reference: https://team-cymru.com/community-services/mhr/

    Returns:
        dict: ``{"found": bool}`` plus ``"resolution_data"`` (the resolved
        address list) when the hash was found.

    Raises:
        AnalyzerRunException: if the observable is not a hash, or is a
            sha256 (64 hex chars — not supported by the service).
    """
    results = {}
    if self.observable_classification != self.ObservableTypes.HASH:
        raise AnalyzerRunException(
            f"observable type {self.observable_classification} not supported"
        )
    hash_length = len(self.observable_name)
    if hash_length == 64:
        raise AnalyzerRunException("sha256 are not supported by the service")
    results["found"] = False
    # if the resolution works, this means that the file is reported
    # as malware by Cymru
    domains = None
    try:
        query_to_perform = f"{self.observable_name}.malware.hash.cymru.com"
        # NOTE(review): gethostbyaddr also accepts host names, but
        # gethostbyname would be the conventional call for a forward
        # lookup — confirm before changing.
        domains = socket.gethostbyaddr(query_to_perform)
    except (socket.gaierror, socket.herror):
        # fix: the registry is "MHR" (Malware Hash Registry), not "HMR"
        logger.info(f"observable {self.observable_name} not found in MHR DB")
    if domains:
        results["found"] = True
        # gethostbyaddr returns (hostname, aliaslist, ipaddrlist)
        results["resolution_data"] = domains[2]
    return results
def _intezer_scan_file(intezer_token, md5, filename, binary):
    """Submit a sample to Intezer Analyze and poll until the result is ready.

    Args:
        intezer_token: bearer token for the Intezer API.
        md5: md5 of the sample (fallback file name, also used for logging).
        filename: original file name; may be falsy.
        binary: raw file content.

    Returns:
        dict: the final analysis JSON.

    Raises:
        AnalyzerRunException: if the submission is rejected (non-201) or
            polling exhausts all attempts without a 200 response.
    """
    session = requests.session()
    session.headers['Authorization'] = 'Bearer {}'.format(intezer_token)
    name_to_send = filename if filename else md5
    files = {'file': (name_to_send, binary)}
    logger.info("intezer md5 {} sending sample for analysis".format(md5))
    response = session.post(base_url + '/analyze', files=files)
    if response.status_code != 201:
        raise AnalyzerRunException(
            "failed analyze request, status code {}".format(response.status_code)
        )
    max_tries = 200
    polling_time = 3
    for chance in range(max_tries):
        # fix: break out as soon as the result is ready instead of spinning
        # through the remaining (no-op) loop iterations
        if response.status_code == 200:
            break
        time.sleep(polling_time)
        logger.info(
            "intezer md5 {} polling for result try n.{}".format(md5, chance + 1)
        )
        result_url = response.json()['result_url']
        response = session.get(base_url + result_url)
        response.raise_for_status()
    if response.status_code != 200:
        raise AnalyzerRunException("received max tries attempts")
    return response.json()
def __raise_in_case_bad_request(name, resp, params_to_check=None) -> bool:
    """
    Raises:
          :class: `AnalyzerRunException`, if bad status code or no key in response
    """
    params_to_check = ["key"] if params_to_check is None else params_to_check
    status = resp.status_code
    # different error messages for different cases
    if status == 404:
        raise AnalyzerConfigurationException(
            f"{name} docker container is not running."
        )
    if status == 400:
        raise AnalyzerRunException(resp.json().get("error", ""))
    if status == 500:
        raise AnalyzerRunException(
            f"Internal Server Error in {name} docker container"
        )
    # make sure each expected param is present and truthy in the response
    for param in params_to_check:
        if not resp.json().get(param, None):
            raise AnalyzerRunException(
                "Unexpected Error. "
                f"Please check log files under /var/log/intel_owl/{name.lower()}/"
            )
    # just in case couldn't catch the error manually
    resp.raise_for_status()
    return True
def _ha_get_report(api_key, observable_name, observable_classification):
    """Search Hybrid Analysis (Falcon Sandbox) for the given observable.

    Raises:
        AnalyzerRunException: on unsupported observable type or
            request failure.
    """
    headers = {
        "api-key": api_key,
        "user-agent": "Falcon Sandbox",
        "accept": "application/json",
    }
    # each observable type maps to (endpoint, payload key)
    search_setup = {
        "domain": ("search/terms", "domain"),
        "ip": ("search/terms", "host"),
        "url": ("search/terms", "url"),
        "hash": ("search/hash", "hash"),
    }
    if observable_classification not in search_setup:
        raise AnalyzerRunException(
            "not supported observable type {}. Supported are: hash, ip, domain and url"
            "".format(observable_classification))
    uri, payload_key = search_setup[observable_classification]
    data = {payload_key: observable_name}
    try:
        response = requests.post(ha_base + uri, data=data, headers=headers)
        response.raise_for_status()
    except requests.RequestException as e:
        raise AnalyzerRunException(e)
    return response.json()
def _cuckoo_poll_result(cuckoo_analysis, filename, md5, additional_config_params):
    """Poll Cuckoo until the submitted task is reported or retries run out.

    Raises:
        AnalyzerRunException: when the sandbox reports 'failed_processing'
            or when all polling attempts time out.
    """
    logger.info("polling result for {} {}, task_id {}".format(
        filename, md5, cuckoo_analysis.task_id))
    # poll for the result
    max_get_tries = additional_config_params.get('max_poll_tries', 50)
    poll_time = 15
    # the task URL is loop-invariant, build it once
    task_url = cuckoo_analysis.cuckoo_url + 'tasks/view/' + str(
        cuckoo_analysis.task_id)
    for chance in range(max_get_tries):
        logger.info("polling request n.{} for file {} {}"
                    "".format(chance + 1, filename, md5))
        json_response = cuckoo_analysis.session.get(task_url).json()
        status = json_response.get('task', {}).get('status', '')
        if status == 'reported':
            break
        if status == 'failed_processing':
            raise AnalyzerRunException(
                "sandbox analysis failed. cuckoo id: {} status 'failed_processing'"
                "".format(cuckoo_analysis.task_id))
        time.sleep(poll_time)
    else:
        # loop exhausted without a break -> never reached 'reported'
        raise AnalyzerRunException("sandbox analysis timed out. cuckoo id: {}"
                                   "".format(cuckoo_analysis.task_id))
def _docker_run(self, req_data, req_files=None):
    """
    Helper function that takes of care of requesting new analysis,
    reading response, polling for result and exception handling for a
    docker based analyzer.

    Args:
        req_data (Dict): Dict of request JSON.
        req_files (Dict, optional): Dict of files to send. Defaults to None.

    Raises:
        AnalyzerConfigurationException: In case docker service is not running
        AnalyzerRunException: Any other error

    Returns:
        Dict: Final analysis results
    """
    # handle in case this is a test
    if hasattr(self, "is_test") and getattr(self, "is_test"):
        # only happens in case of testing
        self.report["success"] = True
        return {}

    # step #1: request new analysis
    args = req_data.get("args", [])
    logger.debug(
        f"Making request with arguments: {args} <- {self.__repr__()}")
    try:
        if req_files:
            form_data = {"request_json": json.dumps(req_data)}
            resp1 = requests.post(self.url, files=req_files, data=form_data)
        else:
            resp1 = requests.post(self.url, json=req_data)
    except requests.exceptions.ConnectionError:
        self._raise_container_not_running()

    # step #2: raise AnalyzerRunException in case of error.
    # fix: this call used to be wrapped in `assert`, which is stripped when
    # Python runs with -O — silently skipping all HTTP error checking.
    # Call it directly; it raises on bad responses and returns True otherwise.
    self.__raise_in_case_bad_request(self.name, resp1)

    # step #3: if no error, continue and try to fetch result
    key = resp1.json().get("key")
    final_resp = self.__poll_for_result(key)
    err = final_resp.get("error", None)
    report = final_resp.get("report", None)
    if not report:
        raise AnalyzerRunException(f"Report is empty. Reason: {err}")
    if isinstance(report, dict):
        return report
    # the report may arrive as a JSON-encoded string; decode it
    try:
        report = json.loads(report)
    except json.JSONDecodeError:
        raise AnalyzerRunException(str(err))
    return report
def run(self):
    """Query the passive-DNS service according to the configured analysis type.

    The endpoint streams newline-separated JSON objects; each non-empty
    line is decoded and the list of records is returned.
    """
    if self.analysis_type == "ip_query":
        uri = f"ipquery/{self.observable_name}"
    elif self.analysis_type == "reverse_pdns":
        uri = f"pdns/reverse/{self.observable_name}"
    elif self.analysis_type == "forward_pdns":
        domain = self.observable_name
        if self.observable_classification == "url":
            # forward pDNS works on host names, so strip the URL down
            domain = urlparse(self.observable_name).hostname
        uri = f"pdns/forward/{domain}"
    else:
        raise AnalyzerRunException(
            f"not supported analysis type {self.analysis_type}."
        )
    try:
        response = requests.get(self.base_url + uri)
        response.raise_for_status()
        raw_lines = response.text.split("\r\n")
    except requests.ConnectionError as e:
        raise AnalyzerRunException(f"Connection error: {e}")
    else:
        return [json.loads(line) for line in raw_lines if len(line) > 0]
def set_params(self, params):
    """Configure the recipe from ``params``.

    If "recipe_name" is given, the recipe is loaded from the predefined
    configuration file; otherwise the raw "recipe_code" is used.
    """
    self.recipe_name = params.get("recipe_name", "")
    if self.recipe_name:
        config_path = f"{PROJECT_LOCATION}/configuration/{self.config_filename}"
        try:
            with open(config_path, "r") as recipes:
                self.recipe = json.load(recipes)[self.recipe_name]
        except FileNotFoundError:
            raise AnalyzerRunException(
                f"Could not open configuration file {self.config_filename}"
            )
        except json.JSONDecodeError:
            raise AnalyzerRunException(
                f"Could not parse the configuration file. Please check "
                f"{self.config_filename}")
        except KeyError:
            raise AnalyzerRunException(
                f"Unknown predefined recipe: {self.recipe_name}")
    else:
        self.recipe = params.get("recipe_code", [])
    self.output_type = params.get("output_type", "")
def run(self):
    """Run a urlscan.io search or submission depending on analysis_type.

    Searches work without an API key (with tighter rate limits);
    submissions require one.
    """
    headers = {
        "Content-Type": "application/json",
        "User-Agent": "IntelOwl/v1.x"
    }
    api_key = secrets.get_secret(self.api_key_name)
    if api_key:
        headers["API-Key"] = api_key
    elif self.analysis_type == "search":
        logger.warning(
            f"{self.__repr__()} -> Continuing w/o API key..")
    else:
        raise AnalyzerRunException(
            f"No API key retrieved for name {self.api_key_name}.")
    self.session = requests.Session()
    self.session.headers = headers
    if self.analysis_type == "search":
        return self.__urlscan_search()
    if self.analysis_type == "submit_result":
        req_api_token = self.__urlscan_submit()
        return self.__poll_for_result(req_api_token)
    raise AnalyzerRunException(
        f"not supported analysis_type {self.analysis_type}."
        " Supported is 'search' and 'submit_result'.")
def vt_get_report(api_key, observable_name, obs_clsfn):
    """Fetch a VirusTotal v2 report for the observable and return its JSON.

    Raises:
        AnalyzerRunException: on unsupported observable type, request
            failure, or a non-JSON response body.
    """
    params = {"apikey": api_key}
    if obs_clsfn == ObservableTypes.DOMAIN:
        uri = "domain/report"
        params["domain"] = observable_name
    elif obs_clsfn == ObservableTypes.IP:
        uri = "ip-address/report"
        params["ip"] = observable_name
    elif obs_clsfn == ObservableTypes.URL:
        uri = "url/report"
        params["resource"] = observable_name
    elif obs_clsfn == ObservableTypes.HASH:
        uri = "file/report"
        params["resource"] = observable_name
        params["allinfo"] = 1
    else:
        raise AnalyzerRunException(
            f"not supported observable type {obs_clsfn}. "
            "Supported are: hash, ip, domain and url.")
    try:
        response = requests.get(vt_base + uri, params=params)
        response.raise_for_status()
    except requests.RequestException as e:
        raise AnalyzerRunException(e)
    try:
        return response.json()
    except Exception as e:
        raise AnalyzerRunException(
            f"Response is not a JSON!? Response type:{response.text} Error:{e}"
        )
def run(self):
    """Query MWDB for the sample, optionally uploading it first.

    If ``self.upload_file`` is true the binary is uploaded and the analysis
    is polled up to ``self.max_tries`` times; otherwise the sample is only
    looked up by its sha256.

    Returns:
        dict: MWDB file data, metakeys and a permalink to the sample.

    Raises:
        AnalyzerRunException: when polling exhausts all attempts, or when
            the sample is unknown to MWDB and uploading is disabled.
    """
    mwdb = mwdblib.MWDB(api_key=self.__api_key)
    binary = get_binary(self.job_id)
    query = str(hashlib.sha256(binary).hexdigest())
    if self.upload_file:
        logger.info(f"mwdb_scan uploading sample: {self.md5}")
        file_object = mwdb.upload_file(query, binary)
        file_object.flush()
        # fix: track completion explicitly so file_analysis() is not
        # re-evaluated after the loop, and file_info cannot be referenced
        # unbound when max_tries is 0
        analysis_completed = False
        for _try in range(self.max_tries):
            logger.info(
                f"mwdb_scan sample: {self.md5} polling for result try #{_try + 1}"
            )
            time.sleep(self.poll_distance)
            file_info = mwdb.query_file(file_object.data["id"])
            if self.file_analysis(file_info):
                analysis_completed = True
                break
        if not analysis_completed:
            raise AnalyzerRunException("max retry attempts exceeded")
    else:
        try:
            file_info = mwdb.query_file(query)
        except Exception:
            raise AnalyzerRunException(
                "File not found in the MWDB. Set 'upload_file=true' "
                "if you want to upload and poll results. ")
    result = {"data": file_info.data, "metakeys": file_info.metakeys}
    result["permalink"] = f"https://mwdb.cert.pl/file/{query}"
    return result
def run(self):
    """Request a screenshot (or JSON analysis) of the observable URL.

    Returns either ``{"screenshot": <base64>}`` or the raw JSON response,
    depending on ``self.output``.
    """
    if self.use_proxy and not self.proxy:
        raise AnalyzerConfigurationException(
            "No proxy retrieved when use_proxy is true.")
    if self.output not in ["image", "json"]:
        raise AnalyzerConfigurationException(
            "output param can only be 'image' or 'json'")
    params = self.extra_api_params if isinstance(self.extra_api_params, dict) else {}
    params["url"] = self.observable_name
    params["token"] = self.__api_key
    params["output"] = self.output
    if self.use_proxy:
        params["proxy"] = self.proxy
    try:
        resp = requests.get(self.base_url, params=params)
        resp.raise_for_status()
    except requests.RequestException as e:
        raise AnalyzerRunException(e)
    if self.output != "image":
        return resp.json()
    try:
        return {"screenshot": base64.b64encode(resp.content).decode("utf-8")}
    except Exception as err:
        raise AnalyzerRunException(
            f"Failed to convert to base64 string {err}")
def vt_scan_file(api_key, md5, job_id, additional_config_params):
    """Upload the job's binary to VirusTotal v3 and poll until the scan ends.

    Submits the file, polls ``analyses/<scan_id>`` until the status is
    "completed" (or retries run out), then retrieves the full hash report
    via ``vt3_get.vt_get_report``.

    Args:
        api_key: VirusTotal API key.
        md5: md5 of the sample, used for the final report lookup.
        job_id: job identifier used to fetch the binary and for logging.
        additional_config_params: dict; honours "max_tries" (default 100).

    Returns:
        The full VT report dict from ``vt3_get.vt_get_report``.

    Raises:
        AnalyzerRunException: if the binary cannot be retrieved, a request
            fails, no scan id is returned, or polling times out.
    """
    try:
        binary = general.get_binary(job_id)
    except Exception:
        raise AnalyzerRunException(
            "couldn't retrieve the binary to perform a scan")
    headers = {'x-apikey': api_key}
    files = {'file': binary}
    uri = 'files'
    try:
        response = requests.post(vt_base + uri, files=files, headers=headers)
        response.raise_for_status()
    except requests.RequestException as e:
        raise AnalyzerRunException(e)
    result = response.json()
    # pprint.pprint(result)
    result_data = result.get('data', {})
    scan_id = result_data.get('id', '')
    if not scan_id:
        raise AnalyzerRunException(
            "no scan_id given by VirusTotal to retrieve the results")
    # max 5 minutes waiting
    max_tries = additional_config_params.get('max_tries', 100)
    poll_distance = 5
    got_result = False
    uri = "analyses/{}".format(scan_id)
    for chance in range(max_tries):
        time.sleep(poll_distance)
        logger.info(
            "vt polling, try n.{}. job_id {}. starting the query".format(
                chance + 1, job_id))
        try:
            response = requests.get(vt_base + uri, headers=headers)
            response.raise_for_status()
        except requests.RequestException as e:
            raise AnalyzerRunException(e)
        json_response = response.json()
        # pprint.pprint(json_response)
        analysis_status = json_response.get('data', {}).get(
            'attributes', {}).get('status', '')
        if analysis_status == "completed":
            got_result = True
            break
        else:
            logger.info("vt polling, try n.{}. job_id {}. status:{}".format(
                chance + 1, job_id, analysis_status))
    if not got_result:
        raise AnalyzerRunException(
            "max VT polls tried without getting any result. job_id {}".format(
                job_id))
    # retrieve the FULL report, not only scans results.
    # If it's a new sample, it's free of charge
    result = vt3_get.vt_get_report(api_key, md5, "hash", {}, job_id)
    # pprint.pprint(result)
    return result
def run(self):
    """Search a MISP instance for events mentioning the observable.

    Only events from the last 90 days are considered, capped at 50 results
    with warninglists enforced. Hash observables are searched across md5,
    sha1 and sha256 attribute types.
    """
    api_key = secrets.get_secret(self.api_key_name)
    if not api_key:
        raise AnalyzerRunException(
            f"no MISP API key retrieved with name: {self.api_key_name}")
    if not self.url_name:
        raise AnalyzerRunException(
            f"no MISP URL retrieved, key value: {self.url_key_name}")
    misp_instance = pymisp.ExpandedPyMISP(self.url_name, api_key)  # debug=True)
    # we check only for events not older than 90 days and max 50 results
    date_from = datetime.datetime.now() - datetime.timedelta(days=90)
    if self.observable_classification == "hash":
        attribute_types = ["md5", "sha1", "sha256"]
    else:
        attribute_types = [self.observable_classification]
    params = {
        # even if docs say to use "values",...
        # .. at the moment it works correctly only with "value"
        "value": self.observable_name,
        "type_attribute": attribute_types,
        "date_from": date_from.strftime("%Y-%m-%d %H:%M:%S"),
        "limit": 50,
        "enforce_warninglist": True,
    }
    result_search = misp_instance.search(**params)
    if isinstance(result_search, dict):
        errors = result_search.get("errors", [])
        if errors:
            raise AnalyzerRunException(errors)
    return {"result_search": result_search, "instance_url": self.url_name}
def run(self):
    """
    API key is not mandatory, emailrep supports requests with no key:
    a valid key let you to do more requests per day.
    therefore we're not checking if a key has been configured.
    """
    if self.observable_classification not in ["generic"]:
        raise AnalyzerRunException(
            f"not supported observable type {self.observable_classification}."
            f" Supported: generic")
    headers = {
        "User-Agent": "IntelOwl v2",
        "Key": self.__api_key,
        "Accept": "application/json",
    }
    url = self.base_url.format(self.observable_name)
    try:
        response = requests.get(url, headers=headers)
        response.raise_for_status()
    except requests.RequestException as e:
        raise AnalyzerRunException(e)
    return response.json()
def __cuckoo_poll_result(self):
    """Poll the Cuckoo task status until 'reported', failure, or timeout.

    Raises:
        AnalyzerRunException: on 'failed_processing' status, or when
            ``self.max_get_tries`` polls elapse without success.
    """
    logger.info(
        f"polling result for ({self.filename},{self.md5}), task_id: #{self.task_id}"
    )
    poll_time = 15
    # the task URL is loop-invariant, build it once
    task_url = self.cuckoo_url + "tasks/view/" + str(self.task_id)
    for chance in range(self.max_get_tries):
        logger.info(
            f"polling request #{chance + 1} for file ({self.filename},{self.md5})"
        )
        json_response = self.session.get(task_url).json()
        status = json_response.get("task", {}).get("status", None)
        if status == "reported":
            break
        if status == "failed_processing":
            raise AnalyzerRunException(
                "sandbox analysis failed."
                f"cuckoo id: #{self.task_id}, status: 'failed_processing'"
            )
        time.sleep(poll_time)
    else:
        # loop exhausted without a break -> never reached 'reported'
        raise AnalyzerRunException(
            f"sandbox analysis timed out. cuckoo id: #{self.task_id}"
        )
def __poll_for_result(self, req_key: str) -> dict:
    """Poll the analyzer service for the result keyed by ``req_key``.

    Returns the final JSON payload; raises AnalyzerRunException on request
    failures or when every poll still reports status 'running'.
    """
    json_data = {}
    for chance in range(self.max_tries):
        time.sleep(self.poll_distance)
        logger.info(
            f"Result Polling. Try #{chance + 1}. Starting the query..."
            f"<-- {self.__repr__()}"
        )
        try:
            status_code, json_data = self.__query_for_result(self.url, req_key)
        except (requests.RequestException, json.JSONDecodeError) as e:
            raise AnalyzerRunException(e)
        # anything other than 'running' counts as a final result
        if json_data.get("status", None) == "running":
            logger.info(
                f"Poll number #{chance + 1}, "
                f"status: 'running' <-- {self.__repr__()}"
            )
        else:
            break
    else:
        raise AnalyzerRunException("max polls tried without getting any result.")
    return json_data
def _intezer_scan_file(intezer_token, md5, filename, binary, additional_config_params):
    """Submit a sample to Intezer Analyze and poll for the final result.

    Args:
        intezer_token: bearer token for the Intezer API.
        md5: sample md5 (fallback file name, also used for logging).
        filename: original file name; may be falsy.
        binary: raw file content.
        additional_config_params: dict; honours "max_tries" (default 200)
            and "is_test" (skips the final status check during tests).

    Returns:
        dict: the analysis JSON.

    Raises:
        AnalyzerRunException: on submission failure (non-201) or polling
            timeout (unless "is_test" is set).
    """
    session = requests.session()
    session.headers["Authorization"] = "Bearer {}".format(intezer_token)
    name_to_send = filename if filename else md5
    files = {"file": (name_to_send, binary)}
    logger.info("intezer md5 {} sending sample for analysis".format(md5))
    response = session.post(base_url + "/analyze", files=files)
    if response.status_code != 201:
        raise AnalyzerRunException(
            "failed analyze request, status code {}".format(
                response.status_code))
    max_tries = additional_config_params.get("max_tries", 200)
    polling_time = 3
    for chance in range(max_tries):
        # fix: stop polling as soon as a 200 result arrives instead of
        # spinning through the remaining (no-op) loop iterations
        if response.status_code == 200:
            break
        time.sleep(polling_time)
        logger.info("intezer md5 {} polling for result try n.{}".format(
            md5, chance + 1))
        result_url = response.json().get("result_url", "")
        response = session.get(base_url + result_url)
        response.raise_for_status()
    is_test = additional_config_params.get("is_test", False)
    if response.status_code != 200 and not is_test:
        raise AnalyzerRunException("received max tries attempts")
    return response.json()
def run(self):
    """Run the configured ZoomEye query.

    Returns the API response merged with a "custom_options" record of the
    search parameters that were used.
    """
    if not self.__api_key:
        raise AnalyzerRunException(
            f"No API key retrieved with name: '{self.api_key_name}'")
    self.__build_zoomeye_url()
    try:
        response = requests.get(self.url, headers={"API-KEY": self.__api_key})
        response.raise_for_status()
    except requests.RequestException as e:
        raise AnalyzerRunException(e)
    custom_options = {
        "search_type": self.search_type,
        "query": self.query,
    }
    if self.page:
        custom_options["page"] = self.page
    if self.facets:
        custom_options["facet"] = self.facets
    # history is only meaningful when searching both hosts and web
    if self.history and self.search_type == "both":
        custom_options["history"] = self.history
    result = {"custom_options": custom_options}
    result.update(response.json())
    return result
def run(
    analyzer_name,
    job_id,
    observable_name,
    observable_classification,
    additional_config_params,
):
    """Query GreyNoise (v1 or v2 API) for the observable and store a report.

    All failures are caught and recorded in the report's "errors"/"success"
    fields rather than propagated; the report is always persisted via
    ``general.set_report_and_cleanup``.

    Args:
        analyzer_name: analyzer name for the report template and logging.
        job_id: job identifier for report storage and logging.
        observable_name: the IP address sent to GreyNoise.
        observable_classification: not used by the query logic here.
        additional_config_params: honours "greynoise_api_version" ("v1"
            default, or "v2") and — for v2 — "api_key_name"
            (default "GREYNOISE_API_KEY").

    Returns:
        dict: the completed report template.
    """
    logger.info("started analyzer {} job_id {} observable {}"
                "".format(analyzer_name, job_id, observable_name))
    report = general.get_basic_report_template(analyzer_name)
    try:
        api_version = additional_config_params.get("greynoise_api_version", "v1")
        if api_version == "v1":
            # v1 is the free community endpoint; form-encoded POST
            url = "https://api.greynoise.io/v1/query/ip"
            headers = {"Content-Type": "application/x-www-form-urlencoded"}
            data = {"ip": observable_name}
            response = requests.post(url, data=data, headers=headers)
            response.raise_for_status()
        elif api_version == "v2":
            # v2 is the paid endpoint and requires an API key
            url = f"https://api.greynoise.io/v2/noise/context/{observable_name}"
            api_key_name = additional_config_params.get(
                "api_key_name", "GREYNOISE_API_KEY")
            api_key = secrets.get_secret(api_key_name)
            if not api_key:
                raise AnalyzerRunException("GREYNOISE_API_KEY not specified.")
            headers = {"Accept": "application/json", "key": api_key}
            response = requests.get(url, headers=headers)
            response.raise_for_status()
        else:
            raise AnalyzerRunException(
                "Invalid API Version. Supported are: v1 (free) & v2 (paid).")
        result = response.json()
        report["report"] = result
    except AnalyzerRunException as e:
        # expected analyzer-level failure: record it in the report
        error_message = (
            "job_id:{} analyzer:{} observable_name:{} Analyzer error {}"
            "".format(job_id, analyzer_name, observable_name, e))
        logger.error(error_message)
        report["errors"].append(error_message)
        report["success"] = False
    except Exception as e:
        # unexpected failure: log the traceback too
        traceback.print_exc()
        error_message = (
            "job_id:{} analyzer:{} observable_name:{} Unexpected error {}"
            "".format(job_id, analyzer_name, observable_name, e))
        logger.exception(error_message)
        report["errors"].append(str(e))
        report["success"] = False
    else:
        report["success"] = True
    general.set_report_and_cleanup(job_id, report)
    logger.info("ended analyzer {} job_id {} observable {}"
                "".format(analyzer_name, job_id, observable_name))
    return report
def run(self):
    """Query CIRCL Passive SSL for certificates seen for the observable IP.

    Credentials must be saved with the template "<user>|<pwd>".
    Returns ``{"ip": ..., "certificates": [{"fingerprint", "subject"}, ...]}``.
    """
    if not self.__credentials:
        raise AnalyzerRunException("no credentials retrieved")
    credential_parts = self.__credentials.split("|")
    if len(credential_parts) != 2:
        raise AnalyzerRunException(
            "CIRCL credentials not properly configured."
            "Template to use: '<user>|<pwd>'")
    user, pwd = credential_parts
    pssl = pypssl.PyPSSL(basic_auth=(user, pwd))
    result = pssl.query(self.observable_name, timeout=5)
    ip_data = result.get(self.observable_name, {})
    certificates = list(ip_data.get("certificates", [])) if ip_data else []
    parsed_result = {"ip": self.observable_name, "certificates": []}
    for fingerprint in certificates:
        # keep only certificates that actually carry subject values
        subject_values = (result.get(self.observable_name).get(
            "subjects", {}).get(fingerprint, {}).get("values", []))
        if subject_values:
            parsed_result["certificates"].append({
                "fingerprint": fingerprint,
                "subject": subject_values[0]
            })
    return parsed_result
def _ha_get_report(api_key, observable_name, observable_classification):
    """Search Hybrid Analysis (Falcon Sandbox) for the given observable.

    Raises:
        AnalyzerRunException: on unsupported observable type or
            request failure.
    """
    headers = {
        'api-key': api_key,
        'user-agent': 'Falcon Sandbox',
        'accept': 'application/json'
    }
    # each observable type maps to (endpoint, payload key)
    endpoint_by_type = {
        'domain': ('search/terms', 'domain'),
        'ip': ('search/terms', 'host'),
        'url': ('search/terms', 'url'),
        'hash': ('search/hash', 'hash'),
    }
    if observable_classification not in endpoint_by_type:
        raise AnalyzerRunException(
            "not supported observable type {}. Supported are: hash, ip, domain and url"
            "".format(observable_classification))
    uri, data_key = endpoint_by_type[observable_classification]
    data = {data_key: observable_name}
    try:
        response = requests.post(ha_base + uri, data=data, headers=headers)
        response.raise_for_status()
    except requests.RequestException as e:
        raise AnalyzerRunException(e)
    return response.json()
def vt_get_report(api_key, observable_name, observable_classification):
    """Fetch a VirusTotal v2 report for the observable.

    Raises:
        AnalyzerRunException: for unsupported types, request errors, or a
            -1 response code from VirusTotal.
    """
    route_table = {
        'domain': ('domain/report', {'domain': observable_name}),
        'ip': ('ip-address/report', {'ip': observable_name}),
        'url': ('url/report', {'resource': observable_name}),
        'hash': ('file/report', {'resource': observable_name, 'allinfo': 1}),
    }
    if observable_classification not in route_table:
        raise AnalyzerRunException(
            "not supported observable type {}. Supported are: hash, ip, domain and url"
            "".format(observable_classification))
    uri, extra_params = route_table[observable_classification]
    params = {'apikey': api_key}
    params.update(extra_params)
    try:
        response = requests.get(vt_base + uri, params=params)
        response.raise_for_status()
    except requests.RequestException as e:
        raise AnalyzerRunException(e)
    result = response.json()
    # VT signals an error with response_code == -1; default 1 means "ok"
    if result.get('response_code', 1) == -1:
        raise AnalyzerRunException(
            "response code -1. result:{}".format(result))
    return result
def __intezer_scan_file(self, intezer_token):
    """Submit this job's binary to Intezer Analyze and poll for the result.

    Returns:
        dict: the analysis JSON.

    Raises:
        AnalyzerRunException: if the submission is rejected (non-201) or
            polling runs out of attempts (unless ``self.is_test`` is set).
    """
    session = requests.session()
    session.headers["Authorization"] = f"Bearer {intezer_token}"
    name_to_send = self.filename if self.filename else self.md5
    binary = get_binary(self.job_id)
    files = {"file": (name_to_send, binary)}
    logger.info(f"intezer md5 {self.md5} sending sample for analysis")
    response = session.post(self.base_url + "/analyze", files=files)
    if response.status_code != 201:
        raise AnalyzerRunException(
            f"failed analyze request, status code {response.status_code}")
    for chance in range(self.max_tries):
        # fix: stop polling once a 200 result arrives instead of running
        # the remaining (no-op) loop iterations
        if response.status_code == 200:
            break
        time.sleep(self.poll_distance)
        logger.info(
            f"intezer md5 {self.md5} polling for result try #{chance + 1}"
        )
        result_url = response.json().get("result_url", "")
        response = session.get(self.base_url + result_url)
        response.raise_for_status()
    if response.status_code != 200 and not self.is_test:
        raise AnalyzerRunException("received max tries attempts")
    return response.json()
def vt_get_report(api_key, observable_name, obs_clsfn):
    """Fetch a VirusTotal v2 report for the observable and return its JSON.

    Raises:
        AnalyzerRunException: on unsupported observable type or request
            failure.
    """
    endpoint_map = {
        "domain": ("domain/report", {"domain": observable_name}),
        "ip": ("ip-address/report", {"ip": observable_name}),
        "url": ("url/report", {"resource": observable_name}),
        "hash": ("file/report", {"resource": observable_name, "allinfo": 1}),
    }
    if obs_clsfn not in endpoint_map:
        raise AnalyzerRunException(
            f"not supported observable type {obs_clsfn}. "
            "Supported are: hash, ip, domain and url.")
    uri, extra = endpoint_map[obs_clsfn]
    params = {"apikey": api_key}
    params.update(extra)
    try:
        response = requests.get(vt_base + uri, params=params)
        response.raise_for_status()
    except requests.RequestException as e:
        raise AnalyzerRunException(e)
    return response.json()
def _poll_for_result(self, req_key):
    """Poll the analyzer service for the result identified by ``req_key``.

    Stops on a final status ("success", "reported_with_fails", "failed").
    A 404 status code means the result is simply not ready yet.
    """
    for chance in range(self.max_tries):
        time.sleep(self.poll_distance)
        logger.info(
            f"({self.analyzer_name}, job_id: #{self.job_id}) polling."
            f"Try #{chance+1}. Starting the query...")
        try:
            status_code, json_data = self._query_for_result(
                self.url, req_key)
        except (requests.RequestException, json.JSONDecodeError) as e:
            raise AnalyzerRunException(e)
        analysis_status = json_data.get("status", None)
        if analysis_status in ["success", "reported_with_fails", "failed"]:
            break
        if status_code == 404:
            # not ready yet: keep polling quietly
            continue
        logger.info(
            f"Result Polling. Try n:{chance+1}, status: {analysis_status}"
            f" ({self.analyzer_name}, job_id: #{self.job_id})")
    else:
        raise AnalyzerRunException(
            "max polls tried without getting any result.")
    return json_data