def run(self):
    """Dispatch the observable to the matching OTX query based on its data type."""
    Analyzer.run(self)

    if self.data_type == 'file':
        hashes = self.get_param('attachment.hashes', None)
        if hashes is None:
            filepath = self.get_param('file', None, 'File is missing')
            # Hash in binary mode and in chunks: text mode ('r') fails on
            # arbitrary binary samples (hashlib requires bytes) and reading
            # the whole file at once wastes memory on large ones.
            digest = hashlib.sha256()
            with open(filepath, 'rb') as fh:
                for chunk in iter(lambda: fh.read(4096), b''):
                    digest.update(chunk)
            observable_hash = digest.hexdigest()
        else:
            # find SHA256 hash (64 hex chars) among the attachment hashes
            observable_hash = next(h for h in hashes if len(h) == 64)
        self.otx_query_file(observable_hash)
    elif self.data_type == 'url':
        data = self.get_param('data', None, 'Data is missing')
        self.otx_query_url(data)
    elif self.data_type == 'domain':
        data = self.get_param('data', None, 'Data is missing')
        self.otx_query_domain(data)
    elif self.data_type == 'ip':
        data = self.get_param('data', None, 'Data is missing')
        self.otx_query_ip(data)
    elif self.data_type == 'hash':
        data = self.get_param('data', None, 'Data is missing')
        self.otx_query_file(data)
    else:
        self.error('Invalid data type')
def run(self):
    """Look up an IP in the bundled GeoLite2 City database and report the details."""
    Analyzer.run(self)

    if self.data_type == 'ip':
        try:
            data = self.get_data()
            db_path = os.path.join(os.path.dirname(__file__), 'GeoLite2-City.mmdb')
            reader = geoip2.database.Reader(db_path)
            try:
                city = reader.city(data)
            finally:
                # The reader holds an open file handle on the mmdb; release it
                # explicitly instead of leaking it.
                reader.close()
            self.report({
                'city': self.dump_city(city.city),
                'continent': self.dump_continent(city.continent),
                'country': self.dump_country(city.country),
                'location': self.dump_location(city.location),
                'registered_country': self.dump_country(city.registered_country),
                'represented_country': self.dump_country(city.represented_country),
                'subdivisions': self.dump_country(city.subdivisions.most_specific),
                'traits': self.dump_traits(city.traits)
            })
        except ValueError:
            self.error('Invalid IP address')
        except AddressNotFoundError:
            self.error('Unknown IP address')
        except Exception as e:
            self.unexpectedError(type(e))
    else:
        self.notSupported()
def run(self):
    """Collect every e-mail for a domain from the domain-search endpoint,
    merging paginated results into a single report."""
    Analyzer.run(self)

    if self.service == 'domainsearch' and self.data_type in ('domain', 'fqdn'):
        try:
            url_template = "{}domain-search?domain={}&api_key={}&limit=100&offset={}"
            page_offset = 0
            merged = requests.get(url_template.format(
                self.URI, self.get_data(), self.key, page_offset)).json()
            meta = merged.get('meta')
            if meta:
                # Keep fetching pages while the reported total exceeds the
                # offset we have reached, accumulating e-mails as we go.
                while meta.get('results') > page_offset:
                    page_offset = meta.get('limit') + meta.get('offset')
                    page = requests.get(url_template.format(
                        self.URI, self.get_data(), self.key, page_offset)).json()
                    meta = page.get('meta')
                    merged['data']['emails'] += page['data']['emails']
            self.report(merged)
        except Exception as e:
            self.unexpectedError(e)
    else:
        self.notSupported()
def run(self):
    """Route the observable to the appropriate OTX lookup for its data type."""
    Analyzer.run(self)

    if self.data_type == 'file':
        hashes = self.get_param('attachment.hashes', None)
        if hashes is not None:
            # Pick the SHA256 digest (64 hex characters) from the hashes.
            hash = next(h for h in hashes if len(h) == 64)
        else:
            filepath = self.get_param('file', None, 'File is missing')
            digest = hashlib.sha256()
            with io.open(filepath, 'rb') as fh:
                # Hash the sample in fixed-size chunks to bound memory use.
                for chunk in iter(lambda: fh.read(4096), b''):
                    digest.update(chunk)
            hash = digest.hexdigest()
        self.otx_query_file(hash)
    elif self.data_type == 'url':
        self.otx_query_url(self.get_param('data', None, 'Data is missing'))
    elif self.data_type == 'domain':
        self.otx_query_domain(self.get_param('data', None, 'Data is missing'))
    elif self.data_type == 'ip':
        self.otx_query_ip(self.get_param('data', None, 'Data is missing'))
    elif self.data_type == 'hash':
        self.otx_query_file(self.get_param('data', None, 'Data is missing'))
    else:
        self.error('Invalid data type')
def run(self):
    """Fetch a ThreatScore for a domain or IP and report the JSON payload."""
    Analyzer.run(self)

    if self.service == 'ThreatScore' and self.data_type in ('domain', 'ip'):
        try:
            payload = requests.get("{}{}".format(self.URI, self.get_data())).json()
            # Normalize an empty response body to an empty report object.
            self.report(payload if len(payload) > 0 else {})
        except Exception as e:
            self.unexpectedError(e)
    else:
        self.notSupported()
def run(self):
    """Query the ThreatCrowd report endpoint for a domain, IP, or e-mail."""
    Analyzer.run(self)

    if self.data_type in ('domain', 'ip', 'mail'):
        # ThreatCrowd names its e-mail endpoint 'email' rather than 'mail'.
        tc_type = 'email' if self.data_type == 'mail' else self.data_type
        try:
            response = requests.get("{}/{}/report/".format(self.URI, tc_type),
                                    {tc_type: self.get_data()})
            self.report(response.json())
        except Exception as e:
            self.unexpectedError(e)
    else:
        self.notSupported()
def run(self):
    """Query the Emerging Threats API for the observable and aggregate the
    per-feature results into a single report."""
    Analyzer.run(self)

    info = {}
    try:
        if self.data_type != 'file':
            object_name = self.get_data()
        if self.data_type in ['domain', 'fqdn']:
            url = "https://api.emergingthreats.net/v1/domains/"
            features = {'reputation', 'urls', 'samples', 'ips', 'events',
                        'nameservers', 'whois', 'geoloc'}
        elif self.data_type == 'ip':
            url = "https://api.emergingthreats.net/v1/ips/"
            features = {'reputation', 'urls', 'samples', 'domains', 'events', 'geoloc'}
        elif self.data_type == 'hash':
            url = "https://api.emergingthreats.net/v1/samples/"
            features = {'', 'connections', 'dns', 'http', 'events'}
        elif self.data_type == 'file':
            url = "https://api.emergingthreats.net/v1/samples/"
            features = {'', 'connections', 'dns', 'http', 'events'}
            hashes = self.get_param('attachment.hashes', None)
            if hashes is None:
                filepath = self.get_param('file', None, 'File is missing')
                # Hash in binary mode and in chunks: text mode ('r') fails on
                # binary samples and hashlib only accepts bytes.
                digest = hashlib.md5()
                with open(filepath, 'rb') as fh:
                    for chunk in iter(lambda: fh.read(4096), b''):
                        digest.update(chunk)
                object_name = digest.hexdigest()
            else:
                # find MD5 hash (32 hex chars) among the attachment hashes
                object_name = next(h for h in hashes if len(h) == 32)
        else:
            self.error('Invalid data type !')
        for feature in features:
            # The '' pseudo-feature hits the bare object endpoint; only real
            # feature names need a '/' separator appended.
            end = '/' if feature else ''
            time.sleep(1)  # crude rate limiting between API calls
            r = self.session.get(url + object_name + end + feature)
            if feature == '':
                feature = 'main'
            r_json = r.json()
            if r.status_code == 200 and r_json['response'] not in [{}, []]:
                info[feature] = r_json['response']
            elif r.status_code != 200:
                info[feature] = "Error"
            else:
                info[feature] = "-"
        self.report(info)
    except Exception as e:
        self.unexpectedError(e)
def run(self):
    """Dump certificate data for a domain observable."""
    Analyzer.run(self)

    if self.data_type == 'domain':
        try:
            # get_data() is the current cortexutils accessor; the deprecated
            # getData() alias and the redundant 'mydata' copy are gone.
            data = self.get_data()
            self.report({
                'certobj': self.dump_data(data)
            })
        except Exception as e:
            self.unexpectedError(e)
    else:
        self.notSupported()
def run(self):
    """Look up a URL against Phishing Initiative and report the first result."""
    Analyzer.run(self)

    observable = self.get_data()
    try:
        client = PyEUPI(self.phishinginitiative_key)
        api_response = client.lookup(url=observable)
        # A non-200 embedded status means the lookup itself failed.
        if "status" in api_response and api_response["status"] != 200:
            self.error(api_response["message"])
        else:
            self.report(api_response["results"][0])
    except Exception:
        self.unexpectedError("Service unavailable")
def run(self):
    """Scrape the Fortiguard web filter page and report the observable's category."""
    Analyzer.run(self)

    if self.data_type in ('domain', 'url'):
        try:
            pattern = re.compile(r"(?:Category: )([\w\s]+)")
            baseurl = 'https://www.fortiguard.com/webfilter?q='
            url = baseurl + self.get_data()
            req = requests.get(url)
            # Search the decoded text: a str pattern cannot be matched
            # against req.content (bytes) under Python 3.
            category_match = re.search(pattern, req.text, flags=0)
            if category_match is None:
                # Avoid an AttributeError on .group() when the page layout
                # changes or the lookup yields no category.
                self.error('Category not found in Fortiguard response')
            self.report({
                'category': category_match.group(1)
            })
        except ValueError as e:
            self.unexpectedError(e)
    else:
        self.notSupported()
def run(self):
    """Resolve one level of HTTP redirection for a URL, refusing targets that
    look like bare IP addresses or carry an explicit port.

    Reports {'found': bool, 'url': redirect target or None}.
    """
    Analyzer.run(self)
    url = self.get_data()
    # Guard: reject URLs that are plain IPv4 addresses (optionally with a
    # port), host:port pairs, or bracketed IPv6 literals in any of their
    # textual forms — this analyzer must not be usable as a port/IP scanner.
    if len(re.findall(
            r"^(http:\/\/)?(https:\/\/)?[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}(:[0-9]{1,5})?(\/)?$",
            url)) > 0 \
            or len(re.findall(r"^(http:\/\/)?(https:\/\/)?.+:[0-9]{1,5}$", url)) \
            or len(re.findall(r'^(http:\/\/\[)?(https:\/\/\[)('
                              '([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|'
                              '([0-9a-fA-F]{1,4}:){1,7}:|'
                              '([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|'
                              '([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|'
                              '([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|'
                              '([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|'
                              '([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|'
                              '[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|'
                              ':((:[0-9a-fA-F]{1,4}){1,7}|:)|'
                              'fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|'
                              '::(ffff(:0{1,4}){0,1}:){0,1}' + \
                              '((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}'
                              '(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|'
                              '([0-9a-fA-F]{1,4}:){1,4}:'
                              '((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}'
                              '(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])'
                              ')(\])?(:[0-9]{1,5})?$', url)):
        self.error("Searching for Ports and IPs not allowed.")
    if self.proxies:
        proxies = self.proxies
    else:
        proxies = {}
    result = {'found': False, 'url': None}
    try:
        # allow_redirects=False so we capture the redirect target instead of
        # following it.
        response = requests.get(url, proxies=proxies, allow_redirects=False)
        if (response.status_code == 301) or (response.status_code == 302):
            result['url'] = response.headers['Location']
            result['found'] = True
    except Exception as e:
        self.unexpectedError("Service unavailable: %s" % e)
    self.report(result)
def run(self):
    """Query the forensics endpoint with the SHA256 of the observable and
    report whether it is known, together with any attached reports."""
    Analyzer.run(self)

    try:
        user_agent = {'User-agent': 'Cortex Analyzer'}
        sha256 = None
        report = {}
        if self.service in ['query']:
            if self.data_type == 'file':
                filepath = self.get_param('file', None, 'File is missing')
                # Hash in binary mode and in chunks: text mode ('r') fails on
                # binary samples and hashlib only accepts bytes.
                digest = hashlib.sha256()
                with open(filepath, 'rb') as fh:
                    for chunk in iter(lambda: fh.read(4096), b''):
                        digest.update(chunk)
                sha256 = digest.hexdigest()
            elif self.data_type == 'hash' and len(self.get_data()) == 64:
                sha256 = self.get_data()
            else:
                # hashlib requires bytes, so encode the textual observable.
                sha256 = hashlib.sha256(self.get_data().encode('utf-8')).hexdigest()
        else:
            self.error('unknown service')

        if sha256 is not None:
            params = {'threatId': sha256}
            response = requests.get(self.url.strip('/') + '/v2/forensics',
                                    params=params,
                                    headers=user_agent,
                                    verify=self.verify,
                                    auth=HTTPBasicAuth(self.apikey, self.secret))
            if response.status_code == 200:
                data = response.json()
                report['known'] = True
                if 'reports' in data:
                    report['reports'] = data['reports']
                if 'generated' in data:
                    report['generated'] = data['generated']
                self.report(report)
            elif response.status_code == 400:
                self.error('bad request sent')
            elif response.status_code == 401:
                self.error('unauthorized access, verify your key and secret values')
            elif response.status_code == 404:
                # 404 means the hash is simply unknown, not a failure.
                report = {'known': False}
                self.report(report)
            else:
                self.error('unknown error')
        else:
            self.error('no hash defined')
    except requests.exceptions.RequestException as e:
        self.error(e)
    except Exception as e:
        self.unexpectedError(e)
def run(self):
    """Probe the server for API v2 support and dispatch to the matching
    implementation (runv2 or runv1)."""
    Analyzer.run(self)

    try:
        payload = {'apikey': self.apikey}
        # Only API v2 deployments answer 200 on the v2 'server/online' probe.
        probe = requests.post(self.url + 'api/v2/server/online',
                              data=payload,
                              timeout=self.networktimeout,
                              allow_redirects=False)
        if probe.status_code == 200:
            self.runv2()
        else:
            self.runv1()
    except Exception as e:
        self.unexpectedError(e)
def run(self):
    """Search the connector for an IP observable when the Check_IP service is selected."""
    Analyzer.run(self)

    observable = self.get_data()
    try:
        if self.service == 'Check_IP':
            if self.data_type != 'ip':
                self.notSupported()
            else:
                self.report(self.con.search(observable))
    except ValueError:
        self.error('Invalid IP address')
    except Exception as e:
        self.unexpectedError(type(e))
def run(self):
    """Query Shodan: reverse DNS + host details for an IP, or DNS resolution
    + domain info for a domain."""
    Analyzer.run(self)

    try:
        self.shodan_client = ShodanAPIPublic(self.shodan_key)
        if self.data_type == 'ip':
            ip = self.get_param('data', None, 'Data is missing')
            self.report({
                'reverse_dns': {'hostnames': self.shodan_client.reverse_dns(ip)[ip]},
                'host': self.shodan_client.host(ip),
            })
        if self.data_type == 'domain':
            domain = self.get_param('data', None, 'Data is missing')
            self.report({
                'dns_resolve': self.shodan_client.dns_resolve(domain),
                'infos_domain': self.shodan_client.info_domains(domain),
            })
    except APIError as e:
        self.error(str(e))
    except Exception as e:
        self.unexpectedError(e)
def run(self):
    """Submit a URL to Phishing Initiative and map the French status text of
    the response onto a stable status tag."""
    Analyzer.run(self)

    observable = self.get_data()
    try:
        client = PyEUPI(self.phishinginitiative_key)
        submission = client.post_submission(url=observable,
                                            comment="Submitted by Cortex")
        status_text = "".join(submission["url"])
        # The API answers with a French sentence describing the verdict.
        if "Elle a été marquée comme étant du phishing" in status_text:
            self.report({"status": "phishing"})
        elif "Elle est en cours d'analyse" in status_text:
            self.report({"status": "analyzing"})
        elif "Elle n'est pas considérée comme étant du phishing" in status_text:
            self.report({"status": "clean"})
        else:
            self.report({"status": "report"})
    except Exception:
        self.unexpectedError("Service unavailable")
def run(self):
    """Query the Maltiverse API endpoint matching the observable's data type."""
    Analyzer.run(self)

    if self.data_type in ('fqdn', 'domain', 'ip', 'url'):
        try:
            endpoint = "ip"
            # Renamed from 'id' to avoid shadowing the builtin.
            identifier = self.get_data()
            if self.data_type in ('fqdn', 'domain'):
                endpoint = "hostname"
            elif self.data_type == 'url':
                endpoint = "url"
                # Maltiverse addresses URLs by their SHA256; hashlib needs
                # bytes, so encode the URL first (fixes a Python 3 TypeError).
                identifier = hashlib.sha256(identifier.encode('utf-8')).hexdigest()
            response = requests.get(
                "https://api.maltiverse.com/{}/{}".format(endpoint, identifier))
            self.report(response.json())
        except Exception as e:
            self.unexpectedError(e)
    else:
        self.notSupported()
def run(self):
    """Launch a Nessus scan against the observable and report its findings."""
    Analyzer.run(self)

    target = self.get_param('data', None, 'Data is missing')
    if self.data_type not in ('fqdn', 'ip'):
        self.error('Invalid data type')

    # With a network whitelist configured, refuse to scan anything outside it.
    if self.allowed_networks is not None:
        if self.data_type == 'fqdn':
            address = IPAddress(socket.gethostbyname(target))
        else:
            try:
                address = IPAddress(target)
            except Exception as e:
                self.error("{}".format(e))
        in_scope = any(address in IPNetwork(network)
                       for network in self.allowed_networks)
        if not in_scope:
            self.error('Invalid target: not in any allowed network')

    scanner_args = {
        'url': self.url,
        'login': self.login,
        'password': self.password,
    }
    # Without a CA bundle, fall back to skipping certificate verification.
    if self.ca_bundle is not None:
        scanner_args['ca_bundle'] = self.ca_bundle
    else:
        scanner_args['insecure'] = True

    try:
        scanner = ness6rest.Scanner(**scanner_args)
        scanner.policy_set(name=self.policy)
        scanner.scan_add(targets=target, name="cortex scan for " + target)
        self._run_scan(scanner)
        results = self._get_scan_results(scanner)
        self._delete_scan(scanner)
    except Exception as ex:
        self.error('Scanner error: %s' % ex)
    self.report(results)
def run(self):
    """Follow one level of HTTP redirection and report the target URL, if any.

    Reports {'found': bool, 'url': redirect target or None}.
    """
    Analyzer.run(self)

    # get_data() replaces the deprecated getData() alias, consistent with the
    # other analyzers in this file.
    url = self.get_data()
    proxies = self.proxies if self.proxies else {}
    result = {'found': False, 'url': None}
    try:
        # allow_redirects=False so we see the redirect instead of following it.
        response = requests.get(url, proxies=proxies, allow_redirects=False)
        if response.status_code in (301, 302):
            result['url'] = response.headers['Location']
            result['found'] = True
    except Exception as e:
        self.unexpectedError("Service unavailable: %s" % e)
    self.report(result)
def run(self):
    """Run a Staxx query for the observable and report the hits with a count."""
    Analyzer.run(self)

    observable = self.get_param('data', None, 'Data is missing')
    matches = self.staxx_query(observable)
    self.report({'hits': matches, 'count': len(matches)})
def run(self):
    """Submit a file or URL to PayloadSecurity, poll until the analysis
    finishes, then report the summary enriched with screenshots."""
    Analyzer.run(self)

    try:
        user_agent = {'User-agent': 'Cortex Analyzer'}
        # Submit Analysis
        # File
        if self.data_type == 'file':
            data = {'environmentId': self.environmentid,
                    'comment': 'Submitted by Cortex'}
            filepath = self.get_param('file', None, 'File is missing')
            # 'with' closes the sample handle once the upload completes
            # (the original leaked the open file object).
            with open(filepath, "rb") as f:
                files = {"file": f}
                response = requests.post(self.url.strip('/') + '/api/submit',
                                         data=data,
                                         headers=user_agent,
                                         files=files,
                                         auth=HTTPBasicAuth(self.apikey, self.secret),
                                         verify=self.verify)
            if response.status_code == 200:
                data = response.json()
                if data['response_code'] == 0:
                    if data.get('response'):
                        if data['response'].get('sha256'):
                            sha256 = data['response']['sha256']
                elif data['response_code'] != 0:
                    if data.get('response'):
                        if data['response'].get('error'):
                            self.error(data['response']['error'])
                        else:
                            self.error('unknown error return by server')
                    else:
                        self.error('unknown error return by server')
            elif response.status_code == 400:
                self.error('File upload failed or unknown submission related error')
            elif response.status_code == 429:
                self.error('Your API key quota has been reached')
            else:
                self.error('Unknown Server Error')
        # URL
        elif self.data_type == 'url':
            data = {'environmentId': self.environmentid,
                    'analyzeurl': self.get_data(),
                    'comment': 'Submitted by Cortex'}
            response = requests.post(self.url.strip('/') + '/api/submiturl',
                                     data=data,
                                     headers=user_agent,
                                     verify=self.verify,
                                     auth=HTTPBasicAuth(self.apikey, self.secret))
            if response.status_code == 200:
                data = response.json()
                if data['response_code'] == 0:
                    if data.get('response'):
                        if data['response'].get('sha256'):
                            sha256 = data['response']['sha256']
                elif data['response_code'] != 0:
                    if data.get('response'):
                        if data['response'].get('error'):
                            self.error(data['response']['error'])
                        else:
                            self.error('unknown error return by server')
                    else:
                        self.error('Not expected answer received from server')
            elif response.status_code == 400:
                self.error('File upload failed or unknown submission related error')
            elif response.status_code == 429:
                self.error('Your API key quota has been reached')
            else:
                self.error('Unknown Server Error')
        else:
            self.error('Unknown PayloadSecurity service error')

        # Check analysis status: poll once a minute until SUCCESS or timeout.
        state_url = self.url.strip('/') + '/api/state/' + sha256
        params = {'environmentId': self.environmentid}
        finished = False
        tries = 0
        while not finished and tries <= self.timeout:
            time.sleep(60)
            response = requests.get(state_url,
                                    headers=user_agent,
                                    params=params,
                                    verify=self.verify,
                                    auth=HTTPBasicAuth(self.apikey, self.secret))
            data = response.json()
            if data["response_code"] == 0 and data["response"]["state"] == 'SUCCESS':
                finished = True
            tries += 1
        if not finished:
            self.error('PayloadSecurity analysis timed out')

        # Retrieve report summary
        report = {}
        summary_url = self.url.strip('/') + '/api/summary/' + sha256
        params = {'environmentId': self.environmentid, 'type': 'json'}
        response = requests.get(summary_url,
                                headers=user_agent,
                                params=params,
                                verify=self.verify,
                                auth=HTTPBasicAuth(self.apikey, self.secret))
        if response.status_code == 200:
            data = response.json()
            if data['response_code'] == 0 and data.get('response'):
                report = data['response']
                report['reporturl'] = self.url.strip('/') + '/sample/' + sha256 + \
                    '?environmentId=' + str(self.environmentid)
            else:
                self.error('unknown error return by server')
        else:
            self.error('Unknown Server Error')

        # Retrieve associated screenshots
        # Associated Sha256 can be different if submitted file is an archive
        if 'sha256' in report:
            sha256 = report['sha256']
            screenshots_url = self.url.strip('/') + '/api/sample-screenshots/' + sha256
            params = {'environmentId': self.environmentid, 'type': 'json'}
            response = requests.get(screenshots_url,
                                    headers=user_agent,
                                    params=params,
                                    verify=self.verify,
                                    auth=HTTPBasicAuth(self.apikey, self.secret))
            if response.status_code == 200:
                data = response.json()
                if data['response_code'] == 0 and data.get('response') and data['response'].get('screenshots'):
                    report['screenshots'] = data['response']['screenshots']
                else:
                    self.error('unknown error return by server')
            else:
                self.error('Unknown Server Error')

        if 'reporturl' in report:
            self.report(report)
    except requests.exceptions.RequestException as e:
        self.error(e)
    except Exception as e:
        self.unexpectedError(e)
def run(self):
    """Submit a file or URL to Hybrid Analysis (API v2), poll until the
    analysis succeeds, then report the summary plus screenshots."""
    Analyzer.run(self)

    try:
        request_headers = {
            'User-agent': 'Cortex Analyzer',
            'api-key': self.apikey
        }
        url_is_file = False

        # Analyse file:
        if self.data_type == 'file':
            file_submit_url = '{0}/api/v2/submit/file'.format(
                self.url.strip('/'))
            data = {'environment_id': self.environmentid}
            filepath = self.get_param('file', None, 'File path is missing.')
            # 'with' closes the sample handle after the upload (the original
            # leaked the open file object).
            with open(filepath, 'rb') as f:
                files = {'file': f}
                response = requests.post(file_submit_url,
                                         data=data,
                                         headers=request_headers,
                                         files=files,
                                         verify=self.verify)
            data = response.json()
            if response.status_code == 201:
                if 'sha256' in data:
                    sha256 = data['sha256']
                else:
                    self.error('The server returned an incomplete response.')
            elif response.status_code == 429:
                self.error('Your API quota has been reached.')
            else:
                if 'message' in data:
                    self.error(
                        'File analysis failed due to a submission related error: {0}'
                        .format(data['message']))
                else:
                    self.error(
                        'File analysis failed due to a submission related error.'
                    )
        # Analyse URL:
        elif self.data_type == 'url':
            url_submit_url = '{0}/api/v2/submit/url-for-analysis'.format(
                self.url.strip('/'))
            data = {
                'environment_id': self.environmentid,
                'url': self.get_data()
            }
            response = requests.post(url_submit_url,
                                     data=data,
                                     headers=request_headers,
                                     verify=self.verify)
            data = response.json()
            if response.status_code == 201:
                if 'sha256' in data:
                    sha256 = data['sha256']
                else:
                    self.error('The server returned an incomplete response.')
            elif response.status_code == 429:
                self.error('Your API quota has been reached.')
            else:
                if 'message' in data:
                    if data['message'] == 'download-not-a-url':
                        # The URL points at a downloadable file rather than a
                        # page; resubmit through the url-to-file endpoint.
                        url_is_file = True
                    else:
                        self.error(
                            'URL analysis failed due to a submission related error: {0}'
                            .format(data['message']))
                else:
                    self.error(
                        'URL upload failed due to a submission related error.'
                    )
        else:
            self.error(
                'Unknown Hybrid Analysis analyzer error encountered.')

        # Resubmit URLs that return a file:
        if url_is_file:
            url_submit_url = '{0}/api/v2/submit/url-to-file'.format(
                self.url.strip('/'))
            data = {
                'environment_id': self.environmentid,
                'url': self.get_data()
            }
            response = requests.post(url_submit_url,
                                     data=data,
                                     headers=request_headers,
                                     verify=self.verify)
            data = response.json()
            if response.status_code == 201:
                if 'sha256' in data:
                    sha256 = data['sha256']
                else:
                    self.error('The server returned an incomplete response.')
            elif response.status_code == 429:
                self.error('Your API quota has been reached.')
            else:
                if 'message' in data:
                    self.error(
                        'File (via URL) analysis failed due to a submission related error: {0}'
                        .format(data['message']))
                else:
                    self.error(
                        'File (via URL) analysis failed due to a submission related error.'
                    )

        # Poll service until analysis completes:
        state_url = '{0}/api/v2/report/{1}:{2}/state'.format(
            self.url.strip('/'), sha256, self.environmentid)
        finished = False
        tries = 0
        while not finished and tries <= self.timeout:
            time.sleep(60)
            response = requests.get(state_url,
                                    headers=request_headers,
                                    verify=self.verify)
            data = response.json()
            if response.status_code == 200:
                if 'state' in data:
                    if data['state'] == 'SUCCESS':
                        finished = True
                tries += 1
            else:
                if 'message' in data:
                    self.error(
                        'Error encountered fetching report state: {0}'.format(
                            data['message']))
                else:
                    self.error('Error encountered fetching report state.')
        if not finished:
            self.error('Hybrid Analysis analysis timed out')

        # Fetch summary report:
        report = {}
        summary_url = '{0}/api/v2/report/{1}:{2}/summary'.format(
            self.url.strip('/'), sha256, self.environmentid)
        response = requests.get(summary_url,
                                headers=request_headers,
                                verify=self.verify)
        report = response.json()
        if response.status_code == 200:
            report['report_url'] = '{0}/sample/{1}?environmentId={2}'.format(
                self.url.strip('/'), sha256, str(self.environmentid))
        else:
            if 'message' in data:
                self.error(
                    'Error encountered fetching report summary: {0}'.format(
                        data['message']))
            else:
                self.error('Error encountered fetching report summary.')

        # Fetch screenshots (the analysed sample's sha256 can differ from the
        # submitted one, e.g. for archives):
        if 'sha256' in report:
            sha256 = report['sha256']
            screenshots_url = '{0}/api/v2/report/{1}:{2}/screenshots'.format(
                self.url.strip('/'), sha256, self.environmentid)
            response = requests.get(screenshots_url,
                                    headers=request_headers,
                                    verify=self.verify)
            data = response.json()
            if response.status_code == 200:
                if data:
                    report['screenshots'] = data
                else:
                    self.error('The server returned an incomplete response.')
            else:
                if 'message' in data:
                    self.error(
                        'Error encountered fetching report screenshots: {0}'.format(
                            data['message']))
                else:
                    self.error(
                        'Error encountered fetching report screenshots.')

        # Return report:
        if 'report_url' in report:
            self.report(report)
    except requests.exceptions.RequestException as e:
        self.error(e)
    except Exception as e:
        self.unexpectedError(e)
def run(self):
    """Echo the raw observable and the full analyzer input back as the report."""
    Analyzer.run(self)
    self.report({
        'data': self.get_data(),
        'input': self._input,
    })
def run(self):
    """Query Staxx and report the matching hits along with how many were found."""
    Analyzer.run(self)
    results = self.staxx_query(self.get_param('data', None, 'Data is missing'))
    self.report({'hits': results, 'count': len(results)})
def run(self):
    """Submit a file or URL to PayloadSecurity, poll until the analysis
    finishes, then report the summary enriched with screenshots."""
    Analyzer.run(self)

    try:
        user_agent = {'User-agent': 'Cortex Analyzer'}
        # Submit Analysis
        # File
        if self.data_type == 'file':
            data = {'environmentId': self.environmentid,
                    'comment': 'Submitted by Cortex'}
            filepath = self.get_param('file', None, 'File is missing')
            # 'with' closes the sample handle once the upload completes
            # (the original leaked the open file object).
            with open(filepath, "rb") as f:
                files = {"file": f}
                response = requests.post(self.url.strip('/') + '/api/submit',
                                         data=data,
                                         headers=user_agent,
                                         files=files,
                                         auth=HTTPBasicAuth(self.apikey, self.secret),
                                         verify=self.verify)
            if response.status_code == 200:
                data = response.json()
                if data['response_code'] == 0:
                    if data.get('response'):
                        if data['response'].get('sha256'):
                            sha256 = data['response']['sha256']
                elif data['response_code'] != 0:
                    if data.get('response'):
                        if data['response'].get('error'):
                            self.error(data['response']['error'])
                        else:
                            self.error('unknown error return by server')
                    else:
                        self.error('unknown error return by server')
            elif response.status_code == 400:
                self.error('File upload failed or unknown submission related error')
            elif response.status_code == 429:
                self.error('Your API key quota has been reached')
            else:
                self.error('Unknown Server Error')
        # URL
        elif self.data_type == 'url':
            data = {'environmentId': self.environmentid,
                    'analyzeurl': self.get_data(),
                    'comment': 'Submitted by Cortex'}
            response = requests.post(self.url.strip('/') + '/api/submiturl',
                                     data=data,
                                     headers=user_agent,
                                     verify=self.verify,
                                     auth=HTTPBasicAuth(self.apikey, self.secret))
            if response.status_code == 200:
                data = response.json()
                if data['response_code'] == 0:
                    if data.get('response'):
                        if data['response'].get('sha256'):
                            sha256 = data['response']['sha256']
                elif data['response_code'] != 0:
                    if data.get('response'):
                        if data['response'].get('error'):
                            self.error(data['response']['error'])
                        else:
                            self.error('unknown error return by server')
                    else:
                        self.error('Not expected answer received from server')
            elif response.status_code == 400:
                self.error('File upload failed or unknown submission related error')
            elif response.status_code == 429:
                self.error('Your API key quota has been reached')
            else:
                self.error('Unknown Server Error')
        else:
            self.error('Unknown PayloadSecurity service error')

        # Check analysis status: poll once a minute until SUCCESS or timeout.
        state_url = self.url.strip('/') + '/api/state/' + sha256
        params = {'environmentId': self.environmentid}
        finished = False
        tries = 0
        while not finished and tries <= self.timeout:
            time.sleep(60)
            response = requests.get(state_url,
                                    headers=user_agent,
                                    params=params,
                                    verify=self.verify,
                                    auth=HTTPBasicAuth(self.apikey, self.secret))
            data = response.json()
            if data["response_code"] == 0 and data["response"]["state"] == 'SUCCESS':
                finished = True
            tries += 1
        if not finished:
            self.error('PayloadSecurity analysis timed out')

        # Retrieve report summary
        report = {}
        summary_url = self.url.strip('/') + '/api/summary/' + sha256
        params = {'environmentId': self.environmentid, 'type': 'json'}
        response = requests.get(summary_url,
                                headers=user_agent,
                                params=params,
                                verify=self.verify,
                                auth=HTTPBasicAuth(self.apikey, self.secret))
        if response.status_code == 200:
            data = response.json()
            if data['response_code'] == 0 and data.get('response'):
                report = data['response']
                report['reporturl'] = self.url.strip('/') + '/sample/' + sha256 + \
                    '?environmentId=' + str(self.environmentid)
            else:
                self.error('unknown error return by server')
        else:
            self.error('Unknown Server Error')

        # Retrieve associated screenshots
        # Associated Sha256 can be different if submitted file is an archive
        if 'sha256' in report:
            sha256 = report['sha256']
            screenshots_url = self.url.strip('/') + '/api/sample-screenshots/' + sha256
            params = {'environmentId': self.environmentid, 'type': 'json'}
            response = requests.get(screenshots_url,
                                    headers=user_agent,
                                    params=params,
                                    verify=self.verify,
                                    auth=HTTPBasicAuth(self.apikey, self.secret))
            if response.status_code == 200:
                data = response.json()
                if data['response_code'] == 0 and data.get('response') and data['response'].get('screenshots'):
                    report['screenshots'] = data['response']['screenshots']
                else:
                    self.error('unknown error return by server')
            else:
                self.error('Unknown Server Error')

        if 'reporturl' in report:
            self.report(report)
    except requests.exceptions.RequestException as e:
        self.error(e)
    except Exception as e:
        self.unexpectedError(e)
def run(self):
    """Submit the observable to a Cuckoo sandbox, wait for the analysis to
    complete, then report signatures, IDS alerts, hosts, URIs and yara hits.

    NOTE(review): URL building assumes self.url ends with '/' — confirm
    against the analyzer's configuration handling.
    """
    Analyzer.run(self)
    try:
        # file analysis
        if self.data_type == 'file':
            filepath = self.get_param('file', None, 'File is missing')
            filename = self.get_param('filename', basename(filepath))
            with open(filepath, "rb") as sample:
                files = {"file": (filename, sample)}
                response = requests.post(self.url + 'tasks/create/file',
                                         files=files)
            # Some servers answer with a 'task_ids' list, others with a
            # single 'task_id'; support both shapes.
            task_id = response.json()['task_ids'][0] if 'task_ids' in response.json().keys() \
                else response.json()['task_id']
        # url analysis
        elif self.data_type == 'url':
            data = {"url": self.get_data()}
            response = requests.post(self.url + 'tasks/create/url', data=data)
            task_id = response.json()['task_id']
        else:
            self.error('Invalid data type !')
        # Poll the task status once a minute until it reaches 'reported'.
        finished = False
        tries = 0
        while not finished and tries <= 15:  # wait max 15 mins
            time.sleep(60)
            response = requests.get(self.url + 'tasks/view/' + str(task_id))
            content = response.json()['task']['status']
            if content == 'reported':
                finished = True
            tries += 1
        if not finished:
            self.error('CuckooSandbox analysis timed out')
        # Download the report
        response = requests.get(self.url + 'tasks/report/' + str(task_id) +
                                '/json')
        resp_json = response.json()
        list_description = [x['description'] for x in resp_json['signatures']]
        # Suricata alerts may spell fields 'dstport'/'dstip' or
        # 'dst_port'/'dst_ip'; handle both spellings.
        if 'suricata' in resp_json.keys() and 'alerts' in resp_json['suricata'].keys():
            if any('dstport' in x for x in resp_json['suricata']['alerts']):
                suri_alerts = [(x['signature'], x['dstip'], x['dstport'], x['severity'])
                               for x in resp_json['suricata']['alerts'] if 'dstport' in x.keys()]
            elif any('dst_port' in x for x in resp_json['suricata']['alerts']):
                suri_alerts = [(x['signature'], x['dst_ip'], x['dst_port'], x['severity'])
                               for x in resp_json['suricata']['alerts']]
            else:
                suri_alerts = []
        else:
            suri_alerts = []
        # Same dual field-name handling for snort alerts.
        if 'snort' in resp_json.keys() and 'alerts' in resp_json['snort'].keys():
            if any('dstport' in x for x in resp_json['snort']['alerts']):
                snort_alerts = [(x['message'], x['dstip'], x['dstport'], x['priority'])
                                for x in resp_json['snort']['alerts']]
            elif any('dst_port' in x for x in resp_json['snort']['alerts']):
                snort_alerts = [(x['message'], x['dst_ip'], x['dst_port'], x['priority'])
                                for x in resp_json['snort']['alerts']]
            else:
                snort_alerts = []
        else:
            snort_alerts = []
        # Host entries may be dicts or plain values; the TypeError fallback
        # keeps the raw entries when they are not subscriptable by key.
        try:
            hosts = [(x['ip'], x['hostname'], x['country_name'])
                     for x in resp_json['network']['hosts']] if 'hosts' in resp_json['network'].keys() else None
        except TypeError as e:
            hosts = [x for x in resp_json['network']['hosts']] if 'hosts' in resp_json['network'].keys() else []
        uri = [(x['uri']) for x in resp_json['network']['http']] if 'http' in resp_json['network'].keys() else []
        if self.data_type == 'url':
            self.report({
                'signatures': list_description,
                'suricata_alerts': suri_alerts,
                'snort_alerts': snort_alerts,
                'hosts': hosts,
                'uri': uri,
                # Fall back to info.score when no top-level malscore exists.
                'malscore': resp_json['malscore'] if 'malscore' in resp_json.keys() else resp_json['info'].get(
                    'score', None),
                'malfamily': resp_json.get('malfamily', None),
                'file_type': 'url',
                'yara': resp_json['target']['url'] if 'target' in resp_json.keys() and 'url' in resp_json[
                    'target'].keys() else '-'
            })
        else:
            self.report({
                'signatures': list_description,
                'suricata_alerts': suri_alerts,
                'snort_alerts': snort_alerts,
                'hosts': hosts,
                'uri': uri,
                # Fall back to info.score when no top-level malscore exists.
                'malscore': resp_json['malscore'] if 'malscore' in resp_json.keys() else resp_json['info'].get(
                    'score', None),
                'malfamily': resp_json.get('malfamily', None),
                'file_type': "".join([x for x in resp_json['target']['file']['type']]),
                'yara': [
                    x['name'] + " - " + x['meta']['description'] if 'description' in x['meta'].keys() else x['name']
                    for x in resp_json['target']['file']['yara']]
            })
    except requests.exceptions.RequestException as e:
        self.error(e)
    except Exception as e:
        self.unexpectedError(e)
def run(self):
    """Search one or more Elasticsearch clusters for the observable.

    For every configured endpoint (paired with its api_key or basic-auth
    credentials), queries each configured index with a multi_match over the
    configured fields (pruned to the ones relevant for the observable's
    data type), extracts well-known ECS-style fields from each hit, and
    reports the hits plus summary info (unique users/devices, hit counts,
    a Kibana dashboard link and the equivalent KQL query string).
    """
    Analyzer.run(self)
    try:
        # one search pass per configured endpoint; credentials are zipped
        # positionally with the endpoints
        for endpoint, key, user, password in zip(self.endpoints, self.keys, self.users, self.passwords):
            # prefer API-key auth, then basic auth, then anonymous
            if key:
                es = Elasticsearch(
                    endpoint,
                    api_key=(key),
                    ca_certs=self.cert,
                    verify_certs=self.verify,
                    timeout=30
                )
            elif user:
                es = Elasticsearch(
                    endpoint,
                    http_auth=(user, password),
                    ca_certs=self.cert,
                    verify_certs=self.verify,
                    timeout=30
                )
            else:
                es = Elasticsearch(
                    endpoint,
                    ca_certs=self.cert,
                    verify_certs=self.verify,
                    timeout=30
                )
            info = {}
            hits = []
            devices = []
            # 'eq' means the hit count is exact; switched to 'gte' below when
            # Elasticsearch reports more results than we display
            total = 'eq'
            # url that links to kibana dashboard
            info['query'] = ""
            # query string to show kql search
            info['querystring'] = ""
            self.fields = [x.lower() for x in self.fields]
            # remove all hash fields if not a hash
            if self.data_type != 'hash':
                self.fields = list(filter(
                    lambda s: not ("hash" in s), self.fields))
            # remove all ip fields if not an ip
            if self.data_type != 'ip':
                self.fields = list(filter(
                    lambda s: not ("ip" in s), self.fields))
            # remove all url and domain fields if not a url or domain or fqdn
            if self.data_type != 'domain' and self.data_type != 'url' and self.data_type != 'fqdn':
                self.fields = list(filter(
                    lambda s: not ("url" in s or "domain" in s), self.fields))
            if self.kibana and self.dashboard:
                # building query (URL-encoded Kibana dashboard deep link)
                info['query'] += self.kibana + "/app/kibana#/dashboard/" + self.dashboard + \
                    "?_g=(filters:!(),refreshInterval:(pause:!t,value:0),time:(from:now-1M,to:now))&_a=(columns:!(_source),interval:auto,query:(language:kuery,query:'"
            # building query and query string
            # NOTE(review): assumes self.fields is non-empty after pruning —
            # an observable type matching no configured field would raise
            # IndexError here; confirm configuration guarantees this
            info['query'] += self.fields[0] + "%20:%20%22" + self.data
            info['querystring'] = self.fields[0] + ':"' + self.data
            for field in self.fields[1:]:
                info['query'] += "%22%20or%20" + field + "%20:%20%22" + self.data
                info['querystring'] += '" or ' + field + ':"' + self.data
            info['query'] += "%22'),sort:!(!(start_time,desc)))"
            info['querystring'] += '"'
            # loop to get hits from each index
            for index in self.index:
                # search elastic for fields in each index
                res = es.search(size=self.size, index=index, body={'sort': [{"@timestamp": {"order": "desc"}}], 'query': {'multi_match': {'query': self.data, 'fields': self.fields}}})
                # if relation is gte then more logs exist than we will display
                if res['hits']['total']['relation'] == 'gte' or res['hits']['total']['relation'] == 'gt':
                    total = 'gte'
                # adding results from each query
                for hit in res['hits']['hits']:
                    hitindex = hit['_index']
                    hitid = hit['_id']
                    # every extracted field defaults to "" so the Hit record
                    # is fully populated even for sparse documents
                    # process fields
                    process_parent_name = ""
                    process_name = ""
                    process_command_line = ""
                    # user fields
                    user_name = ""
                    # host fields
                    host_name = ""
                    # base fields
                    timestamp = ""
                    # destination fields
                    destination_ip = ""
                    destination_port = ""
                    # source fields
                    source_ip = ""
                    source_port = ""
                    source_user_name = ""
                    # event fields
                    event_action = ""
                    event_type = ""
                    event_dataset = ""
                    # url fields
                    url_domain = ""
                    url_path = ""
                    url_full = ""
                    # dns fields
                    dns_question_name = ""
                    dns_resolvedip = ""
                    # rule fields
                    rule_category = ""
                    # base fields
                    # NOTE(review): the local 'time' below shadows any
                    # module-level 'time' import, and is only assigned when
                    # '@timestamp' is present — a hit without '@timestamp'
                    # passes something else (or fails) at the Hit() call;
                    # confirm all indexed documents carry '@timestamp'
                    if '@timestamp' in hit['_source']:
                        if isinstance(hit['_source']['@timestamp'], str):
                            timestamp = dateutil.parser.parse(hit['_source']['@timestamp'])
                            time = timestamp.astimezone().strftime("%m/%d/%Y %I:%M %p")
                            timestamp = str(timestamp)
                        else:
                            # numeric timestamps are epoch milliseconds
                            timestamp = dateutil.parser.parse(datetime.fromtimestamp(float(hit['_source']['@timestamp']/1000)).strftime('%c'))
                            time = timestamp.astimezone().strftime("%m/%d/%Y %I:%M %p")
                            timestamp = str(timestamp)
                    # host fields
                    if 'host' in hit['_source']:
                        if 'name' in hit['_source']['host']:
                            host_name = hit['_source']['host']['name']
                    # process fields
                    if 'process' in hit['_source']:
                        if 'parent' in hit['_source']['process']:
                            if 'name' in hit['_source']['process']['parent']:
                                process_parent_name = hit['_source']['process']['parent']['name']
                        if 'name' in hit['_source']['process']:
                            process_name = hit['_source']['process']['name']
                        if 'command_line' in hit['_source']['process']:
                            process_command_line = hit['_source']['process']['command_line']
                    # destination fields
                    if 'destination' in hit['_source']:
                        if 'ip' in hit['_source']['destination']:
                            destination_ip = hit['_source']['destination']['ip']
                        if 'port' in hit['_source']['destination']:
                            destination_port = hit['_source']['destination']['port']
                    # source fields
                    if 'source' in hit['_source']:
                        if 'ip' in hit['_source']['source']:
                            source_ip = hit['_source']['source']['ip']
                        if 'port' in hit['_source']['source']:
                            source_port = hit['_source']['source']['port']
                        if 'user' in hit['_source']['source']:
                            if 'name' in hit['_source']['source']['user']:
                                source_user_name = hit['_source']['source']['user']['name']
                    # event fields
                    if 'event' in hit['_source']:
                        if 'action' in hit['_source']['event']:
                            event_action = hit['_source']['event']['action']
                        if 'type' in hit['_source']['event']:
                            event_type = hit['_source']['event']['type']
                        if 'dataset' in hit['_source']['event']:
                            event_dataset = hit['_source']['event']['dataset']
                    # url fields
                    if 'url' in hit['_source']:
                        if 'domain' in hit['_source']['url']:
                            url_domain = hit['_source']['url']['domain']
                        if 'path' in hit['_source']['url']:
                            url_path = hit['_source']['url']['path']
                        if 'full' in hit['_source']['url']:
                            url_full = hit['_source']['url']['full']
                    # user fields
                    if 'user' in hit['_source']:
                        if 'name' in hit['_source']['user']:
                            user_name = hit['_source']['user']['name']
                    # rule fields
                    if 'rule' in hit['_source']:
                        if 'category' in hit['_source']['rule']:
                            rule_category = hit['_source']['rule']['category']
                    # dns fields
                    if 'dns' in hit['_source']:
                        if 'question' in hit['_source']['dns']:
                            if 'name' in hit['_source']['dns']['question']:
                                dns_question_name = hit['_source']['dns']['question']['name']
                        if 'resolved_ip' in hit['_source']['dns']:
                            dns_resolvedip = hit['_source']['dns']['resolved_ip']
                    hits.append(Hit(hitindex, hitid, process_parent_name, process_name, process_command_line, user_name, host_name,
                                    timestamp, time, destination_ip, destination_port, source_ip, source_port, source_user_name,
                                    url_domain, url_path, url_full, rule_category, dns_question_name, dns_resolvedip, event_dataset, event_action, event_type))
            # setup users (both user.name and source.user.name count)
            usernames = [item.user_name for item in hits]
            source_usernames = [item.source_user_name for item in hits]
            usernames.extend(source_usernames)
            info['uniqueusers'] = list(set(usernames))
            if "" in info['uniqueusers']:
                info['uniqueusers'].remove("")
            info['userhitcount'] = len(info['uniqueusers'])
            # setup devices
            devices = [item.host_name for item in hits]
            info['uniquedevices'] = list(set(devices))
            if "" in info['uniquedevices']:
                info['uniquedevices'].remove("")
            info['devicehitcount'] = len(info['uniquedevices'])
            # observable that was searched on
            info['data'] = self.data
            info['dashboard'] = self.dashboard
            info['hitcount'] = len(hits)
            # sort the hits based on timestamp
            hits.sort(key=operator.attrgetter('timestamp'), reverse=True)
            hits = [ob.__dict__ for ob in hits]
            # NOTE(review): report() is called once per endpoint, so with
            # multiple endpoints later reports overwrite earlier ones —
            # confirm this is intended
            self.report({'hits': hits, 'info': info, 'total': total})
    except Exception as e:
        self.unexpectedError(e)
    return
def run(self):
    """Query the selected PassiveTotal service for the observable and
    report the raw API response.

    The service name comes from self.service; unknown names terminate via
    self.error(). API failures go through self.unexpectedError().
    """
    Analyzer.run(self)
    data = self.getData()
    try:
        # service name -> (client class, method name); replaces eight
        # near-identical branches that each rebuilt a client
        services = {
            'enrichment': (EnrichmentRequest, 'get_enrichment'),
            'malware': (EnrichmentRequest, 'get_malware'),
            'osint': (EnrichmentRequest, 'get_osint'),
            'passive_dns': (DnsRequest, 'get_passive_dns'),
            'ssl_certificate_details': (SslRequest, 'get_ssl_certificate_details'),
            'ssl_certificate_history': (SslRequest, 'get_ssl_certificate_history'),
            'unique_resolutions': (DnsRequest, 'get_unique_resolutions'),
            'whois_details': (WhoisRequest, 'get_whois_details'),
        }
        if self.service not in services:
            self.error('Unknown PassiveTotal service')
        request_cls, method_name = services[self.service]
        client = request_cls(username=self.username, api_key=self.api_key)
        result = getattr(client, method_name)(query=data)
        self.report(result)
    except Exception as e:
        self.unexpectedError(e)
def run(self):
    """Submit the observable to Cuckoo Sandbox (TLS verification per
    self.verify), poll until the analysis is reported, then fetch the JSON
    report and summarize it.

    Reports matched signatures, suricata/snort alerts, resolved domains,
    requested URIs, malscore/malfamily, file type and matched yara rules.
    """
    Analyzer.run(self)
    try:
        # --- submit the sample/url and obtain a task id ---
        if self.data_type == 'file':
            filepath = self.get_param('file', None, 'File is missing')
            filename = self.get_param('filename', basename(filepath))
            with open(filepath, "rb") as sample:
                files = {"file": (filename, sample)}
                response = requests.post(self.url + 'tasks/create/file',
                                         files=files, verify=self.verify)
            task_json = response.json()
            # some Cuckoo forks return a 'task_ids' list, others 'task_id'
            task_id = (task_json['task_ids'][0]
                       if 'task_ids' in task_json else task_json['task_id'])
        elif self.data_type == 'url':
            data = {"url": self.get_data()}
            response = requests.post(
                self.url + 'tasks/create/url', data=data, verify=self.verify)
            task_id = response.json()['task_id']
        else:
            self.error('Invalid data type !')

        # --- poll until the task reaches the 'reported' state ---
        finished = False
        tries = 0
        while not finished and tries <= 15:  # wait max ~15 mins
            time.sleep(60)
            response = requests.get(
                self.url + 'tasks/view/' + str(task_id), verify=self.verify)
            content = response.json()['task']['status']
            if content == 'reported':
                finished = True
            tries += 1
        if not finished:
            self.error('CuckooSandbox analysis timed out')

        # --- download and summarize the report ---
        response = requests.get(
            self.url + 'tasks/report/' + str(task_id) + '/json',
            verify=self.verify)
        resp_json = response.json()
        list_description = [x['description'] for x in resp_json['signatures']]

        # suricata alerts: field names differ between Cuckoo versions
        # (dstport/dstip vs dst_port/dst_ip); skip alerts missing the fields
        suri_alerts = []
        if 'suricata' in resp_json and 'alerts' in resp_json['suricata']:
            alerts = resp_json['suricata']['alerts']
            if any('dstport' in x for x in alerts):
                suri_alerts = [(x['signature'], x['dstip'], x['dstport'], x['severity'])
                               for x in alerts if 'dstport' in x]
            elif any('dst_port' in x for x in alerts):
                suri_alerts = [(x['signature'], x['dst_ip'], x['dst_port'], x['severity'])
                               for x in alerts if 'dst_port' in x]

        # snort alerts: same field-name duality; filter each comprehension to
        # avoid KeyError on mixed alert shapes (the original only filtered
        # the suricata list)
        snort_alerts = []
        if 'snort' in resp_json and 'alerts' in resp_json['snort']:
            alerts = resp_json['snort']['alerts']
            if any('dstport' in x for x in alerts):
                snort_alerts = [(x['message'], x['dstip'], x['dstport'], x['priority'])
                                for x in alerts if 'dstport' in x]
            elif any('dst_port' in x for x in alerts):
                snort_alerts = [(x['message'], x['dst_ip'], x['dst_port'], x['priority'])
                                for x in alerts if 'dst_port' in x]

        # resolved domains; some reports store them as plain strings, in
        # which case the tuple extraction raises TypeError. Default is now
        # [] in every branch (the original mixed None and []).
        try:
            domains = ([(x['ip'], x['domain'])
                        for x in resp_json['network']['domains']]
                       if 'domains' in resp_json['network'] else [])
        except TypeError:
            domains = (list(resp_json['network']['domains'])
                       if 'domains' in resp_json['network'] else [])
        uri = ([x['uri'] for x in resp_json['network']['http']]
               if 'http' in resp_json['network'] else [])

        report = {
            'signatures': list_description,
            'suricata_alerts': suri_alerts,
            'snort_alerts': snort_alerts,
            'domains': domains,
            'uri': uri,
            # older Cuckoo: top-level 'malscore'; newer: info.score
            'malscore': (resp_json['malscore'] if 'malscore' in resp_json
                         else resp_json['info'].get('score', None)),
            'malfamily': resp_json.get('malfamily', None),
        }
        if self.data_type == 'url':
            report['file_type'] = 'url'
            report['yara'] = (resp_json['target']['url']
                              if 'target' in resp_json and 'url' in resp_json['target']
                              else '-')
        else:
            report['file_type'] = "".join(resp_json['target']['file']['type'])
            report['yara'] = [
                x['name'] + " - " + x['meta']['description']
                if 'description' in x['meta'] else x['name']
                for x in resp_json['target']['file']['yara']
            ]
        self.report(report)
    except requests.exceptions.RequestException as e:
        self.error(e)
    except Exception as e:
        self.unexpectedError(e)
def run(self):
    """Submit a file or URL to ANY.RUN, poll until the analysis is done,
    then report a pruned version of the final report (huge sub-structures
    are dropped; the report itself links to the full ANY.RUN report).
    """
    Analyzer.run(self)
    try:
        headers = {"Authorization": "API-Key {0}".format(self.token)}
        task_id = None
        status_code = None
        tries = 0
        if self.data_type == "file":
            filepath = self.get_param("file", None, "File is missing")
            filename = self.get_param("filename", basename(filepath))
            while status_code in (None, 429) and tries <= 15:
                # reopen the sample on every retry so the stream is rewound
                with open(filepath, "rb") as sample:
                    files = {"file": (filename, sample)}
                    response = requests.post(
                        "{0}/analysis".format(self.url),
                        files=files,
                        headers=headers,
                        verify=self.verify_ssl,
                    )
                status_code = response.status_code
                if status_code == 200:
                    task_id = response.json()["data"]["taskid"]
                elif status_code == 201:
                    task_id = response.json()["taskid"]
                elif status_code == 429:
                    # ANY.RUN does not support parallel runs: wait, resubmit
                    time.sleep(60)
                    tries += 1
                else:
                    self.error(response.json()["message"])
        elif self.data_type == "url":
            url = self.get_param("data", None, "Url is missing")
            data = {"obj_type": "url", "obj_url": url}
            while status_code in (None, 429) and tries <= 15:
                response = requests.post(
                    "{0}/analysis".format(self.url),
                    data=data,
                    headers=headers,
                    verify=self.verify_ssl,
                )
                status_code = response.status_code
                if status_code == 200:
                    task_id = response.json()["data"]["taskid"]
                elif status_code == 201:
                    task_id = response.json()["taskid"]
                elif status_code == 429:
                    # ANY.RUN does not support parallel runs: wait, resubmit
                    time.sleep(60)
                    tries += 1
                else:
                    self.error(response.json()["message"])
        else:
            self.error("Invalid data type!")

        if task_id is None:
            # all 429 retries were exhausted without a successful submit;
            # without this guard the polling below raised NameError
            self.error("AnyRun submission timed out")

        finished = False
        tries = 0
        while not finished and tries <= 15:  # wait max ~15 mins
            time.sleep(60)
            response = requests.get(
                "{0}/analysis/{1}".format(self.url, task_id),
                headers=headers,
                verify=self.verify_ssl,
            )
            if response.status_code == 200:
                finished = response.json()["data"]["status"] == "done"
            elif 400 < response.status_code < 500:
                self.error(response.json()["message"])
            tries += 1
        if not finished:
            self.error("AnyRun analysis timed out")

        # these items can be huge; the report links back to the full
        # ANY.RUN report, so drop them from the Cortex output
        final_report = response.json()["data"]
        final_report.pop("environments", None)
        final_report.pop("modified", None)
        for incident in final_report.get("incidents", []):
            incident.pop("events", None)
        for process in final_report.get("processes", []):
            process.pop("modules", None)
        self.report(final_report)
    except requests.exceptions.RequestException as e:
        self.error(str(e))
    except Exception as e:
        self.unexpectedError(e)
def run(self):
    """Submit a file or URL to Joe Sandbox, wait for the analysis to
    finish and report the 'irjsonfixed' analysis enriched with links to
    the HTML and PDF reports.
    """
    Analyzer.run(self)
    try:
        data = {
            'apikey': self.apikey,
            'auto': 1,
            'comments': 'Submitted by Cortex'
        }
        files = {}
        sample = None
        try:
            # file analysis with internet access
            if self.service == 'file_analysis_inet':
                filepath = self.getParam('file', None, 'File is missing')
                sample = open(filepath, 'rb')
                files['sample'] = sample
                data['type'] = 'file'
                data['inet'] = 1
            # file analysis without internet access
            elif self.service == 'file_analysis_noinet':
                filepath = self.getParam('file', None, 'File is missing')
                sample = open(filepath, 'rb')
                files['sample'] = sample
                data['type'] = 'file'
                data['inet'] = 0
            # url analysis
            elif self.service == 'url_analysis':
                data['url'] = self.getData()
                data['type'] = 'url'
                data['inet'] = 1
            else:
                self.error('Unknown JoeSandbox service')
            # Submit the file/url for analysis
            response = requests.post(self.url + 'api/analysis', files=files,
                                     data=data, timeout=self.networktimeout)
        finally:
            # the original leaked the handle; close it once the upload is done
            if sample is not None:
                sample.close()
        webid = response.json()['webid']
        # Wait for the analysis to finish
        data = {
            'apikey': self.apikey,
            'webid': webid
        }
        finished = False
        tries = 0
        while not finished and tries <= self.analysistimeout / 60:
            time.sleep(60)
            response = requests.post(self.url + 'api/analysis/check',
                                     data=data, timeout=self.networktimeout)
            content = response.json()
            if content['status'] == 'finished':
                finished = True
            tries += 1
        if not finished:
            self.error('JoeSandbox analysis timed out')
        # Download the report
        data = {
            'apikey': self.apikey,
            'webid': webid,
            'type': 'irjsonfixed',
            'run': 0
        }
        response = requests.post(self.url + 'api/analysis/download',
                                 data=data, timeout=self.networktimeout)
        analysis = response.json()['analysis']
        analysis['htmlreport'] = self.url + 'analysis/' + str(analysis['id']) + '/0/html'
        analysis['pdfreport'] = self.url + 'analysis/' + str(analysis['id']) + '/0/pdf'
        self.report(analysis)
    except Exception as e:
        self.unexpectedError(e)
def run(self):
    """Look up a hash or filename in the NSRL reference data set.

    Depending on configuration the lookup either greps the raw NSRL text
    dumps (when self.engine is unset) or queries an SQL database through
    self.engine.  Reports {'records': [...], 'found': bool,
    'mode': 'file'|'db'}.
    """
    Analyzer.run(self)
    data = self.get_param("data", None, "Data is missing")
    data = data.upper()
    if self.data_type not in ['filename', "hash"]:
        self.error("Invalid data type")
    if self.data_type == 'hash':
        # NSRL stores MD5 (32 hex chars) and SHA-1 (40 hex chars)
        md5_re = re.compile(r"^[a-f0-9]{32}(:.+)?$", re.IGNORECASE)
        sha1_re = re.compile(r"^[a-f0-9]{40}(:.+)?$", re.IGNORECASE)
        if md5_re.match(data):
            variable = "md5"
        elif sha1_re.match(data):
            variable = "sha1"
        else:
            self.error("Invalid hash type")
    else:
        variable = "filename"
    results = {"records": []}
    if not self.engine:
        # flat-file mode: grep through the NSRL text dumps.
        # isdir() is False both for missing paths and for non-directories,
        # covering "not found or not valid" in a single check
        if not os.path.isdir(self.nsrl_folder):
            self.error("NSRL folder not found or not valid")
        try:
            # check=True makes grep's exit status 1 ("no match") raise
            # CalledProcessError; the bare Popen used previously never
            # raised it, so 'found' was True even with zero matches
            output = subprocess.run(
                [self.grep_path, "-r", "-i", data, self.nsrl_folder],
                stdout=subprocess.PIPE,
                universal_newlines=True,
                check=True,
            )
            for line in output.stdout.splitlines():
                tmp = {}
                # split only on the first colon: the CSV payload itself
                # may contain colons
                file_path, values = line.strip().split(":", 1)
                values = [x.replace('"', "") for x in values.split(",")]
                for key, value in zip(FIELDS, values):
                    tmp[key] = value
                # file names look like <dbname>_<release>.txt
                tmp["dbname"], tmp["release"] = (
                    file_path.split("/")[-1].replace(".txt", "").split("_"))
                results["records"].append(tmp)
            results["found"] = True
        except subprocess.CalledProcessError:
            results["found"] = False
        results["mode"] = "file"
    else:
        # database mode.
        # NOTE(review): values are still interpolated into the SQL string;
        # quotes are doubled as minimal escaping, but bound parameters
        # would be the proper fix
        columns = ", ".join(FIELDS + ["dbname", "release"])
        safe_data = data.replace("'", "''")
        if variable != 'filename':
            sql = "SELECT %s FROM nsrl WHERE %s='%s'" % (
                columns, variable, safe_data)
        else:
            sql = "SELECT %s FROM nsrl WHERE %s ilike '%s'" % (
                columns, variable, "%%{}%%".format(safe_data))
        values = self.engine.execute(sql)
        self.engine.dispose()
        if values.rowcount > 0:
            for row in values:
                results["records"].append(
                    dict(zip(FIELDS + ["dbname", "release"], row)))
            results["found"] = True
        else:
            results["found"] = False
        results["mode"] = "db"
    self.report(results)
def run(self):
    """Query ThreatCrowd for the observable and enrich every related
    indicator (hashes, resolutions, subdomains, domains, emails) with a
    MISP lookup before reporting.
    """
    # local import: only the 'file' branch needs it and the module header
    # is outside this block
    import hashlib

    # get input data
    Analyzer.run(self)
    data = self.getData()
    try:
        if self.data_type == "email":
            r = requests.get(
                "https://www.threatcrowd.org/searchApi/v2/email/report/",
                params={"email": data})
        elif (self.data_type == "domain") or (self.data_type == "url"):
            r = requests.get(
                "https://www.threatcrowd.org/searchApi/v2/domain/report/",
                params={"domain": data})
        elif self.data_type == "ip":
            r = requests.get(
                "https://www.threatcrowd.org/searchApi/v2/ip/report/",
                params={"ip": data})
        elif self.data_type == "file":
            # the original used the Python 2 'file' builtin and a
            # non-existent .md5() method; compute the sample's MD5 with
            # hashlib and query by it (ThreatCrowd's file endpoint takes
            # the hash in the 'resource' parameter)
            filepath = self.getParam('file', None, 'File is missing')
            with open(filepath, 'rb') as sample:
                file_md5 = hashlib.md5(sample.read()).hexdigest()
            r = requests.get(
                "https://www.threatcrowd.org/searchApi/v2/file/report/",
                params={"resource": file_md5})
        else:
            self.unexpectedError('Unknown ThreatCrowd data type')
        rep = json.loads(r.content.decode())
        # do enrichment with misp search
        if rep.get('hashes'):
            rep['hashes'] = [
                self.doMISPLookup(h, "md5") for h in rep['hashes']
            ]
        if rep.get('resolutions'):
            # loop variable renamed from 'r' to avoid shadowing the
            # HTTP response above
            if (self.data_type == "domain") or (self.data_type == "url"):
                rep['resolutions'] = [
                    self.doMISPLookup(res['ip_address'], "ip", res['last_resolved'])
                    for res in rep['resolutions']
                ]
            elif self.data_type == "ip":
                rep['resolutions'] = [
                    self.doMISPLookup(res['domain'], "domain", res['last_resolved'])
                    for res in rep['resolutions']
                ]
        if rep.get('subdomains'):
            rep['subdomains'] = [
                self.doMISPLookup(s, "domain") for s in rep['subdomains']
            ]
        if rep.get('domains'):
            rep['domains'] = [
                self.doMISPLookup(d, "domain") for d in rep['domains']
            ]
        if rep.get('emails'):
            rep['emails'] = [
                self.doMISPLookup(e, "email") for e in rep['emails']
            ]
        # send result
        self.report(rep)
    except Exception as e:
        self.unexpectedError(e)