def read_iocs(cb, file=sys.stdin):
    """Read indicators of compromise, one per line, from *file*.

    Args:
        cb: API object used to validate free-form query lines.
        file: Iterable of lines to read (defaults to standard input).

    Returns:
        tuple: (report ID hex digest, dict mapping IOC type to list of IOCs).
    """
    iocs = defaultdict(list)
    # Report ID is an MD5 over the current timestamp plus every input line.
    report_id = hashlib.md5()
    report_id.update(str(time.time()).encode("utf-8"))
    # BUG FIX: iterate the supplied `file` object instead of always reading
    # sys.stdin, so the parameter is actually honored.
    for idx, line in enumerate(file):
        line = line.rstrip("\r\n")
        report_id.update(line.encode("utf-8"))
        if validators.md5(line):
            iocs["md5"].append(line)
        elif validators.sha256(line):
            eprint("line {}: sha256 provided but not yet supported by backend".format(idx + 1))
            iocs["sha256"].append(line)
        elif validators.ipv4(line):
            iocs["ipv4"].append(line)
        elif validators.ipv6(line):
            iocs["ipv6"].append(line)
        elif validators.domain(line):
            iocs["dns"].append(line)
        else:
            # Anything unrecognized is treated as a raw search query.
            if cb.validate_query(line):
                iocs["query"].append({"search_query": line})
            else:
                eprint("line {}: invalid query".format(idx + 1))
    return (report_id.hexdigest(), dict(iocs))
def process_iocs(results):
    """Return data formatted for Splunk from urlscan.io.

    Args:
        results: Iterable of dicts whose values are IOCs, or None to fall
            back to command-line arguments.

    Returns:
        list: One dict per result row for the Splunk table.
    """
    # Idiom fix: compare against None with `is not`, not `!=`.
    if results is not None:
        provided_iocs = [y for x in results for y in x.values()]
    else:
        provided_iocs = sys.argv[1:]
    session = commons.create_session()
    splunk_table = []
    for provided_ioc in set(provided_iocs):
        provided_ioc = commons.deobfuscate_url(provided_ioc)
        # SHA256 of the empty file -- nothing useful to look up.
        if provided_ioc == "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855":
            splunk_table.append({"no data": provided_ioc})
            continue
        if validators.domain(provided_ioc) or validators.ipv4(provided_ioc) or \
           validators.sha256(provided_ioc) or "certstream-suspicious" in provided_ioc:
            ioc_dicts = query_urlscan(session, provided_ioc)
        else:
            splunk_table.append({"invalid": provided_ioc})
            continue
        for ioc_dict in ioc_dicts:
            splunk_table.append(ioc_dict)
    session.close()
    return splunk_table
def read_iocs(cb, file=sys.stdin):
    """Read indicators of compromise, one per line, from *file*.

    Args:
        cb: API object used to validate free-form query lines.
        file: Iterable of lines to read (defaults to standard input).

    Returns:
        tuple: (report ID hex digest, dict mapping IOC type to list of IOCs).
    """
    iocs = defaultdict(list)
    # Report ID is an MD5 over the current timestamp plus every input line.
    report_id = hashlib.md5()
    report_id.update(str(time.time()).encode("utf-8"))
    # BUG FIX: iterate the supplied `file` object instead of always reading
    # sys.stdin, so the parameter is actually honored.
    for idx, line in enumerate(file):
        line = line.rstrip("\r\n")
        report_id.update(line.encode("utf-8"))
        if validators.md5(line):
            iocs["md5"].append(line)
        elif validators.sha256(line):
            eprint("line {}: sha256 provided but not yet supported by backend".format(idx + 1))
            iocs["sha256"].append(line)
        elif validators.ipv4(line):
            iocs["ipv4"].append(line)
        elif validators.ipv6(line):
            iocs["ipv6"].append(line)
        elif validators.domain(line):
            iocs["dns"].append(line)
        else:
            # Anything unrecognized is treated as a raw search query.
            if cb.validate_query(line):
                iocs["query"].append({"search_query": line})
            else:
                eprint("line {}: invalid query".format(idx + 1))
    return (report_id.hexdigest(), dict(iocs))
def checkType(self, argument):
    """Identify the observable type of *argument*.

    Returns:
        One of "URL", "MD5", "SHA1", "SHA256", "SHA512", "IPv4", "IPv6",
        "domain", or None for empty/comment/unrecognized input.
    """
    if not argument or len(argument.strip()) == 0:
        return None
    # BUG FIX: `is` compares object identity, not value; comparing a
    # character with a string literal must use `==` (the original only
    # worked by accident through CPython string interning).
    elif argument[0] == '#':
        # Lines starting with '#' are comments, not observables.
        return None
    elif validators.url(argument):
        return "URL"
    elif validators.md5(argument):
        return "MD5"
    elif validators.sha1(argument):
        return "SHA1"
    elif validators.sha256(argument):
        return "SHA256"
    elif validators.sha512(argument):
        return "SHA512"
    elif validators.ipv4(argument):
        return "IPv4"
    elif validators.ipv6(argument):
        return "IPv6"
    elif validators.domain(argument):
        return "domain"
    else:
        return None
def run(self, params={}):
    """Blacklist or unblacklist a SHA256 hash.

    NOTE: the mutable default `params={}` is kept for caller
    compatibility; params is only read, never mutated here.

    Args:
        params: Plugin inputs (HASH, BLACKLIST_STATE, DESCRIPTION).

    Returns:
        dict: {Output.SUCCESS: bool}

    Raises:
        PluginException: On an invalid hash, or when trying to
            unblacklist a hash that is not blacklisted.
    """
    success = False
    hash_input = params.get(Input.HASH)
    if not validators.sha256(hash_input):
        raise PluginException(cause="An invalid hash was provided.",
                              assistance="Please enter a SHA256 hash and try again.")
    if params.get(Input.BLACKLIST_STATE):
        action = self.connection.client.blacklist(hash_input, params.get(Input.DESCRIPTION))
        success = action.get("id") is not None
    else:
        # BUG FIX: the original raised "not in the blacklist" as soon as the
        # hash was missing from the *current* page (and reset `uuid` on every
        # page, even after a successful match), so pagination never worked.
        # Search all pages first; raise only after they are exhausted.
        uuid = None
        for page in range(1, 9999):
            list_of_blacklist_item = self.connection.client.get_blacklists(page)
            for e in list_of_blacklist_item.get("items", []):
                if e.get("properties", {}).get("sha256") == hash_input:
                    uuid = e.get("id")
                    break
            if uuid is not None:
                break
            if page + 1 > list_of_blacklist_item.get("pages", {}).get("total"):
                break
        if uuid is None:
            raise PluginException(cause="Unable to unblacklist a hash that is not in the blacklist.",
                                  assistance="Please provide a hash that is already blacklisted.")
        action = self.connection.client.unblacklist(uuid)
        success = action.get("deleted") is not None
    return {Output.SUCCESS: success}
def checkType(self, argument):
    """Identify the observable type of *argument*.

    Returns:
        One of "URL", "MD5", "SHA1", "SHA256", "SHA512", "IPv4", "IPv6",
        "domain", or None for empty/comment/unrecognized input.
    """
    # ROBUSTNESS FIX: guard against None before calling .strip() -- the
    # original raised AttributeError on a None argument.
    if not argument or len(argument.strip()) == 0:
        return None
    # BUG FIX: `is` compares identity, not value; use `==` for the
    # comment-marker character comparison.
    elif argument[0] == '#':
        return None
    elif validators.url(argument):
        return "URL"
    elif validators.md5(argument):
        return "MD5"
    elif validators.sha1(argument):
        return "SHA1"
    elif validators.sha256(argument):
        return "SHA256"
    elif validators.sha512(argument):
        return "SHA512"
    elif validators.ipv4(argument):
        return "IPv4"
    elif validators.ipv6(argument):
        return "IPv6"
    elif validators.domain(argument):
        return "domain"
    else:
        mod.display("MAIN", argument, "ERROR", "Unable to retrieve observable type")
        return None
def handle_data(self, data):
    """Collect URLs and SHA256 payload hashes from a parsed text node."""
    # Both checks are independent: a node is tested as a URL and as a hash.
    is_url = self.url_match.match(data)
    if is_url:
        self.parsed_urls.append(data)
    is_sha256 = validators.sha256(data)
    if is_sha256:
        self.parsed_payloads.append(data)
def __init__(self, cb, model_unique_id):
    """Initialize a Binary from its SHA-256 hash.

    Args:
        cb: Reference to the API object used to fetch the binary.
        model_unique_id (str): SHA-256 of the binary being retrieved.

    Raises:
        ApiError: If model_unique_id is not a valid SHA256 string.
    """
    if not validators.sha256(model_unique_id):
        raise ApiError("model_unique_id must be a valid SHA256")
    resource_url = self.urlobject_single.format(cb.credentials.org_key, model_unique_id)
    fetched = cb.get_object(resource_url)
    super(Binary, self).__init__(
        cb,
        model_unique_id=model_unique_id,
        initial_data=fetched,
        force_init=False,
        full_doc=True,
    )
def process_iocs(results):
    """Return data formatted for Splunk from Twitter.

    Args:
        results: Iterable of dicts whose values are IOCs, or None to fall
            back to command-line arguments.

    Returns:
        list | dict: Splunk table rows, or an error dict when more than
        180 search terms are provided.
    """
    # Idiom fix: compare against None with `is not`, not `!=`.
    if results is not None:
        provided_iocs = [y for x in results for y in x.values()]
    else:
        provided_iocs = sys.argv[1:]
    if len(provided_iocs) > 180:
        return {
            "error": "Search term limit: 180\nTotal Search Terms Provided: {}".format(
                len(provided_iocs))
        }
    session = create_session()
    splunk_table = []
    # create_session/check_rate_limit return a dict to signal an error.
    if isinstance(session, dict):
        splunk_table.append(session)
        return splunk_table
    rate_limit = check_rate_limit(session, provided_iocs)
    if isinstance(rate_limit, dict):
        splunk_table.append(rate_limit)
        return splunk_table
    # MD5/SHA256 of the empty file -- nothing useful to search for.
    empty_files = [
        "d41d8cd98f00b204e9800998ecf8427e",
        "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
    ]
    # (Removed a redundant `splunk_table = []` reset; the table is still
    # empty at this point on every fall-through path.)
    for provided_ioc in set(provided_iocs):
        # De-obfuscate common defanging of dots.
        provided_ioc = provided_ioc.replace("[.]", ".")
        provided_ioc = provided_ioc.replace("[d]", ".")
        provided_ioc = provided_ioc.replace("[D]", ".")
        if provided_ioc in empty_files:
            splunk_table.append({"invalid": provided_ioc})
            continue
        # Accept recognized IOC formats, or any free-text term that fits
        # in a tweet-sized query (3..140 chars).
        if validators.url(provided_ioc) or validators.domain(provided_ioc) or \
           validators.ipv4(provided_ioc) or validators.md5(provided_ioc) or \
           validators.sha256(provided_ioc) or 2 < len(provided_ioc) <= 140:
            ioc_dicts = query_twitter(session, provided_ioc)
        else:
            splunk_table.append({"invalid": provided_ioc})
            continue
        for ioc_dict in ioc_dicts:
            ioc_dict = commons.lower_keys(ioc_dict)
            splunk_table.append(ioc_dict)
    return splunk_table
def parse_items(items: Optional[str]) -> List[Attr]:
    """Parse newline-separated indicator strings into Attr objects.

    Each non-empty line has its whitespace removed and is URL-decoded,
    then classified as a domain, URL, defanged URL, IP, MD5, SHA1 or
    SHA256. URLs may be rewritten (canonicalized or re-fanged) before
    being stored in the resulting Attr.

    Args:
        items: Newline-separated indicators, or None/empty for no items.

    Returns:
        List of Attr objects with value, type, search_types and
        report_types populated.

    Raises:
        ParseException: If a line matches none of the known formats.
    """
    parsed_items: List[Attr] = []
    if not items:
        return parsed_items
    for item in items.split('\n'):
        if item:
            item = ''.join(item.split())  # Normalize whitespace
            item = urllib.parse.unquote_plus(item)
            # Classification order matters: domain before URL, and the
            # fanged URL check before the defanged (hxxp) one.
            if domain(item):
                typ = AttrType.DOMAIN
                search_types = [AttrType.DOMAIN]
                report_types = [AttrType.DOMAIN]
            elif url(item):
                typ = AttrType.URL
                search_types = [AttrType.URL]
                report_types = [AttrType.URL]
                # Remove arguments from URLs
                item = get_canonical_url(item)
            elif defanged_url(item):
                typ = AttrType.URL
                search_types = [AttrType.URL]
                report_types = [AttrType.URL]
                # MISP wants a correct URL, so replace hxx with htt
                item = item.replace('hxx', 'htt', 1)
            elif ipv4(item) or ipv6(item):
                # IPs are searched across every IP-bearing attribute type.
                typ = AttrType.IP_SRC
                search_types = [
                    AttrType.DOMAIN_IP,
                    AttrType.IP_SRC,
                    AttrType.IP_SRC_PORT,
                    AttrType.IP_DST,
                    AttrType.IP_DST_PORT,
                ]
                report_types = [AttrType.IP_SRC]
            elif md5(item):
                typ = AttrType.MD5
                search_types = [AttrType.MD5, AttrType.FILENAME_MD5]
                report_types = [AttrType.MD5]
            elif sha1(item):
                typ = AttrType.SHA1
                search_types = [AttrType.SHA1, AttrType.FILENAME_SHA1]
                report_types = [AttrType.SHA1]
            elif sha256(item):
                typ = AttrType.SHA256
                search_types = [AttrType.SHA256, AttrType.FILENAME_SHA256]
                report_types = [AttrType.SHA256]
            else:
                raise ParseException(f'Could not parse {item}')
            parsed_items.append(Attr(value=item, type=typ, search_types=search_types, report_types=report_types))
    return parsed_items
def __init__(self, cb, model_unique_id):
    """
    Initialize the Binary object.

    Args:
        cb (CBCloudAPI): A reference to the CBCloudAPI object.
        model_unique_id (str): The SHA-256 of the binary being retrieved.

    Raises:
        ApiError: If model_unique_id is not a valid SHA-256 string.
    """
    if not validators.sha256(model_unique_id):
        raise ApiError("model_unique_id must be a valid SHA256")
    resource_url = self.urlobject_single.format(cb.credentials.org_key, model_unique_id)
    fetched = cb.get_object(resource_url)
    super(Binary, self).__init__(
        cb,
        model_unique_id=model_unique_id,
        initial_data=fetched,
        force_init=False,
        full_doc=True,
    )
def hash_scanner():
    """Scan hashes from hashs.txt against the VirusTotal file-report API.

    Prints one line per hash: '<hash> => Clean', '<hash> => Malicious',
    or '<hash> => invalid'. Sleeps 60 seconds after every 4 lookups to
    respect the free-tier quota of 4 requests/minute.
    """
    url = 'https://www.virustotal.com/vtapi/v2/file/report'
    checked = 0
    # FIX: use `with` so the file handle is always closed.
    with open("hashs.txt", "r") as hashes:  # make sure you have this file existing
        for raw_line in hashes:
            # BUG FIX: strip the newline *before* validating -- the original
            # passed the raw line (with '\n') to validators.md5/sha1/sha256,
            # so every hash failed validation and printed "invalid".
            file_hash = raw_line.rstrip('\n')
            if file_hash == '':
                continue
            if validators.md5(file_hash) or validators.sha1(file_hash) or \
                    validators.sha256(file_hash):
                # Public API quota: 4 requests/minute. If you have a private
                # API key, change the delay to 1 second.
                if checked and checked % 4 == 0:
                    time.sleep(60)
                checked += 1
                params = {'apikey': api_key, 'resource': file_hash}
                response = requests.get(url, params=params)
                if response.status_code == 200:
                    json_response = response.json()
                    if json_response['response_code'] == 1:
                        # BUG FIX: only read 'positives' when the report
                        # exists; the original hit a KeyError otherwise.
                        # A single detection qualifies as malicious.
                        if json_response['positives'] == 0:
                            result = ' => Clean'
                        else:
                            result = ' => Malicious'
                        print(file_hash + result)
                    else:
                        print('There was an error submitting the File_Hash for scanning.')
                elif response.status_code == 204:
                    print('You may have exceeded your API request quota, try again later.')
                    break
                elif response.status_code == 403:
                    print('Check Your API Key Please.')
                    break
            else:
                # [Usage] Your Hash Must Be 32 or 40 or 64 Alpha Numeric characters.
                print(file_hash + " => invalid\n")
def process_iocs(provided_iocs):
    """Return a list of strings ('URL, Payload, URLHaus Link').

    Args:
        provided_iocs: Iterable of IOC strings (URLs, domains, MD5/SHA256).

    Returns:
        list: One CSV-style string per result.
    """
    lookup_path = '/opt/splunk/etc/apps/osweep/lookups'
    global data_feed
    # FIX: use `with` so the lookup file is closed even on read errors.
    with open('{}/urlhaus_url_feed.csv'.format(lookup_path), 'r') as feed_file:
        data_feed = feed_file.read().splitlines()
    # MD5/SHA256 of the empty file -- nothing useful to look up.
    empty_files = [
        'd41d8cd98f00b204e9800998ecf8427e',
        'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
    ]
    ioc_list = []
    # is_url = re.compile(r'^h..ps?:\/\/.+\.\w{2,}')
    global parser
    parser = ParserHTML()
    for provided_ioc in set(provided_iocs):
        # De-obfuscate common defanging (hxxp / hxtp / bracketed dots).
        provided_ioc = provided_ioc.replace('hxxp', 'http')
        provided_ioc = provided_ioc.replace('hxtp', 'http')
        provided_ioc = provided_ioc.replace('[.]', '.')
        provided_ioc = provided_ioc.replace('[d]', '.')
        provided_ioc = provided_ioc.replace('[D]', '.')
        if provided_ioc in empty_files:
            invalid_str = 'N/A,N/A,{},{}'.format(provided_ioc, provided_ioc)
            ioc_list.append(invalid_str)
            continue
        if validators.url(provided_ioc) or validators.domain(provided_ioc):  # is_url.match was here
            analysis_strs = get_analysis(provided_ioc)
            ioc_strs = get_payloads(analysis_strs)
        elif validators.md5(provided_ioc) or validators.sha256(provided_ioc):
            ioc_strs = get_urls(provided_ioc)
        else:
            invalid_str = 'N/A,N/A,N/A,{}'.format(provided_ioc)
            ioc_list.append(invalid_str)
            continue
        for ioc_str in ioc_strs:
            ioc_list.append(ioc_str)
    return ioc_list
def process_iocs(results):
    """Return data formatted for Splunk from urlscan.io.

    Args:
        results: Iterable of dicts whose values are IOCs, or None to fall
            back to command-line arguments.

    Returns:
        list: One dict per result row for the Splunk table.
    """
    # Idiom fix: compare against None with `is not`, not `!=`.
    if results is not None:
        provided_iocs = [y for x in results for y in x.values()]
    # ROBUSTNESS FIX: guard len(sys.argv) before indexing sys.argv[1] --
    # the original raised IndexError when no arguments were given.
    elif len(sys.argv) > 1 and sys.argv[1] in usfs.queries.keys():
        if len(sys.argv[1:]) < 3:
            return [{
                "error": "3 positional args needed. {} given.".format(
                    str(len(sys.argv[1:])))
            }]
        provided_iocs = sys.argv[3:]
    else:
        provided_iocs = sys.argv[1:]
    session = commons.create_session()
    splunk_table = []
    for provided_ioc in set(provided_iocs):
        provided_ioc = commons.deobfuscate_string(provided_ioc)
        # SHA256 of the empty file -- nothing useful to look up.
        if provided_ioc == "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855":
            splunk_table.append({"no data": provided_ioc})
            continue
        if provided_ioc.lower() in usfs.extensions.keys():
            ioc_dicts = query_urlscan_file(session, provided_ioc)
        elif validators.domain(provided_ioc) or validators.ipv4(provided_ioc) or \
                validators.sha256(provided_ioc) or "certstream-suspicious" in provided_ioc:
            ioc_dicts = query_urlscan(session, provided_ioc)
        else:
            splunk_table.append({"invalid": provided_ioc})
            continue
        for ioc_dict in ioc_dicts:
            # Rows without an "ip" key carry no usable data.
            if "ip" not in ioc_dict:
                splunk_table.append({"no data": provided_ioc})
                continue
            splunk_table.append(ioc_dict)
    session.close()
    return splunk_table
def process_iocs(provided_iocs):
    """Return data from urlscan.io API."""
    splunk_table = []
    api = 'https://urlscan.io/api/v1/search/?size=10000&q='
    uagent = 'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko'
    for provided_ioc in set(provided_iocs):
        # Only domains, IPv4 addresses and MD5/SHA256 hashes are queryable.
        supported = (validators.domain(provided_ioc) or
                     validators.ipv4(provided_ioc) or
                     validators.md5(provided_ioc) or
                     validators.sha256(provided_ioc))
        if not supported:
            splunk_table.append(invalid_dict(provided_ioc))
            continue
        resp = requests.get('{}{}'.format(api, provided_ioc),
                            headers={"User-Agent": uagent})
        splunk_table.extend(rename_dicts(resp.json()["results"]))
    return splunk_table
def _get_type(indicator):
    """Map an indicator string to the API's indicator-type name.

    Raises:
        PluginException: For MD5 hashes (unsupported by the API) or when
            the indicator matches no known format.
    """
    # Guard-clause style: each recognized format returns immediately.
    if validators.ipv4(indicator) or validators.ipv6(indicator):
        return "IpAddress"
    if validators.url(indicator):
        return "Url"
    if validators.domain(indicator):
        return "DomainName"
    if validators.sha1(indicator):
        return "FileSha1"
    if validators.sha256(indicator):
        return "FileSha256"
    if validators.md5(indicator):
        raise PluginException(
            cause="MD5 hash is not supported.",
            assistance="API supported only SHA256 and SHA1. Please check provided hash and try again.",
        )
    raise PluginException(cause="Could not determine type of indicator.",
                          assistance="Indicator not added.")
def process_iocs(results):
    """Return data formatted for Splunk from Cymon.

    Args:
        results: Iterable of dicts whose values are IOCs, or None to fall
            back to command-line arguments.

    Returns:
        list: One dict per result row for the Splunk table.
    """
    # Idiom fix: compare against None with `is not`, not `!=`.
    if results is not None:
        provided_iocs = [y for x in results for y in x.values()]
    else:
        provided_iocs = sys.argv[1:]
    session = commons.create_session()
    splunk_table = []
    for provided_ioc in provided_iocs:
        # De-obfuscate common defanging of dots.
        provided_ioc = provided_ioc.replace("[.]", ".")
        provided_ioc = provided_ioc.replace("[d]", ".")
        provided_ioc = provided_ioc.replace("[D]", ".")
        if validators.ipv4(provided_ioc):
            ioc_type = "ip"
        elif validators.domain(provided_ioc):
            ioc_type = "domain"
        elif validators.md5(provided_ioc):
            ioc_type = "md5"
        elif validators.sha256(provided_ioc):
            ioc_type = "sha256"
        else:
            splunk_table.append({"invalid": provided_ioc})
            continue
        ioc_dicts = query_cymon(ioc_type, session, provided_ioc)
        # A dict (rather than a list) signals an error for this IOC.
        if isinstance(ioc_dicts, dict):
            splunk_table.append(ioc_dicts)
            continue
        for ioc_dict in ioc_dicts:
            ioc_dict = commons.lower_keys(ioc_dict)
            splunk_table.append(ioc_dict)
    session.close()
    return splunk_table
def read_iocs(cb, file=sys.stdin):
    """
    Read indicators of compromise, one per line, from *file*.

    Args:
        cb (CBCloudAPI): Reference to the CBCloudAPI.
        file: Iterable of lines to read (defaults to standard input).

    Returns:
        str: New report ID to be used.
        dict: The indicators of compromise that were read in.
    """
    iocs = defaultdict(list)
    # Report ID is an MD5 over the current timestamp plus every input line.
    report_id = hashlib.md5()
    report_id.update(str(time.time()).encode("utf-8"))
    # BUG FIX: iterate the supplied `file` object instead of always reading
    # sys.stdin (the old docstring even admitted "file: Not used").
    for idx, line in enumerate(file):
        line = line.rstrip("\r\n")
        report_id.update(line.encode("utf-8"))
        if validators.md5(line):
            iocs["md5"].append(line)
        elif validators.sha256(line):
            eprint("line {}: sha256 provided but not yet supported by backend".format(idx + 1))
            iocs["sha256"].append(line)
        elif validators.ipv4(line):
            iocs["ipv4"].append(line)
        elif validators.ipv6(line):
            iocs["ipv6"].append(line)
        elif validators.domain(line):
            iocs["dns"].append(line)
        else:
            # Anything unrecognized is treated as a raw search query.
            if cb.validate_query(line):
                iocs["query"].append({"search_query": line})
            else:
                eprint("line {}: invalid query".format(idx + 1))
    return (report_id.hexdigest(), dict(iocs))
def checkType(self, argument):
    """Identify IOC type; returns its label or None when unrecognized."""
    # Ordered probe table preserves the original classification priority.
    probes = (
        (validators.url, "URL"),
        (validators.md5, "MD5"),
        (validators.sha1, "SHA1"),
        (validators.sha256, "SHA256"),
        (validators.sha512, "SHA512"),
        (validators.ipv4, "IPv4"),
        (validators.ipv6, "IPv6"),
        (validators.domain, "domain"),
    )
    for probe, label in probes:
        if probe(argument):
            return label
    display("MAIN", argument, "ERROR", "Unable to retrieve IOC type")
    return None
def test_returns_true_on_valid_sha256(value):
    """A valid SHA256 string validates truthily."""
    outcome = validators.sha256(value)
    assert outcome
def test_returns_failed_validation_on_invalid_sha256(value):
    """An invalid SHA256 yields a ValidationFailure instance."""
    failure = validators.sha256(value)
    assert isinstance(failure, validators.ValidationFailure)
def process_iocs(results):
    """Return data formatted for Splunk from URLhaus.

    Args:
        results: Iterable of dicts whose values are IOCs, or None to fall
            back to command-line arguments.

    Returns:
        list: One dict per result row for the Splunk table.
    """
    # Idiom fix: compare against None with `is not`, not `!=`.
    if results is not None:
        provided_iocs = [y for x in results for y in x.values()]
    else:
        provided_iocs = sys.argv[1:]
    session = commons.create_session()
    lookup_path = "/opt/splunk/etc/apps/osweep/lookups"
    # FIX: use `with` so the lookup file is always closed.
    with open("{}/urlhaus_url_feed.csv".format(lookup_path), "r") as feed_file:
        contents = feed_file.read().splitlines()
    header = contents[0].split(",")
    global data_feed
    data_feed = []
    # BUG FIX: skip the header row -- the original zipped the header with
    # itself and appended that bogus record to data_feed.
    for line in contents[1:]:
        line = line.split(",")
        ioc_dict = OrderedDict(zip(header, line))
        data_feed.append(ioc_dict)
    global parser
    parser = ParserHTML()
    # MD5/SHA256 of the empty file -- nothing useful to look up.
    empty_files = [
        "d41d8cd98f00b204e9800998ecf8427e",
        "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
    ]
    # URLhaus's own site is not a valid lookup target.
    urlhaus_match = re.compile(r"^h..ps?:\/\/urlhaus\.abuse\.ch")
    splunk_table = []
    for provided_ioc in set(provided_iocs):
        provided_ioc = commons.deobfuscate_url(provided_ioc)
        if provided_ioc in empty_files:
            splunk_table.append({"invalid": provided_ioc})
            continue
        if urlhaus_match.match(provided_ioc):
            splunk_table.append({"invalid": provided_ioc})
            continue
        if validators.url(provided_ioc) or validators.domain(provided_ioc) or \
                validators.ipv4(provided_ioc):
            analysis_dicts = get_analysis(provided_ioc)
            # A dict (rather than a list) signals an error for this IOC.
            if isinstance(analysis_dicts, dict):
                splunk_table.append(analysis_dicts)
                continue
            ioc_dicts = get_payloads(analysis_dicts, session)
        elif validators.md5(provided_ioc) or validators.sha256(provided_ioc):
            ioc_dicts = get_urls(session, provided_ioc)
        else:
            splunk_table.append({"invalid": provided_ioc})
            continue
        for ioc_dict in ioc_dicts:
            ioc_dict = commons.lower_keys(ioc_dict)
            splunk_table.append(ioc_dict)
        # Throttle between IOC queries. NOTE(review): the collapsed source
        # makes the sleep's original loop level ambiguous -- confirm it
        # belongs per-IOC rather than per-row.
        time.sleep(1)
    session.close()
    return splunk_table