Exemplo n.º 1
0
def query_threatcrowd(provided_ioc, ioc_type, session):
    """Pivot off an IP or domain and return results as a list of dicts.

    Parameters
    ----------
    provided_ioc : str
        The indicator (IP, domain, or file hash) to look up.
    ioc_type : str
        ThreatCrowd resource type; "resource" is queried as a "file".
    session : requests.Session
        Session used to issue the HTTP request.

    Returns
    -------
    list of dict
        One dict per data point, or ``[{"no data": provided_ioc}]`` when
        the lookup returns nothing usable.
    """
    ioc_dicts = []

    # NOTE(review): `api` is a module-level URL template defined elsewhere
    # in this file — presumably "<base>/{}/report/?{}={}" — confirm.
    if ioc_type == "resource":
        resp = session.get(api.format("file", ioc_type, provided_ioc),
                           timeout=180)
    else:
        resp = session.get(api.format(ioc_type, ioc_type, provided_ioc),
                           timeout=180)

    # Parse the response body once instead of on every access.
    data = resp.json() if resp.status_code == 200 else {}

    if "permalink" in data and provided_ioc in data["permalink"]:
        for key, value in data.items():
            # Metadata keys that are not indicator data.
            if key in ("votes", "permalink", "response_code"):
                continue
            if key in ("md5", "sha1"):
                ioc_dicts.append({key: value})
            elif key == "resolutions":
                for resolution in value:
                    ioc_dicts.append(commons.lower_keys(resolution))
            else:
                # Lower the key once; the original rebound the loop
                # variable `key` inside this inner loop, which was
                # harmless only by accident.
                lowered_key = commons.lower_keys(key)
                for item in value:
                    ioc_dicts.append({lowered_key: item})
    else:
        ioc_dicts.append({"no data": provided_ioc})
    return ioc_dicts
Exemplo n.º 2
0
def process_iocs(results):
    """Return data formatted for Splunk from Twitter.

    Parameters
    ----------
    results : list of dict or None
        Prior search results whose values are used as IOCs; when None,
        IOCs are taken from the command line (``sys.argv[1:]``).

    Returns
    -------
    list of dict or dict
        Splunk-ready rows, or a single ``{"error": ...}`` dict when more
        than 180 search terms are supplied.
    """
    if results is not None:
        provided_iocs = [y for x in results for y in x.values()]
    else:
        provided_iocs = sys.argv[1:]

    # Twitter search caps the number of terms per run.
    if len(provided_iocs) > 180:
        return {
            "error":
            "Search term limit: 180\nTotal Search Terms Provided: {}".format(
                len(provided_iocs))
        }

    session = create_session()
    splunk_table = []

    # create_session() signals failure by returning an error dict.
    if isinstance(session, dict):
        splunk_table.append(session)
        return splunk_table

    rate_limit = check_rate_limit(session, provided_iocs)
    if isinstance(rate_limit, dict):
        splunk_table.append(rate_limit)
        return splunk_table

    # MD5 and SHA256 of a zero-byte file; looking these up is useless.
    empty_files = [
        "d41d8cd98f00b204e9800998ecf8427e",
        "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
    ]

    for provided_ioc in set(provided_iocs):
        # Deobfuscate defanged indicators (e.g. example[.]com).
        provided_ioc = provided_ioc.replace("[.]", ".")
        provided_ioc = provided_ioc.replace("[d]", ".")
        provided_ioc = provided_ioc.replace("[D]", ".")

        if provided_ioc in empty_files:
            splunk_table.append({"invalid": provided_ioc})
            continue

        # Accept URLs, domains, IPv4s, hashes, or any short free-text
        # term (tweet-length: 3-140 characters).
        is_searchable = (validators.url(provided_ioc) or
                         validators.domain(provided_ioc) or
                         validators.ipv4(provided_ioc) or
                         validators.md5(provided_ioc) or
                         validators.sha256(provided_ioc) or
                         2 < len(provided_ioc) <= 140)

        if is_searchable:
            ioc_dicts = query_twitter(session, provided_ioc)
        else:
            splunk_table.append({"invalid": provided_ioc})
            continue

        for ioc_dict in ioc_dicts:
            splunk_table.append(commons.lower_keys(ioc_dict))
    return splunk_table
Exemplo n.º 3
0
def process_iocs(results):
    """Return data formatted for Splunk from URLhaus.

    Parameters
    ----------
    results : list of dict or None
        Prior search results whose values are used as IOCs; when None,
        IOCs are taken from the command line (``sys.argv[1:]``).

    Returns
    -------
    list of dict
        Splunk-ready rows; invalid indicators yield ``{"invalid": ioc}``.
    """
    if results is not None:
        provided_iocs = [y for x in results for y in x.values()]
    else:
        provided_iocs = sys.argv[1:]

    session = commons.create_session()
    # MD5 and SHA256 of a zero-byte file; looking these up is useless.
    empty_files = [
        "d41d8cd98f00b204e9800998ecf8427e",
        "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
    ]
    # Reject URLhaus's own (possibly defanged) URLs as search terms.
    urlhaus_match = re.compile(r"^h..ps?:\/\/urlhaus\.abuse\.ch")
    # Raw strings avoid invalid-escape warnings; compile once, not per IOC.
    md5_match = re.compile(r"^[a-f\d]{32}$")
    sha256_match = re.compile(r"^[a-f\d]{64}$")
    splunk_table = []

    for provided_ioc in set(provided_iocs):
        provided_ioc = commons.deobfuscate_string(provided_ioc)

        if provided_ioc in empty_files:
            splunk_table.append({"invalid": provided_ioc})
            continue

        if urlhaus_match.match(provided_ioc):
            splunk_table.append({"invalid": provided_ioc})
            continue

        if validators.domain(provided_ioc) or validators.ipv4(provided_ioc):
            ioc_type = "host"
        elif validators.url(provided_ioc):
            ioc_type = "url"
        elif md5_match.match(provided_ioc) or sha256_match.match(provided_ioc):
            ioc_type = "payload"
        else:
            splunk_table.append({"invalid": provided_ioc})
            continue

        ioc_dicts = query_urlhaus(session, provided_ioc, ioc_type)

        for ioc_dict in ioc_dicts:
            splunk_table.append(commons.lower_keys(ioc_dict))

    session.close()
    return splunk_table
Exemplo n.º 4
0
def psbdmp_search(provided_ioc, session):
    """Search psbdmp.ws for pastes containing the given IOC.

    Parameters
    ----------
    provided_ioc : str
        Indicator to search for.
    session : requests.Session
        Session used to issue the HTTP request.

    Returns
    -------
    list of dict
        One dict per paste hit (with ``provided_ioc`` attached), or
        ``[{"no data": provided_ioc}]`` when nothing is found.
    """
    base_url = "https://psbdmp.ws/api/search/{}"
    url = base_url.format(provided_ioc)
    resp = session.get(url, timeout=180)
    psd_dicts = []

    # Parse the body once; .get() avoids a KeyError if the API omits
    # the "error" or "data" keys.
    payload = resp.json() if resp.status_code == 200 else {}

    if payload.get("error") != 1 and payload.get("data"):
        for result in payload["data"]:
            result = commons.lower_keys(result)
            result.update({"provided_ioc": provided_ioc})
            psd_dicts.append(result)
    else:
        psd_dicts.append({"no data": provided_ioc})
    return psd_dicts
Exemplo n.º 5
0
def process_iocs(results):
    """Return data formatted for Splunk from GreyNoise.

    Parameters
    ----------
    results : list of dict or None
        Prior search results whose values are used as IOCs; when None,
        IOCs are taken from the command line (``sys.argv[1:]``).

    Returns
    -------
    list of dict
        Matching feed rows, ``{"invalid": ioc}`` for rejected inputs,
        or ``{"no data": ioc}`` when nothing matches.
    """
    if results is not None:
        provided_iocs = [y for x in results for y in x.values()]
    else:
        provided_iocs = sys.argv[1:]

    splunk_table = []
    lookup_path = "/opt/splunk/etc/apps/osweep/lookups"

    # Context managers guarantee the lookup files close even on error.
    with open("{}/greynoise_feed.csv".format(lookup_path), "r") as open_file:
        data_feed = open_file.read().splitlines()
    header = data_feed[0].split(",")

    with open("{}/greynoise_scanners.csv".format(lookup_path), "r") as open_file:
        # Skip the CSV header, dedupe, and normalize case for lookups.
        scanners = [x.lower() for x in set(open_file.read().splitlines()[1:])]

    for provided_ioc in set(provided_iocs):
        # Deobfuscate defanged indicators (e.g. example[.]com).
        provided_ioc = provided_ioc.replace("[.]", ".")
        provided_ioc = provided_ioc.replace("[d]", ".")
        provided_ioc = provided_ioc.replace("[D]", ".")

        # Reject anything that is neither an IPv4, a domain, nor a
        # known scanner name.
        if not validators.ipv4(provided_ioc) and \
           not validators.domain(provided_ioc) and \
           provided_ioc.lower() not in scanners:
            splunk_table.append({"invalid": provided_ioc})
            continue

        line_found = False
        ioc_lower = provided_ioc.lower()  # hoisted out of the feed scan

        for line in data_feed:
            if ioc_lower in line.lower():
                line_found = True
                scanner_dict = OrderedDict(zip(header, line.split(",")))
                splunk_table.append(commons.lower_keys(scanner_dict))

        if not line_found:
            splunk_table.append({"no data": provided_ioc})
    return splunk_table
Exemplo n.º 6
0
def process_iocs(results):
    """Return data formatted for Splunk from Cymon.

    Parameters
    ----------
    results : list of dict or None
        Prior search results whose values are used as IOCs; when None,
        IOCs are taken from the command line (``sys.argv[1:]``).

    Returns
    -------
    list of dict
        Splunk-ready rows; invalid indicators yield ``{"invalid": ioc}``.
    """
    if results is not None:
        provided_iocs = [y for x in results for y in x.values()]
    else:
        provided_iocs = sys.argv[1:]

    session = commons.create_session()
    splunk_table = []

    # set() dedupes inputs, matching the sibling process_iocs functions
    # and avoiding duplicate API queries.
    for provided_ioc in set(provided_iocs):
        # Deobfuscate defanged indicators (e.g. example[.]com).
        provided_ioc = provided_ioc.replace("[.]", ".")
        provided_ioc = provided_ioc.replace("[d]", ".")
        provided_ioc = provided_ioc.replace("[D]", ".")

        if validators.ipv4(provided_ioc):
            ioc_type = "ip"
        elif validators.domain(provided_ioc):
            ioc_type = "domain"
        elif validators.md5(provided_ioc):
            ioc_type = "md5"
        elif validators.sha256(provided_ioc):
            ioc_type = "sha256"
        else:
            splunk_table.append({"invalid": provided_ioc})
            continue

        ioc_dicts = query_cymon(ioc_type, session, provided_ioc)

        # query_cymon() signals an error by returning a single dict.
        if isinstance(ioc_dicts, dict):
            splunk_table.append(ioc_dicts)
            continue

        for ioc_dict in ioc_dicts:
            splunk_table.append(commons.lower_keys(ioc_dict))

    session.close()
    return splunk_table
Exemplo n.º 7
0
def query_crtsh(provided_ioc, session):
    """Search crt.sh for the given domain.

    Parameters
    ----------
    provided_ioc : str
        Domain to search for; sys.argv[1] selects "subdomain" or
        "wildcard" matching (URL-encoded ``%`` prefixes).
    session : requests.Session
        Session used to issue the HTTP request.

    Returns
    -------
    list of dict
        One dict per certificate record, or ``[{"no data": ioc}]``.
    """
    if sys.argv[1] == "subdomain":
        provided_ioc = "%25.{}".format(provided_ioc)
    elif sys.argv[1] == "wildcard":
        provided_ioc = "%25{}".format(provided_ioc)

    base_url = "https://crt.sh/?q={}&output=json"
    url = base_url.format(provided_ioc)
    resp = session.get(url, timeout=180)
    crt_dicts = []

    # BUG FIX: resp.content is bytes, so the original `!= ""` (str)
    # comparison was always True; truthiness correctly rejects an
    # empty body.
    if resp.status_code == 200 and resp.content:
        content = resp.content.decode("UTF-8")
        # crt.sh historically returned concatenated JSON objects;
        # splice them into a valid JSON array before parsing.
        cert_history = json.loads("[{}]".format(content.replace("}{", "},{")))

        for cert in cert_history:
            crt_dicts.append(commons.lower_keys(cert))
    else:
        # Strip the URL-encoded prefixes before reporting the miss.
        provided_ioc = provided_ioc.replace("%25.", "").replace("%25", "")
        crt_dicts.append({"no data": provided_ioc})
    return crt_dicts
Exemplo n.º 8
0
def process_iocs(results):
    """Return data formatted for Splunk from URLhaus.

    Side effects: populates the module-level globals ``data_feed``
    (parsed URLhaus lookup rows) and ``parser`` (ParserHTML instance),
    which the helper functions called below presumably read — confirm.

    Parameters
    ----------
    results : list of dict or None
        Prior search results whose values are used as IOCs; when None,
        IOCs are taken from the command line (``sys.argv[1:]``).

    Returns
    -------
    list of dict
        Splunk-ready rows; invalid indicators yield ``{"invalid": ioc}``.
    """
    if results is not None:
        provided_iocs = [y for x in results for y in x.values()]
    else:
        provided_iocs = sys.argv[1:]

    session = commons.create_session()
    lookup_path = "/opt/splunk/etc/apps/osweep/lookups"

    # Context manager guarantees the lookup file closes even on error.
    with open("{}/urlhaus_url_feed.csv".format(lookup_path), "r") as open_file:
        contents = open_file.read().splitlines()

    header = contents[0].split(",")
    global data_feed
    data_feed = []

    # BUG FIX: skip the header row — the original zipped it against
    # itself and appended it as a bogus data record.
    for line in contents[1:]:
        data_feed.append(OrderedDict(zip(header, line.split(","))))

    global parser
    parser = ParserHTML()

    # MD5 and SHA256 of a zero-byte file; looking these up is useless.
    empty_files = [
        "d41d8cd98f00b204e9800998ecf8427e",
        "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
    ]
    # Reject URLhaus's own (possibly defanged) URLs as search terms.
    urlhaus_match = re.compile(r"^h..ps?:\/\/urlhaus\.abuse\.ch")
    splunk_table = []

    for provided_ioc in set(provided_iocs):
        provided_ioc = commons.deobfuscate_url(provided_ioc)

        if provided_ioc in empty_files:
            splunk_table.append({"invalid": provided_ioc})
            continue

        if urlhaus_match.match(provided_ioc):
            splunk_table.append({"invalid": provided_ioc})
            continue

        if validators.url(provided_ioc) or validators.domain(provided_ioc) or \
           validators.ipv4(provided_ioc):
            analysis_dicts = get_analysis(provided_ioc)

            # get_analysis() signals an error by returning a single dict.
            if isinstance(analysis_dicts, dict):
                splunk_table.append(analysis_dicts)
                continue

            ioc_dicts = get_payloads(analysis_dicts, session)
        elif validators.md5(provided_ioc) or validators.sha256(provided_ioc):
            ioc_dicts = get_urls(session, provided_ioc)
        else:
            splunk_table.append({"invalid": provided_ioc})
            continue

        for ioc_dict in ioc_dicts:
            splunk_table.append(commons.lower_keys(ioc_dict))

        # Throttle between IOCs to be polite to the remote service.
        time.sleep(1)

    session.close()
    return splunk_table
Exemplo n.º 9
0
def merge_dict(page, download):
    """Return a dictionary containing both page and download data.

    Keys from ``download`` overwrite matching keys from ``page``;
    both inputs are key-lowered via commons.lower_keys first.
    """
    combined = {}
    for source in (page, download):
        combined.update(commons.lower_keys(source))
    return combined