def process(file, dst, type):
    """Build a certificate-fingerprint warninglist from a Mozilla CA CSV export.

    :param file: path to a CSV file with 'PEM Info' and 'SHA-256 Fingerprint'
                 columns (as exported from https://wiki.mozilla.org/CA).
    :param dst: destination identifier passed to write_to_file().
    :param type: human-readable label interpolated into the list name.
    """
    hashes = set()
    with open(file, 'r') as f_in:
        for obj in csv.DictReader(f_in):
            # The PEM blob is quoted and may contain CRs / doubled newlines.
            pem = obj['PEM Info'].strip("'").replace('\r',
                                                     '').replace('\n\n', '\n')
            # NOTE(fix): the original had a bare `except:` around a no-op
            # lookup of obj['Certificate Name'] whose result was discarded —
            # dead code that also swallowed every exception. Removed.
            cert = load_certificate(FILETYPE_PEM, pem)
            hashes.add(gethash(cert, 'md5'))
            hashes.add(gethash(cert, 'sha1'))
            hashes.add(obj['SHA-256 Fingerprint'].lower())

    warninglist = {
        'name':
        'Fingerprint of {type}'.format(type=type),
        'version':
        get_version(),
        'description':
        "Fingerprint of {type} taken from Mozilla's lists at https://wiki.mozilla.org/CA"
        .format(type=type),
        'list':
        hashes,
        'type':
        'string',
        'matching_attributes': [
            "md5", "sha1", "sha256", "filename|md5", "filename|sha1",
            "filename|sha256", "x509-fingerprint-md5", "x509-fingerprint-sha1",
            "x509-fingerprint-sha256"
        ]
    }

    write_to_file(warninglist, dst)
Ejemplo n.º 2
0
def process(url, warninglist, dst):
    """Populate *warninglist* with deduplicated hostnames fetched from *url*
    and write the result to *dst*."""
    raw = download(url).text
    unique_entries = list(set(raw.split()))

    warninglist.update({
        'type': 'hostname',
        'matching_attributes': ['domain', 'hostname', 'url'],
        'version': get_version(),
        'list': unique_entries,
    })

    write_to_file(warninglist, dst)
Ejemplo n.º 3
0
def process(url, dst):
    """Build a cidr warninglist of VPN-provider / datacenter addresses.

    :param url: source feed consumed via process_stream().
    :param dst: destination name; its second dash-separated token (with 'ip'
                upper-cased to 'IP') becomes the human-readable label.
    """
    # Compute the label once instead of duplicating the expression for both
    # the name and the description (they are intentionally identical).
    label = dst.split('-')[1].replace('ip', 'IP')
    text = 'Specialized list of {} addresses belonging to common VPN providers and datacenters'.format(label)

    warninglist = {
        'name': text,
        'version': get_version(),
        'description': text,
        'list': process_stream(url),
        'type': 'cidr',
        'matching_attributes': ["ip-src", "ip-dst", "domain|ip"]
    }

    write_to_file(warninglist, dst)
Ejemplo n.º 4
0
def generate(sites, warninglist, dst):
    """Fill *warninglist* with the second CSV field of each raw site entry
    and write it to *dst*.

    Each element of *sites* is expected to be a UTF-8 encoded bytes line.
    """
    warninglist.update({
        'version': get_version(),
        'type': 'string',
        'matching_attributes': [
            'hostname', 'domain', 'url', 'domain|ip'],
    })
    # Second comma-separated field, trimmed, with literal "\r\n" text removed.
    warninglist['list'] = [
        entry.decode('UTF-8').split(',')[1].strip().replace('\\r\\n', '')
        for entry in sites
    ]

    write_to_file(warninglist, dst)
Ejemplo n.º 5
0
def process(warninglist_name):
    """Assemble the unattributed-phone-number regex warninglist and write it
    out under *warninglist_name*."""
    description = {
        'name': 'Unattributed phone number.',
        'description':
        'Numbers that cannot be attributed because they reserved for different purposes.',
        'type': 'regex',
        'matching_attributes': ['phone-number', 'whois-registrant-phone'],
        'version': get_version(),
    }

    entries = generate_french_warninglist()
    # Further generators can be appended here, e.g.
    # entries.extend(generate_some_warninglist())
    description['list'] = entries

    write_to_file(description, warninglist_name)
Ejemplo n.º 6
0
def process(url, dst):
    """Download the IANA TLD list from *url*, drop comment lines, and write
    the remainder as a string warninglist to *dst*."""
    # Lines starting with '#' are comments in the IANA file.
    tlds = [line for line in download(url).text.splitlines()
            if not line.startswith('#')]

    warninglist = {
        'name': 'TLDs as known by IANA',
        'version': get_version(),
        'description': 'Event contains one or more TLDs as attribute with an IDS flag set',
        'list': tlds,
        'matching_attributes': ["hostname", "domain", "domain|ip"],
        'type': 'string'
    }

    write_to_file(warninglist, dst)
def process(file, dst):
    """Read a Microsoft Azure IP-ranges JSON dump from *file* and write the
    collected address prefixes as a cidr warninglist to *dst*."""
    with open(file, 'r') as json_file:
        azure_ranges = json.load(json_file)

    prefixes = []
    for entry in azure_ranges['values']:
        prefixes.extend(entry['properties']['addressPrefixes'])

    warninglist = {
        'name': 'List of known Microsoft Azure Datacenter IP Ranges',
        'version': get_version(),
        'description': 'Microsoft Azure Datacenter IP Ranges',
        'list': prefixes,
        'matching_attributes': ["ip-src", "ip-dst", "domain|ip"],
        'type': 'cidr'
    }

    write_to_file(warninglist, dst)
Ejemplo n.º 8
0
def process(files, dst):
    """Collect Cloudflare IP ranges from the given source *files* into a
    cidr warninglist and write it to *dst*.

    :param files: iterable of source-file names, one CIDR per line.
    :param dst: destination identifier passed to write_to_file().
    """
    warninglist = {
        'name': "List of known Cloudflare IP ranges",
        'version': get_version(),
        'description':
        "List of known Cloudflare IP ranges (https://www.cloudflare.com/ips/)",
        'type': "cidr",
        'list': [],
        'matching_attributes': ["ip-dst", "ip-src", "domain|ip"]
    }

    for file in files:
        # Resolve via get_abspath_source_file for consistency with the other
        # file-based generators in this module (the original opened the raw
        # path directly).
        with open(get_abspath_source_file(file), 'r') as f:
            for line in f:
                entry = line.strip()
                # Skip blank lines so empty strings never reach the list.
                if entry:
                    warninglist['list'].append(entry)

    write_to_file(warninglist, dst)
def process(files, dst):
    """Aggregate CRL/OCSP whitelist entries from *files* into a string
    warninglist and write it to *dst*."""
    entries = []
    for file in files:
        with open(get_abspath_source_file(file), 'r') as fh:
            entries.extend(line.strip() for line in fh.readlines())

    warninglist = {
        'type': "string",
        'matching_attributes': ["hostname", "domain", "ip-dst", "ip-src", "url", "domain|ip"],
        'name': "CRL Warninglist",
        'version': get_version(),
        'description': "CRL Warninglist from threatstop (https://github.com/threatstop/crl-ocsp-whitelist/)",
        'list': entries
    }

    write_to_file(warninglist, dst)
Ejemplo n.º 10
0
def process(files, dst):
    """Merge the moz.com top-500 CSV *files* into one string warninglist and
    write it to *dst*."""
    warninglist = {
        'description': "Event contains one or more entries from the top 500 of the most used domains (Mozilla).",
        'version': get_version(),
        'name': "Top 500 domains and pages from https://moz.com/top500",
        'type': 'string',
        'list': [],
        'matching_attributes': ['hostname', 'domain', 'uri', 'url']
    }

    # NOTE(review): every row is consumed, including a header row if the CSV
    # has one — confirm the input format.
    for file in files:
        with open(get_abspath_source_file(file)) as fh:
            for row in csv.reader(fh, delimiter=','):
                # Second column holds the site; trim whitespace and a
                # trailing slash.
                warninglist['list'].append(row[1].rstrip().rstrip('/'))

    write_to_file(warninglist, dst)
Ejemplo n.º 11
0
def process(file, dst):
    """Build a hostname warninglist from the Majestic Million CSV *file* and
    write it to *dst*."""
    # NOTE(review): the first 10000 lines include any header line the CSV may
    # have — confirm the source file's format.
    with open(get_abspath_source_file(file), newline='\n', encoding='utf-8', errors='replace') as csv_file:
        top_sites = csv_file.readlines()[:10000]

    warninglist = {
        'name': 'Top 10K websites from Majestic Million',
        'version': get_version(),
        'description': 'Event contains one or more entries from the top 10K of the most used websites (Majestic Million).',
        'matching_attributes': ['hostname', 'domain'],
        'type': 'hostname',
        # Third comma-separated field carries the domain.
        'list': [line.split(',')[2].rstrip() for line in top_sites]
    }

    write_to_file(warninglist, dst)
def process(file, dst, name: str, description: str):
    """Turn an Azure-style JSON range dump into a consolidated cidr
    warninglist.

    :param file: source JSON file with a top-level 'values' array.
    :param dst: destination identifier passed to write_to_file().
    :param name: warninglist name.
    :param description: warninglist description.
    """
    with open(get_abspath_source_file(file), 'r') as fh:
        range_data = json.load(fh)

    prefixes = []
    for entry in range_data['values']:
        prefixes.extend(entry['properties']['addressPrefixes'])

    warninglist = {
        'name': name,
        'version': get_version(),
        'description': description,
        'matching_attributes': ["ip-src", "ip-dst", "domain|ip"],
        'type': 'cidr',
        # Merge overlapping/adjacent networks before writing.
        'list': consolidate_networks(prefixes),
    }

    write_to_file(warninglist, dst)
Ejemplo n.º 13
0
def process(url, dst):
    """Fetch the world-universities JSON from *url* and write the unique
    university domains as a string warninglist to *dst*.

    :param url: JSON feed; each entry may carry a 'domains' list.
    :param dst: destination identifier passed to write_to_file().
    """
    university_list = download(url).json()

    warninglist = {
        'type': "string",
        'name': "University domains",
        'matching_attributes': ['hostname', 'domain', 'url', 'domain|ip'],
        'version': get_version(),
        'description':
        "List of University domains from https://raw.githubusercontent.com/Hipo/university-domains-list/master/world_universities_and_domains.json",
        'list': []
    }

    # Track seen domains in a set: the original tested membership against the
    # growing list, which is O(n) per lookup (O(n^2) overall). Insertion order
    # is preserved exactly as before.
    seen = set()
    for university in university_list:
        # `or []` guards entries whose 'domains' key is missing/None.
        for domain in university.get('domains') or []:
            if domain not in seen:
                seen.add(domain)
                warninglist['list'].append(domain)

    write_to_file(warninglist, dst)
def process(url, dst):
    """Write the disposable e-mail domain list fetched from *url* as a
    substring warninglist to *dst*."""
    warninglist = {
        'name': 'List of disposable email domains',
        'version': get_version(),
        'description': 'List of disposable email domains',
        'list': process_stream(url),
        'type': 'substring',
        'matching_attributes': [
            "email-src", "email-dst", "whois-registrant-email", "domain|ip",
            "dns-soa-email"
        ],
    }

    write_to_file(warninglist, dst)
Ejemplo n.º 15
0
def process(url, dst):
    """Scrape single-quoted strings from the Wikimedia reverse-proxy config
    at *url*, keep the ones that parse as IP networks, and write them as a
    cidr warninglist to *dst*."""
    page = codecs.decode(download(url).content, 'UTF-8')
    candidates = re.findall(r'\'(.*?)\'', page)

    valid_networks = []
    for candidate in candidates:
        try:
            ipaddress.ip_network(candidate)
        except ValueError:
            # Not a network literal — silently skip, as before.
            continue
        valid_networks.append(candidate)

    warninglist = {
        'name': 'List of known Wikimedia address ranges',
        'version': get_version(),
        'description':
        'Wikimedia address ranges (http://noc.wikimedia.org/conf/reverse-proxy.php.txt)',
        'type': 'cidr',
        'list': valid_networks,
        'matching_attributes': ["ip-src", "ip-dst", "domain|ip"]
    }

    write_to_file(warninglist, dst)
Ejemplo n.º 16
0
def process(file, dst):
    """Build a cidr warninglist from Amazon's published ip-ranges.json *file*
    and write it to *dst*."""
    with open(get_abspath_source_file(file), 'r') as json_file:
        aws_ranges = json.load(json_file)

    # IPv4 prefixes first, then IPv6, matching the original ordering.
    prefixes = [entry['ip_prefix'] for entry in aws_ranges['prefixes']]
    prefixes += [entry['ipv6_prefix'] for entry in aws_ranges['ipv6_prefixes']]

    warninglist = {
        'name': 'List of known Amazon AWS IP address ranges',
        'version': get_version(),
        'description':
        'Amazon AWS IP address ranges (https://ip-ranges.amazonaws.com/ip-ranges.json)',
        'type': 'cidr',
        'list': prefixes,
        'matching_attributes': ["ip-src", "ip-dst", "domain|ip"]
    }

    write_to_file(warninglist, dst)
Ejemplo n.º 17
0
def process(files, dst):
    """Collect Stackpath CDN ranges from *files* into a cidr warninglist.

    :param files: iterable of source-file names, one network/host per line.
    :param dst: destination identifier passed to write_to_file().
    """
    warninglist = {
        'name': "List of known Stackpath CDN IP ranges",
        'version': get_version(),
        'description': "List of known Stackpath (Highwinds) CDN IP ranges (https://support.stackpath.com/hc/en-us/articles/360001091666-Whitelist-CDN-WAF-IP-Blocks)",
        'type': "cidr",
        'list': [],
        'matching_attributes': ["ip-dst", "ip-src", "domain|ip"]
    }

    for file in files:
        with open(get_abspath_source_file(file), 'r') as f:
            lines = f.readlines()
        for line in lines:
            entry = line.strip()
            try:
                ipaddress.ip_network(entry)
            except ValueError:
                # Host address with mask bits set: reduce it to the bare
                # address. ip_interface accepts both IPv4 and IPv6 — the
                # original used IPv6Interface only and crashed on IPv4 hosts.
                entry = str(ipaddress.ip_interface(entry).ip)
            warninglist['list'].append(entry)

    write_to_file(warninglist, dst)
Ejemplo n.º 18
0
def process(file, dst):
    """Extract the top 1000 entries of top-1m.csv from the Alexa zip *file*
    and write them as a hostname warninglist to *dst*."""
    with zipfile.ZipFile(file, 'r') as alexa_zip:
        for member in alexa_zip.namelist():
            if member != "top-1m.csv":
                continue
            with alexa_zip.open(member) as top:
                top1000 = top.readlines()[:1000]

    warninglist = {
        'description': "Event contains one or more entries from the top 1000 of the most used website (Alexa).",
        'version': get_version(),
        'name': "Top 1000 website from Alexa",
        'type': 'hostname',
        # Second CSV field carries the hostname; rows are bytes.
        'list': [row.decode('UTF-8').split(',')[1].rstrip() for row in top1000],
        'matching_attributes': ['hostname', 'domain', 'url', 'domain|ip']
    }

    write_to_file(warninglist, dst)
Ejemplo n.º 19
0
    # Fragment of an Akamai range collector; the enclosing `def` header is not
    # visible in this chunk, so `search_result` (a BGP search response) and
    # `is_akamai` are assumed to come from the surrounding scope — TODO confirm.
    networks = set()
    asn_to_fetch = []
    # Collect the AS numbers of every Akamai-attributed ASN in the result.
    for asn in search_result["data"]["asns"]:
        if is_akamai(asn):
            asn_to_fetch.append(asn["asn"])

    # Directly-listed IPv4 prefixes attributed to Akamai.
    for prefix in search_result["data"]["ipv4_prefixes"]:
        if is_akamai(prefix):
            networks.add(prefix["prefix"])

    # Directly-listed IPv6 prefixes attributed to Akamai.
    for prefix in search_result["data"]["ipv6_prefixes"]:
        if is_akamai(prefix):
            networks.add(prefix["prefix"])

    # Expand each collected ASN into its announced networks; failures are
    # printed and skipped (best-effort fetch).
    for asn in asn_to_fetch:
        try:
            networks.update(get_networks_for_asn(asn))
        except Exception as e:
            print(str(e))

    warninglist = {
        'name': 'List of known Akamai IP ranges',
        'version': get_version(),
        'description': 'Akamai IP ranges from BGP search',
        'type': 'cidr',
        # consolidate_networks merges the collected prefixes before writing.
        'list': consolidate_networks(networks),
        'matching_attributes': ["ip-src", "ip-dst", "domain|ip"]
    }
    write_to_file(warninglist, "akamai")
Ejemplo n.º 20
0
def generate(data_list, dst, warninglist):
    """Stamp *warninglist* with the current version, attach *data_list* as
    its entries, and write it to *dst*."""
    warninglist.update({
        'version': get_version(),
        'list': data_list,
    })

    write_to_file(warninglist, dst)