Exemplo n.º 1
0
def main():
    """
    Begin Main...

    Fetches certificate transparency results from Facebook for each
    tracked zone and writes each returned certificate to a PEM file.
    """
    now = datetime.now()
    print("Starting: " + str(now))

    # Make database connections
    mc_connector = MongoConnector.MongoConnector()

    file_path = "/mnt/workspace/ct_facebook/"

    fb_connector = FacebookConnector.FacebookConnector()
    access_token = fb_connector.get_facebook_access_token()

    zones = ZoneManager.get_distinct_zones(mc_connector)

    for zone in zones:
        # Rate-limit calls to the Facebook API between zones.
        time.sleep(15)
        results = fetch_domain(fb_connector, access_token, zone)

        if results is None:
            print("ERROR looking up: " + zone)
            continue

        print(zone + ": " + str(len(results)))

        for result in results:
            # Use a context manager so the file handle is closed even if
            # the write raises part-way through.
            with open(file_path + zone + "_" + result['id'] + ".pem", "w") as cert_f:
                cert_f.write(result['certificate_pem'])

    now = datetime.now()
    print("Complete: " + str(now))
Exemplo n.º 2
0
def main():
    """
    Begin Main...
    """

    print("Starting: " + str(datetime.now()))

    # Establish the database session.
    mc = MongoConnector.MongoConnector()
    jobs_collection = mc.get_jobs_connection()

    # Pull the Infoblox extended attributes for host records.
    iem = InfobloxExtattrManager.InfobloxExtattrManager('host')
    iem.get_infoblox_extattr()

    # Mark the job COMPLETE in the jobs collection.
    jobs_collection.update_one(
        {'job_name': 'get_infoblox_host_extattrs'},
        {'$currentDate': {"updated": True},
         "$set": {'status': 'COMPLETE'}})

    print("Ending: " + str(datetime.now()))
Exemplo n.º 3
0
def main():
    """
    Run a custom Splunk search and record the paged results in MongoDB.
    """
    print("Starting: " + str(datetime.now()))

    mongo_connector = MongoConnector.MongoConnector()
    splunk_query_manager = SplunkQueryManager.SplunkQueryManager()
    splunk_collection = mongo_connector.get_splunk_connection()
    dns_manager = DNSManager.DNSManager(mongo_connector)

    jobs_manager = JobsManager.JobsManager(mongo_connector, "get_splunk_data")
    jobs_manager.record_job_start()

    print("Starting Splunk Query")

    results_per_page = 100

    # Put your custom Splunk search query here.
    page = splunk_query_manager.do_search('search index=...', results_per_page)

    print("Processing " + str(splunk_query_manager.RESULTCOUNT) + " results")

    # Process the first page, then keep fetching pages until exhausted.
    while page is not None:
        parse_splunk_results(page, dns_manager, splunk_collection)
        page = splunk_query_manager.get_next_page()

    jobs_manager.record_job_complete()

    print("Complete: " + str(datetime.now()))
def main():
    """
    Begin main...

    Uploads zgrab HTTP header data to Splunk for the chosen collection.
    """
    logger = LoggingUtil.create_log(__name__)

    print("Starting: " + str(datetime.now()))

    arg_parser = argparse.ArgumentParser(
        description='Search Splunk logs for IP address')
    arg_parser.add_argument('--collection_name',
                            choices=["http_80", "http_443"],
                            metavar="COLLECTION",
                            required=True,
                            help='The collection to upload to Splunk')
    args = arg_parser.parse_args()

    mongo_connector = MongoConnector.MongoConnector()
    splunk_manager = SplunkHECManager.SplunkHECManager()

    jobs_manager = JobsManager.JobsManager(mongo_connector,
                                           "splunk_headers_upload")
    jobs_manager.record_job_start()

    # Dispatch to the uploader matching the requested collection.
    uploaders = {
        "http_443": upload_zgrab_443,
        "http_80": upload_zgrab_80,
    }
    uploader = uploaders.get(args.collection_name)
    if uploader is not None:
        uploader(logger, splunk_manager, mongo_connector)

    jobs_manager.record_job_complete()

    print("Complete: " + str(datetime.now()))
    logger.info("Complete.")
Exemplo n.º 5
0
def main():
    """
    Begin Main...

    Queries PassiveTotal for each known DNS admin email address and each
    known whois organization, ingesting any newly discovered zones.
    """
    logger = LoggingUtil.create_log(__name__)

    now = datetime.now()
    print("Starting: " + str(now))
    logger.info("Starting...")

    # Obtain the list of known email addresses from the config collection
    MC = MongoConnector.MongoConnector()
    PT = PassiveTotal.PassiveTotal()
    zi = ZoneIngestor.ZoneIngestor()
    config_collection = MC.get_config_connection()
    res = config_collection.find({})

    jobs_manager = JobsManager.JobsManager(MC, 'get_passivetotal_data')
    jobs_manager.record_job_start()

    # Fetch the config document once instead of re-indexing the cursor
    # on every loop iteration.
    config = res[0]

    # Perform a search for each email address
    for admin_email in config['DNS_Admins']:
        search_pt_email(logger, admin_email, PT, zi, jobs_manager)

    # Perform a search for each whois organization
    for org in config['Whois_Orgs']:
        search_pt_org(logger, org, PT, zi, jobs_manager)

    # Record status
    jobs_manager.record_job_complete()

    now = datetime.now()
    print("Complete: " + str(now))
    logger.info("Complete.")
Exemplo n.º 6
0
class APIHelper(object):

    _logger = logging.getLogger(__name__)

    MC = MongoConnector.MongoConnector()

    INCORRECT_RESPONSE_JSON_ALLOWED = 20

    def handle_api_error(self, err, jobs_reference):
        """
        Exits the script execution post setting the status in database.
        :param err: Exception causing script exit.
        :param jobs_reference: A string with the job name or the JobsManager for the exiting script.
        """
        self._logger.error(err)
        self._logger.error("Exiting script execution.")
        # A bare job name means we must build the JobsManager ourselves;
        # otherwise the caller already supplied one.
        if not isinstance(jobs_reference, str):
            jobs_reference.record_job_error()
        else:
            JobsManager.JobsManager(self.MC, jobs_reference).record_job_error()

        exit(1)

    def connection_error_retry(self, details):
        # Invoked by the retry machinery; "details" carries the backoff delay.
        message = "Connection Error encountered. Retrying in {wait:0.1f} seconds"
        self._logger.error(message.format(**details))
def main():
    """
    Begin Main...

    Deletes dead DNS entries that no longer return results from Google DNS.
    """
    logger = LoggingUtil.create_log(__name__)

    print ("Starting: " + str(datetime.now()))
    logger.info("Starting...")

    mongo_connector = MongoConnector.MongoConnector()
    dead_dns_collection = mongo_connector.get_dead_dns_connection()
    jobs_manager = JobsManager.JobsManager(mongo_connector, 'dead_dns_cleanup')
    jobs_manager.record_job_start()

    google_dns = GoogleDNS.GoogleDNS()

    for entry in dead_dns_collection.find({}):
        # Throttle the DNS lookups.
        time.sleep(1)
        records = google_dns.fetch_DNS_records(entry['fqdn'])
        # Only an exactly-empty list triggers removal (a None result,
        # presumably an error, leaves the entry alone).
        if records == []:
            logger.info ("Removing " + entry['fqdn'])
            dead_dns_collection.remove({"_id": ObjectId(entry['_id'])})

    # Record status
    jobs_manager.record_job_complete()

    print ("Ending: " + str(datetime.now()))
    logger.info("Complete.")
def main():
    """
    Begin Main...

    Deletes dead DNS entries that no longer return results from Google DNS.
    """

    print("Starting: " + str(datetime.now()))

    mongo_connector = MongoConnector.MongoConnector()
    jobs_collection = mongo_connector.get_jobs_connection()
    dead_dns_collection = mongo_connector.get_dead_dns_connection()

    google_dns = GoogleDNS.GoogleDNS()

    for entry in dead_dns_collection.find({}):
        # Throttle the DNS lookups.
        time.sleep(1)
        records = google_dns.fetch_DNS_records(entry['fqdn'])
        # Only an exactly-empty list triggers removal (a None result,
        # presumably an error, leaves the entry alone).
        if records == []:
            print("Removing " + entry['fqdn'])
            dead_dns_collection.remove({"_id": ObjectId(entry['_id'])})

    # Mark the job COMPLETE in the jobs collection.
    jobs_collection.update_one(
        {'job_name': 'dead_dns_cleanup'},
        {'$currentDate': {"updated": True},
         "$set": {'status': 'COMPLETE'}})

    print("Ending: " + str(datetime.now()))
Exemplo n.º 9
0
def main():
    """
    Begin main...

    Downloads the AWS IP range JSON feed and replaces the contents of
    the aws_ips collection with the fresh data.
    """
    # Make database connections
    mongo_connector = MongoConnector.MongoConnector()

    now = datetime.now()
    print("Starting: " + str(now))

    jobs_manager = JobsManager.JobsManager(mongo_connector, 'get_aws_data')
    jobs_manager.record_job_start()

    # Download the JSON file
    req = requests.get(JSON_LOCATION)

    if req.status_code != 200:
        print("Bad Request")
        jobs_manager.record_job_error()
        # Exit non-zero so schedulers see the failure (was exit(0),
        # which reported success despite the recorded error).
        exit(1)

    # Convert the response to JSON
    json_data = json.loads(req.text)

    # Replace the old entries with the new entries
    # NOTE(review): remove()/insert() are deprecated in modern PyMongo;
    # the newer variant of this script uses delete_many()/perform_insert().
    aws_collection = mongo_connector.get_aws_ips_connection()
    aws_collection.remove({})
    aws_collection.insert(json_data)

    # Record status
    jobs_manager.record_job_complete()

    now = datetime.now()
    print("Complete: " + str(now))
Exemplo n.º 10
0
class APIHelper(object):

    MC = MongoConnector.MongoConnector()
    jobs_collection = MC.get_jobs_connection()

    INCORRECT_RESPONSE_JSON_ALLOWED = 20

    def handle_api_error(self, err, job_name):
        """
        Exits the script execution post setting the status in database.
        :param err: Exception causing script exit.
        :param job_name: Script exiting.
        """
        print(err)
        print('Exiting script execution.')
        # Flag the job as errored so the failure is visible in the database.
        status_update = {
            '$currentDate': {'updated': True},
            '$set': {'status': 'ERROR'},
        }
        self.jobs_collection.update_one({'job_name': job_name}, status_update)
        exit(1)

    @staticmethod
    def connection_error_retry(details):
        # Invoked by the retry machinery; "details" carries the backoff delay.
        template = 'Connection Error encountered. Retrying in {wait:0.1f} seconds'
        print(template.format(**details))
Exemplo n.º 11
0
def main():
    """
    Begin Main...

    Pushes local data sets to the remote MongoDB instance.
    """
    print("Starting: " + str(datetime.now()))

    local_mongo = MongoConnector.MongoConnector()
    remote_mongo = RemoteMongoConnector.RemoteMongoConnector()

    jobs_collection = local_mongo.get_jobs_connection()

    # Synchronize each data set with the remote server.
    zone_list = update_zones(local_mongo, remote_mongo)
    update_ip_zones(local_mongo, remote_mongo)
    update_aws_cidrs(local_mongo, remote_mongo)
    update_azure_cidrs(local_mongo, remote_mongo)
    update_config(local_mongo, remote_mongo)
    update_braas(local_mongo, remote_mongo)
    update_all_dns(local_mongo, remote_mongo, zone_list)

    # Mark the job COMPLETE in the jobs collection.
    jobs_collection.update_one(
        {'job_name': 'send_remote_server'},
        {'$currentDate': {"updated": True},
         "$set": {'status': 'COMPLETE'}})

    print("Complete: " + str(datetime.now()))
Exemplo n.º 12
0
def main():
    """
    Begin Main...

    Queries PassiveTotal for each known DNS admin email address and each
    known whois organization, ingesting any newly discovered zones.
    """
    now = datetime.now()
    print("Starting: " + str(now))

    # Obtain the list of known email addresses from the config collection
    MC = MongoConnector.MongoConnector()
    PT = PassiveTotal.PassiveTotal()
    zi = ZoneIngestor.ZoneIngestor()
    config_collection = MC.get_config_connection()
    res = config_collection.find({})

    jobs_collection = MC.get_jobs_connection()

    # Fetch the config document once instead of re-indexing the cursor
    # on every loop iteration.
    config = res[0]

    # Perform a search for each email address
    for admin_email in config['DNS_Admins']:
        search_pt_email(admin_email, PT, zi, jobs_collection)

    # Perform a search for each whois organization
    for org in config['Whois_Orgs']:
        search_pt_org(org, PT, zi, jobs_collection)

    # Record status
    jobs_collection.update_one({'job_name': 'get_passivetotal_data'}, {
        '$currentDate': {
            "updated": True
        },
        "$set": {
            'status': 'COMPLETE'
        }
    })

    now = datetime.now()
    print("Complete: " + str(now))
Exemplo n.º 13
0
def main():
    """
    Begin Main...

    Queries Cisco Umbrella whois data by DNS admin email address and by
    name server, ingesting any newly discovered zones.
    """
    logger = LoggingUtil.create_log(__name__)

    now = datetime.now()
    print("Starting: " + str(now))
    logger.info("Starting...")

    mongo_connector = MongoConnector.MongoConnector()
    umbrella = Umbrella.Umbrella()
    zi = ZoneIngestor.ZoneIngestor()

    # Obtain the list of known email addresses and name servers from the config collection
    config_collection = mongo_connector.get_config_connection()
    res = config_collection.find({})

    jobs_manager = JobsManager.JobsManager(mongo_connector, 'get_umbrella_whois')
    jobs_manager.record_job_start()

    # Fetch the config document once instead of re-indexing the cursor
    # on every loop iteration.
    config = res[0]

    # Perform a search for each email address
    for admin_email in config['DNS_Admins']:
        search_umbrella_by_email(logger, admin_email, umbrella, zi, jobs_manager)

    # Perform a search based on each name server
    for name_server in config['Whois_Name_Servers']:
        search_umbrella_by_nameserver(logger, name_server, config['Whois_Orgs'], umbrella, zi, jobs_manager)

    # Record status
    jobs_manager.record_job_complete()

    now = datetime.now()
    print("Complete: " + str(now))
    logger.info("Complete.")
Exemplo n.º 14
0
def main():
    """
    Begin main...

    Downloads the Microsoft Azure public IP range XML and replaces the
    azure_ips collection contents with the parsed prefixes.
    """
    logger = LoggingUtil.create_log(__name__)

    print ("Starting: " + str(datetime.now()))
    logger.info("Starting...")

    mongo_connector = MongoConnector.MongoConnector()
    jobs_manager = JobsManager.JobsManager(mongo_connector, 'get_azure_data')
    jobs_manager.record_job_start()

    def _fail(message):
        # Log the problem, mark the job errored, and abort.
        logger.error(message)
        jobs_manager.record_job_error()
        exit(1)

    # Fetch the landing page that links to the actual XML document.
    landing = requests.get(XML_LOCATION)
    if landing.status_code != 200:
        _fail("Bad Request")

    html_parser = MyHTMLParser()
    html_parser.feed(landing.text)

    if html_parser.URL == "":
        _fail("Unable to identify URL in Microsoft HTML")

    # Fetch and parse the XML document itself.
    xml_response = requests.get(html_parser.URL)
    if xml_response.status_code != 200:
        _fail("Bad Request")

    root = ET.fromstring(xml_response.text)

    insert_json = {
        'created': datetime.now(),
        'prefixes': [],
    }

    # Flatten each region's IpRange subnets into the prefixes list.
    for region in root.findall('Region'):
        region_name = region.get("Name")
        insert_json['prefixes'].extend(
            {'region': region_name, 'ip_prefix': iprange.get("Subnet")}
            for iprange in region.findall('IpRange'))

    azure_ips = mongo_connector.get_azure_ips_connection()
    azure_ips.remove({})
    azure_ips.insert(insert_json)

    # Record status
    jobs_manager.record_job_complete()

    print("Complete: " + str(datetime.now()))
    logger.info("Complete.")
Exemplo n.º 15
0
def main():
    """
    Begin Main...

    Looks up every tracked DNS record type for each zone via Google DNS
    and records the results, then performs SOA lookups for sub-zones.
    """
    logger = LoggingUtil.create_log(__name__)

    now = datetime.now()
    print ("Starting: " + str(now))
    logger.info("Starting...")

    # DNS record type name -> numeric record type for the Google DNS API.
    dns_types = {"a":1, "ns":2, "cname":5, "soa":6, "ptr":12, "hinfo": 13, "mx": 15, "txt":16, "aaaa":28, "srv":33, "naptr": 35, "ds": 43, "rrsig": 46, "dnskey": 48}

    mongo_connector = MongoConnector.MongoConnector()
    all_dns_collection = mongo_connector.get_all_dns_connection()
    jobs_manager = JobsManager.JobsManager(mongo_connector, 'marinus_dns')
    jobs_manager.record_job_start()

    dns_manager = DNSManager.DNSManager(mongo_connector)

    zones = ZoneManager.get_distinct_zones(mongo_connector)

    google_dns = GoogleDNS.GoogleDNS()

    for zone in zones:
        time.sleep(1)
        for dtype, dnum in dns_types.items():
            result = google_dns.fetch_DNS_records(zone, dnum)

            if result == []:
                logger.debug("No records found for " + zone)
            else:
                new_record = result[0]
                new_record['status'] = 'confirmed'
                new_record['zone'] = zone
                new_record['created'] = datetime.now()
                logger.debug ("Found " + dtype + " for: " + zone)
                dns_manager.insert_record(new_record, "marinus")

    logger.info("Starting SOA Search")

    soa_searches = find_sub_zones(all_dns_collection)
    for entry in soa_searches:
        time.sleep(1)
        # BUG FIX: query the sub-zone being iterated ("entry"), not the
        # stale "zone" variable left over from the previous loop.
        result = google_dns.fetch_DNS_records(entry, dns_types['soa'])
        if result != []:
            new_record = result[0]
            new_record['status'] = 'confirmed'
            new_record['zone'] = get_fld_from_value(entry, '')
            new_record['created'] = datetime.now()
            logger.debug ("Found SOA: " + entry)
            if new_record['zone'] != '':
                dns_manager.insert_record(new_record, "marinus")

    jobs_manager.record_job_complete()

    now = datetime.now()
    print ("Complete: " + str(now))
    logger.info("Complete.")
Exemplo n.º 16
0
def main():
    """
    Begin Main...

    Ingests public hosted zones and their records from AWS Route53.
    """
    now = datetime.now()
    print("Starting: " + str(now))

    mongo_connector = MongoConnector.MongoConnector()
    dns_manager = DNSManager.DNSManager(mongo_connector)
    jobs_collection = mongo_connector.get_jobs_connection()
    zone_ingestor = ZoneIngestor.ZoneIngestor()

    current_zones = ZoneManager.get_distinct_zones(mongo_connector)

    # For cases with multiple R53 accounts, include the account id for reference
    sts = boto3.client('sts')
    account_id = sts.get_caller_identity()["Arn"].split(':')[4]
    r53_source = "R53:" + str(account_id)

    r53_client = boto3.client('route53')

    r53_domains = r53_client.list_hosted_zones()
    r53_zone_list = []
    while r53_domains != {}:
        for zone_data in r53_domains['HostedZones']:
            # Only add public zones
            if zone_data['Config']['PrivateZone'] == False:
                r53_zone_list.append(zone_data)

        if r53_domains['IsTruncated'] == True:
            # BUG FIX: paginate with list_hosted_zones; list_domains belongs
            # to the separate route53domains API and is not a method of the
            # route53 client.
            r53_domains = r53_client.list_hosted_zones(
                Marker=r53_domains['NextMarker'])
        else:
            r53_domains = {}

    for zone_data in r53_zone_list:
        # Double check that this is not a new zone
        zone_name = zone_data['Name'][:-1]
        if zone_name not in current_zones:
            print("Creating zone: " + zone_name)
            zone_ingestor.add_zone(zone_data['Name'], r53_source)

        # Add hosts to the zone
        update_records(r53_client, dns_manager, zone_data, r53_source)

    # Record status
    jobs_collection.update_one({'job_name': 'get_route53'}, {
        '$currentDate': {
            "updated": True
        },
        "$set": {
            'status': 'COMPLETE'
        }
    })

    now = datetime.now()
    print("Ending: " + str(now))
Exemplo n.º 17
0
def main():
    """
    Begin Main...

    Ingests public hosted zones and their records from AWS Route53.
    """
    logger = LoggingUtil.create_log(__name__)

    now = datetime.now()
    print("Starting: " + str(now))
    logger.info("Starting...")

    mongo_connector = MongoConnector.MongoConnector()
    dns_manager = DNSManager.DNSManager(mongo_connector)
    zone_ingestor = ZoneIngestor.ZoneIngestor()

    jobs_manager = JobsManager.JobsManager(mongo_connector, "get_route53")
    jobs_manager.record_job_start()

    current_zones = ZoneManager.get_distinct_zones(mongo_connector)

    # For cases with multiple R53 accounts, include the account id for reference
    sts = boto3.client("sts")
    account_id = sts.get_caller_identity()["Arn"].split(":")[4]
    r53_source = "R53:" + str(account_id)

    r53_client = boto3.client("route53")

    r53_domains = r53_client.list_hosted_zones()
    r53_zone_list = []
    while r53_domains != {}:
        for zone_data in r53_domains["HostedZones"]:
            # Only add public zones
            if zone_data["Config"]["PrivateZone"] == False:
                r53_zone_list.append(zone_data)

        if r53_domains["IsTruncated"] == True:
            # BUG FIX: paginate with list_hosted_zones; list_domains belongs
            # to the separate route53domains API and is not a method of the
            # route53 client.
            r53_domains = r53_client.list_hosted_zones(
                Marker=r53_domains["NextMarker"])
        else:
            r53_domains = {}

    for zone_data in r53_zone_list:
        # Double check that this is not a new zone
        zone_name = zone_data["Name"][:-1]
        if zone_name not in current_zones:
            logger.info("Creating zone: " + zone_name)
            zone_ingestor.add_zone(zone_data["Name"], r53_source)

        # Add hosts to the zone
        update_records(r53_client, dns_manager, zone_data, r53_source)

    # Record status
    jobs_manager.record_job_complete()

    now = datetime.now()
    print("Ending: " + str(now))
    logger.info("Complete.")
Exemplo n.º 18
0
def main():
    """
    Begin Main...

    Queries the VirusTotal domain report for every tracked zone and
    stores the matching reports in the virustotal collection.
    """
    print("Starting: " + str(datetime.now()))

    # Create an instance of the VirusTotal class
    vt_instance = VirusTotal.VirusTotal()

    # Get collections for the queries
    mongo_connector = MongoConnector.MongoConnector()
    vt_collection = mongo_connector.get_virustotal_connection()

    jobs_manager = JobsManager.JobsManager(mongo_connector,
                                           'get_virustotal_data')
    jobs_manager.record_job_start()

    # Iterate over the list of tracked TLDs
    for zone in ZoneManager.get_distinct_zones(mongo_connector):
        print("Checking " + zone)
        report = vt_instance.get_domain_report(zone)

        if report is None:
            print("Error querying zone " + zone)
        elif report['response_code'] == -1:
            print("VT unhappy with " + zone)
        elif report['response_code'] == 0:
            print("VT doesn't have " + zone)
        else:
            print("Matched " + zone)

            report['zone'] = zone
            report['created'] = datetime.now()

            # Mongo doesn't allow key names with periods in them
            # Re-assign to an undotted key name
            if "Dr.Web category" in report:
                report['Dr Web category'] = report.pop("Dr.Web category")

            vt_collection.delete_one({"zone": zone})
            vt_collection.insert(report)

        # Stay under the daily limit of the free VirusTotal API; this
        # pacing makes the script take several days to finish.
        time.sleep(25)

    # Record status
    jobs_manager.record_job_complete()

    print("Complete: " + str(datetime.now()))
Exemplo n.º 19
0
def main():
    """
    Begin Main...

    Groups CNAME records pointing outside the tracked zones by their
    third-party domain and stores the groups in the tpds collection.
    """
    logger = LoggingUtil.create_log(__name__)

    print("Starting: " + str(datetime.now()))
    logger.info("Starting...")

    mongo_connector = MongoConnector.MongoConnector()
    dns_manager = DNSManager.DNSManager(mongo_connector)
    jobs_manager = JobsManager.JobsManager(mongo_connector,
                                           "get_external_cnames")
    jobs_manager.record_job_start()

    groups = {}

    # Collect zones, keeping only fully-qualified entries.
    zone_results = ZoneManager.get_distinct_zones(mongo_connector)
    zones = [entry for entry in zone_results if "." in entry]

    # Walk every CNAME record and group those resolving outside our zones.
    logger.info("Starting All DNS...")
    for srec in dns_manager.find_multiple({"type": "cname"}, None):
        if is_tracked_zone(srec["value"], zones):
            continue
        add_to_list(
            get_fld_from_value(srec["value"], srec["zone"]),
            srec["fqdn"],
            srec["value"],
            srec["zone"],
            groups,
        )

    # Update the database
    tpds_collection = mongo_connector.get_tpds_connection()

    tpds_collection.delete_many({})
    for group in groups.values():
        tpds_collection.insert_one(group)

    # Record status
    jobs_manager.record_job_complete()

    print("Ending: " + str(datetime.now()))
    logger.info("Complete.")
Exemplo n.º 20
0
def main():
    """
    Looks up every tracked DNS record type for each zone via Google DNS
    and records the results, then performs SOA lookups for sub-zones.
    """
    now = datetime.now()
    print ("Starting: " + str(now))

    # DNS record type name -> numeric record type for the Google DNS API.
    dns_types = {"a":1, "ns":2, "cname":5, "soa":6, "ptr":12, "hinfo": 13, "mx": 15, "txt":16, "aaaa":28, "srv":33, "naptr": 35, "ds": 43, "rrsig": 46, "dnskey": 48}

    mongo_connector = MongoConnector.MongoConnector()
    all_dns_collection = mongo_connector.get_all_dns_connection()
    jobs_collection = mongo_connector.get_jobs_connection()

    dns_manager = DNSManager.DNSManager(mongo_connector)

    zones = ZoneManager.get_distinct_zones(mongo_connector)

    google_dns = GoogleDNS.GoogleDNS()

    for zone in zones:
        time.sleep(1)
        for dtype, dnum in dns_types.items():
            result = google_dns.fetch_DNS_records(zone, dnum)

            if result == []:
                print("No records found for " + zone)
            else:
                new_record = result[0]
                new_record['status'] = 'confirmed'
                new_record['zone'] = zone
                new_record['created'] = datetime.now()
                print ("Found " + dtype + " for: " + zone)
                dns_manager.insert_record(new_record, "marinus")

    print("Starting SOA Search")

    soa_searches = find_sub_zones(all_dns_collection)
    for entry in soa_searches:
        time.sleep(1)
        # BUG FIX: query the sub-zone being iterated ("entry"), not the
        # stale "zone" variable left over from the previous loop.
        result = google_dns.fetch_DNS_records(entry, dns_types['soa'])
        if result != []:
            new_record = result[0]
            new_record['status'] = 'confirmed'
            new_record['zone'] = get_fld_from_value(entry, '')
            new_record['created'] = datetime.now()
            print ("Found SOA: " + entry)
            if new_record['zone'] != '':
                dns_manager.insert_record(new_record, "marinus")

    jobs_collection.update_one({'job_name': 'marinus_dns'},
                               {'$currentDate': {"updated" : True},
                                "$set": {'status': 'COMPLETE'}})

    now = datetime.now()
    print ("Complete: " + str(now))
Exemplo n.º 21
0
def main():
    """
    Begin Main...

    Groups CNAME records pointing outside the tracked zones by their
    third-party domain and stores the groups in the tpds collection.
    """

    print("Starting: " + str(datetime.now()))

    mongo_connector = MongoConnector.MongoConnector()
    dns_manager = DNSManager.DNSManager(mongo_connector)

    jobs_collection = mongo_connector.get_jobs_connection()

    groups = {}

    # Collect zones, keeping only fully-qualified entries.
    zone_results = ZoneManager.get_distinct_zones(mongo_connector)
    zones = [entry for entry in zone_results if "." in entry]

    # Walk every CNAME record and group those resolving outside our zones.
    print("Starting All DNS...")
    for srec in dns_manager.find_multiple({'type': 'cname'}, None):
        if is_tracked_zone(srec['value'], zones):
            continue
        add_to_list(get_fld_from_value(srec['value'], srec['zone']),
                    srec['fqdn'], srec['value'], srec['zone'], groups)

    # Update the database
    tpds_collection = mongo_connector.get_tpds_connection()

    tpds_collection.remove({})
    for group in groups.values():
        tpds_collection.insert(group)

    # Mark the job COMPLETE in the jobs collection.
    jobs_collection.update_one(
        {'job_name': 'get_external_cnames'},
        {'$currentDate': {"updated": True},
         "$set": {'status': 'COMPLETE'}})

    print("Ending: " + str(datetime.now()))
Exemplo n.º 22
0
def main():
    """
    This function extract the IP address ranges from the TXT records
    and stores them in gcp_ips collection within the database.
    """
    logger = LoggingUtil.create_log(__name__)

    now = datetime.now()
    print("Starting: " + str(now))
    logger.info("Starting...")

    mongo_connector = MongoConnector.MongoConnector()
    gcp_collection = mongo_connector.get_gcp_ips_connection()
    google_dns = GoogleDNS.GoogleDNS()
    jobs_manager = JobsManager.JobsManager(mongo_connector, 'get_gcp_ranges')
    jobs_manager.record_job_start()

    ip_ranges = recursive_search(logger,
                                 "_cloud-netblocks.googleusercontent.com",
                                 google_dns)

    ipv4_ranges = []
    ipv6_ranges = []

    # BUG FIX: the original tested "parts[1] not in ipv4_ranges", but the
    # lists hold dicts, so the membership check always passed and duplicate
    # prefixes were never filtered. Track the raw prefixes in sets instead.
    seen_v4 = set()
    seen_v6 = set()

    for entry in ip_ranges:
        parts = entry.split(":", 1)
        if parts[0] == "ip4":
            if parts[1] not in seen_v4:
                seen_v4.add(parts[1])
                ipv4_ranges.append({"ip_prefix": parts[1]})
        elif parts[0] == "ip6":
            if parts[1] not in seen_v6:
                seen_v6.add(parts[1])
                ipv6_ranges.append({"ipv6_prefix": parts[1]})
        else:
            logger.warning("Unrecognized data: " + entry)

    new_data = {}
    new_data['prefixes'] = ipv4_ranges
    new_data['ipv6_prefixes'] = ipv6_ranges
    new_data['created'] = now

    gcp_collection.remove({})
    gcp_collection.insert(new_data)

    jobs_manager.record_job_complete()

    now = datetime.now()
    print("Ending: " + str(now))
    logger.info("Complete.")
Exemplo n.º 23
0
def main():
    """
    Begin Main...

    Retrieves Infoblox DNS host records and stores them in the database.
    """

    print("Starting: " + str(datetime.now()))

    # Database connection and job bookkeeping.
    mc = MongoConnector.MongoConnector()
    jobs_manager = JobsManager.JobsManager(mc, 'get_iblox_host')
    jobs_manager.record_job_start()

    # Download the Infoblox DNS host records.
    InfobloxDNSManager.InfobloxDNSManager('host').get_infoblox_dns()

    # Record status
    jobs_manager.record_job_complete()

    print("Ending: " + str(datetime.now()))
def main():
    """
    Begin Main...

    Pulls Infoblox extended attributes for CNAME records.
    """

    print("Starting: " + str(datetime.now()))

    # Database connection and job bookkeeping.
    mc = MongoConnector.MongoConnector()
    jobs_manager = JobsManager.JobsManager(mc, 'get_infoblox_cname_extattrs')
    jobs_manager.record_job_start()

    # Fetch the extended attributes for CNAME objects.
    InfobloxExtattrManager.InfobloxExtattrManager('cname').get_infoblox_extattr()

    # Record status
    jobs_manager.record_job_complete()

    print("Ending: " + str(datetime.now()))
Exemplo n.º 25
0
def main():
    """
    Begin Main...

    Retrieves Infoblox TXT records and stores them in the database.
    """
    logger = LoggingUtil.create_log(__name__)

    print("Starting: " + str(datetime.now()))
    logger.info("Starting...")

    # Database connection and job bookkeeping.
    mc = MongoConnector.MongoConnector()
    jobs_manager = JobsManager.JobsManager(mc, "get_iblox_txt")
    jobs_manager.record_job_start()

    # Download the Infoblox TXT records.
    InfobloxDNSManager.InfobloxDNSManager("txt").get_infoblox_dns()

    # Record status
    jobs_manager.record_job_complete()

    print("Ending: " + str(datetime.now()))
    logger.info("Complete.")
Exemplo n.º 26
0
class APIHelper(object):

    MC = MongoConnector.MongoConnector()

    INCORRECT_RESPONSE_JSON_ALLOWED = 20

    def handle_api_error(self, err, job_name):
        """
        Exits the script execution post setting the status in database.
        :param err: Exception causing script exit.
        :param job_name: The name of the exiting script's job.
        """
        print(err)
        print('Exiting script execution.')
        # Mark the job as errored so the failure is visible in the database.
        JobsManager.JobsManager(self.MC, job_name).record_job_error()
        exit(1)

    @staticmethod
    def connection_error_retry(details):
        # Invoked by the retry machinery; "details" carries the backoff delay.
        template = 'Connection Error encountered. Retrying in {wait:0.1f} seconds'
        print(template.format(**details))
Exemplo n.º 27
0
def main():
    """
    Begin Main...

    Pulls Infoblox extended attributes for host records.
    """
    logger = LoggingUtil.create_log(__name__)

    print("Starting: " + str(datetime.now()))
    logger.info("Starting...")

    # Database connection and job bookkeeping.
    mc = MongoConnector.MongoConnector()
    jobs_manager = JobsManager.JobsManager(mc, 'get_infoblox_host_extattrs')
    jobs_manager.record_job_start()

    # Fetch the extended attributes for host objects.
    InfobloxExtattrManager.InfobloxExtattrManager('host').get_infoblox_extattr()

    # Record status
    jobs_manager.record_job_complete()

    print("Ending: " + str(datetime.now()))
    logger.info("Complete.")
Exemplo n.º 28
0
def main():
    """
    Begin main...

    Downloads the AWS IP ranges JSON feed and refreshes the aws_ips
    collection with the new data.
    """
    logger = LoggingUtil.create_log(__name__)

    # Make database connections
    mongo_connector = MongoConnector.MongoConnector()

    print("Starting: " + str(datetime.now()))
    logger.info("Starting...")

    jobs_manager = JobsManager.JobsManager(mongo_connector, "get_aws_data")
    jobs_manager.record_job_start()

    # Download the JSON file
    response = requests.get(JSON_LOCATION)

    if response.status_code != 200:
        logger.error("Bad Request")
        jobs_manager.record_job_error()
        exit(1)

    # Parse the response body into a dict.
    json_data = json.loads(response.text)

    # Swap out the stale entries for the freshly downloaded ones.
    aws_collection = mongo_connector.get_aws_ips_connection()
    aws_collection.delete_many({})
    mongo_connector.perform_insert(aws_collection, json_data)

    # Record status
    jobs_manager.record_job_complete()

    print("Complete: " + str(datetime.now()))
    logger.info("Complete.")
Exemplo n.º 29
0
def _archive_still_valid(mongo_connector, all_dns_collection, google_dns,
                         dns_manager, zones, source, removal_date):
    """
    Re-resolve expiring single-source DNS records and archive those that
    still resolve, so valid hostnames are not lost when the source entry
    is purged.
    """
    last_domain = ""
    results = mongo_connector.perform_find(
        all_dns_collection, {
            'sources': {
                "$size": 1
            },
            'sources.source': source,
            'sources.updated': {
                "$lt": removal_date
            }
        })
    for result in results:
        # Results can contain several records per FQDN; only re-check
        # each hostname once.
        if result['fqdn'] != last_domain:
            last_domain = result['fqdn']

            lookup_int = get_lookup_int(result, google_dns)
            dns_result = google_dns.fetch_DNS_records(result['fqdn'],
                                                      lookup_int)

            if dns_result != []:
                insert_current_results(dns_result, dns_manager, zones,
                                       result, source)


def main():
    """
    Begin Main...

    Remove DNS records from sources that have not been re-confirmed within
    each source's retention window. Before deleting, each expiring record
    is re-resolved and archived if it is still valid.
    """

    # The sources for which to remove expired entries
    # Infoblox is handled separately
    # {"source_name": date_difference_in_months}
    two_month_names = [
        "sonar_dns", "sonar_rdns", "ssl", "virustotal", "UltraDNS",
        "marinus", "mx"
    ]
    sources = []
    for name in two_month_names:
        sources.append({"name": name, "diff": -2})
        sources.append({"name": name + "_saved", "diff": -2})
    # Common Crawl data is refreshed less often, so it gets a longer window.
    sources.append({"name": "common_crawl", "diff": -4})
    sources.append({"name": "common_crawl_saved", "diff": -4})

    amass_diff = -2

    now = datetime.now()
    print("Starting: " + str(now))

    mongo_connector = MongoConnector.MongoConnector()
    all_dns_collection = mongo_connector.get_all_dns_connection()
    dns_manager = DNSManager.DNSManager(mongo_connector)
    GDNS = GoogleDNS.GoogleDNS()
    ip_manager = IPManager.IPManager(mongo_connector)

    jobs_manager = JobsManager.JobsManager(mongo_connector,
                                           'remove_expired_entries')
    jobs_manager.record_job_start()

    zones = ZoneManager.get_distinct_zones(mongo_connector)

    # Get the date for today minus two months
    d_minus_2m = monthdelta(datetime.now(), -2)

    print("Removing SRDNS as of: " + str(d_minus_2m))

    # Remove the old records.
    # delete_many replaces the deprecated Collection.remove (dropped in
    # PyMongo 4.x) and matches the usage elsewhere in this project.
    srdns_collection = mongo_connector.get_sonar_reverse_dns_connection()
    srdns_collection.delete_many({'updated': {"$lt": d_minus_2m}})

    ip_manager.delete_records_by_date(d_minus_2m)

    # Before completely removing old entries, make an attempt to see if they are still valid.
    # Occasionally, a host name will still be valid but, for whatever reason, is no longer tracked by a source.
    # Rather than throw away valid information, this will archive it.
    for entry in sources:
        removal_date = monthdelta(datetime.now(), entry['diff'])
        source = entry['name']
        print("Removing " + source + " as of: " + str(removal_date))

        _archive_still_valid(mongo_connector, all_dns_collection, GDNS,
                             dns_manager, zones, source, removal_date)

        dns_manager.remove_all_by_source_and_date(source, entry['diff'])

    # Process amass entries. Amass sources are dynamic ("amass:<tool>"),
    # so they are discovered from the data rather than hard-coded.
    temp_sources = mongo_connector.perform_distinct(all_dns_collection,
                                                    'sources.source')
    amass_sources = []
    for entry in temp_sources:
        if entry.startswith("amass:"):
            amass_sources.append(entry)

    for source in amass_sources:
        removal_date = monthdelta(datetime.now(), amass_diff)
        print("Removing " + source + " as of: " + str(removal_date))

        _archive_still_valid(mongo_connector, all_dns_collection, GDNS,
                             dns_manager, zones, source, removal_date)

        dns_manager.remove_all_by_source_and_date(source, amass_diff)

    # Record status
    jobs_manager.record_job_complete()

    now = datetime.now()
    print("Complete: " + str(now))
Exemplo n.º 30
0
def main():
    """
    Begin Main...

    Query crt.sh for certificates matching each tracked zone, record the
    certificate metadata (and, depending on arguments, the raw certificates
    and new DNS names), then flag any newly expired certificates.
    """
    logger = LoggingUtil.create_log(__name__)

    now = datetime.now()
    print("Starting: " + str(now))
    logger.info("Starting...")

    # Set up the common objects
    mongo_connector = MongoConnector.MongoConnector()
    ct_collection = mongo_connector.get_certificate_transparency_connection()
    zones = ZoneManager.get_distinct_zones(mongo_connector)
    jobs_manager = JobsManager.JobsManager(mongo_connector, "get_crt_sh")
    jobs_manager.record_job_start()

    save_location = "/mnt/workspace/crt_sh"
    download_method = 'dbAndSave'

    parser = argparse.ArgumentParser(
        description='Download DNS and/or certificate information from crt.sh.')
    parser.add_argument(
        '--fetch_dns_records',
        action='store_true',
        help='Indicates whether to add DNS entries to the database')
    parser.add_argument(
        '--download_methods',
        choices=['dbAndSave', 'dbOnly'],
        default=download_method,
        help=
        'Indicates whether to download the raw files or just record in the database.'
    )
    parser.add_argument(
        '--cert_save_location',
        required=False,
        default=save_location,
        help=
        'Indicates where to save the certificates on disk when choosing dbAndSave'
    )
    args = parser.parse_args()

    if args.cert_save_location:
        save_location = args.cert_save_location
        if not save_location.endswith("/"):
            save_location = save_location + "/"

    if args.download_methods == 'dbAndSave':
        check_save_location(save_location)

    for zone in zones:
        # Pace out requests so as not to DoS crt.sh and Google DNS
        time.sleep(5)

        # This could be done with backoff but we don't want to be overly aggressive.
        json_result = make_https_request(
            logger, "https://crt.sh/?q=%25." + zone + "&output=json")
        if json_result is None:
            logger.warning("Can't find result for: " + zone)
            # crt.sh returns a JSON array; fall back to an empty array so
            # the loop below simply processes zero entries.
            json_result = "[]"

        json_data = json.loads(json_result)

        # Collect unique certificate ids and non-wildcard hostnames.
        new_names = []
        new_ids = []
        for entry in json_data:
            if entry['id'] not in new_ids:
                new_ids.append(entry['id'])

            if "*" not in entry["name_value"] and entry[
                    "name_value"] not in new_names:
                new_names.append(entry["name_value"])

        if args.fetch_dns_records:
            add_new_domain_names(new_names, zones, mongo_connector)

        if args.download_methods == "dbAndSave":
            add_new_certificate_values(logger, new_ids, ct_collection, zones,
                                       save_location)
        elif args.download_methods == "dbOnly":
            add_new_certificate_values(logger, new_ids, ct_collection, zones,
                                       None)

    # Set isExpired for any entries that have recently expired.
    # update_many replaces the deprecated Collection.update(..., multi=True)
    # which was removed in PyMongo 4.x.
    ct_collection.update_many(
        {
            "not_after": {
                "$lt": datetime.utcnow()
            },
            "isExpired": False
        }, {"$set": {
            "isExpired": True
        }})

    jobs_manager.record_job_complete()

    now = datetime.now()
    print("Ending: " + str(now))
    logger.info("Complete.")