Example No. 1
class InfobloxZone(object):
    alphabets = list(string.ascii_lowercase + string.digits)
    alphabet_queried = None
    APIH = APIHelper.APIHelper()
    IH = InfobloxHelper.InfobloxHelper()

    _logger = None

    # Connect to the database
    MC = MongoConnector.MongoConnector()
    zone_collection = MC.get_zone_connection()
    ip_collection = MC.get_ipzone_connection()
    job_manager = None

    ZI = ZoneIngestor.ZoneIngestor()

    next_page_id = None
    source = 'Infoblox'

    def __get_base_url(self):
        """
        Returns the Infoblox zone API URL
        :return: Infoblox zone API URL
        """
        return 'https://' + self.IH.IBLOX_HOST + '/wapi/v' + self.IH.IBLOX_VERSION + '/zone_auth'

    def __get_previous_zones(self):
        """
        Fetches the currently present zones/sub-zones in the zone collection with source 'Infoblox'.
        The result is a dictionary with the zones as keys. The value of the key is True if the zone
        is sub_zone.
        """
        zones = self.zone_collection.find(
            {
                '$or': [{
                    'reporting_sources.source': self.source
                }, {
                    'sub_zones.source': self.source
                }]
            }, {
                'reporting_sources': 1,
                'zone': 1,
                'sub_zones': 1
            })
        self.previous_zones = {}
        for zone in zones:
            for reporting_source in zone['reporting_sources']:
                if reporting_source['source'] == self.source:
                    self.previous_zones[zone['zone']] = False
            for sub_zone in zone['sub_zones']:
                if sub_zone['source'] == self.source:
                    self.previous_zones[sub_zone['sub_zone']] = True

    def __clean_collection(self):
        """
        Cleans the zone collection of the zones which were earlier seen in the Infoblox API
        but are not seen now. Such zones/sub-zones are marked with source 'Infoblox-Retired'.
        """
        parent_zones = []
        sub_zones = []
        for zone_name, is_sub_zone in self.previous_zones.items():
            if is_sub_zone:
                sub_zones.append(zone_name)
            else:
                parent_zones.append(zone_name)

        # Update the sub_zones from 'Infoblox' to 'Infoblox-Retired'
        self.zone_collection.update_many(
            {
                'sub_zones': {
                    '$elemMatch': {
                        'sub_zone': {
                            '$in': sub_zones
                        },
                        'source': self.source
                    }
                }
            }, {'$set': {
                'sub_zones.$.source': 'Infoblox-Retired'
            }})

        self.zone_collection.update_many(
            {
                'zone': {
                    '$in': parent_zones
                },
                'reporting_sources.source': self.source
            }, {'$set': {
                'reporting_sources.$.source': 'Infoblox-Retired'
            }})

    def __insert_zone(self, zone):
        """
        Inserts the zone into the zone collection or into ip_zones collection in case
        it is an IP.
        :param zone: Zone value to be inserted into collections. This is a dictionary
                     with keys 'fqdn' and 'parent'.
        """
        # Some zones are actually IP addresses.
        # If the IP address is new, add it.
        # Change the update date if it already exists
        utf8_zone = zone['fqdn'].encode('utf-8').decode('utf-8')
        if re.match(r"^([0-9]{1,3}\.){3}[0-9]{1,3}\/\d\d$",
                    utf8_zone) is not None:
            if self.ip_collection.count_documents({"zone": zone['fqdn']}) == 0:
                insert_text = dict()
                insert_text['zone'] = utf8_zone
                insert_text['source'] = 'Infoblox'
                insert_text['status'] = 'unconfirmed'
                insert_text['created'] = datetime.now()
                insert_text['updated'] = datetime.now()
                self.ip_collection.insert_one(insert_text)
                self._logger.info("Added IP: " + utf8_zone)
            else:
                self.ip_collection.update_one(
                    {'zone': zone['fqdn']},
                    {'$currentDate': {
                        "updated": True
                    }})
                self._logger.info("Updated IP: " + utf8_zone)
        else:
            # Remove this zone from the set of previously seen zones. Anything left in
            # that set afterwards is stale and will be retired by __clean_collection.
            if zone['fqdn'] in self.previous_zones:
                del self.previous_zones[zone['fqdn']]
            self.ZI.add_zone(zone['fqdn'], self.source, zone['parent'])

    def __infoblox_response_handler(self, response):
        """
        Handles the API response. Incorrect JSON parsing is allowed upto 20 times post which the
        script exits. If the 'next_page_id' is received in the response, then that is set as an
        identification for the next page of the API to be queried.
        :param response: Response object
        """
        try:
            response_data = response.json()
            response_result = response_data['result']
        except (ValueError, AttributeError) as err:
            if self.incorrect_response_json_allowed > 0:
                self._logger.warning(
                    'Unable to parse response JSON for alphabet ' +
                    self.alphabet_queried)
                self.incorrect_response_json_allowed -= 1
            else:
                self.APIH.handle_api_error(
                    'Unable to parse response JSON for 20 alphabets: ' +
                    repr(err),
                    self.job_manager,
                )
        else:
            for entry in response_result:
                zone = dict()
                zone['fqdn'] = entry['fqdn']
                zone['parent'] = entry['parent']
                self.__insert_zone(zone)

            if 'next_page_id' in response_data:
                self.next_page_id = response_data['next_page_id']

    @backoff.on_exception(backoff.expo,
                          requests.exceptions.ConnectionError,
                          max_tries=4,
                          factor=10,
                          on_backoff=APIH.connection_error_retry)
    def __backoff_api_retry(self):
        """
        Makes API calls to Infoblox with exponential retry capabilities using 'backoff'. The API is
        retried 3 times in case of ConnectionError exception before the script exists.
        """
        url = self.__get_base_url()
        params = {
            'view': 'External',
            'fqdn~': '.*' + self.alphabet_queried + '$',
            '_return_fields': 'parent,fqdn',
        }
        if not self.next_page_id:
            params.update({
                '_paging': '1',
                '_return_as_object': '1',
                '_max_results': '1500'
            })
        else:
            params.update({'_page_id': self.next_page_id})

        return requests.get(url,
                            params,
                            auth=HTTPBasicAuth(self.IH.IBLOX_UNAME,
                                               self.IH.IBLOX_PASSWD),
                            verify=False)

    def __infoblox_paginated_request(self):
        """
        Makes paginated API calls to Infoblox. The API is retried 3 times in case of ConnectionError
        exception before the script exists. The script exists on encountering HTTPError or any other
        RequestException.
        """
        try:
            response = self.__backoff_api_retry()
            response.raise_for_status()
        except requests.exceptions.HTTPError as herr:
            self.APIH.handle_api_error(herr, self.job_manager)
        except requests.exceptions.RequestException as err:
            self.APIH.handle_api_error(err, self.job_manager)
        else:
            self.next_page_id = None
            self.__infoblox_response_handler(response)

    def get_infoblox_zones(self):
        """
        Extracts the Infoblox zones using paginated requests.
        """
        print("Starting: " + str(datetime.now()))
        self._logger.info("Starting....")
        self.job_manager = JobsManager.JobsManager(self.MC,
                                                   'get_iblox_alpha_zones')
        self.job_manager.record_job_start()

        self.__get_previous_zones()
        for alphabet in self.alphabets:
            self.alphabet_queried = alphabet
            self.next_page_id = None
            self.__infoblox_paginated_request()
            while self.next_page_id:
                self.__infoblox_paginated_request()

        self.__clean_collection()

        # Record status
        self.job_manager.record_job_complete()

        print("Ending: " + str(datetime.now()))
        self._logger.info("Complete")

    def __init__(self):
        self._logger = LoggingUtil.create_log(__name__)
        self.incorrect_response_json_allowed = self.APIH.INCORRECT_RESPONSE_JSON_ALLOWED
        self.get_infoblox_zones()
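
Below is a minimal stand-alone sketch of the paging contract this example relies on, assuming the Infoblox WAPI behavior implied by the class: the first request sets '_paging' and '_return_as_object', and each response may carry a 'next_page_id' identifying the following page. The helper name and flow are illustrative, not part of the class above.

import requests


def fetch_all_zones(base_url, auth, letter):
    """Collect every zone whose FQDN ends in 'letter', page by page."""
    results, page_id = [], None
    while True:
        params = {
            'view': 'External',
            'fqdn~': '.*' + letter + '$',
            '_return_fields': 'parent,fqdn',
        }
        if page_id is None:
            # First page: ask the WAPI to paginate and wrap the result.
            params.update({'_paging': '1', '_return_as_object': '1',
                           '_max_results': '1500'})
        else:
            params['_page_id'] = page_id
        # auth is e.g. HTTPBasicAuth(user, password); verify=False
        # mirrors the class above.
        data = requests.get(base_url, params=params, auth=auth,
                            verify=False).json()
        results.extend(data['result'])
        page_id = data.get('next_page_id')
        if page_id is None:
            return results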
Example No. 2
class UltraDNSZone(object):

    UH = UltraDNSHelper.UltraDNSHelper('get_ultradns_zones')
    APIH = APIHelper.APIHelper()
    ZI = ZoneIngestor.ZoneIngestor()

    def __ultradns_zone_response_handler(self, response):
        """
        Handles the API response. Incorrect JSON parsing is allowed upto 20 times post which the
        script exits. No action is performed when the zone name ends in "in-addr.arpa".
        :param response: Response object
        """
        try:
            response = response.json()
        except (ValueError, AttributeError) as err:
            if self.UH.incorrect_response_json_allowed > 0:
                print(
                    'Unable to parse response JSON for retrieving UltraDNS zones for the offset '
                    + str(self.UH.offset))
                self.UH.incorrect_response_json_allowed -= 1
            else:
                self.APIH.handle_api_error(
                    'Unable to parse response JSON for 20 zones: ' + repr(err),
                    self.UH.jobs_manager,
                )
        else:
            # Zone names end in '.'; strip it before ingesting into the collection.
            for zone in response['zones']:
                zone_name = zone['properties']['name'][:-1]

                if not zone_name.endswith('in-addr.arpa'):
                    # Part of clean_collection code.
                    # if zone_name in self.UH.previous_zones:
                    #     del self.UH.previous_zones[zone_name]

                    # Add the zone to the zones collection
                    self.ZI.add_zone(zone_name, self.UH.source)

            self.UH.set_offset(response['resultInfo'])

    def __paginated_ultradns_zones_request(self):
        """
        Makes paginated API calls to UltraDNS. The API is retried 3 times in case of ConnectionError
        exception before the script exists. The script exists on encountering HTTPError or any other
        RequestException.
        The value of the limit has been set as mentioned in the docs.
        In case a 401 is encountered along with the required token expiration message, another login
        API is sent with grant_type set as 'refresh_token' to retrieve a valid access token.
        """
        url = self.UH.ULTRACONNECT.ZONES
        try:
            res = self.UH.backoff_api_retry(
                url,
                {
                    'limit': 1000,
                    'offset': self.UH.offset,
                    'q': 'zone_type:PRIMARY'
                },
                {'authorization': 'Bearer ' + self.UH.access_token},
            )
            res.raise_for_status()
        except requests.exceptions.HTTPError as herr:
            err_msg = json.loads(res.text)['errorMessage']
            if res.status_code == 401 and err_msg == self.UH.access_token_expiration_error:
                self.UH.login('refresh_token')
                self.__paginated_ultradns_zones_request()
            else:
                self.APIH.handle_api_error(herr, self.UH.jobs_manager)
        except requests.exceptions.RequestException as err:
            self.APIH.handle_api_error(err, self.UH.jobs_manager)
        else:
            self.__ultradns_zone_response_handler(res)

    def get_ultradns_zones(self):
        """
        Extracts the zones listing from UltraDNS in a paginated manner.
        """
        print("Starting: " + str(datetime.now()))
        self.UH.jobs_manager.record_job_start()

        # Part of clean_collection code.
        # self.UH.get_previous_zones()

        self.__paginated_ultradns_zones_request()
        while self.UH.offset:
            self.__paginated_ultradns_zones_request()

        # Record status
        self.UH.jobs_manager.record_job_complete()

        print("Ending: " + str(datetime.now()))

    def __init__(self):
        self.get_ultradns_zones()
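
The 401 branch above implements a refresh-and-retry pattern. A generic sketch of the same idea follows; the helper object is assumed to expose access_token and login(grant_type) like UltraDNSHelper (Example No. 6), and the sketch assumes the 401 body is a JSON object, whereas Example No. 7 shows it can also arrive as a list.

import requests


def get_with_token_refresh(url, params, helper, expiration_error):
    """GET with at most one token refresh on an expired-token 401."""
    for _ in range(2):
        res = requests.get(
            url, params,
            headers={'authorization': 'Bearer ' + helper.access_token})
        if (res.status_code == 401
                and res.json().get('errorMessage') == expiration_error):
            # Token expired: fetch a fresh access token and retry once.
            helper.login('refresh_token')
            continue
        break
    res.raise_for_status()
    return res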
Example No. 3
class UltraDNSZonesInfo(object):

    UH = UltraDNSHelper.UltraDNSHelper('get_ultradns_zones_info')
    APIH = APIHelper.APIHelper()
    DNS_MGR = DNSManager.DNSManager(UH.MC)

    def __ultradns_zone_info_response_handler(self, response):
        """
        Handles the API response. Incorrect JSON parsing is allowed upto 20 times post which the
        script exits.
        :param response: Response object
        """
        try:
            response_data = response.json()
            record_sets = response_data['rrSets']
        except (ValueError, AttributeError) as err:
            if self.UH.incorrect_response_json_allowed > 0:
                print('Unable to parse response JSON for zone ' +
                      self.zone_queried)
                self.UH.incorrect_response_json_allowed -= 1
            else:
                self.APIH.handle_api_error(
                    'Unable to parse response JSON for 20 zones: ' + repr(err),
                    self.UH.jobs_manager,
                )
        else:
            for record in record_sets:
                dns_info = dict()
                # The ownerName could be either the FQDN or a relative domain name.
                # If it is an FQDN, it will end in '.'
                fqdn = record['ownerName'] + '.' + self.zone_queried
                if record['ownerName'].endswith('.'):
                    fqdn = record['ownerName'][:-1]

                dns_info['zone'] = self.zone_queried
                dns_info['fqdn'] = fqdn
                dns_info['type'] = record['rrtype'].split(' ')[0].lower()
                dns_info['status'] = 'unknown'

                for dns in record['rdata']:
                    if dns_info['type'] in ['a', 'ptr']:
                        try:
                            if IPAddress(dns).is_private():
                                continue
                        except AddrFormatError as err:
                            print('For ' + fqdn + ' encountered: ' + str(err))
                            continue

                    if dns_info['type'] not in ['soa', 'txt'] and dns.endswith('.'):
                        dns = dns[:-1]
                    dns_info['value'] = dns
                    dns_info['created'] = datetime.now()
                    self.DNS_MGR.insert_record(dns_info.copy(), self.UH.source)

            self.UH.set_offset(response_data['resultInfo'])

    def __paginated_ultradns_zones_info_request(self):
        """
        Makes paginated API calls to UltraDNS. The API is retried 3 times in case of ConnectionError
        exception before the script exists. The script exists on encountering HTTPError or any other
        RequestException.
        In case a 401 is encountered along with the required token expiration message, another login
        API is sent with grant_type set as 'refresh_token' to retrieve a valid access token.
        """
        url = self.UH.ULTRACONNECT.ZONEINFO.format(
            zone_queried=self.zone_queried)
        try:
            response = self.UH.backoff_api_retry(
                url, {
                    'q': 'kind:RECORDS',
                    'limit': 2000,
                    'offset': self.UH.offset,
                }, {'authorization': 'Bearer ' + self.UH.access_token})
            response.raise_for_status()
        except requests.exceptions.HTTPError as herr:
            err_msg = json.loads(response.text)['errorMessage']
            if response.status_code == 401 and err_msg == self.UH.access_token_expiration_error:
                self.UH.login('refresh_token')
                self.__paginated_ultradns_zones_info_request()
            else:
                self.APIH.handle_api_error(herr, self.UH.jobs_manager)
        except requests.exceptions.RequestException as err:
            self.APIH.handle_api_error(err, self.UH.jobs_manager)
        else:
            self.__ultradns_zone_info_response_handler(response)

    def __get_ultradns_zones_info(self):
        """
        Extracts the zone DNS information from UltraDNS in a paginated manner for the UltraDNS zones.
        """
        print("Starting: " + str(datetime.now()))
        self.UH.jobs_manager.record_job_start()
        self.UH.get_previous_zones()

        # For querying UltraDNS, we need to query on the exact zones reported
        # hence we query for previous_zones.
        for zone in self.UH.previous_zones:
            self.zone_queried = zone
            self.UH.offset = 0
            self.__paginated_ultradns_zones_info_request()
            while self.UH.offset:
                self.__paginated_ultradns_zones_info_request()

        # Record status
        self.UH.jobs_manager.record_job_complete()
        print("Ending: " + str(datetime.now()))

    def __init__(self):
        self.__get_ultradns_zones_info()
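
The ownerName normalization at the top of the response handler is easy to get wrong, so here is the same rule restated as a self-contained function with illustrative data:

def to_fqdn(owner_name, zone):
    """Mirror of the handler's rule: an ownerName ending in '.' is already
    fully qualified; otherwise it is relative to the queried zone."""
    if owner_name.endswith('.'):
        return owner_name[:-1]
    return owner_name + '.' + zone


assert to_fqdn('www.example.org.', 'example.org') == 'www.example.org'
assert to_fqdn('www', 'example.org') == 'www.example.org'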
Example No. 4
class InfobloxExtattrManager(object):
    # Make database connections
    MC = MongoConnector.MongoConnector()
    APIH = APIHelper.APIHelper()
    IH = InfobloxHelper.InfobloxHelper()

    iblox_extattr_collection = MC.get_infoblox_extattr_connection()
    zone_queried = None
    record_type = None
    next_page_id = None
    _logger = None


    def _log(self):
        """
        Get the log
        """
        return logging.getLogger(__name__)


    def __get_record_type_url(self):
        """
        Returns the URL to be queried at Infoblox to return the extattr information.
        Paging information is appended to the URL as per the condition satisfied.
        :return: string: URL to be queried
        """
        paging_info = self.IH.get_pagination_params(self.next_page_id)

        return_fields = '&_return_fields=extattrs,zone'
        if self.record_type == 'a':
            return_fields += ',ipv4addr'
        elif self.record_type == 'aaaa':
            return_fields += ',ipv6addr'
        elif self.record_type == 'zone':
            return_fields = '&_return_fields=extattrs'

        url = self.IH.get_infoblox_base_url(
            self.zone_queried,
            self.record_type,
        ).format(
            return_fields=return_fields,
            paging_info=paging_info,
        )
        return url

    def __insert_extattrs(self, insert_object):
        """
        Inserts/Updates the extattr information in the database. '_ref' uniquely identifies the
        resource.
        :param insert_object: Dictionary containing the details of the resource.
        """
        if insert_object['_ref'] not in self.previous_records:
            insert_object['created'] = datetime.now()
            insert_object['updated'] = datetime.now()
            self.iblox_extattr_collection.insert_one(insert_object)
        else:
            self.previous_records.remove(insert_object['_ref'])
            self.iblox_extattr_collection.update_one(
                {'_ref': insert_object['_ref']},
                {'$set': {
                    'updated': datetime.now(),
                    'extattrs': insert_object['extattrs']
                }})

    def __get_previous_records(self):
        """
        Retrieve the current data related to the zone and record_type. This is evaluated
        against the data that we receive in the latest script run to determine stale records.
        The data is stored as a list of _ref
        """
        self.previous_records = []
        previous_records = self.iblox_extattr_collection.find({'zone': self.zone_queried,
                                                              'record_type': self.record_type,
                                                              }, {'_ref': 1})
        for record in previous_records:
            self.previous_records.append(record['_ref'])

    def __sanitise_response(self, response_object):
        """
        For record_type of zone type, we extract 'infoblox_zone' from the '_ref'.
        For record_type of a type, we get the 'value' from the 'ipv4addr' key for other
        record_types it is extracted from '_ref'
        The general format of _ref is: {record_type_iden}/{hash_iden}:{value}/External
        The valid record_type_iden values are 'zone_auth', 'record:cname', 'record:host', 'record:a'
        :param response_object: Value of 'result' key of response in JSON format.
        """
        insert_object = {
            'record_type': self.record_type,
            'zone': self.zone_queried,
        }

        if self.record_type == 'zone':
            response_object['infoblox_zone'] = response_object['_ref'].split(':')[1].split('/')[0]
        else:
            response_object['infoblox_zone'] = response_object['zone']
            response_object.pop('zone')

        if self.record_type == 'a':
            response_object['value'] = response_object['ipv4addr']
            response_object.pop('ipv4addr')
        elif self.record_type == 'aaaa':
            response_object['value'] = response_object['ipv6addr']
            response_object.pop('ipv6addr')
        else:
            response_object['value'] = response_object['_ref'].split('/')[1].split(':')[1]

        response_object.update(insert_object)

    def __infoblox_response_handler(self, response):
        """
        Handles the API response. Incorrect JSON parsing is allowed upto 20 times post which the
        script exits. No action is performed when the 'extattrs' is an empty dictionary.
        :param response: Response object
        """
        try:
            response_data = response.json()
            response_result = response_data['result']
        except (ValueError, AttributeError) as err:
            if self.incorrect_response_json_allowed > 0:
                self._logger.warning('Unable to parse response JSON for zone ' + self.zone_queried)
                self.incorrect_response_json_allowed -= 1
            else:
                self.APIH.handle_api_error(
                    'Unable to parse response JSON for 20 zones: ' + repr(err),
                    'get_infoblox_' + self.record_type.lower() + '_extattr',
                )
        else:
            for response_object in response_result:
                if not response_object['extattrs']:
                    continue

                # Adding the exception handling for the scenario when the '_ref' format
                # changes and leads to 'split' not working as expected.
                try:
                    self.__sanitise_response(response_object)
                except IndexError as err:
                    self.APIH.handle_api_error(err, 'get_infoblox_' + self.record_type.lower() + '_extattr')
                else:
                    self.__insert_extattrs(response_object)

            if "next_page_id" in response_data:
                self.next_page_id = response_data['next_page_id']

    @backoff.on_exception(backoff.expo,
                          requests.exceptions.ConnectionError,
                          max_tries=4,
                          factor=10,
                          on_backoff=APIH.connection_error_retry)
    def __backoff_api_retry(self):
        """
        Makes API calls to Infoblox with exponential retry capabilities using 'backoff'. The API is
        retried 3 times in case of ConnectionError exception before the script exists.
        :return:
        """
        return requests.get((self.__get_record_type_url()),
                            auth=HTTPBasicAuth(self.IH.IBLOX_UNAME, self.IH.IBLOX_PASSWD), verify='/etc/ssl/certs/ca-bundle.crt')

    def __infoblox_paginated_request(self):
        """
        Makes paginated API calls to Infoblox. The API is retried 3 times in case of ConnectionError
        exception before the script exists. The script exists on encountering HTTPError or any other
        RequestException. On success, the next_page_id is set to None for the next API call.
        """
        try:
            response = self.__backoff_api_retry()
            response.raise_for_status()
        except requests.exceptions.HTTPError as herr:
            self.APIH.handle_api_error(herr, 'get_infoblox_' + self.record_type.lower() + '_extattr')
        except requests.exceptions.RequestException as err:
            self.APIH.handle_api_error(err, 'get_infoblox_' + self.record_type.lower() + '_extattr')
        else:
            self.next_page_id = None
            self.__infoblox_response_handler(response)

    def get_infoblox_extattr(self):
        """
        Extracts the zones from the zone collection to query Infoblox. The API calls continue to be made
        for the zone till the next_page_id is set to None indicating no new results to be fetched.
        Post the retrieval of all the data, the archaic data for a zone and record_type is purged.
        """
        zones = ZoneManager.get_zones_by_source(self.MC, 'Infoblox')
        for zone in zones:
            self.zone_queried = zone
            self.next_page_id = None
            self.__get_previous_records()
            self.__infoblox_paginated_request()
            while self.next_page_id:
                self.__infoblox_paginated_request()
            self.IH.clean_collection(self.previous_records, self.iblox_extattr_collection)


    def __init__(self, record_type):
        self.record_type = record_type
        self.incorrect_response_json_allowed = self.APIH.INCORRECT_RESPONSE_JSON_ALLOWED
        self._logger = self._log()
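
A worked example of the '_ref' parsing that __sanitise_response describes, using the {record_type_iden}/{hash_iden}:{value}/External format from the docstring; the hash segments below are invented:

zone_ref = 'zone_auth/ZG5zLnpvbmUk:example.org/External'
cname_ref = 'record:cname/ZG5zLmJpbmQk:www.example.org/External'

# Zone records: the value sits between the first ':' and the next '/'.
assert zone_ref.split(':')[1].split('/')[0] == 'example.org'
# Other records: the value follows the ':' in the middle '/' segment.
assert cname_ref.split('/')[1].split(':')[1] == 'www.example.org'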
Example No. 5
class InfobloxDNSManager(object):

    # Make database connections
    MC = MongoConnector.MongoConnector()
    zone_collection = MC.get_zone_connection()

    APIH = APIHelper.APIHelper()
    IH = InfobloxHelper.InfobloxHelper()
    DNS_MGR = DNSManager.DNSManager(MC)

    next_page_id = None
    zone_queried = None
    record_type = None
    iblox_collection = None
    dns_value_mapper = {
        "mx": "mail_exchanger",
        "txt": "text",
        "a": "ipv4addr",
        "cname": "canonical",
        "aaaa": "ipv6addr",
    }
    _logger = None

    def _log(self):
        """
        Get the log
        """
        return logging.getLogger(__name__)

    def __get_record_type_url(self):
        """
        Returns the URL to be queried at Infoblox to return the DNS information.
        Paging information is appended to the URL as per the condition satisfied.
        :return: string: URL to be queried
        """
        paging_info = self.IH.get_pagination_params(self.next_page_id)

        url = self.IH.get_infoblox_base_url(
            self.zone_queried,
            self.record_type,
        ).format(
            return_fields="&_return_fields%2B=zone",
            paging_info=paging_info,
        )
        return url

    def __get_previous_records(self):
        """
        Retrieve the current data related to the zone. This is evaluated against the data that we
        receive in the latest script run to determine stale records.
        The data is stored as a list of _ref.
        """
        self.previous_records = self.iblox_collection.distinct(
            "_ref", {"zone": self.zone_queried})

    def __insert_dns_information(self, dns_information):
        """
        Inserts the DNS information into the all_dns collection. For the 'host' records, iterate over
        all the ipv4addrs mentioned to get the data.
        :param dns_information: DNS data for the zone and 'record_type'
        """
        del dns_information["_ref"]
        del dns_information["view"]
        del dns_information["infoblox_zone"]

        if self.record_type == "host":
            # In order to resolve multiple ipv4addrs
            for ipv4 in dns_information["ipv4addrs"]:
                dns_info = dict()
                dns_info["zone"] = dns_information["zone"]
                dns_info["type"] = "a"
                dns_info["value"] = ipv4["ipv4addr"]
                dns_info["fqdn"] = ipv4["host"]
                dns_info["status"] = "unknown"
                dns_info["created"] = datetime.now()
                self.DNS_MGR.insert_record(dns_info, "infoblox-host")
        else:
            # Removing the 'preference' key from the 'mx' records
            if self.record_type == "mx":
                del dns_information["preference"]
            dns_information["value"] = dns_information[self.dns_value_mapper[
                self.record_type]]
            del dns_information[self.dns_value_mapper[self.record_type]]
            dns_information["fqdn"] = dns_information["name"]
            del dns_information["name"]
            dns_information["type"] = self.record_type
            dns_information["status"] = "unknown"
            dns_information["created"] = datetime.now()
            self.DNS_MGR.insert_record(dns_information,
                                       "infoblox-" + self.record_type)

    def __insert_records(self, insert_object):
        """
        Inserts/Updates the dns information in the database. '_ref' uniquely identifies the
        resource. The data is injected into the individual collections belonging to the
        record_type and also into the all_dns collection.
        :param insert_object: Dictionary containing the details of the resource.
        """
        dns_information = insert_object.copy()
        if not insert_object["_ref"] in self.previous_records:
            insert_object["created"] = datetime.now()
            insert_object["updated"] = datetime.now()
            self.iblox_collection.insert(insert_object)
        else:
            self.previous_records.remove(insert_object["_ref"])
            insert_object["updated"] = datetime.now()
            self.iblox_collection.update_one({"_ref": insert_object["_ref"]},
                                             {"$set": insert_object})
        # Update DNS Information.
        self.__insert_dns_information(dns_information)

    def __infoblox_response_handler(self, response):
        """
        Handles the API response. Malformed response JSON is tolerated up to 20 times, after
        which the script exits. "next_page_id" holds the pagination information.
        :param response: Response object
        """
        try:
            response_data = response.json()
            response_result = response_data["result"]
        except (ValueError, AttributeError) as err:
            if self.incorrect_response_json_allowed > 0:
                self._logger.warning(
                    "Unable to parse response JSON for zone " +
                    self.zone_queried)
                self.incorrect_response_json_allowed -= 1
            else:
                self.APIH.handle_api_error(
                    "Unable to parse response JSON for 20 zones: " + repr(err),
                    "get_iblox_" + self.record_type.lower(),
                )
        else:
            # Add the zone parameter to each record and insert
            for entry in response_result:
                entry["infoblox_zone"] = entry["zone"]
                entry["zone"] = self.zone_queried
                self.__insert_records(entry)

            if "next_page_id" in response_data:
                self.next_page_id = response_data["next_page_id"]

    @backoff.on_exception(
        backoff.expo,
        requests.exceptions.ConnectionError,
        max_tries=4,
        factor=10,
        on_backoff=APIH.connection_error_retry,
    )
    def __backoff_api_retry(self):
        """
        Makes API calls to Infoblox with exponential retry capabilities using 'backoff'. The API is
        retried 3 times in case of ConnectionError exception before the script exists.
        :return:
        """
        return requests.get(
            (self.__get_record_type_url()),
            auth=HTTPBasicAuth(self.IH.IBLOX_UNAME, self.IH.IBLOX_PASSWD),
            verify="/etc/ssl/certs/ca-bundle.crt",
            timeout=120,
        )

    def __infoblox_paginated_request(self):
        """
        Makes paginated API calls to Infoblox. The API is retried 3 times in case of ConnectionError
        exception before the script exists. The script exists on encountering HTTPError or any other
        RequestException. On success, the next_page_id is set to None for the next API call.
        """
        try:
            response = self.__backoff_api_retry()
            response.raise_for_status()
        except requests.exceptions.HTTPError as herr:
            self.APIH.handle_api_error(herr,
                                       "get_iblox_" + self.record_type.lower())
        except requests.exceptions.RequestException as err:
            self.APIH.handle_api_error(err,
                                       "get_iblox_" + self.record_type.lower())
        else:
            self.next_page_id = None
            self.__infoblox_response_handler(response)

    def get_infoblox_dns(self):
        """
        Extracts the zones from the zone collection to query Infoblox. The API calls continue to be made
        for the zone till the next_page_id is set to None indicating no new results to be fetched.
        Post the retrieval of all the data, the archaic data for a zone is purged.
        """
        zones = ZoneManager.get_zones_by_source(self.MC, "Infoblox")
        for zone in zones:
            self.zone_queried = zone
            self.next_page_id = None
            self.__get_previous_records()
            self.__infoblox_paginated_request()
            while self.next_page_id:
                self.__infoblox_paginated_request()
            self.IH.clean_collection(self.previous_records,
                                     self.iblox_collection)

    def __init__(self, record_type):
        self.record_type = record_type
        self.iblox_collection = getattr(
            self.MC, self.IH.IBLOX_COLLECTIONS[self.record_type])()
        self.incorrect_response_json_allowed = self.APIH.INCORRECT_RESPONSE_JSON_ALLOWED
        self._logger = self._log()
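
A minimal sketch of the 'host' flattening performed in __insert_dns_information: one Infoblox host record carrying several ipv4addrs becomes several 'a' records. The record below is illustrative:

host_record = {
    'zone': 'example.org',
    'ipv4addrs': [
        {'host': 'box.example.org', 'ipv4addr': '203.0.113.10'},
        {'host': 'box.example.org', 'ipv4addr': '203.0.113.11'},
    ],
}

# One 'a' record per listed address, as the handler builds before
# handing each dict to DNS_MGR.insert_record.
a_records = [
    {'zone': host_record['zone'], 'type': 'a',
     'fqdn': addr['host'], 'value': addr['ipv4addr']}
    for addr in host_record['ipv4addrs']
]
assert len(a_records) == 2 and a_records[1]['value'] == '203.0.113.11'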
Example No. 6
class UltraDNSHelper(object):
    refresh_token = None
    access_token = None
    zone_queried = None
    previous_zones = None
    jobs_manager = None
    offset = 0
    source = 'UltraDNS'
    # This is as required by the UltraDNS documentation.
    access_token_expiration_error = 'invalid_grant:Token not found, expired or invalid.'

    MC = MongoConnector.MongoConnector()
    APIH = APIHelper.APIHelper()

    # Get the UltraDNS connection data
    ULTRACONNECT = UltraDNSConnector.UltraDNSConnector()

    zones_collection = MC.get_zone_connection()

    @backoff.on_exception(backoff.expo,
                          requests.exceptions.ConnectionError,
                          max_tries=4,
                          factor=10,
                          on_backoff=APIH.connection_error_retry)
    def backoff_api_retry(self, url, params, headers):
        """
        Makes API calls with exponential retry capabilities using 'backoff'. The API is
        retried 3 times in case of ConnectionError exception before the script exists.
        """
        return requests.get(url, params, headers=headers)

    @backoff.on_exception(backoff.expo,
                          requests.exceptions.ConnectionError,
                          max_tries=4,
                          factor=10,
                          on_backoff=APIH.connection_error_retry)
    def login(self, grant_type):
        """
        Retrieves the access and refresh tokens to log in to UltraDNS.
        The first call is made with grant_type=password. Any subsequent request
        to fetch the accessToken is made with grant_type=refresh_token.
        :param grant_type: String specifying the grant_type
        """
        login_url = self.ULTRACONNECT.LOGIN
        data = dict()
        data['grant_type'] = grant_type

        if grant_type == 'password':
            data['username'] = self.ULTRACONNECT.USERNAME
            data['password'] = self.ULTRACONNECT.PASSWORD
        elif grant_type == 'refresh_token':
            data['refresh_token'] = self.refresh_token

        try:
            res = requests.post(login_url, data)
            res.raise_for_status()
        except requests.exceptions.HTTPError as herr:
            self.APIH.handle_api_error(
                str(herr) + ' : ' + res.text, self.jobs_manager)
        else:
            token = res.json()
            self.refresh_token = token['refreshToken']
            self.access_token = token['accessToken']

    def get_previous_zones(self):
        """
        Fetches the currently present zones/sub-zones in the zone collection with source 'UltraDNS'.
        The result is a dictionary with the zones as keys. The value of the key is True if the zone
        is sub_zone.
        """
        zones = self.zones_collection.find(
            {
                '$or': [{
                    'reporting_sources.source': self.source
                }, {
                    'sub_zones.source': self.source
                }]
            }, {
                'reporting_sources': 1,
                'zone': 1,
                'sub_zones': 1
            })
        self.previous_zones = {}
        for zone in zones:
            for reporting_source in zone['reporting_sources']:
                if reporting_source['source'] == self.source:
                    self.previous_zones[zone['zone']] = False
            for sub_zone in zone['sub_zones']:
                if sub_zone['source'] == self.source:
                    self.previous_zones[sub_zone['sub_zone']] = True

    def set_offset(self, result_info):
        """
        Sets the offset value for the next API call to be made to UltraDNS.
        :param result_info: Part of the response containing pagination information.
        """
        # The 'returnedCount' is the number of entries returned by the current API call.
        # Add this to the previous offset to get the new offset. If the new offset equals
        # the 'totalCount' of records, reset the offset to 0 to signal the end of the records.
        self.offset += result_info['returnedCount']
        if self.offset == result_info['totalCount']:
            self.offset = 0

    def __init__(self, invoking_job):
        self.incorrect_response_json_allowed = self.APIH.INCORRECT_RESPONSE_JSON_ALLOWED
        # invoking_job is the job accessing the helper.
        self.jobs_manager = JobsManager.JobsManager(self.MC, invoking_job)
        self.login('password')
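
A worked example of the offset arithmetic in set_offset, assuming a totalCount of 2500 and a page limit of 1000: the offset advances 0 -> 1000 -> 2000 and then resets to 0, which the calling loops treat as the end-of-records signal.

offset = 0
for returned in (1000, 1000, 500):  # returnedCount of each page
    offset += returned
    if offset == 2500:  # totalCount
        offset = 0
assert offset == 0  # a falsy offset ends the callers' while loops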
Example No. 7
class UltraDNSZonesInfo(object):

    UH = UltraDNSHelper.UltraDNSHelper("get_ultradns_zones_info")
    APIH = APIHelper.APIHelper()
    DNS_MGR = DNSManager.DNSManager(UH.MC)
    _logger = None

    def __ultradns_zone_info_response_handler(self, response):
        """
        Handles the API response. Incorrect JSON parsing is allowed upto 20 times post which the
        script exits.
        :param response: Response object
        """
        try:
            response_data = response.json()
            record_sets = response_data["rrSets"]
        except (ValueError, AttributeError) as err:
            if self.UH.incorrect_response_json_allowed > 0:
                self._logger.warning(
                    "Unable to parse response JSON for zone " +
                    self.zone_queried)
                self.UH.incorrect_response_json_allowed -= 1
            else:
                self.APIH.handle_api_error(
                    "Unable to parse response JSON for 20 zones: " + repr(err),
                    self.UH.jobs_manager,
                )
        else:
            for record in record_sets:
                dns_info = dict()
                # The ownerName could be either the FQDN or a relative domain name.
                # If it is an FQDN, it will end in '.'
                fqdn = record["ownerName"] + "." + self.zone_queried
                if record["ownerName"].endswith("."):
                    fqdn = record["ownerName"][:-1]

                # A get_root_domain lookup is performed because UDNS supports sub-zones
                dns_info["zone"] = ZoneManager.get_root_domain(
                    self.zone_queried)
                dns_info["fqdn"] = fqdn
                dns_info["type"] = record["rrtype"].split(" ")[0].lower()
                dns_info["status"] = "unknown"

                for dns in record["rdata"]:
                    if dns_info["type"] in ["a", "ptr"]:
                        try:
                            if IPAddress(dns).is_private():
                                continue
                        except AddrFormatError as err:
                            self._logger.warning("For " + fqdn +
                                                 " encountered: " + str(err))
                            continue

                    if not (dns_info["type"] in ["soa", "txt"
                                                 ]) and dns.endswith("."):
                        dns = dns[:-1]
                    dns_info["value"] = dns
                    dns_info["created"] = datetime.now()
                    self.DNS_MGR.insert_record(dns_info.copy(), self.UH.source)

            self.UH.set_offset(response_data["resultInfo"])

    def __paginated_ultradns_zones_info_request(self):
        """
        Makes paginated API calls to UltraDNS. The API is retried 3 times in case of ConnectionError
        exception before the script exists. The script exists on encountering HTTPError or any other
        RequestException.
        In case a 401 is encountered along with the required token expiration message, another login
        API is sent with grant_type set as 'refresh_token' to retrieve a valid access token.
        """
        url = self.UH.ULTRACONNECT.ZONEINFO.format(
            zone_queried=self.zone_queried)
        try:
            response = self.UH.backoff_api_retry(
                url,
                {
                    "q": "kind:RECORDS",
                    "limit": 2000,
                    "offset": self.UH.offset,
                },
                {"authorization": "Bearer " + self.UH.access_token},
            )
            response.raise_for_status()
        except requests.exceptions.HTTPError as herr:
            message = json.loads(response.text)
            if isinstance(message, list):
                err_msg = message[0]["errorMessage"]
            else:
                err_msg = message["errorMessage"]

            if (response.status_code == 401
                    and err_msg == self.UH.access_token_expiration_error):
                self.UH.login("refresh_token")
                self.__paginated_ultradns_zones_info_request()
            elif response.status_code == 404:
                self._logger.warning("ERROR: Could not find data for: " +
                                     str(self.zone_queried))
            else:
                self.APIH.handle_api_error(herr, self.UH.jobs_manager)
        except requests.exceptions.RequestException as err:
            self.APIH.handle_api_error(err, self.UH.jobs_manager)
        else:
            self.__ultradns_zone_info_response_handler(response)

    def __get_ultradns_zones_info(self):
        """
        Extracts the zone DNS information from UltraDNS in a paginated manner for the UltraDNS zones.
        """
        print("Starting: " + str(datetime.now()))
        self._logger.info("Starting...")

        self.UH.jobs_manager.record_job_start()
        self.UH.get_previous_zones()

        # For querying UltraDNS, we need to query on the exact zones reported
        # hence we query for previous_zones.
        for zone in self.UH.previous_zones:
            self.zone_queried = zone
            self.UH.offset = 0
            self.__paginated_ultradns_zones_info_request()
            while self.UH.offset:
                self.__paginated_ultradns_zones_info_request()

        # Record status
        self.UH.jobs_manager.record_job_complete()
        print("Ending: " + str(datetime.now()))
        self._logger.info("Complete.")

    def __init__(self):
        self._logger = LoggingUtil.create_log(__name__)
        self.__get_ultradns_zones_info()
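
Unlike Examples No. 2 and 3, this handler copes with UltraDNS error bodies that arrive either as a JSON object or as a one-element list of objects. The same normalization as a tiny stand-alone helper (the name is hypothetical):

import json


def extract_error_message(body_text):
    """Return errorMessage from an object body or a list-of-objects body."""
    message = json.loads(body_text)
    if isinstance(message, list):
        message = message[0]
    return message['errorMessage']


assert extract_error_message('{"errorMessage": "x"}') == 'x'
assert extract_error_message('[{"errorMessage": "x"}]') == 'x'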