Example #1
0
    def _staging_reload_check(self, log_success=True) -> List[str]:
        """
        Reload the staging configuration and run the config checks on it.

        :param log_success: when True, log an info message on a successful
            (re)load.
        :return: a list of error strings; an empty list means the staging
            config is valid.
        """
        # Under pytest, skip loading entirely unless a non-default config
        # path was explicitly set: most tests do not need a config file,
        # but an explicit path is still checked.
        in_test = hasattr(sys, '_called_from_test')
        if in_test and self.user_config_path == CONFIG_PATH_DEFAULT:
            self.user_config_staging = DottedDict({})
            return []

        try:
            with open(self.user_config_path) as fh:
                user_config_yaml = yaml.safe_load(fh)
        except OSError as oe:
            return [f'Error opening config file {self.user_config_path}: {oe}']
        except yaml.YAMLError as ye:
            return [f'Error parsing YAML file: {ye}']

        has_irrd_root = isinstance(user_config_yaml, dict) and 'irrd' in user_config_yaml
        if not has_irrd_root:
            return [
                f'Could not find root item "irrd" in config file {self.user_config_path}'
            ]
        self.user_config_staging = DottedDict(user_config_yaml['irrd'])

        errors = self._check_staging_config()
        if log_success and not errors:
            logger.info(
                f'Configuration successfully (re)loaded from {self.user_config_path} in PID {os.getpid()}'
            )
        return errors
Example #2
0
    def __init__(self, user_config_path: Optional[str] = None, commit=True):
        """
        Load the default config and load and check the user provided config.
        If a logfile was specified, direct logs there.

        :param user_config_path: path to the user config file; falls back to
            CONFIG_PATH_DEFAULT when not given.
        :param commit: when True, activate the validated staging config and
            apply logging settings; when False, only load and validate.
        :raises ConfigurationError: if the user configuration has errors.
        """
        self.user_config_path = user_config_path if user_config_path else CONFIG_PATH_DEFAULT
        default_config_path = str(
            Path(__file__).resolve().parents[0] / 'default_config.yaml')
        # Use a context manager so the file handle is closed deterministically
        # instead of being leaked until garbage collection.
        with open(default_config_path) as fh:
            default_config_yaml = yaml.safe_load(fh)
        self.default_config = DottedDict(default_config_yaml['irrd'])

        errors = self._staging_reload_check(log_success=False)
        if errors:
            raise ConfigurationError(
                f'Errors found in configuration, unable to start: {errors}')

        if commit:
            self._commit_staging()

            logfile_path = self.get_setting_live('log.logfile_path')
            if logfile_path:
                LOGGING['handlers']['file'] = {   # type:ignore
                    'class': 'logging.handlers.WatchedFileHandler',
                    'filename': logfile_path,
                    'formatter': 'verbose',
                }
                # noinspection PyTypeChecker
                LOGGING['loggers']['']['handlers'] = ['file']  # type:ignore
                logging.config.dictConfig(LOGGING)

            # Re-commit to apply loglevel
            self._commit_staging()
Example #3
0
def update_constants(constants: dict[str, Any]) -> dict[str, Any]:
    """
    Build the full constants mapping for every key declared in TYPES.

    Values come from *constants* (dotted keys supported) when present,
    otherwise from the stored constant; each value is coerced to and
    checked against its declared type.

    :param constants: user-supplied constants; a "types" entry is dropped.
    :return: a plain (non-dotted) nested dict of validated constants.
    :raises TypeError: if a value cannot be coerced to its declared type.
    """
    if "types" in constants:
        del constants["types"]
    dotted_constants = DottedCollection.factory(constants)

    updated_constants = DottedDict()
    for key, type_ in TYPES.items():
        try:
            # do not write "key in .keys()", it doesn't check for dotted keys.
            if dotted_constants.get(key, _MARKER) is not _MARKER:
                value = dotted_constants[key]
            else:
                # `key` is always a member of TYPES here (we iterate
                # TYPES.items()), so the old `elif key in TYPES.keys()`
                # check was always true and its else-branch unreachable.
                value = get_constant(key)
        except KeyError:
            value = default_value(type_)

        try:
            value = coerce(value, type_)
        except TypeError as exc:
            # The previous message was malformed ("value{} "); chain the
            # original error for context.
            raise TypeError(f"Wrong type for key: {key}, value: {value}") from exc
        check_type(key, value, type_)
        updated_constants[key] = value

    return updated_constants.to_python()
Example #4
0
    def __init__(self):
        """
        Load the default config and load and check the user provided config.
        If a logfile was specified, direct logs there.

        :raises ConfigurationError: if the user configuration has errors.
        """
        default_config_path = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), 'default_config.yaml')
        # Use a context manager so the file handle is closed deterministically
        # instead of being leaked until garbage collection.
        with open(default_config_path) as fh:
            default_config_yaml = yaml.safe_load(fh)
        self.default_config = DottedDict(default_config_yaml['irrd'])

        errors = self._staging_reload_check()
        if errors:
            raise ConfigurationError(
                f'Errors found in configuration, unable to start: {errors}')
        self._commit_staging()

        logfile_path = self.get_setting_live('log.logfile_path')
        if logfile_path:
            LOGGING['handlers']['file'] = {
                'class': 'logging.handlers.WatchedFileHandler',
                'filename': logfile_path,
                'formatter': 'verbose',
            }
            # noinspection PyTypeChecker
            LOGGING['loggers']['']['handlers'] = ['file']
            logging.config.dictConfig(LOGGING)
Example #5
0
def no_implicit_int64(data):
    """
    Give every integer field listed in `json_objects` an explicit
    "uint64" format when it has none, and return the result as a
    plain python structure. Changed nodes are printed for inspection.
    """
    dotted = DottedDict(data)
    for path in json_objects:
        node = dotted[path]
        is_bare_integer = node.get("type") == "integer" and node.get("format") is None
        if is_bare_integer:
            node.format = "uint64"
            print(path, node)

    return dotted.to_python()
Example #6
0
def get_schedule_a_results():
    """
    Fetch schedule A contribution results from the FEC API for committee
    C00580100, following keyset pagination via `last_indexes`.

    :return: a list of raw result dicts. NOTE: currently capped at two
        pages by the `loop_count == 2` break below.
    """
    results = []

    api_key = api_config
    per_page = 100
    committee_id = 'C00580100'  # DJT for president
    sort = '-contribution_receipt_date'
    parameters = '?two_year_transaction_period=2020&two_year_transaction_period=2018&api_key={}&per_page={}&committee_id={}&sort={}'.format(
        api_key, per_page, committee_id, sort)

    last_indexes = True
    loop_count = 0

    while last_indexes is not None:
        # Need to limit this to 120 calls per minute
        response = requests.get(
            'https://api.open.fec.gov/v1/schedules/schedule_a/{}'.format(
                parameters))
        json_response = response.json()

        pagination = json_response['pagination']

        # Collect this page's results before inspecting pagination, so the
        # final page is not lost when last_indexes is absent.
        results += json_response['results']

        last_indexes = pagination.get('last_indexes')
        if last_indexes is None:
            # Final page: no keyset to continue from. The old code crashed
            # here with DottedDict(None).
            break
        last_indexes_dict = DottedDict(last_indexes)

        last_index = last_indexes_dict.get('last_index')
        last_contribution_receipt_date = last_indexes_dict.get(
            'last_contribution_receipt_date')

        if loop_count == 0:
            parameters = parameters + '&last_index={}'.format(
                last_index) + '&last_contribution_receipt_date={}'.format(
                    last_contribution_receipt_date)
        else:
            parameters = '?two_year_transaction_period=2020&two_year_transaction_period=2018&api_key={}&per_page={}&committee_id={}&sort={}&last_index={}&last_contribution_receipt_date={}'.format(
                api_key, per_page, committee_id, sort, last_index,
                last_contribution_receipt_date)

        loop_count += 1
        if loop_count == 2:
            break
        print(loop_count)
        print(pagination)
        time.sleep(1)

    return results
Example #7
0
def schedule_b_results_to_rows(results):
    """
    Converts schedule_b result object records into a list of google sheet rows.

    :param results: list of nested result dicts from the FEC schedule_b API.
    :return: the column-header row followed by one row (list of cell
        values) per result.
    """

    # Create an empty list of rows.  Each element in this list is a row.
    rows = []

    # Iterate over each result object in the list of results.
    for result in results:
        # Convert the result to a DottedDict so we can use nested dotted keys
        # instead of having to look deeper into the nested structure.
        # This lets us do e.g.
        # result['committee.name'] instead of result['committee']['name']
        result = DottedDict(result)

        # Create an empty row.  Each element in this will be a cell value in google sheets.
        row = []
        # Iterate over each of our result keys in order.
        for result_key in dotted_result_keys_to_column_names.keys():
            # Use the dotted key to lookup the value we want and append it to the row.
            row.append(result[result_key])

        # Store the row we just created in our larger list of rows.
        rows.append(row)

    # Return all of the rows with the column name headers prepended as the first row.
    # BUG FIX: the header must be a single first row; the previous
    # `column_header_row + rows` flattened each header cell into its own row.
    column_header_row = list(dotted_result_keys_to_column_names.values())
    return [column_header_row] + rows
Example #8
0
    async def _async_update_data(self):
        """Fetch data from FordPass."""
        try:
            async with async_timeout.timeout(30):
                # The vehicle API is blocking, so run both calls in the
                # executor.
                status = await self._hass.async_add_executor_job(
                    self.vehicle.status  # Fetch new status
                )
                status["guardstatus"] = await self._hass.async_add_executor_job(
                    self.vehicle.guardStatus  # Fetch new status
                )

                # If data has now been fetched but was previously unavailable, log and reset
                if not self._available:
                    _LOGGER.info("Restored connection to FordPass for %s",
                                 self.vin)
                    self._available = True

                return DottedDict(status)
        except Exception as ex:
            # Mark as unavailable and surface the failure to the update
            # coordinator, chaining the original exception.
            self._available = False  # Mark as unavailable
            _LOGGER.warning(str(ex))
            _LOGGER.warning("Error communicating with FordPass for %s",
                            self.vin)
            raise UpdateFailed(
                f"Error communicating with FordPass for {self.vin}") from ex
Example #9
0
def _print_table_from(print_obj, fields):
    """
    Print *print_obj* (a dict or a list of dicts) as a fixed-width table.

    :param print_obj: a record or a list of records; dotted keys supported.
    :param fields: comma-separated field names, or None to derive them
        from the first record.
    """
    if isinstance(print_obj, dict):
        print_obj = [print_obj]
    arr = DottedCollection.factory(print_obj)
    if fields is None:
        if not arr:
            # Nothing to print and no record to derive field names from;
            # the old code raised IndexError on arr[0] here.
            return
        fields = list(arr[0].keys())
    else:
        fields = fields.split(",")
    col_lengths = []
    for col in fields:
        try:
            # Generator avoids building a throwaway list just for max().
            col_lengths.append(
                max(len(str(DottedDict(item)[col])) for item in arr
                    if col in item))
        except ValueError:
            # we don't have a "col" field or it's not used.
            # and we can't use 0 as width cause this will cause a weird
            # exception
            col_lengths.append(1)
            print(f"WARNING: field {col} either never filled or non-existent.",
                  file=sys.stderr)
    for row in arr:
        for col_idx, col in enumerate(fields):
            val = str(row[col]) if col in row else ""
            print(f"{val:{col_lengths[col_idx]}}  ", end="")
        print("")
Example #10
0
def _dict_flat_to_nested(flat_dict, defaults=None):
    """
    Takes a "flat" dictionary, whose keys are of the form "one.two.three".
    It will return a nested dictionary with this content:
    {one: {two: {three: value}}}.

    :param flat_dict: The dictionary to convert to a nested one
    :param defaults: Default values for nested dict in (with flat (!) keys)
    :return: A nested python dictionary
    """
    nested = DottedDict()
    # Apply defaults first, then the actual values, so that explicit
    # entries in flat_dict override the defaults.
    for source in (defaults or {}, flat_dict):
        for key, val in source.items():
            nested[key] = val
    return nested.to_python()
Example #11
0
    def _staging_reload_check(self) -> List[str]:
        """
        Reload the staging configuration, and run the config checks on it.
        Returns a list of errors if any were found, or an empty list if the
        staging config is valid.
        """
        # While in testing, Configuration does not demand a valid config file
        # in IRRD_CONFIG_PATH_ENV. This simplifies test setup, as most tests
        # do not need it. If IRRD_CONFIG_PATH_ENV is set, it is checked,
        # and the check is forced with IRRD_CONFIG_CHECK_FORCE_ENV (to test
        # the error message for the empty environment variable).
        if all([
                hasattr(sys, '_called_from_test'), IRRD_CONFIG_PATH_ENV
                not in os.environ, IRRD_CONFIG_CHECK_FORCE_ENV
                not in os.environ
        ]):
            self.user_config_staging = DottedDict({})
            return []

        try:
            user_config_path = os.environ[IRRD_CONFIG_PATH_ENV]
        except KeyError:
            return [f'Environment variable {IRRD_CONFIG_PATH_ENV} not set.']

        try:
            # Use a context manager so the config file handle is closed
            # deterministically instead of being leaked.
            with open(user_config_path) as fh:
                user_config_yaml = yaml.safe_load(fh)
        except OSError as oe:
            return [f'Error opening config file {user_config_path}: {oe}']
        except yaml.YAMLError as ye:
            return [f'Error parsing YAML file: {ye}']

        if not isinstance(user_config_yaml,
                          dict) or 'irrd' not in user_config_yaml:
            return [
                f'Could not find root item "irrd" in config file {user_config_path}'
            ]
        self.user_config_staging = DottedDict(user_config_yaml['irrd'])

        errors = self._check_staging_config()
        if not errors:
            logger.info(
                f'Configuration successfully (re)loaded from {user_config_path}'
            )
        return errors
Example #12
0
        def process_line(item):
            """
            Process one mapping row (*item*, a dict) into `local_mappings`.

            NOTE(review): relies on names from the enclosing scope —
            `mappings`, `local_mappings`, `issues`, `r` (presumably the
            current row number; verify against the caller) and
            `create_dictionary`. Appends an ERROR Issue and returns early
            on duplicate mapping names or duplicate code mappings.
            """
            # Read variables
            mh_src_dataset = item.get("source_dataset", None)
            mh_src_hierarchy = item.get("source_hierarchy", None)
            mh_src_code = item.get("source_code", None)
            mh_dst_hierarchy = item.get("destination_hierarchy", None)
            mh_dst_code = item.get("destination_code", None)
            mh_weight = item.get("weight", 1.0)

            # Mapping name: "<dataset>.<src hierarchy> -> <dst hierarchy>";
            # the dataset prefix is omitted when no source dataset is given.
            name = ((mh_src_dataset + ".") if mh_src_dataset else
                    "") + mh_src_hierarchy + " -> " + mh_dst_hierarchy

            # A name already registered globally is a duplicate declaration.
            if name in mappings:
                issues.append(
                    Issue(itype=IType.ERROR,
                          description="The mapping '" + name +
                          "' has been declared previously. Skipped.",
                          location=IssueLocation(sheet_name=name,
                                                 row=r,
                                                 column=None)))
                return

            # Reuse the in-progress mapping for this name, or create and
            # initialize a new one.
            if name in local_mappings:
                d = local_mappings[name]
            else:
                d = DottedDict()
                local_mappings[name] = d
                d.name = name
                d.origin_dataset = mh_src_dataset
                d.origin_hierarchy = mh_src_hierarchy
                d.destination_hierarchy = mh_dst_hierarchy
                d.mapping = create_dictionary()

            # Specific code
            if mh_src_code in d.mapping:
                to_dict = d.mapping[mh_src_code]
            else:
                to_dict = create_dictionary()
            # Each (source code, destination code) pair may be defined once.
            if mh_dst_code in to_dict:
                issues.append(
                    Issue(itype=IType.ERROR,
                          description="The mapping of '" + mh_src_code +
                          "' into '" + mh_dst_code +
                          "' has been already defined",
                          location=IssueLocation(sheet_name=name,
                                                 row=r,
                                                 column=None)))
                return
            else:
                to_dict[mh_dst_code] = (
                    mh_weight, r
                )  # NOTE: This could be an object instead of just a FLOAT or expression
                d.mapping[mh_src_code] = to_dict
Example #13
0
 def test_bad_json(self):
     """Invalid dotted keys and malformed JSON must raise ValueError."""
     bad_constructions = [
         lambda: DottedCollection.factory({"bad.key": "value"}),
         lambda: DottedCollection.load_json('{"bad.key": "value"}'),
         lambda: DottedDict({"bad.key": "value"}),
         lambda: DottedList([{}, {"bad.key": "value"}]),
         lambda: DottedCollection.load_json('{"key": "value"'),
         lambda: DottedCollection.load_json('value'),
     ]
     for construct in bad_constructions:
         with self.assertRaises(ValueError):
             construct()
Example #14
0
def add_references_to_definitions(data):
    """
    Replace well-known leaf objects in *data* with $ref pointers into
    #/definitions, returning the result as a plain python structure.
    Changed paths are printed for inspection.
    """
    # Dispatch table keyed on the last path segment, replacing the
    # original if/elif chain.
    ref_by_last_key = {
        'fee': {"$ref": "#/definitions/Fee"},
        'balance': {"$ref": "#/definitions/Balance"},
        'amount': {"$ref": "#/definitions/Amount"},
        'channel_reserve': {"$ref": "#/definitions/Amount"},
        'name_salt': {"$ref": "#/definitions/NameSalt"},
        'gas': {"$ref": "#/definitions/Gas"},
        'gas_price': {"$ref": "#/definitions/GasPrice"},
    }
    swaggerD = DottedDict(data)
    for path in json_objects:
        last_key = path.split('.')[-1]
        previous_value = swaggerD[path]
        if last_key in ref_by_last_key:
            swaggerD[path] = ref_by_last_key[last_key]
        # I want to see what was changed.
        if swaggerD[path] != previous_value:
            print(path, swaggerD[path])
    return swaggerD.to_python()
Example #15
0
def schedule_b_results_to_rows(results):
    """
    Convert schedule_b result records into google-sheet rows, preceded by
    the column-header row. Missing keys become None; other values are
    stringified with surrounding brackets stripped.
    """
    header = list(dotted_result_keys_to_column_names.values())
    rows = []
    for raw_result in results:
        # DottedDict allows nested lookups via dotted keys.
        result = DottedDict(raw_result)
        row = [
            str(result[key]).strip('[]') if key in result else None
            for key in dotted_result_keys_to_column_names.keys()
        ]
        rows.append(row)
    return [header] + rows
Example #16
0
import json
import logging
import os
import re
from typing import List, Dict, TextIO

from dotted.collection import DottedDict
from ruamel import yaml
from orca.schema.validation import validate
from orca.core.errors import ConfigurationError
log = logging.getLogger(__name__)

# all payload data during processing. must be global!
# `task` holds per-task payload data and `var` holds user variables;
# NOTE(review): both are presumably mutated by OrcaConfig during
# processing — confirm against the rest of the module.
task = DottedDict()
var = DottedDict()


class OrcaConfig(object):
    """ Orca configuration class"""
    @staticmethod
    def __process_config(file: TextIO) -> Dict:
        try:
            # first pass: start by validating the yaml file against the schema version.
            data = validate(file)
            # processing single quote string literals: " ' '
            repl = r"^(?P<key>\s*[^#:]*):\s+(?P<value>['].*['])\s*$"
            fixed_data = re.sub(repl,
                                '\g<key>: "\g<value>"',
                                data,
                                flags=re.MULTILINE)
            log.debug("Processed yaml: {0}".format(fixed_data))
Example #17
0
class Configuration:
    """
    The Configuration class stores the current IRRD configuration,
    checks the validity of the settings, and offers graceful reloads.
    """
    # Staging config: loaded and validated, but not yet active.
    user_config_staging: DottedDict
    # Live config: the currently active configuration.
    user_config_live: DottedDict

    def __init__(self, user_config_path: Optional[str] = None, commit=True):
        """
        Load the default config and load and check the user provided config.
        If a logfile was specified, direct logs there.

        :param user_config_path: path to the user config file; falls back to
            CONFIG_PATH_DEFAULT when not given.
        :param commit: when True, activate the validated staging config and
            apply logging settings; when False, only load and validate.
        :raises ConfigurationError: if the user configuration has errors.
        """
        self.user_config_path = user_config_path if user_config_path else CONFIG_PATH_DEFAULT
        default_config_path = str(
            Path(__file__).resolve().parents[0] / 'default_config.yaml')
        # Use a context manager so the file handle is closed deterministically
        # instead of being leaked until garbage collection.
        with open(default_config_path) as fh:
            default_config_yaml = yaml.safe_load(fh)
        self.default_config = DottedDict(default_config_yaml['irrd'])

        errors = self._staging_reload_check(log_success=False)
        if errors:
            raise ConfigurationError(
                f'Errors found in configuration, unable to start: {errors}')

        if commit:
            self._commit_staging()

            logfile_path = self.get_setting_live('log.logfile_path')
            if logfile_path:
                LOGGING['handlers']['file'] = {   # type:ignore
                    'class': 'logging.handlers.WatchedFileHandler',
                    'filename': logfile_path,
                    'formatter': 'verbose',
                }
                # noinspection PyTypeChecker
                LOGGING['loggers']['']['handlers'] = ['file']  # type:ignore
                logging.config.dictConfig(LOGGING)

            # Re-commit to apply loglevel
            self._commit_staging()

    def get_setting_live(self, setting_name: str, default: Any = None) -> Any:
        """
        Get a setting from the live config.
        In order, this will look in:
        - A env variable, uppercase and dots replaced by underscores, e.g.
          IRRD_SERVER_WHOIS_INTERFACE
        - The testing_overrides DottedDict
        - The live user config.
        - The default config.

        If it is not found in any, the value of the default parameter
        is returned, which is None by default.
        """
        env_key = 'IRRD_' + setting_name.upper().replace('.', '_')
        if env_key in os.environ:
            return os.environ[env_key]
        if testing_overrides:
            try:
                return testing_overrides[setting_name]
            except KeyError:
                pass
        try:
            return self.user_config_live[setting_name]
        except KeyError:
            return self.default_config.get(setting_name, default)

    def reload(self) -> bool:
        """
        Reload the configuration, if it passes the checks.
        Returns True when the new config was activated; on errors the
        current settings are kept and False is returned.
        """
        errors = self._staging_reload_check()
        if errors:
            logger.error(
                f'Errors found in configuration, continuing with current settings: {errors}'
            )
            return False

        self._commit_staging()
        return True

    def _commit_staging(self) -> None:
        """
        Activate the current staging config as the live config.
        Also applies the configured log level (forced to DEBUG under
        pytest).
        """
        self.user_config_live = self.user_config_staging
        logging.getLogger('').setLevel(
            self.get_setting_live('log.level', default='INFO'))
        if hasattr(sys, '_called_from_test'):
            logging.getLogger('').setLevel('DEBUG')

    def _staging_reload_check(self, log_success=True) -> List[str]:
        """
        Reload the staging configuration, and run the config checks on it.
        Returns a list of errors if any were found, or an empty list if the
        staging config is valid.
        """
        # While in testing, Configuration does not demand a valid config file
        # This simplifies test setup, as most tests do not need it.
        # If a non-default path is set during testing, it is still checked.
        if hasattr(sys, '_called_from_test'
                   ) and self.user_config_path == CONFIG_PATH_DEFAULT:
            self.user_config_staging = DottedDict({})
            return []

        try:
            with open(self.user_config_path) as fh:
                user_config_yaml = yaml.safe_load(fh)
        except OSError as oe:
            return [f'Error opening config file {self.user_config_path}: {oe}']
        except yaml.YAMLError as ye:
            return [f'Error parsing YAML file: {ye}']

        if not isinstance(user_config_yaml,
                          dict) or 'irrd' not in user_config_yaml:
            return [
                f'Could not find root item "irrd" in config file {self.user_config_path}'
            ]
        self.user_config_staging = DottedDict(user_config_yaml['irrd'])

        errors = self._check_staging_config()
        if not errors and log_success:
            logger.info(
                f'Configuration successfully (re)loaded from {self.user_config_path} in PID {os.getpid()}'
            )
        return errors

    def _check_staging_config(self) -> List[str]:
        """
        Validate the current staging configuration.
        Returns a list of any errors, or an empty list for a valid config.
        """
        errors = []
        config = self.user_config_staging

        if not self._check_is_str(config, 'database_url'):
            errors.append('Setting database_url is required.')

        if not self._check_is_str(config, 'redis_url'):
            errors.append('Setting redis_url is required.')

        if not self._check_is_str(config, 'piddir') or not os.path.isdir(
                config['piddir']):
            errors.append(
                'Setting piddir is required and must point to an existing directory.'
            )

        # Access lists referenced by the servers; each must be defined below.
        expected_access_lists = {
            config.get('server.whois.access_list'),
            config.get('server.http.access_list'),
        }

        if not self._check_is_str(
                config, 'email.from') or '@' not in config.get('email.from'):
            errors.append(
                'Setting email.from is required and must be an email address.')
        if not self._check_is_str(config, 'email.smtp'):
            errors.append('Setting email.smtp is required.')
        if not self._check_is_str(config, 'email.recipient_override', required=False) \
                or '@' not in config.get('email.recipient_override', '@'):
            errors.append(
                'Setting email.recipient_override must be an email address if set.'
            )

        string_not_required = [
            'email.footer', 'server.whois.access_list',
            'server.http.access_list', 'rpki.notify_invalid_subject',
            'rpki.notify_invalid_header', 'rpki.slurm_source'
        ]
        for setting in string_not_required:
            if not self._check_is_str(config, setting, required=False):
                errors.append(
                    f'Setting {setting} must be a string, if defined.')

        if not self._check_is_str(config, 'auth.gnupg_keyring'):
            errors.append('Setting auth.gnupg_keyring is required.')

        access_lists = set(config.get('access_lists', {}).keys())
        unresolved_access_lists = {
            x
            for x in expected_access_lists.difference(access_lists)
            if x and isinstance(x, str)
        }
        if unresolved_access_lists:
            errors.append(
                f'Access lists {", ".join(unresolved_access_lists)} referenced in settings, but not defined.'
            )

        for name, access_list in config.get('access_lists', {}).items():
            for item in access_list:
                try:
                    IP(item)
                except ValueError as ve:
                    errors.append(f'Invalid item in access list {name}: {ve}.')

        known_sources = set(config.get('sources', {}).keys())
        # RPKI-aware mode is on by default (non-empty default roa_source);
        # it is disabled only when rpki.roa_source is explicitly set falsy.
        if config.get('rpki.roa_source',
                      'https://rpki.gin.ntt.net/api/export.json'):
            known_sources.add(RPKI_IRR_PSEUDO_SOURCE)
            if config.get('rpki.notify_invalid_enabled') is None:
                errors.append(
                    'RPKI-aware mode is enabled, but rpki.notify_invalid_enabled '
                    'is not set. Set to true or false. DANGER: care is required with '
                    'this setting in testing setups with live data, as it may send bulk '
                    'emails to real resource contacts unless email.recipient_override '
                    'is also set. Read documentation carefully.')

        unknown_default_sources = set(config.get('sources_default',
                                                 [])).difference(known_sources)
        if unknown_default_sources:
            errors.append(
                f'Setting sources_default contains unknown sources: {", ".join(unknown_default_sources)}'
            )

        if not str(config.get('rpki.roa_import_timer', '0')).isnumeric():
            errors.append(
                'Setting rpki.roa_import_timer must be set to a number.')

        for name, details in config.get('sources', {}).items():
            if config.get(
                    'rpki.roa_source') and name == RPKI_IRR_PSEUDO_SOURCE:
                errors.append(
                    f'Setting sources contains reserved source name: {RPKI_IRR_PSEUDO_SOURCE}'
                )
            if not SOURCE_NAME_RE.match(name):
                errors.append(f'Invalid source name: {name}')

            nrtm_mirror = details.get('nrtm_host') and details.get(
                'import_serial_source')
            if details.get('keep_journal') and not (
                    nrtm_mirror or details.get('authoritative')):
                errors.append(
                    f'Setting keep_journal for source {name} can not be enabled unless either authoritative '
                    f'is enabled, or all three of nrtm_host, nrtm_port and import_serial_source.'
                )
            if details.get(
                    'nrtm_host') and not details.get('import_serial_source'):
                errors.append(
                    f'Setting nrtm_host for source {name} can not be enabled without setting '
                    f'import_serial_source.')

            if details.get('authoritative') and (details.get('nrtm_host') or
                                                 details.get('import_source')):
                errors.append(
                    f'Setting authoritative for source {name} can not be enabled when either '
                    f'nrtm_host or import_source are set.')

            if not str(details.get('nrtm_port', '43')).isnumeric():
                errors.append(
                    f'Setting nrtm_port for source {name} must be a number.')
            if not str(details.get('import_timer', '0')).isnumeric():
                errors.append(
                    f'Setting import_timer for source {name} must be a number.'
                )
            if not str(details.get('export_timer', '0')).isnumeric():
                errors.append(
                    f'Setting export_timer for source {name} must be a number.'
                )

        if config.get('log.level') and not config.get('log.level') in [
                'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
        ]:
            errors.append(
                f'Invalid log.level: {config.get("log.level")}. '
                f'Valid settings for log.level are `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL`.'
            )

        return errors

    def _check_is_str(self, config, key, required=True):
        """
        Check that config[key] is a non-empty string (when required) or
        either unset or a string (when not required).
        """
        if required:
            return config.get(key) and isinstance(config.get(key), str)
        return config.get(key) is None or isinstance(config.get(key), str)
Example #18
0
# Demonstrate DottedList indexing: plain integer index vs. dotted string
# paths into nested lists.
# NOTE(review): `dotted_arr` is defined earlier in the file, outside
# this excerpt — presumably a DottedList; confirm upstream.
print(dotted_arr[0])
print(dotted_arr['0.0'])

print(dotted_arr['1'])
print(dotted_arr['1.1'])

print(dotted_arr[2])

# Appending and index-based extension both grow the list.
dotted_arr.append(11)
print(dotted_arr)

dotted_arr[len(dotted_arr)] = 12
print(dotted_arr)

# DottedDict: nested access via dotted string keys...
dotted_dict = DottedDict({'hello': {'world': {'python': '3'}}})
print(dotted_dict['hello'])
print(dotted_dict['hello.world'])
print(dotted_dict['hello.world.python'])

# ...and equivalently via attribute access.
print(dotted_dict.hello)
print(dotted_dict.hello.world)
print(dotted_dict.hello.world.python)

# DottedCollection.factory builds the right dotted type for mixed
# dict/list structures.
dotted_dict2 = DottedCollection.factory(
    {'hello': [{
        'world': {
            'python': ['3', '7', '3']
        }
    }]})
print(dotted_dict2['hello'][0]['world']['python'][0])
Example #19
0
    def test_dotteddict(self):
        """DottedDict Tests"""
        obj = DottedDict()

        self.assertNotIsInstance(obj, dict)

        # assertRaisesRegexp is a deprecated alias removed in Python 3.12;
        # use assertRaisesRegex.
        with self.assertRaisesRegex(
                KeyError,
                'DottedDict keys must be str or unicode'):
            obj[0] = 0

        with self.assertRaisesRegex(
                KeyError,
                'DottedDict keys must be str or unicode'):
            obj[1.0] = 0

        obj['0'] = 0

        self.assertReprsEqual(repr(obj), "{'0': 0}")

        obj = DottedDict()
        obj.update({'hello': 'world'})

        self.assertReprsEqual(repr(obj), "{'hello': 'world'}")

        obj.update({'hello': {'world': {'wide': 'web'}}})

        self.assertReprsEqual(repr(obj),
                              "{'hello': {'world': {'wide': 'web'}}}")

        self.assertIsInstance(obj['hello'], DottedDict)
        self.assertIsInstance(obj['hello.world'], DottedDict)
        self.assertIsInstance(obj['hello.world.wide'], str)

        self.assertEqual(obj['hello.world'], obj['hello']['world'])
        self.assertEqual(obj['hello.world.wide'], obj['hello']['world']['wide'])

        obj['hello.world'].update({'free': 'tour'})

        self.assertReprsEqual(
            repr(obj),
            "{'hello': {'world': {'wide': 'web', 'free': 'tour'}}}"
        )

        # Access via __getattr__ and __setattr__

        self.assertEqual(obj.hello.world.wide, 'web')
        self.assertEqual(obj.hello.world.free, 'tour')

        obj.hello.world.wide = 'tour'
        obj.hello.world.free = 'web'

        self.assertEqual(obj.hello.world.wide, 'tour')
        self.assertEqual(obj.hello.world.free, 'web')

        self.assertReprsEqual(
            repr(obj),
            "{'hello': {'world': {'wide': 'tour', 'free': 'web'}}}"
        )

        obj.hello.world.wide = 'web'
        obj.hello.world.free = 'tour'

        self.assertReprsEqual(
            repr(obj),
            "{'hello': {'world': {'wide': 'web', 'free': 'tour'}}}"
        )

        del obj['hello.world.free']

        self.assertReprsEqual(repr(obj),
                              "{'hello': {'world': {'wide': 'web'}}}")

        del obj['hello']['world']['wide']

        self.assertReprsEqual(repr(obj), "{'hello': {'world': {}}}")

        obj['hello']['world.wide'] = 'web'

        self.assertReprsEqual(repr(obj),
                              "{'hello': {'world': {'wide': 'web'}}}")

        del obj['hello']['world.wide']

        self.assertReprsEqual(repr(obj), "{'hello': {'world': {}}}")

        obj['hello'] = 'goodbye'

        self.assertReprsEqual(repr(obj), "{'hello': 'goodbye'}")

        del obj.hello

        self.assertReprsEqual(repr(obj), "{}")

        obj.hello = 'goodbye'

        self.assertReprsEqual(repr(obj), "{'hello': 'goodbye'}")

        python_obj = obj.to_python()

        self.assertReprsEqual(repr(python_obj), repr(obj))
        self.assertIsInstance(python_obj, dict)
        self.assertNotIsInstance(obj, dict)
Example #20
0
 def _override(override_data: Dict[Any, Any]):
     # Patch the global irrd.conf.testing_overrides with a DottedDict of the
     # given data. `monkeypatch` is captured from the enclosing scope —
     # presumably the pytest fixture, so the patch is undone after the test.
     monkeypatch.setattr('irrd.conf.testing_overrides', DottedDict(override_data))
Example #21
0
class Configuration:
    """
    The Configuration class stores the current IRRD configuration,
    checks the validity of the settings, and offers graceful reloads.
    """
    user_config_staging: DottedDict
    user_config_live: DottedDict

    def __init__(self):
        """
        Load the default config and load and check the user provided config.
        If a logfile was specified, direct logs there.

        Raises ConfigurationError if the user config fails validation.
        """
        default_config_path = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), 'default_config.yaml')
        # Context manager ensures the file handle is closed deterministically
        # instead of relying on garbage collection.
        with open(default_config_path) as fh:
            default_config_yaml = yaml.safe_load(fh)
        self.default_config = DottedDict(default_config_yaml['irrd'])

        errors = self._staging_reload_check()
        if errors:
            raise ConfigurationError(
                f'Errors found in configuration, unable to start: {errors}')
        self._commit_staging()

        logfile_path = self.get_setting_live('log.logfile_path')
        if logfile_path:
            LOGGING['handlers']['file'] = {
                'class': 'logging.handlers.WatchedFileHandler',
                'filename': logfile_path,
                'formatter': 'verbose',
            }
            # noinspection PyTypeChecker
            LOGGING['loggers']['']['handlers'] = ['file']
            logging.config.dictConfig(LOGGING)

    def get_setting_live(self, setting_name: str, default: Any = None) -> Any:
        """
        Get a setting from the live config.
        In order, this will look in:
        - A env variable, uppercase and dots replaced by underscores, e.g.
          IRRD_SERVER_WHOIS_INTERFACE
        - The testing_overrides DottedDict
        - The live user config.
        - The default config.

        If it is not found in any, the value of the default parameter
        is returned, which is None by default.
        """
        env_key = 'IRRD_' + setting_name.upper().replace('.', '_')
        if env_key in os.environ:
            return os.environ[env_key]
        if testing_overrides:
            try:
                return testing_overrides[setting_name]
            except KeyError:
                pass
        try:
            return self.user_config_live[setting_name]
        except KeyError:
            return self.default_config.get(setting_name, default)

    def reload(self) -> bool:
        """
        Reload the configuration, if it passes the checks.
        Returns True if the new config was activated; returns False and keeps
        the current live config if the new config had errors.
        """
        errors = self._staging_reload_check()
        if errors:
            logger.error(
                f'Errors found in configuration, continuing with current settings: {errors}'
            )
            return False

        self._commit_staging()
        return True

    def _commit_staging(self):
        """
        Activate the current staging config as the live config.
        """
        self.user_config_live = self.user_config_staging
        # Apply the (possibly changed) log level immediately.
        logging.getLogger('').setLevel(
            self.get_setting_live('log.level', default='INFO'))

    def _staging_reload_check(self) -> List[str]:
        """
        Reload the staging configuration, and run the config checks on it.
        Returns a list of errors if any were found, or an empty list if the
        staging config is valid.
        """
        # While in testing, Configuration does not demand a valid config file
        # in IRRD_CONFIG_PATH_ENV. This simplifies test setup, as most tests
        # do not need it. If IRRD_CONFIG_PATH_ENV is set, it is checked,
        # and the check is forced with IRRD_CONFIG_CHECK_FORCE_ENV (to test
        # the error message for the empty environment variable).
        if all([
                hasattr(sys, '_called_from_test'), IRRD_CONFIG_PATH_ENV
                not in os.environ, IRRD_CONFIG_CHECK_FORCE_ENV
                not in os.environ
        ]):
            self.user_config_staging = DottedDict({})
            return []

        try:
            user_config_path = os.environ[IRRD_CONFIG_PATH_ENV]
        except KeyError:
            return [f'Environment variable {IRRD_CONFIG_PATH_ENV} not set.']

        try:
            # Context manager closes the handle even when YAML parsing raises;
            # the bare open() previously leaked the file object.
            with open(user_config_path) as fh:
                user_config_yaml = yaml.safe_load(fh)
        except OSError as oe:
            return [f'Error opening config file {user_config_path}: {oe}']
        except yaml.YAMLError as ye:
            return [f'Error parsing YAML file: {ye}']

        if not isinstance(user_config_yaml,
                          dict) or 'irrd' not in user_config_yaml:
            return [
                f'Could not find root item "irrd" in config file {user_config_path}'
            ]
        self.user_config_staging = DottedDict(user_config_yaml['irrd'])

        errors = self._check_staging_config()
        if not errors:
            logger.info(
                f'Configuration successfully (re)loaded from {user_config_path}'
            )
        return errors

    def _check_staging_config(self) -> List[str]:
        """
        Validate the current staging configuration.
        Returns a list of any errors, or an empty list for a valid config.
        """
        errors = []

        config = self.user_config_staging

        if not self._check_is_str(config, 'database_url'):
            errors.append('Setting database_url is required.')

        # Access lists referenced by these settings must be defined below.
        expected_access_lists = {
            config.get('server.whois.access_list'),
            config.get('server.http.access_list'),
        }

        if not self._check_is_str(
                config, 'email.from') or '@' not in config.get('email.from'):
            errors.append(
                'Setting email.from is required and must be an email address.'
            )
        if not self._check_is_str(config, 'email.smtp'):
            errors.append('Setting email.smtp is required.')

        string_not_required = [
            'email.footer', 'server.whois.access_list',
            'server.http.access_list'
        ]
        for setting in string_not_required:
            if not self._check_is_str(config, setting, required=False):
                errors.append(
                    f'Setting {setting} must be a string, if defined.')

        if not self._check_is_str(config, 'auth.gnupg_keyring'):
            errors.append('Setting auth.gnupg_keyring is required.')

        access_lists = set(config.get('access_lists', {}).keys())
        unresolved_access_lists = {
            x
            for x in expected_access_lists.difference(access_lists)
            if x and isinstance(x, str)
        }
        if unresolved_access_lists:
            errors.append(
                f'Access lists {", ".join(unresolved_access_lists)} referenced in settings, but not defined.'
            )

        for name, access_list in config.get('access_lists', {}).items():
            for item in access_list:
                try:
                    IP(item)
                except ValueError as ve:
                    errors.append(f'Invalid item in access list {name}: {ve}.')

        known_sources = set(config.get('sources', {}).keys())
        unknown_default_sources = set(config.get('sources_default',
                                                 [])).difference(known_sources)
        if unknown_default_sources:
            errors.append(
                f'Setting sources_default contains unknown sources: {", ".join(unknown_default_sources)}'
            )

        for name, details in config.get('sources', {}).items():
            # A source is an NRTM mirror only when all three of these are set.
            nrtm_mirror = details.get('nrtm_host') and details.get(
                'nrtm_port') and details.get('import_serial_source')
            if details.get('keep_journal') and not (
                    nrtm_mirror or details.get('authoritative')):
                errors.append(
                    f'Setting keep_journal for source {name} can not be enabled unless either authoritative '
                    f'is enabled, or all three of nrtm_host, nrtm_port and import_serial_source.'
                )
            if details.get('nrtm_host') and not (
                    details.get('import_serial_source')):
                errors.append(
                    f'Setting nrtm_host for source {name} can not be enabled without setting '
                    f'import_serial_source.')

            # YAML parses numeric timers as int; str() before isnumeric()
            # avoids an AttributeError and reports a proper config error for
            # non-numeric values instead of crashing.
            if not str(details.get('import_timer', '0')).isnumeric():
                errors.append(
                    f'Setting import_timer for source {name} must be a number.'
                )
            if not str(details.get('export_timer', '0')).isnumeric():
                errors.append(
                    f'Setting export_timer for source {name} must be a number.'
                )

        if config.get('log.level') and not config.get('log.level') in [
                'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
        ]:
            errors.append(
                f'Invalid log.level: {config.get("log.level")}. '
                f'Valid settings for log.level are `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL`.'
            )

        return errors

    def _check_is_str(self, config, key, required=True):
        """Return True if config[key] is a non-empty str (or, when not
        required, absent or a str)."""
        if required:
            return config.get(key) and isinstance(config.get(key), str)
        return config.get(key) is None or isinstance(config.get(key), str)
Example #22
0
    def test_dotteddict(self):
        """DottedDict Tests: key validation, dotted access, attribute
        access, deletion, and conversion back to a plain dict."""
        obj = DottedDict()

        self.assertNotIsInstance(obj, dict)

        # Non-string keys are rejected.
        # assertRaisesRegexp was deprecated in Python 3.2 and removed in
        # 3.12; assertRaisesRegex is the supported spelling.
        with self.assertRaisesRegex(KeyError,
                                    'DottedDict keys must be str or unicode'):
            obj[0] = 0

        with self.assertRaisesRegex(KeyError,
                                    'DottedDict keys must be str or unicode'):
            obj[1.0] = 0

        obj['0'] = 0

        self.assertReprsEqual(repr(obj), "{'0': 0}")

        obj = DottedDict()
        obj.update({'hello': 'world'})

        self.assertReprsEqual(repr(obj), "{'hello': 'world'}")

        obj.update({'hello': {'world': {'wide': 'web'}}})

        self.assertReprsEqual(repr(obj),
                              "{'hello': {'world': {'wide': 'web'}}}")

        # Nested values come back as DottedDict; leaves keep their type.
        self.assertIsInstance(obj['hello'], DottedDict)
        self.assertIsInstance(obj['hello.world'], DottedDict)
        self.assertIsInstance(obj['hello.world.wide'], str)

        # Dotted and chained subscripts are equivalent.
        self.assertEqual(obj['hello.world'], obj['hello']['world'])
        self.assertEqual(obj['hello.world.wide'],
                         obj['hello']['world']['wide'])

        obj['hello.world'].update({'free': 'tour'})

        self.assertReprsEqual(
            repr(obj), "{'hello': {'world': {'wide': 'web', 'free': 'tour'}}}")

        # Access via __getattr__ and __setattr__

        self.assertEqual(obj.hello.world.wide, 'web')
        self.assertEqual(obj.hello.world.free, 'tour')

        obj.hello.world.wide = 'tour'
        obj.hello.world.free = 'web'

        self.assertEqual(obj.hello.world.wide, 'tour')
        self.assertEqual(obj.hello.world.free, 'web')

        self.assertReprsEqual(
            repr(obj), "{'hello': {'world': {'wide': 'tour', 'free': 'web'}}}")

        obj.hello.world.wide = 'web'
        obj.hello.world.free = 'tour'

        self.assertReprsEqual(
            repr(obj), "{'hello': {'world': {'wide': 'web', 'free': 'tour'}}}")

        # Deletion via dotted keys, chained subscripts, and attributes.
        del obj['hello.world.free']

        self.assertReprsEqual(repr(obj),
                              "{'hello': {'world': {'wide': 'web'}}}")

        del obj['hello']['world']['wide']

        self.assertReprsEqual(repr(obj), "{'hello': {'world': {}}}")

        obj['hello']['world.wide'] = 'web'

        self.assertReprsEqual(repr(obj),
                              "{'hello': {'world': {'wide': 'web'}}}")

        del obj['hello']['world.wide']

        self.assertReprsEqual(repr(obj), "{'hello': {'world': {}}}")

        # Assigning a scalar replaces an entire subtree.
        obj['hello'] = 'goodbye'

        self.assertReprsEqual(repr(obj), "{'hello': 'goodbye'}")

        del obj.hello

        self.assertReprsEqual(repr(obj), "{}")

        obj.hello = 'goodbye'

        self.assertReprsEqual(repr(obj), "{'hello': 'goodbye'}")

        # to_python() yields an equivalent plain dict.
        python_obj = obj.to_python()

        self.assertReprsEqual(repr(python_obj), repr(obj))
        self.assertIsInstance(python_obj, dict)
        self.assertNotIsInstance(obj, dict)
    res = string_to_ast(h_name, s)

    s = State()
    examples = [
        "5-2017", "2017-5", "2017/5", "2017-05 - 2018-01", "2017",
        "5-2017 - 2018-1", "2017-2018", "Year", "Month"
    ]
    for example in examples:
        print(example)
        res = string_to_ast(time_expression, example)
        print(res)
        print(f'Is year = {is_year(example)}')
        print(f'Is month = {is_month(example)}')
        print("-------------------")

    s.set("HH", DottedDict({"Power": {"p": 34.5, "Price": 2.3}}))
    s.set("EN", DottedDict({"Power": {"Price": 1.5}}))
    s.set("HH", DottedDict({"Power": 25}), "ns2")
    s.set("param1", 0.93)
    s.set("param2", 0.9)
    s.set("param3", 0.96)
    examples = [
        "EN(p1=1.5, p2=2.3)[d1='C11', d2='C21'].v2",  # Simply sliced Variable Dataset (function call)
        "a_function(p1=2, p2='cc', p3=1.3*param3)",
        "-5+4*2",  # Simple expression #1
        "HH",  # Simple name
        "HH.Power.p",  # Hierarchic name
        "5",  # Integer
        "1.5",  # Float
        "1e-10",  # Float scientific notation
        "(5+4)*2",  # Simple expression #2 (parenthesis)
 def environment(self):
     """Return the environment, lazily converted to a DottedDict.

     The converted value is cached back onto the instance so the
     conversion happens at most once.
     """
     env = self.__environment
     if not isinstance(env, DottedDict):
         env = DottedDict(env)
         self.__environment = env
     return env
Example #25
0
from os.path import dirname, join, exists, abspath, normpath

import yaml
from dotted.collection import DottedDict
# NOTE(review): importing from numpy's internal test suite looks accidental
# (the imported name C is unused in this block); confirm and remove if so.
from numpy.core.tests.test_scalarinherit import C

# Project identity and filesystem layout, resolved relative to this file
# (two directories up is the project root).
PROJECT_NAME = 'lazy-arxiv'
PROJECT_ROOT_DIR_PATH = abspath(join(dirname(__file__), '..', '..'))

CONFIG_FILE_NAME = 'configs/admin.conf'
LOG_FILE_NAME = f'{PROJECT_NAME}.log'

CONFIG_FILE_PATH = join(PROJECT_ROOT_DIR_PATH, PROJECT_NAME, CONFIG_FILE_NAME)

LOG_DIR_PATH = join(PROJECT_ROOT_DIR_PATH, PROJECT_NAME, 'logs')
LOG_FILE_PATH = join(LOG_DIR_PATH, LOG_FILE_NAME)

DATA_DIR_PATH = join(PROJECT_ROOT_DIR_PATH, PROJECT_NAME, 'data')

# NOTE(review): assert statements are stripped under `python -O`; raise an
# explicit exception instead if this check must always run.
assert exists(CONFIG_FILE_PATH), 'CONFIG_FILE_PATH not detected!'
# assert exists(LOG_DIR_PATH), 'LOG_FILE_PATH not detected!'

# Load the YAML config once at import time. 'sqllite' (sic) is the key as
# spelled in the config file; its value is rewritten to an absolute path.
with open(CONFIG_FILE_PATH) as conf_file:
    CONFIG = DottedDict(yaml.safe_load(conf_file))
    CONFIG.data.sqllite = join(DATA_DIR_PATH, CONFIG.data.sqllite)
Example #26
0
# Built-in default settings, including the well-known public IRR sources
# and their mirror locations.
DEFAULT_SETTINGS = DottedDict({
    # NOTE(review): database_url and email.from were redacted in the original
    # file, which also broke the dict nesting (the 'email' wrapper was lost,
    # leaving an unmatched brace). The structure is restored here with
    # placeholder values — TODO confirm the real deployment values.
    'database_url': 'postgresql:///irrd',
    'email': {
        'from': 'example@example.com',
        'footer': '',
        'smtp': 'localhost',
    },
    'gnupg': {
        # Keyring directory, relative to the package's parent directory.
        'homedir': os.path.join(
            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
            '../gnupg/'),
    },
    'sources': {
        # TODO: validate that source names are upper case
        'AFRINIC': {
            'authoritative': False,
            'keep_journal': True,
            'nrtm_host': 'rr.ntt.net',
            'nrtm_port': 43,
            'dump_source': 'ftp://ftp.afrinic.net/pub/dbase/afrinic.db.gz',
            'dump_serial_source': 'ftp://ftp.afrinic.net/pub/dbase/AFRINIC.CURRENTSERIAL',
            'object_class_filter': 'as-block,as-set,autnum,filter-set,inet-rtr,peering-set,role,route-set,route,route6,rtr-set',
        },
        'ALTDB': {
            'keep_journal': True,
            'nrtm_host': 'rr.ntt.net',
            'nrtm_port': 43,
            'dump_source': 'ftp://ftp.radb.net/radb/dbase/altdb.db.gz',
            'dump_serial_source': 'ftp://ftp.radb.net/radb/dbase/ALTDB.CURRENTSERIAL',
        },
        'APNIC': {
            'authoritative': False,
            'keep_journal': True,
            'nrtm_host': 'rr.ntt.net',
            'nrtm_port': 43,
            # Implicit string concatenation: one comma-separated URL list.
            'dump_source':
                'ftp://ftp.apnic.net/pub/apnic/whois/apnic.db.as-block.gz,'
                'ftp://ftp.apnic.net/pub/apnic/whois/apnic.db.as-set.gz,'
                'ftp://ftp.apnic.net/pub/apnic/whois/apnic.db.aut-num.gz,'
                'ftp://ftp.apnic.net/pub/apnic/whois/apnic.db.filter-set.gz,'
                'ftp://ftp.apnic.net/pub/apnic/whois/apnic.db.inet-rtr.gz,'
                'ftp://ftp.apnic.net/pub/apnic/whois/apnic.db.peering-set.gz,'
                'ftp://ftp.apnic.net/pub/apnic/whois/apnic.db.route-set.gz,'
                'ftp://ftp.apnic.net/pub/apnic/whois/apnic.db.route.gz,'
                'ftp://ftp.apnic.net/pub/apnic/whois/apnic.db.route6.gz,'
                'ftp://ftp.apnic.net/pub/apnic/whois/apnic.db.rtr-set.gz',
            # NOTE(review): the serial source points at ftp.arin.net although
            # the dumps come from ftp.apnic.net — likely a copy-paste error;
            # confirm before changing.
            'dump_serial_source': 'ftp://ftp.arin.net/pub/rr/ARIN.CURRENTSERIAL',
        },
        'ARIN': {
            'authoritative': False,
            'keep_journal': True,
            'nrtm_host': 'rr.arin.net',
            'nrtm_port': 4444,
            'dump_source': 'ftp://ftp.arin.net/pub/rr/arin.db',
            'dump_serial_source': 'ftp://ftp.arin.net/pub/rr/ARIN.CURRENTSERIAL',
        },
        # ARIN-WHOIS source unknown
        'BBOI': {
            'authoritative': False,
            'keep_journal': True,
            'nrtm_host': 'irr.bboi.net',
            'nrtm_port': 43,
            'dump_source': 'ftp://irr.bboi.net/bboi.db.gz',
            'dump_serial_source': 'ftp://irr.bboi.net/BBOI.CURRENTSERIAL',
        },
        'BELL': {
            'keep_journal': True,
            'nrtm_host': 'rr.ntt.net',
            'nrtm_port': 43,
            'dump_source': 'ftp://ftp.radb.net/radb/dbase/bell.db.gz',
            'dump_serial_source': 'ftp://ftp.radb.net/radb/dbase/BELL.CURRENTSERIAL',
        },
        # INTERNAL source unknown
        'JPIRR': {
            'keep_journal': True,
            'nrtm_host': 'rr.ntt.net',
            'nrtm_port': 43,
            'dump_source': 'ftp://ftp.radb.net/radb/dbase/jpirr.db.gz',
            'dump_serial_source': 'ftp://ftp.radb.net/radb/dbase/JPIRR.CURRENTSERIAL',
        },
        'LEVEL3': {
            'authoritative': False,
            'keep_journal': True,
            'nrtm_host': 'rr.level3.net',
            'nrtm_port': 43,
            'dump_source': 'ftp://rr.Level3.net/pub/rr/level3.db.gz',
            'dump_serial_source': 'ftp://rr.level3.net/pub/rr/LEVEL3.CURRENTSERIAL',
        },
        'NTTCOM': {
            'authoritative': True,
            'keep_journal': True,
            'nrtm_host': 'rr.ntt.net',
            'nrtm_port': 43,
            'dump_source': 'ftp://rr1.ntt.net/nttcomRR/nttcom.db.gz',
            'dump_serial_source': 'ftp://rr1.ntt.net/nttcomRR/NTTCOM.CURRENTSERIAL',
            # export schedule
            # TODO: authoritative should block mirror downloads?
        },
        'RADB': {
            'authoritative': False,
            'keep_journal': True,
            'nrtm_host': 'whois.radb.net',
            'nrtm_port': 43,
            'dump_source': 'ftp://ftp.radb.net/radb/dbase/radb.db.gz',
            'dump_serial_source': 'ftp://ftp.radb.net/radb/dbase/RADB.CURRENTSERIAL',
        },
        # REGISTROBR source unknown
        'RGNET': {
            'authoritative': False,
            'keep_journal': True,
            'nrtm_host': 'whois.rg.net',
            'nrtm_port': 43,
            'dump_source': 'ftp://rg.net/rgnet/RGNET.db.gz',
            'dump_serial_source': 'ftp://rg.net/rgnet/RGNET.CURRENTSERIAL',
        },
        'RIPE': {
            'authoritative': False,
            'keep_journal': True,
            'nrtm_host': '193.0.6.145',
            'nrtm_port': 4444,
            'dump_source':
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe.db.as-block.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe.db.as-set.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe.db.aut-num.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe.db.filter-set.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe.db.inet-rtr.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe.db.organisation.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe.db.peering-set.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe.db.role.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe.db.route.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe.db.route6.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe.db.route-set.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe.db.rtr-set.gz',
            'dump_serial_source': 'ftp://ftp.ripe.net/ripe/dbase/rc/RIPE.CURRENTSERIAL',
        },
        'RIPE-NONAUTH': {
            'authoritative': False,
            'keep_journal': True,
            'nrtm_host': '193.0.6.145',
            'nrtm_port': 4444,
            'dump_source':
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe-nonauth.db.as-block.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe-nonauth.db.as-set.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe-nonauth.db.aut-num.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe-nonauth.db.filter-set.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe-nonauth.db.inet-rtr.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe-nonauth.db.organisation.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe-nonauth.db.peering-set.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe-nonauth.db.role.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe-nonauth.db.route.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe-nonauth.db.route6.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe-nonauth.db.route-set.gz,'
                'ftp://ftp.ripe.net/ripe/dbase/rc/split/ripe-nonauth.db.rtr-set.gz',
            'dump_serial_source': 'ftp://ftp.ripe.net/ripe/dbase/rc/RIPE-NONAUTH.CURRENTSERIAL',
        },
        # RPKI source unknown
        'TC': {
            'keep_journal': True,
            'nrtm_host': 'rr.ntt.net',
            'nrtm_port': 43,
            'dump_source': 'ftp://ftp.radb.net/radb/dbase/tc.db.gz',
            'dump_serial_source': 'ftp://ftp.radb.net/radb/dbase/TC.CURRENTSERIAL',
        },
    }
})