Beispiel #1
0
def update(source, filename) -> int:
    """Run a single mirror file update for *source* from *filename*.

    Refuses to run when the source is configured for automatic imports.
    Returns 0 on success, 1 on an import error, 2 on a config conflict.
    """
    conflicting_settings = [
        get_setting(f'sources.{source}.import_source'),
        get_setting(f'sources.{source}.import_serial_source'),
    ]
    if any(conflicting_settings):
        print(
            f'Error: to use this command, import_source and import_serial_source '
            f'for source {source} must not be set.')
        return 2

    dh = DatabaseHandler()
    parser = MirrorUpdateFileImportParser(
        source,
        filename,
        database_handler=dh,
        direct_error_return=True,
        roa_validator=BulkRouteROAValidator(dh),
    )
    error = parser.run_import()
    if error:
        dh.rollback()
    else:
        dh.commit()
    dh.close()
    if not error:
        return 0
    print(f'Error occurred while processing object:\n{error}')
    return 1
Beispiel #2
0
async def startup():
    """
    Prepare the database connection and preloader, which
    is shared between different queries in this process.
    As these are run in a separate process, the config file
    is read from the environment.
    """
    setproctitle('irrd-http-server-listener')
    global app
    config_init(os.getenv(ENV_UVICORN_WORKER_CONFIG_PATH))
    try:
        app.state.database_handler = DatabaseHandler(readonly=True)
        app.state.preloader = Preloader(enable_queries=True)
    except Exception as e:
        logger.critical(
            f'HTTP worker failed to initialise preloader or database, '
            f'unable to start, terminating IRRd, traceback follows: {e}',
            exc_info=e)
        # Initialisation failure is fatal for all of IRRd: ask the main
        # process to shut down if we can find it.
        main_pid = os.getenv(ENV_MAIN_PROCESS_PID)
        if not main_pid:
            logger.error(
                'Failed to terminate IRRd, unable to find main process PID')
            return
        os.kill(int(main_pid), signal.SIGTERM)
Beispiel #3
0
    def run(self):
        """Import ROAs and revalidate the RPKI status of all routes against them."""
        self.database_handler = DatabaseHandler()

        try:
            # The ROA import itself must not generate journal entries.
            self.database_handler.disable_journaling()
            roa_objs = self._import_roas()
            # Do an early commit to make the new ROAs available to other processes.
            self.database_handler.commit()
            # The ROA import does not use journaling, but updating the RPKI
            # status may create journal entries.
            self.database_handler.enable_journaling()

            validator = BulkRouteROAValidator(self.database_handler, roa_objs)
            now_valid, now_invalid, now_not_found = validator.validate_all_routes()
            self.database_handler.update_rpki_status(
                rpsl_objs_now_valid=now_valid,
                rpsl_objs_now_invalid=now_invalid,
                rpsl_objs_now_not_found=now_not_found,
            )
            self.database_handler.commit()
            notified = notify_rpki_invalid_owners(self.database_handler, now_invalid)
            logger.info(f'RPKI status updated for all routes, {len(now_valid)} newly valid, '
                        f'{len(now_invalid)} newly invalid, '
                        f'{len(now_not_found)} newly not_found routes, '
                        f'{notified} emails sent to contacts of newly invalid authoritative objects')

        except OSError as ose:
            # I/O errors can occur and should not log a full traceback (#177)
            logger.error(f'An error occurred while attempting a ROA import: {ose}')
        except ROAParserException as rpe:
            logger.error(f'An exception occurred while attempting a ROA import: {rpe}')
        except Exception as exc:
            logger.error(f'An exception occurred while attempting a ROA import: {exc}', exc_info=exc)
        finally:
            self.database_handler.close()
    def run(self) -> None:
        """
        Run a mirror update for this source.

        A full import runs when a reload is forced, no serial has been seen
        yet, or NRTM is not enabled; otherwise the NRTM update stream runs.
        Commits on success; errors are logged without propagating.
        """
        self.database_handler = DatabaseHandler()

        try:
            serial_newest_seen, force_reload = self._status()
            nrtm_enabled = bool(
                get_setting(f'sources.{self.source}.nrtm_host'))
            # The first fragment must end in ', ' — previously the two
            # f-strings concatenated into "...seen},force_reload".
            logger.debug(
                f'Most recent serial seen for {self.source}: {serial_newest_seen}, '
                f'force_reload: {force_reload}, nrtm enabled: {nrtm_enabled}')
            if force_reload or not serial_newest_seen or not nrtm_enabled:
                self.full_import_runner.run(
                    database_handler=self.database_handler,
                    serial_newest_seen=serial_newest_seen,
                    force_reload=force_reload)
            else:
                self.update_stream_runner.run(
                    serial_newest_seen, database_handler=self.database_handler)

            self.database_handler.commit()
        except OSError as ose:
            # I/O errors can occur and should not log a full traceback (#177)
            logger.error(
                f'An error occurred while attempting a mirror update or initial import '
                f'for {self.source}: {ose}')
        except Exception as exc:
            logger.error(
                f'An exception occurred while attempting a mirror update or initial import '
                f'for {self.source}: {exc}',
                exc_info=exc)
        finally:
            self.database_handler.close()
Beispiel #5
0
def load(source, filename, serial) -> int:
    """Replace all data for *source* with a full import from *filename*.

    Refuses to run when the source is configured for automatic imports.
    Returns 0 on success, 1 on an import error, 2 on a config conflict.
    """
    conflicting_settings = [
        get_setting(f'sources.{source}.import_source'),
        get_setting(f'sources.{source}.import_serial_source'),
    ]
    if any(conflicting_settings):
        print(f'Error: to use this command, import_source and import_serial_source '
              f'for source {source} must not be set.')
        return 2

    dh = DatabaseHandler()
    roa_validator = BulkRouteROAValidator(dh)
    # Drop existing objects and their journal before the fresh import.
    dh.delete_all_rpsl_objects_with_journal(source)
    dh.disable_journaling()
    parser = MirrorFileImportParser(
        source=source,
        filename=filename,
        serial=serial,
        database_handler=dh,
        direct_error_return=True,
        roa_validator=roa_validator,
    )
    error = parser.run_import()
    if error:
        dh.rollback()
    else:
        dh.commit()
    dh.close()
    if not error:
        return 0
    print(f'Error occurred while processing object:\n{error}')
    return 1
Beispiel #6
0
    def run(self):
        """Revalidate all RPSL objects against the scope filter and store the results."""
        self.database_handler = DatabaseHandler()

        try:
            validator = ScopeFilterValidator()
            in_scope, out_scope_as, out_scope_prefix = \
                validator.validate_all_rpsl_objects(self.database_handler)
            self.database_handler.update_scopefilter_status(
                rpsl_objs_now_in_scope=in_scope,
                rpsl_objs_now_out_scope_as=out_scope_as,
                rpsl_objs_now_out_scope_prefix=out_scope_prefix,
            )
            self.database_handler.commit()
            logger.info(
                f'Scopefilter status updated for all routes, '
                f'{len(in_scope)} newly in scope, '
                f'{len(out_scope_as)} newly out of scope AS, '
                f'{len(out_scope_prefix)} newly out of scope prefix'
            )

        except Exception as exc:
            logger.error(
                f'An exception occurred while attempting a scopefilter status update: {exc}',
                exc_info=exc)
        finally:
            self.database_handler.close()
Beispiel #7
0
    def load_suspension_submission(self,
                                   data: RPSLSuspensionSubmission,
                                   request_meta: Optional[Dict[str, Optional[str]]] = None):
        """
        Process a suspension or reactivation submission.

        Each submitted object is turned into a SuspensionRequest built from a
        pseudo mntner object text. Commits and closes the database handler
        when done; returns self for fluent use.
        """
        self.database_handler = DatabaseHandler()
        self.request_meta = request_meta if request_meta else {}

        reference_validator = ReferenceValidator(self.database_handler)
        auth_validator = AuthValidator(self.database_handler)
        change_requests: List[Union[ChangeRequest, SuspensionRequest]] = []

        auth_validator.overrides = [data.override] if data.override else []

        for rpsl_obj in data.objects:
            # We don't have a neat way to process individual attribute pairs,
            # so construct a pseudo-object by appending the text.
            object_text = f"mntner: {rpsl_obj.mntner}\nsource: {rpsl_obj.source}\n"
            change_requests.append(
                SuspensionRequest(
                    object_text,
                    self.database_handler,
                    auth_validator,
                    rpsl_obj.request_type.value,
                ))

        self._handle_change_requests(change_requests, reference_validator,
                                     auth_validator)
        self.database_handler.commit()
        self.database_handler.close()
        return self
Beispiel #8
0
 def __init__(self,
              object_texts: str,
              pgp_fingerprint: Optional[str] = None,
              request_meta: Optional[Dict[str, Optional[str]]] = None) -> None:
     """
     Process a raw submission of object texts.

     pgp_fingerprint, when given, is resolved to a PGP key id used for
     authentication. Commits and closes the database handler when done.
     """
     self.database_handler = DatabaseHandler()
     self.request_meta = request_meta if request_meta else {}
     self._pgp_key_id = self._resolve_pgp_key_id(pgp_fingerprint) if pgp_fingerprint else None
     self._handle_object_texts(object_texts)
     self.database_handler.commit()
     self.database_handler.close()
Beispiel #9
0
    def run(self) -> None:
        """Run filtered and unfiltered source exports, where configured."""
        self.database_handler = DatabaseHandler()
        try:
            filtered_dest = get_setting(
                f'sources.{self.source}.export_destination')
            if filtered_dest:
                logger.info(
                    f'Starting a source export for {self.source} to {filtered_dest}'
                )
                self._export(filtered_dest)

            unfiltered_dest = get_setting(
                f'sources.{self.source}.export_destination_unfiltered')
            if unfiltered_dest:
                logger.info(
                    f'Starting an unfiltered source export for {self.source} '
                    f'to {unfiltered_dest}')
                # Unfiltered exports keep auth hashes in the output.
                self._export(unfiltered_dest, remove_auth_hashes=False)

            self.database_handler.commit()
        except Exception as err:
            logger.error(
                f'An exception occurred while attempting to run an export '
                f'for {self.source}: {err}',
                exc_info=err)
        finally:
            self.database_handler.close()
Beispiel #10
0
    def main(self, filename, strict_validation, database, show_info=True):
        """
        Parse all RPSL objects in *filename* ('-' reads stdin).

        When database is truthy, objects are also written to the database
        with journaling disabled. Prints a summary of parsed objects,
        errors, and ignored unknown object classes.
        """
        self.show_info = show_info
        if database:
            self.database_handler = DatabaseHandler()
            self.database_handler.disable_journaling()

        if filename == '-':  # pragma: no cover
            f = sys.stdin
        else:
            f = open(filename, encoding='utf-8', errors='backslashreplace')

        try:
            for paragraph in split_paragraphs_rpsl(f):
                self.parse_object(paragraph, strict_validation)
        finally:
            # Close the handle we opened; the previous version leaked it.
            # stdin is left open for the rest of the process.
            if f is not sys.stdin:
                f.close()

        print(
            f'Processed {self.obj_parsed} objects, {self.obj_errors} with errors'
        )
        if self.obj_unknown:
            unknown_formatted = ', '.join(self.unknown_object_classes)
            print(
                f'Ignored {self.obj_unknown} objects due to unknown object classes: {unknown_formatted}'
            )

        if self.database_handler:
            self.database_handler.commit()
            self.database_handler.close()
Beispiel #11
0
def set_last_modified():
    """Re-render all authoritative objects so their last-modified reflects 'updated'."""
    dh = DatabaseHandler()
    auth_sources = [
        name for name, settings in get_setting('sources').items()
        if settings.get('authoritative')
    ]
    query = RPSLDatabaseQuery(column_names=['pk', 'object_text', 'updated'],
                              enable_ordering=False).sources(auth_sources)

    results = list(dh.execute_query(query))
    print(f'Updating {len(results)} objects in sources {auth_sources}')
    for result in results:
        rpsl_obj = rpsl_object_from_text(result['object_text'],
                                         strict_validation=False)
        if rpsl_obj.messages.errors():  # pragma: no cover
            print(
                f'Failed to process {rpsl_obj}: {rpsl_obj.messages.errors()}')
            continue
        new_text = rpsl_obj.render_rpsl_text(result['updated'])
        table = RPSLDatabaseObject.__table__
        stmt = table.update().where(table.c.pk == result['pk']).values(
            object_text=new_text)
        dh.execute_statement(stmt)
    dh.commit()
    dh.close()
Beispiel #12
0
    def handle_query(self, query: str) -> WhoisQueryResponse:
        """
        Process a single query. Always returns a WhoisQueryResponse object.
        Not thread safe - only one call must be made to this method at the same time.
        """
        # These flags are reset with every query.
        self.database_handler = DatabaseHandler()
        self.key_fields_only = False
        self.object_classes = []
        self.preloader = get_preloader()

        # Dispatch to IRRD-style ('!'-prefixed) or RIPE-style handling;
        # error handling is identical apart from the response mode.
        if query.startswith('!'):
            mode = WhoisQueryResponseMode.IRRD
            handler, handler_arg = self.handle_irrd_command, query[1:]
        else:
            mode = WhoisQueryResponseMode.RIPE
            handler, handler_arg = self.handle_ripe_command, query

        try:
            return handler(handler_arg)
        except WhoisQueryParserException as exc:
            logger.info(
                f'{self.peer_str}: encountered parsing error while parsing query "{query}": {exc}'
            )
            return WhoisQueryResponse(
                response_type=WhoisQueryResponseType.ERROR,
                mode=mode,
                result=str(exc))
        except Exception as exc:
            logger.error(
                f'An exception occurred while processing whois query "{query}": {exc}',
                exc_info=exc)
            return WhoisQueryResponse(
                response_type=WhoisQueryResponseType.ERROR,
                mode=mode,
                result='An internal error occurred while processing this query.')
        finally:
            self.database_handler.close()
Beispiel #13
0
    def run(self) -> None:
        """Run a source export to the configured destination."""
        self.database_handler = DatabaseHandler()
        try:
            destination = get_setting(f'sources.{self.source}.export_destination')
            logger.info(f'Starting a source export for {self.source} to {destination}')
            self._export(destination)

            self.database_handler.commit()
        except Exception as err:
            logger.error(f'An exception occurred while attempting to run an export '
                         f'for {self.source}: {err}', exc_info=err)
        finally:
            self.database_handler.close()
Beispiel #14
0
    def generate_status(self) -> str:
        """
        Generate a human-readable overview of database status.

        Runs the statistics and status queries, stores their results on
        self, and returns the rendered report sections joined by blank lines.
        """
        database_handler = DatabaseHandler()
        try:
            statistics_query = RPSLDatabaseObjectStatisticsQuery()
            self.statistics_results = list(database_handler.execute_query(statistics_query))
            status_query = DatabaseStatusQuery()
            self.status_results = list(database_handler.execute_query(status_query))

            results = [self._generate_header(), self._generate_statistics_table(), self._generate_source_detail()]
        finally:
            # Release the connection even when a query or renderer raises;
            # the previous version leaked the handler on exceptions.
            database_handler.close()
        return '\n\n'.join(results)
Beispiel #15
0
def load(source, filename, serial) -> int:
    """Replace all data for *source* with a full import from *filename*.

    Returns 0 on success, 1 when the import reported an error.
    """
    dh = DatabaseHandler()
    # Drop existing objects and their journal before the fresh import.
    dh.delete_all_rpsl_objects_with_journal(source)
    dh.disable_journaling()
    parser = MirrorFileImportParser(
        source, filename, serial=serial, database_handler=dh,
        direct_error_return=True)
    error = parser.run_import()
    if error:
        dh.rollback()
    else:
        dh.commit()
    dh.close()
    if not error:
        return 0
    print(f'Error occurred while processing object:\n{error}')
    return 1
Beispiel #16
0
def load_pgp_keys(source: str) -> None:
    """
    Load all key-cert objects of *source* into the GnuPG keychain.

    Parsing a key-cert in strict mode has the side effect of importing its
    PGP key; parse errors are reported per key without aborting the run.
    """
    dh = DatabaseHandler()
    try:
        query = RPSLDatabaseQuery(column_names=['rpsl_pk', 'object_text'])
        query = query.sources([source]).object_classes(['key-cert'])
        keycerts = dh.execute_query(query)

        for keycert in keycerts:
            rpsl_pk = keycert["rpsl_pk"]
            print(f'Loading key-cert {rpsl_pk}')
            # Parsing the keycert in strict mode will load it into the GPG keychain
            result = rpsl_object_from_text(keycert['object_text'], strict_validation=True)
            if result.messages.errors():
                print(f'Errors in PGP key {rpsl_pk}: {result.messages.errors()}')

        print('All valid key-certs loaded into the GnuPG keychain.')
    finally:
        # Close the handler even if a query or parse raises; the previous
        # version leaked it on exceptions.
        dh.close()
Beispiel #17
0
    def run(self, keep_running=True) -> None:
        """
        Whois worker run loop.
        This method does not return, except if it failed to initialise a preloader,
        or if keep_running is False, after the first request is handled. The latter
        is used in the tests.
        """
        # Disable the special sigterm_handler defined in start_whois_server()
        # (signal handlers are inherited)
        signal.signal(signal.SIGTERM, signal.SIG_DFL)

        try:
            self.preloader = Preloader()
            self.database_handler = DatabaseHandler(readonly=True)
        except Exception as init_error:
            logger.critical(
                f'Whois worker failed to initialise preloader or database, '
                f'unable to start, terminating IRRd, traceback follows: {init_error}',
                exc_info=init_error)
            main_pid = os.getenv(ENV_MAIN_PROCESS_PID)
            if not main_pid:
                logger.error(
                    'Failed to terminate IRRd, unable to find main process PID'
                )
                return
            os.kill(int(main_pid), signal.SIGTERM)  # pragma: no cover
            return

        while True:
            try:
                setproctitle('irrd-whois-worker')
                self.request, self.client_address = self.connection_queue.get()
                self.setup()
                self.handle_connection()
                self.finish()
                self.close_request()
                memory_trim()
            except Exception as conn_error:
                # Best-effort cleanup of the failed connection.
                try:
                    self.close_request()
                except Exception:  # pragma: no cover
                    pass
                logger.error(
                    f'Failed to handle whois connection, traceback follows: {conn_error}',
                    exc_info=conn_error)
            if not keep_running:
                break
Beispiel #18
0
    def load_text_blob(self,
                       object_texts_blob: str,
                       pgp_fingerprint: Optional[str] = None,
                       request_meta: Optional[Dict[str, Optional[str]]] = None):
        """
        Parse a blob of object texts into change requests and process them.

        pgp_fingerprint, when given, is resolved to a PGP key id used for
        authentication. Commits and closes the database handler when done;
        returns self for fluent use.
        """
        self.database_handler = DatabaseHandler()
        self.request_meta = request_meta if request_meta else {}
        self._pgp_key_id = self._resolve_pgp_key_id(
            pgp_fingerprint) if pgp_fingerprint else None

        reference_validator = ReferenceValidator(self.database_handler)
        auth_validator = AuthValidator(self.database_handler, self._pgp_key_id)
        change_requests = parse_change_requests(object_texts_blob,
                                                self.database_handler,
                                                auth_validator,
                                                reference_validator)

        self._handle_change_requests(change_requests, reference_validator,
                                     auth_validator)
        self.database_handler.commit()
        self.database_handler.close()
        return self
Beispiel #19
0
    def load_change_submission(self,
                               data: RPSLChangeSubmission,
                               delete=False,
                               request_meta: Optional[Dict[str, Optional[str]]] = None):
        """
        Process an RPSL change submission; with delete=True, a deletion.

        Objects submitted as attribute lists are reassembled into object
        text. Commits and closes the database handler when done; returns
        self for fluent use.
        """
        self.database_handler = DatabaseHandler()
        self.request_meta = request_meta if request_meta else {}

        reference_validator = ReferenceValidator(self.database_handler)
        auth_validator = AuthValidator(self.database_handler)
        change_requests: List[Union[ChangeRequest, SuspensionRequest]] = []

        delete_reason = None
        if delete:
            delete_reason = data.delete_reason

        auth_validator.passwords = data.passwords
        auth_validator.overrides = [data.override] if data.override else []

        for rpsl_obj in data.objects:
            object_text = rpsl_obj.object_text
            if rpsl_obj.attributes:
                # We don't have a neat way to process individual attribute pairs,
                # so construct a pseudo-object by appending the text.
                composite_object = []
                for attribute in rpsl_obj.attributes:
                    composite_object.append(attribute.name + ': ' +
                                            attribute.value)  # type: ignore
                object_text = '\n'.join(composite_object) + '\n'

            assert object_text  # enforced by pydantic
            change_requests.append(
                ChangeRequest(object_text, self.database_handler,
                              auth_validator, reference_validator,
                              delete_reason))

        self._handle_change_requests(change_requests, reference_validator,
                                     auth_validator)
        self.database_handler.commit()
        self.database_handler.close()
        return self
Beispiel #20
0
    def run(self) -> None:
        """Run a mirror update: full import when needed, NRTM stream otherwise."""
        self.database_handler = DatabaseHandler()

        try:
            serial_newest_seen, force_reload = self._status()
            logger.debug(
                f'Most recent serial seen for {self.source}: {serial_newest_seen}, force_reload: {force_reload}'
            )
            needs_full_import = force_reload or not serial_newest_seen
            if needs_full_import:
                self.full_import_runner.run(
                    database_handler=self.database_handler)
            else:
                self.update_stream_runner.run(
                    serial_newest_seen, database_handler=self.database_handler)

            self.database_handler.commit()
        except Exception as exc:
            logger.critical(
                f'An exception occurred while attempting a mirror update or initial import '
                f'for {self.source}: {exc}',
                exc_info=exc)
        finally:
            self.database_handler.close()
Beispiel #21
0
    def run(self, keep_running=True) -> None:
        """
        Whois worker run loop.
        This method does not return, except if it failed to initialise a preloader,
        or if keep_running is False, after the first request is handled. The latter
        is used in the tests.
        """
        # Disable the special sigterm_handler defined in start_whois_server()
        # (signal handlers are inherited)
        signal.signal(signal.SIGTERM, signal.SIG_DFL)

        try:
            self.preloader = Preloader()
            self.database_handler = DatabaseHandler()
        except Exception as e:
            # The first fragment must end in ', ' — previously the two
            # f-strings concatenated into "...database,unable to start".
            logger.error(f'Whois worker failed to initialise preloader or database, '
                         f'unable to start, traceback follows: {e}', exc_info=e)
            return

        while True:
            try:
                setproctitle('irrd-whois-worker')
                self.request, self.client_address = self.connection_queue.get()
                self.setup()
                self.handle_connection()
                self.finish()
                self.close_request()
            except Exception as e:
                # Best-effort cleanup of the failed connection.
                try:
                    self.close_request()
                except Exception:  # pragma: no cover
                    pass
                logger.error(f'Failed to handle whois connection, traceback follows: {e}',
                             exc_info=e)
            if not keep_running:
                break
Beispiel #22
0
def set_force_reload(source) -> None:
    """Flag *source* for a forced full reload on its next mirror run."""
    handler = DatabaseHandler(enable_preload_update=False)
    handler.set_force_reload(source)
    handler.commit()
    handler.close()
Beispiel #23
0
def set_force_reload(source) -> None:
    """Flag *source* for a forced full reload on its next mirror run."""
    handler = DatabaseHandler()
    handler.set_force_reload(source)
    handler.commit()
    handler.close()