def run(self) -> None:
        """
        Update the local copy of this mirrored source.

        Runs a full import when a reload is forced, when no serial has been
        seen yet, or when NRTM is not configured; otherwise streams updates
        over NRTM. Commits on success; errors are logged (I/O errors without
        a traceback) and the database handler is always closed.
        """
        self.database_handler = DatabaseHandler()

        try:
            serial_newest_seen, force_reload = self._status()
            nrtm_enabled = bool(
                get_setting(f'sources.{self.source}.nrtm_host'))
            # Fix: added missing space after the comma so the joined f-string
            # does not render as "...},force_reload:".
            logger.debug(
                f'Most recent serial seen for {self.source}: {serial_newest_seen}, '
                f'force_reload: {force_reload}, nrtm enabled: {nrtm_enabled}')
            if force_reload or not serial_newest_seen or not nrtm_enabled:
                self.full_import_runner.run(
                    database_handler=self.database_handler,
                    serial_newest_seen=serial_newest_seen,
                    force_reload=force_reload)
            else:
                self.update_stream_runner.run(
                    serial_newest_seen, database_handler=self.database_handler)

            self.database_handler.commit()
        except OSError as ose:
            # I/O errors can occur and should not log a full traceback (#177)
            logger.error(
                f'An error occurred while attempting a mirror update or initial import '
                f'for {self.source}: {ose}')
        except Exception as exc:
            logger.error(
                f'An exception occurred while attempting a mirror update or initial import '
                f'for {self.source}: {exc}',
                exc_info=exc)
        finally:
            self.database_handler.close()
Example #2
0
    def run(self):
        """
        Revalidate the scope filter status of all RPSL objects and persist
        the result. Any error is logged; the handler is always closed.
        """
        self.database_handler = DatabaseHandler()
        try:
            validator = ScopeFilterValidator()
            # Unpack the three result sets directly from the validator.
            rpsl_objs_now_in_scope, rpsl_objs_now_out_scope_as, rpsl_objs_now_out_scope_prefix = \
                validator.validate_all_rpsl_objects(self.database_handler)
            self.database_handler.update_scopefilter_status(
                rpsl_objs_now_in_scope=rpsl_objs_now_in_scope,
                rpsl_objs_now_out_scope_as=rpsl_objs_now_out_scope_as,
                rpsl_objs_now_out_scope_prefix=rpsl_objs_now_out_scope_prefix,
            )
            self.database_handler.commit()
            logger.info(
                f'Scopefilter status updated for all routes, '
                f'{len(rpsl_objs_now_in_scope)} newly in scope, '
                f'{len(rpsl_objs_now_out_scope_as)} newly out of scope AS, '
                f'{len(rpsl_objs_now_out_scope_prefix)} newly out of scope prefix'
            )
        except Exception as exc:
            logger.error(
                f'An exception occurred while attempting a scopefilter status update: {exc}',
                exc_info=exc)
        finally:
            self.database_handler.close()
Example #3
0
    def load_suspension_submission(self,
                                   data: RPSLSuspensionSubmission,
                                   request_meta: Optional[Dict[str, Optional[str]]] = None):
        """
        Process a suspension/reactivation submission.

        data contains the objects to suspend or reactivate; request_meta
        carries optional metadata about the request. Commits and closes
        the database handler, then returns self.

        Fix: the request_meta annotation was Dict[...] with a None default;
        it is now correctly Optional[Dict[...]].
        """
        self.database_handler = DatabaseHandler()
        self.request_meta = request_meta if request_meta else {}

        reference_validator = ReferenceValidator(self.database_handler)
        auth_validator = AuthValidator(self.database_handler)
        change_requests: List[Union[ChangeRequest, SuspensionRequest]] = []

        # Pass any override password from the submission to the auth validator.
        auth_validator.overrides = [data.override] if data.override else []

        for rpsl_obj in data.objects:
            # We don't have a neat way to process individual attribute pairs,
            # so construct a pseudo-object by appending the text.
            object_text = f"mntner: {rpsl_obj.mntner}\nsource: {rpsl_obj.source}\n"
            change_requests.append(
                SuspensionRequest(
                    object_text,
                    self.database_handler,
                    auth_validator,
                    rpsl_obj.request_type.value,
                ))

        self._handle_change_requests(change_requests, reference_validator,
                                     auth_validator)
        self.database_handler.commit()
        self.database_handler.close()
        return self
Example #4
0
    def run(self) -> None:
        """
        Run the configured exports for this source: a regular export when
        export_destination is set, and one with auth hashes retained when
        export_destination_unfiltered is set.
        """
        self.database_handler = DatabaseHandler()
        try:
            # Both settings are independent; look them up up front.
            export_destination = get_setting(
                f'sources.{self.source}.export_destination')
            export_destination_unfiltered = get_setting(
                f'sources.{self.source}.export_destination_unfiltered')

            if export_destination:
                logger.info(
                    f'Starting a source export for {self.source} to {export_destination}'
                )
                self._export(export_destination)

            if export_destination_unfiltered:
                logger.info(
                    f'Starting an unfiltered source export for {self.source} '
                    f'to {export_destination_unfiltered}')
                self._export(export_destination_unfiltered,
                             remove_auth_hashes=False)

            self.database_handler.commit()
        except Exception as exc:
            logger.error(
                f'An exception occurred while attempting to run an export '
                f'for {self.source}: {exc}',
                exc_info=exc)
        finally:
            self.database_handler.close()
Example #5
0
    def main(self, filename, strict_validation, database, show_info=True):
        """
        Parse all RPSL objects from filename ('-' for stdin); if database
        is set, load parsed objects into the database with journaling
        disabled. Prints a summary of processed/errored/unknown objects.
        """
        self.show_info = show_info
        if database:
            self.database_handler = DatabaseHandler()
            self.database_handler.disable_journaling()

        if filename == '-':  # pragma: no cover
            f = sys.stdin
            close_file = False
        else:
            f = open(filename, encoding='utf-8', errors='backslashreplace')
            close_file = True

        try:
            for paragraph in split_paragraphs_rpsl(f):
                self.parse_object(paragraph, strict_validation)
        finally:
            # Fix: the opened file was previously leaked; close it here
            # (but never close stdin).
            if close_file:
                f.close()

        print(
            f'Processed {self.obj_parsed} objects, {self.obj_errors} with errors'
        )
        if self.obj_unknown:
            unknown_formatted = ', '.join(self.unknown_object_classes)
            print(
                f'Ignored {self.obj_unknown} objects due to unknown object classes: {unknown_formatted}'
            )

        if self.database_handler:
            self.database_handler.commit()
            self.database_handler.close()
Example #6
0
 def __init__(self, object_texts: str, pgp_fingerprint: Optional[str] = None, request_meta: Dict[str, Optional[str]] = None) -> None:
     """
     Parse and process the given RPSL object texts as one submission.

     pgp_fingerprint, if given, is resolved to a PGP key id that is
     stored for use during processing. Commits and closes the database
     handler when done.
     """
     self.database_handler = DatabaseHandler()
     self.request_meta = request_meta if request_meta else {}
     # Resolve the PGP key id up front, before handling the object texts.
     self._pgp_key_id = self._resolve_pgp_key_id(pgp_fingerprint) if pgp_fingerprint else None
     self._handle_object_texts(object_texts)
     self.database_handler.commit()
     self.database_handler.close()
Example #7
0
    def run(self):
        """
        Import ROAs and update the RPKI status of all routes.

        The ROA import itself is not journaled; the subsequent RPKI status
        update may create journal entries. Owners of newly invalid
        authoritative objects are notified by email.
        """
        self.database_handler = DatabaseHandler()

        try:
            self.database_handler.disable_journaling()
            roa_objs = self._import_roas()
            # Do an early commit to make the new ROAs available to other processes.
            self.database_handler.commit()
            # The ROA import does not use journaling, but updating the RPKI
            # status may create journal entries.
            self.database_handler.enable_journaling()

            validator = BulkRouteROAValidator(self.database_handler, roa_objs)
            objs_now_valid, objs_now_invalid, objs_now_not_found = validator.validate_all_routes()
            self.database_handler.update_rpki_status(
                rpsl_objs_now_valid=objs_now_valid,
                rpsl_objs_now_invalid=objs_now_invalid,
                rpsl_objs_now_not_found=objs_now_not_found,
            )
            self.database_handler.commit()
            notified = notify_rpki_invalid_owners(self.database_handler, objs_now_invalid)
            logger.info(f'RPKI status updated for all routes, {len(objs_now_valid)} newly valid, '
                        f'{len(objs_now_invalid)} newly invalid, '
                        f'{len(objs_now_not_found)} newly not_found routes, '
                        f'{notified} emails sent to contacts of newly invalid authoritative objects')

        except OSError as ose:
            # I/O errors can occur and should not log a full traceback (#177)
            logger.error(f'An error occurred while attempting a ROA import: {ose}')
        except ROAParserException as rpe:
            logger.error(f'An exception occurred while attempting a ROA import: {rpe}')
        except Exception as exc:
            logger.error(f'An exception occurred while attempting a ROA import: {exc}', exc_info=exc)
        finally:
            self.database_handler.close()
Example #8
0
class RPSLMirrorImportUpdateRunner:
    """
    Entry point for updating a single mirrored RPSL database,
    depending on its current state.

    When no mirrored data exists yet, a reload is forced, or NRTM is
    not configured, RPSLMirrorFullImportRunner performs a new import
    from full export files. Otherwise, NRTMImportUpdateStreamRunner
    retrieves incremental updates over NRTM.
    """
    def __init__(self, source: str) -> None:
        self.source = source
        self.full_import_runner = RPSLMirrorFullImportRunner(source)
        self.update_stream_runner = NRTMImportUpdateStreamRunner(source)

    def run(self) -> None:
        self.database_handler = DatabaseHandler()

        try:
            serial_newest_mirror, force_reload = self._status()
            nrtm_enabled = bool(
                get_setting(f'sources.{self.source}.nrtm_host'))
            logger.debug(
                f'Most recent mirrored serial for {self.source}: {serial_newest_mirror}, '
                f'force_reload: {force_reload}, nrtm enabled: {nrtm_enabled}')
            # Stream NRTM updates only when NRTM is configured, a serial is
            # known, and no reload is forced; otherwise run a full import.
            if nrtm_enabled and serial_newest_mirror and not force_reload:
                self.update_stream_runner.run(
                    serial_newest_mirror,
                    database_handler=self.database_handler)
            else:
                self.full_import_runner.run(
                    database_handler=self.database_handler,
                    serial_newest_mirror=serial_newest_mirror,
                    force_reload=force_reload)

            self.database_handler.commit()
        except OSError as ose:
            # I/O errors can occur and should not log a full traceback (#177)
            logger.error(
                f'An error occurred while attempting a mirror update or initial import '
                f'for {self.source}: {ose}')
        except Exception as exc:
            logger.error(
                f'An exception occurred while attempting a mirror update or initial import '
                f'for {self.source}: {exc}',
                exc_info=exc)
        finally:
            self.database_handler.close()

    def _status(self) -> Tuple[Optional[int], Optional[bool]]:
        """Return (serial_newest_mirror, force_reload), or (None, None) when no status row exists."""
        query = DatabaseStatusQuery().source(self.source)
        for status in self.database_handler.execute_query(query):
            return status['serial_newest_mirror'], status['force_reload']
        return None, None
def set_last_modified():
    """
    Re-render the stored object text of all objects in authoritative
    sources, using each object's stored 'updated' timestamp.
    """
    dh = DatabaseHandler()
    auth_sources = [
        name for name, settings in get_setting('sources').items()
        if settings.get('authoritative')
    ]
    query = RPSLDatabaseQuery(
        column_names=['pk', 'object_text', 'updated'],
        enable_ordering=False,
    ).sources(auth_sources)

    results = list(dh.execute_query(query))
    print(f'Updating {len(results)} objects in sources {auth_sources}')
    for result in results:
        rpsl_obj = rpsl_object_from_text(result['object_text'],
                                         strict_validation=False)
        if rpsl_obj.messages.errors():  # pragma: no cover
            print(
                f'Failed to process {rpsl_obj}: {rpsl_obj.messages.errors()}')
            continue
        new_text = rpsl_obj.render_rpsl_text(result['updated'])
        update_stmt = RPSLDatabaseObject.__table__.update().where(
            RPSLDatabaseObject.__table__.c.pk == result['pk']
        ).values(object_text=new_text)
        dh.execute_statement(update_stmt)
    dh.commit()
    dh.close()
Example #10
0
    def handle_query(self, query: str) -> WhoisQueryResponse:
        """
        Process a single query. Always returns a WhoisQueryResponse object.
        Not thread safe - only one call must be made to this method at the same time.

        Queries starting with '!' are handled as IRRD-style commands, all
        others as RIPE-style commands. Parsing errors are returned to the
        client; unexpected exceptions are logged and reported as an
        internal error.
        """
        # These flags are reset with every query.
        self.database_handler = DatabaseHandler()
        self.key_fields_only = False
        self.object_classes = []
        self.preloader = get_preloader()

        if query.startswith('!'):
            try:
                return self.handle_irrd_command(query[1:])
            except WhoisQueryParserException as exc:
                # Invalid client input: log informationally, no traceback.
                logger.info(
                    f'{self.peer_str}: encountered parsing error while parsing query "{query}": {exc}'
                )
                return WhoisQueryResponse(
                    response_type=WhoisQueryResponseType.ERROR,
                    mode=WhoisQueryResponseMode.IRRD,
                    result=str(exc))
            except Exception as exc:
                logger.error(
                    f'An exception occurred while processing whois query "{query}": {exc}',
                    exc_info=exc)
                return WhoisQueryResponse(
                    response_type=WhoisQueryResponseType.ERROR,
                    mode=WhoisQueryResponseMode.IRRD,
                    result=
                    'An internal error occurred while processing this query.')
            finally:
                # A new handler is created per query, so always close it.
                self.database_handler.close()

        try:
            return self.handle_ripe_command(query)
        except WhoisQueryParserException as exc:
            logger.info(
                f'{self.peer_str}: encountered parsing error while parsing query "{query}": {exc}'
            )
            return WhoisQueryResponse(
                response_type=WhoisQueryResponseType.ERROR,
                mode=WhoisQueryResponseMode.RIPE,
                result=str(exc))
        except Exception as exc:
            logger.error(
                f'An exception occurred while processing whois query "{query}": {exc}',
                exc_info=exc)
            return WhoisQueryResponse(
                response_type=WhoisQueryResponseType.ERROR,
                mode=WhoisQueryResponseMode.RIPE,
                result='An internal error occurred while processing this query.'
            )
        finally:
            self.database_handler.close()
Example #11
0
    def run(self) -> None:
        """Run an export of this source to its configured export_destination."""
        self.database_handler = DatabaseHandler()
        try:
            export_destination = get_setting(f'sources.{self.source}.export_destination')
            logger.info(f'Starting a source export for {self.source} to {export_destination}')
            self._export(export_destination)
            self.database_handler.commit()
        except Exception as exc:
            logger.error(
                f'An exception occurred while attempting to run an export '
                f'for {self.source}: {exc}',
                exc_info=exc)
        finally:
            self.database_handler.close()
Example #12
0
def update(source, filename) -> int:
    """
    Import the RPSL data in filename into the given source.

    Returns 0 on success, 1 if the file could not be processed (the whole
    import is rolled back), and 2 if the source has import_source or
    import_serial_source configured, in which case this command must not
    be used.
    """
    # Idiom fix: pass a generator to any() instead of building a list.
    if any(
        get_setting(f'sources.{source}.{key}')
        for key in ('import_source', 'import_serial_source')
    ):
        print(
            f'Error: to use this command, import_source and import_serial_source '
            f'for source {source} must not be set.')
        return 2

    dh = DatabaseHandler()
    roa_validator = BulkRouteROAValidator(dh)
    parser = MirrorUpdateFileImportParser(source,
                                          filename,
                                          database_handler=dh,
                                          direct_error_return=True,
                                          roa_validator=roa_validator)
    error = parser.run_import()
    # Roll back on any error so a partial load is never left behind.
    if error:
        dh.rollback()
    else:
        dh.commit()
    dh.close()
    if error:
        print(f'Error occurred while processing object:\n{error}')
        return 1
    return 0
Example #13
0
class SourceExportRunner:
    """
    This SourceExportRunner is the entry point for the export process
    for a single source.

    A gzipped file will be created in the export_destination directory
    with the contents of the source, along with a CURRENTSERIAL file.

    The contents of the source are first written to a temporary file, and
    then moved in place.
    """
    def __init__(self, source: str) -> None:
        self.source = source

    def run(self) -> None:
        # A database handler is created per run and always closed afterwards.
        self.database_handler = DatabaseHandler()
        try:
            export_destination = get_setting(f'sources.{self.source}.export_destination')
            logger.info(f'Starting a source export for {self.source} to {export_destination}')
            self._export(export_destination)

            self.database_handler.commit()
        except Exception as exc:
            logger.error(f'An exception occurred while attempting to run an export '
                         f'for {self.source}: {exc}', exc_info=exc)
        finally:
            self.database_handler.close()

    def _export(self, export_destination):
        """Write the gzipped export and CURRENTSERIAL files for this source."""
        filename_export = Path(export_destination) / f'{self.source.lower()}.db.gz'
        # delete=False: the temporary file is moved into place further down.
        export_tmpfile = NamedTemporaryFile(delete=False)
        filename_serial = Path(export_destination) / f'{self.source.upper()}.CURRENTSERIAL'

        query = DatabaseStatusQuery().source(self.source)

        try:
            serial = next(self.database_handler.execute_query(query))['serial_newest_seen']
        except StopIteration:
            # No status row for this source: nothing to export.
            logger.error(f'Unable to run export for {self.source}, internal database status is empty.')
            return

        with gzip.open(export_tmpfile, 'wb') as fh:
            query = RPSLDatabaseQuery().sources([self.source])
            for obj in self.database_handler.execute_query(query):
                # Auth hashes are stripped from the exported object text.
                object_bytes = remove_auth_hashes(obj['object_text']).encode('utf-8')
                fh.write(object_bytes + b'\n')

        # Remove any previous export files, then move the fresh file in place.
        if filename_export.exists():
            os.unlink(filename_export)
        if filename_serial.exists():
            os.unlink(filename_serial)
        shutil.move(export_tmpfile.name, filename_export)

        # serial can be None when no serial has been recorded for the source.
        if serial is not None:
            with open(filename_serial, 'w') as fh:
                fh.write(str(serial))

        self.database_handler.record_serial_exported(self.source, serial)
        logger.info(f'Export for {self.source} complete, stored in {filename_export} / {filename_serial}')
    def run(self,
            database_handler: DatabaseHandler,
            serial_newest_seen: Optional[int] = None,
            force_reload=False):
        """
        Run a full import for this source from its configured
        import_source URLs, unless the retrieved serial shows there is
        nothing new and force_reload is not set.
        """
        import_sources = get_setting(f'sources.{self.source}.import_source')
        if isinstance(import_sources, str):
            import_sources = [import_sources]
        import_serial_source = get_setting(
            f'sources.{self.source}.import_serial_source')

        if not import_sources:
            logger.info(
                f'Skipping full import for {self.source}, import_source not set.'
            )
            return

        logger.info(
            f'Running full import of {self.source} from {import_sources}, serial from {import_serial_source}'
        )

        import_serial = None
        if import_serial_source:
            serial_contents = self._retrieve_file(import_serial_source,
                                                  return_contents=True)[0]
            import_serial = int(serial_contents)

            if not force_reload and serial_newest_seen is not None and import_serial <= serial_newest_seen:
                logger.info(
                    f'Current newest serial seen for {self.source} is '
                    f'{serial_newest_seen}, import_serial is {import_serial}, cancelling import.'
                )
                return

        database_handler.delete_all_rpsl_objects_with_journal(self.source)
        retrieved_files = []
        for import_source in import_sources:
            retrieved_files.append(
                self._retrieve_file(import_source, return_contents=False))

        # The full import itself is not journaled.
        database_handler.disable_journaling()
        for import_filename, delete_after_import in retrieved_files:
            file_parser = MirrorFileImportParser(
                source=self.source,
                filename=import_filename,
                serial=import_serial,
                database_handler=database_handler)
            file_parser.run_import()
            if delete_after_import:
                os.unlink(import_filename)
Example #15
0
def load_pgp_keys(source: str) -> None:
    """
    Load all key-cert objects from source into the GnuPG keychain by
    re-parsing each one in strict mode.
    """
    dh = DatabaseHandler()
    query = RPSLDatabaseQuery(column_names=['rpsl_pk', 'object_text'])
    query = query.sources([source]).object_classes(['key-cert'])

    for keycert in dh.execute_query(query):
        rpsl_pk = keycert["rpsl_pk"]
        print(f'Loading key-cert {rpsl_pk}')
        # Parsing the keycert in strict mode will load it into the GPG keychain
        result = rpsl_object_from_text(keycert['object_text'], strict_validation=True)
        if result.messages.errors():
            print(f'Errors in PGP key {rpsl_pk}: {result.messages.errors()}')

    print('All valid key-certs loaded into the GnuPG keychain.')
    dh.close()
Example #16
0
async def startup():
    """
    Prepare the database connection and preloader, shared between
    queries in this process. As these run in a separate process, the
    config file path is read from the environment. On failure the main
    IRRd process is terminated via SIGTERM.
    """
    setproctitle('irrd-http-server-listener')
    global app
    config_init(os.getenv(ENV_UVICORN_WORKER_CONFIG_PATH))
    try:
        app.state.database_handler = DatabaseHandler(readonly=True)
        app.state.preloader = Preloader(enable_queries=True)
    except Exception as e:
        logger.critical(
            f'HTTP worker failed to initialise preloader or database, '
            f'unable to start, terminating IRRd, traceback follows: {e}',
            exc_info=e)
        main_pid = os.getenv(ENV_MAIN_PROCESS_PID)
        if not main_pid:
            logger.error(
                'Failed to terminate IRRd, unable to find main process PID')
            return
        os.kill(int(main_pid), signal.SIGTERM)
Example #17
0
 def save(self, database_handler: DatabaseHandler) -> None:
     """Persist this change through database_handler and mark it SAVED."""
     if self.status != UpdateRequestStatus.PROCESSING or not self.rpsl_obj_new:
         raise ValueError(
             "ChangeRequest can only be saved in status PROCESSING")
     is_delete = (self.request_type == UpdateRequestType.DELETE
                  and self.rpsl_obj_current is not None)
     if is_delete:
         logger.info(
             f'{id(self)}: Saving change for {self.rpsl_obj_new}: deleting current object'
         )
         database_handler.delete_rpsl_object(self.rpsl_obj_current)
     else:
         logger.info(
             f'{id(self)}: Saving change for {self.rpsl_obj_new}: inserting/updating current object'
         )
         database_handler.upsert_rpsl_object(self.rpsl_obj_new)
     self.status = UpdateRequestStatus.SAVED
Example #18
0
    def save(self, database_handler: DatabaseHandler) -> bool:
        """
        Apply this NRTM operation to the database.

        Returns True when the operation was applied, False when it was
        ignored: filtered or unknown object class, parsing errors, or a
        source mismatch with the NRTM stream.
        """
        # NOTE(review): default_source is only applied for deletions —
        # presumably deletion records may omit the source attribute; confirm.
        default_source = self.source if self.operation == DatabaseOperation.delete else None
        try:
            object_text = self.object_text.strip()
            # If an object turns out to be a key-cert, and strict_import_keycert_objects
            # is set, parse it again with strict validation to load it in the GPG keychain.
            obj = rpsl_object_from_text(object_text,
                                        strict_validation=False,
                                        default_source=default_source)
            if self.strict_validation_key_cert and obj.__class__ == RPSLKeyCert:
                obj = rpsl_object_from_text(object_text,
                                            strict_validation=True,
                                            default_source=default_source)

        except UnknownRPSLObjectClassException as exc:
            # Unknown object classes are only logged if they have not been filtered out.
            if not self.object_class_filter or exc.rpsl_object_class.lower(
            ) in self.object_class_filter:
                logger.info(f'Ignoring NRTM operation {str(self)}: {exc}')
            return False

        # Silently skip object classes excluded by the configured filter.
        if self.object_class_filter and obj.rpsl_object_class.lower(
        ) not in self.object_class_filter:
            return False

        if obj.messages.errors():
            errors = '; '.join(obj.messages.errors())
            logger.critical(
                f'Parsing errors occurred while processing NRTM operation {str(self)}. '
                f'This operation is ignored, causing potential data inconsistencies. '
                f'A new operation for this update, without errors, '
                f'will still be processed and cause the inconsistency to be resolved. '
                f'Parser error messages: {errors}; original object text follows:\n{self.object_text}'
            )
            database_handler.record_mirror_error(
                self.source, f'Parsing errors: {obj.messages.errors()}, '
                f'original object text follows:\n{self.object_text}')
            return False

        # Reject objects whose source attribute does not match the stream's source.
        if 'source' in obj.parsed_data and obj.parsed_data['source'].upper(
        ) != self.source:
            msg = (
                f'Incorrect source in NRTM object: stream has source {self.source}, found object with '
                f'source {obj.source()} in operation {self.serial}/{self.operation.value}/{obj.pk()}. '
                f'This operation is ignored, causing potential data inconsistencies.'
            )
            database_handler.record_mirror_error(self.source, msg)
            logger.critical(msg)
            return False

        if self.operation == DatabaseOperation.add_or_update:
            database_handler.upsert_rpsl_object(obj, self.serial)
        elif self.operation == DatabaseOperation.delete:
            database_handler.delete_rpsl_object(obj, self.serial)

        logger.info(f'Completed NRTM operation {str(self)}/{obj.pk()}')
        return True
Example #19
0
class MirrorUpdateRunner:
    """
    Entry point for updating a single database mirror, depending on
    its current state.

    When no mirrored data exists yet (or a reload is forced),
    MirrorFullImportRunner runs a new import from full export files;
    otherwise NRTMUpdateStreamRunner retrieves new updates over NRTM.
    """
    def __init__(self, source: str) -> None:
        self.source = source
        self.full_import_runner = MirrorFullImportRunner(source)
        self.update_stream_runner = NRTMUpdateStreamRunner(source)

    def run(self) -> None:
        self.database_handler = DatabaseHandler()

        try:
            serial_newest_seen, force_reload = self._status()
            logger.debug(
                f'Most recent serial seen for {self.source}: {serial_newest_seen}, force_reload: {force_reload}'
            )
            # Stream updates only when a serial is known and no reload is
            # forced; otherwise fall back to a full import.
            if serial_newest_seen and not force_reload:
                self.update_stream_runner.run(
                    serial_newest_seen, database_handler=self.database_handler)
            else:
                self.full_import_runner.run(
                    database_handler=self.database_handler)

            self.database_handler.commit()
        except Exception as exc:
            logger.critical(
                f'An exception occurred while attempting a mirror update or initial import '
                f'for {self.source}: {exc}',
                exc_info=exc)
        finally:
            self.database_handler.close()

    def _status(self) -> Tuple[Optional[int], Optional[bool]]:
        """Return (serial_newest_seen, force_reload), or (None, None) when no status row exists."""
        query = DatabaseStatusQuery().source(self.source)
        for status in self.database_handler.execute_query(query):
            return status['serial_newest_seen'], status['force_reload']
        return None, None
Example #20
0
class RPSLParse:
    """
    Parse a file of RPSL objects, print validation results, and optionally
    load the parsed objects into the database.
    """
    # NOTE(review): these are class-level attributes; the mutable
    # unknown_object_classes set is shared between all instances — confirm
    # that a single RPSLParse instance is used per process.
    obj_parsed = 0
    obj_errors = 0
    obj_unknown = 0
    unknown_object_classes: Set[str] = set()
    database_handler = None

    def main(self, filename, strict_validation, database, show_info=True):
        """
        Parse all objects in filename ('-' reads from stdin).

        strict_validation: passed through to the RPSL object parser.
        database: when True, upsert valid objects into the database,
        with journaling disabled.
        show_info: when True, also print objects that only have info messages.
        """
        self.show_info = show_info
        if database:
            self.database_handler = DatabaseHandler()
            self.database_handler.disable_journaling()

        if filename == '-':  # pragma: no cover
            f = sys.stdin
        else:
            f = open(filename, encoding='utf-8', errors='backslashreplace')

        try:
            for paragraph in split_paragraphs_rpsl(f):
                self.parse_object(paragraph, strict_validation)
        finally:
            # Fix: the input file was previously never closed.
            if f is not sys.stdin:
                f.close()

        print(f'Processed {self.obj_parsed} objects, {self.obj_errors} with errors')
        if self.obj_unknown:
            unknown_formatted = ', '.join(self.unknown_object_classes)
            print(f'Ignored {self.obj_unknown} objects due to unknown object classes: {unknown_formatted}')

        if self.database_handler:
            self.database_handler.commit()
            self.database_handler.close()

    def parse_object(self, rpsl_text, strict_validation):
        """
        Parse a single RPSL object text, print its errors (and info messages
        when show_info is set), and upsert it into the database when a
        handler is configured and the object has no errors.
        """
        try:
            self.obj_parsed += 1
            obj = rpsl_object_from_text(rpsl_text.strip(), strict_validation=strict_validation)
            if (obj.messages.messages() and self.show_info) or obj.messages.errors():
                if obj.messages.errors():
                    self.obj_errors += 1

                print(rpsl_text.strip())
                print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
                print(obj.messages)
                print('\n=======================================\n')

            if self.database_handler and obj and not obj.messages.errors():
                self.database_handler.upsert_rpsl_object(obj, JournalEntryOrigin.mirror)

        except UnknownRPSLObjectClassException as e:
            self.obj_unknown += 1
            # The exception message has the form "...: <class>"; record the class.
            self.unknown_object_classes.add(str(e).split(':')[1].strip())
        except Exception as e:  # pragma: no cover
            print('=======================================')
            print(rpsl_text)
            print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
            raise e
Exemple #21
0
 def save(self, database_handler: DatabaseHandler) -> None:
     """Save the change to the database."""
     # Saving is only valid while the request is still being processed.
     if self.status != UpdateRequestStatus.PROCESSING or not self.rpsl_obj_new:
         raise ValueError(
             'ChangeRequest can only be saved in status PROCESSING')

     is_delete = (self.request_type == UpdateRequestType.DELETE
                  and self.rpsl_obj_current is not None)
     if is_delete:
         logger.info(
             f'{id(self)}: Saving change for {self.rpsl_obj_new}: deleting current object'
         )
         database_handler.delete_rpsl_object(self.rpsl_obj_current)
     else:
         if not self.used_override:
             # Carry over unchanged date attributes from the current object.
             self.rpsl_obj_new.overwrite_date_new_changed_attributes(
                 self.rpsl_obj_current)
             # This call may have emitted a new info message.
             self._import_new_rpsl_obj_info_messages()
         logger.info(
             f'{id(self)}: Saving change for {self.rpsl_obj_new}: inserting/updating current object'
         )
         database_handler.upsert_rpsl_object(self.rpsl_obj_new)
     self.status = UpdateRequestStatus.SAVED
Exemple #22
0
 def save(self, database_handler: DatabaseHandler):
     """
     Save the ROA object to the DB, create a pseudo-IRR object, and save that too.
     """
     # The ROA row and the pseudo-IRR object share the same set of fields.
     shared_fields = dict(
         prefix_str=self.prefix_str,
         asn=self.asn,
         max_length=self.max_length,
         trust_anchor=self.trust_anchor,
     )
     database_handler.insert_roa_object(
         ip_version=self.prefix.version(),
         **shared_fields,
     )
     self._rpsl_object = RPSLObjectFromROA(
         prefix=self.prefix,
         **shared_fields,
     )
     database_handler.upsert_rpsl_object(
         self._rpsl_object,
         JournalEntryOrigin.pseudo_irr,
         rpsl_guaranteed_no_existing=True)
Exemple #23
0
    def load_text_blob(self,
                       object_texts_blob: str,
                       pgp_fingerprint: Optional[str] = None,
                       request_meta: Optional[Dict[str, Optional[str]]] = None):
        """
        Process a blob of submitted RPSL object texts.

        pgp_fingerprint: when set, resolved to a PGP key ID for authentication.
        request_meta: metadata about the request, stored on the instance.
        Returns self, so callers can chain further calls.
        """
        self.database_handler = DatabaseHandler()
        self.request_meta = request_meta if request_meta else {}
        self._pgp_key_id = self._resolve_pgp_key_id(
            pgp_fingerprint) if pgp_fingerprint else None

        try:
            reference_validator = ReferenceValidator(self.database_handler)
            auth_validator = AuthValidator(self.database_handler, self._pgp_key_id)
            change_requests = parse_change_requests(object_texts_blob,
                                                    self.database_handler,
                                                    auth_validator,
                                                    reference_validator)

            self._handle_change_requests(change_requests, reference_validator,
                                         auth_validator)
            self.database_handler.commit()
        finally:
            # Fix: previously the handler leaked if parsing or processing raised.
            self.database_handler.close()
        return self
Exemple #24
0
    def load_change_submission(self,
                               data: RPSLChangeSubmission,
                               delete=False,
                               request_meta: Optional[Dict[str, Optional[str]]] = None):
        """
        Process a structured RPSL change submission.

        data: the parsed submission, containing objects, passwords and
        an optional override password.
        delete: when True, all objects in the submission are deletions.
        request_meta: metadata about the request, stored on the instance.
        Returns self, so callers can chain further calls.
        """
        self.database_handler = DatabaseHandler()
        self.request_meta = request_meta if request_meta else {}

        try:
            reference_validator = ReferenceValidator(self.database_handler)
            auth_validator = AuthValidator(self.database_handler)
            change_requests: List[Union[ChangeRequest, SuspensionRequest]] = []

            delete_reason = None
            if delete:
                delete_reason = data.delete_reason

            auth_validator.passwords = data.passwords
            auth_validator.overrides = [data.override] if data.override else []

            for rpsl_obj in data.objects:
                object_text = rpsl_obj.object_text
                if rpsl_obj.attributes:
                    # We don't have a neat way to process individual attribute pairs,
                    # so construct a pseudo-object by appending the text.
                    composite_object = [
                        attribute.name + ': ' + attribute.value  # type: ignore
                        for attribute in rpsl_obj.attributes
                    ]
                    object_text = '\n'.join(composite_object) + '\n'

                assert object_text  # enforced by pydantic
                change_requests.append(
                    ChangeRequest(object_text, self.database_handler,
                                  auth_validator, reference_validator,
                                  delete_reason))

            self._handle_change_requests(change_requests, reference_validator,
                                         auth_validator)
            self.database_handler.commit()
        finally:
            # Fix: previously the handler leaked if building or processing
            # the change requests raised.
            self.database_handler.close()
        return self
Exemple #25
0
    def run(self) -> None:
        """
        Run a single mirror update for this source: a full import when no
        serial has been seen yet or a reload was forced, an NRTM stream
        update otherwise. Opens its own DatabaseHandler, commits on
        success, and always closes it.
        """
        self.database_handler = DatabaseHandler()

        try:
            serial_newest_seen, force_reload = self._status()
            logger.debug(
                f'Most recent serial seen for {self.source}: {serial_newest_seen}, force_reload: {force_reload}'
            )
            if not serial_newest_seen or force_reload:
                self.full_import_runner.run(
                    database_handler=self.database_handler)
            else:
                self.update_stream_runner.run(
                    serial_newest_seen, database_handler=self.database_handler)

            self.database_handler.commit()
        except OSError as ose:
            # I/O errors (e.g. failed retrievals) are expected operational
            # events and should not log a full traceback.
            logger.error(
                f'An error occurred while attempting a mirror update or initial import '
                f'for {self.source}: {ose}')
        except Exception as exc:
            # A failed update for a single source should not be treated as
            # process-fatal: log at error severity, with traceback.
            logger.error(
                f'An exception occurred while attempting a mirror update or initial import '
                f'for {self.source}: {exc}',
                exc_info=exc)
        finally:
            self.database_handler.close()
Exemple #26
0
    def run(self, database_handler: DatabaseHandler):
        """
        Run a full import for this source from the configured import_source
        URL(s), replacing all existing objects and journal entries.

        Does nothing (beyond a log message) when import_source is not set.
        """
        import_sources = get_setting(f'sources.{self.source}.import_source')
        if isinstance(import_sources, str):
            # A single URL may be configured as a plain string; normalise to a list.
            import_sources = [import_sources]
        import_serial_source = get_setting(
            f'sources.{self.source}.import_serial_source')

        if not import_sources:
            logger.info(
                f'Skipping full import for {self.source}, import_source not set.'
            )
            return

        # Destructive: removes all current objects and journal entries for
        # this source before loading the fresh export.
        database_handler.delete_all_rpsl_objects_with_journal(self.source)
        logger.info(
            f'Running full import of {self.source} from {import_sources}, serial from {import_serial_source}'
        )

        # Serial defaults to 0 when no import_serial_source is configured.
        import_serial = 0
        if import_serial_source:
            import_serial = int(
                self._retrieve_file(import_serial_source,
                                    return_contents=True)[0])

        # Retrieve all export files up front; each entry is a tuple of
        # (local filename, whether it is a temporary copy to delete after use).
        import_data = [
            self._retrieve_file(import_source, return_contents=False)
            for import_source in import_sources
        ]

        # Journaling is disabled for the bulk load.
        # NOTE(review): presumably re-enabled elsewhere after the import — verify.
        database_handler.disable_journaling()
        for import_filename, to_delete in import_data:
            # NOTE(review): the parser instance is discarded — this assumes
            # MirrorFileImportParser performs the import as a side effect of
            # construction; confirm against its definition.
            MirrorFileImportParser(source=self.source,
                                   filename=import_filename,
                                   serial=import_serial,
                                   database_handler=database_handler)
            if to_delete:
                os.unlink(import_filename)
Exemple #27
0
    def validate_all_rpsl_objects(self, database_handler: DatabaseHandler) -> \
            Tuple[List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]]]:
        """
        Run the scope filter over all route/route6 objects in the database.

        Returns a tuple of three lists of objects whose status should change:
        - objects that should become in_scope but are not now
        - objects that should become out_scope_as but are not now
        - objects that should become out_scope_prefix but are not now
        Each entry is a dict with the fields listed in "columns" below,
        plus 'old_status'. Objects whose stored status already matches the
        new validation result are omitted from the return value.
        """
        columns = [
            'rpsl_pk', 'ip_first', 'prefix_length', 'asn_first', 'source',
            'object_class', 'object_text', 'scopefilter_status'
        ]

        # Changed objects, grouped by the status they should move to.
        changed_by_status: Dict[ScopeFilterStatus,
                                List[Dict[str, str]]] = defaultdict(list)

        query = RPSLDatabaseQuery(column_names=columns, enable_ordering=False)
        query = query.object_classes(['route', 'route6'])

        for row in database_handler.execute_query(query):
            current_status = row['scopefilter_status']
            row['old_status'] = current_status
            if row['ip_first']:
                prefix = IP(row['ip_first'] + '/' + str(row['prefix_length']))
            else:
                prefix = None
            new_status, _ = self._validate_rpsl_data(
                row['source'], row['object_class'], prefix, row['asn_first'])
            if new_status != current_status:
                row['scopefilter_status'] = new_status
                changed_by_status[new_status].append(row)

        return (changed_by_status[ScopeFilterStatus.in_scope],
                changed_by_status[ScopeFilterStatus.out_scope_as],
                changed_by_status[ScopeFilterStatus.out_scope_prefix])
Exemple #28
0
    def run(self, keep_running=True) -> None:
        """
        Main loop of the whois worker.

        Does not return under normal operation. Returns only when preloader
        or database initialisation fails (after asking the main process to
        terminate), or — with keep_running=False, as used in the tests —
        after handling a single request.
        """
        # Restore default SIGTERM behavior: the special sigterm_handler set
        # in start_whois_server() is inherited by this process.
        signal.signal(signal.SIGTERM, signal.SIG_DFL)

        try:
            self.preloader = Preloader()
            self.database_handler = DatabaseHandler(readonly=True)
        except Exception as exc:
            logger.critical(
                f'Whois worker failed to initialise preloader or database, '
                f'unable to start, terminating IRRd, traceback follows: {exc}',
                exc_info=exc)
            main_pid = os.getenv(ENV_MAIN_PROCESS_PID)
            if main_pid:  # pragma: no cover
                os.kill(int(main_pid), signal.SIGTERM)
            else:
                logger.error(
                    'Failed to terminate IRRd, unable to find main process PID'
                )
            return

        keep_going = True
        while keep_going:
            try:
                setproctitle('irrd-whois-worker')
                self.request, self.client_address = self.connection_queue.get()
                self.setup()
                self.handle_connection()
                self.finish()
                self.close_request()
                memory_trim()
            except Exception as exc:
                # Best-effort cleanup of the failed request before logging.
                try:
                    self.close_request()
                except Exception:  # pragma: no cover
                    pass
                logger.error(
                    f'Failed to handle whois connection, traceback follows: {exc}',
                    exc_info=exc)
            # In tests (keep_running=False) stop after the first request.
            keep_going = keep_running
Exemple #29
0
    def generate_status(self) -> str:
        """
        Generate a human-readable overview of database status.

        Queries object statistics and per-source status, then returns the
        header, statistics table and source detail joined by blank lines.
        """
        database_handler = DatabaseHandler()
        try:
            statistics_query = RPSLDatabaseObjectStatisticsQuery()
            self.statistics_results = list(database_handler.execute_query(statistics_query))
            status_query = DatabaseStatusQuery()
            self.status_results = list(database_handler.execute_query(status_query))

            results = [self._generate_header(), self._generate_statistics_table(), self._generate_source_detail()]
        finally:
            # Fix: previously the handler leaked if a query or rendering raised.
            database_handler.close()
        return '\n\n'.join(results)
Exemple #30
0
    def run(self, keep_running=True) -> None:
        """
        Whois worker run loop.
        This method does not return, except if it failed to initialise a preloader,
        or if keep_running is False, after the first request is handled. The latter
        is used in the tests.
        """
        # Disable the special sigterm_handler defined in start_whois_server()
        # (signal handlers are inherited)
        signal.signal(signal.SIGTERM, signal.SIG_DFL)

        try:
            self.preloader = Preloader()
            self.database_handler = DatabaseHandler()
        except Exception as e:
            # Fix: added the missing space between the two f-string parts so
            # the message no longer renders as "database,unable to start".
            logger.error(f'Whois worker failed to initialise preloader or database, '
                         f'unable to start, traceback follows: {e}', exc_info=e)
            return

        while True:
            try:
                setproctitle('irrd-whois-worker')
                self.request, self.client_address = self.connection_queue.get()
                self.setup()
                self.handle_connection()
                self.finish()
                self.close_request()
            except Exception as e:
                # Best-effort cleanup of the failed request before logging.
                try:
                    self.close_request()
                except Exception:  # pragma: no cover
                    pass
                logger.error(f'Failed to handle whois connection, traceback follows: {e}',
                             exc_info=e)
            if not keep_running:
                break