def load(source, filename, serial) -> int:
    """
    Run a full RPSL import of a file for a single source.

    Deletes all current objects for the source (including the journal),
    then imports `filename` with journaling disabled, validating routes
    against ROAs. Returns 0 on success, 1 on an import error, 2 when the
    source configuration does not permit manual loading.
    """
    if any([
        get_setting(f'sources.{source}.import_source'),
        get_setting(f'sources.{source}.import_serial_source')
    ]):
        print(f'Error: to use this command, import_source and import_serial_source '
              f'for source {source} must not be set.')
        return 2

    dh = DatabaseHandler()
    try:
        roa_validator = BulkRouteROAValidator(dh)
        dh.delete_all_rpsl_objects_with_journal(source)
        dh.disable_journaling()
        parser = MirrorFileImportParser(
            source=source, filename=filename, serial=serial, database_handler=dh,
            direct_error_return=True, roa_validator=roa_validator)
        error = parser.run_import()
        if error:
            dh.rollback()
        else:
            dh.commit()
    finally:
        # Always release the database connection, even if the import raises;
        # the original leaked the handler on an unexpected exception.
        dh.close()
    if error:
        print(f'Error occurred while processing object:\n{error}')
        return 1
    return 0
def set_last_modified():
    """Re-render the stored text of every object in all authoritative
    sources, so the last-modified attribute reflects the database's
    'updated' timestamp."""
    dh = DatabaseHandler()
    authoritative = [
        name for name, source_settings in get_setting('sources').items()
        if source_settings.get('authoritative')
    ]
    query = RPSLDatabaseQuery(column_names=['pk', 'object_text', 'updated'],
                              enable_ordering=False)
    query = query.sources(authoritative)
    rows = list(dh.execute_query(query))
    print(f'Updating {len(rows)} objects in sources {authoritative}')
    for row in rows:
        obj = rpsl_object_from_text(row['object_text'], strict_validation=False)
        if obj.messages.errors():  # pragma: no cover
            print(f'Failed to process {obj}: {obj.messages.errors()}')
            continue
        rendered_text = obj.render_rpsl_text(row['updated'])
        update_stmt = RPSLDatabaseObject.__table__.update().where(
            RPSLDatabaseObject.__table__.c.pk == row['pk']
        ).values(
            object_text=rendered_text,
        )
        dh.execute_statement(update_stmt)
    dh.commit()
    dh.close()
def update(source, filename) -> int:
    """
    Run an update for a single source from an NRTM-style delta file.

    Returns 0 on success, 1 on an import error, 2 when the source
    configuration does not permit manual updates.
    """
    if any([
        get_setting(f'sources.{source}.import_source'),
        get_setting(f'sources.{source}.import_serial_source')
    ]):
        print(f'Error: to use this command, import_source and import_serial_source '
              f'for source {source} must not be set.')
        return 2

    dh = DatabaseHandler()
    try:
        roa_validator = BulkRouteROAValidator(dh)
        parser = MirrorUpdateFileImportParser(source, filename, database_handler=dh,
                                              direct_error_return=True,
                                              roa_validator=roa_validator)
        error = parser.run_import()
        if error:
            dh.rollback()
        else:
            dh.commit()
    finally:
        # Always release the database connection, even if the import raises;
        # the original leaked the handler on an unexpected exception.
        dh.close()
    if error:
        print(f'Error occurred while processing object:\n{error}')
        return 1
    return 0
class SourceExportRunner:
    """
    This SourceExportRunner is the entry point for the export process
    for a single source.

    A gzipped file will be created in the export_destination directory
    with the contents of the source, along with a CURRENTSERIAL file.

    The contents of the source are first written to a temporary file, and
    then moved in place.
    """
    def __init__(self, source: str) -> None:
        self.source = source

    def run(self) -> None:
        self.database_handler = DatabaseHandler()
        try:
            export_destination = get_setting(f'sources.{self.source}.export_destination')
            logger.info(f'Starting a source export for {self.source} to {export_destination}')
            self._export(export_destination)
            self.database_handler.commit()
        except Exception as exc:
            logger.error(f'An exception occurred while attempting to run an export '
                         f'for {self.source}: {exc}', exc_info=exc)
        finally:
            self.database_handler.close()

    def _export(self, export_destination):
        # Write to a temporary file first, then atomically move it in place.
        filename_export = Path(export_destination) / f'{self.source.lower()}.db.gz'
        export_tmpfile = NamedTemporaryFile(delete=False)
        filename_serial = Path(export_destination) / f'{self.source.upper()}.CURRENTSERIAL'

        query = DatabaseStatusQuery().source(self.source)
        try:
            serial = next(self.database_handler.execute_query(query))['serial_newest_seen']
        except StopIteration:
            logger.error(f'Unable to run export for {self.source}, internal database status is empty.')
            # delete=False means the temporary file would otherwise be
            # left behind on this early return - remove it explicitly.
            os.unlink(export_tmpfile.name)
            return

        with gzip.open(export_tmpfile, 'wb') as fh:
            query = RPSLDatabaseQuery().sources([self.source])
            for obj in self.database_handler.execute_query(query):
                # Auth hashes are stripped so exports do not leak password hashes.
                object_bytes = remove_auth_hashes(obj['object_text']).encode('utf-8')
                fh.write(object_bytes + b'\n')

        if filename_export.exists():
            os.unlink(filename_export)
        if filename_serial.exists():
            os.unlink(filename_serial)
        shutil.move(export_tmpfile.name, filename_export)

        if serial is not None:
            with open(filename_serial, 'w') as fh:
                fh.write(str(serial))
            self.database_handler.record_serial_exported(self.source, serial)
        logger.info(f'Export for {self.source} complete, '
                    f'stored in {filename_export} / {filename_serial}')
class RPSLParse:
    """
    Parse a file (or stdin) of RPSL text, print parsing results and
    statistics, and optionally store valid objects in the database.
    """
    # NOTE: these are class attributes, shared between instances;
    # each instance is expected to be used for a single run.
    obj_parsed = 0
    obj_errors = 0
    obj_unknown = 0
    unknown_object_classes: Set[str] = set()
    database_handler = None

    def main(self, filename, strict_validation, database, show_info=True):
        """
        Parse all objects in `filename` ('-' for stdin). When `database` is
        truthy, valid objects are upserted into the database without
        journaling. `show_info` controls printing of non-error messages.
        """
        self.show_info = show_info
        if database:
            self.database_handler = DatabaseHandler(journaling_enabled=False)

        if filename == '-':  # pragma: no cover
            f = sys.stdin
        else:
            f = open(filename, encoding='utf-8', errors='backslashreplace')

        try:
            for paragraph in split_paragraphs_rpsl(f):
                self.parse_object(paragraph, strict_validation)
        finally:
            # Close the file we opened; stdin is not ours to close.
            if f is not sys.stdin:
                f.close()

        print(f'Processed {self.obj_parsed} objects, {self.obj_errors} with errors')
        if self.obj_unknown:
            unknown_formatted = ', '.join(self.unknown_object_classes)
            print(f'Ignored {self.obj_unknown} objects due to unknown object classes: {unknown_formatted}')

        if self.database_handler:
            self.database_handler.commit()
            self.database_handler.close()

    def parse_object(self, rpsl_text, strict_validation):
        """Parse a single RPSL object text, updating counters, printing
        any messages, and upserting valid objects when a database handler
        is active."""
        try:
            self.obj_parsed += 1
            obj = rpsl_object_from_text(rpsl_text.strip(), strict_validation=strict_validation)
            if (obj.messages.messages() and self.show_info) or obj.messages.errors():
                if obj.messages.errors():
                    self.obj_errors += 1

                print(rpsl_text.strip())
                print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
                print(obj.messages)
                print('\n=======================================\n')

            if self.database_handler and obj and not obj.messages.errors():
                self.database_handler.upsert_rpsl_object(obj)
        except UnknownRPSLObjectClassException as e:
            # Exception message has the form "...: <class>"; record the class name.
            self.obj_unknown += 1
            self.unknown_object_classes.add(str(e).split(':')[1].strip())
        except Exception as e:  # pragma: no cover
            print('=======================================')
            print(rpsl_text)
            print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
            raise e
class RPSLMirrorImportUpdateRunner:
    """
    This RPSLMirrorImportUpdateRunner is the entry point for updating a single
    database mirror, depending on current state.

    If there is no current mirrored data, will call RPSLMirrorFullImportRunner
    to run a new import from full export files. Otherwise, will call
    NRTMImportUpdateStreamRunner to retrieve new updates from NRTM.
    """
    def __init__(self, source: str) -> None:
        self.source = source
        self.full_import_runner = RPSLMirrorFullImportRunner(source)
        self.update_stream_runner = NRTMImportUpdateStreamRunner(source)

    def run(self) -> None:
        # Open a fresh handler per run; always closed in the finally below.
        self.database_handler = DatabaseHandler()

        try:
            serial_newest_mirror, force_reload = self._status()
            nrtm_enabled = bool(get_setting(f'sources.{self.source}.nrtm_host'))
            logger.debug(f'Most recent mirrored serial for {self.source}: {serial_newest_mirror}, '
                         f'force_reload: {force_reload}, nrtm enabled: {nrtm_enabled}')
            # Full import when a reload is forced, when nothing was mirrored
            # yet, or when NRTM is not configured; otherwise stream updates.
            if force_reload or not serial_newest_mirror or not nrtm_enabled:
                self.full_import_runner.run(database_handler=self.database_handler,
                                            serial_newest_mirror=serial_newest_mirror,
                                            force_reload=force_reload)
            else:
                self.update_stream_runner.run(serial_newest_mirror,
                                              database_handler=self.database_handler)

            self.database_handler.commit()
        except OSError as ose:
            # I/O errors can occur and should not log a full traceback (#177)
            logger.error(f'An error occurred while attempting a mirror update or initial import '
                         f'for {self.source}: {ose}')
        except Exception as exc:
            logger.error(f'An exception occurred while attempting a mirror update or initial import '
                         f'for {self.source}: {exc}', exc_info=exc)
        finally:
            self.database_handler.close()

    def _status(self) -> Tuple[Optional[int], Optional[bool]]:
        # Returns (serial_newest_mirror, force_reload) from the status table,
        # or (None, None) when no status row exists for this source yet.
        query = DatabaseStatusQuery().source(self.source)
        result = self.database_handler.execute_query(query)
        try:
            status = next(result)
            return status['serial_newest_mirror'], status['force_reload']
        except StopIteration:
            return None, None
def load(source, filename, serial) -> int:
    """Delete all objects for a source (journal included) and import
    a fresh full copy from `filename`, without journaling.

    Returns 0 on success, 1 when the import reported an error."""
    dh = DatabaseHandler()
    dh.delete_all_rpsl_objects_with_journal(source)
    dh.disable_journaling()
    parser = MirrorFileImportParser(source, filename, serial=serial,
                                    database_handler=dh,
                                    direct_error_return=True)
    import_error = parser.run_import()
    if import_error:
        dh.rollback()
    else:
        dh.commit()
    dh.close()

    if not import_error:
        return 0
    print(f'Error occurred while processing object:\n{import_error}')
    return 1
class MirrorUpdateRunner:
    """
    This MirrorUpdateRunner is the entry point for updating a single
    database mirror, depending on current state.

    If there is no current mirrored data, will call MirrorFullImportRunner
    to run a new import from full export files. Otherwise, will call
    NRTMUpdateStreamRunner to retrieve new updates from NRTM.
    """
    def __init__(self, source: str) -> None:
        self.source = source
        self.full_import_runner = MirrorFullImportRunner(source)
        self.update_stream_runner = NRTMUpdateStreamRunner(source)

    def run(self) -> None:
        # Open a fresh handler per run; always closed in the finally below.
        self.database_handler = DatabaseHandler()

        try:
            serial_newest_seen, force_reload = self._status()
            logger.debug(f'Most recent serial seen for {self.source}: {serial_newest_seen}, force_reload: {force_reload}')
            # Full import when nothing has been mirrored yet or a reload
            # is forced; otherwise retrieve incremental NRTM updates.
            if not serial_newest_seen or force_reload:
                self.full_import_runner.run(database_handler=self.database_handler)
            else:
                self.update_stream_runner.run(serial_newest_seen, database_handler=self.database_handler)

            self.database_handler.commit()
        except Exception as exc:
            logger.critical(f'An exception occurred while attempting a mirror update or initial import '
                            f'for {self.source}: {exc}', exc_info=exc)
        finally:
            self.database_handler.close()

    def _status(self) -> Tuple[Optional[int], Optional[bool]]:
        # Returns (serial_newest_seen, force_reload) from the status table,
        # or (None, None) when no status row exists for this source yet.
        query = DatabaseStatusQuery().source(self.source)
        result = self.database_handler.execute_query(query)
        try:
            status = next(result)
            return status['serial_newest_seen'], status['force_reload']
        except StopIteration:
            return None, None
class ScopeFilterUpdateRunner:
    """
    Update the scope filter status for all objects.
    This runner does not actually import anything: the scope filter
    configuration is the only input.
    """
    # API consistency with other importers, source is actually ignored
    def __init__(self, source=None):
        pass

    def run(self):
        self.database_handler = DatabaseHandler()
        try:
            validator = ScopeFilterValidator()
            now_in_scope, now_out_scope_as, now_out_scope_prefix = \
                validator.validate_all_rpsl_objects(self.database_handler)
            self.database_handler.update_scopefilter_status(
                rpsl_objs_now_in_scope=now_in_scope,
                rpsl_objs_now_out_scope_as=now_out_scope_as,
                rpsl_objs_now_out_scope_prefix=now_out_scope_prefix,
            )
            self.database_handler.commit()
            logger.info(
                f'Scopefilter status updated for all routes, '
                f'{len(now_in_scope)} newly in scope, '
                f'{len(now_out_scope_as)} newly out of scope AS, '
                f'{len(now_out_scope_prefix)} newly out of scope prefix'
            )
        except Exception as exc:
            logger.error(
                f'An exception occurred while attempting a scopefilter status update: {exc}',
                exc_info=exc)
        finally:
            self.database_handler.close()
class ChangeSubmissionHandler:
    """
    The ChangeSubmissionHandler handles the text of one or more requested RPSL changes
    (create, modify or delete), parses, validates and eventually saves
    them. This includes validating references between objects, including
    those part of the same message, and checking authentication.
    """

    def load_text_blob(self, object_texts_blob: str, pgp_fingerprint: str = None,
                       request_meta: Dict[str, Optional[str]] = None):
        """Parse a raw text blob of one or more RPSL objects, validate and
        save the resulting change requests. Returns self for chaining."""
        self.database_handler = DatabaseHandler()
        self.request_meta = request_meta if request_meta else {}
        self._pgp_key_id = self._resolve_pgp_key_id(pgp_fingerprint) if pgp_fingerprint else None

        reference_validator = ReferenceValidator(self.database_handler)
        auth_validator = AuthValidator(self.database_handler, self._pgp_key_id)
        change_requests = parse_change_requests(object_texts_blob, self.database_handler,
                                                auth_validator, reference_validator)

        self._handle_change_requests(change_requests, reference_validator, auth_validator)
        self.database_handler.commit()
        self.database_handler.close()
        return self

    def load_change_submission(self, data: RPSLChangeSubmission, delete=False,
                               request_meta: Dict[str, Optional[str]] = None):
        """Process a structured (API) change submission, optionally as a
        deletion. Returns self for chaining."""
        self.database_handler = DatabaseHandler()
        self.request_meta = request_meta if request_meta else {}

        reference_validator = ReferenceValidator(self.database_handler)
        auth_validator = AuthValidator(self.database_handler)
        change_requests: List[Union[ChangeRequest, SuspensionRequest]] = []

        delete_reason = None
        if delete:
            delete_reason = data.delete_reason

        auth_validator.passwords = data.passwords
        auth_validator.overrides = [data.override] if data.override else []

        for rpsl_obj in data.objects:
            object_text = rpsl_obj.object_text
            if rpsl_obj.attributes:
                # We don't have a neat way to process individual attribute pairs,
                # so construct a pseudo-object by appending the text.
                composite_object = []
                for attribute in rpsl_obj.attributes:
                    composite_object.append(attribute.name + ': ' + attribute.value)  # type: ignore
                object_text = '\n'.join(composite_object) + '\n'

            assert object_text  # enforced by pydantic
            change_requests.append(ChangeRequest(object_text, self.database_handler,
                                                 auth_validator, reference_validator,
                                                 delete_reason))

        self._handle_change_requests(change_requests, reference_validator, auth_validator)
        self.database_handler.commit()
        self.database_handler.close()
        return self

    def load_suspension_submission(self, data: RPSLSuspensionSubmission,
                                   request_meta: Dict[str, Optional[str]] = None):
        """Process a suspension/reactivation submission for maintainers.
        Returns self for chaining."""
        self.database_handler = DatabaseHandler()
        self.request_meta = request_meta if request_meta else {}

        reference_validator = ReferenceValidator(self.database_handler)
        auth_validator = AuthValidator(self.database_handler)
        change_requests: List[Union[ChangeRequest, SuspensionRequest]] = []

        auth_validator.overrides = [data.override] if data.override else []

        for rpsl_obj in data.objects:
            # We don't have a neat way to process individual attribute pairs,
            # so construct a pseudo-object by appending the text.
            object_text = f"mntner: {rpsl_obj.mntner}\nsource: {rpsl_obj.source}\n"
            change_requests.append(SuspensionRequest(
                object_text,
                self.database_handler,
                auth_validator,
                rpsl_obj.request_type.value,
            ))

        self._handle_change_requests(change_requests, reference_validator, auth_validator)
        self.database_handler.commit()
        self.database_handler.close()
        return self

    def _handle_change_requests(self, change_requests: List[Union[ChangeRequest, SuspensionRequest]],
                                reference_validator: ReferenceValidator,
                                auth_validator: AuthValidator) -> None:
        """Validate all change requests (iteratively, to resolve mutual
        references) and save the ones that remain valid."""
        objects = ', '.join([f'{request.rpsl_obj_new} (request {id(request)})'
                             for request in change_requests])
        logger.info(f'Processing change requests for {objects}, metadata is {self.request_meta}')

        # When an object references another object, e.g. tech-c referring a person or mntner,
        # an add/update is only valid if those referred objects exist. To complicate matters,
        # the object referred to may be part of this very same submission. For this reason, the
        # reference validator can be provided with all new objects to be added in this submission.
        # However, a possible scenario is that A, B and C are submitted. Object A refers to B,
        # B refers to C, C refers to D and D does not exist - or C fails authentication.
        # At a first scan, A is valid because B exists, B is valid because C exists. C
        # becomes invalid on the first scan, which is why another scan is performed, which
        # will mark B invalid due to the reference to an invalid C, etc. This continues until
        # all references are resolved and repeated scans lead to the same conclusions.
        valid_changes = [r for r in change_requests if r.is_valid()]
        previous_valid_changes: List[Union[ChangeRequest, SuspensionRequest]] = []
        loop_count = 0
        loop_max = len(change_requests) + 10

        while valid_changes != previous_valid_changes:
            previous_valid_changes = valid_changes
            reference_validator.preload(valid_changes)
            # New mntner objects in this submission may authenticate changes
            # to other objects in the same submission.
            valid_potential_new_mntners = [
                r.rpsl_obj_new for r in valid_changes
                if r.request_type == UpdateRequestType.CREATE
                and isinstance(r.rpsl_obj_new, RPSLMntner)
            ]
            auth_validator.pre_approve(valid_potential_new_mntners)

            for result in valid_changes:
                result.validate()
            valid_changes = [r for r in change_requests if r.is_valid()]

            loop_count += 1
            if loop_count > loop_max:  # pragma: no cover
                msg = f'Update validity resolver ran an excessive amount of loops, may be stuck, aborting ' \
                      f'processing. Message metadata: {self.request_meta}'
                logger.error(msg)
                raise ValueError(msg)

        for result in change_requests:
            if result.is_valid():
                result.save()

        self.results = change_requests

    def _resolve_pgp_key_id(self, pgp_fingerprint: str) -> Optional[str]:
        """
        Find a PGP key ID for a given fingerprint.
        This method looks for an actual matching object in the database,
        and then returns the object's PK.
        """
        clean_fingerprint = pgp_fingerprint.replace(' ', '')
        key_id = 'PGPKEY-' + clean_fingerprint[-8:]
        query = RPSLDatabaseQuery().object_classes(['key-cert']).rpsl_pk(key_id)
        results = list(self.database_handler.execute_query(query))

        for result in results:
            if result['parsed_data'].get('fingerpr', '').replace(' ', '') == clean_fingerprint:
                return key_id
        logger.info(f'Message was signed with key {key_id}, but key was not found in the database. Treating message '
                    f'as unsigned. Message metadata: {self.request_meta}')
        return None

    def status(self) -> str:
        """Provide a simple SUCCESS/FAILED string based - former used if all objects were saved."""
        if all([result.status == UpdateRequestStatus.SAVED for result in self.results]):
            return 'SUCCESS'
        return 'FAILED'

    def submitter_report_human(self) -> str:
        """Produce a human-readable report for the submitter."""
        # flake8: noqa: W293
        successful = [r for r in self.results if r.status == UpdateRequestStatus.SAVED]
        failed = [r for r in self.results if r.status != UpdateRequestStatus.SAVED]
        number_successful_create = len([r for r in successful if r.request_type == UpdateRequestType.CREATE])
        number_successful_modify = len([r for r in successful if r.request_type == UpdateRequestType.MODIFY])
        number_successful_delete = len([r for r in successful if r.request_type == UpdateRequestType.DELETE])
        number_failed_create = len([r for r in failed if r.request_type == UpdateRequestType.CREATE])
        number_failed_modify = len([r for r in failed if r.request_type == UpdateRequestType.MODIFY])
        number_failed_delete = len([r for r in failed if r.request_type == UpdateRequestType.DELETE])

        user_report = self._request_meta_str() + textwrap.dedent(f"""
        SUMMARY OF UPDATE:

        Number of objects found:                  {len(self.results):3}
        Number of objects processed successfully: {len(successful):3}
            Create:        {number_successful_create:3}
            Modify:        {number_successful_modify:3}
            Delete:        {number_successful_delete:3}
        Number of objects processed with errors:  {len(failed):3}
            Create:        {number_failed_create:3}
            Modify:        {number_failed_modify:3}
            Delete:        {number_failed_delete:3}

        DETAILED EXPLANATION:

        ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        """)
        for result in self.results:
            user_report += '---\n'
            user_report += result.submitter_report_human()
            user_report += '\n'
        user_report += '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n'
        return user_report

    def submitter_report_json(self):
        """Produce a JSON-ready report for the submitter."""
        successful = [r for r in self.results if r.status == UpdateRequestStatus.SAVED]
        failed = [r for r in self.results if r.status != UpdateRequestStatus.SAVED]
        number_successful_create = len([r for r in successful if r.request_type == UpdateRequestType.CREATE])
        number_successful_modify = len([r for r in successful if r.request_type == UpdateRequestType.MODIFY])
        number_successful_delete = len([r for r in successful if r.request_type == UpdateRequestType.DELETE])
        number_failed_create = len([r for r in failed if r.request_type == UpdateRequestType.CREATE])
        number_failed_modify = len([r for r in failed if r.request_type == UpdateRequestType.MODIFY])
        number_failed_delete = len([r for r in failed if r.request_type == UpdateRequestType.DELETE])

        return {
            'request_meta': self.request_meta,
            'summary': {
                'objects_found': len(self.results),
                'successful': len(successful),
                'successful_create': number_successful_create,
                'successful_modify': number_successful_modify,
                'successful_delete': number_successful_delete,
                'failed': len(failed),
                'failed_create': number_failed_create,
                'failed_modify': number_failed_modify,
                'failed_delete': number_failed_delete,
            },
            'objects': [result.submitter_report_json() for result in self.results],
        }

    def send_notification_target_reports(self):
        """Email one report per notification target (notify/mnt-nfy/upd-to
        addresses), covering saved changes and authentication failures."""
        # First key is e-mail address of recipient, second is UpdateRequestStatus.SAVED
        # or UpdateRequestStatus.ERROR_AUTH
        reports_per_recipient: Dict[str, Dict[UpdateRequestStatus, OrderedSet]] = defaultdict(dict)
        sources: OrderedSet[str] = OrderedSet()

        for result in self.results:
            for target in result.notification_targets():
                if result.status in [UpdateRequestStatus.SAVED, UpdateRequestStatus.ERROR_AUTH]:
                    if result.status not in reports_per_recipient[target]:
                        reports_per_recipient[target][result.status] = OrderedSet()
                    reports_per_recipient[target][result.status].add(result.notification_target_report())
                    sources.add(result.rpsl_obj_new.source())

        sources_str = '/'.join(sources)
        subject = f'Notification of {sources_str} database changes'
        header = get_setting('email.notification_header', '').format(sources_str=sources_str)
        header += '\nThis message is auto-generated.\n'
        header += 'The request was made with the following details:\n'

        header_saved = textwrap.dedent("""
            ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            Some objects in which you are referenced have been created,
            deleted or changed.

            """)
        header_failed = textwrap.dedent("""
            ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            Some objects in which you are referenced were requested
            to be created, deleted or changed, but *failed*
            the proper authorisation for any of the referenced
            maintainers.

            """)

        for recipient, reports_per_status in reports_per_recipient.items():
            user_report = header + self._request_meta_str()
            if UpdateRequestStatus.ERROR_AUTH in reports_per_status:
                user_report += header_failed
                for report in reports_per_status[UpdateRequestStatus.ERROR_AUTH]:
                    user_report += f'---\n{report}\n'
                user_report += '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n'
            if UpdateRequestStatus.SAVED in reports_per_status:
                user_report += header_saved
                for report in reports_per_status[UpdateRequestStatus.SAVED]:
                    user_report += f'---\n{report}\n'
                user_report += '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n'
            email.send_email(recipient, subject, user_report)

    def _request_meta_str(self):
        """Render request metadata as quoted '> key: value' lines."""
        request_meta_str = '\n'.join([f'> {k}: {v}' for k, v in self.request_meta.items() if v])
        if request_meta_str:
            request_meta_str = '\n' + request_meta_str + '\n\n'
        return request_meta_str
class ROAImportRunner(FileImportRunnerBase):
    """
    This runner performs a full import of ROA objects.
    The URL file for the ROA export in JSON format is provided in the configuration.
    """
    # API consistency with other importers, source is actually ignored
    def __init__(self, source=None):
        pass

    def run(self):
        self.database_handler = DatabaseHandler()
        try:
            self.database_handler.disable_journaling()
            roa_objs = self._import_roas()
            # Do an early commit to make the new ROAs available to other processes.
            self.database_handler.commit()

            # The ROA import does not use journaling, but updating the RPKI
            # status may create journal entries.
            self.database_handler.enable_journaling()

            validator = BulkRouteROAValidator(self.database_handler, roa_objs)
            objs_now_valid, objs_now_invalid, objs_now_not_found = validator.validate_all_routes()
            self.database_handler.update_rpki_status(
                rpsl_objs_now_valid=objs_now_valid,
                rpsl_objs_now_invalid=objs_now_invalid,
                rpsl_objs_now_not_found=objs_now_not_found,
            )
            self.database_handler.commit()
            notified = notify_rpki_invalid_owners(self.database_handler, objs_now_invalid)
            logger.info(f'RPKI status updated for all routes, {len(objs_now_valid)} newly valid, '
                        f'{len(objs_now_invalid)} newly invalid, '
                        f'{len(objs_now_not_found)} newly not_found routes, '
                        f'{notified} emails sent to contacts of newly invalid authoritative objects')
        except OSError as ose:
            # I/O errors can occur and should not log a full traceback (#177)
            logger.error(f'An error occurred while attempting a ROA import: {ose}')
        except ROAParserException as rpe:
            logger.error(f'An exception occurred while attempting a ROA import: {rpe}')
        except Exception as exc:
            logger.error(f'An exception occurred while attempting a ROA import: {exc}', exc_info=exc)
        finally:
            self.database_handler.close()

    def _import_roas(self):
        # Deletes all existing ROAs and pseudo-IRR objects, then imports
        # fresh data from the configured ROA source, optionally filtered
        # through a SLURM file. Returns the imported ROA objects.
        roa_source = get_setting('rpki.roa_source')
        slurm_source = get_setting('rpki.slurm_source')

        logger.info(f'Running full ROA import from: {roa_source}, SLURM {slurm_source}')
        self.database_handler.delete_all_roa_objects()
        self.database_handler.delete_all_rpsl_objects_with_journal(RPKI_IRR_PSEUDO_SOURCE)

        slurm_data = None
        if slurm_source:
            slurm_data, _ = self._retrieve_file(slurm_source, return_contents=True)

        roa_filename, roa_to_delete = self._retrieve_file(roa_source, return_contents=False)
        with open(roa_filename) as fh:
            roa_importer = ROADataImporter(fh.read(), slurm_data, self.database_handler)
        # _retrieve_file may have downloaded to a temporary file; remove it.
        if roa_to_delete:
            os.unlink(roa_filename)
        logger.info(f'ROA import from {roa_source}, SLURM {slurm_source}, imported {len(roa_importer.roa_objs)} ROAs, running validator')
        return roa_importer.roa_objs
def set_force_reload(source) -> None:
    """Flag a source so its next mirror run performs a full reload."""
    database_handler = DatabaseHandler()
    database_handler.set_force_reload(source)
    database_handler.commit()
    database_handler.close()
class ChangeSubmissionHandler:
    """
    The ChangeSubmissionHandler handles the text of one or more requested RPSL changes
    (create, modify or delete), parses, validates and eventually saves
    them. This includes validating references between objects, including
    those part of the same message, and checking authentication.
    """
    def __init__(self, object_texts: str, pgp_fingerprint: str=None,
                 request_meta: Dict[str, Optional[str]]=None) -> None:
        # Parses, validates and saves the submitted objects immediately;
        # results are available on self.results afterwards.
        self.database_handler = DatabaseHandler()
        self.request_meta = request_meta if request_meta else {}
        self._pgp_key_id = self._resolve_pgp_key_id(pgp_fingerprint) if pgp_fingerprint else None
        self._handle_object_texts(object_texts)
        self.database_handler.commit()
        self.database_handler.close()

    def _handle_object_texts(self, object_texts: str) -> None:
        """Parse the raw text into change requests, validate them
        iteratively, and save the valid ones."""
        reference_validator = ReferenceValidator(self.database_handler)
        auth_validator = AuthValidator(self.database_handler, self._pgp_key_id)
        results = parse_change_requests(object_texts, self.database_handler, auth_validator, reference_validator)

        # When an object references another object, e.g. tech-c referring a person or mntner,
        # an add/update is only valid if those referred objects exist. To complicate matters,
        # the object referred to may be part of this very same submission. For this reason, the
        # reference validator can be provided with all new objects to be added in this submission.
        # However, a possible scenario is that A, B and C are submitted. Object A refers to B,
        # B refers to C, C refers to D and D does not exist - or C fails authentication.
        # At a first scan, A is valid because B exists, B is valid because C exists. C
        # becomes invalid on the first scan, which is why another scan is performed, which
        # will mark B invalid due to the reference to an invalid C, etc. This continues until
        # all references are resolved and repeated scans lead to the same conclusions.
        valid_changes = [r for r in results if r.is_valid()]
        previous_valid_changes: List[ChangeRequest] = []
        loop_count = 0
        loop_max = len(results) + 10

        while valid_changes != previous_valid_changes:
            previous_valid_changes = valid_changes
            reference_validator.preload(valid_changes)
            auth_validator.pre_approve(valid_changes)

            for result in valid_changes:
                result.validate()
            valid_changes = [r for r in results if r.is_valid()]

            loop_count += 1
            if loop_count > loop_max:  # pragma: no cover
                msg = f'Update validity resolver ran an excessive amount of loops, may be stuck, aborting ' \
                      f'processing. Message metadata: {self.request_meta}'
                logger.error(msg)
                raise ValueError(msg)

        for result in results:
            if result.is_valid():
                result.save(self.database_handler)

        self.results = results

    def _resolve_pgp_key_id(self, pgp_fingerprint: str) -> Optional[str]:
        """
        Find a PGP key ID for a given fingerprint.
        This method looks for an actual matching object in the database,
        and then returns the object's PK.
        """
        clean_fingerprint = pgp_fingerprint.replace(' ', '')
        key_id = "PGPKEY-" + clean_fingerprint[-8:]
        query = RPSLDatabaseQuery().object_classes(['key-cert']).rpsl_pk(key_id)
        results = list(self.database_handler.execute_query(query))

        for result in results:
            if result['parsed_data'].get('fingerpr', '').replace(' ', '') == clean_fingerprint:
                return key_id
        logger.info(f'Message was signed with key {key_id}, but key was not found in the database. Treating message '
                    f'as unsigned. Message metadata: {self.request_meta}')
        return None

    def status(self) -> str:
        """Provide a simple SUCCESS/FAILED string based - former used if all objects were saved."""
        if all([result.status == UpdateRequestStatus.SAVED for result in self.results]):
            return "SUCCESS"
        return "FAILED"

    def submitter_report(self) -> str:
        """Produce a human-readable report for the submitter."""
        # flake8: noqa: W293
        successful = [r for r in self.results if r.status == UpdateRequestStatus.SAVED]
        failed = [r for r in self.results if r.status != UpdateRequestStatus.SAVED]
        number_successful_create = len([r for r in successful if r.request_type == UpdateRequestType.CREATE])
        number_successful_modify = len([r for r in successful if r.request_type == UpdateRequestType.MODIFY])
        number_successful_delete = len([r for r in successful if r.request_type == UpdateRequestType.DELETE])
        number_failed_create = len([r for r in failed if r.request_type == UpdateRequestType.CREATE])
        number_failed_modify = len([r for r in failed if r.request_type == UpdateRequestType.MODIFY])
        number_failed_delete = len([r for r in failed if r.request_type == UpdateRequestType.DELETE])

        user_report = self._request_meta_str() + textwrap.dedent(f"""
        SUMMARY OF UPDATE:

        Number of objects found:                  {len(self.results):3}
        Number of objects processed successfully: {len(successful):3}
            Create:        {number_successful_create:3}
            Modify:        {number_successful_modify:3}
            Delete:        {number_successful_delete:3}
        Number of objects processed with errors:  {len(failed):3}
            Create:        {number_failed_create:3}
            Modify:        {number_failed_modify:3}
            Delete:        {number_failed_delete:3}

        DETAILED EXPLANATION:

        ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        """)
        for result in self.results:
            user_report += "---\n"
            user_report += result.submitter_report()
            user_report += "\n"
        user_report += '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n'
        return user_report

    def send_notification_target_reports(self):
        """Email one report per notification target (notify/mnt-nfy/upd-to
        addresses), covering saved changes and authentication failures."""
        # First key is e-mail address of recipient, second is UpdateRequestStatus.SAVED
        # or UpdateRequestStatus.ERROR_AUTH
        reports_per_recipient: Dict[str, Dict[UpdateRequestStatus, OrderedSet]] = defaultdict(dict)
        sources: OrderedSet[str] = OrderedSet()

        for result in self.results:
            for target in result.notification_targets():
                if result.status in [UpdateRequestStatus.SAVED, UpdateRequestStatus.ERROR_AUTH]:
                    if result.status not in reports_per_recipient[target]:
                        reports_per_recipient[target][result.status] = OrderedSet()
                    reports_per_recipient[target][result.status].add(result.notification_target_report())
                    sources.add(result.rpsl_obj_new.source())

        sources_str = '/'.join(sources)
        subject = f'Notification of {sources_str} database changes'
        header = textwrap.dedent(f"""
            This is to notify you of changes in the {sources_str} database
            or object authorisation failures.

            You may receive this message because you are listed in
            the notify attribute on the changed object(s), or because
            you are listed in the mnt-nfy or upd-to attribute on a maintainer
            of the object(s).

            This message is auto-generated.
            The request was made by email, with the following details:
            """)
        header_saved = textwrap.dedent("""
            ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            Some objects in which you are referenced have been created,
            deleted or changed.

            """)
        header_failed = textwrap.dedent("""
            ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            Some objects in which you are referenced were requested
            to be created, deleted or changed, but *failed*
            the proper authorisation for any of the referenced
            maintainers.

            """)

        for recipient, reports_per_status in reports_per_recipient.items():
            user_report = header + self._request_meta_str()
            if UpdateRequestStatus.ERROR_AUTH in reports_per_status:
                user_report += header_failed
                for report in reports_per_status[UpdateRequestStatus.ERROR_AUTH]:
                    user_report += f"---\n{report}\n"
                user_report += '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n'
            if UpdateRequestStatus.SAVED in reports_per_status:
                user_report += header_saved
                for report in reports_per_status[UpdateRequestStatus.SAVED]:
                    user_report += f"---\n{report}\n"
                user_report += '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n'
            email.send_email(recipient, subject, user_report)

    def _request_meta_str(self):
        """Render request metadata as quoted '> key: value' lines."""
        request_meta_str = '\n'.join([f"> {k}: {v}" for k, v in self.request_meta.items() if v])
        if request_meta_str:
            request_meta_str = "\n" + request_meta_str + "\n\n"
        return request_meta_str
def set_force_reload(source) -> None:
    """Flag *source* for a forced full reload on the next import run.

    Preload updating is disabled on the handler, as only a status flag
    is written here; the actual reload happens in a later import.
    """
    database_handler = DatabaseHandler(enable_preload_update=False)
    database_handler.set_force_reload(source)
    database_handler.commit()
    database_handler.close()
class SourceExportRunner:
    """
    This SourceExportRunner is the entry point for the export process
    for a single source.

    A gzipped file will be created in the export_destination directory
    with the contents of the source, along with a CURRENTSERIAL file.

    The contents of the source are first written to a temporary file, and
    then moved in place.
    """
    def __init__(self, source: str) -> None:
        self.source = source

    def run(self) -> None:
        """Run all configured exports for this source.

        Runs the filtered export (export_destination) and the unfiltered
        export (export_destination_unfiltered, auth hashes kept) when set.
        Exceptions are logged rather than raised, so a failing export does
        not take down the caller; the handler is always closed.
        """
        self.database_handler = DatabaseHandler()
        try:
            export_destination = get_setting(f'sources.{self.source}.export_destination')
            if export_destination:
                logger.info(f'Starting a source export for {self.source} to {export_destination}')
                self._export(export_destination)

            export_destination_unfiltered = get_setting(f'sources.{self.source}.export_destination_unfiltered')
            if export_destination_unfiltered:
                logger.info(f'Starting an unfiltered source export for {self.source} '
                            f'to {export_destination_unfiltered}')
                self._export(export_destination_unfiltered, remove_auth_hashes=False)

            self.database_handler.commit()
        except Exception as exc:
            logger.error(f'An exception occurred while attempting to run an export '
                         f'for {self.source}: {exc}', exc_info=exc)
        finally:
            self.database_handler.close()

    def _export(self, export_destination, remove_auth_hashes=True):
        """Write the gzipped export and CURRENTSERIAL file into export_destination.

        Objects are written to a temporary file first and then moved in
        place, so readers never see a partial export. Only objects that are
        RPKI not_found/valid and in scope are included. When
        remove_auth_hashes is True, auth hashes are stripped from the text.
        """
        filename_export = Path(export_destination) / f'{self.source.lower()}.db.gz'
        export_tmpfile = NamedTemporaryFile(delete=False)
        # Only the temp file's name is used below (gzip.open reopens it by
        # path); close the handle right away so the descriptor isn't leaked.
        export_tmpfile.close()
        filename_serial = Path(export_destination) / f'{self.source.upper()}.CURRENTSERIAL'

        query = DatabaseStatusQuery().source(self.source)

        try:
            serial = next(self.database_handler.execute_query(query))['serial_newest_seen']
        except StopIteration:
            # Source has no status record yet; export without a serial file.
            serial = None

        try:
            with gzip.open(export_tmpfile.name, 'wb') as fh:
                query = RPSLDatabaseQuery().sources([self.source])
                query = query.rpki_status([RPKIStatus.not_found, RPKIStatus.valid])
                query = query.scopefilter_status([ScopeFilterStatus.in_scope])
                for obj in self.database_handler.execute_query(query):
                    object_text = obj['object_text']
                    if remove_auth_hashes:
                        object_text = remove_auth_hashes_func(object_text)
                    object_bytes = object_text.encode('utf-8')
                    fh.write(object_bytes + b'\n')
                fh.write(b'# EOF\n')

            os.chmod(export_tmpfile.name, EXPORT_PERMISSIONS)
        except Exception:
            # Don't leave a partial delete=False temp file behind on failure.
            try:
                os.unlink(export_tmpfile.name)
            except OSError:  # pragma: no cover
                pass
            raise

        if filename_export.exists():
            os.unlink(filename_export)
        if filename_serial.exists():
            os.unlink(filename_serial)
        shutil.move(export_tmpfile.name, filename_export)

        if serial is not None:
            with open(filename_serial, 'w') as fh:
                fh.write(str(serial))
            os.chmod(filename_serial, EXPORT_PERMISSIONS)

            self.database_handler.record_serial_exported(self.source, serial)

        logger.info(f'Export for {self.source} complete at serial {serial}, stored in {filename_export} / {filename_serial}')