Example 1
    def upsert_rpsl_object(self,
                           rpsl_object: RPSLObject,
                           forced_serial: Optional[int] = None) -> None:
        """
        Schedule an RPSLObject for insertion/updating.

        This method will insert the object, or overwrite an existing object
        if it has the same RPSL primary key and source. No other checks are
        applied before overwriting.

        Writes may not be issued to the database immediately for performance
        reasons, but commit() will ensure all writes are flushed to the DB first.

        The forced serial is used for mirrored sources: it indicates that this
        call was triggered by an NRTM operation with that serial.
        """
        ip_first = str(rpsl_object.ip_first) if rpsl_object.ip_first else None
        ip_last = str(rpsl_object.ip_last) if rpsl_object.ip_last else None

        ip_size = None
        if rpsl_object.ip_first and rpsl_object.ip_last:
            ip_size = rpsl_object.ip_last.int() - rpsl_object.ip_first.int() + 1

        # In some cases, multiple updates may be submitted for the same object.
        # PostgreSQL will not allow rows proposed for insertion to have duplicate
        # constrained values - so if a second object appears with a pk/source
        # seen before, the cache must be flushed right away, or the two updates
        # will conflict.
        source = rpsl_object.parsed_data['source']
        rpsl_pk_source = rpsl_object.pk() + '-' + source
        if rpsl_pk_source in self._rpsl_pk_source_seen:
            self._flush_rpsl_object_upsert_cache()

        self._rpsl_upsert_cache.append((
            {
                'rpsl_pk': rpsl_object.pk(),
                'source': source,
                'object_class': rpsl_object.rpsl_object_class,
                'parsed_data': rpsl_object.parsed_data,
                'object_text': rpsl_object.render_rpsl_text(),
                'ip_version': rpsl_object.ip_version(),
                'ip_first': ip_first,
                'ip_last': ip_last,
                'ip_size': ip_size,
                'asn_first': rpsl_object.asn_first,
                'asn_last': rpsl_object.asn_last,
                'updated': datetime.now(timezone.utc),
            },
            forced_serial,
        ))
        self._rpsl_pk_source_seen.add(rpsl_pk_source)
        self._object_classes_modified.add(rpsl_object.rpsl_object_class)

        if len(self._rpsl_upsert_cache) > MAX_RECORDS_CACHE_BEFORE_INSERT:
            self._flush_rpsl_object_upsert_cache()
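
The buffering behaviour described in the docstring can be summarised in isolation: upserts accumulate in an in-memory buffer, which is flushed early whenever the same pk/source pair is seen again, or when the buffer grows past its limit. Below is a minimal standalone sketch of that pattern; flush() and the other names are illustrative stand-ins, not IRRd's actual API.

# Minimal sketch of the flush-on-duplicate-key buffering pattern used by
# upsert_rpsl_object(). flush() stands in for the real bulk database insert.
from typing import Dict, List, Set

MAX_RECORDS_BEFORE_FLUSH = 5000  # illustrative limit


upsert_buffer: List[Dict[str, str]] = []
pk_source_seen: Set[str] = set()


def flush() -> None:
    # Stand-in for the real bulk insert performed by the flush method.
    upsert_buffer.clear()
    pk_source_seen.clear()


def buffer_upsert(rpsl_pk: str, source: str) -> None:
    pk_source = f'{rpsl_pk}-{source}'
    if pk_source in pk_source_seen:
        # A second update for the same pk/source would conflict within a single
        # bulk insert, so flush the buffer before accepting it.
        flush()
    upsert_buffer.append({'rpsl_pk': rpsl_pk, 'source': source})
    pk_source_seen.add(pk_source)
    if len(upsert_buffer) > MAX_RECORDS_BEFORE_FLUSH:
        flush()
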
Example 2
    def delete_rpsl_object(self, rpsl_object: RPSLObject,
                           forced_serial: Optional[int] = None) -> None:
        """
        Delete an RPSL object from the database.
        See the comment on the instance declaration for an explanation of forced_serial.
        """
        self._flush_rpsl_object_upsert_cache()
        table = RPSLDatabaseObject.__table__
        source = rpsl_object.parsed_data['source']
        stmt = table.delete(
            sa.and_(table.c.rpsl_pk == rpsl_object.pk(), table.c.source == source),
        ).returning(table.c.pk, table.c.rpsl_pk, table.c.source, table.c.object_class, table.c.object_text)
        results = self._connection.execute(stmt)

        if results.rowcount == 0:
            logger.warning(f'attempted to remove object {rpsl_object.pk()}/{source}, but no database row matched')
            return None
        if results.rowcount > 1:  # pragma: no cover
            # This should not be possible, as rpsl_pk/source form a composite unique key in the database schema.
            # Therefore, a query should not be able to affect more than one row - and we also cannot test this
            # scenario. Due to the possible harm of a bug in this area, we still check for it anyway.
            affected_pks = ','.join([r[0] for r in results.fetchall()])
            msg = f'attempted to remove object {rpsl_object.pk()}/{source}, but multiple objects were affected, '
            msg += f'internal pks affected: {affected_pks}'
            logger.error(msg)
            raise ValueError(msg)

        result = results.fetchone()
        self.status_tracker.record_operation(
            operation=DatabaseOperation.delete,
            rpsl_pk=result['rpsl_pk'],
            source=result['source'],
            object_class=result['object_class'],
            object_text=result['object_text'],
            forced_serial=forced_serial,
        )
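
The delete path above uses SQLAlchemy Core: build a DELETE statement constrained on rpsl_pk and source, execute it, and inspect rowcount to see whether anything matched. A self-contained sketch of the same pattern against an in-memory SQLite database follows; the table and columns are illustrative, not IRRd's real schema, the modern .where() call is used instead of the legacy table.delete(whereclause) form, and RETURNING (which the code above uses to capture the deleted row) is omitted for brevity.

# Sketch of the delete-and-check-rowcount pattern with an illustrative table.
import sqlalchemy as sa

metadata = sa.MetaData()
rpsl_objects = sa.Table(
    'rpsl_objects', metadata,
    sa.Column('rpsl_pk', sa.String, primary_key=True),
    sa.Column('source', sa.String, primary_key=True),
    sa.Column('object_text', sa.Text),
)

engine = sa.create_engine('sqlite://')
metadata.create_all(engine)

with engine.begin() as conn:
    conn.execute(rpsl_objects.insert().values(
        rpsl_pk='TEST-MNT', source='TEST', object_text='mntner: TEST-MNT\n'))
    stmt = rpsl_objects.delete().where(sa.and_(
        rpsl_objects.c.rpsl_pk == 'TEST-MNT',
        rpsl_objects.c.source == 'TEST',
    ))
    result = conn.execute(stmt)
    if result.rowcount == 0:
        print('no database row matched')
    else:
        print(f'deleted {result.rowcount} row(s)')
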
Example 3
    def check_references_from_others(self,
                                     rpsl_obj: RPSLObject) -> ValidatorResult:
        """
        Check for any references to this object in the DB.
        Used for validating deletions.

        Checks self._preloaded_deleted, because a reference from an object
        that is also about to be deleted is acceptable.
        """
        result = ValidatorResult()
        if not rpsl_obj.references_strong_inbound():
            return result

        query = RPSLDatabaseQuery().sources([rpsl_obj.source()])
        query = query.lookup_attrs_in(rpsl_obj.references_strong_inbound(),
                                      [rpsl_obj.pk()])
        query_results = self.database_handler.execute_query(query)
        for query_result in query_results:
            reference_to_be_deleted = (
                query_result['object_class'],
                query_result['rpsl_pk'],
                query_result['source'],
            ) in self._preloaded_deleted
            if not reference_to_be_deleted:
                result.error_messages.add(
                    f'Object {rpsl_obj.pk()} to be deleted, but still referenced '
                    f'by {query_result["object_class"]} {query_result["rpsl_pk"]}'
                )
        return result
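
The essential rule here is that a strong inbound reference only blocks a deletion if the referring object is not itself queued for deletion. That filter can be shown standalone, with plain dicts standing in for database query results; all data below is made up for illustration.

# Standalone illustration of the reference filter: a reference only blocks
# deletion when the referring object is not itself about to be deleted.
preloaded_deleted = {('mntner', 'OLD-MNT', 'EXAMPLE')}

query_results = [
    {'object_class': 'mntner', 'rpsl_pk': 'OLD-MNT', 'source': 'EXAMPLE'},
    {'object_class': 'route', 'rpsl_pk': '192.0.2.0/24AS65530', 'source': 'EXAMPLE'},
]

blocking = [
    r for r in query_results
    if (r['object_class'], r['rpsl_pk'], r['source']) not in preloaded_deleted
]
for r in blocking:
    print(f"still referenced by {r['object_class']} {r['rpsl_pk']}")
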
Example 4
    def validate(self, rpsl_obj: RPSLObject,
                 request_type: UpdateRequestType) -> ValidatorResult:
        result = ValidatorResult()
        if request_type == UpdateRequestType.CREATE and rpsl_obj.rpsl_object_class == 'mntner' and \
                self._check_suspended_mntner_with_same_pk(rpsl_obj.pk(), rpsl_obj.source()):
            result.error_messages.add(
                f'A suspended mntner with primary key {rpsl_obj.pk()} already exists for {rpsl_obj.source()}'
            )
        return result
Example 5
    def check_auth(self, rpsl_obj_new: RPSLObject,
                   rpsl_obj_current: Optional[RPSLObject]) -> ValidatorResult:
        """
        Check whether authentication passes for all required objects.
        """
        source = rpsl_obj_new.source()
        result = ValidatorResult()

        mntners_new = rpsl_obj_new.parsed_data['mnt-by']
        logger.debug(
            f'Checking auth for {rpsl_obj_new}, mntners in new object: {mntners_new}'
        )
        if not self._check_mntners(mntners_new, source):
            self._generate_failure_message(result, mntners_new, rpsl_obj_new)

        if rpsl_obj_current:
            mntners_current = rpsl_obj_current.parsed_data['mnt-by']
            logger.debug(
                f'Checking auth for {rpsl_obj_current}, mntners in current object: {mntners_current}'
            )
            if not self._check_mntners(mntners_current, source):
                self._generate_failure_message(result, mntners_current,
                                               rpsl_obj_new)

        if isinstance(rpsl_obj_new, RPSLMntner):
            # Dummy auth values are only permitted in existing objects, which are never pre-approved.
            if rpsl_obj_new.has_dummy_auth_value() and rpsl_obj_new.pk() not in self._pre_approved:
                if len(self.passwords) == 1:
                    logger.debug(
                        f'Object {rpsl_obj_new} submitted with dummy hash values and single password, '
                        f'replacing all hashes with currently supplied password.'
                    )
                    rpsl_obj_new.force_single_new_password(self.passwords[0])
                    result.info_messages.add(
                        'As you submitted dummy hash values, all password hashes on this object '
                        'were replaced with a new MD5-PW hash of the password you provided for '
                        'authentication.')
                else:
                    result.error_messages.add(
                        f'Object submitted with dummy hash values, but multiple or no passwords '
                        f'submitted. Either submit all full hashes, or a single password.'
                    )
            elif not rpsl_obj_new.verify_auth(self.passwords,
                                              self.keycert_obj_pk):
                result.error_messages.add(
                    f'Authorisation failed for the auth methods on this mntner object.'
                )

        return result
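
When a mntner arrives with dummy hash values and exactly one password, force_single_new_password() replaces all auth hashes with an MD5-PW hash of that password. The MD5-PW format is the md5_crypt scheme, which can be reproduced with passlib; the snippet below only illustrates the hash format, it is not IRRd's internal call, and the password is made up.

# Illustrative only: generating an MD5-PW style hash with passlib, as used
# for the 'auth:' attribute of a mntner object.
from passlib.hash import md5_crypt

password = 'example-password'       # made-up value
hashed = md5_crypt.hash(password)   # e.g. '$1$...'
auth_line = f'auth: MD5-PW {hashed}'
print(auth_line)
assert md5_crypt.verify(password, hashed)
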
Example 6
    def delete_rpsl_object(self, rpsl_object: RPSLObject,
                           origin: JournalEntryOrigin) -> None:
        """
        Delete an RPSL object from the database.

        The origin indicates the origin of this change, see JournalEntryOrigin
        for the various options.
        """
        self._flush_rpsl_object_writing_buffer()
        table = RPSLDatabaseObject.__table__
        source = rpsl_object.parsed_data['source']
        stmt = table.delete(
            sa.and_(table.c.rpsl_pk == rpsl_object.pk(), table.c.source == source),
        ).returning(table.c.pk, table.c.rpsl_pk, table.c.source, table.c.object_class, table.c.object_text)
        results = self._connection.execute(stmt)

        if results.rowcount == 0:
            logger.error(
                f'Attempted to remove object {rpsl_object.pk()}/{source}, but no database row matched'
            )
            return None
        if results.rowcount > 1:  # pragma: no cover
            # This should not be possible, as rpsl_pk/source form a composite unique key in the database schema.
            # Therefore, a query should not be able to affect more than one row - and we also cannot test this
            # scenario. Due to the possible harm of a bug in this area, we still check for it anyway.
            affected_pks = ','.join([r[0] for r in results.fetchall()])
            msg = f'Attempted to remove object {rpsl_object.pk()}/{source}, but multiple objects were affected, '
            msg += f'internal pks affected: {affected_pks}'
            logger.critical(msg)
            raise ValueError(msg)

        result = results.fetchone()
        self.status_tracker.record_operation(
            operation=DatabaseOperation.delete,
            rpsl_pk=result['rpsl_pk'],
            source=result['source'],
            object_class=result['object_class'],
            object_text=result['object_text'],
            origin=origin,
        )
        self._object_classes_modified.add(result['object_class'])
Example 7
    def upsert_rpsl_object(self,
                           rpsl_object: RPSLObject,
                           origin: JournalEntryOrigin,
                           rpsl_guaranteed_no_existing=False,
                           source_serial: Optional[int] = None,
                           forced_created_value: Optional[str] = None) -> None:
        """
        Schedule an RPSLObject for insertion/updating.

        This method will insert the object, or overwrite an existing object
        if it has the same RPSL primary key and source. No other checks are
        applied before overwriting.

        Writes may not be issued to the database immediately for performance
        reasons, but commit() will ensure all writes are flushed to the DB first.

        The origin indicates the origin of this change, see JournalEntryOrigin
        for the various options. The source_serial is the serial that an NRTM
        source assigned to this change, if any.

        If rpsl_guaranteed_no_existing is set to True, the caller guarantees that this
        PK is unique in the database. This essentially only applies to inserting
        RPKI pseudo-IRR objects.
        """
        self._check_write_permitted()
        if not rpsl_guaranteed_no_existing:
            self._rpsl_guaranteed_no_existing = False
        ip_first = str(rpsl_object.ip_first) if rpsl_object.ip_first else None
        ip_last = str(rpsl_object.ip_last) if rpsl_object.ip_last else None

        ip_size = None
        if rpsl_object.ip_first and rpsl_object.ip_last:
            ip_size = rpsl_object.ip_last.int() - rpsl_object.ip_first.int() + 1

        # In some cases, multiple updates may be submitted for the same object.
        # PostgreSQL will not allow rows proposed for insertion to have duplicate
        # constrained values - so if a second object appears with a pk/source
        # seen before, the buffer must be flushed right away, or the two updates
        # will conflict.
        source = rpsl_object.parsed_data['source']

        rpsl_pk_source = rpsl_object.pk() + '-' + source
        if rpsl_pk_source in self._rpsl_pk_source_seen:
            self._flush_rpsl_object_writing_buffer()

        update_time = datetime.now(timezone.utc)
        object_dict = {
            'rpsl_pk': rpsl_object.pk(),
            'source': source,
            'object_class': rpsl_object.rpsl_object_class,
            'parsed_data': rpsl_object.parsed_data,
            'object_text': rpsl_object.render_rpsl_text(last_modified=update_time),
            'ip_version': rpsl_object.ip_version(),
            'ip_first': ip_first,
            'ip_last': ip_last,
            'ip_size': ip_size,
            'prefix': str(rpsl_object.prefix) if rpsl_object.prefix else None,
            'prefix_length': rpsl_object.prefix_length,
            'asn_first': rpsl_object.asn_first,
            'asn_last': rpsl_object.asn_last,
            'rpki_status': rpsl_object.rpki_status,
            'scopefilter_status': rpsl_object.scopefilter_status,
            'updated': update_time,
        }
        if forced_created_value:
            object_dict['created'] = forced_created_value

        self._rpsl_upsert_buffer.append((object_dict, origin, source_serial))

        self._rpsl_pk_source_seen.add(rpsl_pk_source)
        self._object_classes_modified.add(rpsl_object.rpsl_object_class)

        if len(self._rpsl_upsert_buffer) > MAX_RECORDS_BUFFER_BEFORE_INSERT:
            self._flush_rpsl_object_writing_buffer()
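
The ip_size value stored here is simply the number of addresses covered by the object: last address minus first address plus one. The same arithmetic can be reproduced with the standard ipaddress module; the prefix below is just an example.

# Reproducing the ip_size calculation with the standard library: the number
# of addresses in a range is last - first + 1.
import ipaddress

network = ipaddress.ip_network('192.0.2.0/24')   # example prefix
ip_first = int(network.network_address)
ip_last = int(network.broadcast_address)
ip_size = ip_last - ip_first + 1
print(ip_size)   # 256, matching network.num_addresses
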
Example 8
    def process_auth(
            self, rpsl_obj_new: RPSLObject,
            rpsl_obj_current: Optional[RPSLObject]) -> ValidatorResult:
        """
        Check whether authentication passes for all required objects.
        Returns a ValidatorResult object with error/info messages, and fills
        result.mntners_notify with the RPSLMntner objects that may have
        to be notified.

        If a valid override password is provided, changes are immediately approved.
        On the result object, used_override is set to True, but mntners_notify is
        not filled, as mntner resolving does not take place.
        """
        source = rpsl_obj_new.source()
        result = ValidatorResult()

        override_hash = get_setting('auth.override_password')
        if override_hash:
            for override in self.overrides:
                try:
                    if md5_crypt.verify(override, override_hash):
                        result.used_override = True
                        logger.debug(f'Found valid override password.')
                        return result
                    else:
                        logger.info(
                            f'Found invalid override password, ignoring.')
                except ValueError as ve:
                    logger.error(
                        f'Exception occurred while checking override password: {ve} (possible misconfigured hash?)'
                    )
        elif self.overrides:
            logger.info(
                f'Ignoring override password, auth.override_password not set.')

        mntners_new = rpsl_obj_new.parsed_data['mnt-by']
        logger.debug(
            f'Checking auth for new object {rpsl_obj_new}, mntners in new object: {mntners_new}'
        )
        valid, mntner_objs_new = self._check_mntners(mntners_new, source)
        if not valid:
            self._generate_failure_message(result, mntners_new, rpsl_obj_new)

        if rpsl_obj_current:
            mntners_current = rpsl_obj_current.parsed_data['mnt-by']
            logger.debug(
                f'Checking auth for current object {rpsl_obj_current}, '
                f'mntners in current object: {mntners_current}')
            valid, mntner_objs_current = self._check_mntners(
                mntners_current, source)
            if not valid:
                self._generate_failure_message(result, mntners_current,
                                               rpsl_obj_new)

            result.mntners_notify = mntner_objs_current
        else:
            result.mntners_notify = mntner_objs_new

        if isinstance(rpsl_obj_new, RPSLMntner):
            # Dummy auth values are only permitted in existing objects, which are never pre-approved.
            if rpsl_obj_new.has_dummy_auth_value() and rpsl_obj_new.pk() not in self._pre_approved:
                if len(self.passwords) == 1:
                    logger.debug(
                        f'Object {rpsl_obj_new} submitted with dummy hash values and single password, '
                        f'replacing all hashes with currently supplied password.'
                    )
                    rpsl_obj_new.force_single_new_password(self.passwords[0])
                    result.info_messages.add(
                        'As you submitted dummy hash values, all password hashes on this object '
                        'were replaced with a new MD5-PW hash of the password you provided for '
                        'authentication.')
                else:
                    result.error_messages.add(
                        f'Object submitted with dummy hash values, but multiple or no passwords '
                        f'submitted. Either submit only full hashes, or a single password.'
                    )
            elif not rpsl_obj_new.verify_auth(self.passwords,
                                              self.keycert_obj_pk):
                result.error_messages.add(
                    f'Authorisation failed for the auth methods on this mntner object.'
                )

        return result
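
The override check verifies each supplied override against the md5_crypt hash configured in auth.override_password, and treats a malformed hash as an error rather than a match. A small standalone sketch of that verification follows; the password and hash are made up, and the hash is generated inline purely for the example.

# Illustrative check of an override password against a stored md5_crypt hash,
# mirroring the verification and error handling above.
from passlib.hash import md5_crypt

stored_hash = md5_crypt.hash('correct-override')   # stand-in for auth.override_password
for candidate in ['wrong-override', 'correct-override']:
    try:
        if md5_crypt.verify(candidate, stored_hash):
            print(f'{candidate}: valid override password')
        else:
            print(f'{candidate}: invalid override password, ignoring')
    except ValueError as ve:
        # Raised when the stored hash itself is malformed.
        print(f'error while checking override password: {ve}')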