Example 1
    def copy_immutable_expected_data(self):
        """Snapshot the materialized view of immutable (not-updated) rows into
        the expected-data table used later for validation.

        If the view/table names are not set, or the copy fails, immutable-row
        validation is disabled (``self._validate_not_updated_data = False``)
        and a DataValidator event is published instead of raising.
        """
        if not self._validate_not_updated_data:
            return

        if not (self.view_name_for_not_updated_data
                and self.expected_data_table_name):
            self._validate_not_updated_data = False
            DataValidatorEvent.DataValidator(
                severity=Severity.WARNING,
                message="Problem during copying expected data: view not found. "
                        f"View name for not updated_data: {self.view_name_for_not_updated_data}; "
                        f"Expected data table name {self.expected_data_table_name}. "
                        "Data validation of not updated rows won't be performed"
            ).publish()
            return

        LOGGER.debug('Copy expected data for immutable rows: %s -> %s',
                     self.view_name_for_not_updated_data,
                     self.expected_data_table_name)
        if not self.longevity_self_object.copy_view(
                node=self.longevity_self_object.db_cluster.nodes[0],
                src_keyspace=self.keyspace_name,
                src_view=self.view_name_for_not_updated_data,
                dest_keyspace=self.keyspace_name,
                dest_table=self.expected_data_table_name,
                copy_data=True):
            self._validate_not_updated_data = False
            DataValidatorEvent.DataValidator(
                severity=Severity.ERROR,
                error=f"Problem during copying expected data from {self.view_name_for_not_updated_data} "
                      f"to {self.expected_data_table_name}. "
                      "Data validation of not updated rows won't be performed"
            ).publish()
Example 2
    def run_prepare_write_cmd(self):
        """Run the prepare-write phase, then set up the LWT data validator
        and snapshot the expected data for later validation."""
        # `mutation_write_*' errors show up when the system is overloaded and
        # operations on system.paxos time out. During prepare they shouldn't
        # affect the test result, so demote their severity.
        with ignore_mutation_write_errors():
            super().run_prepare_write_cmd()

        # Stop nemesis before collecting data for validation; prefer all nodes
        # to be running. Increased timeout lets the nemesis finish cleanly.
        if self.db_cluster.nemesis_threads:
            self.db_cluster.stop_nemesis(timeout=300)

        # MV data is inserted in the background - give it time to finish.
        time.sleep(300)

        if self.db_cluster.nemesis_count > 1:
            # Parallel nemeses make validation unreliable - disable it.
            DataValidatorEvent.DataValidator(
                severity=Severity.WARNING,
                message="Test runs with parallel nemesis. Data validator is disabled.",
            ).publish()
            self.data_validator = MagicMock()
        else:
            self.data_validator = LongevityDataValidator(
                longevity_self_object=self,
                user_profile_name='c-s_lwt',
                base_table_partition_keys=self.BASE_TABLE_PARTITION_KEYS)

        validator = self.data_validator
        validator.copy_immutable_expected_data()
        validator.copy_updated_expected_data()
        validator.save_count_rows_for_deletion()

        # Nemesis was stopped before copying expected data - restart it if
        # the test is configured to run nemesis during prepare.
        if self.params.get('nemesis_during_prepare'):
            self.start_nemesis()
Example 3
    def save_count_rows_for_deletion(self):
        """Record the row count of the deletion materialized view before the
        stress run, for later comparison in deleted-rows validation.

        Stores the count in ``self.rows_before_deletion``. If the view name is
        not set, publishes a warning event and skips the count.
        """
        if not self.view_name_for_deletion_data:
            DataValidatorEvent.DataValidator(
                severity=Severity.WARNING,
                message="Problem during copying expected data: view not found. "
                        f"View name for deletion data: {self.view_name_for_deletion_data}. "
                        "Data validation of deleted rows won't be performed").publish()
            return

        # Lazy %-args (consistent with the other LOGGER.debug calls) avoid
        # formatting when debug logging is disabled.
        LOGGER.debug('Get rows count in %s MV before stress',
                     self.view_name_for_deletion_data)
        pk_name = self.base_table_partition_keys[0]
        with self.longevity_self_object.db_cluster.cql_connection_patient(
                self.longevity_self_object.db_cluster.nodes[0],
                keyspace=self.keyspace_name) as session:
            rows_before_deletion = self.longevity_self_object.fetch_all_rows(
                session=session,
                default_fetch_size=self.DEFAULT_FETCH_SIZE,
                statement=f"SELECT {pk_name} FROM {self.view_name_for_deletion_data}")
            if rows_before_deletion:
                self.rows_before_deletion = len(rows_before_deletion)
                LOGGER.debug('%s rows for deletion', self.rows_before_deletion)
    def test_data_validator_event_msgfmt(self):
        """Check string formatting and pickle round-tripping of every
        DataValidatorEvent sub-event type."""
        cases = [
            (DataValidatorEvent.DataValidator(severity=Severity.ERROR, error="e1"),
             "(DataValidatorEvent Severity.ERROR): type=DataValidator error=e1"),
            (DataValidatorEvent.ImmutableRowsValidator(severity=Severity.ERROR, error="e2"),
             "(DataValidatorEvent Severity.ERROR): type=ImmutableRowsValidator error=e2"),
            (DataValidatorEvent.UpdatedRowsValidator(severity=Severity.WARNING, message="m3"),
             "(DataValidatorEvent Severity.WARNING): type=UpdatedRowsValidator message=m3"),
            (DataValidatorEvent.DeletedRowsValidator(severity=Severity.NORMAL, message="m4"),
             "(DataValidatorEvent Severity.NORMAL): type=DeletedRowsValidator message=m4"),
        ]
        for event, expected in cases:
            self.assertEqual(str(event), expected)
            # Events must survive a pickle round-trip unchanged.
            self.assertEqual(event, pickle.loads(pickle.dumps(event)))
Example 5
    def copy_updated_expected_data(self):
        """Snapshot each materialized view of updated rows into its own
        expected-data table for later validation.

        Appends one boolean per view to ``self._validate_updated_per_view``:
        True when the copy succeeded, False otherwise.
        """
        if not self._validate_updated_data:
            return

        if not self.view_names_for_updated_data:
            self._validate_updated_per_view = [False]
            DataValidatorEvent.DataValidator(
                severity=Severity.WARNING,
                message="Problem during copying expected data: view not found. "
                        f"View names for updated data: {self.view_names_for_updated_data}. "
                        "Data validation of updated rows won't be performed"
            ).publish()
            return

        LOGGER.debug('Copy expected data for updated rows. %s',
                     self.view_names_for_updated_data)
        for src_view in self.view_names_for_updated_data:
            expected_data_table_name = self.set_expected_data_table_name(src_view)
            LOGGER.debug('Expected data table name %s', expected_data_table_name)
            if not self.longevity_self_object.copy_view(
                    node=self.longevity_self_object.db_cluster.nodes[0],
                    src_keyspace=self.keyspace_name,
                    src_view=src_view,
                    dest_keyspace=self.keyspace_name,
                    dest_table=expected_data_table_name,
                    columns_list=self.base_table_partition_keys,
                    copy_data=True):
                self._validate_updated_per_view.append(False)
                DataValidatorEvent.DataValidator(
                    severity=Severity.ERROR,
                    error=f"Problem during copying expected data from {src_view} to {expected_data_table_name}. "
                          "Data validation of updated rows won't be performed"
                ).publish()
                # BUG FIX: previously the code fell through and ALSO appended
                # True for the failed view, producing two flags per view and
                # corrupting the per-view validation list.
                continue
            self._validate_updated_per_view.append(True)
Example 6
    def test_data_validator_event_msgfmt(self):
        """Check string formatting (including period_type/event_id header) and
        pickle round-tripping of every DataValidatorEvent sub-event type."""
        event_id = "3916da00-643c-4886-bdd0-963d3ebac536"
        cases = [
            (DataValidatorEvent.DataValidator(severity=Severity.ERROR, error="e1"),
             "(DataValidatorEvent Severity.ERROR) period_type=one-time "
             "event_id=3916da00-643c-4886-bdd0-963d3ebac536: type=DataValidator error=e1"),
            (DataValidatorEvent.ImmutableRowsValidator(severity=Severity.ERROR, error="e2"),
             "(DataValidatorEvent Severity.ERROR) period_type=one-time "
             "event_id=3916da00-643c-4886-bdd0-963d3ebac536: type=ImmutableRowsValidator error=e2"),
            (DataValidatorEvent.UpdatedRowsValidator(severity=Severity.WARNING, message="m3"),
             "(DataValidatorEvent Severity.WARNING) period_type=one-time "
             "event_id=3916da00-643c-4886-bdd0-963d3ebac536: type=UpdatedRowsValidator message=m3"),
            (DataValidatorEvent.DeletedRowsValidator(severity=Severity.NORMAL, message="m4"),
             "(DataValidatorEvent Severity.NORMAL) period_type=one-time "
             "event_id=3916da00-643c-4886-bdd0-963d3ebac536: type=DeletedRowsValidator message=m4"),
        ]
        for event, expected in cases:
            # Pin the event_id so the rendered header is deterministic.
            event.event_id = event_id
            self.assertEqual(str(event), expected)
            # Events must survive a pickle round-trip unchanged.
            self.assertEqual(event, pickle.loads(pickle.dumps(event)))