def test_data_validator_event_msgfmt(self):
        """Verify string formatting and pickle round-trip of the DataValidatorEvent subtypes."""
        cases = (
            (DataValidatorEvent.DataValidator(severity=Severity.ERROR, error="e1"),
             "(DataValidatorEvent Severity.ERROR): type=DataValidator error=e1"),
            (DataValidatorEvent.ImmutableRowsValidator(severity=Severity.ERROR, error="e2"),
             "(DataValidatorEvent Severity.ERROR): type=ImmutableRowsValidator error=e2"),
            (DataValidatorEvent.UpdatedRowsValidator(severity=Severity.WARNING, message="m3"),
             "(DataValidatorEvent Severity.WARNING): type=UpdatedRowsValidator message=m3"),
            (DataValidatorEvent.DeletedRowsValidator(severity=Severity.NORMAL, message="m4"),
             "(DataValidatorEvent Severity.NORMAL): type=DeletedRowsValidator message=m4"),
        )
        for event, expected in cases:
            self.assertEqual(str(event), expected)
            # Every event must survive a pickle round-trip unchanged.
            self.assertEqual(event, pickle.loads(pickle.dumps(event)))
# Example #2
    def test_data_validator_event_msgfmt(self):
        """Verify message formatting (with a pinned event_id) and pickle round-trip of DataValidatorEvent subtypes."""
        fixed_event_id = "3916da00-643c-4886-bdd0-963d3ebac536"
        cases = (
            (DataValidatorEvent.DataValidator(severity=Severity.ERROR, error="e1"),
             "(DataValidatorEvent Severity.ERROR) period_type=one-time "
             "event_id=3916da00-643c-4886-bdd0-963d3ebac536: type=DataValidator error=e1"),
            (DataValidatorEvent.ImmutableRowsValidator(severity=Severity.ERROR, error="e2"),
             "(DataValidatorEvent Severity.ERROR) period_type=one-time "
             "event_id=3916da00-643c-4886-bdd0-963d3ebac536: type=ImmutableRowsValidator error=e2"),
            (DataValidatorEvent.UpdatedRowsValidator(severity=Severity.WARNING, message="m3"),
             "(DataValidatorEvent Severity.WARNING) period_type=one-time "
             "event_id=3916da00-643c-4886-bdd0-963d3ebac536: type=UpdatedRowsValidator message=m3"),
            (DataValidatorEvent.DeletedRowsValidator(severity=Severity.NORMAL, message="m4"),
             "(DataValidatorEvent Severity.NORMAL) period_type=one-time "
             "event_id=3916da00-643c-4886-bdd0-963d3ebac536: type=DeletedRowsValidator message=m4"),
        )
        for event, expected in cases:
            # Pin the (normally random) event_id so the formatted text is deterministic.
            event.event_id = fixed_event_id
            self.assertEqual(str(event), expected)
            # Every event must survive a pickle round-trip unchanged.
            self.assertEqual(event, pickle.loads(pickle.dumps(event)))
# Example #3
    def validate_range_expected_to_change(self, session, during_nemesis=False):
        """
        Validate that LWT updates changed exactly the expected set of rows.

        In user profile 'data_dir/c-s_lwt_basic.yaml' LWT updates the lwt_indicator and author columns with hard coded
        values.

        Two more materialized views are added. The first one holds rows that are candidates for the update
        (i.e. all rows before the update).
        The second one holds rows with lwt_indicator=30000000 (i.e. only the updated rows)

        After prepare, all primary keys from the first materialized view will be saved in a separate table as
        the expected result.

        After the updates finish, two types of validation are performed:
        1. All primary key values saved in the expected-result table should be found in both views
        2. Also validate row counts in both views against the expected count

        :param session: open DB session used to run the SELECT statements
        :param during_nemesis: when True, be quiet (less logging) and only log mismatches
            instead of asserting, since data may legitimately be in flux
        """
        # Bail out early if the expected data was never saved — nothing to compare against.
        if not (self._validate_updated_data
                and self.view_names_for_updated_data):
            LOGGER.debug(
                'Verify updated rows can\'t be performed as expected data has not been saved. '
                'See error above in the sct.log')
            return

        if not during_nemesis:
            LOGGER.debug('Verify updated rows')

        # Comma-separated projection of the base table's partition key columns.
        partition_keys = ', '.join(self.base_table_partition_keys)

        # List of tuples of correlated  view names for validation: before update, after update, expected data
        views_list = list(
            zip(
                self.view_names_for_updated_data,
                self.view_names_after_updated_data,
                [
                    self.set_expected_data_table_name(view)
                    for view in self.view_names_for_updated_data
                ],
                self._validate_updated_per_view,
            ))
        for views_set in views_list:
            # views_set[0] - view name with rows before update
            # views_set[1] - view name with rows after update
            # views_set[2] - view name with all expected partition keys
            # views_set[3] - do perform validation for the view or not
            if not during_nemesis:
                LOGGER.debug('Verify updated row. View %s', views_set[0])
            # NOTE(review): each failure below `return`s, which skips validation of the
            # remaining views in views_list — confirm this is intended rather than `continue`.
            if not views_set[3]:
                DataValidatorEvent.UpdatedRowsValidator(
                    severity=Severity.WARNING,
                    message=
                    f"Can't start validation for {views_set[0]}. Copying expected data failed. "
                    f"See error above in the sct.log").publish()
                return

            # Rows still matching the pre-update predicate (not yet updated).
            before_update_rows = self.longevity_self_object.fetch_all_rows(
                session=session,
                default_fetch_size=self.DEFAULT_FETCH_SIZE,
                statement=f"SELECT {partition_keys} FROM {views_set[0]}",
                verbose=not during_nemesis)
            if not before_update_rows:
                DataValidatorEvent.UpdatedRowsValidator(
                    severity=Severity.WARNING,
                    message=
                    f"Can't validate updated rows. Fetch all rows from {views_set[0]} failed. "
                    f"See error above in the sct.log").publish()
                return

            # Rows that have already been updated by LWT.
            after_update_rows = self.longevity_self_object.fetch_all_rows(
                session=session,
                default_fetch_size=self.DEFAULT_FETCH_SIZE,
                statement=f"SELECT {partition_keys} FROM {views_set[1]}",
                verbose=not during_nemesis)
            if not after_update_rows:
                DataValidatorEvent.UpdatedRowsValidator(
                    severity=Severity.WARNING,
                    message=
                    f"Can't validate updated rows. Fetch all rows from {views_set[1]} failed. "
                    f"See error above in the sct.log").publish()
                return

            # The full expected set of partition keys saved during prepare.
            expected_rows = self.longevity_self_object.fetch_all_rows(
                session=session,
                default_fetch_size=self.DEFAULT_FETCH_SIZE,
                statement=f"SELECT {partition_keys} FROM {views_set[2]}",
                verbose=not during_nemesis)
            if not expected_rows:
                DataValidatorEvent.UpdatedRowsValidator(
                    severity=Severity.WARNING,
                    message=
                    f"Can't validate updated row. Fetch all rows from {views_set[2]} failed. "
                    f"See error above in the sct.log").publish()
                return

            # Issue https://github.com/scylladb/scylla/issues/6181
            # Not fail the test if unexpected additional rows where found in actual result table
            if len(before_update_rows) + len(after_update_rows) > len(
                    expected_rows):
                DataValidatorEvent.UpdatedRowsValidator(
                    severity=Severity.WARNING,
                    message=f"View {views_set[0]}. "
                    f"Actual dataset length {len(before_update_rows) + len(after_update_rows)} "
                    f"more then expected dataset length: {len(expected_rows)}. "
                    f"Issue #6181").publish()
            else:
                # Union of untouched + updated rows must equal the expected set exactly.
                actual_data = sorted(before_update_rows + after_update_rows)
                expected_data = sorted(expected_rows)
                if not during_nemesis:
                    assert actual_data == expected_data,\
                        'One or more rows are not as expected, suspected LWT wrong update'

                    assert len(before_update_rows) + len(after_update_rows) == len(expected_rows), \
                        'One or more rows are not as expected, suspected LWT wrong update. '\
                        f'Actual dataset length: {len(before_update_rows) + len(after_update_rows)}, ' \
                        f'Expected dataset length: {len(expected_rows)}'

                    # raise info event in the end of test only
                    DataValidatorEvent.UpdatedRowsValidator(
                        severity=Severity.NORMAL,
                        message=
                        f"Validation updated rows finished successfully. View {views_set[0]}"
                    ).publish()
                else:
                    # During nemesis only log the counts — data may still be changing.
                    LOGGER.debug(
                        'Validation updated rows.  View %s. Actual dataset length %s, '
                        'Expected dataset length: %s.', views_set[0],
                        len(before_update_rows) + len(after_update_rows),
                        len(expected_rows))