Example #1
0
    def test_drop_old_duplicate_entries_from_table_soft_delete(self):
        """Soft-deleting duplicates keeps the newest row per (b, c) pair."""
        table_name = "__test_tmp_table__"

        table, values = self._populate_db_for_drop_duplicate_entries(
            self.engine, self.meta, table_name)
        utils.drop_old_duplicate_entries_from_table(
            self.engine, table_name, True, "b", "c")

        # Partition the inserted rows: for each unique (b, c) pair the
        # newest row (highest id) should survive; every older duplicate
        # should have been soft-deleted.
        seen = set()
        survivors = []
        duplicates = []
        for row in sorted(values, key=lambda r: r["id"], reverse=True):
            key = (("b", row["b"]), ("c", row["c"]))
            if key not in seen:
                seen.add(key)
                survivors.append(row)
            else:
                duplicates.append(row)

        base_select = table.select()

        # Live rows: the deleted marker differs from the row id.
        live_select = base_select.where(table.c.deleted != table.c.id)
        live_ids = [r["id"]
                    for r in self.engine.execute(live_select).fetchall()]
        self.assertEqual(len(live_ids), len(survivors))
        for row in survivors:
            self.assertTrue(row["id"] in live_ids)

        # Soft-deleted rows: the deleted marker equals the row id.
        dead_select = base_select.where(table.c.deleted == table.c.id)
        dead_ids = [r["id"]
                    for r in self.engine.execute(dead_select).fetchall()]
        self.assertEqual(len(dead_ids), len(values) - len(live_ids))
        for row in duplicates:
            self.assertTrue(row["id"] in dead_ids)
Example #2
0
    def test_drop_old_duplicate_entries_from_table(self):
        """Hard-deleting duplicates leaves one row per unique (b, c) pair."""
        table_name = "__test_tmp_table__"

        test_table, values = self._populate_db_for_drop_duplicate_entries(
            self.engine, self.meta, table_name)
        utils.drop_old_duplicate_entries_from_table(
            self.engine, table_name, False, 'b', 'c')

        # The newest row (highest id) wins for each unique (b, c) pair.
        seen = set()
        expected_ids = []
        for row in sorted(values, key=lambda r: r['id'], reverse=True):
            key = (('b', row['b']), ('c', row['c']))
            if key not in seen:
                seen.add(key)
                expected_ids.append(row['id'])

        rows = self.engine.execute(select([test_table.c.id])).fetchall()
        real_ids = [row[0] for row in rows]

        self.assertEqual(len(real_ids), len(expected_ids))
        for id_ in expected_ids:
            self.assertTrue(id_ in real_ids)
Example #3
0
    def test_drop_old_duplicate_entries_from_table(self):
        """Hard-deleting duplicates keeps one row per (b, c) on each engine."""
        table_name = "__test_tmp_table__"

        for engine in self.engines.values():
            meta = MetaData()
            meta.bind = engine
            test_table, values = self._populate_db_for_drop_duplicate_entries(
                engine, meta, table_name)
            utils.drop_old_duplicate_entries_from_table(
                engine, table_name, False, 'b', 'c')

            # For each unique (b, c) pair only the newest row (highest
            # id) is expected to remain.
            seen = set()
            expected_ids = []
            for row in sorted(values, key=lambda r: r['id'], reverse=True):
                key = (('b', row['b']), ('c', row['c']))
                if key not in seen:
                    seen.add(key)
                    expected_ids.append(row['id'])

            rows = engine.execute(select([test_table.c.id])).fetchall()
            real_ids = [row[0] for row in rows]

            self.assertEqual(len(real_ids), len(expected_ids))
            for id_ in expected_ids:
                self.assertTrue(id_ in real_ids)
Example #4
0
 def test_drop_dup_entries_in_file_conn(self):
     """Dropping duplicates succeeds against a file-backed SQLite engine."""
     table_name = "__test_tmp_table__"
     tmp_db_file = self.create_tempfiles([["name", ""]], ext=".sql")[0]
     # Build an engine over an on-disk database rather than :memory:.
     in_file_engine = session.EngineFacade(
         "sqlite:///%s" % tmp_db_file).get_engine()
     meta = MetaData()
     meta.bind = in_file_engine
     test_table, values = self._populate_db_for_drop_duplicate_entries(
         in_file_engine, meta, table_name)
     utils.drop_old_duplicate_entries_from_table(
         in_file_engine, table_name, False, "b", "c")
Example #5
0
 def test_drop_dup_entries_in_file_conn(self):
     """Dropping duplicates succeeds against a file-backed SQLite engine."""
     table_name = "__test_tmp_table__"
     tmp_db_file = self.create_tempfiles([['name', '']], ext='.sql')[0]
     # Use an on-disk database instead of the in-memory default.
     url = 'sqlite:///%s' % tmp_db_file
     in_file_engine = session.EngineFacade(url).get_engine()
     meta = MetaData()
     meta.bind = in_file_engine
     test_table, values = self._populate_db_for_drop_duplicate_entries(
         in_file_engine, meta, table_name)
     utils.drop_old_duplicate_entries_from_table(
         in_file_engine, table_name, False, 'b', 'c')
Example #6
0
    def test_drop_old_duplicate_entries_from_table(self):
        """Hard-deleting duplicates leaves one row per unique (b, c) pair."""
        table_name = "__test_tmp_table__"

        test_table, values = self._populate_db_for_drop_duplicate_entries(
            self.engine, self.meta, table_name)
        utils.drop_old_duplicate_entries_from_table(
            self.engine, table_name, False, "b", "c")

        # For each unique (b, c) pair only the newest row (highest id)
        # should remain after the cleanup.
        seen = set()
        expected_ids = []
        for row in sorted(values, key=lambda r: r["id"], reverse=True):
            key = (("b", row["b"]), ("c", row["c"]))
            if key not in seen:
                seen.add(key)
                expected_ids.append(row["id"])

        rows = self.engine.execute(select([test_table.c.id])).fetchall()
        real_ids = [row[0] for row in rows]

        self.assertEqual(len(real_ids), len(expected_ids))
        for id_ in expected_ids:
            self.assertTrue(id_ in real_ids)
Example #7
0
    def test_drop_old_duplicate_entries_from_table_soft_delete(self):
        """Soft delete keeps the newest (b, c) row on every engine."""
        table_name = "__test_tmp_table__"

        for engine in self.engines.values():
            meta = MetaData()
            meta.bind = engine
            table, values = self._populate_db_for_drop_duplicate_entries(
                engine, meta, table_name)
            utils.drop_old_duplicate_entries_from_table(
                engine, table_name, True, 'b', 'c')

            # Split the inserted rows: the newest row per unique (b, c)
            # pair survives, older duplicates are soft-deleted.
            seen = set()
            survivors = []
            duplicates = []
            for row in sorted(values, key=lambda r: r['id'], reverse=True):
                key = (('b', row['b']), ('c', row['c']))
                if key not in seen:
                    seen.add(key)
                    survivors.append(row)
                else:
                    duplicates.append(row)

            base_select = table.select()

            # Live rows: deleted marker differs from the row id.
            live_select = base_select.where(table.c.deleted != table.c.id)
            live_ids = [r['id']
                        for r in engine.execute(live_select).fetchall()]
            self.assertEqual(len(live_ids), len(survivors))
            for row in survivors:
                self.assertTrue(row['id'] in live_ids)

            # Soft-deleted rows: deleted marker equals the row id.
            dead_select = base_select.where(table.c.deleted == table.c.id)
            dead_ids = [r['id']
                        for r in engine.execute(dead_select).fetchall()]
            self.assertEqual(len(dead_ids), len(values) - len(live_ids))
            for row in duplicates:
                self.assertTrue(row['id'] in dead_ids)
Example #8
0
    def test_drop_old_duplicate_entries_from_table_soft_delete(self):
        """Soft delete keeps the newest (b, c) row on every engine."""
        table_name = "__test_tmp_table__"

        for engine in self.engines.values():
            meta = MetaData()
            meta.bind = engine
            table, values = self._populate_db_for_drop_duplicate_entries(
                engine, meta, table_name)
            utils.drop_old_duplicate_entries_from_table(
                engine, table_name, True, 'b', 'c')

            # Walk rows newest-first: the first row seen for a (b, c)
            # pair survives, any later (older) repeat is soft-deleted.
            seen = set()
            kept_rows = []
            dropped_rows = []
            for row in sorted(values, key=lambda r: r['id'], reverse=True):
                key = (('b', row['b']), ('c', row['c']))
                if key in seen:
                    dropped_rows.append(row)
                else:
                    seen.add(key)
                    kept_rows.append(row)

            base_select = table.select()

            # Rows still live: deleted marker != id.
            live_rows = engine.execute(
                base_select.where(table.c.deleted != table.c.id)).fetchall()
            live_ids = [r['id'] for r in live_rows]
            self.assertEqual(len(live_ids), len(kept_rows))
            for row in kept_rows:
                self.assertTrue(row['id'] in live_ids)

            # Rows soft-deleted: deleted marker == id.
            dead_rows = engine.execute(
                base_select.where(table.c.deleted == table.c.id)).fetchall()
            dead_ids = [r['id'] for r in dead_rows]
            self.assertEqual(len(dead_ids), len(values) - len(live_ids))
            for row in dropped_rows:
                self.assertTrue(row['id'] in dead_ids)