Example #1
    def test_drop_old_duplicate_entries_from_table(self):
        table_name = "__test_tmp_table__"

        for key, engine in self.engines.items():
            meta = MetaData()
            meta.bind = engine
            test_table, values = self._populate_db_for_drop_duplicate_entries(
                engine, meta, table_name)

            utils.drop_old_duplicate_entries_from_table(
                engine, table_name, False, 'b', 'c')

            uniq_values = set()
            expected_ids = []
            for value in sorted(values, key=lambda x: x['id'], reverse=True):
                uniq_value = (('b', value['b']), ('c', value['c']))
                if uniq_value in uniq_values:
                    continue
                uniq_values.add(uniq_value)
                expected_ids.append(value['id'])

            real_ids = [row[0] for row in
                        engine.execute(select([test_table.c.id])).fetchall()]

            self.assertEqual(len(real_ids), len(expected_ids))
            for id_ in expected_ids:
                self.assertIn(id_, real_ids)
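Every test on this page relies on a _populate_db_for_drop_duplicate_entries helper that the excerpts do not show. Here is a minimal sketch of what it has to provide, inferred from the assertions above; the exact column set and seed rows are assumptions:

from sqlalchemy import Column, DateTime, Integer, String, Table

    def _populate_db_for_drop_duplicate_entries(self, engine, meta,
                                                table_name):
        # Hypothetical reconstruction: a table with a soft-delete 'deleted'
        # column and seed rows that collide on the (b, c) pair.
        test_table = Table(table_name, meta,
                           Column('id', Integer, primary_key=True,
                                  nullable=False),
                           Column('a', Integer),
                           Column('b', Integer),
                           Column('c', String(255)),
                           Column('deleted', Integer, default=0),
                           Column('deleted_at', DateTime),
                           Column('updated_at', DateTime))
        test_table.create()

        values = [
            {'id': 1, 'a': 10, 'b': 20, 'c': 'x'},
            {'id': 2, 'a': 11, 'b': 20, 'c': 'x'},  # duplicate of id=1 on (b, c)
            {'id': 3, 'a': 12, 'b': 21, 'c': 'y'},
            {'id': 4, 'a': 13, 'b': 21, 'c': 'y'},  # duplicate of id=3 on (b, c)
            {'id': 5, 'a': 14, 'b': 22, 'c': 'z'},
        ]
        engine.execute(test_table.insert(), values)
        return test_table, values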
Example #3
    def test_drop_old_duplicate_entries_from_table_soft_delete(self):
        table_name = "test_drop_old_duplicate_entries_from_table_soft_delete"

        for key, engine in self.engines.items():
            meta = MetaData()
            meta.bind = engine
            table, values = self._populate_db_for_drop_duplicate_entries(engine, meta, table_name)
            utils.drop_old_duplicate_entries_from_table(engine, table_name, True, "b", "c")
            uniq_values = set()
            expected_values = []
            soft_deleted_values = []

            for value in sorted(values, key=lambda x: x["id"], reverse=True):
                uniq_value = (("b", value["b"]), ("c", value["c"]))
                if uniq_value in uniq_values:
                    soft_deleted_values.append(value)
                    continue
                uniq_values.add(uniq_value)
                expected_values.append(value)

            base_select = table.select()

            rows_select = base_select.where(table.c.deleted != table.c.id)
            row_ids = [row["id"] for row in engine.execute(rows_select).fetchall()]
            self.assertEqual(len(row_ids), len(expected_values))
            for value in expected_values:
                self.assertIn(value["id"], row_ids)

            deleted_rows_select = base_select.where(table.c.deleted == table.c.id)
            deleted_rows_ids = [row["id"] for row in engine.execute(deleted_rows_select).fetchall()]
            self.assertEqual(len(deleted_rows_ids), len(values) - len(row_ids))
            for value in soft_deleted_values:
                self.assertIn(value["id"], deleted_rows_ids)
            table.drop()
Example #4
    def test_drop_old_duplicate_entries_from_table(self):
        table_name = "test_drop_old_duplicate_entries_from_table"

        for key, engine in self.engines.items():
            meta = MetaData()
            meta.bind = engine
            test_table, values = self._populate_db_for_drop_duplicate_entries(engine, meta, table_name)

            utils.drop_old_duplicate_entries_from_table(engine, table_name, False, "b", "c")

            uniq_values = set()
            expected_ids = []
            for value in sorted(values, key=lambda x: x["id"], reverse=True):
                uniq_value = (("b", value["b"]), ("c", value["c"]))
                if uniq_value in uniq_values:
                    continue
                uniq_values.add(uniq_value)
                expected_ids.append(value["id"])

            real_ids = [row[0] for row in engine.execute(select([test_table.c.id])).fetchall()]

            self.assertEqual(len(real_ids), len(expected_ids))
            for id_ in expected_ids:
                self.assertIn(id_, real_ids)
            test_table.drop()
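For context, here is a condensed sketch of what the utility under test does, modelled on the oslo-incubator implementation that these OpenStack snippets exercise; the real version also stamps deleted_at and updated_at on soft delete, which is omitted here:

from sqlalchemy import MetaData, Table, func, select
from sqlalchemy.sql.expression import literal_column


def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
                                          use_soft_delete, *uc_column_names):
    # For every group of rows that shares the unique-constraint columns,
    # keep only the newest row (the one with the highest id).
    meta = MetaData()
    meta.bind = migrate_engine
    table = Table(table_name, meta, autoload=True)

    columns_for_group_by = [table.c[name] for name in uc_column_names]
    duplicated_rows_select = select(
        [func.max(table.c.id)] + columns_for_group_by,
        group_by=columns_for_group_by,
        having=func.count(table.c.id) > 1)

    for row in migrate_engine.execute(duplicated_rows_select):
        # Everything in this group except the max(id) row goes away.
        delete_condition = table.c.id != row[0]
        for name in uc_column_names:
            delete_condition &= table.c[name] == row[name]

        if use_soft_delete:
            # Soft delete marks a row by setting deleted = id, which is
            # why the tests compare table.c.deleted against table.c.id.
            delete_statement = table.update().where(
                delete_condition).values(deleted=literal_column('id'))
        else:
            delete_statement = table.delete().where(delete_condition)
        migrate_engine.execute(delete_statement)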
Example #5
def upgrade(migrate_engine):
    meta = MetaData(bind=migrate_engine)
    t = Table(TABLE_NAME, meta, autoload=True)

    utils.drop_old_duplicate_entries_from_table(migrate_engine, TABLE_NAME,
                                                True, *COLUMNS)
    uc = UniqueConstraint(*COLUMNS, table=t, name=UC_NAME)
    uc.create()
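The upgrade() above assumes module-level constants defined elsewhere in the migration script. A plausible layout; the concrete values here are illustrative, not taken from the original project:

from migrate import UniqueConstraint
from sqlalchemy import MetaData, Table

TABLE_NAME = 'services'               # hypothetical table under migration
COLUMNS = ('host', 'topic')           # columns the new constraint covers
UC_NAME = 'uniq_services0host0topic'  # OpenStack-style constraint name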
Example #7
    def test_drop_old_duplicate_entries_from_table_soft_delete(self):
        table_name = "test_drop_old_duplicate_entries_from_table_soft_delete"

        for key, engine in self.engines.items():
            meta = MetaData()
            meta.bind = engine
            table, values = self._populate_db_for_drop_duplicate_entries(
                engine, meta, table_name)
            utils.drop_old_duplicate_entries_from_table(
                engine, table_name, True, 'b', 'c')
            uniq_values = set()
            expected_values = []
            soft_deleted_values = []

            for value in sorted(values, key=lambda x: x['id'], reverse=True):
                uniq_value = (('b', value['b']), ('c', value['c']))
                if uniq_value in uniq_values:
                    soft_deleted_values.append(value)
                    continue
                uniq_values.add(uniq_value)
                expected_values.append(value)

            base_select = table.select()

            rows_select = base_select.where(table.c.deleted != table.c.id)
            row_ids = [
                row['id'] for row in engine.execute(rows_select).fetchall()
            ]
            self.assertEqual(len(row_ids), len(expected_values))
            for value in expected_values:
                self.assertIn(value['id'], row_ids)

            deleted_rows_select = base_select.where(
                table.c.deleted == table.c.id)
            deleted_rows_ids = [
                row['id']
                for row in engine.execute(deleted_rows_select).fetchall()
            ]
            self.assertEqual(len(deleted_rows_ids), len(values) - len(row_ids))
            for value in soft_deleted_values:
                self.assertIn(value['id'], deleted_rows_ids)
            table.drop()
Example #8
    def test_drop_old_duplicate_entries_from_table_soft_delete(self):
        table_name = "__test_tmp_table__"

        for key, engine in self.engines.items():
            meta = MetaData()
            meta.bind = engine
            table, values = self._populate_db_for_drop_duplicate_entries(
                engine, meta, table_name)
            utils.drop_old_duplicate_entries_from_table(engine, table_name,
                                                        True, 'b', 'c')
            uniq_values = set()
            expected_values = []
            soft_deleted_values = []

            for value in sorted(values, key=lambda x: x['id'], reverse=True):
                uniq_value = (('b', value['b']), ('c', value['c']))
                if uniq_value in uniq_values:
                    soft_deleted_values.append(value)
                    continue
                uniq_values.add(uniq_value)
                expected_values.append(value)

            base_select = table.select()

            rows_select = base_select.where(table.c.deleted != table.c.id)
            row_ids = [row['id'] for row in
                       engine.execute(rows_select).fetchall()]
            self.assertEqual(len(row_ids), len(expected_values))
            for value in expected_values:
                self.assertIn(value['id'], row_ids)

            deleted_rows_select = base_select.where(
                table.c.deleted == table.c.id)
            deleted_rows_ids = [row['id'] for row in
                                engine.execute(deleted_rows_select).fetchall()]
            self.assertEqual(len(deleted_rows_ids),
                             len(values) - len(row_ids))
            for value in soft_deleted_values:
                self.assertIn(value['id'], deleted_rows_ids)
Example #9
def upgrade(migrate_engine):
    meta = MetaData(bind=migrate_engine)
    key_pairs = Table(TABLE_NAME, meta, autoload=True)
    utils.drop_old_duplicate_entries_from_table(migrate_engine, TABLE_NAME,
                                                True, *UC_COLUMNS)
    old_idx = None
    # Drop the old index because the new UniqueConstraint can be used instead.
    for index in key_pairs.indexes:
        if index.name == OLD_IDX_NAME:
            index.drop()
            old_idx = index

    # index.drop() in SQLAlchemy-migrate will issue a DROP INDEX statement to
    # the DB but WILL NOT update the table metadata to remove the `Index`
    # object. This can cause subsequent calls like drop or create constraint
    # on that table to fail. The solution is to update the table metadata to
    # reflect the now-dropped index.
    if old_idx:
        key_pairs.indexes.remove(old_idx)
    uc = UniqueConstraint(*UC_COLUMNS, table=key_pairs, name=UC_NAME)
    uc.create()
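A usage note on the metadata cleanup above: Table.indexes is a plain Python set of Index objects in SQLAlchemy, so once DROP INDEX has been issued, key_pairs.indexes.remove(old_idx) is all that is needed to bring the reflected metadata back in line before the UniqueConstraint is created.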