import logging

import sqlalchemy
from sqlalchemy import func
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy.engine import url as sa_url
from sqlalchemy.sql.expression import literal_column

from oslo_db import exception
from oslo_db._i18n import _LI
from oslo_utils import timeutils

LOG = logging.getLogger(__name__)


# NOTE: written as a classmethod (note the ``cls`` argument); it is
# expected to be attached to a backend/provisioning class.
def _ensure_backend_available(cls, url):
    url = sa_url.make_url(str(url))
    try:
        eng = sqlalchemy.create_engine(url)
    except ImportError as i_e:
        # SQLAlchemy performs an "import" of the DBAPI module
        # within create_engine().  So if ibm_db_sa, cx_oracle etc.
        # isn't installed, we get an ImportError here.
        LOG.info(
            _LI("The %(dbapi)s backend is unavailable: %(err)s"),
            dict(dbapi=url.drivername, err=i_e))
        raise exception.BackendNotAvailable(
            "Backend '%s' is unavailable: No DBAPI installed" %
            url.drivername)
    else:
        try:
            conn = eng.connect()
        except sqlalchemy.exc.DBAPIError as d_e:
            # Upon connect, SQLAlchemy calls dbapi.connect().  This
            # usually raises OperationalError and should always at
            # least raise a SQLAlchemy-wrapped DBAPI Error.
            LOG.info(
                _LI("The %(dbapi)s backend is unavailable: %(err)s"),
                dict(dbapi=url.drivername, err=d_e))
            raise exception.BackendNotAvailable(
                "Backend '%s' is unavailable: Could not connect" %
                url.drivername)
        else:
            conn.close()
            return eng
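# NOTE: A minimal usage sketch, not part of the original module.  The
# ``Backend`` holder class is hypothetical (in practice the function is
# attached to a backend/provisioning class, hence ``cls``), and the
# default URL is an assumption chosen because SQLite needs no server.
# The point is the calling pattern: probe the URL once up front, and
# treat BackendNotAvailable as "skip this backend", not a fatal error.
def _example_probe(url="sqlite://"):
    try:
        eng = Backend._ensure_backend_available(url)  # hypothetical class
    except exception.BackendNotAvailable as bne:
        LOG.info("Skipping unavailable backend: %s", bne)
        return None
    # A successful probe returns a working engine; release its pool
    # when the probe result is all that is needed.
    eng.dispose()
    return url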
def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
                                          use_soft_delete,
                                          *uc_column_names):
    """Drop all old rows having the same values for columns in uc_column_names.

    This method drops (or marks as `deleted`, if use_soft_delete is
    True) old duplicate rows from the table named `table_name`.

    :param migrate_engine:  SQLAlchemy engine
    :param table_name:      Table with duplicates
    :param use_soft_delete: If True, rows are marked as `deleted`;
                            if False, rows are removed from the table
    :param uc_column_names: Unique constraint columns
    """
    meta = MetaData()
    meta.bind = migrate_engine

    table = Table(table_name, meta, autoload=True)
    columns_for_group_by = [table.c[name] for name in uc_column_names]

    columns_for_select = [func.max(table.c.id)]
    columns_for_select.extend(columns_for_group_by)

    duplicated_rows_select = sqlalchemy.sql.select(
        columns_for_select,
        group_by=columns_for_group_by,
        having=func.count(table.c.id) > 1)

    for row in migrate_engine.execute(duplicated_rows_select).fetchall():
        # NOTE(boris-42): Do not remove the row that has the biggest ID.
        delete_condition = table.c.id != row[0]
        is_none = None  # workaround for pyflakes
        delete_condition &= table.c.deleted_at == is_none
        for name in uc_column_names:
            delete_condition &= table.c[name] == row[name]

        rows_to_delete_select = sqlalchemy.sql.select(
            [table.c.id]).where(delete_condition)
        for row in migrate_engine.execute(rows_to_delete_select).fetchall():
            LOG.info(
                _LI("Deleting duplicated row with id: %(id)s from table: "
                    "%(table)s"),
                dict(id=row[0], table=table_name))

        if use_soft_delete:
            # Soft delete: setting ``deleted`` to the row's own id
            # (rather than a boolean) keeps unique constraints that
            # include the ``deleted`` column satisfiable for future
            # live rows; ``updated_at`` is left unchanged.
            delete_statement = (
                table.update()
                .where(delete_condition)
                .values({
                    'deleted': literal_column('id'),
                    'updated_at': literal_column('updated_at'),
                    'deleted_at': timeutils.utcnow(),
                }))
        else:
            delete_statement = table.delete().where(delete_condition)
        migrate_engine.execute(delete_statement)
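# NOTE: A minimal usage sketch, not part of the original module.  The
# ``instances`` table and ``uuid`` column are assumptions for
# illustration.  A typical caller is a schema migration that
# deduplicates a table immediately before adding a unique constraint
# over the same columns; soft delete keeps the shadowed rows around
# for auditing instead of dropping them outright.
def _example_dedup_before_unique_constraint(migrate_engine):
    # Keeps only the newest (max id) row per uuid; older duplicates
    # are soft-deleted (deleted=id, deleted_at=now).
    drop_old_duplicate_entries_from_table(
        migrate_engine, 'instances', True, 'uuid')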