def destroy_database():
    """
    Removes the schema from the database.
    Only useful for test cases or malicious intents.
    """
    db_engine = get_engine()
    try:
        models.unregister_models(db_engine)
    except Exception as error:
        # Best effort: a missing schema is fine for a teardown helper.
        print('Cannot destroy schema -- assuming already gone, continuing:', error)
def build_database(echo=True, tests=False):
    """
    Applies the schema to the database. Run this command once to build the database.

    :param echo: Forwarded to the engine factory; enables SQL statement logging.
    :param tests: Unused here; kept for backward compatibility with callers.
    """
    db_engine = session.get_engine(echo=echo)
    models.register_models(db_engine)

    # Put the database under version control
    alembic_cfg = Config(config_get('alembic', 'cfg'))
    command.stamp(alembic_cfg, "head")
def core_config_mock(request):
    """
    Fixture to allow having per-test core.config tables without affecting the other parallel tests.

    This override works only in tests which use core function calls directly, not in the ones working
    via the API, because the normal config table is not touched and the rucio instance answering API
    calls is not aware of this mock.

    This fixture acts by creating a new copy of the "config" sql table using the :memory: sqlite engine.
    Accesses to the "models.Config" table are then redirected to this temporary table via mock.patch().
    """
    from unittest import mock
    from rucio.common.utils import generate_uuid
    from sqlalchemy.pool import StaticPool
    from rucio.db.sqla.models import ModelBase, BASE, Column, String, PrimaryKeyConstraint
    from rucio.db.sqla.session import get_session, get_maker, get_engine, create_engine, declarative_base

    # Get the fixture parameters
    table_content = []
    params = __get_fixture_param(request)
    if params:
        table_content = params.get("table_content", table_content)

    # Create an in-memory dropdown replacement table for the "models.Config" table.
    # StaticPool + check_same_thread=False keep one shared sqlite connection alive,
    # so the in-memory database survives across session checkouts.
    engine = create_engine('sqlite://', connect_args={'check_same_thread': False}, poolclass=StaticPool)
    InMemoryBase = declarative_base(bind=engine)

    # Randomized table name so parallel tests never collide on a table name.
    # NOTE(review): `_table_args` (single underscore) matches the convention used by
    # the project's ModelBase for declaring constraints -- confirm against models.py.
    class InMemoryConfig(InMemoryBase, ModelBase):
        __tablename__ = 'configs_' + generate_uuid()
        section = Column(String(128))
        opt = Column(String(128))
        value = Column(String(4000))
        _table_args = (PrimaryKeyConstraint('section', 'opt', name='CONFIGS_PK'), )

    InMemoryBase.metadata.create_all()

    # Register the new table with the associated engine into the sqlalchemy sessionmaker
    # In theory, this code must be protected by rucio.db.scla.session._LOCK, but this code will be executed
    # during test case initialization, so there is no risk here to have concurrent calls from within the
    # same process
    current_engine = get_engine()
    get_maker().configure(binds={BASE: current_engine, InMemoryBase: engine})

    # Fill the table with the requested mock data
    session = get_session()()
    for section, option, value in (table_content or []):
        InMemoryConfig(section=section, opt=option, value=value).save(flush=True, session=session)
    session.commit()

    # Redirect core.config accesses to the temporary table for the test's lifetime.
    with mock.patch('rucio.core.config.models.Config', new=InMemoryConfig):
        yield
def is_old_db():
    """
    Returns true, if alembic is used and the database is not on the same
    revision as the code base.
    """
    schema = config_get('database', 'schema', raise_exception=False)

    # Alembic is considered in use only when its version table exists.
    if not get_engine().has_table(models.AlembicVersion.__tablename__, schema):
        return False

    revisions = get_session().query(models.AlembicVersion.version_num)
    if revisions.count() == 0:
        return False
    return str(revisions.first()[0]) != alembicrevision.ALEMBIC_REVISION
def drop_everything(echo=True):
    """
    Pre-gather all named constraints and table names, and drop everything.
    This is better than using metadata.reflect(); metadata.drop_all()
    as it handles cyclical constraints between tables.
    Ref. http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything

    :param echo: When True the engine logs the SQL it executes.
    """
    engine = session.get_engine(echo=echo)
    conn = engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    # Drop FK constraints first so the tables can then be dropped in any
    # order. Each drop is best-effort: failures are printed and skipped.
    # FIX: narrowed bare `except:` to `except Exception:` so SystemExit /
    # KeyboardInterrupt are no longer swallowed.
    for fkc in all_fks:
        try:
            print(str(DropConstraint(fkc)) + ';')
            conn.execute(DropConstraint(fkc))
        except Exception:
            print(format_exc())

    for table in tbs:
        try:
            print(str(DropTable(table)).strip() + ';')
            conn.execute(DropTable(table))
        except Exception:
            print(format_exc())

    trans.commit()
def build_database():
    """
    Applies the schema to the database. Run this command once to build the database.
    """
    db_engine = get_engine()

    schema = config_get('database', 'schema', raise_exception=False, check_config_table=False)
    if schema:
        print('Schema set in config, trying to create schema:', schema)
        try:
            db_engine.execute(CreateSchema(schema))
        except Exception as error:
            # Best effort: the schema may already exist or require manual setup.
            print('Cannot create schema, please validate manually if schema creation is needed, continuing:', error)

    models.register_models(db_engine)

    # Put the database under version control
    alembic_cfg = Config(config_get('alembic', 'cfg'))
    command.stamp(alembic_cfg, "head")
def drop_everything():
    """
    Pre-gather all named constraints and table names, and drop everything.
    This is better than using metadata.reflect(); metadata.drop_all()
    as it handles cyclical constraints between tables.
    Ref. https://github.com/sqlalchemy/sqlalchemy/wiki/DropEverything
    """
    engine = get_engine()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    with engine.begin() as conn:
        inspector = inspect(conn)  # type: Union[Inspector, PGInspector]

        # get_sorted_table_and_fkc_names() yields tables in dependency order,
        # with cycle-breaking FK constraints grouped under a (None, fkcs)
        # entry; iterating in reverse drops referencing tables first.
        # schema='*' inspects every schema visible on this connection.
        for tname, fkcs in reversed(
                inspector.get_sorted_table_and_fkc_names(schema='*')):
            if tname:
                drop_table_stmt = DropTable(
                    Table(tname, MetaData(), schema='*'))
                conn.execute(drop_table_stmt)
            elif fkcs:
                # Constraints involved in cycles must be dropped explicitly,
                # which requires ALTER TABLE support from the dialect.
                if not engine.dialect.supports_alter:
                    continue
                for tname, fkc in fkcs:
                    fk_constraint = ForeignKeyConstraint((), (), name=fkc)
                    # Attach the constraint to a throwaway Table so the DDL
                    # compiler can render "ALTER TABLE <tname> DROP ...".
                    Table(tname, MetaData(), fk_constraint)
                    drop_constraint_stmt = DropConstraint(fk_constraint)
                    conn.execute(drop_constraint_stmt)

        # Drop the configured schema itself, if one is set.
        schema = config_get('database', 'schema', raise_exception=False)
        if schema:
            conn.execute(DropSchema(schema, cascade=True))

        # Postgres enum types are not dropped with their tables; clean them up.
        if engine.dialect.name == 'postgresql':
            assert isinstance(inspector, PGInspector), 'expected a PGInspector'
            for enum in inspector.get_enums(schema='*'):
                sqlalchemy.Enum(**enum).drop(bind=conn)
def rename_vo(old_vo, new_vo, insert_new_vo=False, description=None, email=None, commit_changes=False, skip_history=False):
    """
    Updates rows so that entries associated with `old_vo` are now associated with `new_vo` as part of multi-VO migration.

    :param old_vo: The 3 character string for the current VO (for a single-VO instance this will be 'def').
    :param new_vo: The 3 character string for the new VO.
    :param insert_new_vo: If True then an entry for `new_vo` is created in the database.
    :param description: Full description of the new VO, unused if `insert_new_vo` is False.
    :param email: Admin email for the new VO, unused if `insert_new_vo` is False.
    :param commit_changes: If True then changes are made against the database directly.
                           If False, then nothing is commited and the commands needed are dumped to be run later.
    :param skip_history: If True then tables without FKC containing historical data will not be converted to save time.
    :returns: True on success, False if any command failed (changes are then not committed).
    """
    success = True
    engine = session.get_engine()
    conn = engine.connect()
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData(bind=conn, reflect=True)
    dialect = engine.dialect.name

    # Gather all the columns that need updating and all relevant foreign key constraints
    all_fks = []
    tables_and_columns = []
    for table_name in inspector.get_table_names():
        if skip_history and ('_history' in table_name or '_hist_recent' in table_name):
            continue
        fks = []
        table = Table(table_name, metadata)
        for column in table.c:
            if 'scope' in column.name or column.name == 'account':
                tables_and_columns.append((table, column))
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            if 'scope' in fk['referred_columns'] or 'account' in fk['referred_columns']:
                fks.append(ForeignKeyConstraint(fk['constrained_columns'],
                                                [fk['referred_table'] + '.' + r for r in fk['referred_columns']],
                                                name=fk['name'], table=table, **fk['options']))
        all_fks.extend(fks)

    try:
        bound_params = {'old_vo': old_vo,
                        'new_vo': new_vo,
                        'old_vo_suffix': '' if old_vo == 'def' else old_vo,
                        'new_vo_suffix': '' if new_vo == 'def' else '@%s' % new_vo,
                        'split_character': '@',
                        'int_1': 1,
                        'int_2': 2,
                        'new_description': description,
                        'new_email': email,
                        'datetime': datetime.utcnow()}

        # Text versions of the parameters, for printing runnable SQL when dumping
        bound_params_text = {}
        for key in bound_params:
            if isinstance(bound_params[key], int):
                bound_params_text[key] = bound_params[key]
            else:
                bound_params_text[key] = "'%s'" % bound_params[key]

        if insert_new_vo:
            table = Table('vos', metadata)
            insert_command = table.insert().values(vo=bindparam('new_vo'),
                                                   description=bindparam('new_description'),
                                                   email=bindparam('new_email'),
                                                   updated_at=bindparam('datetime'),
                                                   created_at=bindparam('datetime'))
            print(str(insert_command) % bound_params_text + ';')
            if commit_changes:
                conn.execute(insert_command, bound_params)

        # Drop all FKCs affecting InternalAccounts/Scopes
        for fk in all_fks:
            print(str(DropConstraint(fk)) + ';')
            if commit_changes:
                conn.execute(DropConstraint(fk))

        # Update columns
        for table, column in tables_and_columns:
            update_command = table.update().where(split_vo(dialect, column, return_vo=True) == bindparam('old_vo_suffix'))
            if new_vo == 'def':
                # Moving to the default VO strips the suffix entirely
                update_command = update_command.values({column.name: split_vo(dialect, column)})
            else:
                update_command = update_command.values({column.name: split_vo(dialect, column) + cast(bindparam('new_vo_suffix'), CHAR(4))})
            print(str(update_command) % bound_params_text + ';')
            if commit_changes:
                conn.execute(update_command, bound_params)

        table = Table('rses', metadata)
        update_command = table.update().where(table.c.vo == bindparam('old_vo')).values(vo=bindparam('new_vo'))
        print(str(update_command) % bound_params_text + ';')
        if commit_changes:
            conn.execute(update_command, bound_params)

        # Re-add the FKCs we dropped
        for fkc in all_fks:
            print(str(AddConstraint(fkc)) + ';')
            if commit_changes:
                conn.execute(AddConstraint(fkc))
    # FIX: narrowed bare `except:` to `except Exception:` so SystemExit /
    # KeyboardInterrupt still abort instead of being reported as a failure.
    except Exception:
        success = False
        print(format_exc())
        print('Exception occured, changes not committed to DB.')

    if commit_changes and success:
        trans.commit()
    trans.close()
    return success
def remove_vo(vo, commit_changes=False, skip_history=False):
    """
    Deletes rows associated with `vo` as part of multi-VO migration.

    :param vo: The 3 character string for the VO being removed from the DB.
    :param commit_changes: If True then changes are made against the database directly.
                           If False, then nothing is commited and the commands needed are dumped to be run later.
    :param skip_history: If True then tables without FKC containing historical data will not be converted to save time.
    :returns: True on success, False if any command failed (changes are then not committed).
    """
    success = True
    engine = session.get_engine()
    conn = engine.connect()
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData(bind=conn, reflect=True)
    dialect = engine.dialect.name

    # Gather all the columns that need deleting and all relevant foreign key constraints
    all_fks = []
    tables_and_columns = []
    tables_and_columns_rse = []
    for table_name in inspector.get_table_names():
        if skip_history and ('_history' in table_name or '_hist_recent' in table_name):
            continue
        fks = []
        table = Table(table_name, metadata)
        for column in table.c:
            if 'scope' in column.name or column.name == 'account':
                tables_and_columns.append((table, column))
            if 'rse_id' in column.name:
                tables_and_columns_rse.append((table, column))
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            if 'scope' in fk['referred_columns'] or 'account' in fk['referred_columns'] or ('rse' in fk['referred_table'] and 'id' in fk['referred_columns']):
                fks.append(ForeignKeyConstraint(fk['constrained_columns'],
                                                [fk['referred_table'] + '.' + r for r in fk['referred_columns']],
                                                name=fk['name'], table=table, **fk['options']))
        all_fks.extend(fks)

    try:
        bound_params = {'vo': vo,
                        'vo_suffix': '' if vo == 'def' else vo,
                        'split_character': '@',
                        'int_1': 1,
                        'int_2': 2}

        # Text versions of the parameters, for printing runnable SQL when dumping
        bound_params_text = {}
        for key in bound_params:
            if isinstance(bound_params[key], int):
                bound_params_text[key] = bound_params[key]
            else:
                bound_params_text[key] = "'%s'" % bound_params[key]

        # Drop all FKCs affecting InternalAccounts/Scopes or RSE IDs
        for fk in all_fks:
            print(str(DropConstraint(fk)) + ';')
            if commit_changes:
                conn.execute(DropConstraint(fk))

        # Delete rows
        for table, column in tables_and_columns:
            delete_command = table.delete().where(split_vo(dialect, column, return_vo=True) == bindparam('vo_suffix'))
            print(str(delete_command) % bound_params_text + ';')
            if commit_changes:
                conn.execute(delete_command, bound_params)

        # Delete rows referencing the VO's RSEs, then the RSEs themselves
        rse_table = Table('rses', metadata)
        for table, column in tables_and_columns_rse:
            delete_command = table.delete().where(column == rse_table.c.id).where(rse_table.c.vo == bindparam('vo'))
            print(str(delete_command) % bound_params_text + ';')
            if commit_changes:
                conn.execute(delete_command, bound_params)

        delete_command = rse_table.delete().where(rse_table.c.vo == bindparam('vo'))
        print(str(delete_command) % bound_params_text + ';')
        if commit_changes:
            conn.execute(delete_command, bound_params)

        table = Table('vos', metadata)
        delete_command = table.delete().where(table.c.vo == bindparam('vo'))
        print(str(delete_command) % bound_params_text + ';')
        if commit_changes:
            conn.execute(delete_command, bound_params)

        # Re-add the FKCs we dropped
        for fkc in all_fks:
            print(str(AddConstraint(fkc)) + ';')
            if commit_changes:
                conn.execute(AddConstraint(fkc))
    # FIX: narrowed bare `except:` to `except Exception:` so SystemExit /
    # KeyboardInterrupt still abort instead of being reported as a failure.
    except Exception:
        success = False
        print(format_exc())
        print('Exception occured, changes not committed to DB.')

    if commit_changes and success:
        trans.commit()
    trans.close()
    return success
def destroy_database(echo=True):
    """
    Removes the schema from the database.
    Only useful for test cases or malicious intents.

    :param echo: When True the engine logs the SQL it executes.
    """
    db_engine = session.get_engine(echo=echo)
    models.unregister_models(db_engine)