def print_alembic_version(engine: Engine, alembic_version_table: str):
    """Print last deployed revision number from Alembic version table."""
    with OperationManager('Checking Alembic version from database'):
        alembic_version_query = f"SELECT * FROM {alembic_version_table}"
        try:
            alembic_version = execute_query(engine=engine, query=alembic_version_query)[0][0]
            logger.info(f"Alembic version: {alembic_version}")
        except IndexError:
            logger.info(
                f"Table {alembic_version_table} is empty. No deployed revisions."
            )
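
# Minimal usage sketch: how print_alembic_version might be called against an
# existing engine, assuming Alembic's default version table name 'alembic_version'.
# The function and engine names here are illustrative only.
def _example_print_alembic_version(engine: Engine):
    """Print the deployed revision stored in the (assumed) default Alembic table."""
    print_alembic_version(engine, alembic_version_table='alembic_version')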
def prepare_and_exec_query(engine: Engine, query_path: str, param_list: list) -> list:
    """Read the query from query_path, expand its question mark placeholder to match
    the number of given parameters and execute the query.
    """
    with open(query_path, 'r', encoding='utf-8') as file:
        query = file.read()
    # Replace the single '?' in the query file with one placeholder per parameter
    param_placeholder = ','.join(['?'] * len(param_list))
    query = query.replace('?', param_placeholder)
    result = execute_query(engine, query=query, variables=tuple(param_list), include_headers=True)
    return result
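
# Illustrative usage sketch for prepare_and_exec_query: the query file is expected to
# contain a single '?' placeholder (e.g. "SELECT * FROM store.Clients WHERE id IN (?)"),
# which expands to '?,?,?' for three parameters. The file path, table and parameter
# values below are hypothetical examples, not part of this module.
def _example_prepare_and_exec_query(engine: Engine) -> list:
    """Run a parameterized IN-list query from a hypothetical query file."""
    return prepare_and_exec_query(
        engine,
        query_path='./queries/get_clients.sql',   # hypothetical path
        param_list=[1, 2, 3]
    )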
def exec_update_extended_properties(engine: Engine, object_name: str, object_metadata: dict,
                                    extended_property_name: str, extended_property_value: str):
    """Update object's extended properties by calling either procedure
    sp_addextendedproperty or sp_updateextendedproperty.

    If object_metadata is None, the object does not exist in the database.
    """
    try:
        if object_metadata is None:
            raise Exception('Object not found in database.')
        # Add the property if the object does not have it yet, otherwise update it
        if object_metadata.get(extended_property_name) is None:
            procedure_call = 'EXEC sp_addextendedproperty '
        else:
            procedure_call = 'EXEC sp_updateextendedproperty '
        procedure_call += '@name=?, @value=?, @level0type=?, @level0name=?'
        params = [
            extended_property_name,
            extended_property_value,
            'schema',
            object_metadata.get('schema_name')
        ]
        object_type = object_metadata.get('object_type')
        parent_type = object_metadata.get('parent_type')
        if object_type in ('view', 'table', 'function', 'procedure', 'column'):
            # For columns the level 1 object is the parent table or view
            level1type = parent_type if parent_type is not None else object_type
            procedure_call += ', @level1type=?, @level1name=?'
            params.extend([level1type, object_metadata.get('object_name')])
            if object_type == 'column':
                procedure_call += ', @level2type=?, @level2name=?'
                params.extend(['column', object_metadata.get('column_name')])
        execute_query(engine, procedure_call, tuple(params))
    except Exception as err:
        logger.warning(
            f"Failed to update {object_name} extended property '{extended_property_name}'."
        )
        logger.debug(f"Extended property value: {extended_property_value}")
        logger.debug(err, exc_info=1)
        logger.info("------")
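
# Illustrative usage sketch for exec_update_extended_properties: adding a 'Description'
# property to a column. The metadata keys follow the ones read by the function above
# (schema_name, object_name, object_type, parent_type, column_name); the concrete
# schema, table, column and property values are hypothetical examples.
def _example_update_column_description(engine: Engine):
    """Add or update the 'Description' extended property of a hypothetical column."""
    column_metadata = {
        'schema_name': 'store',
        'object_name': 'Clients',
        'object_type': 'column',
        'parent_type': 'table',
        'column_name': 'name'
    }
    exec_update_extended_properties(
        engine,
        object_name='store.Clients.name',
        object_metadata=column_metadata,
        extended_property_name='Description',
        extended_property_value='Name of the client'
    )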
def create_db_login(engine: Engine, login_name: str, login_password: str, default_db: str):
    '''First, kill all sessions related to login. Second, drop login.
    Third, create login with given password and default database.

    Overwrite example:
    ------------------
    @action('create-db-login', True, ['init'])
    def create_db_login(context):
        config_dict = merge_config_files(context.config_filename)
        login_name = config_dict.get('DJANGO', {}).get('KP_REP_DB_USRNAME', '$')
        login_password = config_dict.get('DJANGO', {}).get('KP_REP_DB_PW', '$')
        default_db = config_dict.get('DJANGO', {}).get('KP_REP_DB_NAME', '$')
        op.create_db_login(context.engine, login_name, login_password, default_db)

    Arguments
    ---------
    engine
        SQL Alchemy engine.
    login_name
        Login name.
    login_password
        Login password.
    default_db
        Default database of login.
    '''
    with OperationManager('Creating database login'):
        login = execute_query(engine, QUERIES.get('get_login_name'), variables=[login_name])
        login_exists = len(login) > 0
        if login_exists:
            if len(login[0]) > 0 and login[0][1] != default_db:
                raise Exception(
                    f'Login {login_name} already exists and is assigned to a different database than {default_db}.')
        # Kill open sessions of the login before dropping and recreating it
        session_ids = execute_query(engine, QUERIES.get('get_login_session'), variables=[login_name])
        for sid in session_ids:
            execute_query(engine, f'KILL {sid.session_id}')
        if login_exists:
            execute_query(engine, f'DROP LOGIN {login_name}')
        if login_password == 'SALASANA':
            logger.info(f'Creating login {login_name} with default password.')
        create_query = f"""CREATE LOGIN {login_name} WITH PASSWORD='{login_password}',
            DEFAULT_DATABASE=[{default_db}], DEFAULT_LANGUAGE=[us_english],
            CHECK_EXPIRATION=OFF, CHECK_POLICY=OFF"""
        execute_query(engine, create_query)
def print_git_version(engine: Engine, git_table_schema: str, git_table: str):
    """Print repository, branch and version information from the Git version table."""
    with OperationManager('Checking Git version from database'):
        try:
            metadata = MetaData(engine)
            git_version_table = _sqla_git_table(metadata, git_table_schema, git_table)
            git_version_query = git_version_table.select()
            result = execute_query(engine=engine, query=git_version_query)[0]
            repository, branch, version = result
            logger.info(f"Repository: {repository}")
            logger.info(f"Branch: {branch}")
            logger.info(f"Version: {version}")
        except Exception as error:
            logger.error(
                'Failed to read Git version table. See log for detailed error message.'
            )
            logger.debug(error)
def create_db(engine: Engine, db_name: str, db_path: str, log_path: str, init_size: int,
              max_size: int, file_growth: int, compatibility_level: str, collation: str):
    '''First, kill all database sessions. Second, drop database if it exists.
    Third, create database according to given parameters.

    Arguments
    ---------
    engine
        SQL Alchemy engine connected to 'master' database.
    db_name
        Name of the database.
    db_path
        Path to database data file.
    log_path
        Path to database log file.
    init_size
        Initial size of database data file (MB).
    max_size
        Max size of database data file (MB).
    file_growth
        How much the database data file will grow when it runs out of space (MB).
    compatibility_level
        Compatibility level of database.
    collation
        Collation of database.
    '''
    def drop_database(database_id: Union[str, int]):
        '''Kill all connections to the database and drop the database.'''
        session_ids = execute_query(engine, QUERIES.get('get_db_session'), variables=[database_id])
        for sid in session_ids:
            execute_query(engine, f'KILL {sid.session_id}')
        execute_query(engine, f'DROP DATABASE {db_name}')

    def create_database():
        '''Create database and alter its collation, compatibility level and recovery.'''
        # If file paths are not given, do not specify database/log files and their sizes
        create_query = f"CREATE DATABASE {db_name}"
        if db_path is not None and log_path is not None:
            db_file = path.splitext(path.basename(db_path))[0] + '_dat'
            log_file = path.splitext(path.basename(log_path))[0] + '_log'
            init_size_str = f'{init_size}MB'
            max_size_str = f'{max_size}MB'
            file_growth_str = f'{file_growth}MB'
            create_query = create_query + f"""
            ON ( NAME = {db_file}, FILENAME = '{db_path}', SIZE = {init_size_str},
                 MAXSIZE = {max_size_str}, FILEGROWTH = {file_growth_str} )
            LOG ON ( NAME = {log_file}, FILENAME = '{log_path}', SIZE = 50MB,
                 MAXSIZE = 5000MB, FILEGROWTH = 500MB )"""
        execute_query(engine, create_query)
        if collation is not None:
            execute_query(engine, f'ALTER DATABASE {db_name} COLLATE {collation}')
        if compatibility_level is not None:
            execute_query(
                engine,
                f'ALTER DATABASE {db_name} SET COMPATIBILITY_LEVEL = {compatibility_level}'
            )
        execute_query(engine, f'ALTER DATABASE {db_name} SET RECOVERY SIMPLE')

    with OperationManager('Creating database'):
        db_id = execute_query(engine, QUERIES.get('get_db_id'), variables=[db_name])[0][0]
        if db_id is not None:
            drop_database(db_id)
        database = execute_query(engine, QUERIES.get('get_existing_db'), variables=[db_name])
        if len(database) == 0:
            create_database()
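
# Illustrative usage sketch for create_db: recreating a database with explicit data and
# log files. The engine is assumed to be connected to 'master'; the database name, file
# paths, sizes, compatibility level and collation below are hypothetical example values.
def _example_create_db(master_engine: Engine):
    """(Re)create a hypothetical database 'STORE' with explicit file settings."""
    create_db(
        engine=master_engine,
        db_name='STORE',
        db_path='/var/opt/mssql/data/STORE.mdf',        # hypothetical data file path
        log_path='/var/opt/mssql/data/STORE_log.ldf',   # hypothetical log file path
        init_size=100,
        max_size=10000,
        file_growth=500,
        compatibility_level='140',
        collation='Latin1_General_CS_AS'
    )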