def create_local_config_base(config_filename: str):
    """Ensure a local config file exists for the development environment.

    If the configuration file named by *config_filename* defines a
    'LOCAL' key, create a local config file at that path (unless one
    already exists). The local config file is used in development to
    store Django related passwords, secrets and other values that are
    not suitable for versioning.
    """
    with OperationManager('Initializing local config file'):
        if not Path(config_filename).is_file():
            print("File not found: " + config_filename)
            return

        conf = load_json_conf(config_filename, key='')
        target = conf.get('LOCAL', None)

        if not target:
            print("Local config not defined in {}".format(config_filename))
            return
        if Path(target).is_file():
            print("Local config file already exists")
            return
        if not isinstance(target, str):
            # Only a string path can be written to; silently skip others.
            return

        try:
            filtered = filter_nested_dict(conf, '$')
            with open(target, 'w+', encoding='utf-8') as out_file:
                json.dump(filtered, out_file)
            print('Local config file created')
        except Exception as err:
            # Best-effort: report the problem instead of aborting.
            print('Problem creating local config file: {}'.format(err))
def execute_action(action_name: str, config_filename: str, *args, **kwargs):
    """Prepare and execute given action.

    Does the logging and error handling for preparation.

    Arguments
    ---------
    action_name: str
        The name of the action to execute
    config_filename: str
        The name of the config file for context creation.
    """
    logger.info('------')
    with OperationManager('Starting to execute "' + action_name + '"'):
        ctx = Context(config_filename)
        allowed = ctx.configuration.get('allowed_actions', [])
        # Validity check: bail out when the action is not permitted.
        if not check_action_validity(action_name, allowed):
            return
        action = registered_actions.get(action_name)
        # User confirmation gate before running the action itself.
        if not action.pre_exec_check(ctx):
            return
        return action.function(ctx, *args, **kwargs)
def deploy_sqlfiles(engine: Engine, data_src: Union[str, list], message: str,
                    display_output: bool = False,
                    scripting_variables: dict = None) -> bool:
    """Run every SQL script file found in the given directory/filelist
    and print the executed file names.

    If any file cannot be deployed after multiple tries, raise an
    exception and list the failed files to the user.

    Parameters
    ----------
    engine
        SQL Alchemy engine.
    data_src
        If data_src is string: path of directory holding the SQL script files.
        If data_src is list: list of filepaths referencing to the SQL scripts.
    message
        Message passed to OperationManager.
    display_output
        Indicator to print script output.
    scripting_variables
        Variables passed to SQL script.

    Returns
    -------
    bool
        True when scripts were found and deployed, False when none found.

    Raises
    ------
    ValueError
        If engine is not instance of sqlalchemy.engine.Engine.
    RuntimeError
        If any of the files fail to deploy after multiple tries.
    """
    with OperationManager(message):
        # Guard against a common custom-action mistake: passing a config
        # dict where the engine belongs.
        if isinstance(engine, dict):
            raise ValueError(
                "First parameter of function 'deploy_sqlfiles' should be instance of sqlalchemy engine. Check your custom actions!"
            )

        script_files = sql_files_found(data_src)
        file_count = len(script_files)
        if file_count == 0:
            return False

        failures = sql_file_loop(deploy_sql_from_file, engine,
                                 display_output, scripting_variables,
                                 file_list=script_files, max_loop=file_count)

        if failures:
            error_msg = "Failed to deploy the following files:\n{}".format(
                '\n'.join(failures.keys()))
            error_msg = error_msg + '\nSee log for error details.'
            # Full per-object error output goes to the debug log only.
            for fail_object, fail_messages in failures.items():
                logger.debug(f'----- Error for object {fail_object} -----')
                logger.debug(''.join(fail_messages))
            raise RuntimeError(error_msg)

        return True
def create_new_project(project_name: str, init_location: str, message: str):
    """Create the project root directory and populate it.

    Aborts without changes when the target directory already exists.
    """
    with OperationManager(message):
        project_root = path.join(init_location, project_name)
        if path.exists(project_root):
            print(f'Folder {project_root} already exists. Terminating.')
        else:
            makedirs(project_root)
            populate_project(project_root, PROJECT_STRUCTURE)
            print(f'Project {project_root} created.')
def update_db_object_properties(engine: Engine, schema_list: list):
    """Update extended properties from file to database.

    Arguments
    ---------
    engine : sqlalchemy.engine.Engine
        SQL Alchemy engine.
    schema_list : list of str
        List of schemas to be documented.

        - If None, all schemas are updated.
        - If empty list, nothing is updated.
        - Else schemas of schema_list are updated.
    """
    with OperationManager('Updating extended properties'):
        if schema_list is None:
            # Default to every schema except the excluded ones.
            schema_list = [name for name in get_schema_names(engine)
                           if name not in EXCLUDED_SCHEMAS]
        elif not schema_list:
            logger.warning(
                'No schemas allowed for update. Check variable "metadata_allowed_schemas".'
            )
            return

        logger.debug(
            f'Updating extended properties for schemas {", ".join(schema_list)}'
        )

        for object_type, object_info in DB_OBJECTS.items():
            existing_metadata = query_metadata(engine, object_info,
                                               schema_list)
            source_file = object_info['file']
            if not path.exists(source_file):
                logger.warning(
                    f"Cannot update extended properties for {object_type}s. File {source_file} does not exist."
                )
                continue
            try:
                with open(source_file, 'r', encoding='utf-8') as f:
                    documented_properties = json.load(f)
            except Exception as err:
                raise Exception(
                    f'Failed to read extended properties for {object_type}'
                ) from err

            for object_name, extended_properties in documented_properties.items():
                # Object names are '<schema>.<object>'; only update
                # objects in the allowed schemas.
                if object_name.split('.')[0] not in schema_list:
                    continue
                object_metadata = existing_metadata.get(object_name)
                for property_name, property_value in extended_properties.items():
                    exec_update_extended_properties(engine, object_name,
                                                    object_metadata,
                                                    property_name,
                                                    property_value)
def print_alembic_version(engine: Engine, alembic_version_table: str):
    """Print last deployed revision number from Alembic version table."""
    with OperationManager('Checking Alembic version from database'):
        query = f"SELECT * FROM {alembic_version_table}"
        try:
            version = execute_query(engine=engine, query=query)[0][0]
            logger.info("Alembic version: " + version)
        except IndexError:
            # No rows in the version table -> nothing deployed yet.
            logger.info(
                f"Table {alembic_version_table} is empty. No deployed revisions."
            )
def print_git_version(engine: Engine, git_table_schema: str, git_table: str):
    """Print repository, branch and version from the Git version table.

    Failures are logged (error summary + debug detail) instead of raised.
    """
    with OperationManager('Checking Git version from database'):
        try:
            git_version_table = _sqla_git_table(MetaData(engine),
                                                git_table_schema, git_table)
            repository, branch, version = execute_query(
                engine=engine, query=git_version_table.select())[0]
            logger.info(f"Repository: {repository}")
            logger.info(f"Branch: {branch}")
            logger.info(f"Version: {version}")
        except Exception as error:
            logger.error(
                'Failed to read GIT version table. See log for detailed error message.'
            )
            logger.debug(error)
def create_db_login(engine: Engine, login_name: str, login_password: str,
                    default_db: str):
    '''First, kill all sessions related to login. Second, drop login.
    Third, create login with given password and default database.

    Overwrite example:
    ------------------
    @action('create-db-login', True, ['init'])
    def create_db_login(context):
        config_dict = merge_config_files(context.config_filename)
        login_name = config_dict.get('DJANGO', {}).get('KP_REP_DB_USRNAME', '$')
        login_password = config_dict.get('DJANGO', {}).get('KP_REP_DB_PW', '$')
        default_db = config_dict.get('DJANGO', {}).get('KP_REP_DB_NAME', '$')
        op.create_db_login(context.engine, login_name, login_password, default_db)

    Arguments
    ---------
    engine
        SQL Alchemy engine.
    login_name
        Login name.
    login_password
        Login password.
    default_db
        Default database of login.
    '''
    with OperationManager('Creating database login'):
        login = execute_query(engine, QUERIES.get('get_login_name'),
                              variables=[login_name])
        login_exists = len(login) > 0
        if login_exists:
            # Refuse to recreate a login that is already tied to a
            # different default database.
            if len(login[0]) > 0 and login[0][1] != default_db:
                raise Exception(f'There already exists a database: {default_db} assigned to a login: {login_name}.')
        # Kill any open sessions of the login before dropping it.
        session_ids = execute_query(engine, QUERIES.get('get_login_session'),
                                    variables=[login_name])
        for sid in session_ids:
            execute_query(engine, f'KILL {sid.session_id}')
        if login_exists:
            execute_query(engine, f'DROP LOGIN {login_name}')
        if login_password == 'SALASANA':
            logger.info(f'Creating login {login_name} with default password.')
        # BUG FIX: the statement previously hard-coded the placeholder
        # password '******' and never used the login_password parameter.
        # NOTE(security): name, password and database are interpolated
        # into raw T-SQL; values must come from trusted configuration.
        create_query = f"""CREATE LOGIN {login_name} WITH PASSWORD='{login_password}', DEFAULT_DATABASE=[{default_db}], DEFAULT_LANGUAGE=[us_english], CHECK_EXPIRATION=OFF, CHECK_POLICY=OFF"""
        execute_query(engine, create_query)
def update_git_version(engine: Engine, git_table_schema: str, git_table: str,
                       repository: str = None,
                       git_version_info_path: str = None):
    """Store the Git remote, branch and commit information to database.

    Alembic version does not catch changes to views and procedures, but
    git version catches.
    """
    with OperationManager("Updating Git version table"):
        try:
            fallback_info_path = PurePath(os.getcwd(), "git_version.json")
            # Prefer an explicit info file, then the default one in the
            # working directory, and finally ask git itself.
            if git_version_info_path:
                branch, commit, repository = _load_git_commit_info_json(
                    git_version_info_path=git_version_info_path)
            elif Path(fallback_info_path).is_file():
                branch, commit, repository = _load_git_commit_info_json(
                    git_version_info_path=fallback_info_path)
            else:
                branch, commit = _get_git_commit_info()
                if repository is None:
                    origin_url_command = split(
                        "git config --get remote.origin.url")
                    repository = check_output(
                        origin_url_command).decode("utf-8").strip()
            if repository is None:
                repository = ''
        except Exception as error:
            logger.error(
                'Failed to retrieve Git commit. See log for detailed error message.'
            )
            logger.debug(error)
            return
        try:
            logger.info("GIT version table: " + git_table_schema + "." + git_table)
            _update_git_db_record(engine, git_table_schema, git_table,
                                  repository, branch, commit)
        except Exception as error:
            logger.error(
                'Failed to update Git version table. See log for detailed error message.'
            )
            logger.debug(error)
def drop_sqlfile_objects(engine: Engine, object_type: str,
                         data_src: Union[str, list], message: str) -> bool:
    """Drop all the objects created in SQL script files of an directory.

    The naming of the files should be consistent!

    Parameters
    ----------
    engine
        SQL Alchemy engine.
    object_type
        Type of database object.
    data_src
        If data_src is string: path of directory holding the SQL script files.
        If data_src is list: list of filepaths referencing to the SQL
        script locations.
    message
        Message passed to OperationManager.

    Returns
    -------
    bool
        True when scripts were found and dropped, False when none found.
        (Added for consistency with deploy_sqlfiles.)

    Raises
    ------
    RuntimeError
        If any of the files in given directory/filelist fail to drop
        after multiple tries.
    """
    with OperationManager(message):
        files = sql_files_found(data_src)
        n_files = len(files)
        if n_files == 0:
            return False

        failed = sql_file_loop(drop_sql_from_file, engine, object_type,
                               file_list=files, max_loop=n_files)

        if len(failed) > 0:
            error_msg = "Failed to drop the following files:\n{}".format(
                '\n'.join(failed.keys()))
            for fail_messages in failed.values():
                error_msg = error_msg + ''.join(fail_messages)
            raise RuntimeError(error_msg)

        # CONSISTENCY FIX: sibling deploy_sqlfiles returns True on
        # success; previously this function implicitly returned None.
        return True
def update_file_object_properties(engine: Engine, schema_list: list):
    """Write extended properties to JSON file.

    If project doesn't have docs directory, create it.

    - If schema_list is None, all schemas are written to file.
    - If schema_list is empty list, nothing is written to file.
    - Else schemas of schema_list are written to file.

    Arguments
    ---------
    engine : sqlalchemy.engine.Engine
        SQL Alchemy engine.
    schema_list : list of str
        List of schemas to be documented.
    """
    with OperationManager('Fetching extended properties to files'):
        if not path.exists(DOCS_DIR):
            makedirs(DOCS_DIR, exist_ok=True)

        if schema_list is None:
            # Default to every schema except the excluded ones.
            schema_list = [name for name in get_schema_names(engine)
                           if name not in EXCLUDED_SCHEMAS]
        elif not schema_list:
            logger.warning(
                'No schemas allowed for document. Check variable "metadata_allowed_schemas".'
            )
            return

        logger.debug(
            f'Fetching extended properties for schemas {", ".join(schema_list)}'
        )

        for object_info in DB_OBJECTS.values():
            existing_metadata = query_metadata(engine, object_info,
                                               schema_list,
                                               properties_only=True)
            with open(object_info['file'], 'w+', encoding='utf-8',
                      newline='') as target:
                json.dump(existing_metadata, target, indent=4,
                          ensure_ascii=False)

        logger.debug('Extended properties fetched')
def create_db_permissions(conn_info: dict):
    """Set permissions for DB Login. Used mainly in Django projects."""
    with OperationManager('Setting login permissions'):
        permission_script = 'database/create_db_permissions.sql'
        invoke_sqlcmd(conn_info, infile=permission_script)
def create_db(engine: Engine, db_name: str, db_path: str, log_path: str,
              init_size: int, max_size: int, file_growth: int,
              compatibility_level: str, collation: str):
    '''First, kill all database sessions. Second, drop database if it
    exists. Third, create database according to given parameters.

    Arguments
    ---------
    engine
        SQL Alchemy engine connected to 'master' database.
    db_name
        Name of the database.
    db_path
        Path to database data file.
    log_path
        Path to database log file.
    init_size
        Initial size of database data file (MB).
    max_size
        Max size of database data file (MB).
    file_growth
        How much the database data file will grow when it runs out of
        space (MB).
    compatibility_level
        Compatibility level of database.
    collation
        Collation of database.
    '''

    def drop_database(database_id: Union[str, int]):
        '''Kill all connections to database and connections made by given
        login. Drop login and database.
        '''
        for session in execute_query(engine, QUERIES.get('get_db_session'),
                                     variables=[database_id]):
            execute_query(engine, f'KILL {session.session_id}')
        execute_query(engine, f'DROP DATABASE {db_name}')

    def create_database():
        '''Create database and alter its collation, compatibility level
        and recovery.'''
        # If filepaths are not given - do not specify database/log files
        # and their size.
        create_query = f"CREATE DATABASE {db_name}"
        if db_path is not None and log_path is not None:
            data_file = path.splitext(path.basename(db_path))[0] + '_dat'
            log_file = path.splitext(path.basename(log_path))[0] + '_log'
            create_query += f"""
            ON ( NAME = {data_file},
                FILENAME = '{db_path}',
                SIZE = {init_size}MB,
                MAXSIZE = {max_size}MB,
                FILEGROWTH = {file_growth}MB )
            LOG ON ( NAME = {log_file},
                FILENAME = '{log_path}',
                SIZE = 50MB,
                MAXSIZE = 5000MB,
                FILEGROWTH = 500MB )"""
        execute_query(engine, create_query)
        if collation is not None:
            execute_query(engine,
                          f'ALTER DATABASE {db_name} COLLATE {collation}')
        if compatibility_level is not None:
            execute_query(
                engine,
                f'ALTER DATABASE {db_name} SET COMPATIBILITY_LEVEL = {compatibility_level}'
            )
        execute_query(engine, f'ALTER DATABASE {db_name} SET RECOVERY SIMPLE')

    with OperationManager('Creating database'):
        db_id = execute_query(engine, QUERIES.get('get_db_id'),
                              variables=[db_name])[0][0]
        if db_id is not None:
            drop_database(db_id)
        existing = execute_query(engine, QUERIES.get('get_existing_db'),
                                 variables=[db_name])
        if len(existing) == 0:
            create_database()
def create_db_structure(conn_info: dict):
    """Create DB structure, that is schemas, tables and constraints."""
    with OperationManager('Creating structure'):
        structure_script = 'database/create_db_structure.sql'
        invoke_sqlcmd(conn_info, infile=structure_script)
def downgrade_db_to_alembic_base(config_filename: str):
    """Run Alembic 'downgrade base' in the same python-process by calling
    Alembic's API.
    """
    with OperationManager('Downgrading to base'):
        cfg = alembic_config(config_filename)
        command.downgrade(cfg, 'base')
def upgrade_db_to_latest_alembic_version(config_filename: str):
    """Run Alembic 'upgrade head' in the same python-process by calling
    Alembic's API.
    """
    with OperationManager("Running all upgrade migrations"):
        cfg = alembic_config(config_filename)
        command.upgrade(cfg, 'head')