def cli(ctx):
    """
    DIRBS script to initialize, configure and upgrade the PostgreSQL schema.

    :param ctx: current cli context obj
    """
    cfg = common.ensure_config(ctx)
    log = logging.getLogger('dirbs.db')
    subcommand = ctx.invoked_subcommand
    dirbs.logging.setup_file_logging(
        cfg.log_config,
        'dirbs-db_{0}_{1}'.format(subcommand, datetime.datetime.now().strftime('%Y%m%d')))

    # check subcommand should try and fail regardless of these checks.
    # install_roles subcommand installs these roles so can't do these checks
    if subcommand in ('install_roles', 'check'):
        return

    with utils.create_db_connection(cfg.db_config) as db_conn:
        try:
            utils.warn_if_db_superuser(db_conn)
            utils.verify_db_roles_installed(db_conn)
            utils.verify_db_role_for_job(db_conn, 'dirbs_core_power_user')
            utils.verify_db_ownership(db_conn)
            utils.verify_hll_schema(db_conn)
            if subcommand != 'install':
                # install subcommand creates the schema, so can't check it here
                utils.verify_core_schema(db_conn)
                utils.verify_db_search_path(db_conn)
        except (utils.DatabaseRoleCheckException, utils.DatabaseSchemaException) as exc:
            log.error(str(exc))
            sys.exit(1)
def check(ctx):
    """
    Checks whether DB schema matches software DB version.

    :param ctx: current cli context obj
    """
    db_config = common.ensure_config(ctx).db_config
    log = logging.getLogger('dirbs.db')
    log.info('Querying DB schema version for DB %s on host %s',
             db_config.database, db_config.host)
    with utils.create_db_connection(db_config) as db_conn:
        db_version = utils.query_db_schema_version(db_conn)
        log.info('Code schema version: %d', code_db_schema_version)
        if db_version is None:
            log.error('DB has not been clean installed. Maybe this DB pre-dates the version checking?')
            log.error('DB schema version unknown.')
            # Exit code is used to determine if schema has(exit code:0) or has not(exit code:1) been installed.
            # Non-zero exit code triggers installation of schema at entrypoint of processing container.
            sys.exit(1)
        # sys.exit above never returns, so the remaining cases can be flat
        log.info('DB schema version: %s', str(db_version))
        if db_version < code_db_schema_version:
            log.error('DB schema older than code.')
        elif db_version > code_db_schema_version:
            log.error('DB schema newer than code.')
        else:
            log.info('Schema versions match between code and DB.')
def repartition(ctx, num_physical_shards):
    """
    Repartition DIRBS Core tables into a new number of physical IMEI shards.

    :param ctx: current cli context obj
    :param num_physical_shards: target number of physical IMEI shards
    """
    logger = logging.getLogger('dirbs.db')
    config = common.ensure_config(ctx)
    # Each entry pairs a human-readable label with the partition_utils helper
    # that repartitions it. This replaces nine copy-pasted log/call/log
    # triplets; the order matches the original call sequence exactly.
    repartition_steps = [
        ('classification_state table', partition_utils.repartition_classification_state),
        ('registration_list table', partition_utils.repartition_registration_list),
        ('stolen_list table', partition_utils.repartition_stolen_list),
        ('pairing_list table', partition_utils.repartition_pairing_list),
        ('blacklist table', partition_utils.repartition_blacklist),
        ('notifications_lists table', partition_utils.repartition_notifications_lists),
        ('exceptions_lists table', partition_utils.repartition_exceptions_lists),
        ('network_imeis table', partition_utils.repartition_network_imeis),
        ('monthly_network_triplets tables', partition_utils.repartition_monthly_network_triplets),
    ]
    with utils.create_db_connection(config.db_config) as conn, conn.cursor() as cursor:
        logger.info('Repartitioning DB schema in DB %s on host %s into %d physical shards...',
                    config.db_config.database, config.db_config.host, num_physical_shards)
        for label, repartition_fn in repartition_steps:
            logger.info('Re-partitioning %s...', label)
            repartition_fn(conn, num_physical_shards=num_physical_shards)
            logger.info('Re-partitioned %s', label)
        # Update schema metadata table so the rest of the system knows the new shard count
        cursor.execute('UPDATE schema_metadata SET phys_shards = %s', [num_physical_shards])
def _process_batch_size(ctx, param, val):
    """
    Process batch size cli option.

    :param ctx: current cli context obj
    :param param: click parameter (unused)
    :param val: batch size value
    :return: batch size
    """
    logger = logging.getLogger('dirbs.import')
    config = common.ensure_config(ctx)
    if val is not None:
        if val < 0:
            # BUG FIX: the original logged that the invalid value was ignored
            # but still stored it in the config. Now a negative batch size is
            # genuinely ignored and config keeps its previous value.
            logger.warning('Ignoring invalid value %d for --batch-size', val)
        else:
            config.import_config.batch_size = val
    return val
def cli(ctx):
    """DIRBS Script to run whitelist jobs."""
    log = logging.getLogger('dirbs.whitelist')
    cfg = common.ensure_config(ctx)
    # Bail out before running any command unless whitelist mode is explicitly
    # enabled (identity test: only a literal False triggers the exit).
    if cfg.operational_config.activate_whitelist is False:
        log.info('Whitelist operation mode is currently not enabled, exiting...')
        sys.exit(1)
def install(ctx):
    """
    Installs latest schema on clean DB instance.

    :param ctx: current cli context obj
    :return: status
    """
    log = logging.getLogger('dirbs.db')
    cfg = common.ensure_config(ctx)
    db_cfg = cfg.db_config
    with utils.create_db_connection(db_cfg) as db_conn, db_conn.cursor() as cursor:
        log.info('Creating initial base DB schema in DB %s on host %s',
                 db_cfg.database, db_cfg.host)
        # Check if there is stuff already in there
        cursor.execute("""SELECT COUNT(*) FROM pg_class c JOIN pg_namespace n ON n.oid = c.relnamespace WHERE n.nspname = current_schema()""")
        if cursor.fetchone()[0] != 0:
            log.error('Can\'t install latest schema into a non-clean DB')
            log.error('Instead, use dirbs-db upgrade to upgrade the schema to the latest version')
            sys.exit(1)

        # Set our role here so that new objects get created with dirbs_core_power_user as owner by default
        with utils.db_role_setter(db_conn, role_name='dirbs_core_power_user'):
            # First we setup the schema, search path etc.
            cursor.execute(pkgutil.get_data('dirbs', 'sql/base/on_db_creation.sql'))
            # Install the base schema for v19 and set current version to 19
            base_schema = 'sql/base/v19_schema.sql'
            log.info('Restoring base v19 schema from SQL file: %s', base_schema)
            cursor.execute(pkgutil.get_data('dirbs', base_schema))
            utils.set_db_schema_version(db_conn, min_schema_version)
            log.info('Successfully created base v{0:d} schema. Scheduling dirbs-db upgrade...'
                     .format(min_schema_version))

    # Then we call upgrade to complete the process
    if code_db_schema_version > min_schema_version:
        return ctx.invoke(upgrade)
    # Can't do anything until we know the schema is the right version
    _store_job_metadata(cfg, 'install')
    return 0
def _process_disable_rat_import(ctx, param, val):
    """
    Process disable rat import cli option.

    :param ctx: current cli context obj
    :param param: param
    :param val: flag value
    :return: flag value
    """
    cfg = common.ensure_config(ctx)
    if val is None:
        return val
    # The flag disables RAT import, so the stored setting is its negation
    cfg.region_config.import_rat_data = not val
    return val
def _validate_operator_id(ctx, param, val):
    """
    Validate and normalise an operator ID supplied on the command line.

    :param ctx: current cli context obj
    :param param: param
    :param val: raw operator id value
    :return: lower-cased operator id
    :raises click.BadParameter: if the id is too long or unknown
    """
    log = logging.getLogger('dirbs.import')
    if len(val) > 16:
        raise click.BadParameter('Operator ID must be 16 chars or less')

    cfg = common.ensure_config(ctx)
    known_ids = [op.id for op in cfg.region_config.operators]
    normalised = val.lower()
    if normalised not in known_ids:
        raise click.BadParameter('\'{0}\' not in {1}'.format(val, known_ids))
    if val != normalised:
        log.warning('operator_id: {0} has been changed to lower case: {1}'.format(val, normalised))
    return normalised
def _process_batch_size(ctx, param, val):
    """
    Process batch size cli option.

    :param ctx: current cli context obj
    :param param: param
    :param val: batch size value
    :return: batch size
    """
    logger = logging.getLogger('dirbs.import')
    config = common.ensure_config(ctx)
    if val is not None:
        if val < 0:
            # BUG FIX: the original logged that the invalid value was ignored
            # but still stored it in the config. Now a negative batch size is
            # genuinely ignored and config keeps its previous value.
            logger.warning('Ignoring invalid value %d for --batch-size', val)
        else:
            config.import_config.batch_size = val
    return val
def install_roles(ctx):
    """Creates DIRBS Core PostgreSQL base roles if they don't exist."""
    log = logging.getLogger('dirbs.db')
    cfg = common.ensure_config(ctx)
    # Allow install_roles to work even if database doesn't exist by using the postgres DB
    role_db_config = copy.copy(cfg.db_config)
    role_db_config.database = 'postgres'
    with utils.create_db_connection(role_db_config) as db_conn, db_conn.cursor() as cursor:
        if not utils.can_db_user_create_roles(db_conn):
            log.error('Current PostgreSQL user does not have the CREATEROLE privilege. Please run this command '
                      'as a normal user with the CREATEROLE privilege granted (preferred) or as a superuser')
            sys.exit(1)

        log.info('Creating DIRBS Core PostgreSQL roles...')
        cursor.execute(pkgutil.get_data('dirbs', 'sql/base/roles.sql'))
        log.info('Created DIRBS Core PostgreSQL roles')
def upgrade(ctx):
    """
    Upgrades the current DB schema to the version supported by this code using migration scripts.

    For each missing version a Python migrator class is preferred; if none
    exists, a plain SQL migration script is executed instead. Each version
    step is committed individually, and a full ANALYZE runs afterwards.

    :param ctx: current cli context obj
    """
    logger = logging.getLogger('dirbs.db')
    config = common.ensure_config(ctx)
    db_config = config.db_config
    needs_analyze = False
    with utils.create_db_connection(db_config) as conn:
        logger.info('Querying DB schema version for DB %s on host %s',
                    db_config.database, db_config.host)
        with conn.cursor() as cur:
            try:
                version = utils.query_db_schema_version(conn)
            except ProgrammingError:
                # FIX: logger.warn is deprecated (removed in Python 3.13) -- use warning()
                logger.warning('Could not determine current schema version. Assuming no version')
                version = None

            if version is None:
                logger.error('DB currently not installed or version number could not be determined. Can\'t upgrade')
                sys.exit(1)
            if version < min_schema_version:
                logger.error('Current DB schema is older than DIRBS 4.0.0. Can\'t upgrade')
                sys.exit(1)
            if version > code_db_schema_version:
                logger.error('DB schema newer than code. Can\'t upgrade')
                sys.exit(1)

            if version != code_db_schema_version:
                logger.info('Upgrading DB schema from version %d to %d',
                            version, code_db_schema_version)
                # If we're upgrading, make sure we schedule a full ANALYZE outside the transaction later
                needs_analyze = True
                # Set our role here so that new objects get created with dirbs_core_power_user as owner by default
                with utils.db_role_setter(conn, role_name='dirbs_core_power_user'):
                    for old_version in range(version, code_db_schema_version):
                        new_version = old_version + 1
                        # Check if there is a special migration class, otherwise use standard SQL file
                        try:
                            module_name = 'dirbs.schema_migrators.v{0}_upgrade'.format(new_version)
                            module = importlib.import_module(module_name)
                            logger.info('Running Python migration script: %s', module_name)
                            migrator = module.migrator()
                            migrator.upgrade(conn)
                        except ImportError:
                            # FIX: dropped unused 'as ex' binding.
                            # NOTE(review): this also swallows ImportErrors raised
                            # *inside* a migrator module -- confirm intended.
                            script_name = 'sql/migration_scripts/v{0:d}_upgrade.sql'.format(new_version)
                            logger.info('Running SQL migration script: %s', script_name)
                            sql = pkgutil.get_data('dirbs', script_name)
                            cur.execute(sql)

                        # We commit after every version upgrade
                        utils.set_db_schema_version(conn, new_version)
                        conn.commit()

                logger.info('Successfully updated schema - DB schema version is now %d',
                            code_db_schema_version)
                # Can't do anything until we know the schema is the right version
                _store_job_metadata(config, 'upgrade')
            else:
                logger.info('DB schema is already latest version')

            # Schedule a full ANALYZE at the end of an upgrade
            if needs_analyze:
                logger.info('Running ANALYZE of entire database after upgrade...')
                cur.execute('ANALYZE')
                logger.info('Finished running ANALYZE of entire database after upgrade')
def _process_disable_rat_import(ctx, param, val):
    """
    Process disable rat import cli option.

    :param ctx: current cli context obj
    :param param: param
    :param val: flag value
    :return: flag value
    """
    cfg = common.ensure_config(ctx)
    if val is None:
        return val
    # The flag disables RAT import, so the stored setting is its negation
    cfg.region_config.import_rat_data = not val
    return val