def purge(args: argparse.Namespace, auth: Sequence[str]) -> None:
    '''Use purge to start from scratch.

    Drops and re-creates every requested database, metadata included. Note
    that purge will not re-apply the schema.
    '''
    db_names = args.databases.split(',')
    check_container = not args.skip_container_check
    with _connection_timeout_wrapper(
            args,
            auth,
            databases=db_names,
            aws=args.aws,
            lower_timeout=args.lower_timeout,
            kill_blocking_connections=args.kill_blocking_connections):
        for name in db_names:
            logging.info('Dropping database %s', name)
            database_utils.mysql(
                'DROP DATABASE IF EXISTS `%s`;' % name,
                auth=auth,
                container_check=check_container,
            )
            logging.info('Creating database %s', name)
            database_utils.mysql(
                'CREATE DATABASE `%s` CHARACTER SET UTF8 COLLATE '
                'utf8_general_ci;' % name,
                auth=auth,
                container_check=check_container,
            )
            logging.info('Done creating database %s', name)
def migrate(args, auth, update_metadata=True):
    '''Performs the database schema migration.

    This command applies all scripts that have not yet been applied in order,
    and records their application in the metadata database. This command is
    idempotent and can be run any number of times.
    '''
    # Skip everything already recorded in the metadata database.
    latest_revision = _revision(args, auth) if update_metadata else 0
    for revision, name, path in _scripts():
        if revision <= latest_revision:
            continue
        if args.limit and revision > args.limit:
            break
        if args.noop:
            # Dry run: only report what would be installed.
            sys.stderr.write('Installing %s\n' % path)
            continue
        if name.startswith('test_') and not args.development_environment:
            # Test-only scripts are not applied outside development, but are
            # still recorded below so they are not retried every run.
            comment = "skipped"
        else:
            comment = "migrate"
            for dbname in args.databases.split(','):
                database_utils.mysql('source %s;' %
                                     database_utils.quote(path),
                                     dbname=dbname,
                                     auth=auth)
        if update_metadata:
            # Record the application so future runs skip this revision.
            database_utils.mysql(('INSERT INTO `Revision` '
                                  'VALUES(%d, CURRENT_TIMESTAMP, "%s");') %
                                 (revision, comment),
                                 dbname='_omegaup_metadata',
                                 auth=auth)
def migrate(args, auth, update_metadata=True):
    '''Performs the database schema migration.

    This command applies all scripts that have not yet been applied in order,
    and records their application in the metadata database. This command is
    idempotent and can be run any number of times.
    '''
    latest_revision = 0
    if update_metadata:
        # Highest revision recorded in `_omegaup_metadata`.`Revision`
        # (0 when none).
        latest_revision = _revision(args, auth)
    for revision, name, path in _scripts():
        if latest_revision >= revision:
            # Already applied by a previous run; skip.
            continue
        if args.limit and revision > args.limit:
            # Scripts come in ascending revision order, so once the limit is
            # exceeded no later script can qualify.
            break
        if args.noop:
            # Dry run: only report what would be installed.
            sys.stderr.write('Installing %s\n' % path)
        else:
            comment = "migrate"
            if name.startswith('test_') and not args.development_environment:
                # Test-only scripts are not applied outside development, but
                # are still recorded below so they are not retried every run.
                comment = "skipped"
            else:
                for dbname in args.databases.split(','):
                    database_utils.mysql('source %s;' %
                                         database_utils.quote(path),
                                         dbname=dbname,
                                         auth=auth)
            if update_metadata:
                # Record the application so future runs skip this revision.
                database_utils.mysql(
                    ('INSERT INTO `Revision` '
                     'VALUES(%d, CURRENT_TIMESTAMP, "%s");') %
                    (revision, comment),
                    dbname='_omegaup_metadata',
                    auth=auth)
def schema(args: argparse.Namespace, auth: Sequence[str]) -> None:
    '''Prints the schema without modifying the usual database tables.

    This does touch the database, but is restricted to a dummy database
    `_omegaup_schema`.
    '''
    scratch_db = '_omegaup_schema'
    check_container = not args.skip_container_check
    # Re-point the shared args at the scratch database and force a real
    # (non-noop), non-development run so the full schema gets applied.
    args.databases = scratch_db
    args.noop = False
    args.development_environment = False
    purge(args, auth)
    migrate(args, auth, update_metadata=False)
    # This is a false positive.
    # pylint: disable=no-member
    dump = database_utils.mysqldump(
        dbname=scratch_db,
        auth=auth,
        container_check=check_container,
    )
    sys.stdout.buffer.write(dump)
    database_utils.mysql(
        'DROP DATABASE `%s`;' % scratch_db,
        auth=auth,
        container_check=check_container,
    )
def migrate(args: argparse.Namespace,
            auth: Sequence[str],
            update_metadata: bool = True) -> None:
    '''Performs the database schema migration.

    This command applies all scripts that have not yet been applied in order,
    and records their application in the metadata database. This command is
    idempotent and can be run any number of times.
    '''
    latest_revision = 0
    if update_metadata:
        # Highest revision recorded in `_omegaup_metadata`.`Revision`.
        latest_revision = _revision(args, auth)
    scripts = _scripts()
    if not scripts:
        # If there are no scripts that need to be run, there is no need to even
        # touch the connection timeout.
        return
    databases = args.databases.split(',')
    with _connection_timeout_wrapper(
            args,
            auth,
            databases=databases,
            aws=args.aws,
            lower_timeout=args.lower_timeout,
            kill_blocking_connections=args.kill_blocking_connections):
        for revision, name, path in scripts:
            if latest_revision >= revision:
                # Already applied by a previous run; skip.
                continue
            if args.limit and revision > args.limit:
                # Scripts come in ascending revision order; stop early.
                break
            if args.noop:
                # Dry run: only report what would be installed.
                sys.stderr.write('Installing %s\n' % path)
                continue
            logging.info('Running script for revision %d...', revision)
            comment = "migrate"
            if name.startswith('test_') and not args.development_environment:
                # Test-only scripts are not applied outside development, but
                # are still recorded below so they are not retried every run.
                comment = "skipped"
            else:
                for dbname in databases:
                    database_utils.mysql(
                        'source %s;' % database_utils.quote(path),
                        dbname=dbname,
                        auth=auth,
                        container_check=not args.skip_container_check,
                    )
            if update_metadata:
                # Record the application so future runs skip this revision.
                database_utils.mysql(
                    ('INSERT INTO `Revision` '
                     'VALUES(%d, CURRENT_TIMESTAMP, "%s");') %
                    (revision, comment),
                    dbname='_omegaup_metadata',
                    auth=auth,
                    container_check=not args.skip_container_check,
                )
            logging.info('Done running script for revision %d', revision)
def _connection_timeout_wrapper(  # pylint: disable=too-many-arguments
        args: argparse.Namespace,
        auth: Sequence[str],
        databases: Sequence[str],
        aws: bool,
        lower_timeout: bool,
        kill_blocking_connections: bool = False) -> Iterator[None]:
    '''A context manager that temporarily lowers the wait timeout.

    This can also optionally kill any existing connections to the database. By
    doing so, the next time they connect, they will use the lowered wait
    timeout, which in turn should make this script be able to grab any locks
    within ~10s.
    '''
    def _set_timeout(timeout: Optional[int]) -> None:
        # Dispatch to the backend-appropriate way of setting the global
        # timeout; a None timeout restores the server default.
        if aws:
            _set_aws_rds_timeout(args, auth, timeout)
        else:
            _set_mysql_timeout(args, auth, timeout)

    try:
        if lower_timeout:
            logging.info('Lowering MySQL timeout...')
            _set_timeout(10)
        if kill_blocking_connections:
            logging.info('Killing all other MySQL connections...')
            # NOTE(review): _BLOCKING_PROCESSES_QUERY presumably selects the
            # ids of connections touching the listed databases -- confirm
            # against its definition elsewhere in this file.
            for line in database_utils.mysql(
                    (_BLOCKING_PROCESSES_QUERY %
                     (', '.join(f'"{dbname}"' for dbname in databases))),
                    dbname='mysql',
                    auth=auth).strip().split('\n'):
                if not line.strip():
                    # An empty result means no blocking connections.
                    continue
                try:
                    if aws:
                        # Managed RDS does not grant the KILL privilege; use
                        # the mysql.rds_kill stored procedure instead.
                        database_utils.mysql('CALL mysql.rds_kill(%s);' %
                                             line.split()[0],
                                             dbname='mysql',
                                             auth=auth)
                    else:
                        database_utils.mysql('KILL %s;' % line.split()[0],
                                             dbname='mysql',
                                             auth=auth)
                except subprocess.CalledProcessError:
                    # The command already logged the error.
                    pass
        else:
            # If we are not killing connections, at least sleep on it.
            time.sleep(10)
        yield
    finally:
        # Always restore the timeout, even if the wrapped body raised.
        if lower_timeout:
            logging.info('Restoring MySQL timeout...')
            _set_timeout(None)
def purge(args, auth):
    '''Use purge to start from scratch.

    Drops and re-creates each configured database, including the metadata
    database. Note that purge will not re-apply the schema.
    '''
    for name in args.databases.split(','):
        database_utils.mysql('DROP DATABASE IF EXISTS `%s`;' % name,
                             auth=auth)
        database_utils.mysql('CREATE DATABASE `%s` CHARACTER SET UTF8 '
                             'COLLATE utf8_general_ci;' % name,
                             auth=auth)
def purge(args, auth):
    '''Use purge to start from scratch.

    Drops & re-creates databases including the metadata. Note that purge will
    not re-apply the schema.
    '''
    # args.databases is a comma-separated list of database names.
    for dbname in args.databases.split(','):
        # IF EXISTS makes this safe to run against a fresh install.
        database_utils.mysql('DROP DATABASE IF EXISTS `%s`;' % dbname,
                             auth=auth)
        database_utils.mysql('CREATE DATABASE `%s` CHARACTER SET UTF8 COLLATE '
                             'utf8_general_ci;' % dbname,
                             auth=auth)
def exists(args, auth):  # pylint: disable=unused-argument
    '''Determines whether the metadata database is present.

    Exits with 1 (error) if the metadata database has not been installed. This
    is a helper command for Puppet.
    '''
    metadata_db = database_utils.mysql(
        'SHOW DATABASES LIKE "_omegaup_metadata";', auth=auth)
    if not metadata_db:
        sys.exit(1)
    revision_table = database_utils.mysql(
        'SHOW TABLES LIKE "Revision";',
        dbname='_omegaup_metadata',
        auth=auth)
    if not revision_table:
        sys.exit(1)
def exists(args, auth):  # pylint: disable=unused-argument
    '''Determines whether the metadata database is present.

    Exits with 1 (error) if the metadata database has not been installed. This
    is a helper command for Puppet.
    '''
    # An empty result from SHOW DATABASES means the metadata database is
    # missing entirely.
    if not database_utils.mysql('SHOW DATABASES LIKE "_omegaup_metadata";',
                                auth=auth):
        sys.exit(1)
    # The database may exist without the `Revision` tracking table; check it
    # separately.
    if not database_utils.mysql('SHOW TABLES LIKE "Revision";',
                                dbname='_omegaup_metadata',
                                auth=auth):
        sys.exit(1)
def exists(args: argparse.Namespace, auth: Sequence[str]) -> None:
    '''Determines whether the metadata database is present.

    Exits with 1 (error) if the metadata database has not been installed. This
    is a helper command for Puppet.
    '''
    del args  # unused
    has_database = database_utils.mysql(
        'SHOW DATABASES LIKE "_omegaup_metadata";', auth=auth)
    if not has_database:
        sys.exit(1)
    has_table = database_utils.mysql(
        'SHOW TABLES LIKE "Revision";',
        dbname='_omegaup_metadata',
        auth=auth)
    if not has_table:
        sys.exit(1)
def reset(args, auth):
    '''Forces the metadata table to be in a particular revision.

    Note that this does not apply or unapply any changes to the actual
    database, so use this only for testing or recovering a botched migration!
    '''
    ensure(args, auth)
    target = args.revision
    # Discard every recorded revision at or beyond the target...
    database_utils.mysql(
        'DELETE FROM `Revision` WHERE `id` >= %d;' % target,
        dbname='_omegaup_metadata',
        auth=auth)
    # ...then re-record the target itself, unless resetting to "nothing".
    if target > 0:
        database_utils.mysql(
            ('INSERT INTO `Revision` '
             'VALUES(%d, CURRENT_TIMESTAMP, "manual reset");') % target,
            dbname='_omegaup_metadata',
            auth=auth)
def _set_mysql_timeout(args: argparse.Namespace,
                       auth: Sequence[str],
                       timeout: Optional[int] = None) -> None:
    '''Set the MySQL timeouts.

    A None timeout restores the server default.
    '''
    del args  # unused
    value = 'DEFAULT' if timeout is None else str(timeout)
    for statement in ('SET GLOBAL interactive_timeout = %s;',
                      'SET GLOBAL wait_timeout = %s;'):
        database_utils.mysql(statement % value, dbname='mysql', auth=auth)
def reset(args, auth):
    '''Forces the metadata table to be in a particular revision.

    Note that this does not apply or unapply any changes to the actual
    database, so use this only for testing or recovering a botched migration!
    '''
    # Make sure the metadata database and table exist before touching them.
    ensure(args, auth)
    # Remove every recorded revision at or beyond the requested one.
    database_utils.mysql(
        'DELETE FROM `Revision` WHERE `id` >= %d;' % args.revision,
        dbname='_omegaup_metadata',
        auth=auth)
    if args.revision > 0:
        # Re-insert the requested revision, labeled so the manual intervention
        # is visible in the migration history.
        database_utils.mysql(
            ('INSERT INTO `Revision` '
             'VALUES(%d, CURRENT_TIMESTAMP, "manual reset");') % args.revision,
            dbname='_omegaup_metadata',
            auth=auth)
def ensure(args, auth):  # pylint: disable=unused-argument
    '''Creates both the metadata database and table, if they don't exist yet.
    '''
    database_utils.mysql('CREATE DATABASE IF NOT EXISTS `_omegaup_metadata`;',
                         auth=auth)
    # The `Revision` table tracks the migrations. |id| is the revision,
    # |applied| is the timestamp the operation was made and |comment| is a
    # human-readable comment about the migration: 'migrate' when applied
    # normally, 'skipped' when not applied due to not being run in a
    # development environment, and 'manual reset' when added as a result of
    # the 'reset' command.
    create_table = ('CREATE TABLE IF NOT EXISTS `Revision`'
                    '(`id` INTEGER NOT NULL PRIMARY KEY, '
                    '`applied` TIMESTAMP DEFAULT CURRENT_TIMESTAMP, '
                    '`comment` VARCHAR(50));')
    database_utils.mysql(create_table,
                         dbname='_omegaup_metadata',
                         auth=auth)
def ensure(args, auth):  # pylint: disable=unused-argument
    '''Creates both the metadata database and table, if they don't exist yet.
    '''
    # IF NOT EXISTS makes both statements idempotent, so ensure() can be
    # called freely before any metadata access.
    database_utils.mysql('CREATE DATABASE IF NOT EXISTS `_omegaup_metadata`;',
                         auth=auth)
    # This is the table that tracks the migrations. |id| is the revision,
    # |applied| is the timestamp the operation was made and |comment| is a
    # human-readable comment about the migration. It can be either 'migrate' if
    # it was applied normally, 'skipped' if it was not applied due to not being
    # run in a development environment, and 'manual reset' if it was added as a
    # result of the 'reset' command.
    database_utils.mysql(
        'CREATE TABLE IF NOT EXISTS `Revision`'
        '(`id` INTEGER NOT NULL PRIMARY KEY, '
        '`applied` TIMESTAMP DEFAULT CURRENT_TIMESTAMP, '
        '`comment` VARCHAR(50));',
        dbname='_omegaup_metadata',
        auth=auth)
def _connection_timeout_wrapper(
        args: argparse.Namespace,
        auth: Sequence[str],
        lower_timeout: str = 'no',
        kill_other_connections: bool = False) -> Iterator[None]:
    '''A context manager that temporarily lowers the wait timeout.

    This can also optionally kill any existing connections to the database. By
    doing so, the next time they connect, they will use the lowered wait
    timeout, which in turn should make this script be able to grab any locks
    within ~10s.

    Args:
        args: parsed command-line arguments, forwarded to the timeout setters.
        auth: MySQL authentication arguments.
        lower_timeout: 'mysql', 'aws', or 'no' (leave the timeout alone).
        kill_other_connections: whether to kill every other connection.
    '''
    def _set_timeout(timeout: Optional[int]) -> None:
        # Dispatch once on the backend so lowering and restoring cannot drift
        # apart; None restores the server default.
        if lower_timeout == 'mysql':
            _set_mysql_timeout(args, auth, timeout)
        elif lower_timeout == 'aws':
            _set_aws_rds_timeout(args, auth, timeout)

    try:
        if lower_timeout in ('mysql', 'aws'):
            logging.info('Lowering MySQL timeout...')
            _set_timeout(10)
        if kill_other_connections:
            logging.info('Killing all other MySQL connections...')
            for line in database_utils.mysql(
                    'SHOW FULL PROCESSLIST;', dbname='mysql',
                    auth=auth).strip().split('\n'):
                try:
                    database_utils.mysql(
                        'KILL %s;' % line.split()[0],
                        dbname='mysql',
                        auth=auth)
                except Exception:  # pylint: disable=broad-except
                    # A bare `except:` here used to swallow KeyboardInterrupt
                    # and SystemExit too; Exception is wide enough. The
                    # command already logged the error. There is one
                    # unkillable system thread and one dead thread (the one
                    # that issued the `SHOW FULL PROCESSLIST;` query).
                    pass
        else:
            # If we are not killing connections, at least sleep on it.
            time.sleep(10)
        yield
    finally:
        # Always restore the timeout, even if the wrapped body raised.
        if lower_timeout in ('mysql', 'aws'):
            logging.info('Restoring MySQL timeout...')
            _set_timeout(None)
def purge(args, auth):
    '''Use purge to start from scratch.

    Drops and re-creates each configured database, metadata included. Note
    that purge will not re-apply the schema.
    '''
    with _kill_other_connections_wrapper(args, auth):
        for name in args.databases.split(','):
            logging.info('Dropping database %s', name)
            database_utils.mysql('DROP DATABASE IF EXISTS `%s`;' % name,
                                 auth=auth)
            logging.info('Creating database %s', name)
            database_utils.mysql('CREATE DATABASE `%s` CHARACTER SET UTF8 '
                                 'COLLATE utf8_general_ci;' % name,
                                 auth=auth)
            logging.info('Done creating database %s', name)
def schema(args, auth):
    '''Prints the schema without modifying the usual database tables.

    This does touch the database, but is restricted to a dummy database
    `_omegaup_schema`.
    '''
    scratch_db = '_omegaup_schema'
    # Re-point the shared args at the scratch database and force a real
    # (non-noop), non-development run so the full schema gets applied.
    args.databases = scratch_db
    args.noop = False
    args.development_environment = False
    purge(args, auth)
    migrate(args, auth, update_metadata=False)
    # This is a false positive.
    # pylint: disable=no-member
    dump = database_utils.mysqldump(dbname=scratch_db, auth=auth)
    sys.stdout.buffer.write(dump)
    database_utils.mysql('DROP DATABASE `%s`;' % scratch_db, auth=auth)
def _revision(args, auth):
    '''Returns the latest revision that has been applied to the database.

    Returns 0 if no revision has been applied.
    '''
    ensure(args, auth)
    # COALESCE maps the empty-table case (MAX(id) IS NULL) to 0.
    result = database_utils.mysql(
        'SELECT COALESCE(MAX(id), 0) FROM `Revision`;',
        dbname='_omegaup_metadata',
        auth=auth)
    return int(result.strip())
def _revision(args, auth):
    '''Returns the latest revision that has been applied to the database.

    Returns 0 if no revision has been applied.
    '''
    # ensure() creates the metadata database/table if missing, so the query
    # below cannot fail on a fresh install.
    ensure(args, auth)
    # COALESCE maps the empty-table case (MAX(id) IS NULL) to 0.
    return int(database_utils.mysql(
        'SELECT COALESCE(MAX(id), 0) FROM `Revision`;',
        dbname='_omegaup_metadata',
        auth=auth).strip())
def _missing(args, auth):
    '''Gets all the missing privacy statements.'''
    for statement_type, git_object_id in _latest():
        count = int(database_utils.mysql(
            'SELECT COUNT(*) FROM `PrivacyStatements` WHERE '
            '`type` = "%s" AND `git_object_id` = "%s";' %
            (statement_type, git_object_id),
            dbname=args.database,
            auth=auth))
        # Only yield statements not yet recorded in the database.
        if count == 0:
            yield (statement_type, git_object_id)
def _missing(args, auth):
    '''Gets all the missing privacy statements.'''
    # NOTE(review): _latest() presumably yields (type, git object id) pairs
    # for the newest statements -- confirm against its definition elsewhere
    # in this file.
    for statement_type, git_object_id in _latest():
        # Skip statements that are already recorded in the database.
        if int(
                database_utils.mysql(
                    'SELECT COUNT(*) FROM `PrivacyStatements` WHERE '
                    '`type` = "%s" AND `git_object_id` = "%s";' %
                    (statement_type, git_object_id),
                    dbname=args.database,
                    auth=auth)) != 0:
            continue
        yield (statement_type, git_object_id)
def _revision(args: argparse.Namespace, auth: Sequence[str]) -> int:
    '''Returns the latest revision that has been applied to the database.

    Returns 0 if no revision has been applied.
    '''
    ensure(args, auth)
    # COALESCE maps the empty-table case (MAX(id) IS NULL) to 0.
    raw = database_utils.mysql(
        'SELECT COALESCE(MAX(id), 0) FROM `Revision`;',
        dbname='_omegaup_metadata',
        auth=auth,
        container_check=not args.skip_container_check,
    )
    return int(raw.strip())
def update_score_manually(self, problem_alias, assignment_alias):
    '''Set score = 100 manually in DB'''
    # NOTE(review): the aliases are interpolated into the SQL without any
    # escaping; acceptable only because this is a test helper fed trusted
    # fixture aliases -- do not call it with untrusted input.
    database_utils.mysql(('''
        UPDATE `Runs` AS r
        INNER JOIN `Problems` AS p ON p.problem_id = r.problem_id
        INNER JOIN `Problemsets` AS ps ON ps.problemset_id = r.problemset_id
        INNER JOIN `Assignments` AS a ON a.acl_id = ps.acl_id
        SET `score` = 1, `contest_score` = 100, `verdict` = 'AC',
            `status` = 'ready'
        WHERE p.alias = '%s' AND a.alias = '%s';
        ''') % (problem_alias, assignment_alias),
                         dbname='omegaup',
                         auth=self.mysql_auth())  # NOQA
def update_score_manually(self, problem_alias, assignment_alias):
    '''Set score = 100 manually in DB'''
    # NOTE(review): the aliases are spliced into the SQL unescaped; this is a
    # test helper, so only pass trusted fixture aliases to it.
    database_utils.mysql(('''
        UPDATE `Runs` AS r
        INNER JOIN `Problems` AS p ON p.problem_id = r.problem_id
        INNER JOIN `Problemsets` AS ps ON ps.problemset_id = r.problemset_id
        INNER JOIN `Assignments` AS a ON a.acl_id = ps.acl_id
        SET `score` = 1, `contest_score` = 100, `verdict` = 'AC',
            `status` = 'ready'
        WHERE p.alias = '%s' AND a.alias = '%s';
        ''') % (problem_alias, assignment_alias),
                         dbname='omegaup',
                         auth=self.mysql_auth())  # NOQA
def _missing(
        args: argparse.Namespace,
        auth: Sequence[str],
) -> Generator[Tuple[str, str], None, None]:
    '''Gets all the missing privacy statements.'''
    for statement_type, git_object_id in _latest():
        recorded = int(
            database_utils.mysql(
                'SELECT COUNT(*) FROM `PrivacyStatements` WHERE '
                '`type` = "%s" AND `git_object_id` = "%s";' %
                (statement_type, git_object_id),
                dbname=args.database,
                auth=auth,
                container_check=not args.skip_container_check,
            ))
        # Only yield statements not yet recorded in the database.
        if recorded == 0:
            yield (statement_type, git_object_id)
def _kill_other_connections_wrapper(args, auth):
    '''A context manager that temporarily lowers the wait timeout.

    This also kills any existing connections to the database. By doing so, the
    next time they connect, they will use the lowered wait timeout, which in
    turn should make this script be able to grab any locks within ~10s.

    When args.kill_other_connections is false this is a no-op wrapper.
    '''
    if not args.kill_other_connections:
        yield
        return
    try:
        logging.info('Lowering MySQL timeout...')
        database_utils.mysql('SET GLOBAL interactive_timeout = 10;',
                             dbname='mysql',
                             auth=auth)
        database_utils.mysql('SET GLOBAL wait_timeout = 10;',
                             dbname='mysql',
                             auth=auth)
        logging.info('Killing all other MySQL connections...')
        for line in database_utils.mysql('SHOW FULL PROCESSLIST;',
                                         dbname='mysql',
                                         auth=auth).strip().split('\n'):
            try:
                database_utils.mysql('KILL %s;' % line.split()[0],
                                     dbname='mysql',
                                     auth=auth)
            except Exception:  # pylint: disable=broad-except
                # A bare `except:` here used to swallow KeyboardInterrupt and
                # SystemExit too; Exception is wide enough. The command
                # already logged the error. There is one unkillable system
                # thread and one dead thread (the one that issued the
                # `SHOW FULL PROCESSLIST;` query).
                pass
        yield
    finally:
        # Always restore the timeouts, even if the wrapped body raised.
        logging.info('Restoring MySQL timeout...')
        database_utils.mysql('SET GLOBAL wait_timeout = DEFAULT;',
                             dbname='mysql',
                             auth=auth)
        database_utils.mysql('SET GLOBAL interactive_timeout = DEFAULT;',
                             dbname='mysql',
                             auth=auth)