def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "repositorysearchscore",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("repository_id", sa.Integer(), nullable=False),
        sa.Column("score", sa.BigInteger(), nullable=False),
        sa.Column("last_updated", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(
            ["repository_id"],
            ["repository.id"],
            name=op.f("fk_repositorysearchscore_repository_id_repository"),
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_repositorysearchscore")),
    )
    # One score row per repository, plus an index for ordering by score.
    op.create_index(
        "repositorysearchscore_repository_id",
        "repositorysearchscore",
        ["repository_id"],
        unique=True,
    )
    op.create_index(
        "repositorysearchscore_score",
        "repositorysearchscore",
        ["score"],
        unique=False,
    )
    # ### end Alembic commands ###

    # ### population of test data ### #
    tester.populate_table(
        "repositorysearchscore",
        [
            ("repository_id", tester.TestDataType.Foreign("repository")),
            ("score", tester.TestDataType.BigInteger),
            ("last_updated", tester.TestDataType.DateTime),
        ],
    )
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # ### commands auto generated by Alembic - please adjust! ###
    # Composite indexes over (repository_id, lifetime bound) on both the
    # `repositorytag` and `tag` tables.
    index_specs = (
        ("repositorytag_repository_id_lifetime_end_ts", "repositorytag", ["repository_id", "lifetime_end_ts"]),
        ("tag_repository_id_lifetime_end_ms", "tag", ["repository_id", "lifetime_end_ms"]),
        ("repositorytag_repository_id_lifetime_start_ts", "repositorytag", ["repository_id", "lifetime_start_ts"]),
        ("tag_repository_id_lifetime_start_ms", "tag", ["repository_id", "lifetime_start_ms"]),
    )
    for index_name, table_name, columns in index_specs:
        op.create_index(index_name, table_name, columns, unique=False)
def downgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # Remove the notification kind added by the matching upgrade.
    kind = tables.notificationkind
    op.execute(kind.delete().where(kind.c.name == op.inline_literal("build_cancelled")))
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'repositorysearchscore',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('repository_id', sa.Integer(), nullable=False),
        sa.Column('score', sa.BigInteger(), nullable=False),
        sa.Column('last_updated', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(
            ['repository_id'],
            ['repository.id'],
            name=op.f('fk_repositorysearchscore_repository_id_repository'),
        ),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_repositorysearchscore')),
    )
    op.create_index(
        'repositorysearchscore_repository_id',
        'repositorysearchscore',
        ['repository_id'],
        unique=True,
    )
    op.create_index(
        'repositorysearchscore_score',
        'repositorysearchscore',
        ['score'],
        unique=False,
    )
    # ### end Alembic commands ###

    # ### population of test data ### #
    tester.populate_table('repositorysearchscore', [
        ('repository_id', tester.TestDataType.Foreign('repository')),
        ('score', tester.TestDataType.BigInteger),
        ('last_updated', tester.TestDataType.DateTime),
    ])
def downgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # Remove the log entry kind added by the matching upgrade.
    log_kind = tables.logentrykind
    op.execute(log_kind.delete().where(log_kind.c.name == op.inline_literal("change_tag_expiration")))
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # Register every Docker schema 2 content type as a media type row.
    for content_type in DOCKER_SCHEMA2_CONTENT_TYPES:
        op.bulk_insert(tables.mediatype, [{'name': content_type}])
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # Add the external notification event emitted on build cancellation.
    op.bulk_insert(tables.externalnotificationevent, [{"name": "build_cancelled"}])
def downgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # Remove the event row added by the matching upgrade.
    event = tables.externalnotificationevent
    op.execute(event.delete().where(event.c.name == op.inline_literal('build_cancelled')))
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    new_columns = ("company", "family_name", "given_name")

    # ### commands auto generated by Alembic - please adjust! ###
    # New optional text fields on `user`.
    for column_name in new_columns:
        op.add_column("user", sa.Column(column_name, UTF8CharField(length=255), nullable=True))
    # ### end Alembic commands ###

    # User prompt kinds associated with the new fields.
    op.bulk_insert(tables.userpromptkind, [
        {"name": "enter_name"},
        {"name": "enter_company"},
    ])

    # ### population of test data ### #
    for column_name in new_columns:
        tester.populate_column("user", column_name, tester.TestDataType.UTF8Char)
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # Add the log entry kind for tag-expiration changes.
    op.bulk_insert(tables.logentrykind, [{"name": "change_tag_expiration"}])
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # ### commands auto generated by Alembic - please adjust! ###
    # Nullable per-user queued-build count column.
    op.add_column("user", sa.Column("maximum_queued_builds_count", sa.Integer(), nullable=True))
    # ### end Alembic commands ###

    # ### population of test data ### #
    tester.populate_column("user", "maximum_queued_builds_count", tester.TestDataType.Integer)
def downgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # Remove every Docker schema 2 media type added by the matching upgrade.
    media_type_table = tables.mediatype
    for content_type in DOCKER_SCHEMA2_CONTENT_TYPES:
        op.execute(
            media_type_table.delete().where(
                media_type_table.c.name == op.inline_literal(content_type)
            )
        )
def downgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the tag-lifetime indexes created by the matching upgrade.
    for index_name, owning_table in (
        ("tag_repository_id_lifetime_end_ms", "tag"),
        ("repositorytag_repository_id_lifetime_end_ts", "repositorytag"),
        ("tag_repository_id_lifetime_start_ms", "tag"),
        ("repositorytag_repository_id_lifetime_start_ts", "repositorytag"),
    ):
        op.drop_index(index_name, table_name=owning_table)
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # Nullable free-text location field on `user`.
    op.add_column("user", sa.Column("location", UTF8CharField(length=255), nullable=True))

    # ### population of test data ### #
    tester.populate_column("user", "location", tester.TestDataType.UTF8Char)
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # ### commands auto generated by Alembic - please adjust! ###
    # Nullable creation timestamp on `user`.
    op.add_column('user', sa.Column('creation_date', sa.DateTime(), nullable=True))
    # ### end Alembic commands ###

    # ### population of test data ### #
    tester.populate_column('user', 'creation_date', tester.TestDataType.DateTime)
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # Backfill: insert a zero-score row for every repository that does not
    # yet appear in repositorysearchscore.
    conn = op.get_bind()
    conn.execute(
        "insert into repositorysearchscore (repository_id, score) SELECT id, 0 FROM "
        "repository WHERE id not in (select repository_id from repositorysearchscore)"
    )
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # Widen logentry.id to a BigInteger primary key.
    op.alter_column(
        table_name="logentry",
        column_name="id",
        type_=sa.BigInteger(),
        nullable=False,
        autoincrement=True,
    )
def downgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # Revert logentry.id back to a plain Integer primary key.
    op.alter_column(
        table_name='logentry',
        column_name='id',
        type_=sa.Integer(),
        nullable=False,
        autoincrement=True,
    )
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)
    conn = op.get_bind()

    # Copy each source table's contents into its appr-prefixed counterpart.
    table_pairs = (
        ('blob', 'apprblob'),
        ('manifest', 'apprmanifest'),
        ('manifestlist', 'apprmanifestlist'),
        ('blobplacement', 'apprblobplacement'),
        ('manifestblob', 'apprmanifestblob'),
        ('manifestlistmanifest', 'apprmanifestlistmanifest'),
        ('tag', 'apprtag'),
    )
    for source_table, destination_table in table_pairs:
        copy_table_contents(source_table, destination_table, conn)
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)
    conn = op.get_bind()

    # Copy each source table's contents into its appr-prefixed counterpart.
    for source_table, destination_table in (
        ("blob", "apprblob"),
        ("manifest", "apprmanifest"),
        ("manifestlist", "apprmanifestlist"),
        ("blobplacement", "apprblobplacement"),
        ("manifestblob", "apprmanifestblob"),
        ("manifestlistmanifest", "apprmanifestlistmanifest"),
        ("tag", "apprtag"),
    ):
        copy_table_contents(source_table, destination_table, conn)
def downgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("appspecificauthtoken")
    # ### end Alembic commands ###

    # Remove the log entry kinds added by the matching upgrade.
    #
    # Fix: the original compared against `tables.logentrykind.name`, which is
    # the SQLAlchemy Table's *table name* string ("logentrykind"), not the
    # `name` column — so the WHERE clause could never match and the rows were
    # never deleted. Compare against the column via `.c.name`, as the other
    # downgrade migrations in this file do.
    for kind_name in ("create_app_specific_token", "revoke_app_specific_token"):
        op.execute(
            tables.logentrykind.delete().where(
                tables.logentrykind.c.name == op.inline_literal(kind_name)
            )
        )
def run_migration(migrate_function, progress_reporter):
    """Rewrite the JSON config of every repositorybuildtrigger row.

    `migrate_function` receives the parsed config dict and returns the
    migrated dict, which is re-serialized and written back per row.
    Failed updates are logged and skipped rather than aborting the run.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    conn = op.get_bind()

    triggers = conn.execute("SELECT id, config FROM repositorybuildtrigger")
    for trigger in triggers:
        # trigger[0] = id, trigger[1] = config JSON text.
        config = json.dumps(migrate_function(json.loads(trigger[1])))
        try:
            conn.execute(
                "UPDATE repositorybuildtrigger SET config=%s WHERE id=%s",
                config,
                trigger[0],
            )
        except (RevisionError, CommandError) as e:
            # Fix: the original passed `e` as an extra lazy-format argument
            # with no matching placeholder ("... with exception: " has only
            # one %s), which triggers a formatting error inside logging
            # instead of recording the exception. Give `e` its own %s.
            log.warning("Failed to update build trigger %s with exception: %s", trigger[0], e)
def downgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the uuid indexes created by the matching upgrade.
    for index_name, owning_table in (
        ("user_uuid", "user"),
        ("repositorybuildtrigger_uuid", "repositorybuildtrigger"),
        ("permissionprototype_uuid", "permissionprototype"),
    ):
        op.drop_index(index_name, table_name=owning_table)
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    counter_columns = ("successive_failure_count", "successive_internal_error_count")

    # ### commands auto generated by Alembic - please adjust! ###
    # Non-null counters defaulting to zero on repositorybuildtrigger.
    for column_name in counter_columns:
        op.add_column(
            "repositorybuildtrigger",
            sa.Column(column_name, sa.Integer(), server_default="0", nullable=False),
        )
    # ### end Alembic commands ###

    # Disable reasons tied to the new counters.
    op.bulk_insert(
        tables.disablereason,
        [
            {"id": 2, "name": "successive_build_failures"},
            {"id": 3, "name": "successive_build_internal_errors"},
        ],
    )

    # ### population of test data ### #
    for column_name in counter_columns:
        tester.populate_column("repositorybuildtrigger", column_name, tester.TestDataType.Integer)
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "robotaccountmetadata",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("robot_account_id", sa.Integer(), nullable=False),
        sa.Column("description", UTF8CharField(length=255), nullable=False),
        sa.Column("unstructured_json", sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(
            ["robot_account_id"],
            ["user.id"],
            name=op.f("fk_robotaccountmetadata_robot_account_id_user"),
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_robotaccountmetadata")),
    )
    # One metadata row per robot account.
    op.create_index(
        "robotaccountmetadata_robot_account_id",
        "robotaccountmetadata",
        ["robot_account_id"],
        unique=True,
    )
    # ### end Alembic commands ###

    # ### population of test data ### #
    tester.populate_table("robotaccountmetadata", [
        ("robot_account_id", tester.TestDataType.Foreign("user")),
        ("description", tester.TestDataType.UTF8Char),
        ("unstructured_json", tester.TestDataType.JSON),
    ])
def upgrade(tables, tester, progress_reporter):
    """Collapse external_registry/namespace/repository into external_reference.

    Adds the new column as nullable, backfills it from the three legacy
    columns for existing rows (only once the app is configured, or under
    test), then drops the legacy columns and makes the new one NOT NULL.
    """
    op = ProgressWrapper(original_op, progress_reporter)

    logger.info('Migrating to external_reference from existing columns')

    # Must be added nullable first; existing rows have no value yet.
    op.add_column('repomirrorconfig', sa.Column('external_reference', sa.Text(), nullable=True))

    from app import app
    if app.config.get('SETUP_COMPLETE', False) or tester.is_testing():
        # Backfill external_reference as registry/namespace/repository for
        # every row that does not have one yet.
        for repo_mirror in _iterate(RepoMirrorConfig, (RepoMirrorConfig.external_reference >> None)):
            repo = '%s/%s/%s' % (
                repo_mirror.external_registry,
                repo_mirror.external_namespace,
                repo_mirror.external_repository,
            )
            # Fix: use lazy %-args instead of eagerly formatting the message
            # with `%` (standard logging idiom; formatting is deferred).
            logger.info('migrating %s', repo)
            repo_mirror.external_reference = repo
            repo_mirror.save()

    op.drop_column('repomirrorconfig', 'external_registry')
    op.drop_column('repomirrorconfig', 'external_namespace')
    op.drop_column('repomirrorconfig', 'external_repository')
    # All rows now populated, so tighten the constraint.
    op.alter_column('repomirrorconfig', 'external_reference', nullable=False, existing_type=sa.Text())

    tester.populate_column('repomirrorconfig', 'external_reference', tester.TestDataType.String)
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # ### commands auto generated by Alembic - please adjust! ###
    # Non-unique uuid lookup indexes.
    for index_name, table_name in (
        ("permissionprototype_uuid", "permissionprototype"),
        ("repositorybuildtrigger_uuid", "repositorybuildtrigger"),
        ("user_uuid", "user"),
    ):
        op.create_index(index_name, table_name, ["uuid"], unique=False)
def downgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    op.drop_column('repositorynotification', 'number_of_failures')

    # Remove the log entry kind added by the matching upgrade.
    log_kind = tables.logentrykind
    op.execute(log_kind.delete().where(log_kind.c.name == op.inline_literal('reset_repo_notification')))
def downgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("repository", "trust_enabled")
    # ### end Alembic commands ###

    # Fix: compare the `name` *column* (`.c.name`), not `Table.name` — the
    # latter is the table's own name string ("logentrykind"), so the original
    # WHERE clause could never match and the kind row was never deleted.
    op.execute(
        tables.logentrykind.delete().where(
            tables.logentrykind.c.name == op.inline_literal("change_repo_trust")
        )
    )
def downgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # Nothing to rewrite until the app has been configured.
    if not app.config.get("SETUP_COMPLETE", False):
        return

    # Re-expand each stored build trigger config and persist it.
    for build_trigger in RepositoryBuildTrigger.select():
        parsed_config = json.loads(build_trigger.config)
        build_trigger.config = json.dumps(get_config_expand(parsed_config))
        build_trigger.save()