def upgrade():
    if op.get_context().dialect.name == 'postgresql':
        # INFO - G.M - 2018-11-27 - To modify an enum type in PostgreSQL we have to
        # create a new type, switch the column over to it and drop the old type.
        op.execute("ALTER TYPE authtype RENAME TO authtype_old;")
        op.execute("ALTER TABLE users alter auth_type drop default;")
        enum = sa.Enum(*new_auth_type_list, name='authtype')
        enum.create(op.get_bind(), checkfirst=False)
        with op.batch_alter_table('users') as batch_op:
            batch_op.alter_column(
                'auth_type',
                type_=enum,
                postgresql_using="auth_type::text::authtype",
                server_default='INTERNAL'
            )
        op.execute("DROP TYPE authtype_old;")
    else:
        # INFO - G.M - 2018-11-27 - MySQL case
        enum = sa.Enum(*new_auth_type_list, name='authtype')
        enum.create(op.get_bind(), checkfirst=False)
        with op.batch_alter_table('users') as batch_op:
            batch_op.alter_column(
                'auth_type',
                type_=enum,
            )
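# A minimal sketch of a matching downgrade for the enum migration above, assuming an
# `old_auth_type_list` defined alongside `new_auth_type_list` in the same revision;
# the value list and the restored server default below are assumptions, not part of
# the original revision, and the text cast can fail for rows holding removed values.
import sqlalchemy as sa
from alembic import op

old_auth_type_list = ['INTERNAL', 'LDAP']  # hypothetical previous enum values


def downgrade():
    if op.get_context().dialect.name == 'postgresql':
        # Reverse of upgrade(): park the current type, recreate the old one,
        # cast the column back, then drop the parked type.
        op.execute("ALTER TYPE authtype RENAME TO authtype_old;")
        op.execute("ALTER TABLE users ALTER auth_type DROP DEFAULT;")
        enum = sa.Enum(*old_auth_type_list, name='authtype')
        enum.create(op.get_bind(), checkfirst=False)
        with op.batch_alter_table('users') as batch_op:
            batch_op.alter_column(
                'auth_type',
                type_=enum,
                postgresql_using="auth_type::text::authtype",
                server_default='INTERNAL',
            )
        op.execute("DROP TYPE authtype_old;")
    else:
        enum = sa.Enum(*old_auth_type_list, name='authtype')
        enum.create(op.get_bind(), checkfirst=False)
        with op.batch_alter_table('users') as batch_op:
            batch_op.alter_column('auth_type', type_=enum)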
def downgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)
    # Restore the size of the metric_name column.
    with op.batch_alter_table('metrics', naming_convention=conv) as batch_op:
        batch_op.alter_column(
            'metric_name',
            existing_type=sa.String(length=255),
            type_=sa.String(length=512),
            existing_nullable=True,
        )
    # Remove the previously missing uniqueness constraints.
    for table, column in names.items():
        with op.batch_alter_table(table, naming_convention=conv) as batch_op:
            batch_op.drop_constraint(
                generic_find_uq_constraint_name(
                    table,
                    {column, 'datasource_id'},
                    insp,
                ) or 'uq_{}_{}'.format(table, column),
                type_='unique',
            )
def downgrade():
    with op.batch_alter_table('lti_user', naming_convention=convention) as batch_op:
        batch_op.drop_column('lis_person_sourcedid')
    with op.batch_alter_table('lti_context', naming_convention=convention) as batch_op:
        batch_op.drop_column('lis_course_section_sourcedid')
        batch_op.drop_column('lis_course_offering_sourcedid')
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('ticket_transfer')
    op.drop_table('ticket_checkin')
    op.drop_table('ticket_attrib')
    with op.batch_alter_table('ticket', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_ticket_type_id'))
        batch_op.drop_index(batch_op.f('ix_ticket_paid'))
    op.drop_table('ticket')
    op.drop_table('payment_change')
    op.drop_table('cfp_vote')
    op.drop_table('cfp_message')
    with op.batch_alter_table('bank_transaction', schema=None) as batch_op:
        batch_op.drop_index('ix_bank_transaction_u1')
        batch_op.drop_index(batch_op.f('ix_bank_transaction_fit_id'))
    op.drop_table('bank_transaction')
    with op.batch_alter_table('user_permission', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_user_permission_user_id'))
    op.drop_table('user_permission')
    with op.batch_alter_table('transaction', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_transaction_user_id'))
    op.drop_table('transaction')
    op.drop_table('ticket_price')
    op.drop_table('proposal')
    op.drop_table('payment')
    op.drop_table('diversity')
    with op.batch_alter_table('category_reviewers', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_category_reviewers_user_id'))
    op.drop_table('category_reviewers')
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_user_name'))
        batch_op.drop_index(batch_op.f('ix_user_email'))
    op.drop_table('user')
    op.drop_table('ticket_type')
    with op.batch_alter_table('proposal_version', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_proposal_version_transaction_id'))
        batch_op.drop_index(batch_op.f('ix_proposal_version_operation_type'))
    op.drop_table('proposal_version')
    with op.batch_alter_table('permission', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_permission_name'))
    op.drop_table('permission')
    op.drop_table('feature_flag')
    with op.batch_alter_table('cfp_vote_version', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_cfp_vote_version_transaction_id'))
        batch_op.drop_index(batch_op.f('ix_cfp_vote_version_operation_type'))
    op.drop_table('cfp_vote_version')
    op.drop_table('category')
    with op.batch_alter_table('bank_account', schema=None) as batch_op:
        batch_op.drop_index('ix_bank_account_sort_code_acct_id')
    op.drop_table('bank_account')
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('scenario', schema=None) as batch_op:
        batch_op.drop_column('root_node')
    op.create_table('ESP',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('score', sa.INTEGER(), nullable=True),
        sa.Column('data', sa.VARCHAR(length=300), nullable=True),
        sa.Column('xpath', sa.VARCHAR(length=500), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('contains',
        sa.Column('esp_id', sa.INTEGER(), nullable=True),
        sa.Column('scenario_id', sa.INTEGER(), nullable=True),
        sa.ForeignKeyConstraint(['esp_id'], ['ESP.id'], ),
        sa.ForeignKeyConstraint(['scenario_id'], ['scenario.id'], )
    )
    op.drop_table('scenario_groups')
    op.drop_table('group_fields')
    op.drop_table('group')
    with op.batch_alter_table('field', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_field_name'))
    op.drop_table('field')
def downgrade(): """alexm: i believe this method is never called""" with op.batch_alter_table(t2_name) as batch_op: batch_op.drop_column("do_not_use") with op.batch_alter_table(t1_name) as batch_op: batch_op.drop_column("enabled")
def upgrade():
    with op.batch_alter_table('lti_context', naming_convention=convention) as batch_op:
        batch_op.add_column(sa.Column('lis_course_offering_sourcedid', sa.String(length=255), nullable=True))
        batch_op.add_column(sa.Column('lis_course_section_sourcedid', sa.String(length=255), nullable=True))
    with op.batch_alter_table('lti_user', naming_convention=convention) as batch_op:
        batch_op.add_column(sa.Column('lis_person_sourcedid', sa.String(length=255), nullable=True))
def upgrade(): with op.batch_alter_table("deployments", schema=None) as batch_op: batch_op.add_column( sa.Column("credentials", sa.PickleType(), nullable=True)) connection = op.get_bind() for deployment in connection.execute(deployments_helper.select()): creds = [ ["openstack", { "admin": deployment.admin, "users": deployment.users }] ] connection.execute( deployments_helper.update().where( deployments_helper.c.id == deployment.id).values( credentials=creds)) with op.batch_alter_table("deployments", schema=None) as batch_op: batch_op.alter_column("credentials", existing_type=sa.PickleType, existing_nullable=True, nullable=False) batch_op.drop_column("admin") batch_op.drop_column("users")
def downgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('project_name', existing_type=sa.String(256), type_=sa.String()) batch_op.alter_column('repo_dir', existing_type=sa.String(1024), type_=sa.String()) batch_op.alter_column('distgit_dir', existing_type=sa.String(1024), type_=sa.String()) batch_op.alter_column('commit_hash', existing_type=sa.String(64), type_=sa.String()) batch_op.alter_column('distro_hash', existing_type=sa.String(64), type_=sa.String()) batch_op.alter_column('distgit_dir', existing_type=sa.String(1024), type_=sa.String()) batch_op.alter_column('commit_branch', existing_type=sa.String(256), type_=sa.String()) batch_op.alter_column('status', existing_type=sa.String(64), type_=sa.String()) batch_op.alter_column('rpms', existing_type=sa.Text(), type_=sa.String()) batch_op.alter_column('notes', existing_type=sa.Text(), type_=sa.String()) with op.batch_alter_table("projects") as batch_op: batch_op.alter_column('project_name', existing_type=sa.String(256), type_=sa.String())
def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.create_table( "favourite_calendar_events", sa.Column("user_id", sa.Integer(), nullable=True), sa.Column("event_id", sa.Integer(), nullable=True), sa.ForeignKeyConstraint( ["event_id"], ["calendar_event.id"], name=op.f("fk_favourite_calendar_events_event_id_calendar_event") ), sa.ForeignKeyConstraint(["user_id"], ["user.id"], name=op.f("fk_favourite_calendar_events_user_id_user")), ) with op.batch_alter_table(u"calendar_event", schema=None) as batch_op: batch_op.add_column(sa.Column("end_dt", sa.DateTime(), nullable=False)) batch_op.add_column(sa.Column("start_dt", sa.DateTime(), nullable=False)) batch_op.drop_column("end_tz") batch_op.drop_column("start_tz") batch_op.drop_column("start_utc") batch_op.drop_column("end_local") batch_op.drop_column("end_utc") batch_op.drop_column("start_local") with op.batch_alter_table(u"calendar_source", schema=None) as batch_op: batch_op.add_column(sa.Column("priority", sa.Integer(), nullable=True)) batch_op.add_column(sa.Column("type", sa.String(), nullable=True)) with op.batch_alter_table(u"venue", schema=None) as batch_op: batch_op.add_column(sa.Column("priority", sa.Integer(), nullable=True))
def downgrade(): with op.batch_alter_table("civotes") as batch_op: batch_op.drop_constraint('civ_user_fk', type_='foreignkey') with op.batch_alter_table("promotions") as batch_op: batch_op.drop_constraint('prom_user_fk', type_='foreignkey') with op.batch_alter_table("users") as batch_op: batch_op.alter_column('username', existing_type=sa.String(255), type_=sa.String(256)) with op.batch_alter_table("civotes") as batch_op: batch_op.alter_column('user', existing_type=sa.String(255), type_=sa.String(256)) batch_op.create_foreign_key( constraint_name="civ_user_fk", referent_table="users", local_cols=["user"], remote_cols=["username"]) with op.batch_alter_table("promotions") as batch_op: batch_op.alter_column('user', existing_type=sa.String(255), type_=sa.String(256)) batch_op.create_foreign_key( constraint_name="prom_user_fk", referent_table="users", local_cols=["user"], remote_cols=["username"])
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.add_column(sa.Column('promo_opt_in', sa.Boolean(), nullable=False, server_default=expression.false()))
    with op.batch_alter_table('user_version', schema=None) as batch_op:
        batch_op.add_column(sa.Column('promo_opt_in', sa.Boolean(), autoincrement=False, nullable=True))
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('user_version', schema=None) as batch_op:
        batch_op.drop_column('promo_opt_in')
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.drop_column('promo_opt_in')
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('task', schema=None) as batch_op:
        batch_op.add_column(sa.Column('chunk_start', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
        batch_op.add_column(sa.Column('current_frame', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
        batch_op.add_column(sa.Column('chunk_end', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
        batch_op.drop_column('type')
        batch_op.drop_column('settings')
        batch_op.drop_column('name')
        batch_op.drop_column('log')
        batch_op.drop_column('activity')
    with op.batch_alter_table('manager', schema=None) as batch_op:
        batch_op.add_column(sa.Column('running_tasks', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False))
        batch_op.add_column(sa.Column('total_workers', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
    with op.batch_alter_table('job', schema=None) as batch_op:
        batch_op.add_column(sa.Column('frame_start', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
        batch_op.add_column(sa.Column('current_frame', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
        batch_op.add_column(sa.Column('chunk_size', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
        batch_op.add_column(sa.Column('render_settings', mysql.VARCHAR(length=120), nullable=True))
        batch_op.add_column(sa.Column('filepath', mysql.VARCHAR(length=256), nullable=True))
        batch_op.add_column(sa.Column('frame_end', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
        batch_op.add_column(sa.Column('format', mysql.VARCHAR(length=10), nullable=True))
        batch_op.drop_column('type')
        batch_op.drop_column('settings')
def upgrade():
    c = get_context()
    # drop foreign keys for mysql
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        insp = Inspector.from_engine(c.connection.engine)
        for t in ['groups_resources_permissions', 'users_resources_permissions', 'resources']:
            for constraint in insp.get_foreign_keys(t):
                if constraint['referred_columns'] == ['resource_id']:
                    op.drop_constraint(constraint['name'], t, type_='foreignkey')
    with op.batch_alter_table('resources', schema=None) as batch_op:
        batch_op.alter_column('resource_id', type_=sa.Integer(), existing_type=sa.BigInteger(), autoincrement=True)
    with op.batch_alter_table('resources', schema=None) as batch_op:
        batch_op.alter_column('parent_id', type_=sa.Integer(), existing_type=sa.BigInteger())
    with op.batch_alter_table('users_resources_permissions', schema=None) as batch_op:
        batch_op.alter_column('resource_id', type_=sa.Integer(), existing_type=sa.BigInteger())
        batch_op.alter_column('resource_id', type_=sa.Integer(), existing_type=sa.BigInteger())
    # recreate foreign keys for mysql
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key("groups_resources_permissions_resource_fk",
                              'groups_resources_permissions', "resources",
                              ["resource_id"], ["resource_id"],
                              onupdate='CASCADE', ondelete='CASCADE')
        op.create_foreign_key("users_resources_permissions_fk",
                              'users_resources_permissions', "resources",
                              ["resource_id"], ["resource_id"],
                              onupdate='CASCADE', ondelete='CASCADE')
def downgrade(): with op.batch_alter_table("deployments", schema=None) as batch_op: batch_op.add_column( sa.Column("users", sa.PickleType(), nullable=True)) batch_op.add_column( sa.Column("admin", sa.PickleType(), nullable=True)) connection = op.get_bind() for deployment in connection.execute(deployments_helper.select()): admin = None users = [] for credentials in deployment.credentials: if credentials[0] == "openstack": admin = credentials[1].get("admin") users = credentials[1].get("users", []) connection.execute( deployments_helper.update().where( deployments_helper.c.id == deployment.id).values( admin=admin, users=users)) with op.batch_alter_table("deployments", schema=None) as batch_op: batch_op.alter_column("users", existing_type=sa.PickleType, existing_nullable=True, nullable=False) batch_op.drop_column("credentials")
def downgrade():
    # insert selfevaltype_id column into Questions table
    op.add_column(u'Questions', sa.Column('selfevaltype_id', sa.Integer(), nullable=True))
    # populate the column - only populate the no comparison self evaluation type
    type = text(
        "SELECT id FROM SelfEvalTypes " +
        "WHERE name='No Comparison with Another Answer'"
    )
    conn = op.get_bind()
    res = conn.execute(type)
    selfevaltype = res.fetchall()
    populate = text(
        "UPDATE Questions " +
        "SET selfevaltype_id = "
        "(SELECT qs.selfevaltypes_id " +
        "FROM QuestionsAndSelfEvalTypes qs "
        "WHERE Questions.id = qs.questions_id " +
        "AND qs.selfevaltypes_id = " + str(selfevaltype[0][0]) + ')'
    )
    op.get_bind().execute(populate)
    with op.batch_alter_table('Questions', naming_convention=convention) as batch_op:
        batch_op.create_foreign_key('fk_Questions_selfevaltype_id_SelfEvalTypes', 'SelfEvalTypes',
                                    ['selfevaltype_id'], ['id'], ondelete="CASCADE")
    with op.batch_alter_table('PostsForJudgements', naming_convention=convention) as batch_op:
        batch_op.drop_column('selfeval')
    op.drop_table('QuestionsAndSelfEvalTypes')
def upgrade(): # ### commands auto generated by Alembic - please adjust! ### with op.batch_alter_table("message") as batch_op: batch_op.alter_column("workflow_execution_uid", new_column_name="workflow_execution_id", type=UUIDType) with op.batch_alter_table("scheduled_workflow") as batch_op: batch_op.alter_column("uid", new_column_name="workflow_id", type=UUIDType, nullable=False)
def upgrade():
    try:
        with op.batch_alter_table(
            "Criteria", naming_convention=convention, table_args=(UniqueConstraint("name"),)
        ) as batch_op:
            batch_op.drop_constraint("uq_Criteria_name", type_="unique")
    except exc.InternalError:
        with op.batch_alter_table(
            "Criteria", naming_convention=convention, table_args=(UniqueConstraint("name"),)
        ) as batch_op:
            batch_op.drop_constraint("name", type_="unique")
    except ValueError:
        logging.warning("Dropping a unique constraint is not supported for SQLite; dropping uq_Criteria_name ignored!")
    # set existing criteria's active attribute to True using server_default
    op.add_column(
        "CriteriaAndCourses",
        sa.Column("active", sa.Boolean(name="active"), default=True, server_default="1", nullable=False),
    )
    op.add_column(
        "Criteria",
        sa.Column("public", sa.Boolean(name="public"), default=False, server_default="0", nullable=False)
    )
    # set the first criteria as public
    t = {"name": "Which is better?", "public": True}
    op.get_bind().execute(text("Update Criteria set public=:public where name=:name"), **t)
def downgrade():
    op.drop_table('ensemble_data_file_variables')
    op.drop_table('data_file_variables_qc_flags')
    op.drop_table('data_file_variables')
    with op.batch_alter_table('data_files', schema=None) as batch_op:
        batch_op.drop_index('data_files_run_id_key')
    op.drop_table('data_files')
    with op.batch_alter_table('y_cell_bounds', schema=None) as batch_op:
        batch_op.drop_index('y_c_b_grid_id_key')
    op.drop_table('y_cell_bounds')
    op.drop_table('variables')
    with op.batch_alter_table('times', schema=None) as batch_op:
        batch_op.drop_index('time_set_id_key')
    op.drop_table('times')
    with op.batch_alter_table('runs', schema=None) as batch_op:
        batch_op.drop_index('runs_model_id_key')
        batch_op.drop_index('runs_emission_id_key')
    op.drop_table('runs')
    op.drop_table('levels')
    with op.batch_alter_table('climatological_times', schema=None) as batch_op:
        batch_op.drop_index('climatological_times_time_set_id_key')
    op.drop_table('climatological_times')
    op.drop_table('variable_aliases')
    op.drop_table('time_sets')
    op.drop_table('qc_flags')
    op.drop_table('models')
    op.drop_table('level_sets')
    op.drop_table('grids')
    op.drop_table('ensembles')
    op.drop_table('emissions')
def upgrade():
    op.create_table(
        'SelfEvalTypes',
        sa.Column('id', sa.Integer(), nullable=True),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
        mysql_charset='utf8',
        mysql_collate='utf8_unicode_ci',
        mysql_engine='InnoDB'
    )
    # populate table with a self evaluation type
    insert = text(
        "INSERT INTO SelfEvalTypes (name) " +
        "VALUES ('No Comparison with Another Answer')"
    )
    op.get_bind().execute(insert)
    with op.batch_alter_table('Questions', naming_convention=convention) as batch_op:
        batch_op.add_column(sa.Column('selfevaltype_id', sa.Integer(), nullable=True))
    with op.batch_alter_table('Questions', naming_convention=convention) as batch_op:
        batch_op.create_foreign_key(
            'fk_Questions_selfevaltype_id_SelfEvalTypes', 'SelfEvalTypes',
            ['selfevaltype_id'], ['id'], ondelete="CASCADE")
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('job', schema=None) as batch_op:
        batch_op.add_column(sa.Column('settings', sa.Text(), nullable=True))
        batch_op.add_column(sa.Column('type', sa.String(length=64), nullable=True))
        batch_op.drop_column('format')
        batch_op.drop_column('frame_end')
        batch_op.drop_column('filepath')
        batch_op.drop_column('render_settings')
        batch_op.drop_column('chunk_size')
        batch_op.drop_column('current_frame')
        batch_op.drop_column('frame_start')
    with op.batch_alter_table('manager', schema=None) as batch_op:
        batch_op.drop_column('total_workers')
        batch_op.drop_column('running_tasks')
    with op.batch_alter_table('task', schema=None) as batch_op:
        batch_op.add_column(sa.Column('activity', sa.String(length=128), nullable=True))
        batch_op.add_column(sa.Column('log', sa.Text(), nullable=True))
        batch_op.add_column(sa.Column('name', sa.String(length=64), nullable=True))
        batch_op.add_column(sa.Column('settings', sa.Text(), nullable=True))
        batch_op.add_column(sa.Column('type', sa.String(length=64), nullable=True))
        batch_op.drop_column('chunk_end')
        batch_op.drop_column('current_frame')
        batch_op.drop_column('chunk_start')
def downgrade(): with op.batch_alter_table("register") as batch_op: batch_op.create_unique_constraint(None, ['registration_no']) with op.batch_alter_table("register_details") as batch_op: batch_op.drop_column('amends') batch_op.drop_column('cancelled_on')
def upgrade():
    role_focus_user = op.create_table('role_focus_user',
        sa.Column('role_focus_user_id', sa.Integer(), nullable=False),
        sa.Column('role_focus_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('start_date', sa.DateTime(), nullable=True),
        sa.Column('end_date', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['role_focus_id'], ['role_focus.role_focus_id'], name='fk_user_focus'),
        sa.PrimaryKeyConstraint('role_focus_user_id')
    )
    with op.batch_alter_table('role', schema=None) as batch_op:
        batch_op.add_column(sa.Column('duration', sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column('role_type', sa.Enum('leadlink', 'elected', 'assigned'), nullable=True))
    connection = op.get_bind()
    for focus in connection.execute(focus_helper.select()):
        op.bulk_insert(
            role_focus_user,
            [
                {
                    'role_focus_user_id': focus.role_focus_id,
                    'role_focus_id': focus.role_focus_id,
                    'user_id': focus.user_id,
                },
            ]
        )
    with op.batch_alter_table('role_focus', schema=None) as batch_op:
        batch_op.drop_column('user_id')
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('task', schema=None) as batch_op:
        batch_op.alter_column('uuid', existing_type=sa.String(length=256), type_=sa.VARCHAR(), existing_nullable=True)
        batch_op.alter_column('user', existing_type=sa.String(length=256), type_=sa.VARCHAR(), existing_nullable=True)
        batch_op.alter_column('task', existing_type=sa.String(length=256), type_=sa.VARCHAR(), existing_nullable=True)
    with op.batch_alter_table('session', schema=None) as batch_op:
        batch_op.alter_column('uuid', existing_type=sa.String(length=256), type_=sa.VARCHAR(), existing_nullable=True)
        batch_op.alter_column('user', existing_type=sa.String(length=256), type_=sa.VARCHAR(), existing_nullable=True)
        batch_op.alter_column('ua', existing_type=sa.String(length=2048), type_=sa.VARCHAR(), existing_nullable=True)
        batch_op.alter_column('ip', existing_type=sa.String(length=256), type_=sa.VARCHAR(), existing_nullable=True)
def upgrade():
    for table, _, col in TABLES_FKS:
        inspector = reflection.Inspector.from_engine(op.get_bind())
        fk_name = [fk['name'] for fk in inspector.get_foreign_keys(table)
                   if col in fk['constrained_columns']]
        try:
            op.drop_constraint(fk_name[0], table, 'foreignkey')
        except (NotImplementedError, IndexError):
            pass
        with op.batch_alter_table(table) as batch_op:
            batch_op.alter_column(col, existing_type=sa.Integer,
                                  type_=sa.String(255),
                                  existing_nullable=False, nullable=False)
    for table in TABLES:
        with op.batch_alter_table(table) as batch_op:
            batch_op.alter_column("aim_id", existing_type=sa.Integer,
                                  type_=sa.String(255),
                                  existing_nullable=False, nullable=False)
    with op.batch_alter_table("aim_statuses") as batch_op:
        batch_op.alter_column("resource_id", existing_type=sa.Integer,
                              type_=sa.String(255),
                              existing_nullable=False, nullable=False)
    for table, other, col in TABLES_FKS:
        with op.batch_alter_table(table) as batch_op:
            batch_op.create_foreign_key('fk_' + table + '_' + col, other, [col], ['aim_id'])
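# An illustrative helper distilled from the lookup-then-drop pattern above: resolve a
# foreign-key name for a column via the SQLAlchemy Inspector, then drop it if present.
# The helper name is hypothetical and not part of the original revision.
from alembic import op
from sqlalchemy.engine import reflection


def drop_fk_for_column(table, col):
    inspector = reflection.Inspector.from_engine(op.get_bind())
    names = [fk['name'] for fk in inspector.get_foreign_keys(table)
             if col in fk['constrained_columns']]
    if names and names[0]:
        # Some backends may not support dropping constraints this way; callers can
        # catch NotImplementedError just as the migration above does.
        op.drop_constraint(names[0], table, type_='foreignkey')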
def downgrade():
    # revert table Judgements
    op.add_column("Judgements", sa.Column('criteriaandcourses_id', sa.Integer(), nullable=True))
    # populate column with the judgements' criteriaandcourses_id
    update = text(
        # rewrite into subquery to support SQLite. need more testing to verify
        "UPDATE Judgements "
        "SET criteriaandcourses_id = "
        "(SELECT cc.id FROM CriteriaAndCourses cc "
        "JOIN CriteriaAndQuestions cq ON cq.criteria_id = cc.criteria_id "
        "JOIN Questions q ON cq.questions_id = q.id "
        "JOIN Posts p ON q.posts_id = p.id "
        "WHERE Judgements.criteriaandquestions_id = cq.id AND p.courses_id = cc.courses_id)"
        # "UPDATE Judgements j " +
        # "JOIN CriteriaAndQuestions cq ON j.criteriaandquestions_id = cq.id " +
        # "JOIN Questions q ON cq.questions_id = q.id " +
        # "JOIN Posts p ON q.posts_id = p.id " +
        # "JOIN CriteriaAndCourses cc ON cq.criteria_id = cc.criteria_id AND p.courses_id = cc.courses_id " +
        # "SET j.criteriaandcourses_id = cc.id"
    )
    op.get_bind().execute(update)
    with op.batch_alter_table('Judgements', naming_convention=convention) as batch_op:
        batch_op.create_foreign_key('fk_Judgements_criteriaandcourses_id_CriteriaAndCourses', 'CriteriaAndCourses',
                                    ['criteriaandcourses_id'], ['id'], ondelete="CASCADE")
        batch_op.alter_column('criteriaandcourses_id', nullable=False, existing_type=sa.Integer())
        batch_op.drop_constraint('fk_Judgements_criteriaandquestions_id_CriteriaAndQuestions', 'foreignkey')
        # batch_op.drop_index("criteriaandquestions_id")
        batch_op.drop_column("criteriaandquestions_id")
    # revert table Scores
    op.add_column('Scores', sa.Column('criteriaandcourses_id', sa.Integer(), nullable=True))
    # populate column with the scores' criteriaandcourses_id
    update = text(
        # rewrite into subquery to support SQLite. need more testing to verify
        "UPDATE Scores "
        "SET criteriaandcourses_id = "
        "(SELECT cc.id FROM CriteriaAndCourses cc "
        "JOIN CriteriaAndQuestions cq ON cq.criteria_id = cc.criteria_id "
        "JOIN Questions q ON cq.questions_id = q.id "
        "JOIN Posts p ON q.posts_id = p.id "
        "WHERE Scores.criteriaandquestions_id = cq.id AND p.courses_id = cc.courses_id)"
        # "UPDATE Scores s " +
        # "JOIN CriteriaAndQuestions cq ON s.criteriaandquestions_id = cq.id " +
        # "JOIN Questions q ON cq.questions_id = q.id " +
        # "JOIN Posts p ON q.posts_id = p.id " +
        # "JOIN CriteriaAndCourses cc ON cq.criteria_id = cc.criteria_id AND p.courses_id = cc.courses_id " +
        # "SET s.criteriaandcourses_id = cc.id"
    )
    op.get_bind().execute(update)
    with op.batch_alter_table('Scores', naming_convention=convention) as batch_op:
        batch_op.create_foreign_key('fk_Scores_criteriaandcourses_id_CriteriaAndCourses', 'CriteriaAndCourses',
                                    ['criteriaandcourses_id'], ['id'], ondelete="CASCADE")
        batch_op.alter_column('criteriaandcourses_id', nullable=False, existing_type=sa.Integer())
        batch_op.drop_constraint('fk_Scores_criteriaandquestions_id_CriteriaAndQuestions', 'foreignkey')
        # batch_op.drop_index('criteriaandquestions_id')
        batch_op.drop_column('criteriaandquestions_id')
    # drop table CriteriaAndQuestions
    op.drop_table('CriteriaAndQuestions')
def downgrade():
    with op.batch_alter_table('Criteria', naming_convention=convention,
                              table_args=(UniqueConstraint('name'),)) as batch_op:
        batch_op.create_unique_constraint('uq_Criteria_name', ['name'])
        batch_op.drop_column('public')
    with op.batch_alter_table('CriteriaAndCourses', naming_convention=convention) as batch_op:
        batch_op.drop_column('active')
def downgrade():
    with op.batch_alter_table('submissions', schema=None) as batch_op:
        batch_op.drop_column('archived')
    with op.batch_alter_table('comments', schema=None) as batch_op:
        batch_op.alter_column(
            'author',
            existing_type=sa.VARCHAR(),
            nullable=False)
def upgrade():
    table_prefix = context.config.get_main_option('table_prefix')
    op.drop_table(table_prefix + 'template')
    table_name = table_prefix + 'environment_schema_values'
    with op.batch_alter_table(table_name) as batch:
        batch.drop_constraint(table_name + '_schema_id_fkey', 'foreignkey')
        batch.alter_column(
            'schema_id',
            new_column_name='resource_definition_id',
            existing_type=sa.Integer(),
        )
    op.rename_table(table_name, table_prefix + 'resource_values')
    op.rename_table(table_prefix + 'schema', table_prefix + 'resource_definition')
    with op.batch_alter_table(table_prefix + 'resource_definition') as batch:
        batch.drop_column('namespace_id')
    op.drop_table(table_prefix + 'namespace')
    table_name = table_prefix + 'resource_values'
    with op.batch_alter_table(table_name) as batch:
        batch.create_foreign_key(
            table_name + '_resource_definition_id_fkey',
            table_prefix + 'resource_definition',
            ['resource_definition_id'], ['id'],
        )
def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table( 'user', sa.Column('id', sa.Integer(), nullable=False), sa.Column('username', sa.String(length=200), nullable=True), sa.Column('email', sa.String(length=200), nullable=True), sa.Column('password_hash', sa.String(length=150), nullable=True), sa.Column('about_me', sa.String(length=300), nullable=True), sa.Column('nickname', sa.String(length=150), nullable=True), sa.Column('last_seen', sa.DateTime(), nullable=True), sa.PrimaryKeyConstraint('id', name=op.f('pk_user'))) with op.batch_alter_table('user', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_user_email'), ['email'], unique=True) batch_op.create_index(batch_op.f('ix_user_username'), ['username'], unique=True) op.create_table( 'followers', sa.Column('follower_id', sa.Integer(), nullable=True), sa.Column('followed_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['followed_id'], ['user.id'], name=op.f('fk_followers_followed_id_user')), sa.ForeignKeyConstraint(['follower_id'], ['user.id'], name=op.f('fk_followers_follower_id_user'))) op.create_table( 'post', sa.Column('id', sa.Integer(), nullable=False), sa.Column('title', sa.String(length=200), nullable=True), sa.Column('sphere', sa.String(length=200), nullable=True), sa.Column('description', sa.String(length=800), nullable=True), sa.Column('officialLink', sa.String(length=300), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_post_user_id_user')), sa.PrimaryKeyConstraint('id', name=op.f('pk_post'))) with op.batch_alter_table('post', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_post_description'), ['description'], unique=False) batch_op.create_index(batch_op.f('ix_post_officialLink'), ['officialLink'], unique=False) batch_op.create_index(batch_op.f('ix_post_sphere'), ['sphere'], unique=False) batch_op.create_index(batch_op.f('ix_post_timestamp'), ['timestamp'], unique=False) batch_op.create_index(batch_op.f('ix_post_title'), ['title'], unique=True) op.create_table( 'software', sa.Column('id', sa.Integer(), nullable=False), sa.Column('title', sa.String(length=200), nullable=True), sa.Column('description', sa.String(length=800), nullable=True), sa.Column('downloadLink', sa.String(length=300), nullable=True), sa.Column('activeDevelopment', sa.String(length=200), nullable=True), sa.Column('license', sa.String(length=200), nullable=True), sa.Column('owner', sa.String(length=200), nullable=True), sa.Column('dateCreation', sa.String(length=300), nullable=True), sa.Column('dateRelease', sa.String(length=300), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_software_user_id_user')), sa.PrimaryKeyConstraint('id', name=op.f('pk_software'))) with op.batch_alter_table('software', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_software_activeDevelopment'), ['activeDevelopment'], unique=False) batch_op.create_index(batch_op.f('ix_software_dateCreation'), ['dateCreation'], unique=False) batch_op.create_index(batch_op.f('ix_software_dateRelease'), ['dateRelease'], unique=False) batch_op.create_index(batch_op.f('ix_software_description'), ['description'], unique=False) batch_op.create_index(batch_op.f('ix_software_downloadLink'), ['downloadLink'], unique=False) 
batch_op.create_index(batch_op.f('ix_software_license'), ['license'], unique=False) batch_op.create_index(batch_op.f('ix_software_owner'), ['owner'], unique=False) batch_op.create_index(batch_op.f('ix_software_timestamp'), ['timestamp'], unique=False) batch_op.create_index(batch_op.f('ix_software_title'), ['title'], unique=True) op.create_table( 'category', sa.Column('id', sa.Integer(), nullable=False), sa.Column('category', sa.String(length=200), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('postCategory_id', sa.Integer(), nullable=True), sa.Column('softwareCategory_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['postCategory_id'], ['post.id'], name=op.f('fk_category_postCategory_id_post')), sa.ForeignKeyConstraint( ['softwareCategory_id'], ['software.id'], name=op.f('fk_category_softwareCategory_id_software')), sa.PrimaryKeyConstraint('id', name=op.f('pk_category'))) with op.batch_alter_table('category', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_category_category'), ['category'], unique=False) batch_op.create_index(batch_op.f('ix_category_timestamp'), ['timestamp'], unique=False) op.create_table( 'comment', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=200), nullable=True), sa.Column('email', sa.String(length=200), nullable=True), sa.Column('text', sa.String(length=600), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('postComment_id', sa.Integer(), nullable=True), sa.Column('softwareComment_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['postComment_id'], ['post.id'], name=op.f('fk_comment_postComment_id_post')), sa.ForeignKeyConstraint( ['softwareComment_id'], ['software.id'], name=op.f('fk_comment_softwareComment_id_software')), sa.PrimaryKeyConstraint('id', name=op.f('pk_comment'))) with op.batch_alter_table('comment', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_comment_email'), ['email'], unique=False) batch_op.create_index(batch_op.f('ix_comment_name'), ['name'], unique=False) batch_op.create_index(batch_op.f('ix_comment_text'), ['text'], unique=False) batch_op.create_index(batch_op.f('ix_comment_timestamp'), ['timestamp'], unique=False) op.create_table( 'favorites_post', sa.Column('favorite_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint( ['favorite_id'], ['post.id'], name=op.f('fk_favorites_post_favorite_id_post'))) op.create_table( 'favorites_software', sa.Column('favorite_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint( ['favorite_id'], ['software.id'], name=op.f('fk_favorites_software_favorite_id_software'))) op.create_table( 'report', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=200), nullable=True), sa.Column('description', sa.String(length=500), nullable=True), sa.Column('type', sa.String(length=200), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('postReport_id', sa.Integer(), nullable=True), sa.Column('softwareReport_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['postReport_id'], ['post.id'], name=op.f('fk_report_postReport_id_post')), sa.ForeignKeyConstraint( ['softwareReport_id'], ['software.id'], name=op.f('fk_report_softwareReport_id_software')), sa.PrimaryKeyConstraint('id', name=op.f('pk_report'))) with op.batch_alter_table('report', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_report_description'), ['description'], unique=False) 
batch_op.create_index(batch_op.f('ix_report_name'), ['name'], unique=False) batch_op.create_index(batch_op.f('ix_report_timestamp'), ['timestamp'], unique=False) batch_op.create_index(batch_op.f('ix_report_type'), ['type'], unique=False) op.create_table( 'similar', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=200), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('postSimilar_id', sa.Integer(), nullable=True), sa.Column('softwareSimilare_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['postSimilar_id'], ['post.id'], name=op.f('fk_similar_postSimilar_id_post')), sa.ForeignKeyConstraint( ['softwareSimilare_id'], ['software.id'], name=op.f('fk_similar_softwareSimilare_id_software')), sa.PrimaryKeyConstraint('id', name=op.f('pk_similar'))) with op.batch_alter_table('similar', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_similar_name'), ['name'], unique=False) batch_op.create_index(batch_op.f('ix_similar_timestamp'), ['timestamp'], unique=False) op.create_table( 'tag', sa.Column('id', sa.Integer(), nullable=False), sa.Column('tag', sa.String(length=200), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('postTag_id', sa.Integer(), nullable=True), sa.Column('softwareTag_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['postTag_id'], ['post.id'], name=op.f('fk_tag_postTag_id_post')), sa.ForeignKeyConstraint(['softwareTag_id'], ['software.id'], name=op.f('fk_tag_softwareTag_id_software')), sa.PrimaryKeyConstraint('id', name=op.f('pk_tag'))) with op.batch_alter_table('tag', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_tag_tag'), ['tag'], unique=False) batch_op.create_index(batch_op.f('ix_tag_timestamp'), ['timestamp'], unique=False)
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('tags', schema=None) as batch_op:
        batch_op.drop_constraint(batch_op.f('fk_tags_software_id_software'), type_='foreignkey')
        batch_op.drop_column('software_id')
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('match', schema=None) as batch_op:
        batch_op.add_column(sa.Column('scoresheet', sa.String(), nullable=True))
def downgrade(): with op.batch_alter_table("misc") as batch_op: batch_op.drop_column('daemon_debug_mode')
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.drop_index('ix_user_token')
        batch_op.drop_column('token_expiration')
        batch_op.drop_column('token')
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('match', schema=None) as batch_op:
        batch_op.drop_column('scoresheet')
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('question', schema=None) as batch_op:
        batch_op.drop_constraint(batch_op.f('fk_question_user_id_user'), type_='foreignkey')
        batch_op.drop_column('user_id')
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('users', schema=None) as batch_op:
        batch_op.drop_column('last_recived_comments_read_time')
def upgrade():
    # SQLite does not support most ALTER TABLE operations, so column changes have to
    # go through batch_alter_table (which copies the table and swaps it in).
    with op.batch_alter_table('blacklist') as batch_op:
        batch_op.alter_column('added_at', nullable=False, server_default=sa.func.current_timestamp())
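# A minimal sketch of a matching downgrade for the snippet above, assuming 'added_at'
# was previously nullable with no server default; the table and column names come from
# the upgrade, while the assumed existing type (DateTime) is hypothetical.
import sqlalchemy as sa
from alembic import op


def downgrade():
    with op.batch_alter_table('blacklist') as batch_op:
        # Revert the NOT NULL constraint and drop the default added in upgrade().
        batch_op.alter_column('added_at', nullable=True, server_default=None,
                              existing_type=sa.DateTime())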
def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table( 'patients', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('case_number', sa.String(length=255), nullable=True), sa.Column('sex', sa.String(length=255), nullable=False), sa.Column('date', sa.Date(), nullable=False), sa.Column('address', sa.String(length=255), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=True), sa.PrimaryKeyConstraint('id', name=op.f('pk_patients'))) with op.batch_alter_table('patients', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_patients_timestamp'), ['timestamp'], unique=False) op.create_table('roles', sa.Column('id', sa.Integer(), nullable=False), sa.Column('slug', sa.String(length=255), nullable=True), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('default', sa.Boolean(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=True), sa.PrimaryKeyConstraint('id', name=op.f('pk_roles')), sa.UniqueConstraint('slug', name=op.f('uq_roles_slug'))) with op.batch_alter_table('roles', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_roles_default'), ['default'], unique=False) op.create_table( 'diseases', sa.Column('id', sa.Integer(), nullable=False), sa.Column('collected_date', sa.DateTime(), nullable=True), sa.Column('age', sa.Integer(), nullable=True), sa.Column('disease_type', sa.String(length=255), nullable=True), sa.Column('type', sa.String(length=255), nullable=True), sa.Column('tnm', sa.String(length=255), nullable=True), sa.Column('period', sa.String(length=255), nullable=True), sa.Column('pathological_immunohistochemistry', sa.String(length=255), nullable=True), sa.Column('operation_date', sa.DateTime(), nullable=True), sa.Column('pathological_information', sa.String(length=255), nullable=True), sa.Column('Typing', sa.String(length=255), nullable=True), sa.Column('hypertension', sa.String(length=255), nullable=True), sa.Column('diabetes', sa.String(length=255), nullable=True), sa.Column('history_of_cancer', sa.String(length=255), nullable=True), sa.Column('systemic_diseases', sa.String(length=255), nullable=True), sa.Column('family_history', sa.String(length=255), nullable=True), sa.Column('antiviral_therapy', sa.String(length=255), nullable=True), sa.Column('preoperative_tumor_treatment', sa.String(length=255), nullable=True), sa.Column('blood_lipids', sa.String(length=255), nullable=True), sa.Column('biochemical_indicators', sa.String(length=255), nullable=True), sa.Column('lymphocyte', sa.String(length=255), nullable=True), sa.Column('Neutrophils', sa.String(length=255), nullable=True), sa.Column('after_AEP', sa.String(length=255), nullable=True), sa.Column('after_CEA', sa.String(length=255), nullable=True), sa.Column('after_CA19_9', sa.String(length=255), nullable=True), sa.Column('HBV_DNA', sa.String(length=255), nullable=True), sa.Column('hepatitis_B_surface_antigen', sa.String(length=255), nullable=True), sa.Column('surface_antibody', sa.String(length=255), nullable=True), sa.Column('E_antigen', sa.String(length=255), nullable=True), sa.Column('E_antibody', sa.String(length=255), nullable=True), sa.Column('core_antibody', sa.String(length=255), nullable=True), sa.Column('smoking', sa.String(length=255), nullable=True), sa.Column('treatment', sa.Text(), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=True), 
sa.Column('patient_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['patient_id'], ['patients.id'], name=op.f('fk_diseases_patient_id_patients')), sa.PrimaryKeyConstraint('id', name=op.f('pk_diseases'))) with op.batch_alter_table('diseases', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_diseases_timestamp'), ['timestamp'], unique=False) op.create_table( 'users', sa.Column('id', sa.Integer(), nullable=False), sa.Column('username', sa.String(length=64), nullable=True), sa.Column('email', sa.String(length=120), nullable=True), sa.Column('password_hash', sa.String(length=128), nullable=True), sa.Column('name', sa.String(length=64), nullable=True), sa.Column('location', sa.String(length=64), nullable=True), sa.Column('about_me', sa.Text(), nullable=True), sa.Column('member_since', sa.DateTime(), nullable=True), sa.Column('last_seen', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=True), sa.Column('confirmed', sa.Boolean(), nullable=True), sa.Column('role_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['role_id'], ['roles.id'], name=op.f('fk_users_role_id_roles')), sa.PrimaryKeyConstraint('id', name=op.f('pk_users'))) with op.batch_alter_table('users', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_users_email'), ['email'], unique=True) batch_op.create_index(batch_op.f('ix_users_username'), ['username'], unique=True) op.create_table( 'sequence', sa.Column('sequence_id', sa.Integer(), nullable=False), sa.Column('batch', sa.String(length=255), nullable=True), sa.Column('id', sa.String(length=255), nullable=True), sa.Column('gao_lab_id', sa.String(length=255), nullable=True), sa.Column('introduction', sa.Text(), nullable=True), sa.Column('sample_origin', sa.String(length=255), nullable=True), sa.Column('collected_date', sa.DATE(), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=True), sa.Column('author_id', sa.Integer(), nullable=True), sa.Column('disease_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['author_id'], ['users.id'], name=op.f('fk_sequence_author_id_users')), sa.ForeignKeyConstraint(['disease_id'], ['diseases.id'], name=op.f('fk_sequence_disease_id_diseases')), sa.PrimaryKeyConstraint('sequence_id', name=op.f('pk_sequence'))) with op.batch_alter_table('sequence', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_sequence_timestamp'), ['timestamp'], unique=False) op.create_table( 'results', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name_1', sa.String(length=255), nullable=True), sa.Column('data_quality_input', sa.Float(), nullable=True), sa.Column('data_quality_bam', sa.Float(), nullable=True), sa.Column('data_quality_bam_input', sa.Float(), nullable=True), sa.Column('data_Quality_uniq_bam', sa.Float(), nullable=True), sa.Column('data_Quality_uniq_nodup_bam', sa.Float(), nullable=True), sa.Column('data_Quality_uniq_nodup_bam_input', sa.Float(), nullable=True), sa.Column('coverage', sa.Float(), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=True), sa.Column('sequence_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['sequence_id'], ['sequence.id'], name=op.f('fk_results_sequence_id_sequence')), sa.PrimaryKeyConstraint('id', name=op.f('pk_results'))) with op.batch_alter_table('results', schema=None) as batch_op: batch_op.create_index(batch_op.f('ix_results_timestamp'), ['timestamp'], unique=False)
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('person_has_role', schema=None) as batch_op:
        batch_op.drop_constraint('_person_has_role_function_uc', type_='unique')
        batch_op.drop_column('function')
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('question', schema=None) as batch_op:
        batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=False))
        batch_op.create_foreign_key(batch_op.f('fk_question_user_id_user'), 'user', ['user_id'], ['id'], ondelete='CASCADE')
def downgrade(): with op.batch_alter_table("puppet") as batch_op: batch_op.drop_column('is_bot')
def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table( "khandaker_mojo_medication_item", sa.Column( "medicationtable_id", sa.Integer(), nullable=False, comment="FK to medicationtable", ), sa.Column( "seqnum", sa.Integer(), nullable=False, comment="Sequence number of this medication", ), sa.Column( "medication_name", sa.UnicodeText(), nullable=True, comment="Medication name", ), sa.Column( "chemical_name", sa.UnicodeText(), nullable=True, comment="Chemical name for study team", ), sa.Column("dose", sa.UnicodeText(), nullable=True, comment="Dose"), sa.Column("frequency", sa.UnicodeText(), nullable=True, comment="Frequency"), sa.Column( "duration_months", sa.Float(), nullable=True, comment="Duration (months)", ), sa.Column( "indication", sa.UnicodeText(), nullable=True, comment="Indication (what is the medication used for?)", ), sa.Column( "response", sa.Integer(), nullable=True, comment=("1 = treats all symptoms, 2 = most symptoms, 3 = some" " symptoms, 4 = no symptoms)"), ), sa.Column( "_pk", sa.Integer(), autoincrement=True, nullable=False, comment="(SERVER) Primary key (on the server)", ), sa.Column( "_device_id", sa.Integer(), nullable=False, comment="(SERVER) ID of the source tablet device", ), sa.Column( "_era", sa.String(length=32), nullable=False, comment=( "(SERVER) 'NOW', or when this row was preserved and removed" " from the source device (UTC ISO 8601)"), ), sa.Column( "_current", sa.Boolean(), nullable=False, comment="(SERVER) Is the row current (1) or not (0)?", ), sa.Column( "_when_added_exact", PendulumDateTimeAsIsoTextColType(length=32), nullable=True, comment="(SERVER) Date/time this row was added (ISO 8601)", ), sa.Column( "_when_added_batch_utc", sa.DateTime(), nullable=True, comment=( "(SERVER) Date/time of the upload batch that added this row" " (DATETIME in UTC)"), ), sa.Column( "_adding_user_id", sa.Integer(), nullable=True, comment="(SERVER) ID of user that added this row", ), sa.Column( "_when_removed_exact", PendulumDateTimeAsIsoTextColType(length=32), nullable=True, comment=("(SERVER) Date/time this row was removed, i.e. 
made not" " current (ISO 8601)"), ), sa.Column( "_when_removed_batch_utc", sa.DateTime(), nullable=True, comment=( "(SERVER) Date/time of the upload batch that removed this row" " (DATETIME in UTC)"), ), sa.Column( "_removing_user_id", sa.Integer(), nullable=True, comment="(SERVER) ID of user that removed this row", ), sa.Column( "_preserving_user_id", sa.Integer(), nullable=True, comment="(SERVER) ID of user that preserved this row", ), sa.Column( "_forcibly_preserved", sa.Boolean(), nullable=True, comment=("(SERVER) Forcibly preserved by superuser (rather than" " normally preserved by tablet)?"), ), sa.Column( "_predecessor_pk", sa.Integer(), nullable=True, comment="(SERVER) PK of predecessor record, prior to modification", ), sa.Column( "_successor_pk", sa.Integer(), nullable=True, comment=( "(SERVER) PK of successor record (after modification) or NULL" " (whilst live, or after deletion)"), ), sa.Column( "_manually_erased", sa.Boolean(), nullable=True, comment="(SERVER) Record manually erased (content destroyed)?", ), sa.Column( "_manually_erased_at", PendulumDateTimeAsIsoTextColType(length=32), nullable=True, comment="(SERVER) Date/time of manual erasure (ISO 8601)", ), sa.Column( "_manually_erasing_user_id", sa.Integer(), nullable=True, comment="(SERVER) ID of user that erased this row manually", ), sa.Column( "_camcops_version", SemanticVersionColType(length=147), nullable=True, comment="(SERVER) CamCOPS version number of the uploading device", ), sa.Column( "_addition_pending", sa.Boolean(), nullable=False, comment="(SERVER) Addition pending?", ), sa.Column( "_removal_pending", sa.Boolean(), nullable=True, comment="(SERVER) Removal pending?", ), sa.Column( "_group_id", sa.Integer(), nullable=False, comment="(SERVER) ID of group to which this record belongs", ), sa.Column( "id", sa.Integer(), nullable=False, comment="(TASK) Primary key (task ID) on the tablet device", ), sa.Column( "when_last_modified", PendulumDateTimeAsIsoTextColType(length=32), nullable=True, comment=( "(STANDARD) Date/time this row was last modified on the source" " tablet device (ISO 8601)"), ), sa.Column( "_move_off_tablet", sa.Boolean(), nullable=True, comment="(SERVER/TABLET) Record-specific preservation pending?", ), sa.ForeignKeyConstraint( ["_adding_user_id"], ["_security_users.id"], name=op.f("fk_khandaker_mojo_medication_item__adding_user_id"), ), sa.ForeignKeyConstraint( ["_device_id"], ["_security_devices.id"], name=op.f("fk_khandaker_mojo_medication_item__device_id"), ), sa.ForeignKeyConstraint( ["_group_id"], ["_security_groups.id"], name=op.f("fk_khandaker_mojo_medication_item__group_id"), ), sa.ForeignKeyConstraint( ["_manually_erasing_user_id"], ["_security_users.id"], name=op.f( "fk_khandaker_mojo_medication_item__manually_erasing_user_id"), ), sa.ForeignKeyConstraint( ["_preserving_user_id"], ["_security_users.id"], name=op.f("fk_khandaker_mojo_medication_item__preserving_user_id"), ), sa.ForeignKeyConstraint( ["_removing_user_id"], ["_security_users.id"], name=op.f("fk_khandaker_mojo_medication_item__removing_user_id"), ), sa.PrimaryKeyConstraint( "_pk", name=op.f("pk_khandaker_mojo_medication_item")), mysql_charset="utf8mb4 COLLATE utf8mb4_unicode_ci", mysql_engine="InnoDB", mysql_row_format="DYNAMIC", ) with op.batch_alter_table("khandaker_mojo_medication_item", schema=None) as batch_op: batch_op.create_index( batch_op.f("ix_khandaker_mojo_medication_item__current"), ["_current"], unique=False, ) batch_op.create_index( batch_op.f("ix_khandaker_mojo_medication_item__device_id"), 
["_device_id"], unique=False, ) batch_op.create_index( batch_op.f("ix_khandaker_mojo_medication_item__era"), ["_era"], unique=False, ) batch_op.create_index( batch_op.f("ix_khandaker_mojo_medication_item__group_id"), ["_group_id"], unique=False, ) batch_op.create_index( batch_op.f("ix_khandaker_mojo_medication_item__pk"), ["_pk"], unique=False, ) batch_op.create_index( batch_op.f("ix_khandaker_mojo_medication_item_id"), ["id"], unique=False, ) batch_op.create_index( batch_op.f("ix_khandaker_mojo_medication_item_when_last_modified"), ["when_last_modified"], unique=False, ) op.create_table( "khandaker_mojo_medicationtherapy", sa.Column( "patient_id", sa.Integer(), nullable=False, comment="(TASK) Foreign key to patient.id (for this device/era)", ), sa.Column( "when_created", PendulumDateTimeAsIsoTextColType(length=32), nullable=False, comment=( "(TASK) Date/time this task instance was created (ISO 8601)"), ), sa.Column( "when_firstexit", PendulumDateTimeAsIsoTextColType(length=32), nullable=True, comment=( "(TASK) Date/time of the first exit from this task (ISO 8601)" ), ), sa.Column( "firstexit_is_finish", sa.Boolean(), nullable=True, comment=("(TASK) Was the first exit from the task because it was" " finished (1)?"), ), sa.Column( "firstexit_is_abort", sa.Boolean(), nullable=True, comment=("(TASK) Was the first exit from this task because it was" " aborted (1)?"), ), sa.Column( "editing_time_s", sa.Float(), nullable=True, comment="(TASK) Time spent editing (s)", ), sa.Column( "_pk", sa.Integer(), autoincrement=True, nullable=False, comment="(SERVER) Primary key (on the server)", ), sa.Column( "_device_id", sa.Integer(), nullable=False, comment="(SERVER) ID of the source tablet device", ), sa.Column( "_era", sa.String(length=32), nullable=False, comment=( "(SERVER) 'NOW', or when this row was preserved and removed" " from the source device (UTC ISO 8601)"), ), sa.Column( "_current", sa.Boolean(), nullable=False, comment="(SERVER) Is the row current (1) or not (0)?", ), sa.Column( "_when_added_exact", PendulumDateTimeAsIsoTextColType(length=32), nullable=True, comment="(SERVER) Date/time this row was added (ISO 8601)", ), sa.Column( "_when_added_batch_utc", sa.DateTime(), nullable=True, comment=( "(SERVER) Date/time of the upload batch that added this row" " (DATETIME in UTC)"), ), sa.Column( "_adding_user_id", sa.Integer(), nullable=True, comment="(SERVER) ID of user that added this row", ), sa.Column( "_when_removed_exact", PendulumDateTimeAsIsoTextColType(length=32), nullable=True, comment=("(SERVER) Date/time this row was removed, i.e. 
made not" " current (ISO 8601)"), ), sa.Column( "_when_removed_batch_utc", sa.DateTime(), nullable=True, comment=( "(SERVER) Date/time of the upload batch that removed this row" " (DATETIME in UTC)"), ), sa.Column( "_removing_user_id", sa.Integer(), nullable=True, comment="(SERVER) ID of user that removed this row", ), sa.Column( "_preserving_user_id", sa.Integer(), nullable=True, comment="(SERVER) ID of user that preserved this row", ), sa.Column( "_forcibly_preserved", sa.Boolean(), nullable=True, comment=("(SERVER) Forcibly preserved by superuser (rather than" " normally preserved by tablet)?"), ), sa.Column( "_predecessor_pk", sa.Integer(), nullable=True, comment="(SERVER) PK of predecessor record, prior to modification", ), sa.Column( "_successor_pk", sa.Integer(), nullable=True, comment=( "(SERVER) PK of successor record (after modification) or NULL" " (whilst live, or after deletion)"), ), sa.Column( "_manually_erased", sa.Boolean(), nullable=True, comment="(SERVER) Record manually erased (content destroyed)?", ), sa.Column( "_manually_erased_at", PendulumDateTimeAsIsoTextColType(length=32), nullable=True, comment="(SERVER) Date/time of manual erasure (ISO 8601)", ), sa.Column( "_manually_erasing_user_id", sa.Integer(), nullable=True, comment="(SERVER) ID of user that erased this row manually", ), sa.Column( "_camcops_version", SemanticVersionColType(length=147), nullable=True, comment="(SERVER) CamCOPS version number of the uploading device", ), sa.Column( "_addition_pending", sa.Boolean(), nullable=False, comment="(SERVER) Addition pending?", ), sa.Column( "_removal_pending", sa.Boolean(), nullable=True, comment="(SERVER) Removal pending?", ), sa.Column( "_group_id", sa.Integer(), nullable=False, comment="(SERVER) ID of group to which this record belongs", ), sa.Column( "id", sa.Integer(), nullable=False, comment="(TASK) Primary key (task ID) on the tablet device", ), sa.Column( "when_last_modified", PendulumDateTimeAsIsoTextColType(length=32), nullable=True, comment=( "(STANDARD) Date/time this row was last modified on the source" " tablet device (ISO 8601)"), ), sa.Column( "_move_off_tablet", sa.Boolean(), nullable=True, comment="(SERVER/TABLET) Record-specific preservation pending?", ), sa.ForeignKeyConstraint( ["_adding_user_id"], ["_security_users.id"], name=op.f("fk_khandaker_mojo_medicationtherapy__adding_user_id"), ), sa.ForeignKeyConstraint( ["_device_id"], ["_security_devices.id"], name=op.f("fk_khandaker_mojo_medicationtherapy__device_id"), ), sa.ForeignKeyConstraint( ["_group_id"], ["_security_groups.id"], name=op.f("fk_khandaker_mojo_medicationtherapy__group_id"), ), sa.ForeignKeyConstraint( ["_manually_erasing_user_id"], ["_security_users.id"], name=op. 
f("fk_khandaker_mojo_medicationtherapy__manually_erasing_user_id"), ), sa.ForeignKeyConstraint( ["_preserving_user_id"], ["_security_users.id"], name=op.f( "fk_khandaker_mojo_medicationtherapy__preserving_user_id"), ), sa.ForeignKeyConstraint( ["_removing_user_id"], ["_security_users.id"], name=op.f("fk_khandaker_mojo_medicationtherapy__removing_user_id"), ), sa.PrimaryKeyConstraint( "_pk", name=op.f("pk_khandaker_mojo_medicationtherapy")), mysql_charset="utf8mb4 COLLATE utf8mb4_unicode_ci", mysql_engine="InnoDB", mysql_row_format="DYNAMIC", ) with op.batch_alter_table("khandaker_mojo_medicationtherapy", schema=None) as batch_op: batch_op.create_index( batch_op.f("ix_khandaker_mojo_medicationtherapy__current"), ["_current"], unique=False, ) batch_op.create_index( batch_op.f("ix_khandaker_mojo_medicationtherapy__device_id"), ["_device_id"], unique=False, ) batch_op.create_index( batch_op.f("ix_khandaker_mojo_medicationtherapy__era"), ["_era"], unique=False, ) batch_op.create_index( batch_op.f("ix_khandaker_mojo_medicationtherapy__group_id"), ["_group_id"], unique=False, ) batch_op.create_index( batch_op.f("ix_khandaker_mojo_medicationtherapy__pk"), ["_pk"], unique=False, ) batch_op.create_index( batch_op.f("ix_khandaker_mojo_medicationtherapy_id"), ["id"], unique=False, ) batch_op.create_index( batch_op.f("ix_khandaker_mojo_medicationtherapy_patient_id"), ["patient_id"], unique=False, ) batch_op.create_index( batch_op.f( "ix_khandaker_mojo_medicationtherapy_when_last_modified"), ["when_last_modified"], unique=False, ) op.create_table( "khandaker_mojo_therapy_item", sa.Column( "medicationtable_id", sa.Integer(), nullable=False, comment="FK to medicationtable", ), sa.Column( "seqnum", sa.Integer(), nullable=False, comment="Sequence number of this therapy", ), sa.Column("therapy", sa.UnicodeText(), nullable=True, comment="Therapy"), sa.Column("frequency", sa.UnicodeText(), nullable=True, comment="Frequency"), sa.Column( "sessions_completed", sa.Integer(), nullable=True, comment="Sessions completed", ), sa.Column( "sessions_planned", sa.Integer(), nullable=True, comment="Total number of sessions planned", ), sa.Column( "indication", sa.UnicodeText(), nullable=True, comment="Indication (what is the medication used for?)", ), sa.Column( "response", sa.Integer(), nullable=True, comment=("1 = treats all symptoms, 2 = most symptoms, 3 = some" " symptoms, 4 = no symptoms)"), ), sa.Column( "_pk", sa.Integer(), autoincrement=True, nullable=False, comment="(SERVER) Primary key (on the server)", ), sa.Column( "_device_id", sa.Integer(), nullable=False, comment="(SERVER) ID of the source tablet device", ), sa.Column( "_era", sa.String(length=32), nullable=False, comment=( "(SERVER) 'NOW', or when this row was preserved and removed" " from the source device (UTC ISO 8601)"), ), sa.Column( "_current", sa.Boolean(), nullable=False, comment="(SERVER) Is the row current (1) or not (0)?", ), sa.Column( "_when_added_exact", PendulumDateTimeAsIsoTextColType(length=32), nullable=True, comment="(SERVER) Date/time this row was added (ISO 8601)", ), sa.Column( "_when_added_batch_utc", sa.DateTime(), nullable=True, comment=( "(SERVER) Date/time of the upload batch that added this row" " (DATETIME in UTC)"), ), sa.Column( "_adding_user_id", sa.Integer(), nullable=True, comment="(SERVER) ID of user that added this row", ), sa.Column( "_when_removed_exact", PendulumDateTimeAsIsoTextColType(length=32), nullable=True, comment=("(SERVER) Date/time this row was removed, i.e. 
made not" " current (ISO 8601)"), ), sa.Column( "_when_removed_batch_utc", sa.DateTime(), nullable=True, comment=( "(SERVER) Date/time of the upload batch that removed this row" " (DATETIME in UTC)"), ), sa.Column( "_removing_user_id", sa.Integer(), nullable=True, comment="(SERVER) ID of user that removed this row", ), sa.Column( "_preserving_user_id", sa.Integer(), nullable=True, comment="(SERVER) ID of user that preserved this row", ), sa.Column( "_forcibly_preserved", sa.Boolean(), nullable=True, comment=("(SERVER) Forcibly preserved by superuser (rather than" " normally preserved by tablet)?"), ), sa.Column( "_predecessor_pk", sa.Integer(), nullable=True, comment="(SERVER) PK of predecessor record, prior to modification", ), sa.Column( "_successor_pk", sa.Integer(), nullable=True, comment=( "(SERVER) PK of successor record (after modification) or NULL" " (whilst live, or after deletion)"), ), sa.Column( "_manually_erased", sa.Boolean(), nullable=True, comment="(SERVER) Record manually erased (content destroyed)?", ), sa.Column( "_manually_erased_at", PendulumDateTimeAsIsoTextColType(length=32), nullable=True, comment="(SERVER) Date/time of manual erasure (ISO 8601)", ), sa.Column( "_manually_erasing_user_id", sa.Integer(), nullable=True, comment="(SERVER) ID of user that erased this row manually", ), sa.Column( "_camcops_version", SemanticVersionColType(length=147), nullable=True, comment="(SERVER) CamCOPS version number of the uploading device", ), sa.Column( "_addition_pending", sa.Boolean(), nullable=False, comment="(SERVER) Addition pending?", ), sa.Column( "_removal_pending", sa.Boolean(), nullable=True, comment="(SERVER) Removal pending?", ), sa.Column( "_group_id", sa.Integer(), nullable=False, comment="(SERVER) ID of group to which this record belongs", ), sa.Column( "id", sa.Integer(), nullable=False, comment="(TASK) Primary key (task ID) on the tablet device", ), sa.Column( "when_last_modified", PendulumDateTimeAsIsoTextColType(length=32), nullable=True, comment=( "(STANDARD) Date/time this row was last modified on the source" " tablet device (ISO 8601)"), ), sa.Column( "_move_off_tablet", sa.Boolean(), nullable=True, comment="(SERVER/TABLET) Record-specific preservation pending?", ), sa.ForeignKeyConstraint( ["_adding_user_id"], ["_security_users.id"], name=op.f("fk_khandaker_mojo_therapy_item__adding_user_id"), ), sa.ForeignKeyConstraint( ["_device_id"], ["_security_devices.id"], name=op.f("fk_khandaker_mojo_therapy_item__device_id"), ), sa.ForeignKeyConstraint( ["_group_id"], ["_security_groups.id"], name=op.f("fk_khandaker_mojo_therapy_item__group_id"), ), sa.ForeignKeyConstraint( ["_manually_erasing_user_id"], ["_security_users.id"], name=op.f( "fk_khandaker_mojo_therapy_item__manually_erasing_user_id"), ), sa.ForeignKeyConstraint( ["_preserving_user_id"], ["_security_users.id"], name=op.f("fk_khandaker_mojo_therapy_item__preserving_user_id"), ), sa.ForeignKeyConstraint( ["_removing_user_id"], ["_security_users.id"], name=op.f("fk_khandaker_mojo_therapy_item__removing_user_id"), ), sa.PrimaryKeyConstraint("_pk", name=op.f("pk_khandaker_mojo_therapy_item")), mysql_charset="utf8mb4 COLLATE utf8mb4_unicode_ci", mysql_engine="InnoDB", mysql_row_format="DYNAMIC", ) with op.batch_alter_table("khandaker_mojo_therapy_item", schema=None) as batch_op: batch_op.create_index( batch_op.f("ix_khandaker_mojo_therapy_item__current"), ["_current"], unique=False, ) batch_op.create_index( batch_op.f("ix_khandaker_mojo_therapy_item__device_id"), ["_device_id"], unique=False, ) 
        batch_op.create_index(
            batch_op.f("ix_khandaker_mojo_therapy_item__era"),
            ["_era"],
            unique=False,
        )
        batch_op.create_index(
            batch_op.f("ix_khandaker_mojo_therapy_item__group_id"),
            ["_group_id"],
            unique=False,
        )
        batch_op.create_index(
            batch_op.f("ix_khandaker_mojo_therapy_item__pk"),
            ["_pk"],
            unique=False,
        )
        batch_op.create_index(
            batch_op.f("ix_khandaker_mojo_therapy_item_id"),
            ["id"],
            unique=False,
        )
        batch_op.create_index(
            batch_op.f("ix_khandaker_mojo_therapy_item_when_last_modified"),
            ["when_last_modified"],
            unique=False,
        )
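# The matching downgrade for the create_table/create_index calls above is not
# shown here. A minimal sketch, assuming this revision introduces only the
# khandaker_mojo_* tables (including khandaker_mojo_medication_item, whose
# indexes are created earlier in this upgrade); dropping a table also removes
# its indexes, so no explicit drop_index calls are needed:
def downgrade():
    op.drop_table("khandaker_mojo_therapy_item")
    op.drop_table("khandaker_mojo_medicationtherapy")
    op.drop_table("khandaker_mojo_medication_item")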
def upgrade():
    with op.batch_alter_table("puppet") as batch_op:
        batch_op.add_column(sa.Column('is_bot', sa.Boolean(), nullable=True))
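# A corresponding downgrade is not included above. A minimal sketch, assuming
# the only change to reverse is the added 'is_bot' column:
def downgrade():
    with op.batch_alter_table("puppet") as batch_op:
        batch_op.drop_column('is_bot')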
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('releases', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_releases_timestamp'))

    op.drop_table('releases')