def upgrade():
    """Create the initial catalog schema: brands, categories, users, products."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'brands',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('name', sa.String(length=100), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
    )
    # Categories are self-referential: a deleted parent orphans its children
    # gracefully (SET NULL) instead of cascading.
    op.create_table(
        'categories',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('name', sa.String(length=100), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['categories.id'], ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
    )
    op.create_table(
        'users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('firstname', sa.String(length=100), nullable=False),
        sa.Column('lastname', sa.String(length=100), nullable=False),
        sa.Column('email', sa.String(length=100), nullable=False),
        sa.Column('password', sa.String(length=250), nullable=False),
        sa.Column('is_admin', sa.Boolean(), nullable=False),
        sa.Column('is_activated', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
    )
    # Products must belong to a category (NOT NULL FK); the brand link is
    # optional and survives brand deletion via SET NULL.
    op.create_table(
        'products',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('name', sa.String(length=100), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('main_image', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('images', postgresql.ARRAY(postgresql.JSON(astext_type=sa.Text())), nullable=True),
        sa.Column('category_id', sa.Integer(), nullable=False),
        sa.Column('brand_id', sa.Integer(), nullable=True),
        sa.Column('price', sa.DECIMAL(precision=12, scale=2), nullable=False),
        sa.Column('quantity', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['brand_id'], ['brands.id'], ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['category_id'], ['categories.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
    )
def upgrade():
    """Create the registration_requests table in the 'users' schema."""
    op.create_table(
        'registration_requests',
        sa.Column('id', sa.Integer(), nullable=False),
        # Uniqueness plus the lowercase check below make emails case-canonical.
        sa.Column('email', sa.String(), nullable=False, unique=True, index=True),
        sa.Column('extra_emails', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('comment', sa.Text(), nullable=False),
        sa.Column('user_data', postgresql.JSON(), nullable=False),
        sa.Column('identity_data', postgresql.JSON(), nullable=False),
        sa.Column('settings', postgresql.JSON(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.CheckConstraint("email = lower(email)", name='lowercase_email'),
        schema='users',
    )
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Each media table gets the same treatment: restore the legacy 'json'
    # blob column and drop the structured columns that replaced it.
    for table in ('video', 'document', 'audio'):
        op.add_column(table, sa.Column('json', postgresql.JSON(), autoincrement=False, nullable=True))
        op.drop_column(table, 'name')
        op.drop_column(table, 'media_process')
        op.drop_column(table, 'media_meta')
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Restore the two dropped JSON result columns on 'tasks'.
    for column_name in ('result_no_stop_words', 'result_all'):
        op.add_column(
            'tasks',
            sa.Column(column_name, postgresql.JSON(astext_type=sa.Text()),
                      autoincrement=False, nullable=True))
def upgrade():
    """Replace the 'properties' blob on transnet tables with typed columns."""
    # transnet_powerline: explode the old properties blob into typed columns.
    op.add_column('transnet_powerline', sa.Column('cables', sa.INTEGER(), nullable=True))
    op.add_column('transnet_powerline', sa.Column('lat', sa.INTEGER(), nullable=True))
    op.add_column('transnet_powerline', sa.Column('length', sa.INTEGER(), nullable=True))
    op.add_column('transnet_powerline', sa.Column('lon', sa.INTEGER(), nullable=True))
    op.add_column('transnet_powerline', sa.Column('name', sa.String(), nullable=True))
    op.add_column('transnet_powerline',
                  sa.Column('nodes', postgresql.ARRAY(sa.INTEGER()), nullable=True))
    op.add_column('transnet_powerline', sa.Column('osm_id', sa.INTEGER(), nullable=True))
    op.add_column('transnet_powerline', sa.Column('raw_geom', sa.String(), nullable=True))
    op.add_column('transnet_powerline', sa.Column('tags', postgresql.JSON(), nullable=True))
    op.add_column('transnet_powerline', sa.Column('type', sa.String(), nullable=True))
    op.add_column('transnet_powerline',
                  sa.Column('voltage', postgresql.ARRAY(sa.INTEGER()), nullable=True))
    op.drop_column('transnet_powerline', 'properties')
    # transnet_station: same reshaping.
    op.drop_column('transnet_station', 'properties')
    op.add_column('transnet_station', sa.Column('tags', postgresql.JSON(), nullable=True))
    op.add_column('transnet_station', sa.Column('raw_geom', sa.String(), nullable=True))
    op.add_column('transnet_station', sa.Column('lat', sa.INTEGER(), nullable=True))
    op.add_column('transnet_station', sa.Column('length', sa.INTEGER(), nullable=True))
    op.add_column('transnet_station', sa.Column('lon', sa.INTEGER(), nullable=True))
    op.add_column('transnet_station', sa.Column('name', sa.String(), nullable=True))
    op.add_column('transnet_station', sa.Column('osm_id', sa.INTEGER(), nullable=True))
    op.add_column('transnet_station',
                  sa.Column('nodes', postgresql.ARRAY(sa.INTEGER()), nullable=True))
    op.add_column('transnet_station',
                  sa.Column('voltage', postgresql.ARRAY(sa.INTEGER()), nullable=True))
    op.add_column('transnet_station', sa.Column('type', sa.String(), nullable=True))
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Revert the mental-health fields back to JSON and date_of_birth back
    # to a full timestamp.
    op.alter_column(
        'people', 'mental_health_triggers',
        existing_type=sa.String(),
        type_=postgresql.JSON(astext_type=sa.Text()),
        existing_nullable=True)
    op.alter_column(
        'people', 'mental_health_baseline_behavior',
        existing_type=sa.String(),
        type_=postgresql.JSON(astext_type=sa.Text()),
        existing_nullable=True)
    op.alter_column(
        'people', 'date_of_birth',
        existing_type=sa.Date(),
        type_=postgresql.TIMESTAMP(),
        existing_nullable=True)
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Both tables get the same four denormalized show-cache columns back,
    # in the same order the autogenerated script emitted them.
    for table in ('Venue', 'Artist'):
        op.add_column(
            table,
            sa.Column('upcoming_shows',
                      postgresql.ARRAY(postgresql.JSON(astext_type=sa.Text())),
                      autoincrement=False, nullable=True))
        op.add_column(
            table,
            sa.Column('past_shows',
                      postgresql.ARRAY(postgresql.JSON(astext_type=sa.Text())),
                      autoincrement=False, nullable=True))
        op.add_column(
            table,
            sa.Column('past_shows_count', sa.INTEGER(),
                      autoincrement=False, nullable=True))
        op.add_column(
            table,
            sa.Column('upcoming_shows_count', sa.INTEGER(),
                      autoincrement=False, nullable=True))
def upgrade():
    """Create the Artist, Venue and Show tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'Artist',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('city', sa.String(length=120), nullable=True),
        sa.Column('state', sa.String(length=120), nullable=True),
        sa.Column('phone', sa.String(length=120), nullable=True),
        sa.Column('genres', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('image_link', sa.String(length=500), nullable=True),
        sa.Column('facebook_link', sa.String(length=120), nullable=True),
        sa.Column('website', sa.String(length=120), nullable=True),
        sa.Column('seeking_venue', sa.Boolean(), nullable=True),
        sa.Column('seeking_description', sa.String(length=200), nullable=True),
        sa.Column('upcoming_shows', sa.Integer(), nullable=True),
        sa.Column('past_shows', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'Venue',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('city', sa.String(length=120), nullable=True),
        sa.Column('state', sa.String(length=120), nullable=True),
        sa.Column('address', sa.String(length=120), nullable=True),
        sa.Column('phone', sa.String(length=120), nullable=True),
        sa.Column('image_link', sa.String(length=500), nullable=True),
        sa.Column('facebook_link', sa.String(length=120), nullable=True),
        sa.Column('website', sa.String(length=120), nullable=True),
        sa.Column('genres', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('seeking_talent', sa.Boolean(), nullable=True),
        sa.Column('seeking_description', sa.String(length=200), nullable=True),
        sa.Column('upcoming_shows', sa.Integer(), nullable=True),
        sa.Column('past_shows', sa.Integer(), nullable=True),
        sa.Column('past_shows_count', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # Show is the association table; its composite PK means one booking
    # per (venue, artist) pair.
    op.create_table(
        'Show',
        sa.Column('venue_id', sa.Integer(), nullable=False),
        sa.Column('artist_id', sa.Integer(), nullable=False),
        sa.Column('start_time', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['artist_id'], ['Artist.id'], ),
        sa.ForeignKeyConstraint(['venue_id'], ['Venue.id'], ),
        sa.PrimaryKeyConstraint('venue_id', 'artist_id'),
    )
def upgrade():
    """Create activity_definitions and its sqlalchemy-continuum version table."""
    # ### commands auto generated by Alembic - please adjust! ###
    # History/version table: versioned rows keyed by (id, transaction_id).
    op.create_table(
        'activity_definitions_version',
        sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
        sa.Column('key', sa.String(), autoincrement=False, nullable=True),
        sa.Column('name', sa.String(), autoincrement=False, nullable=True),
        sa.Column('description', sa.Text(), autoincrement=False, nullable=True),
        sa.Column('case_definition_id', sa.Integer(), autoincrement=False, nullable=True),
        sa.Column('custom_fields', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True),
        sa.Column('created_at', sa.DateTime(), autoincrement=False, nullable=True),
        sa.Column('updated_at', sa.DateTime(), autoincrement=False, nullable=True),
        sa.Column('created_by_id', sa.Integer(), autoincrement=False, nullable=True),
        sa.Column('updated_by_id', sa.Integer(), autoincrement=False, nullable=True),
        sa.Column('transaction_id', sa.BigInteger(), autoincrement=False, nullable=False),
        sa.Column('end_transaction_id', sa.BigInteger(), nullable=True),
        sa.Column('operation_type', sa.SmallInteger(), nullable=False),
        sa.PrimaryKeyConstraint('id', 'transaction_id'),
    )
    for indexed_column in ('created_at', 'end_transaction_id', 'operation_type',
                           'transaction_id', 'updated_at'):
        op.create_index(
            op.f('ix_activity_definitions_version_%s' % indexed_column),
            'activity_definitions_version', [indexed_column], unique=False)
    # Live table with full constraints.
    op.create_table(
        'activity_definitions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('key', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('case_definition_id', sa.Integer(), nullable=False),
        sa.Column('custom_fields', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('created_by_id', sa.Integer(), nullable=False),
        sa.Column('updated_by_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['case_definition_id'], ['case_definitions.id'], ),
        sa.ForeignKeyConstraint(['created_by_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['updated_by_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('key'),
        sa.UniqueConstraint('name'),
    )
    op.create_index(op.f('ix_activity_definitions_created_at'),
                    'activity_definitions', ['created_at'], unique=False)
    op.create_index(op.f('ix_activity_definitions_updated_at'),
                    'activity_definitions', ['updated_at'], unique=False)
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Restore the old JSON columns on 'suggestions' ...
    for restored in ('related', 'narrower', 'alternative_label', 'group', 'broader'):
        op.add_column(
            'suggestions',
            sa.Column(restored, postgresql.JSON(astext_type=sa.Text()),
                      autoincrement=False, nullable=True))
    # ... and drop the columns that replaced them.
    for dropped in ('scopeNote', 'related_labels', 'neededFor', 'narrower_labels',
                    'groups', 'exactMatches', 'broader_labels', 'alternative_labels'):
        op.drop_column('suggestions', dropped)
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Re-create the dropped 'results' table exactly as it previously existed.
    op.create_table(
        'results',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('url', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('result_all', postgresql.JSON(astext_type=sa.Text()),
                  autoincrement=False, nullable=True),
        sa.Column('result_no_stop_words', postgresql.JSON(astext_type=sa.Text()),
                  autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='results_pkey'),
    )
def upgrade():
    """Collapse the four boolean permission flags into one JSON column."""
    op.add_column('permissions', sa.Column('permissions', postgresql.JSON(), nullable=True))
    # The USING clause back-fills the new column from the existing flags in
    # the same ALTER statement.
    op.alter_column(
        'permissions', 'permissions',
        type_=postgresql.JSON(),
        postgresql_using=
        "json_build_object('read',read,'write',write,'admin',admin,'owner',owner)"
    )
    for flag in ('read', 'write', 'admin', 'owner'):
        op.drop_column('permissions', flag)
def upgrade():
    """Create the employee and results tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'employee',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('last_name', sa.String(), nullable=True),
        sa.Column('first_name', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'results',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('url', sa.String(), nullable=True),
        sa.Column('result_all', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('result_no_stop_words', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
def upgrade():
    """Create the bots table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'bots',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('bot_guid', sa.String(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('platforms', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('team', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created', sa.DateTime(), nullable=True),
        sa.Column('used', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('bot_guid'),
    )
def upgrade():
    """Rename the misspelled result columns on 'results' and relax url."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Add the correctly named JSON columns ...
    op.add_column(
        'results',
        sa.Column('result_all', postgresql.JSON(astext_type=sa.Text()), nullable=True))
    op.add_column(
        'results',
        sa.Column('result_no_stop_words', postgresql.JSON(astext_type=sa.Text()), nullable=True))
    op.alter_column('results', 'url', existing_type=sa.TEXT(), nullable=True)
    # ... then drop the misspelled originals.
    op.drop_column('results', 'results_all')
    op.drop_column('results', 'results_no_stops_words')
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Restore the normalized probe/inhibitor FK columns and the separate
    # params columns that the upgrade had merged away.
    op.add_column('experiment',
                  sa.Column('inhibitor_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.add_column('experiment',
                  sa.Column('additional_quant_params', postgresql.JSON(),
                            autoincrement=False, nullable=True))
    op.add_column('experiment',
                  sa.Column('probe_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.add_column('experiment',
                  sa.Column('additional_search_params', postgresql.JSON(),
                            autoincrement=False, nullable=True))
    op.add_column('experiment',
                  sa.Column('treatment_details', postgresql.JSONB(),
                            autoincrement=False, nullable=True))
    op.create_foreign_key('experiment_probe_id_fkey', 'experiment', 'probe',
                          ['probe_id'], ['id'])
    op.create_foreign_key('experiment_inhibitor_id_fkey', 'experiment', 'inhibitor',
                          ['inhibitor_id'], ['id'])
    op.create_index('ix_experiment_probe_id', 'experiment', ['probe_id'], unique=False)
    op.create_index('ix_experiment_inhibitor_id', 'experiment', ['inhibitor_id'], unique=False)
    op.drop_column('experiment', 'treatment')
    op.drop_column('experiment', 'search_params')
    op.drop_column('experiment', 'quant_params')
    op.drop_index(op.f('ix_cell_type_description'), table_name='cell_type')
def upgrade():
    """Add the JSON 'most_important_contacts' column to 'people'."""
    # ### commands auto generated by Alembic - please adjust! ###
    contacts = sa.Column('most_important_contacts',
                         postgresql.JSON(astext_type=sa.Text()),
                         nullable=True)
    op.add_column('people', contacts)
def upgrade():
    """Add the JSON 'tags_default' column to 'maintenance'."""
    # ### commands auto generated by Alembic - please adjust! ###
    tags_default = sa.Column('tags_default',
                             postgresql.JSON(astext_type=sa.Text()),
                             nullable=True)
    op.add_column('maintenance', tags_default)
def upgrade():
    """Add the JSON 'user_data' column to 'logs'."""
    # ### commands auto generated by Alembic - please adjust! ###
    user_data = sa.Column('user_data',
                          postgresql.JSON(astext_type=sa.Text()),
                          nullable=True)
    op.add_column('logs', user_data)
def upgrade():
    """Create the session and laptime tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'session',
        sa.Column('session_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('training_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('setup_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('bike_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('application', sa.String(length=50), nullable=True),
        sa.Column('datetime_created', sa.DateTime(), nullable=False),
        sa.Column('datetime_last_modified', sa.DateTime(), nullable=False),
        sa.Column('datetime_display', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['bike_id'], ['bike.bike_id'], ),
        sa.ForeignKeyConstraint(['setup_id'], ['setup.setup_id'], ),
        sa.ForeignKeyConstraint(['training_id'], ['training.training_id'], ),
        sa.PrimaryKeyConstraint('session_id'),
        sa.UniqueConstraint('session_id'),
    )
    # Lap times belong to a session; 'sectors' holds per-sector splits as JSON.
    op.create_table(
        'laptime',
        sa.Column('lap_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('session_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('lap_no', sa.Integer(), nullable=False),
        sa.Column('valid', sa.Boolean(), nullable=False),
        sa.Column('laptime_seconds', sa.Float(), nullable=False),
        sa.Column('sectors', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('datetime_created', sa.DateTime(), nullable=False),
        sa.Column('datetime_last_modified', sa.DateTime(), nullable=False),
        sa.Column('datetime_display', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['session_id'], ['session.session_id'], ),
        sa.PrimaryKeyConstraint('lap_id'),
        sa.UniqueConstraint('lap_id'),
    )
class Migration(db.Model, Timestamp):
    """Represent one recorded migration run inside the SQL database.

    Additionally it contains two columns ``created`` and ``updated``
    with automatically managed timestamps (via the ``Timestamp`` mixin).
    """

    __tablename__ = 'b2share_migrations'

    # Surrogate primary key, generated client-side.
    id = db.Column(
        UUIDType,
        primary_key=True,
        default=uuid.uuid4,
    )
    # migration version
    version = db.Column(db.String(80), nullable=False)
    # Arbitrary per-run payload; JSON on PostgreSQL, generic JSONType
    # elsewhere. Nullable, so readers must tolerate None.
    data = db.Column(JSONType().with_variant(
        postgresql.JSON(none_as_null=True),
        'postgresql',
    ), nullable=True)

    @property
    def success(self):
        """Return whether the last migration succeeded.

        Fix: ``data`` is nullable, and ``'status' in None`` raises
        TypeError — treat a missing payload as "not successful".
        """
        return self.data is not None and self.data.get('status') == 'success'
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Re-create the old 'statistic' table (with its enum-like CHECK on
    # 'action') and drop its replacement 'stat'.
    op.create_table(
        'statistic',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('action', sa.VARCHAR(length=14), autoincrement=False, nullable=False),
        sa.Column('date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
        sa.Column('chat_id', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column('meta', postgresql.JSON(astext_type=sa.Text()),
                  autoincrement=False, nullable=False),
        sa.CheckConstraint(
            "(action)::text = ANY ((ARRAY['_default'::character varying, 'start'::character varying, 'unsubscribe'::character varying, 'city_added'::character varying, 'position_added'::character varying])::text[])",
            name='action'),
        sa.ForeignKeyConstraint(['chat_id'], ['user_chat.id'], name='statistic_chat_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='statistic_pkey'),
    )
    op.drop_table('stat')
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # 'orders' rows carry server-side NOW() defaults for both timestamps.
    op.create_table(
        'orders',
        sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
        sa.Column('name', sa.String(length=128), nullable=True),
        sa.Column('email', sa.String(length=254), nullable=True),
        sa.Column('state', sa.String(length=30), nullable=True),
        sa.Column('zipcode', sa.String(length=20), nullable=True),
        sa.Column('birthday', sa.Date(), nullable=True),
        sa.Column('valid', sa.Boolean(), nullable=True),
        sa.Column('validation_failure', postgresql.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text(u'NOW()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text(u'NOW()'), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_orders_email'), 'orders', ['email'], unique=False)
    op.create_index(op.f('ix_orders_name'), 'orders', ['name'], unique=False)
    op.create_index('ix_orders_state_zipcode', 'orders', ['state', 'zipcode'], unique=False)
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Make the JSON payload mandatory again and drop the surrogate id.
    op.alter_column(
        'message', 'message',
        existing_type=postgresql.JSON(astext_type=sa.Text()),
        nullable=False)
    op.drop_column('message', 'message_id')
def upgrade():
    """Add the JSON 'selections' column to 'custom_fields'."""
    # ### commands auto generated by Alembic - please adjust! ###
    selections = sa.Column('selections',
                           postgresql.JSON(astext_type=sa.Text()),
                           nullable=True)
    op.add_column('custom_fields', selections)
def upgrade():
    """Add the JSON 'expertise' column to 'portfolio'."""
    # ### commands auto generated by Alembic - please adjust! ###
    expertise = sa.Column('expertise',
                          postgresql.JSON(astext_type=sa.Text()),
                          nullable=True)
    op.add_column('portfolio', expertise)
def upgrade():
    """Convert proteomics.gene from TEXT to JSON in place."""
    # ### commands auto generated by Alembic - please adjust! ###
    # The USING cast reinterprets the existing text payload as JSON.
    op.alter_column(
        'proteomics', 'gene',
        existing_type=sa.TEXT(),
        type_=postgresql.JSON(astext_type=sa.Text()),
        existing_nullable=False,
        postgresql_using="gene::json")
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'library',
        sa.Column('id', postgresql.UUID(), nullable=False),
        sa.Column('name', sa.String(length=50), nullable=True),
        sa.Column('description', sa.String(length=50), nullable=True),
        sa.Column('public', sa.Boolean(), nullable=True),
        sa.Column('bibcode', postgresql.JSON(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('absolute_uid', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('absolute_uid'),
    )
    # Permissions join users to libraries with four boolean capability flags.
    op.create_table(
        'permissions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('read', sa.Boolean(), nullable=True),
        sa.Column('write', sa.Boolean(), nullable=True),
        sa.Column('admin', sa.Boolean(), nullable=True),
        sa.Column('owner', sa.Boolean(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('library_id', postgresql.UUID(), nullable=True),
        sa.ForeignKeyConstraint(['library_id'], ['library.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
def upgrade():
    """Create monthly_billing with a unique (service, month, year, type) key."""
    op.create_table(
        'monthly_billing',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('service_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('month', sa.String(), nullable=False),
        sa.Column('year', sa.Float(), nullable=False),
        # create_type=False: the enum type already exists in the database.
        sa.Column('notification_type',
                  postgresql.ENUM('email', 'sms', 'letter',
                                  name='notification_type', create_type=False),
                  nullable=False),
        sa.Column('monthly_totals', postgresql.JSON(), nullable=False),
        sa.Column('updated_at', sa.DateTime, nullable=False),
        sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_monthly_billing_service_id'), 'monthly_billing',
                    ['service_id'], unique=False)
    op.create_index(op.f('uix_monthly_billing'), 'monthly_billing',
                    ['service_id', 'month', 'year', 'notification_type'],
                    unique=True)
def upgrade():
    """Create device tables, backfill account timestamps, relink recordings."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'device_types',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'devices',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('modified_at', sa.DateTime(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('device_type', sa.Integer(), nullable=True),
        sa.Column('configuration', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.ForeignKeyConstraint(['device_type'], ['device_types.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    # Add the timestamp columns as nullable first, backfill existing rows,
    # then tighten them to NOT NULL.
    op.add_column('accounts', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('accounts', sa.Column('modified_at', sa.DateTime(), nullable=True))
    accounts = sa.sql.table('accounts',
                            sa.sql.column('created_at'),
                            sa.sql.column('modified_at'))
    op.execute(accounts.update().values(created_at=sa.func.now(),
                                        modified_at=sa.func.now()))
    op.alter_column('accounts', 'created_at', nullable=False)
    op.alter_column('accounts', 'modified_at', nullable=False)
    # Existing recordings cannot satisfy the new FK, so they are purged
    # before the constraint is created.
    op.execute('DELETE FROM recordings')
    op.create_foreign_key(None, 'recordings', 'devices', ['device_id'], ['id'])