def test_create_drop_ddl(self):
    """Check rendered CREATE SEQUENCE DDL for each option combination, then DROP."""
    # (sequence, expected DDL) pairs, asserted in the original order.
    create_cases = [
        (Sequence('foo_seq'),
         "CREATE SEQUENCE foo_seq"),
        (Sequence('foo_seq', start=5),
         "CREATE SEQUENCE foo_seq START WITH 5"),
        (Sequence('foo_seq', increment=2),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2"),
        (Sequence('foo_seq', increment=2, start=5),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 5"),
    ]
    for seq, expected_sql in create_cases:
        self.assert_compile(CreateSequence(seq), expected_sql)
    self.assert_compile(
        DropSequence(Sequence('foo_seq')),
        "DROP SEQUENCE foo_seq",
    )
def upgrade():
    """Create the search_solr and sec_tls_ca_cert tables (with id sequences),
    and link HTTP/SOAP objects to TLS CA certs."""
    # Sequence backing search_solr.id must exist before the table references it.
    op.execute(CreateSequence(sa.Sequence('search_solr_seq')))
    op.create_table(
        model.Solr.__tablename__,
        sa.Column('id', sa.Integer(), sa.Sequence('search_solr_seq'), primary_key=True),
        sa.Column('name', sa.String(200), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False, default=True),
        sa.Column('address', sa.String(400), nullable=False),
        sa.Column('timeout', sa.Integer(), nullable=False),
        sa.Column('ping_path', sa.String(40), nullable=False),
        sa.Column('options', sa.String(800), nullable=True),
        sa.Column('pool_size', sa.Integer(), nullable=False),
        sa.Column('cluster_id', sa.Integer, sa.ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False),
    )
    # A Solr connection name must be unique within a cluster.
    op.create_unique_constraint(
        'search_solr_name_cluster_id_key', 'search_solr', ['name', 'cluster_id'])

    # Sequence backing sec_tls_ca_cert.id.
    op.execute(CreateSequence(sa.Sequence('sec_tls_ca_cert_seq')))
    op.create_table(
        model.TLSCACert.__tablename__,
        sa.Column('id', sa.Integer(), sa.Sequence('sec_tls_ca_cert_seq'), primary_key=True),
        sa.Column('name', sa.String(200), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('fs_name', sa.String(200), nullable=False),
        sa.Column('cluster_id', sa.Integer(), sa.ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False),
    )

    # Optional pointer from an HTTP/SOAP object to the CA cert that validates it.
    op.add_column(
        model.HTTPSOAP.__tablename__,
        sa.Column(
            'sec_tls_ca_cert_id', sa.Integer(),
            sa.ForeignKey('sec_tls_ca_cert.id', ondelete='CASCADE'),
            nullable=True))
def upgrade():
    """Create the log_steps and sessions tables, each keyed by its own sequence."""
    # Create both primary-key sequences up front.
    op.execute(CreateSequence(Sequence("log_step_id_seq")))
    op.execute(CreateSequence(Sequence("session_id_seq")))

    op.create_table(
        'log_steps',
        sa.Column('id', sa.Integer, sa.Sequence('log_step_id_seq'), primary_key=True),
        sa.Column('session_id', sa.Integer),
        sa.Column('control_line', sa.String),
        sa.Column('body', sa.String),
        sa.Column('screenshot', sa.String),
        sa.Column('time', sa.Float),
    )
    op.create_table(
        'sessions',
        sa.Column('id', sa.Integer, sa.Sequence('session_id_seq'), primary_key=True),
        sa.Column('name', sa.String),
        sa.Column('time', sa.Float),
    )
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # "Users" is a mixed-case table name, so its sequence name must be
    # double-quoted inside nextval(); SQLAlchemy quotes the mixed-case
    # Sequence('Users_id_seq') the same way when emitting CREATE SEQUENCE.
    op.execute(CreateSequence(Sequence('Users_id_seq')))
    op.create_table(
        'Users',
        sa.Column('id', sa.Integer(), server_default=sa.text('nextval(\'"Users_id_seq"\')'), nullable=False),
        sa.Column('email', sa.String(length=255), nullable=False),
        sa.Column('password_hash', sa.String(length=128), nullable=True),
        sa.Column('first_name', sa.String(length=255), nullable=True),
        sa.Column('last_name', sa.String(length=255), nullable=True),
        sa.Column('polys', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('createdAt', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updatedAt', sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'))

    op.execute(CreateSequence(Sequence('seed_meta_id_seq')))
    op.create_table(
        'seed_meta',
        sa.Column('id', sa.Integer(), server_default=sa.text("nextval('seed_meta_id_seq')"), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id'))
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('search_queries',
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('search_args_hash', sa.Text(), nullable=True),
        sa.Column('search_args', postgresql.JSON(), nullable=True),
        sa.Column('display_name', sa.Text(), nullable=True),
        sa.Column('search_count', sa.BigInteger(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Sequence used by the application to populate search_queries.id
    # (id has no server default here) — created right after the table.
    op.execute(CreateSequence(Sequence('search_query_id_seq')))
    op.create_index(op.f('ix_search_queries_search_args_hash'), 'search_queries', ['search_args_hash'], unique=True)
    op.create_index(op.f('ix_search_queries_search_count'), 'search_queries', ['search_count'], unique=False)

    op.create_table('user_search_queries',
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('user_id', sa.BigInteger(), nullable=True),
        sa.Column('search_query_id', sa.BigInteger(), nullable=True),
        sa.ForeignKeyConstraint(['search_query_id'], ['search_queries.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.execute(CreateSequence(Sequence('user_search_query_id_seq')))
    op.create_index(op.f('ix_user_search_queries_search_query_id'), 'user_search_queries', ['search_query_id'], unique=False)
    op.create_index(op.f('ix_user_search_queries_user_id'), 'user_search_queries', ['user_id'], unique=False)
def upgrade():
    """Create the XPath and JSONPointer message-definition tables."""
    # msg_xpath and its backing id sequence.
    op.execute(CreateSequence(sa.Sequence('msg_xpath_seq')))
    op.create_table(
        model.XPath.__tablename__,
        sa.Column('id', sa.Integer(), sa.Sequence('msg_xpath_seq'), primary_key=True),
        sa.Column('name', sa.String(200), nullable=False),
        sa.Column('value', sa.String(1500), nullable=False),
        sa.Column('cluster_id', sa.Integer(), sa.ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False),
        sa.UniqueConstraint('name', 'cluster_id'))

    # msg_json_pointer and its backing id sequence.
    op.execute(CreateSequence(sa.Sequence('msg_json_pointer_seq')))
    op.create_table(
        model.JSONPointer.__tablename__,
        sa.Column('id', sa.Integer(), sa.Sequence('msg_json_pointer_seq'), primary_key=True),
        sa.Column('name', sa.String(200), nullable=False),
        sa.Column('value', sa.String(1500), nullable=False),
        sa.Column('cluster_id', sa.Integer(), sa.ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False),
        sa.UniqueConstraint('name', 'cluster_id'))
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('teams',
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('name', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Application-side id sequence for teams (id has no server default).
    op.execute(CreateSequence(Sequence('team_id_seq')))
    op.create_index(op.f('ix_team_name'), 'teams', ['name'], unique=False)

    op.create_table('team_members',
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('user_id', sa.BigInteger(), nullable=False),
        sa.Column('team_id', sa.BigInteger(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'],),
        sa.ForeignKeyConstraint(['team_id'], ['teams.id'],),
        sa.PrimaryKeyConstraint('id')
    )
    op.execute(CreateSequence(Sequence('team_member_id_seq')))
    op.create_index(op.f('ix_team_member_user_id'), 'team_members', ['user_id'], unique=False)
    op.create_index(op.f('ix_team_member_team_id'), 'team_members', ['team_id'], unique=False)
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # FIX: the two op.execute(CreateSequence(...)) calls were previously
    # embedded *inside* the op.create_table(...) argument lists. op.execute()
    # returns None, so None was handed to create_table as a schema element and
    # the sequences were only created as a side effect of argument evaluation.
    # They are now issued explicitly before each table.
    op.execute(CreateSequence(Sequence('search_regression_query_id_seq')))
    op.create_table('search_regression_queries',
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('query', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('query')
    )

    op.execute(CreateSequence(Sequence('search_assessment_result_id_seq')))
    op.create_table('search_assessment_results',
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('query_id', sa.BigInteger(), nullable=False),
        sa.Column('build', sa.Text(), nullable=False),
        sa.Column('version', sa.BigInteger(), nullable=False),
        sa.Column('doc_ids', postgresql.ARRAY(sa.BigInteger()), nullable=False),
        sa.Column('results', postgresql.JSON(), nullable=False),
        sa.Column('scores', postgresql.ARRAY(sa.Numeric()), nullable=True),
        sa.ForeignKeyConstraint(['query_id'], ['search_regression_queries.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('marketing_campaigns',
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('name', sa.Text(), nullable=True),
        sa.Column('start_date', sa.DateTime(), nullable=True),
        sa.Column('end_date', sa.DateTime(), nullable=True),
        sa.Column('token', sa.Text(), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('created_by_user_id', sa.BigInteger(), nullable=True),
        sa.ForeignKeyConstraint(['created_by_user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # Application-side id sequence for marketing_campaigns.
    op.execute(CreateSequence(Sequence('marketing_campaign_id_seq')))

    op.create_table('marketing_campaign_users',
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('user_id', sa.BigInteger(), nullable=True),
        sa.Column('marketing_campaign_id', sa.BigInteger(), nullable=True),
        sa.ForeignKeyConstraint(['marketing_campaign_id'], ['marketing_campaigns.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.execute(CreateSequence(Sequence('marketing_campaign_user_id_seq')))
def test_create_drop_ddl(self):
    """Check CREATE SEQUENCE rendering across the full option matrix, then DROP."""
    # (sequence, expected DDL) pairs, asserted in the original order.
    create_cases = [
        (Sequence('foo_seq'),
         "CREATE SEQUENCE foo_seq"),
        (Sequence('foo_seq', start=5),
         "CREATE SEQUENCE foo_seq START WITH 5"),
        (Sequence('foo_seq', increment=2),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2"),
        (Sequence('foo_seq', increment=2, start=5),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 5"),
        (Sequence('foo_seq', increment=2, start=0, minvalue=0),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 0 MINVALUE 0"),
        (Sequence('foo_seq', increment=2, start=1, maxvalue=5),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 1 MAXVALUE 5"),
        (Sequence('foo_seq', increment=2, start=1, nomaxvalue=True),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 1 NO MAXVALUE"),
        (Sequence('foo_seq', increment=2, start=0, nominvalue=True),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 0 NO MINVALUE"),
        (Sequence('foo_seq', start=1, maxvalue=10, cycle=True),
         "CREATE SEQUENCE foo_seq START WITH 1 MAXVALUE 10 CYCLE"),
    ]
    for seq, expected_sql in create_cases:
        self.assert_compile(CreateSequence(seq), expected_sql)
    self.assert_compile(
        DropSequence(Sequence('foo_seq')),
        "DROP SEQUENCE foo_seq",
    )
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('user_tags',
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('user_id', sa.BigInteger(), nullable=True),
        sa.Column('name', sa.Text(), nullable=True),
        sa.Column('provenance', sa.Text(), nullable=True),
        sa.Column('topic_id', sa.BigInteger(), nullable=True),
        sa.Column('topic_table', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'))
    # Application-side id sequence for user_tags.
    op.execute(CreateSequence(Sequence('user_tag_id_seq')))
    op.create_index(op.f('ix_user_tags_user_id'), 'user_tags', ['user_id'], unique=False)
    op.create_index('topic_table_id_index', 'user_tags', ['topic_table', 'topic_id'], unique=False)

    op.create_table(
        'user_document_tags',
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('doc_id', sa.BigInteger(), nullable=True),
        sa.Column('user_tag_id', sa.BigInteger(), nullable=True),
        sa.Column('user_id', sa.BigInteger(), nullable=True),
        sa.Column('is_positive', sa.Boolean(), nullable=True),
        sa.Column('display_style', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['user_tag_id'], ['user_tags.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.execute(CreateSequence(Sequence('user_document_tag_id_seq')))
    op.create_index(op.f('ix_user_document_tags_doc_id'), 'user_document_tags', ['doc_id'], unique=False)
    op.create_index('user_id_doc_id_is_positive_index', 'user_document_tags',
                    ['user_id', 'doc_id', 'is_positive'], unique=False)
    op.create_index('user_tag_id_display_style_index', 'user_document_tags',
                    ['user_tag_id', 'display_style'], unique=False)
def init_udl_tenant_sequences(udl2_conf):
    """Create and sync a per-tenant sequence on the UDL database.

    For every tenant in the configuration, read the next value of the tenant's
    production sequence, then create the tenant's UDL-side sequence (if it does
    not exist yet) and set its current value to the production value so the two
    stay in step.
    """
    # Create and sync sequence for each tenant on udl database if it doesn't exist
    with get_udl_connection() as udl_conn:
        all_tenants = udl2_conf.get(PRODUCTION_NAMESPACE)
        udl_schema_name = udl2_conf.get(UDL_NAMESPACE).get(Constants.DB_SCHEMA)
        # dict to keep track of tenant sequence values for each tenant defined in the ini
        all_tenant_sequences = {}
        for tenant in all_tenants:
            tenant_seq_name = Constants.TENANT_SEQUENCE_NAME(tenant)
            tenant_schema_name = all_tenants.get(tenant).get(Constants.DB_SCHEMA)
            # unique identifier for each tenant: prod URL + schema, so tenants
            # sharing one prod database/schema are only queried once
            key = all_tenants.get(tenant).get(Constants.URL) + ':' + tenant_schema_name
            # check if we have already visited the tenant prod schema
            if not key in all_tenant_sequences:
                with get_prod_connection(tenant) as prod_conn:
                    # NOTE(review): nextval() consumes one value on the prod
                    # sequence each time this runs — presumably acceptable here.
                    prod_seq_result = prod_conn.execute(
                        text("select nextval(\'{schema_name}.{seq_name} \')".format(
                            schema_name=tenant_schema_name,
                            seq_name=Constants.SEQUENCE_NAME)))
                    all_tenant_sequences[key] = prod_seq_result.fetchone()[0]
            # check if the global tenant sequence exists in udl database
            if not sequence_exists(udl_conn, tenant_seq_name):
                # create sequence if does not exist
                udl_conn.execute(CreateSequence(Sequence(name=tenant_seq_name, increment=1)))
                # update and set the current val for the tenant sequence to the
                # value read from production (setval's third arg marks it "called")
                udl_conn.execute(
                    text("select setval(\'{schema_name}.{seq_name} \', {value}, {called})".format(
                        schema_name=udl_schema_name,
                        seq_name=tenant_seq_name,
                        value=all_tenant_sequences[key],
                        called=True)))
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'user_search_result_ratings',
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('user_id', sa.BigInteger(), nullable=True),
        sa.Column('search_query_id', sa.BigInteger(), nullable=True),
        sa.Column('doc_id', sa.BigInteger(), nullable=True),
        sa.Column('is_relevant', sa.Boolean(), nullable=True),
        sa.ForeignKeyConstraint(['search_query_id'], ['search_queries.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'))
    # Application-side id sequence (id has no server default).
    op.execute(CreateSequence(Sequence('user_search_result_rating_id_seq')))
    # Lookup indexes on the three foreign-key-ish columns.
    op.create_index(op.f('ix_user_search_result_ratings_doc_id'),
                    'user_search_result_ratings', ['doc_id'], unique=False)
    op.create_index(op.f('ix_user_search_result_ratings_search_query_id'),
                    'user_search_result_ratings', ['search_query_id'], unique=False)
    op.create_index(op.f('ix_user_search_result_ratings_user_id'),
                    'user_search_result_ratings', ['user_id'], unique=False)
def upgrade():
    """Backfill an auto-increment sequence for cards.id and add cards.game."""
    # Create an auto-increment sequence for cards.id
    conn = alembic.context.get_context().bind
    meta = sqlalchemy.MetaData(bind=conn)
    meta.reflect()
    cards = meta.tables['cards']
    # This table already has a (not-previously-used) auto-increment sequence in
    # the production DB, but new DBs created from scratch via the alembic setup
    # won't have it, so check if it already exists and create if it's missing
    # to bring everything back into sync
    if not cards.c.id.server_default or 'cards_id_seq' not in cards.c.id.server_default.arg.text:
        # Start the new sequence just past the current maximum id (or 1 on an
        # empty table, since maxid falls back to 0).
        maxid, = conn.execute(sqlalchemy.select([sqlalchemy.func.max(cards.c.id)])).first()
        if maxid is None:
            maxid = 0
        alembic.op.execute(CreateSequence(Sequence('cards_id_seq', start=maxid + 1)))
        alembic.op.alter_column("cards", "id", nullable=False,
                                server_default=sqlalchemy.text("nextval('cards_id_seq'::regclass)"))
    # Add cards.game column
    # create it with a default but then remove the default, to set the value on
    # all existing rows, but have the column mandatory in the future
    alembic.op.drop_index("cards_name_idx")
    alembic.op.add_column('cards',
                          sqlalchemy.Column('game', sqlalchemy.Integer, nullable=False, server_default='1')
                          )
    alembic.op.alter_column("cards", "game", server_default=None)
    # Card names are now unique per game rather than globally.
    alembic.op.create_index("cards_name_idx", "cards", ["game", "filteredname"], unique=True)
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Sequence for user_extra_registrations.id, counting from 1 by 1.
    op.execute(
        CreateSequence(
            Sequence('user_extra_registration_seq', start=1, increment=1)))
    op.create_table(
        'user_extra_registrations',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('account_id', sa.String(length=20), nullable=False),
        sa.Column('registration_number', sa.String(length=10), nullable=False),
        sa.Column('remove_ind', sa.String(length=1), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_user_extra_registrations_account_id'),
                    'user_extra_registrations', ['account_id'], unique=False)
    op.create_index(op.f('ix_user_extra_registrations_registration_number'),
                    'user_extra_registrations', ['registration_number'], unique=False)
    # Widen client_reference_id from 20 to 50 characters on both tables.
    op.alter_column('registrations', 'client_reference_id',
                    existing_type=sa.VARCHAR(length=20),
                    type_=sa.String(length=50),
                    existing_nullable=True)
    op.alter_column('search_requests', 'client_reference_id',
                    existing_type=sa.VARCHAR(length=20),
                    type_=sa.String(length=50),
                    existing_nullable=True)
def table_upgrade(table_name, table_info):
    """Apply the primary-key / sequence-column / column changes described by
    *table_info* to *table_name*.

    table_info may contain 'columns' (per-column type/nullability settings),
    'keys' ({'new': [...]} primary-key columns) and 'seq_column' (name of a
    sequence-backed integer column to add).
    """
    new_columns = table_info.get('columns')
    key_info = table_info.get('keys')
    sequence_column = table_info.get('seq_column')

    # Drop the old primary key first so the column changes are unconstrained.
    if key_info:
        op.drop_constraint('%s_pkey' % table_name, table_name, type_='primary')

    # Create a sequence and add the associated column if one was specified.
    if sequence_column:
        sequence = Sequence('%s_%s_seq' % (table_name, sequence_column))
        op.execute(CreateSequence(sequence))
        op.add_column(table_name,
                      sa.Column(sequence_column, sa.Integer(),
                                server_default=sequence.next_value(), nullable=False))

    # Apply the new per-column settings if specified.
    if new_columns:
        for column_name, column_info in new_columns.items():
            op.alter_column(table_name, column_name,
                            existing_type=column_info['type'],
                            nullable=column_info['nullable']['new'])

    # Recreate the primary key with its new column set.
    if key_info:
        op.create_primary_key('%s_pkey' % table_name, table_name, key_info['new'])
def upgrade():
    """Create the job queue table keyed by a cycling id sequence."""
    # The id sequence cycles, so ids can be reused once exhausted.
    op.execute(CreateSequence(Sequence("job_id_seq", cycle=True)))
    op.create_table(
        "job",
        Column("id", Integer, Sequence("job_id_seq", cycle=True), primary_key=True),
        Column("name", UnicodeText, nullable=False),
        # Timestamps default to insertion time on the server.
        Column("enqueued_at", DateTime, nullable=False, server_default=func.now()),
        Column("scheduled_at", DateTime, nullable=False, server_default=func.now()),
        # Jobs expire 30 days after creation unless told otherwise.
        Column(
            "expires_at",
            DateTime,
            nullable=False,
            server_default=text("now() + interval '30 days'"),
        ),
        Column("priority", Integer, nullable=False),
        Column("tag", UnicodeText, nullable=False),
        # Keyword arguments for the job, defaulting to an empty JSON object.
        Column(
            "kwargs",
            JSONB,
            server_default=text("'{}'::jsonb"),
            nullable=False,
        ),
    )
def upgrade():
    """Split sepasammler into a header table (sepasammler) plus per-booking
    detail rows (sepasammlerentry), linking entries to their header."""
    # Keep all existing rows as the new detail table, then clone its structure
    # for the new (initially empty) header table.
    op.execute("""ALTER TABLE sepasammler RENAME TO sepasammlerentry;""")
    op.execute("CREATE TABLE sepasammler AS SELECT * FROM sepasammlerentry;")
    op.execute("""DELETE FROM sepasammler""")
    # The old sequence/index follow the renamed table; give the new header
    # table its own id sequence and primary-key-like unique constraint.
    op.execute("""ALTER SEQUENCE sepasammler_id_seq RENAME TO sepasammlerentry_id_seq""")
    op.execute("""ALTER INDEX sepasammler_pkey RENAME TO sepasammlerentry_pkey""")
    op.execute(CreateSequence(Sequence('sepasammler_id_seq')))
    op.alter_column(
        "sepasammler", "id", nullable=False,
        server_default=sa.text("nextval('sepasammler_id_seq'::regclass)"))
    op.create_unique_constraint(u'sepasammler_pkey', 'sepasammler', ['id'])
    # Header rows only carry grouping attributes — drop the per-entry columns.
    op.drop_column(u'sepasammler', 'ignore_in_reporting')
    op.drop_column(u'sepasammler', 'value')
    op.drop_column(u'sepasammler', 'member_id')
    # One header row per distinct (pmtinfid, booking_day, accounting_year, kind).
    op.execute("""INSERT into sepasammler (pmtinfid, booking_day, accounting_year, kind_id) (SELECT distinct pmtinfid, booking_day, accounting_year, kind_id FROM sepasammlerentry)""")
    op.create_foreign_key(None, 'sepasammler', 'bookingkind', ['kind_id'], ['id'])
    # Point each entry at its header via pmtinfid, then drop the columns the
    # header now owns.
    op.add_column('sepasammlerentry', sa.Column('sepa_sammler_id', sa.Integer(), nullable=True))
    op.execute("""UPDATE sepasammlerentry SET sepa_sammler_id = ( SELECT id FROM sepasammler WHERE pmtinfid = sepasammlerentry.pmtinfid )""")
    op.create_foreign_key(None, 'sepasammlerentry', 'sepasammler', ['sepa_sammler_id'], ['id'])
    op.drop_column('sepasammlerentry', 'kind_id')
    op.drop_column('sepasammlerentry', 'accounting_year')
    op.drop_column('sepasammlerentry', 'pmtinfid')
    op.drop_column('sepasammlerentry', 'booking_day')
def ensure_sequence_exists(self, sequence_name):
    """Create *sequence_name* if the dialect supports sequences and it is absent.

    Returns None when sequences are unsupported, False when the sequence
    already exists, and True when it was just created.
    """
    if self.supports_sequences:
        dialect = self.op.impl.dialect
        if dialect.has_sequence(self.connection, sequence_name):
            return False
        self.op.execute(CreateSequence(Sequence(sequence_name)))
        return True
    return None
def upgrade():
    """Create the email_imap table with its id sequence and per-cluster
    name uniqueness."""
    op.execute(CreateSequence(sa.Sequence('email_imap_seq')))
    op.create_table(
        model.IMAP.__tablename__,
        sa.Column('id', sa.Integer(), sa.Sequence('email_imap_seq'), primary_key=True),
        sa.Column('name', sa.String(200), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('host', sa.String(400), nullable=False),
        sa.Column('port', sa.Integer(), nullable=False),
        sa.Column('timeout', sa.Integer(), nullable=False),
        sa.Column('debug_level', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(400), nullable=True),
        sa.Column('password', sa.String(400), nullable=True),
        sa.Column('mode', sa.String(20), nullable=False),
        sa.Column('get_criteria', sa.String(2000), nullable=False),
        sa.Column('cluster_id', sa.Integer(), sa.ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False),
    )
    # An IMAP connection name must be unique within a cluster.
    op.create_unique_constraint(
        'email_imap_name_cluster_id_key', model.IMAP.__tablename__, ['name', 'cluster_id'])
def downgrade():
    """Restore the sequence-backed server defaults removed by the upgrade."""
    # Recreate all three sequences first, then reattach the defaults.
    for seq_name in ("hera_obs_obsid_seq", "lib_status_time_seq", "rtp_status_time_seq"):
        op.execute(CreateSequence(Sequence(seq_name)))
    op.alter_column(
        'hera_obs', 'obsid',
        server_default=sa.text("nextval('hera_obs_obsid_seq'::regclass)"))
    op.alter_column(
        'lib_status', 'time',
        server_default=sa.text("nextval('lib_status_time_seq'::regclass)"))
    op.alter_column(
        'rtp_status', 'time',
        server_default=sa.text("nextval('rtp_status_time_seq'::regclass)"))
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'contributor_point_types',
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('short_name', sa.Text(), nullable=True),
        sa.Column('point_group_name', sa.Text(), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('frequency', sa.Text(), nullable=True),
        sa.Column('points_per_action', sa.BigInteger(), nullable=True),
        sa.Column('actions_per_week', sa.BigInteger(), nullable=True),
        sa.Column('points_per_month', sa.BigInteger(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    # Application-side id sequence for contributor_point_types.
    op.execute(CreateSequence(Sequence('contributor_point_type_id_seq')))
    op.create_index(op.f('ix_contributor_point_types_point_group_name'),
                    'contributor_point_types', ['point_group_name'], unique=False)
    op.create_index(op.f('ix_contributor_point_types_short_name'),
                    'contributor_point_types', ['short_name'], unique=False)

    op.create_table(
        'user_contributor_points',
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('user_id', sa.BigInteger(), nullable=True),
        sa.Column('contributor_point_type_id', sa.BigInteger(), nullable=True),
        sa.Column('num_points', sa.BigInteger(), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['contributor_point_type_id'], ['contributor_point_types.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.execute(CreateSequence(Sequence('user_contributor_point_id_seq')))
    op.create_index(
        op.f('ix_user_contributor_points_contributor_point_type_id'),
        'user_contributor_points', ['contributor_point_type_id'], unique=False)
    op.create_index(op.f('ix_user_contributor_points_user_id'),
                    'user_contributor_points', ['user_id'], unique=False)
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): 'user_seq_id' is created but no column here references it
    # (tb_user's PK is user_email); presumably the application assigns user_id
    # from this sequence — confirm against the model code.
    op.execute(CreateSequence(Sequence('user_seq_id')))
    op.create_table('tb_user',
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('user_name', sa.String(length=255), nullable=True),
        sa.Column('user_email', sa.String(length=255), nullable=False),
        sa.Column('user_password', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('user_email')
    )
def create_udl2_sequence(schema_name):
    '''
    Create the UDL2 sequences for the given schema.

    :param schema_name: name of the schema the generated sequences belong to
    '''
    print("create sequences")
    with get_udl_connection() as conn:
        metadata = conn.get_metadata()
        # Issue a CREATE SEQUENCE for every sequence the generator yields.
        for seq in generate_udl2_sequences(schema_name, metadata):
            conn.execute(CreateSequence(seq))
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Sequence and table both live in the city4age_ar schema.
    op.execute(CreateSequence(Sequence('source_evidence_id_seq', schema='city4age_ar')))
    op.create_table(
        'source_evidence',
        sa.Column('id', sa.Integer(),
                  server_default=sa.text(u"nextval('city4age_ar.source_evidence_id_seq')"),
                  nullable=False, primary_key=True),
        sa.Column('text_evidence', sa.String(255), nullable=True),
        # Creation timestamp defaults to the current UTC time on the server.
        sa.Column('creation_date', sqlalchemy_utils.types.arrow.ArrowType(timezone=True),
                  server_default=sa.text(u"TIMEZONE('utc', CURRENT_TIMESTAMP)"),
                  nullable=True),
        schema='city4age_ar'
    )
def upgrade():
    """Update database."""
    op.create_table(
        "transaction",
        sa.Column("issued_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("remote_addr", sa.String(length=50), nullable=True),
    )
    op.create_primary_key("pk_transaction", "transaction", ["id"])
    # Only dialects with sequence support get the id sequence; others rely on
    # their own autoincrement mechanism.
    # NOTE(review): op._proxy is a private Alembic attribute — verify it still
    # exists when upgrading Alembic.
    if op._proxy.migration_context.dialect.supports_sequences:
        op.execute(CreateSequence(Sequence("transaction_id_seq")))
def upgrade():
    """Update database."""
    op.create_table(
        'transaction',
        sa.Column('issued_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('remote_addr', sa.String(length=50), nullable=True),
    )
    op.create_primary_key('pk_transaction', 'transaction', ['id'])
    # Create the id sequence only on dialects that actually support sequences.
    # NOTE(review): op._proxy is private Alembic API — confirm on upgrade.
    dialect = op._proxy.migration_context.dialect
    if dialect.supports_sequences:
        op.execute(CreateSequence(Sequence('transaction_id_seq')))
def upgrade():
    """Create the menu_items tree table and its translations table."""
    op.execute(CreateSequence(sa.Sequence(name='menu_items_id_seq')))
    op.create_table(
        'menu_items',
        # id draws its default from the sequence created above.
        sa.Column(
            'id', sa.INTEGER(),
            server_default=sa.Sequence(name='menu_items_id_seq').next_value(),
            nullable=False),
        # Self-referencing parent pointer makes menu items a tree.
        sa.Column('parent_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('icon', sa.VARCHAR(length=126), autoincrement=False, nullable=True),
        sa.Column('opened', sa.BOOLEAN(), autoincrement=False, nullable=True),
        sa.Column('disabled', sa.BOOLEAN(), autoincrement=False, nullable=True),
        sa.Column('selected', sa.BOOLEAN(), autoincrement=False, nullable=True),
        sa.Column('type', sa.VARCHAR(length=58), autoincrement=False, nullable=True),
        sa.Column('tag', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('slug', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('page_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], [u'menu_items.id'], name=u'menu_items_parent_id_fkey'),
        sa.PrimaryKeyConstraint(u'id', name=u'menu_items_pkey'))
    op.create_table(
        'menu_items_translation',
        sa.Column('id', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column('locale', sa.VARCHAR(length=10), autoincrement=False, nullable=False),
        sa.Column('text', sa.TEXT(), autoincrement=False, nullable=True),
        # Translations are removed together with their menu item.
        sa.ForeignKeyConstraint(['id'], [u'menu_items.id'],
                                name=u'menu_items_translation_id_fkey', ondelete=u'CASCADE'),
        sa.PrimaryKeyConstraint('id', 'locale', name=u'menu_items_translation_pkey'))
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('moves',
        sa.Column('move_id', sa.Integer(), nullable=False),
        sa.Column('move_name', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('move_id')
    )
    op.create_table('pokemon',
        sa.Column('pokeapi_id', sa.Integer(), nullable=False),
        sa.Column('pokemon_id', sa.Integer(), nullable=False),
        sa.Column('pokemon_name', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('pokeapi_id')
    )
    op.create_table('users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('email', sa.String(), nullable=False),
        sa.Column('username', sa.String(length=30), nullable=False),
        sa.Column('password', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('username')
    )
    op.create_table('teams',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=50), nullable=False),
        sa.Column('description', sa.String(length=2000), nullable=True),
        sa.Column('is_private', sa.Boolean(), nullable=False),
        sa.Column('owner_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['owner_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # Sequence feeding teams_pokemon.id (unique surrogate alongside the
    # composite (team_id, team_index) primary key).
    op.execute(CreateSequence(Sequence('teams_pokemon_id_seq')))
    op.create_table('teams_pokemon',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=True),
        sa.Column('team_id', sa.Integer(), nullable=False),
        sa.Column('team_index', sa.Integer(), nullable=False),
        sa.Column('pokeapi_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['pokeapi_id'], ['pokemon.pokeapi_id'], ),
        sa.ForeignKeyConstraint(['team_id'], ['teams.id'], ),
        sa.PrimaryKeyConstraint('team_id', 'team_index'),
        sa.UniqueConstraint('id')
    )
    op.create_table('pokemon_moves',
        sa.Column('team_pokemon_id', sa.Integer(), nullable=False),
        sa.Column('pokeapi_id', sa.Integer(), nullable=False),
        sa.Column('pokemon_move_index', sa.Integer(), nullable=False),
        sa.Column('move_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['move_id'], ['moves.move_id'], ),
        sa.ForeignKeyConstraint(['pokeapi_id'], ['pokemon.pokeapi_id'], ),
        sa.ForeignKeyConstraint(['team_pokemon_id'], ['teams_pokemon.id'], ),
        sa.PrimaryKeyConstraint('team_pokemon_id', 'pokeapi_id', 'pokemon_move_index')
    )
def downgrade():
    """Restore the sequence-backed surrogate id primary key on carbonmonoxide."""
    # Create new column 'id' and autoincrement its value via a fresh sequence.
    op.execute(CreateSequence(Sequence("carbonmonoxide_id_seq")))
    op.add_column(
        'carbonmonoxide',
        sa.Column('id', sa.INTEGER(), nullable=False,
                  server_default=sa.text("nextval('carbonmonoxide_id_seq'::regclass)")))
    # Use 'id' as the new primary key
    op.create_primary_key('carbonmonoxide_pkey', 'carbonmonoxide', ['id'])