def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('actors', 'gender',
                    existing_type=sa.CHAR(length=1),
                    nullable=False)
    op.add_column('movies',
                  sa.Column('identifier', sa.String(length=36), nullable=True))
    op.add_column('movies',
                  sa.Column('ott_partner', sa.String(length=20), nullable=True))
    op.add_column('movies',
                  sa.Column('production_house', sa.String(length=50),
                            nullable=True))
    op.create_unique_constraint(None, 'movies', ['identifier'])

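# A minimal matching downgrade sketch (not in the original). It assumes a
# naming convention produced 'uq_movies_identifier' for the unnamed unique
# constraint above; with no convention configured, the database-generated
# name would have to be looked up instead.
def downgrade():
    op.drop_constraint('uq_movies_identifier', 'movies', type_='unique')
    op.drop_column('movies', 'production_house')
    op.drop_column('movies', 'ott_partner')
    op.drop_column('movies', 'identifier')
    op.alter_column('actors', 'gender',
                    existing_type=sa.CHAR(length=1),
                    nullable=True)
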
def upgrade(): """Upgrade database.""" # ### commands auto generated by Alembic - please adjust! ### op.add_column('oarepo_communities', sa.Column('title', sa.String(length=128), nullable=True)) op.add_column('oarepo_communities', sa.Column('type', sqlalchemy_utils.types.choice.ChoiceType( choices=OAREPO_COMMUNITIES_TYPES, impl=sa.CHAR(length=16)), nullable=False)) op.drop_constraint('fk_oarepo_communities_curators_id_accounts_role', 'oarepo_communities', type_='foreignkey') op.drop_constraint('fk_oarepo_communities_publishers_id_accounts_role', 'oarepo_communities', type_='foreignkey') op.drop_constraint('fk_oarepo_communities_members_id_accounts_role', 'oarepo_communities', type_='foreignkey') op.drop_column('oarepo_communities', 'curators_id') op.drop_column('oarepo_communities', 'members_id') op.drop_column('oarepo_communities', 'publishers_id')
def upgrade():
    connection = op.get_bind()
    # add uuid column and unique constraint for each table
    for table_name in table_names:
        # step 1, add new uuid column
        op.add_column(
            table_name,
            sa.Column('uuid', sa.CHAR(22), nullable=False, server_default=''))
        # step 2, fill in unique uuids (base64 url safe strings)
        table = sa.table(table_name,
                         sa.Column('id', sa.Integer()),
                         sa.Column('uuid', sa.CHAR(22)))
        for record in connection.execute(table.select()):
            # decode() rather than str(): on Python 3, str() of bytes yields
            # the "b'...'" repr, which would not fit in CHAR(22)
            connection.execute(
                table.update().where(table.c.id == record.id).values(
                    uuid=base64.urlsafe_b64encode(
                        uuid.uuid4().bytes).decode('ascii').replace('=', '')))
        # step 3, apply unique constraint on generated table
        with op.batch_alter_table(table_name,
                                  naming_convention=convention) as batch_op:
            batch_op.create_unique_constraint("uq_" + table_name + "_uuid",
                                              ['uuid'])

def upgrade():
    # step 1, add new uuid column
    op.add_column(
        'lti_consumer',
        sa.Column('uuid', sa.CHAR(22), nullable=False, server_default=''))
    connection = op.get_bind()
    # step 2, fill in unique uuids (base64 url safe strings)
    lti_consumer_table = sa.table('lti_consumer',
                                  sa.Column('id', sa.Integer()),
                                  sa.Column('uuid', sa.CHAR(22)))
    for record in connection.execute(lti_consumer_table.select()):
        # decode() rather than str(): on Python 3, str() of bytes yields the
        # "b'...'" repr, which would overflow CHAR(22)
        connection.execute(lti_consumer_table.update().where(
            lti_consumer_table.c.id == record.id).values(
                uuid=base64.urlsafe_b64encode(
                    uuid.uuid4().bytes).decode('ascii').replace('=', '')))
    # step 3, apply unique constraint on generated table
    with op.batch_alter_table('lti_consumer',
                              naming_convention=convention) as batch_op:
        batch_op.create_unique_constraint("uq_lti_consumer_uuid", ['uuid'])
        batch_op.alter_column('lti_version',
                              existing_type=sa.String(20),
                              nullable=True)

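# A minimal matching downgrade sketch (not in the original), assuming the same
# 'convention' naming map used above and that 'lti_version' was non-nullable
# before the upgrade; it reverses the three steps in opposite order.
def downgrade():
    with op.batch_alter_table('lti_consumer',
                              naming_convention=convention) as batch_op:
        batch_op.alter_column('lti_version',
                              existing_type=sa.String(20),
                              nullable=False)
        batch_op.drop_constraint('uq_lti_consumer_uuid', type_='unique')
    op.drop_column('lti_consumer', 'uuid')
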
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('user', 'salt',
                    existing_type=sa.CHAR(length=29),
                    nullable=False)
    op.alter_column('user', 'email',
                    existing_type=sa.VARCHAR(length=254),
                    nullable=True)
    op.drop_index('ix_post_search_vector', table_name='post')
    op.create_index('post_comments_index', 'comment', ['post_uuid'],
                    unique=False)
    op.drop_index(op.f('ix_comment_post_uuid'), table_name='comment')

def upgrade():
    op.create_table(
        'auth_sessions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_name', sa.String(), nullable=True),
        sa.Column('token', sa.CHAR(length=32), nullable=False),
        sa.Column('groups', sa.String(), nullable=True),
        sa.Column('last_access', sa.DateTime(), nullable=False),
        sa.Column('description', sa.String(), nullable=True),
        sa.Column('can_expire', sa.Boolean(),
                  server_default=sa.text('true'), nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_auth_sessions')),
        sa.UniqueConstraint('token', name=op.f('uq_auth_sessions_token')))
    op.drop_table('user_sessions')

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('Products',
                    sa.Column('p_id', sa.Integer(), nullable=False),
                    sa.Column('p_name', sa.String(), nullable=False),
                    sa.Column('price', sa.Float(), nullable=False),
                    sa.Column('accessory', sa.Boolean(), nullable=False),
                    sa.PrimaryKeyConstraint('p_id'))
    op.create_table('User',
                    sa.Column('u_id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(), nullable=False),
                    sa.Column('username', sa.String(), nullable=False),
                    sa.Column('email', sa.String(), nullable=False),
                    sa.Column('password', sa.String(), nullable=False),
                    sa.Column('verified', sa.Boolean(), nullable=True),
                    sa.PrimaryKeyConstraint('u_id'),
                    sa.UniqueConstraint('email'),
                    sa.UniqueConstraint('username'))
    op.create_table('Purchases',
                    sa.Column('pu_id', sa.Integer(), nullable=False),
                    sa.Column('u_id', sa.Integer(), nullable=False),
                    sa.Column('p_id', sa.Integer(), nullable=False),
                    sa.Column('date', sa.DateTime(), nullable=False),
                    sa.ForeignKeyConstraint(['p_id'], ['Products.p_id']),
                    sa.ForeignKeyConstraint(['u_id'], ['User.u_id']),
                    sa.PrimaryKeyConstraint('pu_id'))
    op.create_table('ShippingInfo',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('u_id', sa.Integer(), nullable=False),
                    sa.Column('address', sa.String(), nullable=False),
                    sa.Column('phone', sa.CHAR(length=10), nullable=False),
                    sa.ForeignKeyConstraint(['u_id'], ['User.u_id']),
                    sa.PrimaryKeyConstraint('id'))
    op.create_table('Wishlist',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('u_id', sa.Integer(), nullable=False),
                    sa.Column('p_id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['p_id'], ['Products.p_id']),
                    sa.ForeignKeyConstraint(['u_id'], ['User.u_id']),
                    sa.PrimaryKeyConstraint('id'))

def upgrade():
    search_type_table = op.create_table(
        'search_type',
        sa.Column('long_code', sa.String(length=40), primary_key=True),
        sa.Column('short_code', sa.CHAR(length=2), nullable=False),
        sa.Column('description', postgresql.TEXT, nullable=False),
    )
    op.create_table(
        'search',
        sa.Column('id', sa.BigInteger, primary_key=True),
        sa.Column('type_long_code', sa.String(length=40),
                  sa.ForeignKey('search_type.long_code'), nullable=False),
        sa.Column('criteria', postgresql.JSONB, nullable=False),
        sa.Column('creation_date_time', sa.DateTime(timezone=True),
                  server_default=sa.text('NOW()'), nullable=False))

    # Populate the search type code tables. Short codes are reflective of the
    # codes in the original PPR system, while the long codes are for use in
    # the API moving forward.
    op.bulk_insert(search_type_table, [
        {'long_code': 'AIRCRAFT_DOT', 'short_code': 'AS',
         'description': 'Aircraft Airframe D.O.T. Number'},
        {'long_code': 'BUSINESS_DEBTOR', 'short_code': 'BS',
         'description': 'Business Debtor Name'},
        {'long_code': 'INDIVIDUAL_DEBTOR', 'short_code': 'IS',
         'description': 'Individual Debtor Name'},
        {'long_code': 'MHR_NUMBER', 'short_code': 'MS',
         'description': 'Manufactured Home Registration Number'},
        {'long_code': 'REGISTRATION_NUMBER', 'short_code': 'RS',
         'description': 'Registration Number'},
        {'long_code': 'SERIAL_NUMBER', 'short_code': 'SS',
         'description': 'Serial Number'},
    ])

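# A matching downgrade sketch (not in the original). Dropping the tables also
# discards the seeded search-type rows, so nothing else needs reversing;
# 'search' is dropped first because it holds the FK to 'search_type'.
def downgrade():
    op.drop_table('search')
    op.drop_table('search_type')
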
def writers(cnxn): """ Create writers tabell. """ with cnxn.connect() as con: con.execute('DROP TABLE IF EXISTS writers;') meta = sa.MetaData() print("Creating table writers...") imdbTable = sa.Table( "writers", meta, sa.Column("writer", sa.VARCHAR(300), primary_key=True), sa.Column("titleId", sa.VARCHAR(100), primary_key=True), sa.Column("surrogate", sa.VARCHAR(1000)), sa.Column("hashkey", sa.CHAR(32)), ) meta.create_all(cnxn) col_list = ["tconst", "writers"] title_ak = pd.read_csv(os.path.join(data_path, 'title.crew.tsv'), sep='\t', na_values='\\N', quoting=3, nrows=rows, usecols=col_list) title_ak = title_ak.rename(columns={ 'tconst': 'titleId', 'writers': 'writer', }) #Dropa nulls title_ak = title_ak[title_ak['writer'].notna()] #Explode and set hashkey exploded = explode_array_df(title_ak, 'writer', ',', 'titleId') exploded['surrogate'] = exploded['titleId'].astype( str) + '-' + exploded['writer'].astype(str) writers = hashkey(exploded, 'surrogate') #to db writers.to_sql('writers', cnxn, if_exists='append', index=None, chunksize=100000)
class Job(Base): """Represent a job in the database.""" __tablename__ = 'Jobs' id = sa.Column(sa.CHAR(32), primary_key=True) creation_time = sa.Column(sa.TIMESTAMP, default=sa.func.now()) modification_time = sa.Column(sa.TIMESTAMP, default=sa.func.now()) status = sa.Column(sa.REAL) finished = sa.Column(sa.INTEGER) job_type = sa.Column(sa.VARCHAR(16)) training_jobs = orm.relationship('TrainingJob', cascade='all,delete,delete-orphan', passive_deletes=True) training_results = orm.relationship('TrainingResult', cascade='all,delete,delete-orphan', passive_deletes=True)
def upgrade():
    op.create_table(
        'p2p_lending_platforms_mintos_statements',
        sa.Column('id', sa.INTEGER, primary_key=True, nullable=False),
        sa.Column('investment_account_id', sa.INTEGER,
                  primary_key=True, nullable=False),
        sa.Column('statement_file', sa.dialects.postgresql.BYTEA,
                  nullable=False),
        sa.Column('statement_file_type', filetypes, nullable=False),
        sa.Column('upload_timestamp', sa.TIMESTAMP, nullable=False),
        sa.Column('document_sha256_hash', sa.CHAR(64),
                  unique=True, nullable=False),
        sa.Column('transactions_total', sa.INTEGER, nullable=False),
        sa.Column('first_transaction_timestamp', sa.TIMESTAMP, nullable=False),
        sa.Column('last_transaction_timestamp', sa.TIMESTAMP, nullable=False),
        sa.Column('processing_state', statement_processing_states,
                  nullable=False),
        sa.Column('processing_start_timestamp', sa.TIMESTAMP),
        sa.Column('processing_finish_timestamp', sa.TIMESTAMP))

def title_type(cnxn): """ Create title_type tabell. """ with cnxn.connect() as con: con.execute('DROP TABLE IF EXISTS title_type;') meta = sa.MetaData() print("Creating table title_type...") imdbTable = sa.Table( "title_type", meta, sa.Column("titleId", sa.VARCHAR(100), primary_key=True), sa.Column("ordering", sa.INTEGER(), primary_key=True), sa.Column("types", sa.VARCHAR(1000)), sa.Column("surrogate", sa.VARCHAR(1000)), sa.Column("hashkey", sa.CHAR(32)), ) meta.create_all(cnxn) col_list = ["titleId", "types", "ordering"] title_ak = pd.read_csv(os.path.join(data_path, 'title.akas.tsv'), dtype={ 'titleId': 'str', 'ordering': 'int', 'types': 'str' }, sep='\t', na_values='\\N', quoting=3, nrows=rows, usecols=col_list) title_ak.dropna(subset=['types'], inplace=True) #Business key/surrogate och hasha title_ak['surrogate'] = title_ak['titleId'].astype(str) + '-' + title_ak[ 'ordering'].astype(str) + '-' + title_ak['types'].astype(str) title_type = hashkey(title_ak, 'surrogate') #to db title_type.to_sql('title_type', cnxn, if_exists='append', index=None, chunksize=100000)
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('sections',
                    sa.Column('id', sa.INTEGER(), nullable=False),
                    sa.Column('letter', sa.CHAR(), nullable=False),
                    sa.Column('title', sa.TEXT(), nullable=False),
                    sa.PrimaryKeyConstraint('id'))
    op.create_index('ix_sections_title', 'sections', ['title'], unique=False)
    op.create_index('ix_sections_letter', 'sections', ['letter'],
                    unique=False)
    op.create_table('recommendations',
                    sa.Column('id', sa.INTEGER(), nullable=False),
                    sa.Column('title', sa.VARCHAR(length=128),
                              nullable=False),
                    sa.Column('text', sa.TEXT(), nullable=False),
                    sa.Column('interpretation', sa.TEXT(), nullable=True),
                    sa.PrimaryKeyConstraint('id'))
    op.create_index('ix_recommendations_title', 'recommendations', ['title'],
                    unique=False)

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'User',
        sa.Column('user_id', sa.CHAR(length=36), nullable=False),
        sa.Column('login', sa.String(length=80), nullable=False),
        sa.Column('password',
                  sqlalchemy_utils.types.password.PasswordType(
                      max_length=1137),
                  nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('email', sqlalchemy_utils.types.email.EmailType(length=255),
                  nullable=False),
        sa.Column('created', mysql.DATETIME(), nullable=False),
        sa.PrimaryKeyConstraint('user_id', name=op.f('pk_User')),
        sa.UniqueConstraint('email', name=op.f('uq_User_email')),
        sa.UniqueConstraint('login', name=op.f('uq_User_login')))

def person_known_for(cnxn): """ Create person_known_for tabell. """ with cnxn.connect() as con: con.execute('DROP TABLE IF EXISTS person_known_for;') meta = sa.MetaData() print("Creating person_known_for table...") imdbTable = sa.Table( "person_known_for", meta, sa.Column("personId", sa.VARCHAR(300), primary_key=True), sa.Column("knownForTitles", sa.VARCHAR(100), primary_key=True), sa.Column("surrogate", sa.VARCHAR(1000)), sa.Column("hashkey", sa.CHAR(32)), ) meta.create_all(cnxn) col_list = ["nconst", "knownForTitles"] title_ak = pd.read_csv(os.path.join(data_path, 'name.basics.tsv'), sep='\t', na_values='\\N', quoting=3, nrows=rows, usecols=col_list) title_ak = title_ak.rename(columns={'nconst': 'personId'}) #Dropa nulls title_ak = title_ak[title_ak['knownForTitles'].notna()] #Explode and set hashkey exploded = explode_array_df(title_ak, 'knownForTitles', ',', 'personId') exploded['surrogate'] = exploded['personId'].astype( str) + '-' + exploded['knownForTitles'].astype(str) person_known_for = hashkey(exploded, 'surrogate') #to db person_known_for.to_sql('person_known_for', cnxn, if_exists='append', index=None, chunksize=100000)
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('lottery_result',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('created_time', sa.DateTime(timezone=True),
                              server_default=sa.text(u'CURRENT_TIMESTAMP'),
                              nullable=False),
                    sa.Column('updated_time', sa.DateTime(timezone=True),
                              server_default=sa.text(u'CURRENT_TIMESTAMP'),
                              nullable=False),
                    sa.Column('month', sa.CHAR(length=7), nullable=False),
                    sa.Column('personal_participate_total', sa.Integer(),
                              nullable=False),
                    sa.Column('personal_total', sa.Integer(), nullable=False),
                    sa.Column('corporate_participate_total', sa.Integer(),
                              nullable=False),
                    sa.Column('corporate_total', sa.Integer(),
                              nullable=False),
                    sa.PrimaryKeyConstraint('id'),
                    mysql_charset='utf8',
                    mysql_collate='utf8_bin')
    op.create_index(op.f('ix_lottery_result_month'), 'lottery_result',
                    ['month'], unique=True)
    op.add_column(
        u'competition_result',
        sa.Column('corporate_participate_total', sa.Integer(),
                  nullable=False))
    op.add_column(
        u'competition_result',
        sa.Column('personal_participate_total', sa.Integer(),
                  nullable=False))
    op.alter_column(u'competition_result', 'created_time',
                    existing_type=mysql.DATETIME(),
                    type_=sa.DateTime(timezone=True),
                    existing_nullable=False,
                    existing_server_default=sa.text(u'CURRENT_TIMESTAMP'))
    op.alter_column(u'competition_result', 'updated_time',
                    existing_type=mysql.DATETIME(),
                    type_=sa.DateTime(timezone=True),
                    existing_nullable=False,
                    existing_server_default=sa.text(u'CURRENT_TIMESTAMP'))

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('artist',
                  sa.Column('seeking_venue', sa.Boolean(), nullable=True))
    op.alter_column('artist', 'city',
                    existing_type=sa.VARCHAR(length=120), nullable=True)
    op.alter_column('artist', 'genres',
                    existing_type=sa.VARCHAR(length=120), nullable=True)
    op.alter_column('artist', 'name',
                    existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column('artist', 'past_shows_count',
                    existing_type=sa.INTEGER(), nullable=True)
    op.alter_column('artist', 'state',
                    existing_type=sa.VARCHAR(length=120), nullable=True)
    op.alter_column('artist', 'upcoming_shows_count',
                    existing_type=sa.INTEGER(), nullable=True)
    op.alter_column('show', 'start_time',
                    existing_type=postgresql.TIMESTAMP(), nullable=True)
    op.add_column('venue',
                  sa.Column('seeking_talent', sa.Boolean(), nullable=True))
    op.alter_column('venue', 'city',
                    existing_type=sa.VARCHAR(length=120), nullable=True)
    op.alter_column('venue', 'genres',
                    existing_type=sa.CHAR(length=120), nullable=True)
    op.alter_column('venue', 'name',
                    existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column('venue', 'past_shows_count',
                    existing_type=sa.INTEGER(), nullable=True)
    op.alter_column('venue', 'state',
                    existing_type=sa.VARCHAR(length=120), nullable=True)
    op.alter_column('venue', 'upcoming_shows_count',
                    existing_type=sa.INTEGER(), nullable=True)
    # use IS NULL: "col = NULL" is never true in SQL, so the original
    # "WHERE seeking_talent = NULL" condition updated no rows
    op.execute(
        'UPDATE venue SET seeking_talent = False '
        'WHERE seeking_talent IS NULL;')
    op.execute(
        'UPDATE artist SET seeking_venue = False '
        'WHERE seeking_venue IS NULL;')

def insert_t02_gta_data_iran():
    df = feather.read_dataframe(path)
    itrpoc_engine = gen_itr_engine(db='itrpoc')
    df.to_sql('t02_gta_data_iran',
              itrpoc_engine,
              if_exists='replace',
              index=False,
              chunksize=10000,
              dtype={
                  'year': sqlalchemy.INT(),
                  'trade': sqlalchemy.VARCHAR(8),
                  'hscode': sqlalchemy.CHAR(6),
                  'reporter': sqlalchemy.VARCHAR(45),
                  'partner': sqlalchemy.VARCHAR(45),
                  'weight': sqlalchemy.BIGINT(),
                  'value': sqlalchemy.BIGINT(),
              })
    itrpoc_engine.dispose()

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'products',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=500), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('image', sa.String(), nullable=True),
        sa.Column('quantity', sa.Integer(), nullable=True),
        sa.Column('price', sa.Float(), nullable=True),
        sa.Column('category_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['category_id'], ['categories.id']),
        sa.PrimaryKeyConstraint('id'))
    op.alter_column('categories', 'name',
                    existing_type=sa.CHAR(length=50),
                    nullable=True)

def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'promo',
        sa.Column('clip_data', postgresql.BYTEA(),
                  autoincrement=False, nullable=True))
    op.drop_column('promo', 'clip_url')
    op.add_column(
        'clip',
        sa.Column('clip_data', postgresql.BYTEA(),
                  autoincrement=False, nullable=True))
    op.drop_column('clip', 'clip_url')
    op.add_column(
        'channel',
        sa.Column('image_data', postgresql.BYTEA(),
                  autoincrement=False, nullable=True))
    op.drop_column('channel', 'image_url')
    op.create_table(
        'user',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('date_created', postgresql.TIMESTAMP(),
                  autoincrement=False, nullable=True),
        sa.Column('last_updated', postgresql.TIMESTAMP(),
                  autoincrement=False, nullable=True),
        sa.Column('username', sa.VARCHAR(),
                  autoincrement=False, nullable=False),
        sa.Column('password', sa.CHAR(length=120),
                  autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('id', name=u'user_pkey'),
        sa.UniqueConstraint('username', name=u'user_username_key'))

class ContainerConsumerMetadatum(BASE, SoftDeleteMixIn, ModelBase):
    """Stores Consumer Registrations for Containers in the datastore.

    Services can register interest in Containers. Services will provide a
    type and a URL for the object that is using the Container.
    """
    __tablename__ = 'container_consumer_metadata'

    container_id = sa.Column(sa.String(36),
                             sa.ForeignKey('containers.id'),
                             index=True, nullable=False)
    project_id = sa.Column(sa.String(36),
                           sa.ForeignKey('projects.id'),
                           index=True, nullable=True)
    name = sa.Column(sa.String(36))
    URL = sa.Column(sa.String(255))
    data_hash = sa.Column(sa.CHAR(64))

    __table_args__ = (
        sa.UniqueConstraint('data_hash',
                            name='_consumer_hashed_container_name_url_uc'),
        sa.Index('values_index', 'container_id', 'name', 'URL'))

    def __init__(self, container_id, project_id, parsed_request):
        """Registers a Consumer to a Container."""
        super(ContainerConsumerMetadatum, self).__init__()
        # TODO(john-wood-w) This class should really be immutable due to the
        # data_hash attribute.
        if container_id and parsed_request:
            self.container_id = container_id
            self.project_id = project_id
            self.name = parsed_request.get('name')
            self.URL = parsed_request.get('URL')
            hash_text = ''.join((self.container_id, self.name, self.URL))
            self.data_hash = hashlib.sha256(
                hash_text.encode('utf-8')).hexdigest()
            self.status = States.ACTIVE

    def _do_extra_dict_fields(self):
        """Sub-class hook method: return dict of fields."""
        return {'name': self.name, 'URL': self.URL}

def cast(cnxn): """ Create cast tabell. """ with cnxn.connect() as con: con.execute('DROP TABLE IF EXISTS cast;') meta = sa.MetaData() print("Creating table cast...") imdbTable = sa.Table( "cast", meta, sa.Column("titleId", sa.VARCHAR(100), primary_key=True), sa.Column("ordering", sa.INTEGER(), primary_key=True), sa.Column("personId", sa.VARCHAR(300)), sa.Column("category", sa.VARCHAR(500)), sa.Column("job", sa.VARCHAR(500)), sa.Column("surrogate", sa.VARCHAR(1000)), sa.Column("hashkey", sa.CHAR(32)), ) meta.create_all(cnxn) title_ak = pd.read_csv(os.path.join(data_path, 'title.principals.tsv'), sep='\t', na_values='\\N', quoting=3, nrows=rows) title_ak = title_ak.rename(columns={ 'tconst': 'titleId', 'nconst': 'personId' }) #Droppa onödiga kolumner title_ak.drop(['characters'], axis=1, inplace=True) #Business key/surrogate och hasha title_ak['surrogate'] = title_ak['titleId'].astype( str) + '-' + title_ak['ordering'].astype(str) cast = hashkey(title_ak, 'surrogate') #to db cast.to_sql('cast', cnxn, if_exists='append', index=None, chunksize=100000)
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'membership',
        sa.Column('uuid', sa.CHAR(length=36), nullable=False),
        sa.Column('account_id', sa.Integer(), nullable=True),
        sa.Column('create_at', sa.DateTime(), nullable=True),
        sa.Column('duration',
                  sa.Enum('NONE', 'ONE_MONTH', 'TWO_MONTH', 'THREE_MONTH',
                          'FOUR_MONTH', 'FIVE_MONTH', 'SIX_MONTH', 'ONE_YEAR',
                          name='membershipduration'),
                  nullable=False),
        sa.Column('first_time', sa.Boolean(), nullable=False),
        sa.Column('adherent_id', sa.Integer(), nullable=False),
        sa.Column('payment_method_id', sa.Integer(), nullable=True),
        sa.Column('products', sa.String(length=255), nullable=True),
        sa.Column('status',
                  sa.Enum('INITIAL', 'PENDING_RULES',
                          'PENDING_PAYMENT_INITIAL', 'PENDING_PAYMENT',
                          'PENDING_PAYMENT_VALIDATION', 'COMPLETE',
                          'CANCELLED', 'ABORTED',
                          name='membershipstatus'),
                  nullable=False),
        sa.Column('update_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['account_id'], ['accounts.id']),
        sa.ForeignKeyConstraint(['adherent_id'], ['adherents.id']),
        sa.ForeignKeyConstraint(['payment_method_id'],
                                ['payment_methods.id']),
        sa.PrimaryKeyConstraint('uuid'))

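# A matching downgrade sketch (not in the original). On PostgreSQL, sa.Enum
# creates the named types 'membershipduration' and 'membershipstatus', and
# op.drop_table does not remove them, so they are dropped explicitly here;
# on backends without native enum types the two drops are no-ops.
def downgrade():
    op.drop_table('membership')
    sa.Enum(name='membershipduration').drop(op.get_bind(), checkfirst=True)
    sa.Enum(name='membershipstatus').drop(op.get_bind(), checkfirst=True)
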
def upgrade():
    op.create_table(
        'debit_note_date',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('state', sa.Boolean(), nullable=False),
        sa.Column('date', sa.DateTime(), nullable=False),
        sa.Column('vas_name', sa.VARCHAR(length=32), nullable=False),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'debit_note_detail',
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('sn', sa.CHAR(length=32), nullable=False),
        sa.Column('vas_name', sa.VARCHAR(length=32), nullable=False),
        sa.Column('amount', sa.Numeric(precision=16, scale=2),
                  nullable=False),
        sa.Column('order_id', sa.VARCHAR(length=64), nullable=False),
        sa.Column('state', sa.Boolean(), nullable=False),
        sa.Column('valid', sa.Boolean(), nullable=False),
        sa.Column('type', sa.Enum(u'PAYMENT', u'REFUND'), nullable=False),
        sa.Column('created_on', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id'))

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.CHAR(length=36), nullable=True),
        sa.Column('first_name', sa.String(length=255), nullable=False),
        sa.Column('last_name', sa.String(length=255), nullable=False),
        sa.Column('email', sa.String(length=255), nullable=False),
        sa.Column('password', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'))
    op.create_index(op.f('ix_user_created_at'), 'user', ['created_at'],
                    unique=False)
    op.create_index(op.f('ix_user_updated_at'), 'user', ['updated_at'],
                    unique=False)
    op.create_index(op.f('ix_user_user_id'), 'user', ['user_id'],
                    unique=False)

def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'labels',
        sa.Column('labelid', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('description', sa.TEXT(), nullable=True),
        sa.Column('hotkey', sa.CHAR(length=1), nullable=True),
        sa.Column('extract', sa.TEXT(), nullable=False),
        sa.Column('labelgroupid', sa.INTEGER(), nullable=True),
        sa.Column('labelsetid', sa.INTEGER(), nullable=False),
        sa.ForeignKeyConstraint(['labelgroupid'],
                                [u'labelgroups.labelgroupid'],
                                name=u'labels_labelgroupid_fkey'),
        sa.ForeignKeyConstraint(['labelsetid'],
                                [u'labelsets.labelsetid'],
                                name=u'labels_labelsetid_fkey'),
        sa.PrimaryKeyConstraint(u'labelid', name=op.f('pk_labels')),
        sa.CheckConstraint("hotkey<>' '"),
    )
    op.create_index(op.f('ix_labels_labelgroupid'), 'labels',
                    ['labelgroupid'], unique=False)
    op.create_index('ix_labels_labelsetid_key', 'labels',
                    ['labelsetid', 'extract'], unique=True)
    op.create_index('ix_labels_labelsetid_key1', 'labels',
                    ['labelsetid', 'name'], unique=True)
    op.create_index('ix_labels_labelsetid_key2', 'labels',
                    ['labelsetid', 'hotkey'], unique=True)

def upgrade():
    op.drop_table('tasks')
    op.drop_table('workflow_executions')
    op.drop_table('workbooks')
    op.drop_table('triggers')

    op.add_column(
        'cron_triggers_v2',
        sa.Column('workflow_params', st.JsonEncoded(), nullable=True))
    op.add_column(
        'cron_triggers_v2',
        sa.Column('workflow_params_hash', sa.CHAR(length=64), nullable=True))

    op.create_unique_constraint(
        None,
        'cron_triggers_v2',
        ['workflow_input_hash', 'workflow_name', 'pattern', 'project_id',
         'workflow_params_hash'])

def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('venue', 'upcoming_shows_count',
                    existing_type=sa.INTEGER(), nullable=False)
    op.alter_column('venue', 'state',
                    existing_type=sa.VARCHAR(length=120), nullable=False)
    op.alter_column('venue', 'past_shows_count',
                    existing_type=sa.INTEGER(), nullable=False)
    op.alter_column('venue', 'name',
                    existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column('venue', 'genres',
                    existing_type=sa.CHAR(length=120), nullable=False)
    op.alter_column('venue', 'city',
                    existing_type=sa.VARCHAR(length=120), nullable=False)
    op.drop_column('venue', 'seeking_talent')
    op.alter_column('show', 'start_time',
                    existing_type=postgresql.TIMESTAMP(), nullable=False)
    op.alter_column('artist', 'upcoming_shows_count',
                    existing_type=sa.INTEGER(), nullable=False)
    op.alter_column('artist', 'state',
                    existing_type=sa.VARCHAR(length=120), nullable=False)
    op.alter_column('artist', 'past_shows_count',
                    existing_type=sa.INTEGER(), nullable=False)
    op.alter_column('artist', 'name',
                    existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column('artist', 'genres',
                    existing_type=sa.VARCHAR(length=120), nullable=False)
    op.alter_column('artist', 'city',
                    existing_type=sa.VARCHAR(length=120), nullable=False)
    op.drop_column('artist', 'seeking_venue')

class Owner(Base): __tablename__ = "owner" id = sqlalchemy.Column(sqlalchemy.CHAR(9), primary_key=True) name = sqlalchemy.Column(sqlalchemy.TEXT, nullable=False) @classmethod def upsert(cls, owners: typing.Iterable["Owner"]) -> None: keys = cls.__table__.c.keys() with engine.connect() as conn: for owner in owners: data = {key: getattr(owner, key) for key in keys} insert = (postgresql .insert(cls.__table__) .values(**data) .on_conflict_do_update( index_elements=[cls.__table__.c.id], set_={k: data[k] for k in data if k != 'id'}) ) conn.execute(insert)
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'user',
        sa.Column('created_at', sa.TIMESTAMP(),
                  server_default=sa.text('CURRENT_TIMESTAMP'),
                  nullable=True),
        sa.Column('updated_at', sa.TIMESTAMP(),
                  server_default=sa.text(
                      'CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP'),
                  nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('mail', sa.String(length=120), nullable=True),
        sa.Column('phone', sa.String(length=20), nullable=True),
        sa.Column('username', sa.String(length=10), nullable=True),
        sa.Column('password', sa.CHAR(length=32), nullable=True),
        sa.Column('is_super', sa.SmallInteger(),
                  server_default=sa.text('0'), nullable=True),
        sa.Column('active', sa.SmallInteger(),
                  server_default=sa.text('0'), nullable=True),
        sa.Column('deleted', sa.SmallInteger(),
                  server_default=sa.text('0'), nullable=True),
        sa.PrimaryKeyConstraint('user_id'),
        mysql_engine='InnoDB')
    op.create_index(op.f('ix_user_mail'), 'user', ['mail'], unique=False)
    op.create_index(op.f('ix_user_phone'), 'user', ['phone'], unique=False)
    op.create_index(op.f('ix_user_username'), 'user', ['username'],
                    unique=False)