def _version_table_schema(metadata):
    """Build and return the ``version_info`` table on *metadata*.

    The table is meant to hold exactly one row recording the schema
    version; the ``id <= 1`` check constraint enforces that invariant.
    """
    id_column = sa.Column(
        'id',
        sa.Integer,
        unique=True,
        nullable=False,
        primary_key=True,
    )
    version_column = sa.Column(
        'version',
        sa.Integer,
        unique=True,
        nullable=False,
    )
    # Restricts the table to a single entry (id can only ever be 1).
    single_row_constraint = sa.CheckConstraint('id <= 1')
    return sa.Table(
        'version_info',
        metadata,
        id_column,
        version_column,
        single_row_constraint,
    )
class WarehouseStock(Model):
    """Stock level of one product variant in one warehouse."""

    __tablename__ = "warehouse_stock"
    __table_args__ = (
        # Stock may never go negative.
        sa.CheckConstraint("quantity >= 0"),
        # At most one stock row per (warehouse, product variant) pair.
        sa.UniqueConstraint("warehouse_id", "product_variant_id"),
    )

    id = sa.Column(sa.Integer, primary_key=True)
    quantity = sa.Column(sa.Integer, nullable=False)
    product_variant_id = sa.Column(
        sa.ForeignKey("product_variant.id"),
        nullable=False,
    )
    product_variant = relationship(ProductVariant)
    warehouse_id = sa.Column(
        sa.ForeignKey("warehouse.id"),
        nullable=False,
    )
    warehouse = relationship(Warehouse)
def downgrade():
    """Recreate the user/question/answer tables dropped by ``upgrade``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'user',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('email', sa.VARCHAR(length=40), nullable=False),
        sa.Column('password', sa.VARCHAR(length=100), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'))
    op.create_table(
        'question',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('question', sa.VARCHAR(length=200), nullable=False),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'answer',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('answer', sa.VARCHAR(length=20), nullable=False),
        sa.Column('isCorrect', sa.BOOLEAN(), nullable=True),
        sa.Column('questions', sa.INTEGER(), nullable=False),
        # SQLite-style boolean emulation: restrict to 0/1.
        sa.CheckConstraint('"isCorrect" IN (0, 1)'),
        sa.ForeignKeyConstraint(
            ['questions'],
            ['question.id'],
        ),
        sa.PrimaryKeyConstraint('id'))
def downgrade():
    """Revert a table rename: restore singular 'sample'/'data_link' and
    drop the plural/new tables created by ``upgrade``.

    NOTE(review): no data is copied between old and new tables here, so
    any rows in 'samples'/'data_links' are lost on downgrade — presumably
    acceptable for this project; confirm against the matching upgrade.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'sample',
        sa.Column('id', sa.VARCHAR(length=50), nullable=False),
        sa.Column('formula', sa.VARCHAR(length=50), nullable=True),
        sa.Column('description', sa.VARCHAR(), nullable=True),
        sa.Column('starred', sa.BOOLEAN(), nullable=True),
        sa.Column('project', sa.VARCHAR(), nullable=True),
        # SQLite-style boolean emulation: restrict to 0/1.
        sa.CheckConstraint('starred IN (0, 1)'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'data_link',
        sa.Column('directoryPath', sa.VARCHAR(), nullable=True),
        sa.Column('scriptPath', sa.VARCHAR(), nullable=True),
        sa.Column('dataType', sa.VARCHAR(), nullable=True),
        sa.Column('name', sa.VARCHAR(), nullable=False),
        sa.Column('description', sa.VARCHAR(), nullable=True),
        sa.PrimaryKeyConstraint('name'))
    op.drop_table('script_params')
    op.drop_table('samples')
    op.drop_table('data_links')
def downgrade():
    """Revert the upgrade: drop the grubhub_id column, restore the
    'Driver' table, and drop tables created by ``upgrade``.

    NOTE(review): this recreates 'Driver' (capitalized) while also
    dropping 'driver' (lowercase) below — looks intentional on a
    case-sensitive backend, but verify both table names exist upstream.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('user', 'grubhub_id')
    op.create_table(
        'Driver',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('email', sa.VARCHAR(length=255), nullable=False),
        sa.Column('registered_on', sa.DATETIME(), nullable=False),
        sa.Column('admin', sa.BOOLEAN(), nullable=False),
        sa.Column('public_id', sa.VARCHAR(length=100), nullable=True),
        sa.Column('username', sa.VARCHAR(length=50), nullable=True),
        sa.Column('password_hash', sa.VARCHAR(length=100), nullable=True),
        sa.Column('destination', sa.VARCHAR(length=100), nullable=True),
        sa.Column('num_boxes', sa.INTEGER(), nullable=True),
        # SQLite-style boolean emulation: restrict to 0/1.
        sa.CheckConstraint('admin IN (0, 1)'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('public_id'),
        sa.UniqueConstraint('username'))
    op.drop_table('restaurant')
    op.drop_table('order')
    op.drop_table('driver')
    op.drop_table('cleaner')
class PredefinedAppTemplate(Base):
    """A versioned template belonging to a predefined app."""

    __tablename__ = 'predefined_app_templates'

    id = sa.Column(sa.Integer, primary_key=True, autoincrement=True,
                   nullable=False)
    predefined_app_id = sa.Column(sa.Integer,
                                  sa.ForeignKey("predefined_apps.id"),
                                  nullable=False)
    # The template payload itself.
    template = sa.Column(sa.Text, nullable=False)
    active = sa.Column(sa.Boolean, default=False, nullable=False)
    switching_allowed = sa.Column(sa.Boolean, default=True, nullable=False)
    is_deleted = sa.Column(sa.Boolean, default=False, nullable=False)
    created = sa.Column(sa.DateTime, nullable=True)
    modified = sa.Column(sa.DateTime, nullable=True)

    __table_args__ = (
        # Partial unique index (PostgreSQL): at most ONE active template
        # per predefined app — uniqueness only applies where active=true.
        sa.Index('predefined_app_id_active', 'predefined_app_id', 'active',
                 unique=True, postgresql_where=active),
        # A template cannot be both active and deleted.
        sa.CheckConstraint('NOT (active AND is_deleted)'),
    )
def downgrade():
    """Recreate the comments table and its timestamp index."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'comments',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('body', sa.TEXT(), nullable=True),
        sa.Column('body_html', sa.TEXT(), nullable=True),
        sa.Column('timestamp', sa.DATETIME(), nullable=True),
        sa.Column('disabled', sa.BOOLEAN(), nullable=True),
        sa.Column('author_id', sa.INTEGER(), nullable=True),
        sa.Column('post_id', sa.INTEGER(), nullable=True),
        # SQLite-style boolean emulation: restrict to 0/1.
        sa.CheckConstraint('disabled IN (0, 1)'),
        sa.ForeignKeyConstraint(
            ['author_id'],
            ['user.id'],
        ),
        sa.ForeignKeyConstraint(
            ['post_id'],
            ['post.id'],
        ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index('ix_comments_timestamp', 'comments', ['timestamp'],
                    unique=False)
def upgrade():
    """Create the monitors table and a unique index on its slug."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'monitors',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=128), nullable=False),
        sa.Column('slug', sa.String(), nullable=False),
        sa.Column('created', sa.TIMESTAMP(timezone=True), nullable=False),
        sa.Column('updated', sa.TIMESTAMP(timezone=True), nullable=False),
        # Polling frequency; units not stated here — see model definition.
        sa.Column('frequency', sa.Integer(), nullable=False),
        sa.Column('scheme', sa.String(length=16), nullable=False),
        sa.Column('server', sa.String(length=256), nullable=False),
        sa.Column('port', sa.Integer(), nullable=False),
        sa.Column('path', sa.String(), nullable=True),
        sa.Column('verb',
                  sa.Enum('GET', 'POST', 'PUT', 'DELETE', name='httpverb'),
                  nullable=True),
        sa.Column('payload', sa.Text(), nullable=True),
        sa.Column('headers', sqlalchemy_json.MutableJson(), nullable=True),
        # Valid TCP port numbers only (1..65535).
        sa.CheckConstraint('port > 0 AND port < 65536',
                           name='valid_port_range'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_monitors_slug'), 'monitors', ['slug'],
                    unique=True)
def upgrade():
    """Create the ``tb_product_sku`` table."""
    sku_columns = [
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('product_id', sa.Integer(), nullable=True,
                  comment='产品关联键'),
        sa.Column('name', sa.String(length=128), nullable=True,
                  comment='SKU名称'),
        sa.Column('description', sa.String(length=128), nullable=True,
                  comment='SKU描述'),
        sa.Column('price', sa.DECIMAL(precision=5, scale=2), nullable=True,
                  comment='SKU价格'),
        sa.Column('stock', sa.Integer(), nullable=True, comment='SKU库存'),
    ]
    table_constraints = [
        # Stock may never go negative.
        sa.CheckConstraint('stock >= 0'),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('tb_product_sku', *sku_columns, *table_constraints)
def upgrade():
    """Create the vote table keyed on (talk_id, user_id)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'vote',
        sa.Column('created', sa.TIMESTAMP(), nullable=False),
        sa.Column('updated', sa.TIMESTAMP(), nullable=False),
        sa.Column('talk_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('public_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('value', sa.Integer(), nullable=True),
        sa.Column('skipped', sa.Boolean(), nullable=True),
        # Vote value is down/neutral/up (-1, 0, 1) or NULL when unset.
        sa.CheckConstraint('value is NULL OR value IN (-1, 0, 1)',
                           name='ck_vote_values'),
        sa.ForeignKeyConstraint(
            ['talk_id'],
            ['talk.talk_id'],
        ),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['user.user_id'],
        ),
        # One vote row per (talk, user) pair.
        sa.PrimaryKeyConstraint('talk_id', 'user_id'),
        sa.UniqueConstraint('public_id'))
class Endangerment(_backend.Model):
    """Endangerment assessment (status/source/date/comment) of a languoid."""

    __tablename__ = 'endangerment'

    # One assessment per languoid (PK is also the FK).
    languoid_id = sa.Column(sa.ForeignKey('languoid.id'), primary_key=True)
    status = sa.Column(sa.Enum(*ENDANGERMENT_STATUS), nullable=False)
    # sorted() makes the enum's value order deterministic across runs.
    source = sa.Column(sa.Enum(*sorted(ENDANGERMENT_SOURCE)), nullable=False)
    date = sa.Column(sa.DateTime, nullable=False)
    # Comment is mandatory and must be non-empty.
    comment = sa.Column(sa.Text, sa.CheckConstraint("comment != ''"),
                        nullable=False)

    def __repr__(self):
        return '<%s languoid_id=%r status=%r source=%r date=%r>' % (
            self.__class__.__name__,
            self.languoid_id, self.status, self.source, self.date)

    languoid = sa.orm.relationship('Languoid', innerjoin=True,
                                   back_populates='endangerment')
def upgrade():
    """Create the purchase table referencing product and supplier."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'purchase',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('bill_no', sa.Integer(), nullable=True),
        sa.Column('date', sa.Date(), nullable=False),
        sa.Column('s_id', sa.Integer(), nullable=False),
        sa.Column('p_id', sa.Integer(), nullable=False),
        sa.Column('pro_qty', sa.Integer(), nullable=False),
        sa.Column('pro_price', sa.Numeric(precision=8, scale=2),
                  nullable=False),
        # A purchase must be for at least one unit.
        sa.CheckConstraint(u'pro_qty >= 1', name='check_quantity_entered'),
        sa.ForeignKeyConstraint(
            ['p_id'],
            ['tbl_product.id'],
        ),
        sa.ForeignKeyConstraint(
            ['s_id'],
            ['tbl_supplier.id'],
        ),
        sa.PrimaryKeyConstraint('id'))
def upgrade():
    """Create per-collection recording flags and an index by collection."""
    op.create_table(
        'audio_collection_recording_flags',
        sa.Column('recording_flag_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('severity', sa.TEXT(), nullable=False),
        # New flags are enabled by default (server-side default).
        sa.Column('enabled', sa.BOOLEAN(), server_default='true',
                  nullable=False),
        # Closed vocabulary for severity levels.
        sa.CheckConstraint(u"severity IN ('Info', 'Warning', 'Severe')"),
        sa.ForeignKeyConstraint(
            ['audio_collection_id'],
            ['audio_collections.audio_collection_id'],
        ),
        sa.PrimaryKeyConstraint('recording_flag_id'))
    op.create_index('audio_collection_recording_flags_by_audio_collection_id',
                    'audio_collection_recording_flags',
                    ['audio_collection_id'],
                    unique=False)
def upgrade():
    """Create the single-row kube-worker resource table and seed it.

    ``one_row_id`` is a boolean primary key defaulting to true; on
    dialects that support it, a check constraint keeps the flag truthy so
    the table can never hold more than its one bookkeeping row.
    """
    columns_and_constraints = [
        sa.Column("one_row_id", sa.Boolean, server_default=sa.true(),
                  primary_key=True),
        sa.Column("worker_uuid", sa.String(255))
    ]
    conn = op.get_bind()
    # alembic creates an invalid SQL for mssql dialect, so skip the
    # constraint there.
    # BUGFIX: the original test was ``not in ('mssql')`` — without a
    # trailing comma the parentheses are NOT a tuple, so this was a
    # substring test against the string 'mssql' and would also have
    # skipped any dialect whose name is a substring of 'mssql'.
    if conn.dialect.name not in ('mssql',):
        columns_and_constraints.append(
            sa.CheckConstraint("one_row_id", name="kube_worker_one_row_id"))
    table = op.create_table(RESOURCE_TABLE, *columns_and_constraints)
    # Seed the single row so later code can UPDATE rather than INSERT.
    op.bulk_insert(table, [{"worker_uuid": ""}])
def upgrade():
    """Create the single_count_aggregate table.

    One count per (jid, date, ethnicity, gender, race) combination; the
    ENUM columns reuse existing PostgreSQL types (create_type=False).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'single_count_aggregate',
        sa.Column('record_id', sa.Integer(), nullable=False),
        sa.Column('jid', sa.String(length=8), nullable=False),
        sa.Column('date', sa.Date(), nullable=False),
        sa.Column('ethnicity',
                  postgresql.ENUM('EXTERNAL_UNKNOWN', 'HISPANIC',
                                  'NOT_HISPANIC', create_type=False,
                                  name='ethnicity'),
                  nullable=True),
        sa.Column('gender',
                  postgresql.ENUM('EXTERNAL_UNKNOWN', 'FEMALE', 'MALE',
                                  'OTHER', 'TRANS', 'TRANS_FEMALE',
                                  'TRANS_MALE', create_type=False,
                                  name='gender'),
                  nullable=True),
        sa.Column('race',
                  postgresql.ENUM('AMERICAN_INDIAN_ALASKAN_NATIVE', 'ASIAN',
                                  'BLACK', 'EXTERNAL_UNKNOWN',
                                  'NATIVE_HAWAIIAN_PACIFIC_ISLANDER', 'OTHER',
                                  'WHITE', create_type=False, name='race'),
                  nullable=True),
        sa.Column('count', sa.Integer(), nullable=False),
        # jids are fixed-width 8-character identifiers.
        sa.CheckConstraint('LENGTH(jid) = 8',
                           name='single_count_jid_length_check'),
        sa.PrimaryKeyConstraint('record_id'),
        sa.UniqueConstraint('jid', 'date', 'ethnicity', 'gender', 'race'))
def test_reserved(self):
    """Tables/columns/constraints named with SQL reserved words must
    create, reflect, and drop without error (quoting round-trip).

    NOTE(review): uses legacy SQLAlchemy APIs (bound MetaData,
    ``autoload=True``) — this test targets an old SQLAlchemy version.
    """
    # check a table that uses an SQL reserved name doesn't cause an error
    meta = MetaData(testing.db)
    table_a = Table('select', meta,
                    Column('not', sa.Integer, primary_key=True),
                    Column('from', sa.String(12), nullable=False),
                    sa.UniqueConstraint('from', name='when'))
    sa.Index('where', table_a.c['from'])

    # There's currently no way to calculate identifier case normalization
    # in isolation, so...
    if testing.against('firebird', 'oracle', 'maxdb'):
        check_col = 'TRUE'
    else:
        check_col = 'true'
    quoter = meta.bind.dialect.identifier_preparer.quote_identifier

    table_b = Table(
        'false', meta,
        Column('create', sa.Integer, primary_key=True),
        Column('true', sa.Integer, sa.ForeignKey('select.not')),
        # Constraint text must quote the reserved column name itself.
        sa.CheckConstraint('%s <> 1' % quoter(check_col), name='limit'))

    table_c = Table(
        'is', meta,
        Column('or', sa.Integer, nullable=False, primary_key=True),
        Column('join', sa.Integer, nullable=False, primary_key=True),
        sa.PrimaryKeyConstraint('or', 'join', name='to'))
    index_c = sa.Index('else', table_c.c.join)

    meta.create_all()
    index_c.drop()

    # Reflect everything back; failure to parse quoted names raises here.
    meta2 = MetaData(testing.db)
    try:
        table_a2 = Table('select', meta2, autoload=True)
        table_b2 = Table('false', meta2, autoload=True)
        table_c2 = Table('is', meta2, autoload=True)
    finally:
        meta.drop_all()
class DeviceAttr(db.Model):
    """Attribute of a device template, possibly nested under a parent attr.

    An attribute belongs either to a template (template_id set) or to a
    parent attribute (parent_id set) — exactly one of the two, enforced by
    the table-level check constraint below.
    """

    __tablename__ = 'attrs'

    id = db.Column(db.Integer, db.Sequence('attr_id'), primary_key=True)
    label = db.Column(db.String(128), nullable=False)
    created = db.Column(db.DateTime, default=datetime.now)
    updated = db.Column(db.DateTime, onupdate=datetime.now)
    type = db.Column(db.String(32), nullable=False)
    value_type = db.Column(db.String(32), nullable=False)
    static_value = db.Column(db.String(128))

    template_id = db.Column(db.Integer, db.ForeignKey('templates.id'))
    template = db.relationship("DeviceTemplate", back_populates="attrs")

    # Self-referential parent/child hierarchy of attributes.
    parent_id = db.Column(db.Integer, db.ForeignKey('attrs.id'))
    parent = db.relationship("DeviceAttr", remote_side=[id],
                             back_populates="children")
    children = db.relationship("DeviceAttr", back_populates="parent",
                               cascade="delete")

    # remove known overrides if this attribute is removed
    overrides = db.relationship('DeviceOverride', cascade="delete")

    # remove known pre shared keys if this attribute is removed
    pre_shared_keys = db.relationship('DeviceAttrsPsk', cascade="delete",
                                      back_populates="attrs")

    # Any given template must not possess two attributes with the same type, label
    __table_args__ = (
        sqlalchemy.UniqueConstraint('template_id', 'type', 'label'),
        # XOR: exactly one of template_id / parent_id is set.
        sqlalchemy.CheckConstraint("((template_id IS NULL) AND NOT (parent_id IS NULL)) OR \
            (NOT (template_id IS NULL) AND (parent_id IS NULL))")
    )

    def __repr__(self):
        children_str = ""
        for child in self.children:
            children_str += "«{}:{}»".format(child.label, child.static_value)
        return "<Attr(label='{}', type='{}', value_type='{}', children='{}', parent={})>".format(
            self.label, self.type, self.value_type, children_str, self.parent)
def downgrade():
    """Restore pyfunceble_file and re-link pyfunceble_status to it.

    NOTE(review): the FK and NOT NULL are restored *before* the
    referenced table is recreated — order presumably works on MySQL with
    FK checks deferred/disabled during migration; confirm before reuse.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_foreign_key(
        "pyfunceble_status_ibfk_1",
        "pyfunceble_status",
        "pyfunceble_file",
        ["file_id"],
        ["id"],
        onupdate="CASCADE",
        ondelete="CASCADE",
    )
    op.alter_column(
        "pyfunceble_status",
        "file_id",
        existing_type=mysql.INTEGER(display_width=11),
        nullable=False,
    )
    op.create_table(
        "pyfunceble_file",
        sa.Column("id", mysql.INTEGER(display_width=11), autoincrement=True,
                  nullable=False),
        sa.Column("created", mysql.DATETIME(), nullable=False),
        sa.Column("modified", mysql.DATETIME(), nullable=True),
        sa.Column("path", mysql.TEXT(collation="utf8mb4_unicode_ci"),
                  nullable=False),
        sa.Column(
            "test_completed",
            mysql.TINYINT(display_width=1),
            autoincrement=False,
            nullable=False,
        ),
        # MySQL boolean emulation via TINYINT restricted to 0/1.
        sa.CheckConstraint("`test_completed` in (0,1)", name="CONSTRAINT_1"),
        sa.PrimaryKeyConstraint("id"),
        mysql_collate="utf8mb4_unicode_ci",
        mysql_default_charset="utf8mb4",
        mysql_engine="InnoDB",
    )
    op.create_index("path", "pyfunceble_file", ["path"], unique=True)
def upgrade():
    """Create the media table plus lookup indexes on its FK columns."""
    op.create_table(
        "media",
        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("peripheral_id", sa.Integer(), nullable=False),
        sa.Column("kit_id", sa.Integer(), nullable=False),
        sa.Column("kit_configuration_id", sa.Integer(), nullable=False),
        sa.Column("datetime", sa.DateTime(timezone=True), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("type", sa.String(), nullable=False),
        sa.Column("metadata", postgresql.JSON(astext_type=sa.Text()),
                  nullable=False),
        sa.Column("size", sa.BigInteger(), nullable=False),
        # File sizes cannot be negative.
        sa.CheckConstraint("size >= 0", name="size_positive"),
        sa.ForeignKeyConstraint(
            ["kit_configuration_id"],
            ["kit_configurations.id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["kit_id"], ["kits.id"], onupdate="CASCADE", ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(
            ["peripheral_id"],
            ["peripherals.id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_media_datetime"), "media", ["datetime"],
                    unique=False)
    op.create_index(
        op.f("ix_media_kit_configuration_id"),
        "media",
        ["kit_configuration_id"],
        unique=False,
    )
    op.create_index(op.f("ix_media_kit_id"), "media", ["kit_id"],
                    unique=False)
    op.create_index(
        op.f("ix_media_peripheral_id"), "media", ["peripheral_id"],
        unique=False
    )
def downgrade():
    """Rename *_at timestamp columns back and restore pyfunceble_file.

    Rename is done as add-new/drop-old; column data is not copied here —
    presumably acceptable for this project (confirm against ``upgrade``).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "pyfunceble_whois_record",
        sa.Column("modified", mysql.DATETIME(), nullable=True),
    )
    op.add_column(
        "pyfunceble_whois_record",
        sa.Column("created", mysql.DATETIME(), nullable=False),
    )
    op.drop_column("pyfunceble_whois_record", "modified_at")
    op.drop_column("pyfunceble_whois_record", "created_at")
    op.add_column("pyfunceble_status",
                  sa.Column("modified", mysql.DATETIME(), nullable=True))
    op.add_column("pyfunceble_status",
                  sa.Column("created", mysql.DATETIME(), nullable=False))
    op.drop_column("pyfunceble_status", "modified_at")
    op.drop_column("pyfunceble_status", "created_at")
    op.create_table(
        "pyfunceble_file",
        sa.Column("id", mysql.INTEGER(display_width=11), autoincrement=True,
                  nullable=False),
        sa.Column("created", mysql.DATETIME(), nullable=False),
        sa.Column("modified", mysql.DATETIME(), nullable=True),
        sa.Column("path", mysql.TEXT(collation="utf8mb4_unicode_ci"),
                  nullable=False),
        sa.Column(
            "test_completed",
            mysql.TINYINT(display_width=1),
            autoincrement=False,
            nullable=False,
        ),
        # MySQL boolean emulation via TINYINT restricted to 0/1.
        sa.CheckConstraint("`test_completed` in (0,1)", name="CONSTRAINT_1"),
        sa.PrimaryKeyConstraint("id"),
        mysql_collate="utf8mb4_unicode_ci",
        mysql_default_charset="utf8mb4",
        mysql_engine="InnoDB",
    )
def _version_table_schema(metadata):
    """Return the single-row ``version_info`` table bound to *metadata*."""
    # NOTE: When modifying this schema, update the ASSET_DB_VERSION value
    return sa.Table(
        'version_info',
        metadata,
        sa.Column(
            'id',
            sa.Integer,
            unique=True,
            nullable=False,
            primary_key=True,
        ),
        sa.Column(
            'version',
            sa.Integer,
            unique=True,
            nullable=False,
        ),
        # This constraint ensures a single entry in this table
        sa.CheckConstraint('id <= 1'),
    )
def json_column(column_name: str, *, default=None) -> sa.Column:
    """A JSONB column.

    Return a column of type JSONB for use in models. Use this for entries
    like <language>: <text>

    :param column_name: the name of the column (used to build the check
        constraint text and its name — it must match the attribute name
        the column is assigned to)
    :param default: the *server-side* default (passed as ``server_default``;
        default value None, meaning no server default)
    :return: a SQLAlchemy Column for a non-null JSONB type.
    """
    return sa.Column(
        pg.json.JSONB,
        # Containment of the empty object restricts values to JSON
        # objects (arrays and scalars do not contain '{}').
        sa.CheckConstraint(
            "{} @> '{{}}'".format(column_name),
            name='{}_valid_json_check'.format(column_name),
        ),
        nullable=False,
        server_default=default,
    )
def upgrade():
    """Create product and supplier tables; add date columns to members."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'tbl_product',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=False),
        sa.Column('description', sa.String(length=100), nullable=False),
        sa.Column('quantity', sa.Integer(), nullable=False),
        sa.Column('price', sa.NUMERIC(precision=8, scale=2), nullable=True),
        # Stock quantity may never go negative.
        sa.CheckConstraint(u'quantity >= 0', name='check_quantity_positive'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'tbl_supplier',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=False),
        sa.Column('addr', sa.String(length=100), nullable=True),
        sa.Column('ph', sa.String(length=50), nullable=False),
        sa.Column('type', sa.String(length=50), nullable=False),
        sa.PrimaryKeyConstraint('id'))
    op.add_column(u'tbl_member',
                  sa.Column('d_date', sa.DateTime(), nullable=False))
    op.add_column(u'tbl_member',
                  sa.Column('e_date', sa.DateTime(), nullable=False))
class Email(Base):
    """Models an e-mail address."""

    __tablename__ = 'email'

    id = util.pk()
    address = sa.Column(
        pg.TEXT,
        # Minimal sanity check: the address must contain an '@'.
        sa.CheckConstraint("address ~ '.*@.*'"),
        nullable=False,
        unique=True
    )
    # Owning user.
    user_id = sa.Column(pg.UUID, util.fk('auth_user.id'), nullable=False)
    last_update_time = util.last_update_time()

    def _asdict(self) -> OrderedDict:
        # Serialization helper; note it reads self.user.name, so the
        # relationship must be loadable when this is called.
        return OrderedDict((
            ('id', self.id),
            ('address', self.address),
            ('user', self.user.name),
            ('last_update_time', self.last_update_time),
        ))
class VlanAllocation(model_base.BASEV2):
    """Represent allocation state of a vlan_id on a physical network.

    If allocated is False, the vlan_id on the physical_network is
    available for allocation to a tenant network. If allocated is True,
    the vlan_id on the physical_network is in use, either as a tenant or
    provider network.

    When an allocation is released, if the vlan_id for the
    physical_network is inside the pool described by
    VlanTypeDriver.network_vlan_ranges, then allocated is set to
    False. If it is outside the pool, the record is deleted.
    """

    __tablename__ = 'ml2_vlan_allocations'
    __table_args__ = (
        # Speeds up "find a free VLAN on this network" queries.
        sa.Index('ix_ml2_vlan_allocations_physical_network_allocated',
                 'physical_network', 'allocated'),
        # VLAN_CONSTRAINT is defined at module level (range check on vlan_id).
        sa.CheckConstraint(sqltext=VLAN_CONSTRAINT,
                           name='check_ml2_vlan_allocations0vlan_id'),
        model_base.BASEV2.__table_args__,
    )

    physical_network = sa.Column(sa.String(64), nullable=False,
                                 primary_key=True)
    vlan_id = sa.Column(sa.Integer, nullable=False, primary_key=True,
                        autoincrement=False)
    allocated = sa.Column(sa.Boolean, nullable=False)

    @classmethod
    def get_segmentation_id(cls):
        # Class-level accessor: the segmentation id of a VLAN segment is
        # its vlan_id column.
        return cls.vlan_id

    @property
    def segmentation_id(self):
        # Instance-level alias for vlan_id.
        return self.vlan_id

    @staticmethod
    def primary_keys():
        return {'vlan_id', 'physical_network'}
def downgrade():
    """Recreate the MST_TODO / MST_USER tables and the user indexes.

    NOTE(review): MST_TODO (which references MST_USER.id) is created
    before MST_USER — fine on SQLite, where FK targets are not checked
    at table-creation time; confirm if this migration targets another
    backend.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'MST_TODO',
        sa.Column('item_id', sa.INTEGER(), nullable=False),
        sa.Column('title', sa.VARCHAR(length=100), nullable=False),
        sa.Column('description', sa.VARCHAR(length=200), nullable=False),
        sa.Column('date_created', sa.DATETIME(), nullable=True),
        sa.Column('is_completed', sa.BOOLEAN(), nullable=False),
        sa.Column('user_id', sa.INTEGER(), nullable=False),
        # SQLite-style boolean emulation: restrict to 0/1.
        sa.CheckConstraint('is_completed IN (0, 1)'),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['MST_USER.id'],
        ),
        sa.PrimaryKeyConstraint('item_id'))
    op.create_table(
        'MST_USER',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('username', sa.VARCHAR(length=64), nullable=True),
        sa.Column('email', sa.VARCHAR(length=120), nullable=True),
        sa.Column('password_hash', sa.VARCHAR(length=128), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    # ``unique`` is a boolean flag; the autogenerated ``unique=1`` was a
    # truthy int — same behavior, now idiomatic.
    op.create_index('ix_MST_USER_username', 'MST_USER', ['username'],
                    unique=True)
    op.create_index('ix_MST_USER_email', 'MST_USER', ['email'], unique=True)
def upgrade():
    """Create users, follows (self-referential), and parties tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('email', sa.Text(), nullable=True),
        sa.Column('username', sa.Text(), nullable=True),
        sa.Column('name', sa.Text(), nullable=True),
        sa.Column('image_url', sa.Text(), nullable=True),
        sa.Column('header_image_url', sa.Text(), nullable=True),
        sa.Column('bio', sa.Text(), nullable=True),
        sa.Column('location', sa.Text(), nullable=True),
        sa.Column('password', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('username'))
    op.create_table(
        'follows',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('followee_id', sa.Integer(), nullable=True),
        sa.Column('follower_id', sa.Integer(), nullable=True),
        # A user cannot follow themselves.
        sa.CheckConstraint('follower_id != followee_id',
                           name='no_self_follow'),
        sa.ForeignKeyConstraint(['followee_id'], ['users.id'],
                                ondelete='cascade'),
        sa.ForeignKeyConstraint(['follower_id'], ['users.id'],
                                ondelete='cascade'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'parties',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('instructions', sa.Text(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.Column('cost', sa.Numeric(), nullable=True),
        sa.Column('host_id', sa.Integer(), nullable=True),
        sa.Column('attendee_id', sa.Integer(), nullable=True),
        sa.Column('host_rating', sa.Integer(), nullable=True),
        sa.Column('attendee_rating', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['attendee_id'], ['users.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['host_id'], ['users.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
def upgrade():
    """Create zoom_participants; drop zoom_messages / zoom_recordings.

    NOTE(review): ``UniqueConstraint('meeting_id')`` alone limits a
    meeting to ONE participant row and makes the composite
    ``('zoom_user_id', 'meeting_id')`` constraint redundant — looks like
    an autogeneration artifact; confirm against the model.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'zoom_participants',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('meeting_id', sa.Integer(), nullable=True),
        sa.Column('zoom_user_id', sa.String(length=50), nullable=False),
        sa.Column('zoom_name', sa.String(length=50), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        # A row can never be updated before it was created.
        sa.CheckConstraint('updated_at >= created_at'),
        sa.ForeignKeyConstraint(
            ['meeting_id'],
            ['zoom_meetings.id'],
        ),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['users.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('meeting_id'),
        sa.UniqueConstraint('zoom_user_id', 'meeting_id'))
    op.drop_table('zoom_messages')
    op.drop_table('zoom_recordings')
class BlockSchema(db.Model, Timestamp):
    """Represent one of the community's metadata block schema in the
    database.

    Every schema is versioned. Previous versions can always be
    accessible. These versions are represented as BlockSchemaVersion.

    Additionally it contains two columns ``created`` and ``updated``
    with automatically managed timestamps.
    """

    # Imported here rather than at module level — presumably to avoid a
    # circular import with the communities module; confirm before moving.
    from b2share.modules.communities.models import Community

    __tablename__ = 'b2share_block_schema'

    id = db.Column(
        UUIDType,
        default=uuid.uuid4,
        primary_key=True,
    )
    """Schema identifier."""

    name = db.Column(
        db.String(200),
        # Name must be 3..200 characters long.
        sa.CheckConstraint('LENGTH(name) > 2 AND '
                           'LENGTH(name) <= 200',
                           name='name_length'),
        nullable=False,
    )
    """Name of the schema."""

    deprecated = db.Column(db.Boolean, default=False, nullable=False)
    """True if the schema is not maintained anymore."""

    community = db.Column(UUIDType, db.ForeignKey(
        Community.id,
        name='fk_b2share_block_schema_community'),
        nullable=False)
    """Community owning and maintaining this schema."""
def upgrade():
    """Create the blacklist table plus a trigger normalizing names.

    The trigger rewrites ``name`` through ``normalize_pep426_name`` so a
    non-normalized project name can never be committed.
    """
    op.create_table(
        "blacklist",
        sa.Column(
            "id",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("gen_random_uuid()"),
            nullable=False,
        ),
        sa.Column("created", sa.DateTime(), server_default=sa.text("now()"),
                  nullable=False),
        sa.Column("name", sa.Text(), nullable=False),
        sa.Column("blacklisted_by", postgresql.UUID(), nullable=True),
        sa.Column("comment", sa.Text(), server_default="", nullable=False),
        # Valid PEP 426 project-name shape (case-insensitive match).
        sa.CheckConstraint(
            "name ~* '^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$'::text",
            name="blacklist_valid_name",
        ),
        sa.ForeignKeyConstraint(["blacklisted_by"], ["accounts_user.id"]),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )

    # Setup a trigger that will ensure that we never commit a name that
    # hasn't been normalized to our blacklist.
    op.execute("""
        CREATE OR REPLACE FUNCTION ensure_normalized_blacklist()
        RETURNS TRIGGER AS $$
            BEGIN
                NEW.name = normalize_pep426_name(NEW.name);
                RETURN NEW;
            END;
        $$
        LANGUAGE plpgsql;
    """)

    # BUGFIX: this trigger must fire BEFORE INSERT OR UPDATE. In a
    # row-level AFTER trigger PostgreSQL ignores modifications to NEW, so
    # the original AFTER INSERT OR UPDATE OR DELETE version never actually
    # normalized anything (and NEW is not even assigned for DELETE).
    op.execute("""
        CREATE TRIGGER normalize_blacklist
        BEFORE INSERT OR UPDATE ON blacklist
        FOR EACH ROW
        EXECUTE PROCEDURE ensure_normalized_blacklist();
    """)