def _add_artifact_properties_table():
    op.create_table('artifact_properties',
                    Column('id', String(length=36), nullable=False),
                    Column('artifact_id', String(length=36), nullable=False),
                    Column('name', String(length=255), nullable=False),
                    Column('string_value', String(length=255), nullable=True),
                    Column('int_value', Integer(), nullable=True),
                    Column('numeric_value', Numeric(), nullable=True),
                    Column('bool_value', Boolean(), nullable=True),
                    Column('text_value', Text(), nullable=True),
                    Column('created_at', DateTime(), nullable=False),
                    Column('updated_at', DateTime(), nullable=False),
                    Column('position', Integer(), nullable=True),
                    ForeignKeyConstraint(['artifact_id'], ['artifacts.id']),
                    PrimaryKeyConstraint('id'),
                    mysql_engine='InnoDB',
                    mysql_charset='utf8',
                    extend_existing=True)
    op.create_index('ix_artifact_properties_artifact_id',
                    'artifact_properties',
                    ['artifact_id'],
                    unique=False)
    op.create_index('ix_artifact_properties_name',
                    'artifact_properties',
                    ['name'],
                    unique=False)

def _nuke_db(self):
    self._log('Nuking the database...')
    inspector = reflection.Inspector.from_engine(self.engine)
    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()
    tbs = []
    all_fks = []
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)
    for fkc in all_fks:
        self.session.execute(DropConstraint(fkc))
    for table in tbs:
        self.session.execute(DropTable(table))
    self.session.commit()

def _get_constraints(self, table):
    """Retrieve information about existing constraints of the table.

    This feature is needed for recreate_table() to work properly.
    """
    data = table.metadata.bind.execute(
        """SELECT sql
           FROM sqlite_master
           WHERE
               type='table' AND
               name=:table_name""",
        table_name=table.name
    ).fetchone()[0]

    UNIQUE_PATTERN = r"CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)"
    constraints = []
    for name, cols in re.findall(UNIQUE_PATTERN, data):
        # Filter out any columns that were dropped from the table.
        columns = self._filter_columns(cols, table)
        if columns:
            constraints.append(UniqueConstraint(*columns, name=name))

    FKEY_PATTERN = r"CONSTRAINT (\w+) FOREIGN KEY \(([^\)]+)\)"
    for name, cols in re.findall(FKEY_PATTERN, data):
        # Filter out any columns that were dropped from the table.
        columns = self._filter_columns(cols, table)
        if columns:
            constraints.append(ForeignKeyConstraint(*columns, name=name))

    return constraints

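# A minimal, self-contained sketch of the regex extraction that
# _get_constraints() above performs on the "sql" column of sqlite_master
# (the original CREATE TABLE DDL). The DDL, table, and constraint names
# here are made up for illustration; only the two patterns mirror the
# method above.
import re

SAMPLE_DDL = """\
CREATE TABLE example (
    id INTEGER NOT NULL,
    ref_id INTEGER,
    code VARCHAR(10),
    CONSTRAINT uq_code UNIQUE (code),
    CONSTRAINT fk_ref FOREIGN KEY (ref_id) REFERENCES other (id)
)"""

UNIQUE_PATTERN = r"CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)"
FKEY_PATTERN = r"CONSTRAINT (\w+) FOREIGN KEY \(([^\)]+)\)"

# Each match yields a (constraint_name, column_list) pair.
print(re.findall(UNIQUE_PATTERN, SAMPLE_DDL))  # [('uq_code', 'code')]
print(re.findall(FKEY_PATTERN, SAMPLE_DDL))    # [('fk_ref', 'ref_id')]
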
def test_onetomany_noinflect(metadata):
    Table(
        "oglkrogk",
        metadata,
        Column("id", INTEGER, primary_key=True),
        Column("fehwiuhfiwID", INTEGER),
        ForeignKeyConstraint(["fehwiuhfiwID"], ["fehwiuhfiw.id"]),
    )
    Table("fehwiuhfiw", metadata, Column("id", INTEGER, primary_key=True))

    assert (generate_code(metadata) == """\
# coding: utf-8
from sqlalchemy import Column, ForeignKey, Integer
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()
metadata = Base.metadata


class Fehwiuhfiw(Base):
    __tablename__ = 'fehwiuhfiw'

    id = Column(Integer, primary_key=True)


class Oglkrogk(Base):
    __tablename__ = 'oglkrogk'

    id = Column(Integer, primary_key=True)
    fehwiuhfiwID = Column(ForeignKey('fehwiuhfiw.id'))

    fehwiuhfiw = relationship('Fehwiuhfiw')
""")

def rebuild_db():
    # See this SQLAlchemy recipe:
    # http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    inspector = reflection.Inspector.from_engine(db.engine)
    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()
    tbs = []
    all_fks = []
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:  # pragma: no cover
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)
    for fkc in all_fks:
        db.engine.execute(DropConstraint(fkc))
    for table in tbs:
        db.engine.execute(DropTable(table))
    db.session.commit()
    db.create_all()

def drop_db():
    """Workaround for dropping all tables in SQLAlchemy."""
    if db_engine is None:
        raise Exception('db_engine is not initialized')
    conn = db_engine.connect()
    trans = conn.begin()
    inspector = engine.reflection.Inspector.from_engine(db_engine)
    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()
    tbs = []
    all_fks = []
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))
    for table in tbs:
        conn.execute(DropTable(table))
    trans.commit()

def test_onetoone(metadata):
    Table(
        "simple_items",
        metadata,
        Column("id", INTEGER, primary_key=True),
        Column("other_item_id", INTEGER),
        ForeignKeyConstraint(["other_item_id"], ["other_items.id"]),
        UniqueConstraint("other_item_id"),
    )
    Table("other_items", metadata, Column("id", INTEGER, primary_key=True))

    assert (generate_code(metadata) == """\
# coding: utf-8
from sqlalchemy import Column, ForeignKey, Integer
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()
metadata = Base.metadata


class OtherItem(Base):
    __tablename__ = 'other_items'

    id = Column(Integer, primary_key=True)


class SimpleItem(Base):
    __tablename__ = 'simple_items'

    id = Column(Integer, primary_key=True)
    other_item_id = Column(ForeignKey('other_items.id'), unique=True)

    other_item = relationship('OtherItem', uselist=False)
""")

def drop_all_table():
    from sqlalchemy.engine import reflection
    from sqlalchemy.schema import (
        MetaData, Table, DropTable, ForeignKeyConstraint, DropConstraint)

    conn = db.engine.connect()
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(db.engine)
    metadata = MetaData()
    tbs = []
    all_fks = []
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))
    for table in tbs:
        conn.execute(DropTable(table))
    trans.commit()

class Request(BASE, ModelBase):
    """Represents a pre-cache request from another service"""
    __tablename__ = 'ess_requests'
    request_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                        Sequence('ESS_REQUEST_ID_SEQ'),
                        primary_key=True)
    scope = Column(String(SCOPE_LENGTH))
    name = Column(String(NAME_LENGTH))
    data_type = Column(DataType.db_type(name='ESS_REQUESTS_DATA_TYPE'),
                       default=DataType.DATASET)
    granularity_type = Column(
        GranularityType.db_type(name='ESS_REQUESTS_GRANULARITY_TYPE'),
        default=GranularityType.PARTIAL)
    granularity_level = Column(Integer())
    priority = Column(Integer())
    edge_id = Column(Integer)
    status = Column(RequestStatus.db_type(name='ESS_REQUESTS_STATUS'),
                    default=RequestStatus.NEW)
    request_meta = Column(JSON())  # task id, job id, panda queues inside
    processing_meta = Column(JSON())  # collection_id or file_id inside
    errors = Column(JSON())

    _table_args = (PrimaryKeyConstraint('request_id', name='ESS_REQUESTS_PK'),
                   ForeignKeyConstraint(['edge_id'], ['ess_edges.edge_id'],
                                        name='ESS_REQUESTS_EDGE_ID_FK'),
                   CheckConstraint('status IS NOT NULL',
                                   name='ESS_REQ_STATUS_ID_NN'),
                   Index('ESS_REQUESTS_SCOPE_NAME_IDX',
                         'scope', 'name', 'data_type', 'request_id'),
                   Index('ESS_REQUESTS_STATUS_PRIO_IDX',
                         'status', 'priority', 'request_id'))

class FuncKeyDestParkPosition(Base):

    DESTINATION_TYPE_ID = 7

    __tablename__ = 'func_key_dest_park_position'
    __table_args__ = (
        PrimaryKeyConstraint('func_key_id', 'destination_type_id'),
        ForeignKeyConstraint(['func_key_id', 'destination_type_id'],
                             ['func_key.id', 'func_key.destination_type_id']),
        CheckConstraint('destination_type_id = {}'.format(DESTINATION_TYPE_ID)),
        CheckConstraint("park_position ~ '^[0-9]+$'")
    )

    func_key_id = Column(Integer)
    destination_type_id = Column(Integer,
                                 server_default="{}".format(DESTINATION_TYPE_ID))
    park_position = Column(String(40), nullable=False)

    type = 'park_position'

    func_key = relationship(FuncKey, cascade='all,delete-orphan',
                            single_parent=True)

    def to_tuple(self):
        return (('position', self.position),)

    @hybrid_property
    def position(self):
        return int(self.park_position)

    @position.expression
    def position(cls):
        return cast(cls.park_position, Integer)

    @position.setter
    def position(self, value):
        self.park_position = value

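# A minimal, self-contained sketch (not from the codebase above) of the
# hybrid_property pattern FuncKeyDestParkPosition uses: a string column
# exposed as an int on instances, and as a SQL CAST in queries. The
# ParkPosition demo model and table name are assumptions for
# illustration; written against the SQLAlchemy 1.x declarative API used
# elsewhere in this file.
from sqlalchemy import Column, Integer, String, cast, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import Session

Base = declarative_base()


class ParkPosition(Base):
    __tablename__ = 'park_position_demo'
    id = Column(Integer, primary_key=True)
    park_position = Column(String(40), nullable=False)

    @hybrid_property
    def position(self):
        # instance access: plain Python conversion
        return int(self.park_position)

    @position.expression
    def position(cls):
        # class access: renders CAST(park_position AS INTEGER) in SQL
        return cast(cls.park_position, Integer)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = Session(engine)
session.add(ParkPosition(park_position='701'))
session.commit()

# The same attribute works in a filter (SQL) and on the result (Python).
row = session.query(ParkPosition).filter(ParkPosition.position == 701).one()
assert row.position == 701
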
def test_drop_with_complex_foreign_keys(self):
    from sqlalchemy.schema import ForeignKeyConstraint
    from sqlalchemy.schema import UniqueConstraint

    self.table.drop()
    self.meta.clear()

    # create FK's target
    reftable = Table('tmp_ref', self.meta,
                     Column('id', Integer, primary_key=True),
                     Column('jd', Integer),
                     UniqueConstraint('id', 'jd'))
    if self.engine.has_table(reftable.name):
        reftable.drop()
    reftable.create()

    # add a table with a complex foreign key constraint
    self.table = Table(
        self.table_name, self.meta,
        Column('id', Integer, primary_key=True),
        Column('r1', Integer),
        Column('r2', Integer),
        ForeignKeyConstraint(['r1', 'r2'],
                             [reftable.c.id, reftable.c.jd],
                             name='test_fk'))
    self.table.create()

    # paranoid check
    self.assertEqual([['r1', 'r2']], self._actual_foreign_keys())

    # delete one
    self.table.c.r2.drop()

    # check the constraint is gone, since part of it
    # is no longer there - if people hit this,
    # they may be confused, maybe we should raise an error
    # and insist that the constraint is deleted first, separately?
    self.assertEqual([], self._actual_foreign_keys())

def upgrade_resource_data_pre(migrate_engine):
    meta = sqlalchemy.MetaData(bind=migrate_engine)
    rd_table = sqlalchemy.Table('resource_data', meta, autoload=True)
    res_table = sqlalchemy.Table('resource', meta, autoload=True)

    # remove foreignkey on resource_id
    inspector = sqlalchemy.inspect(migrate_engine)
    fkc_query = inspector.get_foreign_keys('resource_data')
    if fkc_query:
        fkc = ForeignKeyConstraint([rd_table.c.resource_id],
                                   [res_table.c.id],
                                   fkc_query[0]['name'])
        migrate_engine.execute(DropConstraint(fkc))
    # migrate.ForeignKeyConstraint(columns=[rd_table.c.resource_id],
    #                              refcolumns=[res_table.c.id]).drop()

    # rename resource_id -> tmp_res_uuid
    rd_table.c.resource_id.alter('tmp_res_uuid', sqlalchemy.String(36))

    # create the new resource_id column (no foreignkey yet)
    res_id_column_kwargs = {}
    if migrate_engine.name == 'ibm_db_sa':
        # NOTE(mriedem): This is turned into a foreign key constraint
        # later so it must be non-nullable.
        res_id_column_kwargs['nullable'] = False
    res_id = sqlalchemy.Column('resource_id', sqlalchemy.Integer,
                               **res_id_column_kwargs)
    rd_table.create_column(res_id)

def drop_everything(db):
    # From: http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    conn = db.engine.connect()
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(db.engine)
    metadata = MetaData()
    tbs = []
    all_fks = []
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = db.Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))
    for table in tbs:
        conn.execute(DropTable(table))
    trans.commit()

def _add_artifact_blobs_table():
    op.create_table('artifact_blobs',
                    Column('id', String(length=36), nullable=False),
                    Column('artifact_id', String(length=36), nullable=False),
                    Column('size', BigInteger(), nullable=False),
                    Column('checksum', String(length=32), nullable=True),
                    Column('name', String(length=255), nullable=False),
                    Column('item_key', String(length=329), nullable=True),
                    Column('position', Integer(), nullable=True),
                    Column('created_at', DateTime(), nullable=False),
                    Column('updated_at', DateTime(), nullable=False),
                    ForeignKeyConstraint(['artifact_id'], ['artifacts.id']),
                    PrimaryKeyConstraint('id'),
                    mysql_engine='InnoDB',
                    mysql_charset='utf8',
                    extend_existing=True)
    op.create_index('ix_artifact_blobs_artifact_id',
                    'artifact_blobs',
                    ['artifact_id'],
                    unique=False)
    op.create_index('ix_artifact_blobs_name',
                    'artifact_blobs',
                    ['name'],
                    unique=False)

def fk_constraint(to_catalog: dict, to_collection: dict, srcdst: str) -> ForeignKeyConstraint:
    """Creates an FK constraint to the given catalog and collection, from src or dst.

    Example: foreign key from (src_id, src_volgnummer) to (_id, volgnummer),
    or foreign key from (dst_id) to (_id)

    :param to_catalog:
    :param to_collection:
    :param srcdst: src or dst
    :return:
    """
    tablename = model.get_table_name(to_catalog['name'], to_collection['name'])
    has_states = to_collection.get('has_states', False)

    return ForeignKeyConstraint(
        # Source columns (src or dst)
        [
            f"{srcdst}_{col}"
            for col in ([FIELD.REFERENCE_ID, FIELD.SEQNR]
                        if has_states else [FIELD.REFERENCE_ID])
        ],
        # Destination columns, prefixed with destination table name
        [
            f"{tablename}.{col}"
            for col in ([FIELD.ID, FIELD.SEQNR] if has_states else [FIELD.ID])
        ],
        name=f"{NameCompressor.compress_name(tablename)}_{srcdst[0]}fk"
    )

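# For context, a hedged sketch of the kind of constraint fk_constraint()
# builds for a collection with states: a composite foreign key from the
# relation table's (src_id, src_volgnummer) pair to the target table's
# (_id, volgnummer) pair. The table name, column names, and constraint
# name below are illustrative assumptions, not output of the real
# model/NameCompressor helpers.
from sqlalchemy import ForeignKeyConstraint

demo_constraint = ForeignKeyConstraint(
    ['src_id', 'src_volgnummer'],                   # source columns
    ['demo_target._id', 'demo_target.volgnummer'],  # target table columns
    name='demo_target_sfk',
)
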
def test_onetomany_selfref(metadata):
    Table(
        "simple_items",
        metadata,
        Column("id", INTEGER, primary_key=True),
        Column("parent_item_id", INTEGER),
        ForeignKeyConstraint(["parent_item_id"], ["simple_items.id"]),
    )

    assert (generate_code(metadata) == """\
# coding: utf-8
from sqlalchemy import Column, ForeignKey, Integer
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()
metadata = Base.metadata


class SimpleItem(Base):
    __tablename__ = 'simple_items'

    id = Column(Integer, primary_key=True)
    parent_item_id = Column(ForeignKey('simple_items.id'))

    parent_item = relationship('SimpleItem', remote_side=[id])
""")

class PlaceMatcher(Base):
    __tablename__ = 'place_matcher'
    start = Column(DateTime, default=now_utc(), primary_key=True)
    end = Column(DateTime)
    osm_type = Column(osm_type_enum, primary_key=True)
    osm_id = Column(BigInteger, primary_key=True)
    remote_addr = Column(String)
    user_id = Column(Integer, ForeignKey('user.id'))
    user_agent = Column(String)
    is_refresh = Column(Boolean, nullable=False)

    place = relationship('Place',
                         uselist=False,
                         backref=backref('matcher_runs',
                                         lazy='dynamic',
                                         order_by='PlaceMatcher.start.desc()'))
    user = relationship('User',
                        uselist=False,
                        backref=backref('matcher_runs',
                                        lazy='dynamic',
                                        order_by='PlaceMatcher.start.desc()'))

    __table_args__ = (
        ForeignKeyConstraint(
            ['osm_type', 'osm_id'],
            ['place.osm_type', 'place.osm_id'],
        ),
    )

    def duration(self):
        if self.end:
            return self.end - self.start

    def complete(self):
        self.end = now_utc()
        session.commit()

class Changeset(Base):
    __tablename__ = 'changeset'
    id = Column(BigInteger, primary_key=True)
    created = Column(DateTime)
    place_id = Column(BigInteger)
    osm_type = Column(osm_type_enum, index=True)
    osm_id = Column(BigInteger, index=True)
    item_id = Column(Integer)
    comment = Column(String)
    user_id = Column(Integer, ForeignKey(User.id))
    update_count = Column(Integer, nullable=False)

    __table_args__ = (
        ForeignKeyConstraint(['osm_type', 'osm_id'],
                             ['place.osm_type', 'place.osm_id']),
    )

    user = relationship('User',
                        backref=backref('changesets',
                                        lazy='dynamic',
                                        order_by='Changeset.created.desc()'))
    place = relationship('Place',
                         backref=backref('changesets',
                                         lazy='dynamic',
                                         order_by='Changeset.created.desc()'))

    @property
    def item_label(self):
        item = Item.query.get(self.item_id)
        if item:
            return item.label()

def drop_tables(engine):
    """
    Drop all the tables in the database the supplied engine is connected to.

    As many foreign key constraints as possible will be dropped
    first, making this quite brutal!
    """
    # from http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    conn = engine.connect()
    inspector = Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        for fkc in fks:
            conn.execute(DropConstraint(fkc, cascade=True))

    for table in tbs:
        conn.execute(DropTable(table))

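# Hedged usage sketch for drop_tables() above, assuming the SQLAlchemy
# 1.x API it is written against (Inspector.from_engine, string execute).
# On SQLite the reflected foreign keys typically have no name, so the
# DropConstraint branch is skipped and only the DROP TABLE statements
# run. The schema below is illustrative only.
from sqlalchemy import create_engine

engine = create_engine('sqlite://')
engine.execute('CREATE TABLE parent (id INTEGER PRIMARY KEY)')
engine.execute('CREATE TABLE child ('
               ' id INTEGER PRIMARY KEY,'
               ' parent_id INTEGER REFERENCES parent (id))')

drop_tables(engine)
assert engine.table_names() == []
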
def _add_image_properties_table():
    op.create_table('image_properties',
                    Column('id', Integer(), nullable=False),
                    Column('image_id', String(length=36), nullable=False),
                    Column('name', String(length=255), nullable=False),
                    Column('value', Text(), nullable=True),
                    Column('created_at', DateTime(), nullable=False),
                    Column('updated_at', DateTime(), nullable=True),
                    Column('deleted_at', DateTime(), nullable=True),
                    Column('deleted', Boolean(), nullable=False),
                    PrimaryKeyConstraint('id'),
                    ForeignKeyConstraint(['image_id'], ['images.id']),
                    UniqueConstraint('image_id', 'name',
                                     name='ix_image_properties_image_id_name'),
                    mysql_engine='InnoDB',
                    mysql_charset='utf8',
                    extend_existing=True)
    op.create_index('ix_image_properties_deleted',
                    'image_properties',
                    ['deleted'],
                    unique=False)
    op.create_index('ix_image_properties_image_id',
                    'image_properties',
                    ['image_id'],
                    unique=False)

class Fragment(Base):
    """Fragment model for fragments table"""
    __tablename__ = 'fragments'
    # Fragment identifier
    fragid = Column(Integer, primary_key=True, autoincrement=True)
    # Molecule identifier
    molid = Column(Integer, ForeignKey('molecules.molid'), index=True)
    # Scan identifier
    scanid = Column(Integer, ForeignKey('scans.scanid'), index=True)
    # m/z of peak in scan
    mz = Column(Float)
    # Mass of fragment in Dalton, corrected with h delta
    mass = Column(Float)
    # Score of how well the molecule fragment matches the mass spectra
    score = Column(Float)
    # Identifier of the parent fragment this fragment is a fragment of
    parentfragid = Column(Integer, ForeignKey('fragments.fragid'))
    # Atom indices of the molecule which make up the fragment,
    # as a comma separated list, starting with 0
    atoms = Column(String)
    deltah = Column(Float)
    # (mz + deltah * 1.007825032 - mass) / (mz * 1e6) as deltappm
    deltappm = Column(Float)
    smiles = Column(String)
    # molecular formula of fragment
    formula = Column(String)
    # A fragment can have child fragments
    children_backref = backref('parent', remote_side=[fragid])
    children = relationship('Fragment', backref=children_backref,
                            lazy='joined', join_depth=1)
    __table_args__ = (ForeignKeyConstraint(['scanid', 'mz'],
                                           ['peaks.scanid', 'peaks.mz']),
                      {})

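# Hedged usage sketch for the self-referential children/parent
# relationship above: parentfragid links a fragment to its parent, and
# join_depth=1 lets the eager 'joined' load fetch one level of children
# per query. The objects and a configured `session` are illustrative
# assumptions, not taken from the original test suite.
#
# root = Fragment(mz=100.0)
# child = Fragment(mz=50.0, parent=root)  # via the 'parent' backref
# session.add(root)                       # child is cascaded with root
# session.flush()
# assert child.parentfragid == root.fragid
# assert root.children == [child]
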
class CollectionContent(BASE, ModelBase):
    """Represents files"""
    __tablename__ = 'ess_coll_content'
    content_id = Column(BigInteger, Sequence('ESS_CONTENT_ID_SEQ'))
    coll_id = Column(BigInteger)
    scope = Column(String(SCOPE_LENGTH))
    name = Column(String(NAME_LENGTH))
    min_id = Column(BigInteger)
    max_id = Column(BigInteger)
    content_type = Column(ContentType.db_type(name='ESS_CONTENT_TYPE'),
                          default=ContentType.FILE)
    # size = Column(BigInteger)
    # md5 = Column(String(32))
    # adler32 = Column(String(8))
    edge_id = Column(Integer)
    status = Column(ContentStatus.db_type(name='ESS_CONTENT_STATUS'),
                    default=ContentStatus.NEW)
    priority = Column(Integer())
    num_success = Column(Integer())
    num_failure = Column(Integer())
    last_failed_at = Column(DateTime)
    pfn_size = Column(BigInteger)
    pfn = Column(String(1024))
    object_metadata = Column(JSON())

    _table_args = (
        PrimaryKeyConstraint('content_id', name='ESS_COLL_CONTENT_PK'),
        # PrimaryKeyConstraint('scope', 'name', 'coll_id', 'content_type',
        #                      'min_id', 'max_id', 'edge_id', 'content_id',
        #                      name='ESS_COLL_CONTENT_PK'),
        ForeignKeyConstraint(['edge_id'], ['ess_edges.edge_id'],
                             name='ESS_CONTENT_EDGE_ID_FK'),
        ForeignKeyConstraint(['coll_id'], ['ess_coll.coll_id'],
                             name='ESS_CONTENT_COLL_ID_FK'),
        CheckConstraint('status IS NOT NULL', name='ESS_CONTENT_STATUS_NN'),
        UniqueConstraint('scope', 'name', 'coll_id', 'content_type',
                         'min_id', 'max_id', 'edge_id',
                         name='ESS_CONTENT_UQ'),
        Index('ESS_CONTENT_SCOPE_NAME_IDX', 'scope', 'name', 'edge_id',
              'status'),
        Index('ESS_CONTENT_SCOPE_NAME_MINMAX_IDX', 'scope', 'name',
              'content_type', 'min_id', 'max_id', 'edge_id', 'status'),
        Index('ESS_CONTENT_COLLECTION_ID_IDX', 'coll_id', 'status'),
        Index('ESS_CONTENT_EDGE_ID_IDX', 'edge_id', 'status'),
        Index('ESS_CONTENT_STATUS_PRIORITY_IDX', 'status', 'priority'))

class Source(db.Model):
    """
    A source represents the canonical parameters that a build is running
    against.

    It always implies a revision to build off (though until we have full
    repo integration this is considered optional, and defaults to
    tip/master), and an optional patch_id to apply on top of it.
    """
    id = Column(GUID, primary_key=True, default=uuid4)
    repository_id = Column(GUID, ForeignKey('repository.id'), nullable=False)
    patch_id = Column(GUID, ForeignKey('patch.id'))
    revision_sha = Column(String(40))
    date_created = Column(DateTime, default=datetime.utcnow)
    data = Column(JSONEncodedDict)

    repository = relationship('Repository', innerjoin=False)
    patch = relationship('Patch')
    revision = relationship('Revision',
                            foreign_keys=[repository_id, revision_sha])

    __tablename__ = 'source'
    __table_args__ = (
        ForeignKeyConstraint(('repository_id', 'revision_sha'),
                             ('revision.repository_id', 'revision.sha')),
        UniqueConstraint(
            'repository_id', 'revision_sha',
            name='unq_source_revision',
            # postgresql_where=(patch_id == None)
        ),
        UniqueConstraint(
            'patch_id',
            name='unq_source_patch_id',
            # postgresql_where=(patch_id != None),
        ),
    )

    def __init__(self, **kwargs):
        super(Source, self).__init__(**kwargs)
        if self.id is None:
            self.id = uuid4()
        if self.date_created is None:
            self.date_created = datetime.utcnow()

    def generate_diff(self):
        if self.patch:
            return self.patch.diff

        vcs = self.repository.get_vcs()
        if vcs:
            try:
                return vcs.export(self.revision_sha)
            except Exception:
                pass

        return None

    def is_commit(self):
        return self.patch_id is None and self.revision_sha

class Executable(Base):
    """Class to store information about one file generated by the
    compilation of a submission.

    """
    __tablename__ = 'executables'
    __table_args__ = (
        ForeignKeyConstraint(
            ('submission_id', 'dataset_id'),
            (SubmissionResult.submission_id, SubmissionResult.dataset_id),
            onupdate="CASCADE",
            ondelete="CASCADE"),
        UniqueConstraint('submission_id', 'dataset_id', 'filename'),
    )

    # Auto increment primary key.
    id = Column(
        Integer,
        primary_key=True)

    # Submission (id and object) owning the executable.
    submission_id = Column(
        Integer,
        ForeignKey(Submission.id,
                   onupdate="CASCADE", ondelete="CASCADE"),
        nullable=False,
        index=True)
    submission = relationship(
        Submission,
        viewonly=True)

    # Dataset (id and object) owning the executable.
    dataset_id = Column(
        Integer,
        ForeignKey(Dataset.id,
                   onupdate="CASCADE", ondelete="CASCADE"),
        nullable=False,
        index=True)
    dataset = relationship(
        Dataset,
        viewonly=True)

    # SubmissionResult owning the executable.
    submission_result = relationship(
        SubmissionResult,
        backref=backref('executables',
                        collection_class=attribute_mapped_collection('filename'),
                        cascade="all, delete-orphan",
                        passive_deletes=True))

    # Filename and digest of the generated executable.
    filename = Column(
        Unicode,
        FilenameConstraint("filename"),
        nullable=False)
    digest = Column(
        String,
        DigestConstraint("digest"),
        nullable=False)

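# Hedged usage sketch: because the 'executables' backref above uses
# attribute_mapped_collection('filename'), executables hang off a
# SubmissionResult as a dict keyed by filename. The query and the
# 'a.out' key are illustrative assumptions.
#
# result = session.query(SubmissionResult).first()
# exe = result.executables.get('a.out')  # dict-like access by filename
# if exe is not None:
#     print(exe.digest)
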
def test_manytomany_composite(metadata):
    Table('simple_items', metadata,
          Column('id1', INTEGER, primary_key=True),
          Column('id2', INTEGER, primary_key=True))
    Table('simple_containers', metadata,
          Column('id1', INTEGER, primary_key=True),
          Column('id2', INTEGER, primary_key=True))
    Table(
        'container_items', metadata,
        Column('item_id1', INTEGER),
        Column('item_id2', INTEGER),
        Column('container_id1', INTEGER),
        Column('container_id2', INTEGER),
        ForeignKeyConstraint(['item_id1', 'item_id2'],
                             ['simple_items.id1', 'simple_items.id2']),
        ForeignKeyConstraint(
            ['container_id1', 'container_id2'],
            ['simple_containers.id1', 'simple_containers.id2']))

    assert generate_code(metadata) == """\

def test_onetoone(metadata):
    Table('simple_items', metadata,
          Column('id', INTEGER, primary_key=True),
          Column('other_item_id', INTEGER),
          ForeignKeyConstraint(['other_item_id'], ['other_items.id']),
          UniqueConstraint('other_item_id'))
    Table('other_items', metadata,
          Column('id', INTEGER, primary_key=True))

    assert generate_code(metadata) == """\

def test_joined_inheritance(self):
    Table(
        'simple_sub_items', self.metadata,
        Column('simple_items_id', INTEGER, primary_key=True),
        Column('data3', INTEGER),
        ForeignKeyConstraint(['simple_items_id'],
                             ['simple_items.super_item_id']))
    Table(
        'simple_super_items', self.metadata,
        Column('id', INTEGER, primary_key=True),
        Column('data1', INTEGER))
    Table(
        'simple_items', self.metadata,
        Column('super_item_id', INTEGER, primary_key=True),
        Column('data2', INTEGER),
        ForeignKeyConstraint(['super_item_id'], ['simple_super_items.id']))

    assert self.generate_code() == """\

class Workprogress2transform(BASE, ModelBase):
    """Represents a workprogress-to-transform mapping"""
    __tablename__ = 'wp2transforms'
    workprogress_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                             primary_key=True)
    transform_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                          primary_key=True)

    _table_args = (PrimaryKeyConstraint('workprogress_id', 'transform_id',
                                        name='WP2TRANSFORM_PK'),
                   ForeignKeyConstraint(['workprogress_id'],
                                        ['workprogresses.workprogress_id'],
                                        name='WP2TRANSFORM_WORK_ID_FK'),
                   ForeignKeyConstraint(['transform_id'],
                                        ['transforms.transform_id'],
                                        name='WP2TRANSFORM_TRANS_ID_FK'))

def get_table_args(cls):
    return (
        UniqueConstraint('tweet_id', 'search_term', 'data_set_id'),
        ForeignKeyConstraint(['tweet_id', 'data_set_id'],
                             ['tweet.tweet_id', 'tweet.data_set_id'],
                             ondelete='CASCADE', onupdate='CASCADE'),
    )

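# A hedged sketch of how a get_table_args() classmethod like the one
# above is typically wired into a declarative model via declared_attr.
# The SearchResult model, its columns, and the referenced 'tweet' table
# are assumptions for illustration, not taken from the original
# codebase; written against the SQLAlchemy 1.x declarative API.
from sqlalchemy import Column, Integer, String, UniqueConstraint
from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy.schema import ForeignKeyConstraint

Base = declarative_base()


class SearchResult(Base):
    __tablename__ = 'search_result'

    id = Column(Integer, primary_key=True)
    tweet_id = Column(Integer, nullable=False)
    data_set_id = Column(Integer, nullable=False)
    search_term = Column(String, nullable=False)

    @classmethod
    def get_table_args(cls):
        return (
            UniqueConstraint('tweet_id', 'search_term', 'data_set_id'),
            ForeignKeyConstraint(['tweet_id', 'data_set_id'],
                                 ['tweet.tweet_id', 'tweet.data_set_id'],
                                 ondelete='CASCADE', onupdate='CASCADE'),
        )

    # declared_attr defers evaluation until mapping, so the classmethod
    # above can supply the constraints.
    @declared_attr
    def __table_args__(cls):
        return cls.get_table_args()
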
class Collection(BASE, ModelBase):
    """Represents a collection"""
    __tablename__ = 'collections'
    coll_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                     Sequence('COLLECTION_ID_SEQ', schema=DEFAULT_SCHEMA_NAME),
                     primary_key=True)
    coll_type = Column(EnumWithValue(CollectionType))
    transform_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    relation_type = Column(EnumWithValue(CollectionRelationType))
    scope = Column(String(SCOPE_LENGTH))
    name = Column(String(NAME_LENGTH))
    bytes = Column(Integer())
    status = Column(EnumWithValue(CollectionStatus))
    substatus = Column(Integer())
    locking = Column(EnumWithValue(CollectionLocking))
    total_files = Column(Integer())
    storage_id = Column(Integer())
    new_files = Column(Integer())
    processed_files = Column(Integer())
    processing_files = Column(Integer())
    processing_id = Column(Integer())
    retries = Column(Integer(), default=0)
    created_at = Column("created_at", DateTime,
                        default=datetime.datetime.utcnow)
    updated_at = Column("updated_at", DateTime,
                        default=datetime.datetime.utcnow,
                        onupdate=datetime.datetime.utcnow)
    next_poll_at = Column("next_poll_at", DateTime,
                          default=datetime.datetime.utcnow)
    accessed_at = Column("accessed_at", DateTime,
                         default=datetime.datetime.utcnow,
                         onupdate=datetime.datetime.utcnow)
    expired_at = Column("expired_at", DateTime)
    coll_metadata = Column(JSON())

    _table_args = (PrimaryKeyConstraint('coll_id', name='COLLECTIONS_PK'),
                   UniqueConstraint('name', 'scope', 'transform_id',
                                    'relation_type',
                                    name='COLLECTIONS_NAME_SCOPE_UQ'),
                   ForeignKeyConstraint(['transform_id'],
                                        ['transforms.transform_id'],
                                        name='COLLECTIONS_TRANSFORM_ID_FK'),
                   CheckConstraint('status IS NOT NULL',
                                   name='COLLECTIONS_STATUS_ID_NN'),
                   CheckConstraint('transform_id IS NOT NULL',
                                   name='COLLECTIONS_TRANSFORM_ID_NN'),
                   Index('COLLECTIONS_STATUS_RELAT_IDX',
                         'status', 'relation_type'),
                   Index('COLLECTIONS_TRANSFORM_IDX',
                         'transform_id', 'coll_id'),
                   Index('COLLECTIONS_STATUS_UPDATED_IDX',
                         'status', 'locking', 'updated_at',
                         'next_poll_at', 'created_at'))