class Content(BASE, ModelBase):
    """Represents a content item (one unit of data within a collection)."""
    __tablename__ = 'contents'
    # Sequence-backed primary key; plain Integer under sqlite for autoincrement.
    content_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                        Sequence('CONTENT_ID_SEQ', schema=DEFAULT_SCHEMA_NAME),
                        primary_key=True)
    transform_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    coll_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    map_id = Column(BigInteger().with_variant(Integer, "sqlite"), default=0)
    scope = Column(String(SCOPE_LENGTH))
    name = Column(String(NAME_LENGTH))
    min_id = Column(Integer())
    max_id = Column(Integer())
    content_type = Column(EnumWithValue(ContentType))
    status = Column(EnumWithValue(ContentStatus))
    substatus = Column(EnumWithValue(ContentStatus))
    locking = Column(EnumWithValue(ContentLocking))
    bytes = Column(Integer())
    md5 = Column(String(32))
    adler32 = Column(String(8))
    processing_id = Column(Integer())
    storage_id = Column(Integer())
    retries = Column(Integer(), default=0)
    path = Column(String(4000))
    # Timestamps are set/refreshed by SQLAlchemy defaults, not by the database.
    created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow)
    updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    accessed_at = Column("accessed_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    expired_at = Column("expired_at", DateTime)
    content_metadata = Column(JSON())

    # Table-level constraints/indexes; presumably consumed by ModelBase to build
    # __table_args__ -- confirm against the base class.
    _table_args = (PrimaryKeyConstraint('content_id', name='CONTENTS_PK'),
                   # UniqueConstraint('name', 'scope', 'coll_id', 'content_type', 'min_id', 'max_id', name='CONTENT_SCOPE_NAME_UQ'),
                   # UniqueConstraint('name', 'scope', 'coll_id', 'min_id', 'max_id', name='CONTENT_SCOPE_NAME_UQ'),
                   # UniqueConstraint('content_id', 'coll_id', name='CONTENTS_UQ'),
                   UniqueConstraint('transform_id', 'coll_id', 'map_id', name='CONTENT_ID_UQ'),
                   ForeignKeyConstraint(['transform_id'], ['transforms.transform_id'], name='CONTENTS_TRANSFORM_ID_FK'),
                   ForeignKeyConstraint(['coll_id'], ['collections.coll_id'], name='CONTENTS_COLL_ID_FK'),
                   CheckConstraint('status IS NOT NULL', name='CONTENTS_STATUS_ID_NN'),
                   CheckConstraint('coll_id IS NOT NULL', name='CONTENTS_COLL_ID_NN'),
                   Index('CONTENTS_STATUS_UPDATED_IDX', 'status', 'locking', 'updated_at', 'created_at'))
# NOTE(review): the expected-output triple-quoted string of this test was
# truncated/mangled by formatting (unterminated """\), so the code below is
# left byte-identical rather than risk corrupting the string literal.
def test_manytomany(metadata): Table('simple_items', metadata, Column('id', INTEGER, primary_key=True)) Table('simple_containers', metadata, Column('id', INTEGER, primary_key=True)) Table('container_items', metadata, Column('item_id', INTEGER), Column('container_id', INTEGER), ForeignKeyConstraint(['item_id'], ['simple_items.id']), ForeignKeyConstraint(['container_id'], ['simple_containers.id'])) assert generate_code(metadata) == """\
# NOTE(review): expected-output string literal is truncated (unterminated """\);
# code left byte-identical to avoid corrupting the string.
def test_manytomany_selfref(metadata): Table('simple_items', metadata, Column('id', INTEGER, primary_key=True)) Table('child_items', metadata, Column('parent_id', INTEGER), Column('child_id', INTEGER), ForeignKeyConstraint(['parent_id'], ['simple_items.id']), ForeignKeyConstraint(['child_id'], ['simple_items.id']), schema='otherschema') assert generate_code(metadata) == """\
# NOTE(review): expected-output string literal is truncated (unterminated """\);
# code left byte-identical to avoid corrupting the string.
def test_onetomany_selfref_multi(self): Table( 'simple_items', self.metadata, Column('id', INTEGER, primary_key=True), Column('parent_item_id', INTEGER), Column('top_item_id', INTEGER), ForeignKeyConstraint(['parent_item_id'], ['simple_items.id']), ForeignKeyConstraint(['top_item_id'], ['simple_items.id']) ) assert self.generate_code() == """\
# NOTE(review): expected-output string literal is truncated (unterminated """\);
# code left byte-identical to avoid corrupting the string.
def test_onetomany_multiref(metadata): Table( 'simple_items', metadata, Column('id', INTEGER, primary_key=True), Column('parent_container_id', INTEGER), Column('top_container_id', INTEGER), ForeignKeyConstraint(['parent_container_id'], ['simple_containers.id']), ForeignKeyConstraint(['top_container_id'], ['simple_containers.id'])) Table('simple_containers', metadata, Column('id', INTEGER, primary_key=True)) assert generate_code(metadata) == """\
def get_table_args(cls):
    """Return table-level constraints for the mapped table.

    Enforces one row per (data_set_id, orig_name, dest_name, commod_name)
    and composite foreign keys to node (origin and destination), commodity
    and arc, all scoped by data_set_id and cascading on update/delete.
    """
    return (
        UniqueConstraint('data_set_id', 'orig_name', 'dest_name', 'commod_name'),
        ForeignKeyConstraint(['orig_name', 'data_set_id'],
                             ['node.name', 'node.data_set_id'],
                             ondelete='CASCADE', onupdate='CASCADE'),
        ForeignKeyConstraint(['dest_name', 'data_set_id'],
                             ['node.name', 'node.data_set_id'],
                             ondelete='CASCADE', onupdate='CASCADE'),
        ForeignKeyConstraint(['commod_name', 'data_set_id'],
                             ['commodity.name', 'commodity.data_set_id'],
                             ondelete='CASCADE', onupdate='CASCADE'),
        ForeignKeyConstraint(['orig_name', 'dest_name', 'data_set_id'],
                             ['arc.orig_name', 'arc.dest_name', 'arc.data_set_id'],
                             ondelete='CASCADE', onupdate='CASCADE'),
    )
class Measure(Base):
    """A measure belonging to a program (composite key id + program_id)."""
    __tablename__ = 'measure'
    id = Column(GUID, default=uuid.uuid4, primary_key=True)
    program_id = Column(GUID, nullable=False, primary_key=True)
    response_type_id = Column(GUID, nullable=False)
    # Attribute name carries a trailing underscore (DB column is
    # 'response_type') -- presumably to avoid clashing with another
    # 'response_type' attribute/relationship; confirm against the rest of the model.
    response_type_ = Column('response_type', Text, nullable=False)
    __table_args__ = (
        ForeignKeyConstraint(['program_id'], ['program.id']),
        # Composite FK keeps the response type within the same program.
        ForeignKeyConstraint(['response_type_id', 'program_id'],
                             ['response_type.id', 'response_type.program_id']),
    )
    program = relationship(Program, backref=backref('measures'))
# NOTE(review): expected-output string literal is truncated (unterminated """\);
# code left byte-identical to avoid corrupting the string.
def test_joined_inheritance(metadata): Table( 'simple_sub_items', metadata, Column('simple_items_id', INTEGER, primary_key=True), Column('data3', INTEGER), ForeignKeyConstraint(['simple_items_id'], ['simple_items.super_item_id'])) Table('simple_super_items', metadata, Column('id', INTEGER, primary_key=True), Column('data1', INTEGER)) Table('simple_items', metadata, Column('super_item_id', INTEGER, primary_key=True), Column('data2', INTEGER), ForeignKeyConstraint(['super_item_id'], ['simple_super_items.id'])) assert generate_code(metadata) == """\
# NOTE(review): the newlines inside the expected-output triple-quoted string
# were collapsed by formatting and cannot be reconstructed with certainty, so
# the code below is left byte-identical rather than risk altering the literal.
def test_joined_inheritance(metadata): Table( "simple_sub_items", metadata, Column("simple_items_id", INTEGER, primary_key=True), Column("data3", INTEGER), ForeignKeyConstraint(["simple_items_id"], ["simple_items.super_item_id"]), ) Table("simple_super_items", metadata, Column("id", INTEGER, primary_key=True), Column("data1", INTEGER)) Table( "simple_items", metadata, Column("super_item_id", INTEGER, primary_key=True), Column("data2", INTEGER), ForeignKeyConstraint(["super_item_id"], ["simple_super_items.id"]), ) assert (generate_code(metadata) == """\ # coding: utf-8 from sqlalchemy import Column, ForeignKey, Integer from sqlalchemy.ext.declarative import declarative_base Base = declarative_base() metadata = Base.metadata class SimpleSuperItem(Base): __tablename__ = 'simple_super_items' id = Column(Integer, primary_key=True) data1 = Column(Integer) class SimpleItem(SimpleSuperItem): __tablename__ = 'simple_items' super_item_id = Column(ForeignKey('simple_super_items.id'), primary_key=True) data2 = Column(Integer) class SimpleSubItem(SimpleItem): __tablename__ = 'simple_sub_items' simple_items_id = Column(ForeignKey('simple_items.super_item_id'), primary_key=True) data3 = Column(Integer) """)
class CollectionContent(BASE, ModelBase):
    """Represents files (contents) of a collection, replicated per edge."""
    __tablename__ = 'ess_coll_content'
    # Sequence-backed id; plain Integer under sqlite.
    content_id = Column(BigInteger().with_variant(Integer, "sqlite"), Sequence('ESS_CONTENT_ID_SEQ'))
    coll_id = Column(BigInteger)
    scope = Column(String(SCOPE_LENGTH))
    name = Column(String(NAME_LENGTH))
    min_id = Column(BigInteger)
    max_id = Column(BigInteger)
    content_type = Column(ContentType.db_type(name='ESS_CONTENT_TYPE'), default=ContentType.FILE)
    # size = Column(BigInteger)
    # md5 = Column(String(32))
    # adler32 = Column(String(8))
    edge_id = Column(Integer)
    status = Column(ContentStatus.db_type(name='ESS_CONTENT_STATUS'), default=ContentStatus.NEW)
    priority = Column(Integer())
    # Transfer bookkeeping: success/failure counters and last failure time.
    num_success = Column(Integer())
    num_failure = Column(Integer())
    last_failed_at = Column(DateTime)
    pfn_size = Column(BigInteger)
    pfn = Column(String(1024))
    object_metadata = Column(JSON())

    # Table-level constraints/indexes; presumably consumed by ModelBase to
    # build __table_args__ -- confirm against the base class.
    _table_args = (PrimaryKeyConstraint('content_id', name='ESS_COLL_CONTENT_PK'),
                   # PrimaryKeyConstraint('scope', 'name', 'coll_id', 'content_type', 'min_id', 'max_id', 'edge_id', 'content_id', name='ESS_COLL_CONTENT_PK'),
                   ForeignKeyConstraint(['edge_id'], ['ess_edges.edge_id'], name='ESS_CONTENT_EDGE_ID_FK'),
                   ForeignKeyConstraint(['coll_id'], ['ess_coll.coll_id'], name='ESS_CONTENT_COLL_ID_FK'),
                   CheckConstraint('status IS NOT NULL', name='ESS_CONTENT_STATUS_NN'),
                   UniqueConstraint('scope', 'name', 'coll_id', 'content_type', 'min_id', 'max_id', 'edge_id', name='ESS_CONTENT_UQ'),
                   Index('ESS_CONTENT_SCOPE_NAME_IDX', 'scope', 'name', 'edge_id', 'status'),
                   Index('ESS_CONTENT_SCOPE_NAME_MM_IDX', 'scope', 'name', 'content_type', 'min_id', 'max_id', 'edge_id', 'status'),
                   Index('ESS_CONTENT_COLLECTION_ID_IDX', 'coll_id', 'status'),
                   Index('ESS_CONTENT_EDGE_ID_IDX', 'edge_id', 'status'),
                   Index('ESS_CONTENT_STATUS_PRIO_IDX', 'status', 'priority'))
class QnodeMeasure(Base):
    # This is an association object for qnodes <-> measures. Normally this would
    # be done with a raw table, but because we want access to the `seq` column,
    # it needs to be a mapped class.
    __tablename__ = 'qnode_measure_link'
    survey_id = Column(GUID, nullable=False, primary_key=True)
    qnode_id = Column(GUID, nullable=False, primary_key=True)
    measure_id = Column(GUID, nullable=False, primary_key=True)
    # Ordering of the measure within its qnode.
    seq = Column(Integer)

    __table_args__ = (
        # Composite FKs ensure qnode and measure belong to the same survey
        # as the link row itself.
        ForeignKeyConstraint(
            ['qnode_id', 'survey_id'],
            ['qnode.id', 'qnode.survey_id']
        ),
        ForeignKeyConstraint(
            ['measure_id', 'survey_id'],
            ['measure.id', 'measure.survey_id']
        ),
        ForeignKeyConstraint(
            ['survey_id'],
            ['survey.id']
        ),
    )

    survey = relationship(Survey)

    # This constructor is used by association_proxy when adding items to the
    # collection.
    def __init__(self, measure=None, qnode=None, seq=None, survey=None,
                 **kwargs):
        """Build a link row, deriving survey_id from whichever related
        object (survey, measure, qnode -- in that priority) is provided."""
        self.measure = measure
        self.qnode = qnode
        self.seq = seq
        if survey is not None:
            self.survey_id = survey.id
        elif measure is not None:
            self.survey_id = measure.survey_id
        elif qnode is not None:
            self.survey_id = qnode.survey_id
        super().__init__(**kwargs)

    def __repr__(self):
        return "QnodeMeasure(qnode={}, measure={}, survey={})".format(
            getattr(self.qnode, 'title', None),
            getattr(self.measure, 'title', None),
            getattr(self.survey, 'title', None))
class Changeset(Base):
    """An OSM changeset recorded for a place, attributed to a user."""
    __tablename__ = 'changeset'
    id = Column(BigInteger, primary_key=True)
    created = Column(DateTime)
    place_id = Column(BigInteger)
    osm_type = Column(osm_type_enum, index=True)
    osm_id = Column(BigInteger, index=True)
    item_id = Column(Integer)
    comment = Column(String)
    user_id = Column(Integer, ForeignKey(User.id))
    update_count = Column(Integer, nullable=False)

    # Composite FK: a place is identified by (osm_type, osm_id).
    __table_args__ = (ForeignKeyConstraint(['osm_type', 'osm_id'],
                                           ['place.osm_type', 'place.osm_id']),)

    user = relationship('User',
                        backref=backref('changesets', lazy='dynamic',
                                        order_by='Changeset.created.desc()'))
    place = relationship('Place',
                         backref=backref('changesets', lazy='dynamic',
                                         order_by='Changeset.created.desc()'))

    @property
    def item_label(self):
        """Label of the linked item, or None if the item no longer exists."""
        item = Item.query.get(self.item_id)
        if item:
            return item.label()
def test_drop_with_complex_foreign_keys(self):
    """Dropping a column that is part of a composite FK removes the whole FK."""
    from sqlalchemy.schema import ForeignKeyConstraint
    from sqlalchemy.schema import UniqueConstraint

    self.table.drop()
    self.meta.clear()

    # create FK's target
    reftable = Table('tmp_ref', self.meta,
                     Column('id', Integer, primary_key=True),
                     Column('jd', Integer),
                     UniqueConstraint('id', 'jd'))
    if self.engine.has_table(reftable.name):
        reftable.drop()
    reftable.create()

    # add a table with a complex foreign key constraint
    self.table = Table(self.table_name, self.meta,
                       Column('id', Integer, primary_key=True),
                       Column('r1', Integer),
                       Column('r2', Integer),
                       ForeignKeyConstraint(['r1', 'r2'],
                                            [reftable.c.id, reftable.c.jd],
                                            name='test_fk'))
    self.table.create()

    # paranoid check
    self.assertEqual([['r1', 'r2']], self._actual_foreign_keys())

    # delete one
    self.table.c.r2.drop()

    # check the constraint is gone, since part of it
    # is no longer there - if people hit this,
    # they may be confused, maybe we should raise an error
    # and insist that the constraint is deleted first, separately?
    self.assertEqual([], self._actual_foreign_keys())
class UserContact(Base):
    """Link table between users and phonebooks (composite primary key)."""
    __tablename__ = 'user_contact'
    __table_args__ = (
        PrimaryKeyConstraint('user_id', 'phonebook_id'),
        # Link rows vanish automatically when either side is deleted.
        ForeignKeyConstraint(('phonebook_id', ),
                             ('phonebook.id', ),
                             ondelete='CASCADE'),
        ForeignKeyConstraint(('user_id', ),
                             ('userfeatures.id', ),
                             ondelete='CASCADE'),
    )

    user_id = Column(Integer)
    phonebook_id = Column(Integer)

    phonebook = relationship('Phonebook', foreign_keys=phonebook_id)
    user = relationship('UserFeatures', foreign_keys=user_id)
def drop_tables(self):
    '''DROP all tables except those for PostGIS.

    Named foreign-key constraints are dropped first so the tables can then be
    removed in any order. Everything is collected up front because some
    databases lock once objects start being dropped inside a transaction.
    '''
    log.debug('drop_tables')
    metadata = MetaData()
    tables = []
    all_foreign_keys = []
    for table_name in self.inspector.get_table_names():
        # Placeholder FK objects: only the name matters for DropConstraint.
        named_fks = [
            ForeignKeyConstraint((), (), name=fk['name'])
            for fk in self.inspector.get_foreign_keys(table_name)
            if fk['name']
        ]
        tables.append(Table(table_name, metadata, *named_fks))
        all_foreign_keys.extend(named_fks)

    for fk in all_foreign_keys:
        self.conn.execute(DropConstraint(fk))

    for table in tables:
        # This table is part of PostGIS extension.
        if table.name == 'spatial_ref_sys':
            continue
        self.conn.execute(DropTable(table))

    self.trans.commit()
class Feedback(db.Model):
    """Feedback (a 0-2 rating) left by one customer on another customer's
    review of a book identified by ISBN."""
    __tablename__ = 'feedback'

    # Customer giving the feedback.
    customer_feedback = db.Column(db.String(),
                                  db.ForeignKey('customer.username'),
                                  primary_key=True)
    rating = db.Column(db.Integer(), nullable=False)
    # Customer whose review is being rated.
    customer_review = db.Column(db.String(), primary_key=True)
    ISBN = db.Column(db.String(13), primary_key=True)

    # BUG FIX: this attribute was misspelled '__tableargs__', so SQLAlchemy
    # never applied these constraints. Additionally the check constraint
    # referenced a nonexistent 'score' column; the mapped column is 'rating'.
    # (The constraint never existed in the DB, so renaming it is safe.)
    __table_args__ = (
        CheckConstraint('rating<=2 AND rating>=0',
                        name='rating_between_0_and_2'),
        ForeignKeyConstraint([customer_review, ISBN],
                             [Review.username, Review.ISBN]),
    )

    def __init__(self, **kwargs):
        """Populate all four fields from keyword arguments (all required)."""
        self.customer_feedback = kwargs['customer_feedback']
        self.rating = kwargs['rating']
        self.customer_review = kwargs['customer_review']
        self.ISBN = kwargs['ISBN']

    def __repr__(self):
        return 'Feedback by {} on Review submitted by {} on {}'.format(
            self.customer_feedback, self.customer_review, self.ISBN)
def rebuild_db():
    """Drop every table in the database, then recreate the schema from the models.

    Based on the SQLAlchemy recipe:
    http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    """
    inspector = reflection.Inspector.from_engine(db.engine)

    # Gather all data first before dropping anything -- some DBs lock after
    # things have been dropped in a transaction.
    metadata = MetaData()
    tables = []
    constraints = []
    for table_name in inspector.get_table_names():
        # Placeholder FK objects: only the name matters for DropConstraint.
        named_fks = [
            ForeignKeyConstraint((), (), name=fk['name'])
            for fk in inspector.get_foreign_keys(table_name)
            if fk['name']
        ]
        tables.append(Table(table_name, metadata, *named_fks))
        constraints.extend(named_fks)

    # Drop FK constraints first so tables can be removed in any order.
    for constraint in constraints:
        db.engine.execute(DropConstraint(constraint))
    for table in tables:
        db.engine.execute(DropTable(table))
    db.session.commit()

    db.create_all()
class CtiProfileService(Base):
    """Link table between CTI profiles and CTI services."""
    __tablename__ = 'cti_profile_service'
    __table_args__ = (
        PrimaryKeyConstraint('profile_id', 'service_id'),
        # Link rows vanish automatically when either side is deleted.
        ForeignKeyConstraint(('profile_id', ),
                             ('cti_profile.id', ),
                             ondelete='CASCADE'),
        ForeignKeyConstraint(('service_id', ),
                             ('cti_service.id', ),
                             ondelete='CASCADE'),
    )

    profile_id = Column(Integer)
    service_id = Column(Integer)

    cti_profile = relationship("CtiProfile")
    cti_service = relationship("CtiService")
class PlaceMatcher(Base):
    """Log of one matcher run for a place: timing plus requester details."""
    __tablename__ = 'place_matcher'
    # BUG FIX: was default=now_utc(), which invoked the function once when the
    # class was defined, so every row received the same import-time timestamp.
    # Passing the callable makes SQLAlchemy evaluate it at each insert.
    start = Column(DateTime, default=now_utc, primary_key=True)
    end = Column(DateTime)
    osm_type = Column(osm_type_enum, primary_key=True)
    osm_id = Column(BigInteger, primary_key=True)
    remote_addr = Column(String)
    user_id = Column(Integer, ForeignKey('user.id'))
    user_agent = Column(String)
    is_refresh = Column(Boolean, nullable=False)

    place = relationship('Place', uselist=False,
                         backref=backref('matcher_runs', lazy='dynamic',
                                         order_by='PlaceMatcher.start.desc()'))
    user = relationship('User', uselist=False,
                        backref=backref('matcher_runs', lazy='dynamic',
                                        order_by='PlaceMatcher.start.desc()'))

    # Composite FK: a place is identified by (osm_type, osm_id).
    __table_args__ = (
        ForeignKeyConstraint(['osm_type', 'osm_id'],
                             ['place.osm_type', 'place.osm_id'],
                             ),
    )

    def duration(self):
        """Return elapsed run time as a timedelta, or None while still running."""
        if self.end:
            return self.end - self.start

    def complete(self):
        """Mark the run as finished by stamping the end time."""
        self.end = now_utc()
class Workprogress(BASE, ModelBase):
    """Represents a workprogress which monitors the progress of a workflow"""
    __tablename__ = 'workprogresses'
    # Sequence-backed primary key; plain Integer under sqlite.
    workprogress_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                             Sequence('WORKPROGRESS_ID_SEQ', schema=DEFAULT_SCHEMA_NAME),
                             primary_key=True)
    request_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    workload_id = Column(Integer())
    scope = Column(String(SCOPE_LENGTH))
    name = Column(String(NAME_LENGTH))
    # requester = Column(String(20))
    # request_type = Column(EnumWithValue(RequestType))
    # transform_tag = Column(String(20))
    # workload_id = Column(Integer())
    priority = Column(Integer())
    status = Column(EnumWithValue(WorkprogressStatus))
    # NOTE(review): default=0 on an enum-valued column -- presumably maps to an
    # initial WorkprogressStatus value via EnumWithValue; confirm.
    substatus = Column(EnumWithValue(WorkprogressStatus), default=0)
    locking = Column(EnumWithValue(WorkprogressLocking))
    # Timestamps set/refreshed by SQLAlchemy defaults, not by the database.
    created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow)
    updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    next_poll_at = Column("next_poll_at", DateTime, default=datetime.datetime.utcnow)
    accessed_at = Column("accessed_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    expired_at = Column("expired_at", DateTime)
    errors = Column(JSON())
    workprogress_metadata = Column(JSON())
    processing_metadata = Column(JSON())

    # Table-level constraints/indexes; presumably consumed by ModelBase to
    # build __table_args__ -- confirm against the base class.
    _table_args = (PrimaryKeyConstraint('workprogress_id', name='WORKPROGRESS_PK'),
                   ForeignKeyConstraint(['request_id'], ['requests.request_id'], name='REQ2WORKPROGRESS_REQ_ID_FK'),
                   CheckConstraint('status IS NOT NULL', name='WORKPROGRESS_STATUS_ID_NN'),
                   # UniqueConstraint('name', 'scope', 'requester', 'request_type', 'transform_tag', 'workload_id', name='REQUESTS_NAME_SCOPE_UQ '),
                   Index('WORKPROGRESS_SCOPE_NAME_IDX', 'workprogress_id', 'request_id', 'name', 'scope'),
                   Index('WORKPROGRESS_STATUS_PRIO_IDX', 'status', 'priority', 'workprogress_id', 'locking', 'updated_at', 'next_poll_at', 'created_at'))
class Fragment(Base):
    """Fragment model for fragments table"""
    __tablename__ = 'fragments'
    # Fragment identifier
    fragid = Column(Integer, primary_key=True, autoincrement=True)
    # Molecule identifier
    molid = Column(Integer, ForeignKey('molecules.molid'), index=True)
    # Scan identifier
    scanid = Column(Integer, ForeignKey('scans.scanid'), index=True)
    # m/z of peak in scan
    mz = Column(Float)
    # Mass of fragment in Dalton, corrected with h delta
    mass = Column(Float)
    # Score of how well the molecule fragment matches the mass spectra
    score = Column(Float)
    # From which fragment this fragment is a fragment of
    parentfragid = Column(Integer, ForeignKey('fragments.fragid'))
    # Atom indices of molecule which are the fragment,
    # as a comma separated list, starting with 0
    atoms = Column(Unicode)
    deltah = Column(Float)
    # (mz+deltah*1.007825032-mass)/(mz*1e6) as deltappm
    deltappm = Column(Float)
    smiles = Column(Unicode)
    # molecular formula of fragment
    formula = Column(Unicode)
    # A fragment can have child fragments (self-referential tree).
    children_backref = backref('parent', remote_side=[fragid])
    children = relationship('Fragment', backref=children_backref,
                            lazy='joined', join_depth=1)
    # Composite FK ties the fragment to a specific peak (scanid, mz).
    __table_args__ = (ForeignKeyConstraint(['scanid', 'mz'],
                                           ['peaks.scanid', 'peaks.mz']
                                           ),
                      {}
                      )
def _add_image_properties_table():
    """Create the image_properties table and its indexes (migration step)."""
    op.create_table('image_properties',
                    Column('id', Integer(), nullable=False),
                    Column('image_id', String(length=36), nullable=False),
                    Column('name', String(length=255), nullable=False),
                    Column('value', Text(), nullable=True),
                    Column('created_at', DateTime(), nullable=False),
                    Column('updated_at', DateTime(), nullable=True),
                    Column('deleted_at', DateTime(), nullable=True),
                    Column('deleted', Boolean(), nullable=False),
                    PrimaryKeyConstraint('id'),
                    ForeignKeyConstraint(['image_id'], ['images.id'], ),
                    # One value per (image, property-name) pair.
                    UniqueConstraint('image_id', 'name',
                                     name='ix_image_properties_image_id_name'),
                    mysql_engine='InnoDB',
                    mysql_charset='utf8',
                    extend_existing=True)
    op.create_index('ix_image_properties_deleted', 'image_properties',
                    ['deleted'], unique=False)
    op.create_index('ix_image_properties_image_id', 'image_properties',
                    ['image_id'], unique=False)
class FuncKeyDestQueue(Base):
    """Func-key destination pointing at a queue (destination type 3)."""
    DESTINATION_TYPE_ID = 3

    __tablename__ = 'func_key_dest_queue'
    __table_args__ = (
        ForeignKeyConstraint(['func_key_id', 'destination_type_id'],
                             ['func_key.id', 'func_key.destination_type_id']),
        # Rows in this table must always carry this destination type.
        CheckConstraint(
            'destination_type_id = {}'.format(DESTINATION_TYPE_ID)),
    )

    func_key_id = Column(Integer, primary_key=True)
    destination_type_id = Column(
        Integer, primary_key=True,
        server_default="{}".format(DESTINATION_TYPE_ID))
    queue_id = Column(Integer, ForeignKey('queuefeatures.id'),
                      primary_key=True)

    # Class-level discriminator label for this destination kind.
    type = 'queue'

    func_key = relationship(FuncKey, cascade='all,delete-orphan',
                            single_parent=True)
    queue = relationship(QueueFeatures)

    def to_tuple(self):
        """Return the destination parameters as (field, value) pairs."""
        return (('queue_id', self.queue_id), )
class PhonebookAddress(Base):
    """A postal address attached to a phonebook (one per type per phonebook)."""
    __tablename__ = 'phonebookaddress'
    __table_args__ = (
        PrimaryKeyConstraint('id'),
        ForeignKeyConstraint(('phonebookid', ),
                             ('phonebook.id', ),
                             ondelete='CASCADE'),
        # At most one address of each type per phonebook.
        Index('phonebookaddress__uidx__phonebookid_type',
              'phonebookid', 'type', unique=True),
    )

    id = Column(Integer)
    phonebookid = Column(Integer, nullable=False)
    address1 = Column(String(30), nullable=False, server_default='')
    address2 = Column(String(30), nullable=False, server_default='')
    city = Column(String(128), nullable=False, server_default='')
    state = Column(String(128), nullable=False, server_default='')
    zipcode = Column(String(16), nullable=False, server_default='')
    country = Column(String(3), nullable=False, server_default='')
    type = Column(Enum('home', 'office', 'other',
                       name='phonebookaddress_type',
                       metadata=Base.metadata),
                  nullable=False)

    phonebook = relationship('Phonebook')
def drop_all_table():
    """Drop every table in the database, removing FK constraints first."""
    from sqlalchemy.engine import reflection
    from sqlalchemy.schema import (MetaData, Table, DropTable,
                                   ForeignKeyConstraint, DropConstraint)

    conn = db.engine.connect()
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(db.engine)

    # Gather all tables and their named FK constraints before dropping
    # anything -- some DBs lock after things have been dropped in a
    # transaction.
    metadata = MetaData()
    tables = []
    constraints = []
    for table_name in inspector.get_table_names():
        # Placeholder FK objects: only the name matters for DropConstraint.
        named_fks = [
            ForeignKeyConstraint((), (), name=fk['name'])
            for fk in inspector.get_foreign_keys(table_name)
            if fk['name']
        ]
        tables.append(Table(table_name, metadata, *named_fks))
        constraints.extend(named_fks)

    for constraint in constraints:
        conn.execute(DropConstraint(constraint))
    for table in tables:
        conn.execute(DropTable(table))
    trans.commit()
class User(Base):
    """Application user account, optionally attached to an entity."""
    __tablename__ = 'user'
    __table_args__ = (
        PrimaryKeyConstraint('id'),
        # Entities with users cannot be deleted (RESTRICT).
        ForeignKeyConstraint(('entity_id', ),
                             ('entity.id', ),
                             ondelete='RESTRICT'),
        # Same login may exist once per role (meta).
        UniqueConstraint('login', 'meta'),
    )

    id = Column(Integer, nullable=False)
    entity_id = Column(Integer)
    login = Column(String(64), nullable=False, server_default='')
    passwd = Column(String(64), nullable=False, server_default='')
    meta = Column(Enum('user', 'admin', 'root',
                       name='user_meta',
                       metadata=Base.metadata),
                  nullable=False, server_default='user')
    valid = Column(Integer, nullable=False, server_default='1')
    time = Column(Integer, nullable=False, server_default='0')
    dcreate = Column(Integer, nullable=False, server_default='0')
    dupdate = Column(Integer, nullable=False, server_default='0')
    obj = Column(Text, nullable=False)

    entity = relationship('Entity')
class Ocena(Base):
    """A grade (ocena) recorded against a Slusa (student-subject enrolment)."""
    __tablename__ = 'ocena'

    slusa_ucenik_id = Column(Integer)
    slusa_predmet_id = Column(Integer)
    ocena_id = Column(Integer, default=random_integer)
    datum = Column(DateTime, default=datetime.datetime.utcnow)
    vrednost = Column(Integer, nullable=False)

    slusa = relationship('Slusa', back_populates='ocene', lazy='joined')

    # BUG FIX: a duplicate __init__(self, vrednost, ucenik, predmet) used to
    # precede this one. It was dead code -- immediately overwritten by this
    # definition -- and it set attributes (ucenik_id/predmet_id) that are not
    # mapped columns. Removed; behavior is unchanged.
    def __init__(self, vrednost, slusa):
        """Create a grade with value `vrednost` for enrolment `slusa`."""
        self.vrednost = vrednost
        self.slusa = slusa

    __table_args__ = (
        ForeignKeyConstraint(
            [slusa_ucenik_id, slusa_predmet_id],
            [Slusa.ucenik_id, Slusa.predmet_id]),
        # add on delete cascade
        PrimaryKeyConstraint(slusa_ucenik_id, slusa_predmet_id, ocena_id),
        {})

    def __repr__(self):
        # BUG FIX: previously referenced the bare name `vrednost`, which would
        # raise NameError at runtime; it must read the instance attribute.
        return f'<Ocena(vrednost={self.vrednost})>'
def _add_metadef_objects_table():
    """Create the metadef_objects table and its name index (migration step)."""
    ns_id_name_constraint = 'uq_metadef_objects_namespace_id_name'

    op.create_table('metadef_objects',
                    Column('id', Integer(), nullable=False),
                    Column('namespace_id', Integer(), nullable=False),
                    Column('name', String(length=80), nullable=False),
                    Column('description', Text(), nullable=True),
                    Column('required', Text(), nullable=True),
                    Column('json_schema', JSONEncodedDict(), nullable=False),
                    Column('created_at', DateTime(), nullable=False),
                    Column('updated_at', DateTime(), nullable=True),
                    ForeignKeyConstraint(['namespace_id'],
                                         ['metadef_namespaces.id'], ),
                    PrimaryKeyConstraint('id'),
                    # Object names are unique within a namespace.
                    UniqueConstraint('namespace_id', 'name',
                                     name=ns_id_name_constraint),
                    mysql_engine='InnoDB',
                    mysql_charset='utf8',
                    extend_existing=True)
    op.create_index('ix_metadef_objects_name', 'metadef_objects',
                    ['name'], unique=False)
def _add_image_locations_table():
    """Create the image_locations table and its indexes (migration step)."""
    op.create_table('image_locations',
                    Column('id', Integer(), nullable=False),
                    Column('image_id', String(length=36), nullable=False),
                    Column('value', Text(), nullable=False),
                    Column('created_at', DateTime(), nullable=False),
                    Column('updated_at', DateTime(), nullable=True),
                    Column('deleted_at', DateTime(), nullable=True),
                    Column('deleted', Boolean(), nullable=False),
                    Column('meta_data', JSONEncodedDict(), nullable=True),
                    Column('status', String(length=30),
                           server_default='active', nullable=False),
                    PrimaryKeyConstraint('id'),
                    ForeignKeyConstraint(['image_id'], ['images.id'], ),
                    mysql_engine='InnoDB',
                    mysql_charset='utf8',
                    extend_existing=True)
    op.create_index('ix_image_locations_deleted', 'image_locations',
                    ['deleted'], unique=False)
    op.create_index('ix_image_locations_image_id', 'image_locations',
                    ['image_id'], unique=False)
class Implementation(Base):
    """An implementation of a primitive's operation, keyed by content hash."""
    __tablename__ = "implementation"

    hash = Column(String, nullable=False)
    name = Column(String, nullable=False)
    operation_name = Column(String, nullable=False)
    primitive_name = Column(String, nullable=False)
    # Path relative to algobase
    path = Column(String)
    macros = Column(JSONEncodedDict)

    __table_args__ = (
        PrimaryKeyConstraint("hash"),
        # Composite FK: primitives are identified by (name, operation_name).
        ForeignKeyConstraint(
            ["primitive_name", "operation_name"],
            ["primitive.name", "primitive.operation_name"],
        ),
    )

    def validate_hash(self, platforms_path):
        """Verifies if hash still valid"""
        # Recompute the directory checksum and compare with the stored hash.
        hash = dirchecksum(os.path.join(platforms_path, self.path))
        return hash == self.hash

    def get_config_assoc(self, config):
        """Return the ConfigImplAssociation linking this implementation to
        `config`; raises if there is not exactly one (query ... .one())."""
        s = xbxdb.scoped_session()
        a = s.query(ConfigImplAssociation).filter(
            ConfigImplAssociation.config_hash == config.hash,
            ConfigImplAssociation.implementation_hash == self.hash).one()
        return a