class TopoMapAssociation(Base):
    """Association between a document and a (topographic) map.

    Used to cache which documents are within the area of a map.  The
    entries in this table are created automatically when a map is
    changed/added, when a document is added, or when a document
    geometry changes.
    """
    __tablename__ = 'map_associations'

    # Document that lies within the map's area.
    document_id = Column(
        Integer, ForeignKey(schema + '.documents.document_id'),
        nullable=False)
    document = relationship(
        Document, primaryjoin=document_id == Document.document_id
    )

    # The map covering the document (maps are themselves documents).
    topo_map_id = Column(
        Integer, ForeignKey(schema + '.maps.document_id'),
        nullable=False)
    topo_map = relationship(
        TopoMap, primaryjoin=topo_map_id == TopoMap.document_id)

    # Composite primary key: at most one row per (document, map) pair.
    __table_args__ = (
        PrimaryKeyConstraint(document_id, topo_map_id),
        Base.__table_args__
    )
def _add_metadef_objects_table():
    """Create the ``metadef_objects`` table and its ``name`` index."""
    unique_name = 'uq_metadef_objects_namespace_id_name'
    schema_items = [
        Column('id', Integer(), nullable=False),
        Column('namespace_id', Integer(), nullable=False),
        Column('name', String(length=80), nullable=False),
        Column('description', Text(), nullable=True),
        Column('required', Text(), nullable=True),
        Column('json_schema', JSONEncodedDict(), nullable=False),
        Column('created_at', DateTime(), nullable=False),
        Column('updated_at', DateTime(), nullable=True),
        ForeignKeyConstraint(['namespace_id'], ['metadef_namespaces.id']),
        PrimaryKeyConstraint('id'),
        # Object names must be unique within a namespace.
        UniqueConstraint('namespace_id', 'name', name=unique_name),
    ]
    op.create_table('metadef_objects',
                    *schema_items,
                    mysql_engine='InnoDB',
                    mysql_charset='utf8',
                    extend_existing=True)

    op.create_index('ix_metadef_objects_name',
                    'metadef_objects',
                    ['name'],
                    unique=False)
class GroupRelationshipM2M(db.Model, Timestamp):
    """Many-to-many model for Group Relationships."""

    __tablename__ = 'grouprelationshipm2m'
    __table_args__ = (
        PrimaryKeyConstraint('relationship_id', 'subrelationship_id',
                             name='pk_grouprelationshipm2m'),
    )

    # The enclosing (super) group relationship.
    relationship_id = Column(
        UUIDType,
        ForeignKey(GroupRelationship.id,
                   onupdate="CASCADE", ondelete="CASCADE"),
        nullable=False)
    # The nested (sub) group relationship.
    subrelationship_id = Column(
        UUIDType,
        ForeignKey(GroupRelationship.id,
                   onupdate="CASCADE", ondelete="CASCADE"),
        nullable=False)

    # Self-referential M2M: each side gets a backref on GroupRelationship.
    relationship = orm_relationship(GroupRelationship,
                                    foreign_keys=[relationship_id],
                                    backref='subrelationshipsm2m')
    subrelationship = orm_relationship(GroupRelationship,
                                       foreign_keys=[subrelationship_id],
                                       backref='superrelationshipsm2m')

    def __repr__(self):
        """String representation of the model."""
        return ('<{self.relationship}: {self.subrelationship}>'
                .format(self=self))
class OutcallTrunk(Base):
    """Link table between an outcall route and a trunk, with a priority."""
    __tablename__ = 'outcalltrunk'
    __table_args__ = (
        PrimaryKeyConstraint('outcallid', 'trunkfeaturesid'),
        Index('outcalltrunk__idx__priority', 'priority'),
    )

    outcallid = Column(Integer, ForeignKey('outcall.id'), nullable=False)
    trunkfeaturesid = Column(Integer, ForeignKey('trunkfeatures.id'),
                             nullable=False)
    # Ordering of trunks within an outcall; defaults to 0.
    priority = Column(Integer, nullable=False, server_default='0')

    trunk = relationship('TrunkFeatures', back_populates='outcall_trunks')
    outcall = relationship('Outcall', back_populates='outcall_trunks')

    # Readable aliases over the legacy column names.
    @hybrid_property
    def outcall_id(self):
        return self.outcallid

    @outcall_id.setter
    def outcall_id(self, value):
        self.outcallid = value

    @hybrid_property
    def trunk_id(self):
        return self.trunkfeaturesid

    @trunk_id.setter
    def trunk_id(self, value):
        self.trunkfeaturesid = value
class User(Base):
    """Login account, optionally attached to an entity."""
    __tablename__ = 'user'
    __table_args__ = (
        PrimaryKeyConstraint('id'),
        ForeignKeyConstraint(('entity_id', ),
                             ('entity.id', ),
                             ondelete='RESTRICT'),
        # A login may exist once per role ('meta').
        UniqueConstraint('login', 'meta'),
    )

    id = Column(Integer, nullable=False)
    entity_id = Column(Integer)
    login = Column(String(64), nullable=False, server_default='')
    passwd = Column(String(64), nullable=False, server_default='')
    # Account role; defaults to plain 'user'.
    meta = Column(Enum('user', 'admin', 'root',
                       name='user_meta',
                       metadata=Base.metadata),
                  nullable=False, server_default='user')
    valid = Column(Integer, nullable=False, server_default='1')
    time = Column(Integer, nullable=False, server_default='0')
    # NOTE(review): dcreate/dupdate look like creation/update timestamps
    # stored as integers — confirm units (epoch seconds?) with callers.
    dcreate = Column(Integer, nullable=False, server_default='0')
    dupdate = Column(Integer, nullable=False, server_default='0')
    obj = Column(Text, nullable=False)

    entity = relationship('Entity')
def create_table(metadata, iso_request_tbl):
    """Table factory for ``stock_sample_creation_iso_request``.

    :param metadata: metadata the new table is registered with
    :param iso_request_tbl: the ``iso_request`` table this table
        references via ``iso_request_id``
    :return: the newly created table
    """
    tbl = Table(
        'stock_sample_creation_iso_request', metadata,
        Column('iso_request_id', Integer,
               ForeignKey(iso_request_tbl.c.iso_request_id,
                          onupdate='CASCADE', ondelete='CASCADE'),
               nullable=False),
        Column('stock_volume', Float,
               CheckConstraint('stock_volume>0'),
               nullable=False),
        Column('stock_concentration', Float,
               CheckConstraint('stock_concentration>0'),
               nullable=False),
        # At least two designs are required.
        Column('number_designs', Integer,
               CheckConstraint('number_designs>1'),
               nullable=False),
        Column('preparation_plate_volume', Float,
               CheckConstraint('preparation_plate_volume>0')),
        # Fix: declare the primary key as part of the table definition.
        # The original constructed a free-standing PrimaryKeyConstraint
        # after the Table call and discarded the result, relying on
        # SQLAlchemy's implicit auto-attach side effect.
        PrimaryKeyConstraint('iso_request_id'),
        )
    return tbl
class Fingerprint(Base):
    """An audio fingerprint hash tied to a song at a given offset."""
    __tablename__ = "fingerprints"

    # Raw 8-byte hash; exposed as a hex string through the `hash` synonym.
    _hash = Column(LargeBinary(8), name=Database.FIELD_HASH, index=True,
                   nullable=False)
    song_id = Column(Integer, ForeignKey(Song.song_id, ondelete="CASCADE"),
                     name=Database.FIELD_SONG_ID, nullable=False)
    song_offset = Column(Integer, name=Database.FIELD_OFFSET, nullable=False)

    # NOTE(review): these bare constraints rely on SQLAlchemy attaching
    # constraints built from Column objects as a side effect; the
    # conventional declarative spelling is __table_args__.  The
    # UniqueConstraint covers exactly the primary-key columns and is
    # therefore redundant — confirm before removing.
    PrimaryKeyConstraint(_hash, song_id, song_offset, name="pk_constraint")
    UniqueConstraint(_hash, song_id, song_offset, name="unique_constraint")

    @property
    def hash(self):
        # Hex-string view of the binary hash column.
        return self._hash.hex()

    @hash.setter
    def hash(self, hash):
        self._hash = bytes.fromhex(hash)

    # Make `hash` usable in queries by mapping it onto the `_hash` column.
    hash = synonym('_hash', descriptor=hash)
class Ucenik(Korisnik):
    """Pupil; joined-table inheritance subclass of Korisnik."""
    __tablename__ = 'ucenik'

    id = Column(Integer)
    ime = Column(String(30), nullable=False)
    prezime = Column(String(30), nullable=False)
    razred_id = Column(Integer)

    razred = relationship('Razred', back_populates='ucenici', lazy='joined')

    # id is both the PK and an FK to the parent Korisnik row.
    __table_args__ = (ForeignKeyConstraint([id], [Korisnik.id]),
                      ForeignKeyConstraint([razred_id], [Razred.id]),
                      PrimaryKeyConstraint(id),
                      {})
    __mapper_args__ = {'polymorphic_identity': 'ucenik'}

    def __init__(self, username, password, ime, prezime, razred):
        """Create a pupil.

        ``razred`` may be either a ``Razred`` instance or its integer id;
        any other type raises ``ValueError``.
        """
        super(Ucenik, self).__init__(username, password)
        self.ime = ime
        self.prezime = prezime
        if isinstance(razred, int):
            self.razred_id = razred
        elif isinstance(razred, Razred):
            self.razred = razred
        else:
            raise ValueError('Cannot accept type of argument razred')

    def __repr__(self):
        return f'<Ucenik(id={self.id}, username={self.username}, ime={self.ime}, prezime={self.prezime}, razred={self.razred})>'
class Implementation(Base):
    """An implementation of a primitive's operation.

    ``hash`` is a checksum of the implementation's directory (see
    :meth:`validate_hash`) and serves as the primary key.
    """
    __tablename__ = "implementation"

    hash = Column(String, nullable=False)
    name = Column(String, nullable=False)
    operation_name = Column(String, nullable=False)
    primitive_name = Column(String, nullable=False)
    path = Column(String)  # Path relative to algobase
    macros = Column(JSONEncodedDict)

    __table_args__ = (
        PrimaryKeyConstraint("hash"),
        # Composite FK: (primitive_name, operation_name) -> primitive table.
        ForeignKeyConstraint(
            ["primitive_name", "operation_name"],
            ["primitive.name", "primitive.operation_name"],
        ),
    )

    def validate_hash(self, platforms_path):
        """Verifies if hash still valid.

        Recomputes the directory checksum under ``platforms_path`` and
        compares it to the stored ``hash``.
        """
        hash = dirchecksum(os.path.join(platforms_path, self.path))
        return hash == self.hash

    def get_config_assoc(self, config):
        """Return the ConfigImplAssociation linking ``config`` to this
        implementation; raises if there is not exactly one match."""
        s = xbxdb.scoped_session()
        a = s.query(ConfigImplAssociation).filter(
            ConfigImplAssociation.config_hash == config.hash,
            ConfigImplAssociation.implementation_hash == self.hash).one()
        return a
class Slusa(Base):
    """Enrollment: a pupil (ucenik) attends a subject (predmet),
    linked to the Predaje (professor-teaches-subject) assignment."""
    __tablename__ = 'slusa'

    ucenik_id = Column(Integer)
    predmet_id = Column(Integer)
    predaje_profesor_id = Column(Integer)
    predaje_predmet_id = Column(Integer)

    ucenik = relationship('Ucenik', lazy='joined')
    predmet = relationship('Predmet', lazy='joined')
    predaje = relationship('Predaje', lazy='joined')
    ocene = relationship('Ocena', back_populates='slusa', lazy='joined')

    __table_args__ = (
        ForeignKeyConstraint([ucenik_id], [Ucenik.id]),  # TODO: add ON DELETE CASCADE
        ForeignKeyConstraint([predmet_id], [Predmet.id]),  # TODO: add ON DELETE CASCADE
        ForeignKeyConstraint([predaje_profesor_id, predaje_predmet_id],
                             [Predaje.profesor_id, Predaje.predmet_id
                              ]),  # TODO: add ON DELETE SET NULL
        PrimaryKeyConstraint(ucenik_id, predmet_id),
        {})

    def __init__(self, ucenik, predmet, profesor):
        # Reads .id directly, so callers presumably pass persisted
        # objects whose ids are already assigned — confirm.
        self.ucenik_id = ucenik.id
        self.predmet_id = predmet.id
        self.predaje_predmet_id = predmet.id
        self.predaje_profesor_id = profesor.id

    def __repr__(self):
        return f'<Slusa()>'
class Ocena(Base):
    """A grade (ocena) given for one enrollment (Slusa)."""
    __tablename__ = 'ocena'

    slusa_ucenik_id = Column(Integer)
    slusa_predmet_id = Column(Integer)
    # Random id component so several grades can share one (pupil, subject).
    ocena_id = Column(Integer, default=random_integer)
    datum = Column(DateTime, default=datetime.datetime.utcnow)
    vrednost = Column(Integer, nullable=False)

    slusa = relationship('Slusa', back_populates='ocene', lazy='joined')

    __table_args__ = (
        ForeignKeyConstraint(
            [slusa_ucenik_id, slusa_predmet_id],
            [Slusa.ucenik_id, Slusa.predmet_id]),  # TODO: add ON DELETE CASCADE
        PrimaryKeyConstraint(slusa_ucenik_id, slusa_predmet_id, ocena_id),
        {})

    def __init__(self, vrednost, slusa):
        """Create a grade with value ``vrednost`` for enrollment ``slusa``.

        Fix: the original defined two ``__init__`` methods; only the last
        one was effective (Python keeps the final definition).  The dead
        first overload — which also assigned to non-existent attributes
        ``ucenik_id``/``predmet_id`` — has been removed.
        """
        self.vrednost = vrednost
        self.slusa = slusa

    def __repr__(self):
        # Fix: the original referenced the bare name ``vrednost``, which
        # raised NameError at runtime; it must be ``self.vrednost``.
        return f'<Ocena(vrednost={self.vrednost})>'
class Predaje(Base):
    """Assignment: a professor (profesor) teaches a subject (predmet)."""
    __tablename__ = 'predaje'

    profesor_id = Column(Integer)
    predmet_id = Column(Integer)

    profesor = relationship('Profesor', lazy='joined')
    predmet = relationship('Predmet')

    __table_args__ = (
        ForeignKeyConstraint([profesor_id], [Profesor.id]),  # TODO: add ON DELETE CASCADE
        ForeignKeyConstraint([predmet_id], [Predmet.id]),  # TODO: add ON DELETE CASCADE
        PrimaryKeyConstraint(profesor_id, predmet_id),
        {})

    def __init__(self, profesor, predmet):
        """Accept either model instances or integer ids for both arguments.

        Fix: the original called the misspelled ``isintance``, which raised
        NameError whenever an integer id was passed.
        """
        if isinstance(profesor, Profesor):
            self.profesor = profesor
        elif isinstance(profesor, int):
            self.profesor_id = profesor
        else:
            raise ValueError('Cannot accept type of argument profesor')
        if isinstance(predmet, Predmet):
            self.predmet = predmet
        elif isinstance(predmet, int):
            self.predmet_id = predmet
        else:
            raise ValueError('Cannot accept type of argument predmet')

    def __repr__(self):
        return f'<Predaje()>'
class DozvoljeniRazredi(Base):
    """Allowed classes: which class (razred) may take which subject."""
    __tablename__ = 'dozvoljenirazredi'

    razred_id = Column(Integer)
    predmet_id = Column(Integer)

    razred = relationship('Razred')
    predmet = relationship('Predmet')

    __table_args__ = (ForeignKeyConstraint([razred_id], [Razred.id]),
                      ForeignKeyConstraint([predmet_id], [Predmet.id]),
                      PrimaryKeyConstraint(razred_id, predmet_id),
                      {})

    def __init__(self, razred, predmet):
        """Accept either model instances or integer ids for both arguments.

        Fix: the original called the misspelled ``isintance``, which raised
        NameError whenever an integer id was passed.
        """
        if isinstance(razred, Razred):
            self.razred = razred
        elif isinstance(razred, int):
            self.razred_id = razred
        else:
            raise ValueError('Cannot accept type of argument razred')
        if isinstance(predmet, Predmet):
            self.predmet = predmet
        elif isinstance(predmet, int):
            self.predmet_id = predmet
        else:
            raise ValueError('Cannot accept type of argument predmet')

    def __repr__(self):
        return f'<DozvoljeniRazredi()>'
class Predmet(Base):
    """School subject (predmet) with unique name."""
    __tablename__ = 'predmet'

    id = Column(Integer)
    naziv = Column(String(50), nullable=False)

    profesori = relationship('Profesor', secondary='predaje')
    razredi = relationship('Razred', secondary='dozvoljenirazredi')

    __table_args__ = (PrimaryKeyConstraint(id), UniqueConstraint(naziv), {})

    def __init__(self, naziv):
        self.razredi = []
        self.naziv = naziv

    def __eq__(self, obj):
        """Equality by primary key (identity short-circuits first)."""
        if self is obj:
            return True
        if obj is None:
            return False
        if not isinstance(obj, Predmet):
            return False
        return self.id == obj.id

    def __hash__(self):
        # Fix: defining __eq__ without __hash__ sets __hash__ to None,
        # making instances unhashable (unusable in sets/dict keys).
        # Hash on the primary key, consistent with __eq__.
        return hash(self.id)

    def __repr__(self):
        return f'<Predmet(id={self.id}, naziv={self.naziv})>'
def _add_image_locations_table():
    """Create the ``image_locations`` table and its lookup indexes."""
    schema_items = [
        Column('id', Integer(), nullable=False),
        Column('image_id', String(length=36), nullable=False),
        Column('value', Text(), nullable=False),
        Column('created_at', DateTime(), nullable=False),
        Column('updated_at', DateTime(), nullable=True),
        Column('deleted_at', DateTime(), nullable=True),
        Column('deleted', Boolean(), nullable=False),
        Column('meta_data', JSONEncodedDict(), nullable=True),
        Column('status', String(length=30), server_default='active',
               nullable=False),
        PrimaryKeyConstraint('id'),
        ForeignKeyConstraint(['image_id'], ['images.id']),
    ]
    op.create_table('image_locations',
                    *schema_items,
                    mysql_engine='InnoDB',
                    mysql_charset='utf8',
                    extend_existing=True)

    # Non-unique secondary indexes.
    for index_name, index_cols in (
            ('ix_image_locations_deleted', ['deleted']),
            ('ix_image_locations_image_id', ['image_id'])):
        op.create_index(index_name, 'image_locations', index_cols,
                        unique=False)
class ConfigImplAssociation(Base):
    """Association between a Config and an Implementation, plus the
    implementation's dependencies as resolved for that config."""
    __tablename__ = 'config_impl_dep_assoc'

    config_hash = Column(String, nullable=False)
    implementation_hash = Column(String, nullable=False)

    implementation = relationship(
        "Implementation",
        backref="config_impl_assocs",
    )
    # Deleting a Config removes its association rows (delete-orphan).
    config = relationship("Config",
                          backref=backref("config_impl_assocs",
                                          cascade="all, delete-orphan"))

    # Implementations this (config, implementation) pair depends on,
    # joined through the per-config dependency table: rows where this
    # implementation is the dependent side and the config matches.
    dependencies = relationship(
        "Implementation",
        secondary=_impl_dep_join_table,
        primaryjoin=and_(
            _impl_dep_join_table.c.dependent_impl_hash == implementation_hash,
            _impl_dep_join_table.c.config_hash == config_hash),
        secondaryjoin=(_impl_dep_join_table.c.dependency_impl_hash ==
                       Implementation.hash))

    __table_args__ = (
        PrimaryKeyConstraint("config_hash", "implementation_hash"),
        ForeignKeyConstraint(["config_hash"], ["config.hash"],
                             ondelete="CASCADE"),
        ForeignKeyConstraint(["implementation_hash"], ["implementation.hash"],
                             ondelete="CASCADE"),
    )
def get_indicator_table(indicator_config, metadata, override_table_name=None):
    """Build the SQLAlchemy table for an indicator config.

    Adds a hash index on ``doc_id`` for hash-distributed (citus) tables,
    plus any valid custom column indexes from the config.
    """
    sql_columns = [column_to_sql(col)
                   for col in indicator_config.get_columns()]
    table_name = override_table_name or get_table_name(
        indicator_config.domain, indicator_config.table_id)
    valid_column_ids = {
        col.database_column_name.decode('utf-8')
        for col in indicator_config.get_columns()
    }

    extra_indices = []
    citus_config = indicator_config.sql_settings.citus_config
    if citus_config.distribution_type == 'hash':
        # Create hash index on doc_id for distributed tables
        extra_indices.append(Index(
            _custom_index_name(table_name, ['doc_id']),
            'doc_id',
            postgresql_using='hash'
        ))
    for index in indicator_config.sql_column_indexes:
        if not set(index.column_ids).issubset(valid_column_ids):
            logger.error(f"Invalid index specified on {table_name} ({index.column_ids})")
            continue
        extra_indices.append(Index(
            _custom_index_name(table_name, index.column_ids),
            *index.column_ids
        ))

    table_items = (sql_columns + extra_indices
                   + [PrimaryKeyConstraint(*indicator_config.pk_columns)])

    # todo: needed to add extend_existing=True to support multiple calls to
    # this function for the same table. is that valid?
    return sqlalchemy.Table(
        table_name,
        metadata,
        extend_existing=True,
        *table_items
    )
def upgrade():
    """Create the ``user`` table with a unique email."""
    user_columns = (
        Column('id', UUID(), nullable=False),
        Column('created_at', DateTime(timezone=True), nullable=False),
        Column('email', Unicode(), nullable=False),
        Column('password', PasswordType(), nullable=False),
    )
    op.create_table(
        'user',
        *user_columns,
        PrimaryKeyConstraint('id'),
        UniqueConstraint('email'))
class Workprogress(BASE, ModelBase):
    """Represents a workprogress which monitors the progress of a workflow"""
    __tablename__ = 'workprogresses'

    # Surrogate key; sequence-backed except on SQLite.
    workprogress_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                             Sequence('WORKPROGRESS_ID_SEQ', schema=DEFAULT_SCHEMA_NAME),
                             primary_key=True)
    # Owning request (FK declared below in _table_args).
    request_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    workload_id = Column(Integer())
    scope = Column(String(SCOPE_LENGTH))
    name = Column(String(NAME_LENGTH))
    # requester = Column(String(20))
    # request_type = Column(EnumWithValue(RequestType))
    # transform_tag = Column(String(20))
    # workload_id = Column(Integer())
    priority = Column(Integer())
    status = Column(EnumWithValue(WorkprogressStatus))
    substatus = Column(EnumWithValue(WorkprogressStatus), default=0)
    locking = Column(EnumWithValue(WorkprogressLocking))
    created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow)
    updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    next_poll_at = Column("next_poll_at", DateTime, default=datetime.datetime.utcnow)
    accessed_at = Column("accessed_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    expired_at = Column("expired_at", DateTime)
    errors = Column(JSON())
    workprogress_metadata = Column(JSON())
    processing_metadata = Column(JSON())

    # NOTE(review): single-underscore ``_table_args`` — SQLAlchemy itself
    # only reads ``__table_args__``; confirm that ModelBase/BASE consumes
    # this attribute, otherwise these constraints are never applied.
    _table_args = (PrimaryKeyConstraint('workprogress_id', name='WORKPROGRESS_PK'),
                   ForeignKeyConstraint(['request_id'], ['requests.request_id'], name='REQ2WORKPROGRESS_REQ_ID_FK'),
                   CheckConstraint('status IS NOT NULL', name='WORKPROGRESS_STATUS_ID_NN'),
                   # UniqueConstraint('name', 'scope', 'requester', 'request_type', 'transform_tag', 'workload_id', name='REQUESTS_NAME_SCOPE_UQ '),
                   Index('WORKPROGRESS_SCOPE_NAME_IDX', 'workprogress_id', 'request_id', 'name', 'scope'),
                   Index('WORKPROGRESS_STATUS_PRIO_IDX', 'status', 'priority', 'workprogress_id', 'locking', 'updated_at', 'next_poll_at', 'created_at'))
class PhonebookAddress(Base):
    """Postal address attached to a phonebook entry; one per type."""
    __tablename__ = 'phonebookaddress'
    __table_args__ = (
        PrimaryKeyConstraint('id'),
        ForeignKeyConstraint(('phonebookid', ),
                             ('phonebook.id', ),
                             ondelete='CASCADE'),
        # At most one address of each type per phonebook entry.
        Index('phonebookaddress__uidx__phonebookid_type',
              'phonebookid', 'type',
              unique=True),
    )

    id = Column(Integer)
    phonebookid = Column(Integer, nullable=False)
    address1 = Column(String(30), nullable=False, server_default='')
    address2 = Column(String(30), nullable=False, server_default='')
    city = Column(String(128), nullable=False, server_default='')
    state = Column(String(128), nullable=False, server_default='')
    zipcode = Column(String(16), nullable=False, server_default='')
    country = Column(String(3), nullable=False, server_default='')
    type = Column(Enum('home', 'office', 'other',
                       name='phonebookaddress_type',
                       metadata=Base.metadata),
                  nullable=False)

    phonebook = relationship('Phonebook')
def get_indicator_table(indicator_config, metadata, override_table_name=None):
    """Build the SQLAlchemy table for an indicator config.

    Any table of the same name already registered in *metadata* is
    removed first, so repeated calls pick up config changes.
    """
    sql_columns = [column_to_sql(col)
                   for col in indicator_config.get_columns()]
    table_name = override_table_name or get_table_name(
        indicator_config.domain, indicator_config.table_id)
    valid_column_ids = {
        col.database_column_name.decode('utf-8')
        for col in indicator_config.get_columns()
    }

    extra_indices = []
    for index in indicator_config.sql_column_indexes:
        if not set(index.column_ids).issubset(valid_column_ids):
            logger.error(
                f"Invalid index specified on {table_name} ({index.column_ids})"
            )
            continue
        extra_indices.append(
            Index(_custom_index_name(table_name, index.column_ids),
                  *index.column_ids))

    table_items = (sql_columns + extra_indices
                   + [PrimaryKeyConstraint(*indicator_config.pk_columns)])

    # Drop a previously-registered table of the same name, if any.
    existing = metadata.tables.get(table_name)
    if existing is not None:
        metadata.remove(existing)
    return sqlalchemy.Table(table_name, metadata, *table_items)
class Association(Base):
    """Associations between documents.

    Certain associations build a hierarchy between the documents (e.g.
    between summits), in this case it's important which document is the
    "parent" and which is the "child" of the association. For other
    undirected associations it doesn't matter which document is the
    "parent" or "child".
    """
    __tablename__ = 'associations'

    parent_document_id = Column(Integer,
                                ForeignKey(schema + '.documents.document_id'),
                                nullable=False)
    parent_document = relationship(
        Document, primaryjoin=parent_document_id == Document.document_id)

    child_document_id = Column(Integer,
                               ForeignKey(schema + '.documents.document_id'),
                               nullable=False)
    child_document = relationship(
        Document, primaryjoin=child_document_id == Document.document_id)

    # One row per ordered (parent, child) pair.
    __table_args__ = (PrimaryKeyConstraint(parent_document_id,
                                           child_document_id),
                      Base.__table_args__)

    def get_log(self, user_id, is_creation=True):
        """Return an AssociationLog entry for this association made by
        ``user_id``; is_creation=False presumably records a removal —
        confirm against AssociationLog's semantics."""
        return AssociationLog(parent_document_id=self.parent_document_id,
                              child_document_id=self.child_document_id,
                              user_id=user_id,
                              is_creation=is_creation)
class TestScore(Base):
    """A user's score on a test; one row per (test, user) pair."""
    __tablename__ = "testscores"
    __table_args__ = (PrimaryKeyConstraint('test_id', 'user_id'),)

    test_id = Column(
        Integer,
        ForeignKey(Test.id, onupdate="CASCADE", ondelete="CASCADE"),
        nullable=False, index=True)
    # Deleting a Test deletes its scores; DB-level cascade is relied on
    # (passive_deletes) with ORM delete-orphan as backup.
    test = relationship(
        Test,
        backref=backref(
            'test_scores',
            order_by=[test_id],
            cascade="all, delete-orphan",
            passive_deletes=True))

    user_id = Column(
        Integer,
        ForeignKey(User.id, onupdate="CASCADE", ondelete="CASCADE"),
        nullable=False, index=True)
    user = relationship(
        User,
        backref=backref(
            'test_scores',
            order_by=[test_id],
            cascade="all, delete-orphan",
            passive_deletes=True))

    score = Column(Integer, default=0)
def get_indicator_table(indicator_config, metadata, override_table_name=None):
    """Build the SQLAlchemy table for an indicator config."""
    sql_columns = [column_to_sql(col)
                   for col in indicator_config.get_columns()]
    table_name = override_table_name or get_table_name(
        indicator_config.domain, indicator_config.table_id)
    valid_column_ids = {
        col.database_column_name.decode('utf-8')
        for col in indicator_config.get_columns()
    }

    extra_indices = []
    for index in indicator_config.sql_column_indexes:
        if not set(index.column_ids).issubset(valid_column_ids):
            # Soft-fail and stop processing any further custom indexes.
            _assert = soft_assert('{}@{}'.format('jemord', 'dimagi.com'))
            _assert(False, "Invalid index specified on {}".format(table_name))
            break
        extra_indices.append(
            Index(_custom_index_name(table_name, index.column_ids),
                  *index.column_ids))

    table_items = (sql_columns + extra_indices
                   + [PrimaryKeyConstraint(*indicator_config.pk_columns)])

    # todo: needed to add extend_existing=True to support multiple calls to
    # this function for the same table. is that valid?
    return sqlalchemy.Table(table_name, metadata, extend_existing=True,
                            *table_items)
def _add_metadef_namespace_resource_types_table():
    """Create the namespace <-> resource-type join table and its index."""
    schema_items = [
        Column('resource_type_id', Integer(), nullable=False),
        Column('namespace_id', Integer(), nullable=False),
        Column('properties_target', String(length=80), nullable=True),
        Column('prefix', String(length=80), nullable=True),
        Column('created_at', DateTime(), nullable=False),
        Column('updated_at', DateTime(), nullable=True),
        ForeignKeyConstraint(['namespace_id'], ['metadef_namespaces.id']),
        ForeignKeyConstraint(['resource_type_id'],
                             ['metadef_resource_types.id']),
        PrimaryKeyConstraint('resource_type_id', 'namespace_id'),
    ]
    op.create_table('metadef_namespace_resource_types',
                    *schema_items,
                    mysql_engine='InnoDB',
                    mysql_charset='utf8',
                    extend_existing=True)

    op.create_index('ix_metadef_ns_res_types_namespace_id',
                    'metadef_namespace_resource_types',
                    ['namespace_id'],
                    unique=False)
def _add_images_table():
    """Create the ``images`` table and its lookup indexes."""
    schema_items = [
        Column('id', String(length=36), nullable=False),
        Column('name', String(length=255), nullable=True),
        Column('size', BigInteger(), nullable=True),
        Column('status', String(length=30), nullable=False),
        Column('is_public', Boolean(), nullable=False),
        Column('created_at', DateTime(), nullable=False),
        Column('updated_at', DateTime(), nullable=True),
        Column('deleted_at', DateTime(), nullable=True),
        Column('deleted', Boolean(), nullable=False),
        Column('disk_format', String(length=20), nullable=True),
        Column('container_format', String(length=20), nullable=True),
        Column('checksum', String(length=32), nullable=True),
        Column('owner', String(length=255), nullable=True),
        Column('min_disk', Integer(), nullable=False),
        Column('min_ram', Integer(), nullable=False),
        Column('protected', Boolean(), server_default=sql.false(),
               nullable=False),
        Column('virtual_size', BigInteger(), nullable=True),
        PrimaryKeyConstraint('id'),
    ]
    op.create_table('images',
                    *schema_items,
                    mysql_engine='InnoDB',
                    mysql_charset='utf8',
                    extend_existing=True)

    # Non-unique secondary indexes.
    for index_name, index_cols in (
            ('checksum_image_idx', ['checksum']),
            ('ix_images_deleted', ['deleted']),
            ('ix_images_is_public', ['is_public']),
            ('owner_image_idx', ['owner'])):
        op.create_index(index_name, 'images', index_cols, unique=False)
class Relationship2GroupRelationship(db.Model, Timestamp):
    """Many-to-many model for Relationship to GroupRelationship."""

    __tablename__ = 'relationship2grouprelationship'
    __table_args__ = (
        PrimaryKeyConstraint('relationship_id', 'group_relationship_id',
                             name='pk_relationship2grouprelationship'),
    )

    relationship_id = Column(
        UUIDType,
        ForeignKey(Relationship.id,
                   onupdate='CASCADE', ondelete='CASCADE'),
        nullable=False)
    group_relationship_id = Column(
        UUIDType,
        ForeignKey(GroupRelationship.id,
                   onupdate='CASCADE', ondelete='CASCADE'),
        nullable=False)

    # DB relationships
    relationship = orm_relationship(Relationship,
                                    foreign_keys=[relationship_id])
    group_relationship = orm_relationship(
        GroupRelationship,
        foreign_keys=[group_relationship_id])

    def __repr__(self):
        """String representation of the model."""
        return ('<{self.group_relationship}: {self.relationship}>'
                .format(self=self))
def _add_image_properties_table():
    """Create the ``image_properties`` table and its lookup indexes."""
    schema_items = [
        Column('id', Integer(), nullable=False),
        Column('image_id', String(length=36), nullable=False),
        Column('name', String(length=255), nullable=False),
        Column('value', Text(), nullable=True),
        Column('created_at', DateTime(), nullable=False),
        Column('updated_at', DateTime(), nullable=True),
        Column('deleted_at', DateTime(), nullable=True),
        Column('deleted', Boolean(), nullable=False),
        PrimaryKeyConstraint('id'),
        ForeignKeyConstraint(['image_id'], ['images.id']),
        # Each property name appears at most once per image.
        UniqueConstraint('image_id', 'name',
                         name='ix_image_properties_image_id_name'),
    ]
    op.create_table('image_properties',
                    *schema_items,
                    mysql_engine='InnoDB',
                    mysql_charset='utf8',
                    extend_existing=True)

    for index_name, index_cols in (
            ('ix_image_properties_deleted', ['deleted']),
            ('ix_image_properties_image_id', ['image_id'])):
        op.create_index(index_name, 'image_properties', index_cols,
                        unique=False)
class Request(BASE, ModelBase):
    """Represents a pre-cache request from other service"""
    __tablename__ = 'requests'

    # Surrogate key; sequence-backed except on SQLite.
    request_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                        Sequence('REQUEST_ID_SEQ', schema=DEFAULT_SCHEMA_NAME),
                        primary_key=True)
    scope = Column(String(SCOPE_LENGTH))
    name = Column(String(NAME_LENGTH))
    requester = Column(String(20))
    request_type = Column(EnumWithValue(RequestType))
    transform_tag = Column(String(10))
    priority = Column(Integer())
    status = Column(EnumWithValue(RequestStatus))
    locking = Column(EnumWithValue(RequestLocking))
    workload_id = Column(Integer())
    created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow)
    updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    accessed_at = Column("accessed_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    expired_at = Column("expired_at", DateTime)
    errors = Column(JSON())
    request_metadata = Column(JSON())
    processing_metadata = Column(JSON())

    # NOTE(review): single-underscore ``_table_args`` — SQLAlchemy itself
    # only reads ``__table_args__``; confirm that ModelBase/BASE consumes
    # this attribute, otherwise these constraints are never applied.
    # NOTE(review): the unique-constraint name 'REQUESTS_NAME_SCOPE_UQ '
    # ends with a space — likely unintended, but renaming it would change
    # the DB constraint name; confirm before fixing.
    _table_args = (PrimaryKeyConstraint('request_id', name='_REQUESTS_PK'),
                   CheckConstraint('status IS NOT NULL', name='REQ_STATUS_ID_NN'),
                   UniqueConstraint('name', 'scope', 'requester', 'request_type', 'transform_tag', 'workload_id', name='REQUESTS_NAME_SCOPE_UQ '),
                   Index('REQUESTS_SCOPE_NAME_IDX', 'scope', 'name', 'workload_id'),
                   Index('REQUESTS_STATUS_PRIO_IDX', 'status', 'priority', 'request_id'))
class QueueMember(Base):
    """Membership of an agent or user in a queue or group."""
    __tablename__ = 'queuemember'
    __table_args__ = (
        PrimaryKeyConstraint('queue_name', 'interface'),
        UniqueConstraint('queue_name', 'channel', 'usertype', 'userid',
                         'category'),
        Index('queuemember__idx__category', 'category'),
        Index('queuemember__idx__channel', 'channel'),
        Index('queuemember__idx__userid', 'userid'),
        Index('queuemember__idx__usertype', 'usertype'),
    )

    queue_name = Column(String(128))
    interface = Column(String(128))
    penalty = Column(Integer, nullable=False, server_default='0')
    commented = Column(Integer, nullable=False, server_default='0')
    # Discriminator for what userid refers to (agent vs user).
    usertype = Column(Enum('agent', 'user',
                           name='queuemember_usertype',
                           metadata=Base.metadata),
                      nullable=False)
    userid = Column(Integer, nullable=False)
    channel = Column(String(25), nullable=False)
    # Whether this membership belongs to a queue or to a group.
    category = Column(Enum('queue', 'group',
                           name='queue_category',
                           metadata=Base.metadata),
                      nullable=False)
    position = Column(Integer, nullable=False, server_default='0')