def test_composite_column_mapped_collection(self):
    """Round-trip a composite-key column_mapped_collection through pickle."""
    users, addresses = self.tables.users, self.tables.addresses
    # Key each Address by the (id, email_address) column pair.
    collection_factory = column_mapped_collection(
        [addresses.c.id, addresses.c.email_address]
    )
    mapper(
        User,
        users,
        properties={
            "addresses": relationship(
                Address, collection_class=collection_factory
            )
        },
    )
    mapper(Address, addresses)

    u1 = User()
    u1.addresses = {
        (1, "email1"): Address(id=1, email_address="email1"),
        (2, "email2"): Address(id=2, email_address="email2"),
    }
    for loads, dumps in picklers():
        restored = loads(dumps(u1))
        # Keys and values must survive the pickle round trip.
        eq_(u1.addresses, restored.addresses)
        eq_(
            restored.addresses[(1, "email1")],
            Address(id=1, email_address="email1"),
        )
def test_column_mapped_collection(self):
    """A collection keyed on a single column survives pickling."""
    users, addresses = self.tables.users, self.tables.addresses
    address_rel = relationship(
        Address,
        collection_class=column_mapped_collection(addresses.c.email_address),
    )
    mapper(User, users, properties={"addresses": address_rel})
    mapper(Address, addresses)

    u1 = User()
    u1.addresses = {
        key: Address(email_address=key) for key in ("email1", "email2")
    }
    for loads, dumps in picklers():
        restored = loads(dumps(u1))
        eq_(u1.addresses, restored.addresses)
        eq_(restored.addresses["email1"], Address(email_address="email1"))
def orm_load_baseordereddataset(man):
    """Create the ordered-dataset tables on ``man.metadata`` and register
    the classical mappers for BaseOrderedDataSet and OrderedDataSetItem.

    ``man`` appears to be a manager object exposing ``metadata`` and
    receiving the Table objects as attributes.
    """
    # Header table; ``type`` is the polymorphic discriminator used below.
    man.baseordereddataset_table = Table('baseordereddataset', man.metadata,
        Column('id', Integer, primary_key=True),
        Column('created', DateTime),
        Column('label', String(50), nullable=False, unique=True),
        Column('type', String(30), nullable=False))
    # Association rows linking a dataset to its items; ``index`` keys the
    # column_mapped_collection used in the mapper below.
    man.ordereditems_table = Table('ordereddata_items', man.metadata,
        Column('dataset_id', Integer, ForeignKey('baseordereddataset.id'),
               primary_key=True),
        Column('item_id', Integer, ForeignKey('basedata.basedata_id'),
               primary_key=True),
        Column('index', Integer, nullable=False)
    )
    # Emit DDL for the tables just defined (and any others on the metadata).
    man.metadata.create_all()
    mapper(BaseOrderedDataSet, man.baseordereddataset_table,
           polymorphic_on=man.baseordereddataset_table.c.type,
           polymorphic_identity='base_ordered_dataset',
           properties={'data_items': relation(
               OrderedDataSetItem,
               backref='ordered_datasets_items',
               cascade='all, delete-orphan',
               # Items exposed as a dict keyed by their ``index`` column.
               collection_class=column_mapped_collection(
                   man.ordereditems_table.c.index))
           })
    mapper(OrderedDataSetItem, man.ordereditems_table, properties={
        'item': relation(BaseData, lazy='joined', backref='dataitem')
    })
class UserTestExecutable(Base):
    """Class to store information about one file generated by the
    compilation of a user test.

    Not to be used directly (import it from SQLAlchemyAll).

    """
    __tablename__ = 'user_test_executables'
    __table_args__ = (
        UniqueConstraint('user_test_id', 'filename',
                         name='cst_executables_user_test_id_filename'),
        )

    # Auto increment primary key.
    id = Column(
        Integer,
        primary_key=True)

    # Filename and digest of the file.
    filename = Column(
        String,
        nullable=False)
    digest = Column(
        String,
        nullable=False)

    # User test (id and object) owning the executable.
    user_test_id = Column(
        Integer,
        ForeignKey(UserTest.id,
                   onupdate="CASCADE", ondelete="CASCADE"),
        nullable=False,
        index=True)
    # Backref exposes the executables on the user test as a dict keyed by
    # filename (column_mapped_collection on the ``filename`` column).
    user_test = relationship(
        UserTest,
        backref=backref('executables',
                        collection_class=column_mapped_collection(filename),
                        cascade="all, delete-orphan",
                        passive_deletes=True))

    def __init__(self, digest, filename=None, user_test=None):
        self.filename = filename
        self.digest = digest
        self.user_test = user_test

    def export_to_dict(self):
        """Return object data as a dictionary.

        """
        return {
            'filename': self.filename,
            'digest': self.digest
            }
class UserTestManager(Base):
    """Class to store additional files needed to compile or evaluate a
    user test (e.g., graders).

    Not to be used directly (import it from SQLAlchemyAll).

    """
    __tablename__ = 'user_test_managers'
    __table_args__ = (
        UniqueConstraint('user_test_id', 'filename',
                         name='cst_managers_user_test_id_filename'),
        )

    # Auto increment primary key.
    id = Column(
        Integer,
        primary_key=True)

    # Filename and digest of the manager.
    filename = Column(
        String,
        nullable=False)
    digest = Column(
        String,
        nullable=False)

    # User test (id and object) owning the manager.
    user_test_id = Column(
        Integer,
        ForeignKey(UserTest.id,
                   onupdate="CASCADE", ondelete="CASCADE"),
        nullable=False,
        index=True)
    # Backref exposes the managers on the user test as a dict keyed by
    # filename (column_mapped_collection on the ``filename`` column).
    user_test = relationship(
        UserTest,
        backref=backref('managers',
                        collection_class=column_mapped_collection(filename),
                        cascade="all, delete-orphan",
                        passive_deletes=True))

    def __init__(self, digest, filename=None, user_test=None):
        self.filename = filename
        self.digest = digest
        self.user_test = user_test

    def export_to_dict(self):
        """Return object data as a dictionary.

        """
        return {'filename': self.filename,
                'digest': self.digest}
class Statement(Base):
    """Class to store a translation of the task statement.

    Not to be used directly (import it from SQLAlchemyAll).

    """
    __tablename__ = 'statements'
    __table_args__ = (UniqueConstraint(
        'task_id', 'language',
        name='cst_statements_task_id_language'),
        )

    # Auto increment primary key.
    id = Column(Integer, primary_key=True)

    # Code for the language the statement is written in.
    # It can be an arbitrary string, but if it's in the form "en" or "en_US"
    # it will be rendered appropriately on the interface (i.e. "English" and
    # "English (United States of America)"). These codes need to be taken from
    # ISO 639-1 and ISO 3166-1 respectively.
    language = Column(String, nullable=False)

    # Digest of the file.
    digest = Column(String, nullable=False)

    # Task (id and object) the statement is for.
    task_id = Column(Integer,
                     ForeignKey(Task.id,
                                onupdate="CASCADE", ondelete="CASCADE"),
                     nullable=False,
                     index=True)
    # Backref exposes the statements on the task as a dict keyed by the
    # ``language`` column.
    task = relationship(
        Task,
        backref=backref('statements',
                        collection_class=column_mapped_collection(language),
                        cascade="all, delete-orphan",
                        passive_deletes=True))

    def __init__(self, digest, language, task=None):
        self.language = language
        self.digest = digest
        self.task = task

    def export_to_dict(self):
        """Return object data as a dictionary.

        """
        return {'language': self.language,
                'digest': self.digest}
class File(Base):
    """Class to store information about one file submitted within a
    submission.

    Not to be used directly (import it from SQLAlchemyAll).

    """
    __tablename__ = 'files'
    __table_args__ = (
        UniqueConstraint('submission_id', 'filename',
                         name='cst_files_submission_id_filename'),
        )

    # Auto increment primary key.
    id = Column(Integer, primary_key=True)

    # Filename and digest of the submitted file.
    filename = Column(String, nullable=False)
    digest = Column(String, nullable=False)

    # Submission (id and object) of the submission.
    submission_id = Column(Integer,
                           ForeignKey(Submission.id,
                                      onupdate="CASCADE", ondelete="CASCADE"),
                           nullable=False,
                           index=True)
    # Backref exposes the files on the submission as a dict keyed by
    # filename.
    # NOTE(review): this backref uses single_parent + "all, delete,
    # delete-orphan" rather than passive_deletes=True like the sibling
    # classes — confirm the asymmetry is intentional.
    submission = relationship(
        Submission,
        backref=backref('files',
                        collection_class=column_mapped_collection(filename),
                        single_parent=True,
                        cascade="all, delete, delete-orphan"))

    def __init__(self, digest, filename=None, submission=None):
        self.filename = filename
        self.digest = digest
        self.submission = submission

    def export_to_dict(self):
        """Return object data as a dictionary.

        """
        return {
            'filename': self.filename,
            'digest': self.digest
            }
class User:
    """User model fragment: notification relationships and the lazily
    created main (catch-all) notification preference row."""

    # All notifications for this user, newest first; readable by owner only.
    all_notifications = with_roles(
        db.relationship(
            UserNotification,
            lazy='dynamic',
            order_by=UserNotification.created_at.desc(),
        ),
        read={'owner'},
    )
    # Preferences exposed as a dict keyed by notification_type.
    notification_preferences = db.relationship(
        NotificationPreferences,
        collection_class=column_mapped_collection(
            NotificationPreferences.notification_type
        ),
    )

    # This relationship is wrapped in a property that creates it on first access
    # NOTE(review): ``User.id`` is referenced inside the class body here;
    # presumably this snippet is attached to the class after definition or
    # evaluated lazily by the framework — confirm, as a plain class body
    # would raise NameError at definition time.
    _main_notification_preferences = db.relationship(
        NotificationPreferences,
        primaryjoin=db.and_(
            NotificationPreferences.user_id == User.id,
            NotificationPreferences.notification_type == '',
        ),
        uselist=False,
    )

    @property
    def main_notification_preferences(self):
        # Lazily create the catch-all row (empty notification_type) on
        # first access and stage it in the session.
        if not self._main_notification_preferences:
            self._main_notification_preferences = NotificationPreferences(
                user=self,
                notification_type='',
                by_email=True,
                by_sms=False,
                by_webpush=False,
                by_telegram=False,
                by_whatsapp=False,
            )
            db.session.add(self._main_notification_preferences)
        return self._main_notification_preferences
def test_column_mapped_collection(self):
    """Single-column keyed collection survives pickling."""
    users, addresses = self.tables.users, self.tables.addresses
    addr_collection = column_mapped_collection(addresses.c.email_address)
    mapper(User, users, properties={
        'addresses': relationship(Address, collection_class=addr_collection)
    })
    mapper(Address, addresses)

    u1 = User()
    u1.addresses = dict(
        email1=Address(email_address="email1"),
        email2=Address(email_address="email2"),
    )
    for loads, dumps in picklers():
        clone = loads(dumps(u1))
        eq_(u1.addresses, clone.addresses)
        eq_(clone.addresses['email1'], Address(email_address="email1"))
class Attachment(Base):
    """Class to store additional files to give to the user together with
    the statement of the task.

    Not to be used directly (import it from SQLAlchemyAll).

    """
    __tablename__ = 'attachments'
    __table_args__ = (UniqueConstraint(
        'task_id', 'filename',
        name='cst_attachments_task_id_filename'),
        )

    # Auto increment primary key.
    id = Column(Integer, primary_key=True)

    # Filename and digest of the attachment.
    filename = Column(String, nullable=False)
    digest = Column(String, nullable=False)

    # Task (id and object) owning the attachment.
    task_id = Column(Integer,
                     ForeignKey(Task.id,
                                onupdate="CASCADE", ondelete="CASCADE"),
                     nullable=False,
                     index=True)
    # Backref exposes the attachments on the task as a dict keyed by
    # filename.
    task = relationship(
        Task,
        backref=backref('attachments',
                        collection_class=column_mapped_collection(filename),
                        cascade="all, delete-orphan",
                        passive_deletes=True))

    def __init__(self, digest, filename=None, task=None):
        self.filename = filename
        self.digest = digest
        self.task = task

    def export_to_dict(self):
        """Return object data as a dictionary.

        """
        return {'filename': self.filename,
                'digest': self.digest}
def orm_load_baseordereddataset(man):
    """Create the ordered-dataset tables and register their mappers."""
    # Header table; ``type`` drives the polymorphic mapping below.
    dataset_table = Table(
        "baseordereddataset",
        man.metadata,
        Column("id", Integer, primary_key=True),
        Column("created", DateTime),
        Column("label", String(50), nullable=False, unique=True),
        Column("type", String(30), nullable=False),
    )
    # Association rows linking datasets to items, ordered by ``index``.
    items_table = Table(
        "ordereddata_items",
        man.metadata,
        Column("dataset_id", Integer, ForeignKey("baseordereddataset.id"),
               primary_key=True),
        Column("item_id", Integer, ForeignKey("basedata.basedata_id"),
               primary_key=True),
        Column("index", Integer, nullable=False),
    )
    man.baseordereddataset_table = dataset_table
    man.ordereditems_table = items_table

    man.metadata.create_all()

    data_items_rel = relation(
        OrderedDataSetItem,
        backref="ordered_datasets_items",
        cascade="all, delete-orphan",
        # Items exposed as a dict keyed by their ``index`` column.
        collection_class=column_mapped_collection(man.ordereditems_table.c.index),
    )
    mapper(
        BaseOrderedDataSet,
        man.baseordereddataset_table,
        polymorphic_on=man.baseordereddataset_table.c.type,
        polymorphic_identity="base_ordered_dataset",
        properties={"data_items": data_items_rel},
    )
    mapper(
        OrderedDataSetItem,
        man.ordereditems_table,
        properties={"item": relation(BaseData, lazy="joined", backref="dataitem")},
    )
def test_composite_column_mapped_collection(self):
    """Pickle round-trip for a collection keyed on (id, email_address)."""
    users, addresses = self.tables.users, self.tables.addresses
    mapper(User, users, properties={
        'addresses': relationship(
            Address,
            collection_class=column_mapped_collection(
                [addresses.c.id, addresses.c.email_address]),
        )
    })
    mapper(Address, addresses)

    u1 = User()
    mapping = {}
    for num, mail in ((1, "email1"), (2, "email2")):
        mapping[(num, mail)] = Address(id=num, email_address=mail)
    u1.addresses = mapping

    for loads, dumps in picklers():
        clone = loads(dumps(u1))
        eq_(u1.addresses, clone.addresses)
        eq_(clone.addresses[(1, 'email1')],
            Address(id=1, email_address="email1"))
class RelationshipParent(BaseNameMixin, db.Model):
    """Parent model exercising the different relationship collection
    types (list, dynamic, set, attribute-keyed dict, column-keyed dict)."""

    __tablename__ = 'relationship_parent'

    # Plain list collection with a reverse ``parent`` attribute.
    children_list = db.relationship(RelationshipChild, backref='parent')
    # Query-returning (dynamic) variant of the same relationship.
    children_list_lazy = db.relationship(RelationshipChild, lazy='dynamic')
    # Children as an unordered set.
    children_set = db.relationship(RelationshipChild, collection_class=set)
    # Children as a dict keyed by the mapped ``name`` attribute.
    children_dict_attr = db.relationship(
        RelationshipChild, collection_class=attribute_mapped_collection('name')
    )
    # Children as a dict keyed by the ``name`` column.
    children_dict_column = db.relationship(
        RelationshipChild,
        collection_class=column_mapped_collection(RelationshipChild.name),
    )

    # Role-based read access for all the collection attributes.
    __roles__ = {
        'all': {
            'read': {
                'name',
                'title',
                'children_list',
                'children_set',
                'children_dict_attr',
                'children_dict_column',
            }
        }
    }
    # Serialization field sets.
    __datasets__ = {
        'primary': {
            'name',
            'title',
            'children_list',
            'children_set',
            'children_dict_attr',
            'children_dict_column',
        },
        'related': {'name', 'title'},
    }
def orm_load_baseordereddataset(man):
    """Create the ordered-dataset tables on ``man.metadata`` and register
    the classical mappers for BaseOrderedDataSet and OrderedDataSetItem."""
    # Header table; ``type`` is the polymorphic discriminator used below.
    man.baseordereddataset_table = Table(
        'baseordereddataset', man.metadata,
        Column('id', Integer, primary_key=True),
        Column('created', DateTime),
        Column('label', String(50), nullable=False, unique=True),
        Column('type', String(30), nullable=False))
    # Association rows linking a dataset to its items; ``index`` keys the
    # column_mapped_collection configured below.
    man.ordereditems_table = Table(
        'ordereddata_items', man.metadata,
        Column('dataset_id', Integer, ForeignKey('baseordereddataset.id'),
               primary_key=True),
        Column('item_id', Integer, ForeignKey('basedata.basedata_id'),
               primary_key=True),
        Column('index', Integer, nullable=False))
    # Emit DDL for the tables just defined.
    man.metadata.create_all()
    mapper(BaseOrderedDataSet, man.baseordereddataset_table,
           polymorphic_on=man.baseordereddataset_table.c.type,
           polymorphic_identity='base_ordered_dataset',
           properties={
               'data_items': relation(OrderedDataSetItem,
                                      backref='ordered_datasets_items',
                                      cascade='all, delete-orphan',
                                      # Items as a dict keyed by ``index``.
                                      collection_class=column_mapped_collection(
                                          man.ordereditems_table.c.index))
           })
    mapper(OrderedDataSetItem, man.ordereditems_table, properties={
        'item': relation(BaseData, lazy='joined', backref='dataitem')
    })
def init(prefix='wp'):
    """Define the WordPress schema and its classical SQLAlchemy mappers.

    ``prefix`` is the WordPress table prefix (default ``'wp'``), so tables
    are named e.g. ``wp_terms``, ``wp_posts``, ...  Mapped classes
    (Term, Taxonomy, Post, ...) are assumed to be defined at module level.
    """
    metadata = MetaData()

    # tables
    terms = Table('%s_terms' % prefix, metadata,
        Column('term_id', Integer(), primary_key=True, nullable=False),
        Column('name', String(length=55), primary_key=False, nullable=False),
        Column('slug', String(length=200), primary_key=False, nullable=False),
        Column('term_group', Integer(), primary_key=False, nullable=False),
        UniqueConstraint('slug'),
    )
    # One row per (term, taxonomy) pair; ``taxonomy`` doubles as the
    # polymorphic discriminator for the Taxonomy mapper below.
    term_taxonomy = Table('%s_term_taxonomy' % prefix, metadata,
        Column('term_taxonomy_id', Integer(), primary_key=True, nullable=False),
        Column('term_id', Integer(), primary_key=False, nullable=False),
        Column('taxonomy', String(length=32), primary_key=False, nullable=False),
        Column('description', Text(length=None), primary_key=False, nullable=False),
        Column('parent', Integer(), primary_key=False, nullable=False),
        Column('count', Integer(), primary_key=False, nullable=False),
        UniqueConstraint('term_id', 'taxonomy'),
        ForeignKeyConstraint(['term_id'], ['%s_terms.term_id' % prefix]),
        ForeignKeyConstraint(['parent'],
                             ['%s_term_taxonomy.term_taxonomy_id' % prefix]),
    )
    # Association table between objects (posts/links) and taxonomies.
    term_relationships = Table('%s_term_relationships' % prefix, metadata,
        Column('object_id', Integer(), primary_key=True, nullable=False),
        Column('term_taxonomy_id', Integer(), primary_key=True, nullable=False),
        ForeignKeyConstraint(['term_taxonomy_id'],
                             ['%s_term_taxonomy.term_taxonomy_id' % prefix]),
    )
    postmeta = Table('%s_postmeta' % prefix, metadata,
        Column('meta_id', Integer(), primary_key=True, nullable=False),
        Column('post_id', Integer(), primary_key=False, nullable=False),
        Column('meta_key', String(length=255), primary_key=False),
        Column('meta_value', Text(length=None), primary_key=False),
        ForeignKeyConstraint(['post_id'], ['%s_posts.ID' % prefix]),
    )
    posts = Table('%s_posts' % prefix, metadata,
        Column('ID', Integer(), primary_key=True, nullable=False),
        Column('post_author', Integer(), primary_key=False, nullable=False),
        Column('post_date', DateTime(timezone=False),
               primary_key=False, nullable=False),
        Column('post_date_gmt', DateTime(timezone=False),
               primary_key=False, nullable=False),
        Column('post_content', Text(length=None),
               primary_key=False, nullable=False),
        Column('post_title', Text(length=None),
               primary_key=False, nullable=False),
        Column('post_excerpt', Text(length=None),
               primary_key=False, nullable=False),
        Column('post_status', String(length=10),
               primary_key=False, nullable=False),
        Column('comment_status', String(length=15),
               primary_key=False, nullable=False),
        Column('ping_status', String(length=6),
               primary_key=False, nullable=False),
        Column('post_password', String(length=20),
               primary_key=False, nullable=False),
        Column('post_name', String(length=200),
               primary_key=False, nullable=False),
        Column('to_ping', Text(length=None), primary_key=False, nullable=False),
        Column('pinged', Text(length=None), primary_key=False, nullable=False),
        Column('post_modified', DateTime(timezone=False),
               primary_key=False, nullable=False),
        Column('post_modified_gmt', DateTime(timezone=False),
               primary_key=False, nullable=False),
        Column('post_content_filtered', Text(length=None),
               primary_key=False, nullable=False),
        Column('post_parent', Integer(), primary_key=False, nullable=False),
        Column('guid', String(length=255), primary_key=False, nullable=False),
        Column('menu_order', Integer(), primary_key=False, nullable=False),
        Column('post_type', String(length=20),
               primary_key=False, nullable=False),
        Column('post_mime_type', String(length=100),
               primary_key=False, nullable=False),
        Column('comment_count', Integer(), primary_key=False, nullable=False),
        ForeignKeyConstraint(['post_author'], ['%s_users.ID' % prefix]),
        ForeignKeyConstraint(['post_parent'], ['%s_posts.ID' % prefix]),
    )
    links = Table('%s_links' % prefix, metadata,
        Column('link_id', Integer(), primary_key=True, nullable=False),
        Column('link_url', String(length=255),
               primary_key=False, nullable=False),
        Column('link_name', String(length=255),
               primary_key=False, nullable=False),
        Column('link_image', String(length=255),
               primary_key=False, nullable=False),
        Column('link_target', String(length=25),
               primary_key=False, nullable=False),
        Column('link_category', Integer(), primary_key=False, nullable=False),
        Column('link_description', String(length=255),
               primary_key=False, nullable=False),
        Column('link_visible', String(length=1),
               primary_key=False, nullable=False),
        Column('link_owner', Integer(), primary_key=False, nullable=False),
        Column('link_rating', Integer(), primary_key=False, nullable=False),
        Column('link_updated', DateTime(timezone=False),
               primary_key=False, nullable=False),
        Column('link_rel', String(length=255),
               primary_key=False, nullable=False),
        Column('link_notes', Text(length=None),
               primary_key=False, nullable=False),
        Column('link_rss', String(length=255),
               primary_key=False, nullable=False),
        ForeignKeyConstraint(['link_owner'], ['%s_users.ID' % prefix]),
    )
    commentmeta = Table('%s_commentmeta' % prefix, metadata,
        Column('meta_id', Integer(), primary_key=True, nullable=False),
        Column('comment_id', Integer(), primary_key=False, nullable=False),
        Column('meta_key', String(length=255), primary_key=False),
        Column('meta_value', Text(length=None), primary_key=False),
        ForeignKeyConstraint(['comment_id'],
                             ['%s_comments.comment_ID' % prefix]),
    )
    comments = Table('%s_comments' % prefix, metadata,
        Column('comment_ID', Integer(), primary_key=True, nullable=False),
        Column('comment_post_ID', Integer(), primary_key=False, nullable=False),
        Column('comment_author', Text(length=None),
               primary_key=False, nullable=False),
        Column('comment_author_email', String(length=100),
               primary_key=False, nullable=False),
        Column('comment_author_url', String(length=200),
               primary_key=False, nullable=False),
        Column('comment_author_IP', String(length=100),
               primary_key=False, nullable=False),
        Column('comment_date', DateTime(timezone=False),
               primary_key=False, nullable=False),
        Column('comment_date_gmt', DateTime(timezone=False),
               primary_key=False, nullable=False),
        Column('comment_content', Text(length=None),
               primary_key=False, nullable=False),
        Column('comment_karma', Integer(), primary_key=False, nullable=False),
        Column('comment_approved', String(length=4),
               primary_key=False, nullable=False),
        Column('comment_agent', String(length=255),
               primary_key=False, nullable=False),
        Column('comment_type', String(length=20),
               primary_key=False, nullable=False),
        Column('comment_parent', Integer(), primary_key=False, nullable=False),
        Column('user_id', Integer(), primary_key=False, nullable=False),
        ForeignKeyConstraint(['comment_post_ID'], ['%s_posts.ID' % prefix]),
        ForeignKeyConstraint(['comment_parent'],
                             ['%s_comments.comment_ID' % prefix]),
        ForeignKeyConstraint(['user_id'], ['%s_users.ID' % prefix]),
    )
    usermeta = Table('%s_usermeta' % prefix, metadata,
        Column('umeta_id', Integer(), primary_key=True, nullable=False),
        Column('user_id', Integer(), primary_key=False, nullable=False),
        Column('meta_key', String(length=255), primary_key=False),
        Column('meta_value', Text(length=None), primary_key=False),
        ForeignKeyConstraint(['user_id'], ['%s_users.ID' % prefix]),
    )
    users = Table('%s_users' % prefix, metadata,
        Column('ID', Integer(), primary_key=True, nullable=False),
        Column('user_login', String(length=60),
               primary_key=False, nullable=False),
        Column('user_pass', String(length=64),
               primary_key=False, nullable=False),
        Column('user_nicename', String(length=50),
               primary_key=False, nullable=False),
        Column('user_email', String(length=100),
               primary_key=False, nullable=False),
        Column('user_url', String(length=100),
               primary_key=False, nullable=False),
        Column('user_registered', DateTime(timezone=False),
               primary_key=False, nullable=False),
        Column('user_activation_key', String(length=60),
               primary_key=False, nullable=False),
        Column('user_status', Integer(), primary_key=False, nullable=False),
        Column('display_name', String(length=250),
               primary_key=False, nullable=False),
    )
    options = Table('%s_options' % prefix, metadata,
        Column('option_id', Integer(), primary_key=True, nullable=False),
        Column('option_name', String(length=64),
               primary_key=True, nullable=False),
        Column('option_value', Text(length=None),
               primary_key=False, nullable=False),
        Column('autoload', String(length=3), primary_key=False, nullable=False),
    )

    # mappings
    mapper(Term, terms)
    # Base polymorphic mapper; subclasses (PostTag/Category/LinkCategory)
    # are single-table inheritors discriminated on the ``taxonomy`` column.
    taxonomy_mapper = mapper(
        Taxonomy,
        term_taxonomy,
        properties={'term': relation(Term)},
        polymorphic_on=term_taxonomy.c.taxonomy,
    )
    mapper(
        PostTag,
        properties={
            'posts': dynamic_loader(
                Post,
                secondary=term_relationships,
                primaryjoin=(term_taxonomy.c.term_taxonomy_id ==
                             term_relationships.c.term_taxonomy_id),
                secondaryjoin=(term_relationships.c.object_id == posts.c.ID),
                foreign_keys=[term_relationships.c.object_id,
                              term_relationships.c.term_taxonomy_id],
            ),
        },
        inherits=taxonomy_mapper,
        polymorphic_identity='post_tag',
    )
    mapper(
        Category,
        properties={
            # Self-referential hierarchy via the ``parent`` column.
            'children': relation(
                Category,
                backref=backref('parent_category',
                                remote_side=[term_taxonomy.c.term_taxonomy_id]),
            ),
            'posts': dynamic_loader(
                Post,
                secondary=term_relationships,
                primaryjoin=(term_taxonomy.c.term_taxonomy_id ==
                             term_relationships.c.term_taxonomy_id),
                secondaryjoin=(term_relationships.c.object_id == posts.c.ID),
                foreign_keys=[term_relationships.c.object_id,
                              term_relationships.c.term_taxonomy_id],
            ),
        },
        inherits=taxonomy_mapper,
        polymorphic_identity='category',
    )
    mapper(
        LinkCategory,
        properties={
            'links': relation(
                Link,
                secondary=term_relationships,
                primaryjoin=(term_taxonomy.c.term_taxonomy_id ==
                             term_relationships.c.term_taxonomy_id),
                secondaryjoin=(term_relationships.c.object_id ==
                               links.c.link_id),
                foreign_keys=[term_relationships.c.object_id,
                              term_relationships.c.term_taxonomy_id],
            ),
        },
        inherits=taxonomy_mapper,
        polymorphic_identity='link_category',
    )
    mapper(PostMeta, postmeta)
    mapper(
        Post,
        posts,
        properties={
            # Post meta rows as a dict keyed by meta_key.
            '_metadict': relation(PostMeta,
                                  collection_class=column_mapped_collection(
                                      postmeta.c.meta_key)),
            'children': relation(
                Post,
                backref=backref('parent', remote_side=[posts.c.ID]),
            ),
            'post_tags': relation(
                PostTag,
                secondary=term_relationships,
                primaryjoin=(posts.c.ID == term_relationships.c.object_id),
                secondaryjoin=(term_relationships.c.term_taxonomy_id ==
                               term_taxonomy.c.term_taxonomy_id),
                foreign_keys=[term_relationships.c.object_id,
                              term_relationships.c.term_taxonomy_id],
            ),
            'categories': relation(
                Category,
                secondary=term_relationships,
                primaryjoin=(posts.c.ID == term_relationships.c.object_id),
                secondaryjoin=(term_relationships.c.term_taxonomy_id ==
                               term_taxonomy.c.term_taxonomy_id),
                foreign_keys=[term_relationships.c.object_id,
                              term_relationships.c.term_taxonomy_id],
            ),
            'comments': dynamic_loader(Comment, backref='post'),
        },
    )
    mapper(
        Link,
        links,
        properties={
            'categories': relation(
                LinkCategory,
                secondary=term_relationships,
                primaryjoin=(links.c.link_id ==
                             term_relationships.c.object_id),
                secondaryjoin=(term_relationships.c.term_taxonomy_id ==
                               term_taxonomy.c.term_taxonomy_id),
                foreign_keys=[term_relationships.c.object_id,
                              term_relationships.c.term_taxonomy_id],
            ),
        },
    )
    mapper(CommentMeta, commentmeta)
    mapper(
        Comment,
        comments,
        properties={
            # Comment meta rows as a dict keyed by meta_key.
            '_metadict': relation(CommentMeta,
                                  collection_class=column_mapped_collection(
                                      commentmeta.c.meta_key)),
            'children': relation(
                Comment,
                backref=backref('parent',
                                remote_side=[comments.c.comment_ID]),
            ),
        },
    )
    mapper(UserMeta, usermeta)
    mapper(
        User,
        users,
        properties={
            # User meta rows as a dict keyed by meta_key.
            'metadata': relation(
                UserMeta,
                collection_class=column_mapped_collection(usermeta.c.meta_key),
            ),
            'posts': dynamic_loader(Post, backref='author'),
            'links': dynamic_loader(Link, backref='user'),
            'comments': dynamic_loader(Comment, backref='user'),
        },
    )
    mapper(Option, options)
def compute_orm_properties(self, THIS_IS_A_DIRTY_HACK_PROPERTIES_DICT):
    """Compute keyword arguments for an ORM relation describing this
    model relation, based on its cardinality.

    Returns the argument dict; for MANY_TO_ONE it additionally writes a
    ready-made ``orm.relation`` into the passed-in properties dict.
    NOTE(review): if ``self.cardinality.id`` is none of the handled
    values, ``primaryjoin``/``secondaryjoin`` are never bound and the
    code below raises NameError — confirm callers guarantee a valid
    cardinality.
    """
    mappedsotype = setobject_type_registry.lookup(self.target_model.klass)
    xref_table_class = None
    sourcetable = setobject_table_registry.lookup_by_class(self.source_model.klass)
    targettable = setobject_table_registry.lookup_by_class(self.target_model.klass)
    if self.cardinality.id == 'MANY_TO_ONE' or \
       self.cardinality.id == 'ONE(FK)_TO_ONE':
        # take primary key on our side
        primaryfk = getattr(sourcetable.c, self.relation.foreignkeycol)
        primaryidentifier = getattr(targettable.c,
                                    mappedsotype.get_primary_key_attr_name())
        primaryjoin = (primaryfk == primaryidentifier)
        secondaryjoin = None
    elif self.cardinality.id == 'ONE_TO_MANY' or \
         self.cardinality.id == 'ONE_TO_ONE(FK)':
        # take primary key on other side
        primaryfk = getattr(targettable.c, self.relation.foreignkeycol)
        primaryidentifier = getattr(
            sourcetable.c,
            setobject_type_registry.lookup(
                self.source_model.klass).get_primary_key_attr_name()
        )
        primaryjoin = (primaryfk == primaryidentifier)
        secondaryjoin = None
    elif self.cardinality.id == 'MANY_TO_MANY':
        # xref! we actually need two clauses here.
        # Obtain table class:
        xref_table_class = setobject_table_registry.get_by_table(
            self.relation.xref_table)
        if xref_table_class is None:
            # Class is not mapped (probably not having a primary key or
            # something like that) -> autoload
            xref_table_class = Table(self.relation.xref_table, metadata,
                                     autoload=True, useexisting=True,
                                     autoload_with=getUtility(IDbUtility).engine)
        # Compose two join clauses
        # NOTE(review): ``cls`` is not defined in this method's scope —
        # presumably a module-level or injected name; confirm, otherwise
        # this branch raises NameError.
        primaryjoin = (cls.get_primary_key_column() ==
                       getattr(xref_table_class.c, self.relation.foreignkeycol))
        secondaryjoin = (mappedsotype.get_primary_key_column() ==
                         getattr(xref_table_class.c,
                                 self.relation.foreignkeycol2))
    # add the obtained setobject to our mapped properties
    if self.cardinality.id == 'MANY_TO_ONE':
        # This mapping should really not happen here and now.
        # Instead, the linkage MUST be persisted into
        # table p2_linkage at save time!
        THIS_IS_A_DIRTY_HACK_PROPERTIES_DICT[self.attr_name] = orm.relation(
            mappedsotype,
            uselist=False,
            cascade=self.cascade,
            back_populates=self.back_populates,
            primaryjoin=primaryjoin,
            post_update=self.post_update
        )
        # NOTE(review): the same arguments are also returned below — the
        # dict write above looks like it duplicates the returned relation;
        # confirm both are needed.
        relationarguments = {
            'uselist' : False,
            'cascade' : self.cascade,
            'back_populates' : self.back_populates,
            'primaryjoin' : primaryjoin,
            'post_update' : self.post_update
        }
    else:
        # the other side has the foreign key, store things as a collection
        order_by = None
        collection_class = None
        # This is a special case for the spans as they should not be mapped
        # as ordinary dictionary, but rather as an orderable dictionary.
        # This is necessary to retain the insert order as a ordinary dict
        # isn't ordered. Consider this to be implemented in a more generic
        # way if this use case is occuring for user relations as well.
        if self.attr_name == 'spans' and self.source_model.klass == 'WidgetType':
            collection_class=SpanCollectionClass
            span_table = setobject_table_registry.lookup_by_table('p2_span')
            order_by=span_table.c.order
        # This is another special case to ensure the widgets being tab ordered
        if self.attr_name == 'widgets' and self.source_model.klass == 'FormType':
            collection_class = WidgetCollectionClass
            widget_table = setobject_table_registry.lookup_by_table('p2_widget')
            order_by=widget_table.c.tab_order
        # Get collection class: default to a dict keyed either by the
        # target's primary key or by an explicitly configured ref_key.
        if collection_class == None:
            if self.ref_key is None:
                collection_class = column_mapped_collection(
                    mappedsotype.get_primary_key_column())
            else:
                mapped_column = mappedsotype.get_column(self.ref_key)
                collection_class = column_mapped_collection(mapped_column)
        # Compute arguments:
        relationarguments = {
            'back_populates' : self.back_populates,
            'collection_class' : collection_class,
            'uselist' : True,
            'primaryjoin': primaryjoin,
            'cascade' : self.cascade,
            'post_update' : self.post_update
        }
        if order_by is not None:
            relationarguments['order_by'] = order_by
    # Set xref table if we got one (only in the MANY_TO_MANY case).
    if xref_table_class is not None:
        relationarguments['secondary'] = xref_table_class
    if secondaryjoin is not None:
        relationarguments['secondaryjoin'] = secondaryjoin
    return relationarguments
class User(db.Model, UserMixin):
    """Application user account.

    Stores credentials and confirmation state, exposes the user's
    phoneme recordings, and derives the on-disk directory layout for
    the user's files.
    """
    __tablename__ = 'users'

    id = db.Column(db.Integer, primary_key=True)
    # Stored under the column name Flask-User expects (``is_active``).
    active = db.Column('is_active', db.Boolean(), nullable=False,
                       server_default='1')
    created_at = db.Column(db.DateTime())
    confirmed = db.Column(db.Boolean(), default=False)
    void = db.Column(db.Boolean(), default=False)
    email_address = db.Column(db.String(64), unique=True)
    password_hash = db.Column(db.String(255), nullable=True, server_default='')
    last_seen = db.Column(db.DateTime())
    roles = db.relationship('Role', secondary='user_roles',
                            back_populates="users")
    phoneme_recording_objects = db.relationship("PhonemeRecording",
                                                back_populates="user")
    # Read-only dict view of the recordings keyed by phoneme_id.
    phoneme_recordings = db.relationship(
        "PhonemeRecording",
        collection_class=column_mapped_collection(
            PhonemeRecording.__table__.c.phoneme_id),
        viewonly=True)

    def __init__(self, email_address, password):
        self.email_address = email_address
        # Routed through the hashing setter below.
        self.password = password
        self.created_at = datetime.now().replace(microsecond=0)
        current_app.logger.info(f"New User {self} created")

    @property
    def password(self):
        # raise AttributeError("password is not a readable attribute")
        # this would have been ideal but breaks
        # flask-user thus the following has been implemented instead
        return self.password_hash

    @password.setter
    def password(self, password):
        self.password_hash = generate_password_hash(password)

    def check_password(self, password):
        """Return True if ``password`` matches the stored hash."""
        return check_password_hash(self.password_hash, password)

    def generate_confirmation_token(self, expiration=86400):
        """Return a signed token naming this user id (default 24 hours)."""
        s = Serializer(current_app.config['SECRET_KEY'], expires_in=expiration)
        current_app.logger.debug(f"{self} has generated a confirmation token")
        return s.dumps({"confirm": self.id}).decode('utf-8')

    def confirm_token(self, token):
        """Validate a confirmation token; True only if it names this user."""
        s = Serializer(current_app.config['SECRET_KEY'])
        try:
            data = s.loads(token.encode('utf-8'))
        except (BadSignature, BadHeader):
            return False
        if data.get('confirm') != self.id:
            return False
        current_app.logger.debug(f"{self} has successfully confirmed a token")
        return True

    @property
    def full_name(self):
        return self.first_name + " " + self.last_name

    @property
    def email_confirmed_at(self):
        # fake property for Flask-User
        return True

    @property
    def user_parent_dir(self):
        # Allows for large number of users by separating all user folders
        # into 16 different parent folders (first hex digit of the email's
        # MD5 digest).
        return hashlib.md5(str(
            self.email_address).encode('utf-8')).hexdigest().lower()[0]

    @property
    def user_secure_dir(self):
        # Per-user directory: <parent_dir>/<user id>.
        return os.path.join(self.user_parent_dir, str(self.id))

    @property
    def relative_recording_dir(self):
        return os.path.join(self.user_secure_dir, "recordings")

    @property
    def relative_output_dir(self):
        return os.path.join(self.user_secure_dir, "output")

    def ensure_dir_is_built(self):
        """Create the user's directory tree under USER_DIR if missing."""
        if not os.path.isdir(current_app.config["USER_DIR"]):
            os.mkdir(current_app.config["USER_DIR"])
            current_app.logger.info("Building directories!!")
        for directory in [
                self.user_parent_dir, self.user_secure_dir,
                self.relative_recording_dir, self.relative_output_dir
        ]:
            directory = os.path.join(current_app.config["USER_DIR"], directory)
            if not os.path.isdir(directory):
                os.mkdir(directory)
                current_app.logger.info(f"{directory} was just built!")

    def __repr__(self):
        return f'<User {self.email_address}>'
class Citation(_Base):
    """A MEDLINE or PubMed citation record.

    Attributes:
        pmid: the record's identifier (PubMed ID)
        status: the current status of this record (see `Citation.STATES`)
        year: the year of publication
        title: the record's title
        journal: the journal name (Medline TA)
        pub_date: the string of the publication date
        issue: the journal issue string
        pagination: the pagination string of the journal
        created: the record's creation date
        completed: the record's completion date
        revised: the record's revision date
        modified: the date the record was last modified in the DB

    Relations:
        sections: a :class:`list` of the record's text sections
        authors: a :class:`list` of the record's author names
        identifiers: a :class:`dict` of the record's alternate IDs using
            the :attr:`AlternateID.key` as dictionary keys
        descriptors: a :class:`list` of the record's MeSH descriptors
        qualifiers: a :class:`list` of the record's MeSH qualifiers
        chemicals: a :class:`list` of the record's chemicals
        databases: a :class:`list` of the record's external DB references

    Primary Key: ``pmid``
    """

    STATES = frozenset({
        'Completed', 'In-Process', 'PubMed-not-MEDLINE', 'In-Data-Review',
        'Publisher', 'MEDLINE', 'OLDMEDLINE'
    })

    CHILDREN = (
        Section, Identifier, Database, Author, Descriptor,
        Qualifier,  # Qualifier last!
    )

    TABLENAMES = [cls.__tablename__ for cls in CHILDREN]
    TABLES = {cls.__tablename__: cls.__table__ for cls in CHILDREN}

    __tablename__ = 'citations'

    # Abstracts keyed by their source column.
    abstracts = relation(
        Abstract, backref='citation', cascade='all, delete-orphan',
        collection_class=column_mapped_collection(Abstract.source))
    authors = relation(Author, backref='citation',
                       cascade='all, delete-orphan',
                       order_by=Author.__table__.c.pos)
    chemicals = relation(Chemical, backref='citation',
                         cascade='all, delete-orphan')
    databases = relation(Database, backref='citation',
                         cascade='all, delete-orphan')
    descriptors = relation(Descriptor, backref='citation',
                           cascade='all, delete-orphan',
                           order_by=Descriptor.__table__.c.num)
    # Identifiers keyed by namespace.
    identifiers = relation(
        Identifier, backref='citation', cascade='all, delete-orphan',
        collection_class=column_mapped_collection(Identifier.namespace))
    keywords = relation(Keyword, backref='citation',
                        cascade='all, delete-orphan',
                        order_by=Keyword.__table__.c.owner)
    publication_types = relation(PublicationType, backref='citation',
                                 cascade='all, delete-orphan')
    qualifiers = relation(Qualifier, backref='citation')
    sections = relation(Section, backref='citation')

    pmid = Column(BigInteger, CheckConstraint('pmid > 0'),
                  primary_key=True, autoincrement=False)
    status = Column(Enum(*STATES, name='state'), nullable=False)
    year = Column(SmallInteger,
                  CheckConstraint('year > 1000 AND year < 3000'),
                  nullable=False)
    title = Column(UnicodeText, CheckConstraint("title <> ''"),
                   nullable=False)
    journal = Column(Unicode(length=256), CheckConstraint("journal <> ''"),
                     nullable=False)
    pub_date = Column(Unicode(length=256), CheckConstraint("pub_date <> ''"),
                      nullable=False)
    issue = Column(Unicode(length=256), CheckConstraint("issue <> ''"),
                   nullable=True)
    pagination = Column(Unicode(length=256),
                        CheckConstraint("pagination <> ''"), nullable=True)
    created = Column(Date, nullable=False)
    completed = Column(Date, nullable=True)
    revised = Column(Date, nullable=True)
    # Auto-maintained by the DB layer on insert/update.
    modified = Column(Date, default=date.today, onupdate=date.today,
                      nullable=False)

    def __init__(self, pmid: int, status: str, title: str, journal: str,
                 pub_date: str, created: date, completed: date = None,
                 revised: date = None, issue: str = None,
                 pagination: str = None):
        # NOTE: asserts are stripped under -O; validation here assumes the
        # caller already sanitized parsed MEDLINE input.
        assert pmid > 0, pmid
        assert status in Citation.STATES, repr(status)
        assert title, repr(title)
        assert journal, repr(journal)
        assert pub_date, repr(pub_date)
        assert isinstance(created, date), repr(created)
        assert completed is None or isinstance(completed, date), \
            repr(completed)
        assert revised is None or isinstance(revised, date), repr(revised)
        assert pagination is None or pagination
        assert issue is None or issue
        # pub_date must carry more than just the 4-digit year.
        assert len(pub_date) > 4, pub_date
        self.pmid = pmid
        self.status = status
        self.title = title
        self.journal = journal
        self.pub_date = pub_date
        self.year = int(pub_date[:4])
        self.issue = issue
        self.pagination = pagination
        self.created = created
        self.completed = completed
        self.revised = revised

    def __str__(self):
        # Tab-separated dump of all columns, newline-terminated.
        return '{}\n'.format('\t'.join(map(str, [
            NULL(self.pmid), NULL(self.status), NULL(self.year),
            STRING(self.title), STRING(self.journal), STRING(self.pub_date),
            NULL(self.issue), NULL(self.pagination),
            DATE(self.created), DATE(self.completed), DATE(self.revised),
            DATE(date.today() if self.modified is None else self.modified)
        ])))

    def __repr__(self):
        return "Citation<{}>".format(self.pmid)

    def __eq__(self, other):
        # Column-wise equality on all user-set fields ('modified' excluded).
        return isinstance(other, Citation) and \
            self.pmid == other.pmid and \
            self.status == other.status and \
            self.year == other.year and \
            self.journal == other.journal and \
            self.title == other.title and \
            self.pub_date == other.pub_date and \
            self.issue == other.issue and \
            self.pagination == other.pagination and \
            self.created == other.created and \
            self.completed == other.completed and \
            self.revised == other.revised

    def citation(self):
        """Return a short citation string: "<pub_date>[; issue][: pages]"."""
        issue = '; {}'.format(self.issue) if self.issue else ""
        pagination = ': {}'.format(self.pagination) if self.pagination else ""
        return "{}{}{}".format(self.pub_date, issue, pagination)

    @classmethod
    def insert(cls, data: dict):
        """Insert *data* into all relevant tables.

        *data* maps table names to lists of row dicts. Citations,
        abstracts and descriptors are inserted first (FK ordering), then
        the remaining child tables. Everything runs in one transaction
        that is rolled back on any error.
        """
        target_ins = dict((tname, cls.TABLES[tname].insert())
                          for tname in cls.TABLENAMES)
        conn = _db.engine.connect()
        transaction = conn.begin()
        try:
            if cls.__tablename__ in data and len(data[cls.__tablename__]):
                conn.execute(cls.__table__.insert(),
                             data[cls.__tablename__])
            if Abstract.__tablename__ in data and len(
                    data[Abstract.__tablename__]):
                conn.execute(Abstract.__table__.insert(),
                             data[Abstract.__tablename__])
            if Descriptor.__tablename__ in data and len(
                    data[Descriptor.__tablename__]):
                conn.execute(Descriptor.__table__.insert(),
                             data[Descriptor.__tablename__])
            for tname in cls.TABLENAMES:
                if tname in (Descriptor.__tablename__,
                             Abstract.__tablename__):
                    continue  # already inserted explicitly above
                if tname in data and len(data[tname]):
                    conn.execute(target_ins[tname], data[tname])
            transaction.commit()
        except BaseException:
            # Explicit (was a bare 'except:'): roll back and re-raise
            # whatever interrupted the transaction.
            transaction.rollback()
            raise
        finally:
            conn.close()

    @classmethod
    def select(cls, pmids: list, attributes):
        """Return the `pmid` and *attributes* for each row as a
        `sqlalchemy.engine.RowProxy` that matches one of the *pmids*.

        *attributes* is an iterable of column-name strings.
        """
        if not len(pmids):
            return []
        c = cls.__table__.c
        mapping = {col.key: col for col in c}
        columns = [mapping[name] for name in attributes]
        columns.insert(0, c.pmid)
        query = select(columns, c.pmid.in_(pmids))
        return _fetch_all(query)

    @classmethod
    def selectAll(cls, pmids: list):
        """Return all columns for each row as a
        `sqlalchemy.engine.RowProxy` that matches one of the *pmids*.
        """
        if not len(pmids):
            return []
        c = cls.__table__.c
        query = select([cls.__table__], c.pmid.in_(pmids))
        return _fetch_all(query)

    @classmethod
    def delete(cls, primary_keys: list):
        """Delete records and their dependent entities (authors,
        identifiers, etc.) for the given *primary_keys* (a list of PMIDs).

        Child rows are removed via the 'delete-orphan' cascades declared
        on the relations above.
        """
        if not len(primary_keys):
            return
        t = cls.__table__
        query = t.delete(t.c.pmid.in_(primary_keys))
        conn = _db.engine.connect()
        transaction = conn.begin()
        try:
            conn.execute(query)
            transaction.commit()
        except BaseException:
            # Explicit (was a bare 'except:'): roll back, then re-raise.
            transaction.rollback()
            raise
        finally:
            conn.close()

    @classmethod
    def existing(cls, pmids: list):
        """Return the sub- `set` of all *pmids* that exist in the DB."""
        if not len(pmids):
            return set()
        c = cls.__table__.c
        query = select([c.pmid], c.pmid.in_(pmids))
        conn = _db.engine.connect()
        try:
            return {row[0] for row in conn.execute(query)}
        finally:
            conn.close()

    @classmethod
    def missing(cls, pmids: list):
        """Return the sub- `set` of all *pmids* that do not exist in the
        DB."""
        return set(pmids) - Citation.existing(pmids)

    @classmethod
    def modifiedBefore(cls, pmids: list, before: date):
        """Return the sub- `set` of all *pmids* that have been `modified`
        *before* a `datetime.date` in the DB.
        """
        if not len(pmids):
            return set()
        c = cls.__table__.c
        query = select([c.pmid],
                       c.pmid.in_(pmids) & (c.modified < before))
        conn = _db.engine.connect()
        try:
            return {row[0] for row in conn.execute(query)}
        finally:
            conn.close()