class FlowAction(db.Model, TimestampMixin):
    """Action list belonging to a Flow."""

    __tablename__ = 'workflow_flow_action'

    id = db.Column(db.Integer(), nullable=False, primary_key=True,
                   autoincrement=True)
    """FlowAction identifier."""

    flow_id = db.Column(
        UUIDType, db.ForeignKey(FlowDefine.flow_id),
        nullable=False, unique=False, index=True)
    """the id of flow."""

    action_id = db.Column(db.Integer(), db.ForeignKey(Action.id),
                          nullable=False, unique=False)
    """the id of action."""

    action_version = db.Column(db.String(64), nullable=True)
    """the version of used action."""

    action_order = db.Column(db.Integer(), nullable=False, unique=False)
    """the order of action."""

    action_condition = db.Column(db.String(255), nullable=True, unique=False)
    """the condition of transition."""

    # FIX: constant was misspelled "TATUSPOLICY"; renamed to STATUSPOLICY.
    STATUSPOLICY = [
        (AvailableStautsPolicy.USABLE,
         AvailableStautsPolicy.describe(AvailableStautsPolicy.USABLE)),
        (AvailableStautsPolicy.UNUSABLE,
         AvailableStautsPolicy.describe(AvailableStautsPolicy.UNUSABLE)),
    ]
    """Subscription policy choices."""

    # Deprecated alias kept for backward compatibility with any external
    # references to the original misspelled name.
    TATUSPOLICY = STATUSPOLICY

    action_status = db.Column(
        ChoiceType(STATUSPOLICY, impl=db.String(1)),
        nullable=False,
        default=AvailableStautsPolicy.USABLE)
    """the status of flow action."""

    action_date = db.Column(
        db.DateTime, nullable=False, default=datetime.now)
    """the use date of action."""

    action = db.relationship(
        Action,
        backref=db.backref('flow_action'))
    """flow action relationship."""

    action_roles = db.relationship(
        'FlowActionRole',
        backref=db.backref('flow_action'))
    """flow action roles relationship."""
class GroupRelationship(db.Model, Timestamp):
    """Group relationship model.

    Stores one directed, typed relation between two :class:`Group` rows,
    plus (via an M2M table) the sub-relationships grouped under it.
    """

    __tablename__ = 'grouprelationship'
    __table_args__ = (
        UniqueConstraint('source_id', 'target_id', 'relation',
                         name='uq_grouprelationship_source_target_relation'),
        # TODO: Change to "index=True"
        Index('ix_grouprelationship_source', 'source_id'),
        Index('ix_grouprelationship_target', 'target_id'),
        Index('ix_grouprelationship_relation', 'relation'),
    )
    id = db.Column(UUIDType, default=uuid.uuid4, primary_key=True)
    # Kind of relationship (see GroupType enum).
    type = db.Column(db.Enum(GroupType), nullable=False)
    # Semantic relation between source and target (see Relation enum).
    relation = db.Column(db.Enum(Relation), nullable=False)
    source_id = db.Column(UUIDType,
                          db.ForeignKey(Group.id,
                                        ondelete='CASCADE',
                                        onupdate='CASCADE'),
                          nullable=False)
    target_id = db.Column(UUIDType,
                          db.ForeignKey(Group.id,
                                        ondelete='CASCADE',
                                        onupdate='CASCADE'),
                          nullable=False)

    # DB relationships
    source = db.relationship(Group, foreign_keys=[source_id],
                             backref='sources')
    target = db.relationship(Group, foreign_keys=[target_id],
                             backref='targets')

    # Self-referential M2M: the sub-relationships merged into this one.
    # Lambdas defer evaluation until GroupRelationshipM2M is defined.
    relationships = db.relationship(
        'GroupRelationship',
        secondary=lambda: GroupRelationshipM2M.__table__,
        primaryjoin=lambda: (GroupRelationship.id ==
                             GroupRelationshipM2M.relationship_id),
        secondaryjoin=lambda: (GroupRelationship.id ==
                               GroupRelationshipM2M.subrelationship_id))

    # TODO:
    # We don't store 'deleted' as in the relation as most likely don't need
    # that as 'ground truth' in precomputed groups anyway

    def __repr__(self):
        """String representation of the group relationship."""
        return f'<{self.source} {self.relation.name} {self.target}>'
class ReanaJob(db.Model):
    """Model defining REANA job."""

    __tablename__ = 'reana'

    id = db.Column(UUIDType, primary_key=True, nullable=False,
                   default=uuid.uuid4)
    # Owner of the job.
    user_id = db.Column(
        db.Integer,
        db.ForeignKey(User.id),
        nullable=False,
    )
    # Record the job was run for.
    record_id = db.Column(
        UUIDType,
        db.ForeignKey(RecordMetadata.id),
        nullable=False,
    )
    # Human-readable job name; not unique.
    name = db.Column(db.String(100), unique=False, nullable=False)
    # Workflow input parameters: JSONB on PostgreSQL, generic JSON elsewhere.
    params = db.Column(JSONType().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ),
        default=lambda: dict(),
        nullable=True)
    # Workflow output; same storage strategy as ``params``.
    output = db.Column(JSONType().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ),
        default=lambda: dict(),
        nullable=True)

    user = db.relationship('User')
    record = db.relationship('RecordMetadata')

    @classmethod
    def get_jobs(cls, user_id, record_id):
        """Return all the jobs run by user for this record."""
        return cls.query.filter_by(user_id=user_id,
                                   record_id=record_id).all()
class TermSources(db.Model): __tablename__ = 'iroko_terms_sources' # TODO: Esta relacion deberia hacerse con los UUIDs y no con los IDs term_id = db.Column(db.Integer, db.ForeignKey('iroko_terms.id'), primary_key=True) sources_id = db.Column(db.Integer, db.ForeignKey('iroko_sources.id'), primary_key=True) data = db.Column(JSONType) source = db.relationship("Source", backref=db.backref("term_sources")) term = db.relationship("Term", backref=db.backref("term_sources"))
class FlowDefine(db.Model, TimestampMixin):
    """Definition of a workflow Flow."""

    __tablename__ = 'workflow_flow_define'

    id = db.Column(db.Integer(), nullable=False, primary_key=True,
                   autoincrement=True)
    """Flow identifier."""

    flow_id = db.Column(
        UUIDType,
        nullable=False,
        unique=True,
        index=True,
        # BUG FIX: was ``default=uuid.uuid4()`` — the call evaluated once at
        # import time, so every row would share the same UUID and collide on
        # this unique index. Passing the callable yields a fresh UUID per row.
        default=uuid.uuid4)
    """the id of flow."""

    flow_name = db.Column(
        db.String(255), nullable=True, unique=True, index=True)
    """the name of flow."""

    flow_user = db.Column(
        db.Integer(), db.ForeignKey(User.id),
        nullable=True, unique=False)
    """the user who update the flow."""

    user_profile = db.relationship(User)
    """User relationship."""

    FLOWSTATUSPOLICY = [
        (FlowStatusPolicy.AVAILABLE,
         FlowStatusPolicy.describe(FlowStatusPolicy.AVAILABLE)),
        (FlowStatusPolicy.INUSE,
         FlowStatusPolicy.describe(FlowStatusPolicy.INUSE)),
        (FlowStatusPolicy.MAKING,
         FlowStatusPolicy.describe(FlowStatusPolicy.MAKING)),
    ]
    """Subscription policy choices."""

    flow_status = db.Column(
        ChoiceType(FLOWSTATUSPOLICY, impl=db.String(1)),
        nullable=False,
        default=FlowStatusPolicy.MAKING,
        info=dict(
            label=_('Subscription Policy'),
            widget=RadioGroupWidget(FlowStatusPolicy.descriptions),
        ))
    """the status of flow."""

    flow_actions = db.relationship('FlowAction', backref=db.backref('flow'))
    """flow action relationship."""
class PropertyValue(db.Model, Timestamp):
    """Property and Value match to be used in Property based ACL queries."""

    __tablename__ = 'explicit_acls_propertyvalue'

    #
    # Fields
    #
    id = db.Column(db.String(36), default=gen_uuid_key, primary_key=True)
    """Primary key."""

    # Owning ACL (PropertyValueACL).
    acl_id = db.Column(
        db.ForeignKey('explicit_acls_propertyvalueacl.id',
                      name='fk_explicit_acls_propertyvalue_acl_id'))
    acl = db.relationship('PropertyValueACL', back_populates='property_values')

    name = db.Column(db.String(64))
    """Name of the property in elasticsearch."""

    value = db.Column(db.String(128))
    """Value of the property in elasticsearch."""

    match_operation = db.Column(ChoiceType(MatchOperation,
                                           impl=db.String(length=10)),
                                default=MatchOperation.term)
    """Property value matching mode: can be either term or match."""

    bool_operation = db.Column(ChoiceType(BoolOperation,
                                          impl=db.String(length=10)),
                               default=BoolOperation.must)
    """Bool filter operation mode this property belongs to."""

    originator_id = db.Column(db.ForeignKey(
        User.id,
        ondelete='CASCADE',
    ), nullable=False, index=True)
    originator = db.relationship(
        User, backref=db.backref("authored_properties"))
    """The originator (person that last time modified the Property)"""

    def __str__(self):
        """Returns string representation of the class."""
        return '%s: %s(%s=%s)' % (
            self.bool_operation,
            self.match_operation,
            self.name,
            self.value,
        )
class HEPSubmission(db.Model):
    """
    This is the main submission object.

    It maintains the submissions to HEPData and who the coordinator and
    who the reviewers/uploaders are (via participants).
    """

    __tablename__ = "hepsubmission"

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    publication_recid = db.Column(db.Integer)
    inspire_id = db.Column(db.String(128))

    data_abstract = db.Column(db.LargeBinary)

    resources = db.relationship("DataResource",
                                secondary="data_resource_link",
                                cascade="all,delete")

    # coordinators are already logged in to submit records,
    # so we know their User id.
    coordinator = db.Column(db.Integer, db.ForeignKey(User.id))
    participants = db.relationship("SubmissionParticipant",
                                   secondary="submission_participant_link",
                                   cascade="all,delete")

    # when this flag is set to 'ready', all data records will have an
    # invenio record created for them.
    overall_status = db.Column(db.String(128), default='todo')

    created = db.Column(db.DateTime, nullable=False, default=func.now(),
                        index=True)
    last_updated = db.Column(db.DateTime, nullable=True, default=func.now(),
                             index=True)

    # this links to the latest version of the data files to be shown
    # in the submission and allows one to go back in time via the
    # interface to view various stages of the submission.
    version = db.Column(db.Integer, default=1)

    # the doi for the whole submission.
    doi = db.Column(db.String(128), nullable=True)

    # whether reviewers have been notified for this submission.
    reviewers_notified = db.Column(db.Boolean, default=False)
class WorkFlow(db.Model, TimestampMixin):
    """Define WorkFlow."""

    __tablename__ = 'workflow_workflow'

    id = db.Column(db.Integer(), nullable=False, primary_key=True,
                   autoincrement=True)
    """Flows identifier."""

    flows_id = db.Column(
        UUIDType, nullable=False, unique=True, index=True,
        # BUG FIX: was ``default=uuid.uuid4()`` — evaluated once at import
        # time, so every row would get the same UUID and break this unique
        # index. Passing the callable generates a fresh UUID per row.
        default=uuid.uuid4)
    """the id of flows."""

    flows_name = db.Column(
        db.String(255), nullable=True, unique=False, index=False)
    """the name of flows."""

    itemtype_id = db.Column(
        db.Integer(), db.ForeignKey(ItemType.id),
        nullable=False, unique=False)
    """the id of itemtype."""

    itemtype = db.relationship(
        ItemType,
        backref=db.backref('workflow', lazy='dynamic',
                           order_by=desc('item_type.tag')))
    """itemtype relationship."""

    index_tree_id = db.Column(db.BigInteger, nullable=True, unique=False)
    """Index tree id that this workflow will belong to."""

    flow_id = db.Column(
        db.Integer(), db.ForeignKey(FlowDefine.id),
        nullable=False, unique=False)
    """the id of flow."""

    flow_define = db.relationship(
        FlowDefine,
        backref=db.backref('workflow', lazy='dynamic'))
    """flow define relationship."""
class RecordCitations(db.Model):
    """Adds Citation table which holds all references
    which are also eligible citations."""

    __tablename__ = "records_citations"

    __table_args__ = (
        db.Index("ix_records_citations_cited_id_citer_id",
                 "cited_id", "citer_id"),
        db.Index(
            "ix_records_citations_cited_id_citation_type",
            "cited_id",
            "is_self_citation",
        ),
        db.Index(
            "ix_records_citations_citer_id_citation_type",
            "citer_id",
            "is_self_citation",
        ),
    )

    # Record doing the citing; part of the composite primary key.
    citer_id = db.Column(
        UUIDType,
        db.ForeignKey("records_metadata.id",
                      name="fk_records_citations_citer_id"),
        nullable=False,
        primary_key=True,
    )
    # Record being cited; part of the composite primary key.
    cited_id = db.Column(
        UUIDType,
        db.ForeignKey("records_metadata.id",
                      name="fk_records_citations_cited_id"),
        nullable=False,
        primary_key=True,
    )
    citation_date = db.Column(Date)

    # Relationship: Relation to record which cites
    # Backref: List of all references of this record
    # which are counted as citations in other records.
    citer = db.relationship(RecordMetadata, backref="references",
                            foreign_keys=[citer_id])

    # Relationship: Relation to cited article
    # Backref: List of all citations of this record.
    cited = db.relationship(RecordMetadata, backref="citations",
                            foreign_keys=[cited_id])

    is_self_citation = db.Column(Boolean, nullable=False, default=False)
class GitSnapshot(db.Model):
    """Snapshot information for a Git repo."""

    __tablename__ = 'git_snapshot'

    id = db.Column(db.Integer, primary_key=True)

    # webhook payload / event
    # BUG FIX: ``default={}`` shared a single mutable dict across all rows
    # created in the process; use the ``dict`` factory so each row gets its
    # own empty payload.
    payload = db.Column(json_type, default=dict, nullable=True)

    # git specifics
    tag = db.Column(db.String(255), nullable=True)
    ref = db.Column(db.String(255), nullable=True)

    # foreign keys (connecting to repo and events)
    webhook_id = db.Column(db.Integer, db.ForeignKey(GitWebhook.id),
                           nullable=False)
    webhook = db.relationship(
        GitWebhook,
        backref=db.backref("snapshots", cascade="all, delete-orphan"))

    created = db.Column(db.DateTime, server_default=db.func.now())

    @staticmethod
    def create(webhook, data):
        """Create, persist and return a snapshot for ``webhook``.

        :param webhook: the GitWebhook row the snapshot belongs to.
        :param data: event payload; ``data['commit']`` must contain ``'id'``
            and may contain ``'tag'``.
        :return: the committed :class:`GitSnapshot` instance
            (previously the method returned ``None``).
        """
        snapshot = GitSnapshot(payload=data,
                               webhook_id=webhook.id,
                               tag=data['commit'].get('tag'),
                               ref=data['commit']['id'])
        db.session.add(snapshot)
        db.session.commit()
        return snapshot
class Event(db.Model, Timestamp):
    """Event model."""

    __tablename__ = 'event'

    id = db.Column(UUIDType, default=uuid.uuid4, primary_key=True)
    payload = db.Column(JSONType)
    status = db.Column(db.Enum(EventStatus), nullable=False)
    user_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=True)
    user = db.relationship(User)

    @classmethod
    def get(cls, id: str = None, **kwargs):
        """Return the event with the given ``id`` or ``None`` if not found.

        Extra keyword arguments are accepted for signature compatibility
        but are currently ignored.
        """
        return cls.query.filter_by(id=id).one_or_none()

    @classmethod
    def getStatsFromLastWeek(cls):
        """Return ``(status, count)`` pairs for events updated in the last 7 days."""
        last_week = datetime.datetime.now() - datetime.timedelta(days=7)
        # FIX: compare the column against the datetime itself rather than
        # ``str(last_week)`` — string comparison relied on backend-specific
        # coercion and is fragile across databases.
        resp = db.session.query(cls.status, func.count('*')).filter(
            cls.updated > last_week).group_by(cls.status).all()
        return resp

    def __repr__(self):
        """String representation of the event."""
        return f"<{self.id}: {self.created}>"
class DraftMetadata(db.Model, DraftMetadataBase):
    """Represent a marc21 record draft metadata."""

    __tablename__ = "marc21_drafts_metadata"

    # Bucket holding the files attached to this draft.
    bucket_id = db.Column(UUIDType, db.ForeignKey(Bucket.id))
    bucket = db.relationship(Bucket)
class Redirect(db.Model, Timestamp):
    """Redirect for a persistent identifier.

    You can redirect a PID to another one. E.g.

    .. code-block:: python

        pid1 = PersistentIdentifier.get(pid_type="recid", pid_value="1")
        pid2 = PersistentIdentifier.get(pid_type="recid", pid_value="2")
        pid1.redirect(pid=pid2)
        assert pid2.pid_value == pid1.get_redirect().pid_value
    """

    __tablename__ = 'pidstore_redirect'

    id = db.Column(UUIDType, default=uuid.uuid4, primary_key=True)
    """Id of redirect entry."""

    # RESTRICT prevents deleting a PID that is still a redirect target.
    pid_id = db.Column(db.Integer,
                       db.ForeignKey(PersistentIdentifier.id,
                                     onupdate="CASCADE",
                                     ondelete="RESTRICT"),
                       nullable=False)
    """Persistent identifier."""

    pid = db.relationship(PersistentIdentifier, backref='redirects')
    """Relationship to persistent identifier."""
class DataReview(db.Model):
    """
    Represent a data review including links to the messages
    made about a data record upload and its current status.
    """

    __tablename__ = "datareview"

    id = db.Column(db.Integer, primary_key=True, nullable=False,
                   autoincrement=True)
    publication_recid = db.Column(db.Integer)
    # Reviewed data submission; removed together with the submission.
    data_recid = db.Column(
        db.Integer, db.ForeignKey("datasubmission.id", ondelete='CASCADE'))

    creation_date = db.Column(db.DateTime, nullable=False,
                              default=datetime.utcnow, index=True)
    modification_date = db.Column(db.DateTime, nullable=False,
                                  default=datetime.utcnow, index=True,
                                  onupdate=datetime.utcnow)

    # as in, passed, attention, to do
    status = db.Column(db.String(20), default="todo")

    messages = db.relationship("Message", secondary="review_messages",
                               cascade="all,delete")

    version = db.Column(db.Integer, default=0)
class PropertyValueACL(ESACLMixin, ACL):
    """ACL matching all records whose metadata property equals a constant value."""

    __tablename__ = 'explicit_acls_propertyvalueacl'
    __mapper_args__ = {
        'polymorphic_identity': 'propertyvalue',
    }

    #
    # Fields
    #
    id = db.Column(db.String(36), db.ForeignKey('explicit_acls_acl.id'),
                   primary_key=True)
    """Id maps to base class' id"""

    property_values = db.relationship("PropertyValue", back_populates="acl")
    """Property/value matchers that together select the records of this ACL."""

    @property
    def record_selector(self):
        """Build the elasticsearch bool query matching resources of this ACL."""
        clauses = {}
        for pv in self.property_values:  # type: PropertyValue
            bucket = clauses.setdefault(pv.bool_operation.value, [])
            bucket.append({pv.match_operation.value: {pv.name: pv.value}})
        return {'bool': clauses}

    def __repr__(self):
        """String representation for model."""
        return '"{0.name}" ({0.id}) on schemas {0.schemas}'.format(self)
class ActionRoles(ActionNeedMixin, db.Model):
    """ActionRoles data model.

    It relates an allowed action with a role.
    """

    __tablename__ = 'access_actionsroles'

    __table_args__ = (UniqueConstraint(
        'action', 'exclude', 'argument', 'role_id',
        name='access_actionsroles_unique'), )

    role_id = db.Column(db.Integer(),
                        db.ForeignKey(Role.id, ondelete='CASCADE'),
                        nullable=False, index=True)

    # NOTE(review): the backref is named "actionusers" although this model
    # links roles, not users — it looks copied from an ActionUsers model.
    # Renaming would break any existing callers of Role.actionusers, so it is
    # left unchanged here; confirm the intended name with the maintainers.
    role = db.relationship("Role",
                           backref=db.backref("actionusers",
                                              cascade="all, delete-orphan"))

    @property
    def need(self):
        """Return RoleNeed instance."""
        return RoleNeed(self.role.name)
class ObjectEvent(db.Model, Timestamp):
    """Event related to an Identifier or Relationship."""

    __tablename__ = 'objectevent'

    __table_args__ = (PrimaryKeyConstraint(
        'event_id', 'object_uuid', 'payload_type', 'payload_index',
        name='pk_objectevent'), )

    event_id = db.Column(UUIDType, db.ForeignKey(Event.id), nullable=False)
    object_uuid = db.Column(UUIDType, nullable=False)
    payload_type = db.Column(db.Enum(PayloadType), nullable=False)
    payload_index = db.Column(db.Integer, nullable=False)

    event = db.relationship(Event, backref='object_events')

    @property
    def object(self) -> Union[Identifier, Relationship]:
        """Resolve the referenced object (Identifier or Relationship)."""
        is_identifier = self.payload_type == PayloadType.Identifier
        model = Identifier if is_identifier else Relationship
        return model.query.get(self.object_uuid)

    def __repr__(self):
        """String representation of the object event."""
        return f"<{self.event_id}: {self.object_uuid}>"
class DraftMetadata(db.Model, DraftMetadataBase):
    """Represent a bibliographic record draft metadata."""

    __tablename__ = 'rdm_drafts_metadata'

    # Bucket holding the files attached to this draft.
    bucket_id = db.Column(UUIDType, db.ForeignKey(Bucket.id))
    bucket = db.relationship(Bucket)
class Request(db.Model, RecordMetadataBase):
    """Requests model."""

    __tablename__ = 'requests_request'
    __table_args__ = {'extend_existing': True}
    # Versioning is explicitly disabled for requests.
    __versioned__ = {'versioning': False}

    # User who owns the request.
    owner_id = db.Column(
        db.Integer,
        db.ForeignKey(User.id),
    )

    # Microsecond-precision datetimes on MySQL (DATETIME(fsp=6)).
    expires_at = db.Column(
        db.DateTime().with_variant(mysql.DATETIME(fsp=6), "mysql"),
    )

    cancelled_at = db.Column(
        db.DateTime().with_variant(mysql.DATETIME(fsp=6), "mysql"),
    )

    sla = db.Column(
        db.DateTime().with_variant(mysql.DATETIME(fsp=6), "mysql"),
    )

    routing_key = db.Column(db.String(255), )

    owner = db.relationship(
        User,
        backref='requests',
    )

    @classmethod
    def delete(cls, request):
        """Delete request."""
        with db.session.begin_nested():
            db.session.delete(request)
class UserIdentity(db.Model, Timestamp):
    """Represent a UserIdentity record.

    Links a local :class:`User` to an identity in an external system
    (identified by ``method`` + external ``id``).
    """

    __tablename__ = "accounts_useridentity"

    id = db.Column(db.String(255), primary_key=True, nullable=False)
    method = db.Column(db.String(255), primary_key=True, nullable=False)
    id_user = db.Column(db.Integer(), db.ForeignKey(User.id), nullable=False)

    user = db.relationship(User, backref="external_identifiers")

    __table_args__ = (db.Index("accounts_useridentity_id_user_method",
                               id_user, method, unique=True), )

    @classmethod
    def get_user(cls, method, external_id):
        """Return the local user linked to the given external identity, if any."""
        identity = cls.query.filter_by(id=external_id,
                                       method=method).one_or_none()
        return identity.user if identity is not None else None

    @classmethod
    def create(cls, user, method, external_id):
        """Link a user to an external id.

        :param user: A :class:`invenio_accounts.models.User` instance.
        :param method: The identity source (e.g. orcid, github)
        :param external_id: The external identifier.
        :raises AlreadyLinkedError: Raised if a link already exists.
        """
        link = cls(id=external_id, method=method, id_user=user.id)
        try:
            with db.session.begin_nested():
                db.session.add(link)
        except IntegrityError:
            raise AlreadyLinkedError(
                # dict used for backward compatibility (came from oauthclient)
                user,
                {"id": external_id, "method": method},
            )

    @classmethod
    def delete_by_external_id(cls, method, external_id):
        """Unlink a user from an external id."""
        with db.session.begin_nested():
            cls.query.filter_by(method=method, id=external_id).delete()

    @classmethod
    def delete_by_user(cls, method, user):
        """Unlink a user from an external id."""
        with db.session.begin_nested():
            cls.query.filter_by(method=method, id_user=user.id).delete()
class UserActor(UserMixin, Actor):
    """An actor matching a set of users identified by ID."""

    __tablename__ = 'explicit_acls_useractor'
    __mapper_args__ = {
        'polymorphic_identity': 'user',
    }

    id = db.Column(db.String(36), db.ForeignKey('explicit_acls_actor.id'),
                   primary_key=True)
    """Id maps to base class' id"""

    users = db.relationship(User,
                            secondary=users_actors,
                            lazy='subquery',
                            backref=db.backref('actors', lazy=True))

    def __str__(self):
        """Returns the string representation of the actor."""
        return 'UserActor[%s]' % self.name

    def get_elasticsearch_representation(self, another=None, record=None,
                                         **kwargs):
        """
        Returns ES representation of this Actor.

        :param another: A serialized representation of the previous Actor of
            the same type. The implementation should merge it with its own
            ES representation.
        :return: The elasticsearch representation of the property on Record
        """
        merged_ids = {u.id for u in self.users}
        merged_ids.update(another or [])
        return list(merged_ids)

    def user_matches(self, user: Union[User, AnonymousUser],
                     context: Dict, record: Record = None) -> bool:
        """
        Checks if a user is allowed to perform any operation according to
        the ACL.

        :param user: user being checked against the ACL
        :param context: any extra context carrying information about the user
        """
        if user.is_anonymous:
            return False
        return any(candidate == user for candidate in self.users)

    def get_matching_users(self, record: Record = None) -> Iterable[int]:
        """
        Returns a list of users matching this Actor.

        :return: Iterable of a user ids
        """
        return [candidate.id for candidate in self.users]
class RDMDraftMetadata(db.Model, DraftMetadataBase, ParentRecordMixin):
    """Draft metadata for a record."""

    __tablename__ = 'rdm_drafts_metadata'
    # ParentRecordMixin links this draft to its parent record model.
    __parent_record_model__ = RDMParentMetadata

    # Bucket holding the files attached to this draft.
    bucket_id = db.Column(UUIDType, db.ForeignKey(Bucket.id))
    bucket = db.relationship(Bucket)
class Workflow(db.Model):
    """Represents a workflow instance storing the state of the workflow."""

    __tablename__ = "workflows_workflow"

    uuid = db.Column(UUIDType, primary_key=True, nullable=False,
                     # BUG FIX: was ``default=uuid.uuid4()`` — evaluated once
                     # at import time, so every new row would share one
                     # primary key value. Pass the callable for per-row UUIDs.
                     default=uuid.uuid4)
    name = db.Column(db.String(255), default="Default workflow",
                     nullable=False)
    created = db.Column(db.DateTime, default=datetime.now, nullable=False)
    modified = db.Column(db.DateTime, default=datetime.now,
                         onupdate=datetime.now, nullable=False)
    id_user = db.Column(db.Integer, default=0, nullable=False)
    # Arbitrary workflow state; JSON on PostgreSQL, generic JSON elsewhere.
    extra_data = db.Column(JSONType().with_variant(
        postgresql.JSON(none_as_null=True),
        'postgresql',
    ),
        default=lambda: dict(),
        nullable=False)
    status = db.Column(ChoiceType(WorkflowStatus, impl=db.Integer()),
                       default=WorkflowStatus.NEW, nullable=False)
    objects = db.relationship("WorkflowObjectModel",
                              backref='workflows_workflow',
                              cascade="all, delete-orphan")

    def __repr__(self):
        """Represent a Workflow instance."""
        return "<Workflow(name: %s, cre: %s, mod: %s," \
               "id_user: %s, status: %s)>" % \
               (str(self.name), str(self.created), str(self.modified),
                str(self.id_user), str(self.status))

    def __str__(self):
        """Represent a Workflow instance."""
        return self.__repr__()

    @classmethod
    def delete(cls, uuid):
        """Delete a workflow."""
        to_delete = Workflow.query.get(uuid)
        db.session.delete(to_delete)

    def save(self, status=None):
        """Save object to persistent storage."""
        with db.session.begin_nested():
            self.modified = datetime.now()
            if status is not None:
                self.status = status
            if self.extra_data is None:
                self.extra_data = dict()
            # JSON columns do not track in-place mutation; flag explicitly.
            flag_modified(self, 'extra_data')
            db.session.merge(self)
class TemplateDefinition(db.Model, object):
    """Representation of a template definition."""

    __tablename__ = 'sequencegenerator_template'

    COUNTER_REGEX = re.compile(r'({counter(!.)?(:.*)?})')
    """Regular expression matching the counter inside the template string."""

    name = db.Column(db.String(255), primary_key=True)
    """The identifier of the template definition."""

    meta_template = db.Column(db.String(255), unique=True)
    """The template generator."""

    parent_name = db.Column(
        db.ForeignKey(name,
                      name='fk_seqgen_template_parent_name_seqgen_template'))
    """Indicate that the template depends on another one."""

    start = db.Column(db.Integer, default=0)
    """The starting counter of sequences generated from ``meta_template``."""

    step = db.Column(db.Integer, default=1)
    """The incremental step of sequences generated from ``meta_template``."""

    # Self-referential relationship; ``parent`` resolves via the primary key.
    children = db.relationship('TemplateDefinition',
                               backref=db.backref('parent', remote_side=name))

    @validates('meta_template')
    def validate_meta_template(self, key, value):
        """Validate template string of template definition.

        :raises InvalidTemplate: if the string has no counter placeholder.
        """
        if not self.COUNTER_REGEX.search(value):
            raise InvalidTemplate('No counter placeholder')
        return value

    def counter(self, **kwargs):
        """Get counter of this template definition, based on given kwargs.

        Lazily creates (and adds to the session) a Counter starting at
        ``self.start`` the first time a given kwargs combination is seen.
        """
        # Escape the counter placeholder so it survives a first formatting
        # pass with the kwargs context.
        meta_template = double_counter(self.meta_template, self.COUNTER_REGEX)
        counter = Counter.get(meta_template, kwargs)
        if counter is None:
            with db.session.begin_nested():
                counter = Counter.create(
                    meta_template=meta_template,
                    ctx=kwargs,
                    counter=self.start,
                    template_definition=self,
                )
                db.session.add(counter)
        return counter

    def __repr__(self):
        """Canonical representation of ``TemplateDefinition``."""
        return ('TemplateDefinition('
                'name={0.name!r}, '
                'meta_template={0.meta_template!r}, '
                'start={0.start!r}, '
                'step={0.step!r})').format(self)
class RecordMetadata(db.Model, RecordMetadataBase):
    """Represent a marc21 record metadata."""

    __tablename__ = "marc21_records_metadata"

    # Enables SQLAlchemy-Continuum versioning.
    __versioned__ = {}

    # Bucket holding the files attached to this record.
    bucket_id = db.Column(UUIDType, db.ForeignKey(Bucket.id))
    bucket = db.relationship(Bucket)
class RecordMetadata(db.Model, RecordMetadataBase):
    """Model for mock module metadata."""

    __tablename__ = 'mock_metadata'

    # Optional expiry timestamp for the record.
    expires_at = db.Column(db.DateTime(), nullable=True)

    # Bucket holding the files attached to this record.
    bucket_id = db.Column(UUIDType, db.ForeignKey(Bucket.id))
    bucket = db.relationship(Bucket)
class DraftMetadata(db.Model, DraftMetadataBase, ParentRecordMixin):
    """Represent a marc21 record draft metadata."""

    __tablename__ = "marc21_drafts_metadata"
    # ParentRecordMixin links this draft to its parent record model.
    __parent_record_model__ = ParentMetadata

    # Enables SQLAlchemy-Continuum versioning.
    __versioned__ = {}

    # Bucket holding the files attached to this draft.
    bucket_id = db.Column(UUIDType, db.ForeignKey(Bucket.id))
    bucket = db.relationship(Bucket)
class ConferenceLiterature(db.Model):
    """Keeps track of proceedings and contributions
    linked to a Conference Record."""

    __tablename__ = "conference_literature"

    __table_args__ = (
        db.Index(
            "ix_conference_literature_conference_uuid",
            "conference_uuid",
            "relationship_type",
        ),
        db.Index("ix_conference_literature_literature_uuid",
                 "literature_uuid"),
    )

    # Conference record side of the link; part of the composite primary key.
    conference_uuid = db.Column(
        UUIDType,
        db.ForeignKey("records_metadata.id",
                      name="fk_conference_literature_conference_uuid"),
        nullable=False,
        primary_key=True,
    )
    # Literature record side of the link; part of the composite primary key.
    literature_uuid = db.Column(
        UUIDType,
        db.ForeignKey("records_metadata.id",
                      name="fk_conference_literature_literature_uuid"),
        nullable=False,
        primary_key=True,
    )
    # Whether the literature is a proceedings or a contribution.
    relationship_type = db.Column(
        Enum(
            ConferenceToLiteratureRelationshipType,
            name="enum_conference_to_literature_relationship_type",
        ),
        primary_key=True,
    )

    conference = db.relationship(RecordMetadata,
                                 backref="conference_documents",
                                 foreign_keys=[conference_uuid])
    conference_document = db.relationship(RecordMetadata,
                                          backref="conferences",
                                          foreign_keys=[literature_uuid])
class CommunityMetadata(db.Model, RecordMetadataBase):
    """Represent a community."""

    __tablename__ = 'communities_metadata'

    # Enables SQLAlchemy-Continuum versioning
    __versioned__ = {}

    # Bucket holding the files attached to this community.
    bucket_id = db.Column(UUIDType, db.ForeignKey(Bucket.id))
    bucket = db.relationship(Bucket)
class LOMDraftMetadata(db.Model, DraftMetadataBase, ParentRecordMixin):
    """Flask-SQLAlchemy model for "lom_drafts_metadata"-SQL-table."""

    __tablename__ = "lom_drafts_metadata"

    # ParentRecordMixin adds to __parent_record_model__ a foreign key to self
    __parent_record_model__ = LOMParentMetadata

    # Bucket holding the files attached to this draft.
    bucket_id = db.Column(UUIDType, db.ForeignKey(Bucket.id))
    bucket = db.relationship(Bucket)
"""Get profile by username.""" return cls.query.filter( UserProfile._username == username.lower() ).one() @classmethod def get_by_userid(cls, user_id): """Get profile by username.""" return cls.query.filter_by(user_id=user_id).one_or_none() @property def is_anonymous(self): """Return whether this UserProfile is anonymous.""" return False def set_userprofile(target, value, oldvalue, initiator): """Create an instance of UserProfile when initializing User.profile.""" if not isinstance(value, UserProfile): return UserProfile(**value) return value User.profile = db.relationship( UserProfile, back_populates='user', uselist=False, cascade='all, delete-orphan', ) # Setup listener on User.userprofile attribute, instructing # it to use the return value. listen(User.profile, 'set', set_userprofile, retval=True)