class UserGroup(DeclarativeBase, TimeStamp, UserGroupMixin):

    __tablename__ = "usergroup"
    __versioned__ = {}

    id = integer_pk()
    name = C(Unicode, nullable=False, unique=True)
    description = C(UnicodeText)
    hidden_edit_functions = C(ARRAY(Unicode), server_default="{}")
    is_editor_group = C(Boolean, server_default="false")
    is_workflow_editor_group = C(Boolean, server_default="false")
    is_admin_group = C(Boolean, server_default="false")

    users = association_proxy("user_assocs", "user", creator=lambda u: UserToUserGroup(user=u))

    @property
    def user_names(self):
        _user_names = [unicode(u) for u in self.users]
        return sorted(_user_names, key=unicode.lower)

    @property
    def metadatatype_access(self):
        from schema.schema import Metadatatype
        return db.query(Metadatatype).join(NodeToAccessRuleset).filter_by(ruleset_name=self.name).all()

    def __unicode__(self):
        return self.name

    def __repr__(self):
        return u"UserGroup<{} '{}'> ({})".format(self.id, self.name, object.__repr__(self)).encode("utf8")

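# Usage sketch (illustrative only, not part of the model API): the `users`
# association proxy lets callers append User objects directly; its creator
# lambda builds the UserToUserGroup association row behind the scenes.
# `session` and `some_user` are assumed to be supplied by the caller.
def _example_create_group_with_user(session, some_user):
    group = UserGroup(name=u"editors", is_editor_group=True)
    group.users.append(some_user)  # implicitly creates a UserToUserGroup assoc
    session.add(group)
    session.flush()                # assigns group.id
    return group.user_names        # case-insensitively sorted unicode names
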
class Fts(DeclarativeBase):

    __tablename__ = "fts"

    id = integer_pk()
    # XXX: we allow multiple search items with the same configuration
    # XXX: otherwise, we could use (nid, config, searchtype) as primary key
    # XXX: may change in the future
    nid = C(FK(Node.id, ondelete="CASCADE"))
    config = C(Text)
    searchtype = C(Text)
    tsvec = C(TSVECTOR)

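# Query sketch (illustrative): a full-text match against the tsvector column
# using plain SQLAlchemy operators. The `config` value u"simple" and the
# searchtype filter are assumptions, not canonical values; `db` is the module-level
# database handle already used above.
def _example_fulltext_search(searchstring):
    from sqlalchemy import func
    tsquery = func.to_tsquery(u"simple", searchstring)
    return (db.query(Fts.nid)
            .filter(Fts.searchtype == u"fulltext")
            .filter(Fts.tsvec.op("@@")(tsquery))
            .all())
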
class AccessRule(DeclarativeBase):

    __tablename__ = "access_rule"
    __versioned__ = {}

    id = integer_pk()
    invert_subnet = C(Boolean, server_default="false", index=True)
    invert_date = C(Boolean, server_default="false", index=True)
    invert_group = C(Boolean, server_default="false", index=True)
    group_ids = C(ARRAY(Integer), index=True)
    subnets = C(ARRAY(CIDR), index=True)
    dateranges = C(ARRAY(Daterange), index=True)
    group_names = column_property(mediatumfunc.group_ids_to_names(sql.text("group_ids")))

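# Sketch (illustrative): rules are usually obtained via
# core.permission.get_or_add_access_rule (also used in User.create_home_dir below),
# which returns an existing matching rule or adds a new one, rather than
# instantiating AccessRule directly. The group id here is a placeholder.
def _example_group_rule(some_group_id):
    from core.permission import get_or_add_access_rule
    # a rule that grants access to members of exactly one group
    return get_or_add_access_rule(group_ids=[some_group_id])
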
class AuthenticatorInfo(DeclarativeBase):

    __tablename__ = "authenticator"

    id = integer_pk()
    name = C(Unicode, nullable=False)
    auth_type = C(Unicode, nullable=False)

    @property
    def authenticator_key(self):
        return (self.auth_type, self.name)

    def __unicode__(self):
        return self.auth_type + ":" + self.name

    def __repr__(self):
        return u"AuthenticatorInfo<id: {} key: ({}, {})> ({})".format(
            self.id, self.name, self.auth_type, object.__repr__(self)).encode("utf8")

    __table_args__ = (UniqueConstraint(name, auth_type),)

class User(DeclarativeBase, TimeStamp, UserMixin):

    __tablename__ = "user"
    __versioned__ = {}

    id = integer_pk()
    login_name = C(Unicode, nullable=False)
    display_name = C(Unicode)
    lastname = C(Unicode)
    firstname = C(Unicode)
    telephone = C(Unicode)
    organisation = C(Unicode)
    comment = C(UnicodeText)
    email = C(EmailType)
    password_hash = C(String)
    salt = C(String)
    password = u''

    # user activity
    last_login = C(DateTime)
    active = C(Boolean, server_default="true")

    # options
    can_edit_shoppingbag = C(Boolean, server_default="false")
    can_change_password = C(Boolean, server_default="false")

    home_dir_id = integer_fk("node.id")

    # relationships
    groups = association_proxy("group_assocs", "usergroup", creator=lambda ug: UserToUserGroup(usergroup=ug))
    home_dir = rel("Directory", foreign_keys=[home_dir_id])

    authenticator_info = rel(AuthenticatorInfo)
    authenticator_id = integer_fk(AuthenticatorInfo.id, nullable=False)

    @property
    def group_ids(self):
        return [g.id for g in self.groups]

    @property
    def group_names(self):
        return [g.name for g in self.groups]

    @property
    def is_editor(self):
        return any(g.is_editor_group for g in self.groups)

    @property
    def is_admin(self):
        return any(g.is_admin_group for g in self.groups)

    @property
    def is_guest(self):
        return self.login_name == config.get_guest_name() and self.authenticator_id == 0

    @property
    def is_workflow_editor(self):
        return any(g.is_workflow_editor_group for g in self.groups)

    @property
    def hidden_edit_functions(self):
        return [f for group in self.groups for f in group.hidden_edit_functions or []]

    @property
    def upload_dir(self):
        from contenttypes import Directory
        if self.home_dir:
            return self.home_dir.children.filter(Directory.system_attrs[u"used_as"].astext == u"upload").one()

    @property
    def trash_dir(self):
        from contenttypes import Directory
        if self.home_dir:
            return self.home_dir.children.filter(Directory.system_attrs[u"used_as"].astext == u"trash").one()

    def get_or_add_private_group(self):
        """Gets the private group for this user.
        Creates the group if it's missing and adds it to the session.
        Always use this method and don't create private groups by yourself!

        :rtype: UserGroup
        """
        maybe_group_assoc = [g for g in self.group_assocs if g.private == True]
        if not maybe_group_assoc:
            # the name doesn't really matter, but it must be unique
            group = UserGroup(name=u"_user_{}".format(unicode(self.id)))
            group_assoc = UserToUserGroup(usergroup=group, private=True)
            self.group_assocs.append(group_assoc)
        else:
            group = maybe_group_assoc[0].usergroup

        return group

    def change_password(self, password):
        from core.auth import create_password_hash
        self.password_hash, self.salt = create_password_hash(password)

    def create_home_dir(self):
        from contenttypes.container import Directory, Home
        from core.database.postgres.permission import AccessRulesetToRule
        from core.permission import get_or_add_access_rule
        s = object_session(self)
        home_root = s.query(Home).one()
        homedir_name = self.login_name
        home = Directory(homedir_name)
        home_root.container_children.append(home)
        home.children.extend(create_special_user_dirs())
        # add access rules so only the user itself can access the home dir
        private_group = self.get_or_add_private_group()
        # we need the private group ID, it's set on flush by the DB
        s.flush()
        user_access_rule = get_or_add_access_rule(group_ids=[private_group.id])

        for access_type in (u"read", u"write", u"data"):
            ruleset = home.get_or_add_special_access_ruleset(access_type)
            arr = AccessRulesetToRule(rule=user_access_rule)
            ruleset.rule_assocs.append(arr)

        self.home_dir = home
        logg.info("created home dir for user '%s (id: %s)'", self.login_name, self.id)
        return home

    # Flask-Login integration functions

    def is_authenticated(self):
        return not self.is_guest

    def is_active(self):
        return not self.is_guest

    @property
    def is_anonymous(self):
        return self.is_guest

    def __eq__(self, other):
        '''
        Checks the equality of two `UserMixin` objects using `get_id`.
        '''
        if isinstance(other, UserMixin):
            return self.get_id() == other.get_id()
        return NotImplemented

    def __ne__(self, other):
        '''
        Checks the inequality of two `UserMixin` objects using `get_id`.
        '''
        equal = self.__eq__(other)
        if equal is NotImplemented:
            return NotImplemented
        return not equal

    def get_id(self):
        return unicode(self.id)

    def __unicode__(self):
        return u"{} \"{}\" ({}:{})".format(
            self.login_name,
            self.display_name if self.display_name else "",
            self.authenticator_info.auth_type,
            self.authenticator_info.name)

    def __repr__(self):
        return u"User<{} '{}'> ({})".format(self.id, self.login_name, object.__repr__(self)).encode("utf8")

    __table_args__ = (UniqueConstraint(login_name, authenticator_id),)

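# Usage sketch (illustrative): typical setup for a freshly created user.
# Assumptions: `session` is the active SQLAlchemy session, the Home root
# container already exists in the database, and the login name, password and
# authenticator are placeholders.
def _example_setup_user(session, authenticator_info):
    user = User(login_name=u"jdoe", authenticator_info=authenticator_info)
    session.add(user)
    session.flush()              # user.id is needed for the private group name
    user.change_password(u"secret")
    if user.home_dir is None:
        user.create_home_dir()   # also creates the private group and access rules
    return user
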
class File(DeclarativeBase, FileMixin):

    """Represents an item on the filesystem"""

    __versioned__ = {"base_classes": (FileVersionMixin, DeclarativeBase)}

    #: True means: physical file should be deleted when database object is deleted
    unlink_after_deletion = False

    def __init__(self, path, filetype, mimetype, node=None, **kwargs):
        # legacy stuff
        datadir = config.settings["paths.datadir"]
        if path.startswith(datadir):
            warn("file path starts with paths.datadir, should be relative", DeprecationWarning)
            path = path[len(datadir):]
        if "type" in kwargs:
            raise Exception("type keyword arg is not allowed anymore, use filetype")
        if "filename" in kwargs:
            raise Exception("name positional arg is not allowed anymore, use path")

        self.path = path
        self.filetype = filetype
        self.mimetype = mimetype
        if node is not None:
            self.node = node

    __tablename__ = "file"

    id = integer_pk()
    path = C(Unicode(4096))
    filetype = C(Unicode(126))
    mimetype = C(String(255))
    _size = C('size', BigInteger)
    # Checksum/hash columns
    sha512 = C(String(128))  # LargeBinary could be an alternative
    sha512_created_at = C(DateTime())
    sha512_checked_at = C(DateTime())
    sha512_ok = C(Boolean())

    nodes = rel(Node, secondary=NodeToFile.__table__,
                backref=bref("files", lazy="dynamic", query_class=AppenderQueryWithLen), lazy="dynamic")
    _node_objects = rel(Node, secondary=NodeToFile.__table__,
                        backref=bref("file_objects", viewonly=True), viewonly=True)

    def unlink(self):
        if self.exists:
            os.unlink(self.abspath)
        else:
            logg.warn("tried to unlink missing physical file %s at %s, ignored", self.id, self.path)

    def __repr__(self):
        return "File #{} ({}:{}|{}) at {}".format(
            self.id, self.path, self.filetype, self.mimetype, hex(id(self)))

    def __unicode__(self):
        return u"# {} {} {} in {}".format(self.id, self.filetype, self.mimetype, self.path)

    @property
    def size(self):
        """Return size of file in bytes"""
        if self._size is None:
            return get_filesize(self.path)
        return self._size

    @property
    def size_humanized(self):
        """Return string with the size in human-friendly format, e.g. '7.9 kB'"""
        return humanize.naturalsize(self.size)

    def calculate_sha512(self):
        """Calculate the hash from the file on disk."""
        if not self.exists:
            return None
        return sha512_from_file(self.abspath)

    def update_sha512(self):
        """Overwrite the stored checksum value with the current checksum of the file on disk.
        Use with caution, should not be necessary under usual circumstances!
        """
        if not self.exists:
            return None
        logg.info('Updating sha512 for file ID: %s.' % self.id)
        self.sha512 = self.calculate_sha512()
        self.sha512_ok = True
        self.sha512_created_at = self.sha512_checked_at = datetime.utcnow()
        self._size = get_filesize(self.path)
        return self.sha512

    def get_or_create_sha512(self):
        """Return the stored hash. If there is none, create and store it."""
        if not self.exists:
            return None, False
        created = False
        if not self.sha512:
            created = True
            logg.info('Checksum not in DB, creating it for file ID: %s.' % self.id)
            self.update_sha512()
        return self.sha512, created

    def verify_checksum(self):
        """Make sure the file exists and has the same checksum as before"""
        if not self.exists:
            # raise IOError()
            logg.warn('check_checksum: file %s does not exist at %s!' % (self.id, self.abspath))
            self.sha512_ok = None
            return None
        self.sha512_checked_at = datetime.utcnow()
        sha_stored, created = self.get_or_create_sha512()
        if created:
            # checksum was just created, skip a second calculation of the hash
            return True
        else:
            sha_calculated = self.calculate_sha512()
            if sha_stored == sha_calculated and sha_calculated is not None:
                logg.debug('Matching checksums :) for file ID: %s.' % self.id)
                self.sha512_ok = True
            else:
                logg.warn('Checksum mismatch for file ID: %s.' % self.id)
                self.sha512_ok = False
        return self.sha512_ok

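# Usage sketch (illustrative): integrity check over all files attached to a node.
# `node` is assumed to be a Node instance; persisting the updated sha512_* columns
# (session commit) is left to the caller.
def _example_verify_node_files(node):
    broken = []
    for f in node.files:
        ok = f.verify_checksum()   # None if missing on disk, False on mismatch
        if not ok:
            broken.append((f.id, f.path, ok))
    return broken
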