def test_evaluate_double_synonym_attr(self):
    """A synonym of a synonym is still usable as an UPDATE key with
    'evaluate' synchronization."""

    class Foo(object):
        pass

    mapper(
        Foo,
        self.tables.users,
        properties={'uname': synonym("name"), 'ufoo': synonym('uname')},
    )

    sess = Session()
    jill = sess.query(Foo).get(3)
    sess.query(Foo).update(
        {Foo.ufoo: 'moonbeam'}, synchronize_session='evaluate'
    )
    eq_(jill.ufoo, 'moonbeam')
def MongoReference(field, ref_cls, queryset=None):
    """
    Reference to a MongoDB document.

    The value is cached until an assignment is made.  To use a custom
    queryset (instead of the default `ref_cls.objects`), pass it as the
    `queryset` kwarg.
    """
    if queryset is None:
        queryset = ref_cls.objects
    # Per-field cache attribute name, computed once for both closures.
    cache_attr = '_%s__cache' % field

    def _get(obj):
        # Resolve and memoize the referenced document on first access.
        if not hasattr(obj, cache_attr):
            ref_id = getattr(obj, field)
            ref = None if ref_id is None else queryset.get(pk=ref_id)
            setattr(obj, cache_attr, ref)
        return getattr(obj, cache_attr)

    def _set(obj, val):
        # Any assignment invalidates the cached document.
        if hasattr(obj, cache_attr):
            delattr(obj, cache_attr)
        if isinstance(val, ref_cls):
            val = val.pk
        if isinstance(val, ObjectId):
            val = str(val)
        setattr(obj, field, val)

    return synonym(field, descriptor=property(_get, _set))
def _get_password(self):
    # Return the stored (already hashed) password.
    return self._password

def _set_password(self, password):
    # Hash on assignment so plaintext is never stored.
    self._password = hash_password(password)

# Public `password` attribute: a property over the hashed column, wrapped
# in a synonym so it is also usable in ORM queries.
password = property(_get_password, _set_password)
password = synonym('_password', descriptor=password)

def __init__(self, username, password, name, email):
    self.username = username
    self.name = name
    self.email = email
    self.password = password

@classmethod
def get_by_username(cls, username):
    """Return the user with ``username``, or None if absent."""
    return DBSession.query(cls).filter(cls.username == username).first()

@classmethod
def check_password(cls, username, password):
    """Return True iff ``password`` matches the stored hash for
    ``username``; False for unknown users."""
    user = cls.get_by_username(username)
    if not user:
        return False
    return crypt.check(user.password, password)
def test_evaluate_double_synonym_attr(self):
    """Chained synonyms ('ufoo' -> 'uname' -> 'name') evaluate correctly
    during a bulk UPDATE."""

    class Foo(object):
        pass

    mapper(
        Foo,
        self.tables.users,
        properties={"uname": synonym("name"), "ufoo": synonym("uname")},
    )

    session = Session()
    jill = session.query(Foo).get(3)
    session.query(Foo).update(
        {Foo.ufoo: "moonbeam"}, synchronize_session="evaluate"
    )
    eq_(jill.ufoo, "moonbeam")
def test_synonym_comparable(self):
    """Merging an expunged instance works when the mapping mixes a
    synonym with a comparable_property."""
    users = self.tables.users

    class User(object):
        class Comparator(PropComparator):
            pass

        def _getValue(self):
            return self._value

        def _setValue(self, value):
            setattr(self, '_value', value)

        value = property(_getValue, _setValue)

    mapper(
        User,
        users,
        properties={
            'uid': synonym('id'),
            'foobar': comparable_property(User.Comparator, User.value),
        },
    )

    session = create_session()
    u = User()
    u.name = 'ed'
    session.add(u)
    session.flush()
    session.expunge(u)
    session.merge(u)
def _set_password(self, password):
    # Hash the plaintext; falsy (empty/None) passwords leave the stored
    # hash unchanged.
    if password:
        self._password = generate_password_hash(password)

# NOTE(review): `self._get_password` / `self._set_password` are referenced
# through `self` here, which would be undefined at class scope — confirm
# the enclosing scope of this snippet (it may belong inside a factory
# function or have lost its context).
password_descriptor = property(self._get_password, self._set_password)
password = synonym('_password', descriptor=password_descriptor)
def setup_orm():
    """Map GroupMailingListMessage onto its table as a polymorphic
    subclass of ContentItem, wiring up reply/thread self-references and
    author/group relations."""
    tables = meta.metadata.tables
    columns = tables['group_mailing_list_messages'].c
    orm.mapper(
        GroupMailingListMessage,
        tables['group_mailing_list_messages'],
        inherits=ContentItem,
        polymorphic_identity='mailing_list_message',
        polymorphic_on=tables['content_items'].c.content_type,
        properties={
            # Self-referential: a message may be a reply to another message.
            'reply_to': relation(
                GroupMailingListMessage,
                backref=backref('replies'),
                foreign_keys=(columns.reply_to_message_machine_id),
                primaryjoin=columns.id == columns.reply_to_message_machine_id,
                remote_side=(columns.id)),
            # Self-referential thread root; post_update breaks the circular
            # dependency during flush, posts are ordered chronologically.
            'thread': relation(
                GroupMailingListMessage,
                post_update=True,
                order_by=[asc(columns.sent)],
                backref=backref('posts'),
                foreign_keys=(columns.thread_message_machine_id),
                primaryjoin=columns.id == columns.thread_message_machine_id,
                remote_side=(columns.id)),
            'author': relation(User, backref=backref('messages')),
            'group': relation(
                Group,
                primaryjoin=(columns.group_id == tables['groups'].c.id)),
            # Alias the inherited "files" collection as "attachments".
            'attachments': synonym("files")
        })
def test_synonym_comparable(self): users = self.tables.users class User(object): class Comparator(PropComparator): pass def _getValue(self): return self._value def _setValue(self, value): setattr(self, "_value", value) value = property(_getValue, _setValue) mapper( User, users, properties={"uid": synonym("id"), "foobar": comparable_property(User.Comparator, User.value)} ) sess = create_session() u = User() u.name = "ed" sess.add(u) sess.flush() sess.expunge(u) sess.merge(u)
def end(cls):
    """Declarative synonym exposing ``_end`` through its getter/setter pair."""
    descriptor = property(cls._end_getter, cls._end_setter)
    return synonym("_end", descriptor=descriptor)
def start(cls):
    """Declarative synonym exposing ``_start`` through its getter/setter pair."""
    descriptor = property(cls._start_getter, cls._start_setter)
    return synonym("_start", descriptor=descriptor)
def json_synonym(name):
    """Synonym that transparently (de)serializes python objects to JSON
    text in the underlying ``name`` column."""

    def getter(self):
        # decode_datetime restores datetime objects from the stored JSON.
        return json.loads(getattr(self, name), decode_datetime=True)

    def setter(self, entry):
        serialized = json.dumps(entry, encode_datetime=True)
        setattr(self, name, unicode(serialized))

    return synonym(name, descriptor=property(getter, setter))
def status(cls):
    """Declarative synonym over ``_status`` that emits a status-change
    signal whenever the value actually changes."""

    def _read(self):
        return self._status

    def _write(self, value):
        # Only fire the signal on a real transition.
        if self._status != value:
            self._status = value
            signal_execution_status_change.send(self)

    return synonym('_status', descriptor=property(_read, _write))
def target_entity_type(cls):
    """Read-only declarative synonym for ``_target_entity_type``."""
    descriptor = property(
        fget=cls._target_entity_type_getter,
        doc="""The entity type which this object is valid for. Usually it is set to the TargetClass directly. """
    )
    return synonym("_target_entity_type", descriptor=descriptor)
def MongoEmbedded(field, emb_cls):
    """
    Converts the JSON value to/from an EmbeddedDocument.

    Note that a new instance is returned on every access, so any changes
    must be assigned back to the model.
    """

    def _read(obj):
        return emb_cls._from_son(getattr(obj, field))

    def _write(obj, val):
        setattr(obj, field, val.to_mongo())

    return synonym(field, descriptor=property(_read, _write))
def test_synonym_group_bug(self):
    """A map_column synonym coexists with a deferred column group."""
    orders, Order = self.tables.orders, self.classes.Order
    mapper(
        Order,
        orders,
        properties={
            'isopen': synonym('_isopen', map_column=True),
            'description': deferred(orders.c.description, group='foo'),
        },
    )
    session = create_session()
    order = session.query(Order).get(1)
    eq_(order.description, "order 1")
def safe_pickle_synonym(name):
    """Used to store Entry instances into a PickleType column in the database.

    In order to ensure everything can be loaded after code changes, makes
    sure no custom python classes are pickled.
    """

    def only_builtins(item):
        """Casts all subclasses of builtin types to their builtin python type.

        Works recursively on iterables.

        Raises TypeError if passed an object that doesn't subclass a
        builtin type.
        """
        supported_types = [str, unicode, int, float, long, bool, datetime]
        # dict, list, tuple and set are also supported, but handled separately
        if type(item) in supported_types:
            return item
        elif isinstance(item, dict):
            result = {}
            for key, value in item.iteritems():
                try:
                    result[key] = only_builtins(value)
                except TypeError:
                    # Skip values that cannot be reduced to builtins.
                    continue
            return result
        elif isinstance(item, (list, tuple, set)):
            result = []
            for value in item:
                try:
                    result.append(only_builtins(value))
                # BUG FIX: this branch caught ValueError, but only_builtins
                # raises TypeError — unsupported members used to propagate
                # out instead of being skipped like in the dict branch.
                except TypeError:
                    continue
            if isinstance(item, list):
                return result
            elif isinstance(item, tuple):
                return tuple(result)
            else:
                return set(result)
        else:
            # Cast subclasses of supported builtins down to the builtin type.
            for s_type in supported_types:
                if isinstance(item, s_type):
                    return s_type(item)
        # If item isn't a subclass of a builtin python type, raise TypeError.
        raise TypeError('%r is not a subclass of a builtin python type.' %
                        type(item))

    def getter(self):
        return getattr(self, name)

    def setter(self, entry):
        setattr(self, name, only_builtins(entry))

    return synonym(name, descriptor=property(getter, setter))
def test_evaluate_synonym_string(self):
    """A string synonym key in a bulk UPDATE is evaluated against the
    underlying column."""

    class Foo(object):
        pass

    mapper(Foo, self.tables.users, properties={"uname": synonym("name")})

    session = Session()
    jill = session.query(Foo).get(3)
    session.query(Foo).update(
        {"uname": "moonbeam"}, synchronize_session="evaluate"
    )
    eq_(jill.uname, "moonbeam")
def entry_synonym(name):
    """Use json to serialize python objects for db storage."""

    def only_builtins(item):
        """Recursively reduce ``item`` to builtin python types.

        Raises TypeError for objects that cannot be reduced.
        """
        supported_types = (str, unicode, int, float, long, bool, datetime)
        # dict, list, tuple and set are also supported, but handled separately
        if isinstance(item, supported_types):
            return item
        elif isinstance(item, Mapping):
            result = {}
            for key, value in item.items():
                try:
                    result[key] = only_builtins(value)
                except TypeError:
                    # Skip values that cannot be reduced to builtins.
                    continue
            return result
        elif isinstance(item, (list, tuple, set)):
            result = []
            for value in item:
                try:
                    result.append(only_builtins(value))
                # BUG FIX: this branch caught ValueError, but only_builtins
                # raises TypeError — unsupported members used to propagate
                # out instead of being skipped like in the Mapping branch.
                except TypeError:
                    continue
            if isinstance(item, list):
                return result
            elif isinstance(item, tuple):
                return tuple(result)
            else:
                return set(result)
        elif isinstance(item, qualities.Quality):
            # Qualities serialize as their name.
            return item.name
        else:
            # Cast subclasses of supported builtins down to the builtin type.
            for s_type in supported_types:
                if isinstance(item, s_type):
                    return s_type(item)
        # If item isn't a subclass of a builtin python type, raise TypeError.
        raise TypeError('%r is not of type Entry.' % type(item))

    def getter(self):
        return Entry(json.loads(getattr(self, name), decode_datetime=True))

    def setter(self, entry):
        if isinstance(entry, (Entry, dict)):
            setattr(
                self,
                name,
                unicode(json.dumps(only_builtins(dict(entry)),
                                   encode_datetime=True))
            )
        else:
            raise TypeError('%r is not of type Entry or dict.' % type(entry))

    return synonym(name, descriptor=property(getter, setter))
def test_evaluate_synonym_string(self):
    """'evaluate' synchronization resolves a synonym given as a plain
    string key."""

    class Foo(object):
        pass

    mapper(
        Foo,
        self.tables.users,
        properties={'uname': synonym("name")},
    )

    sess = Session()
    jill = sess.query(Foo).get(3)
    sess.query(Foo).update(
        {'uname': 'moonbeam'}, synchronize_session='evaluate'
    )
    eq_(jill.uname, 'moonbeam')
def text_date_synonym(name):
    """Converts Y-M-D date strings into datetime objects"""

    def _get(self):
        return getattr(self, name)

    def _set(self, value):
        # Strings are parsed; datetimes (and None) pass through unchanged.
        if isinstance(value, basestring):
            value = datetime.strptime(value, '%Y-%m-%d')
        setattr(self, name, value)

    return synonym(name, descriptor=property(_get, _set))
def duration(self):
    """Declarative synonym exposing ``_duration`` via its getter/setter."""
    descriptor = property(
        self._duration_getter,
        self._duration_setter,
        doc="""Duration of the entity. It is a datetime.timedelta instance. Showing the difference of the :attr:`.start` and the :attr:`.end`. If edited it changes the :attr:`.end` attribute value."""
    )
    return synonym('_duration', descriptor=descriptor)
def test_parententity_vs_parentmapper(self):
    """_parententity follows the aliased class while _parentmapper always
    points at the base mapper, for both a synonym and the plain attribute."""

    class Point(object):
        pass

    self._fixture(Point, properties={"x_syn": synonym("x")})

    # BUG FIX (cleanup): a first, unused `pa = aliased(Point)` assignment
    # was removed; `pa` is created once, just before it is used below.
    is_(Point.x_syn._parententity, inspect(Point))
    is_(Point.x._parententity, inspect(Point))
    is_(Point.x_syn._parentmapper, inspect(Point))
    is_(Point.x._parentmapper, inspect(Point))
    is_(
        Point.x_syn.__clause_element__()._annotations["parententity"],
        inspect(Point),
    )
    is_(
        Point.x.__clause_element__()._annotations["parententity"],
        inspect(Point),
    )
    is_(
        Point.x_syn.__clause_element__()._annotations["parentmapper"],
        inspect(Point),
    )
    is_(
        Point.x.__clause_element__()._annotations["parentmapper"],
        inspect(Point),
    )

    pa = aliased(Point)

    is_(pa.x_syn._parententity, inspect(pa))
    is_(pa.x._parententity, inspect(pa))
    is_(pa.x_syn._parentmapper, inspect(Point))
    is_(pa.x._parentmapper, inspect(Point))
    is_(
        pa.x_syn.__clause_element__()._annotations["parententity"],
        inspect(pa),
    )
    is_(
        pa.x.__clause_element__()._annotations["parententity"], inspect(pa)
    )
    is_(
        pa.x_syn.__clause_element__()._annotations["parentmapper"],
        inspect(Point),
    )
    is_(
        pa.x.__clause_element__()._annotations["parentmapper"],
        inspect(Point),
    )
def pipe_list_synonym(name):
    """Converts pipe separated text into a list"""

    def _get(self):
        raw = getattr(self, name)
        if raw:
            return raw.strip('|').split('|')

    def _set(self, value):
        # Strings are stored verbatim; other iterables are pipe-joined.
        if isinstance(value, basestring):
            setattr(self, name, value)
        else:
            setattr(self, name, '|'.join(value))

    return synonym(name, descriptor=property(_get, _set))
def test_synonym_group_bug(self):
    """Deferred column groups interoperate with map_column synonyms."""
    orders, Order = self.tables.orders, self.classes.Order
    mapper(
        Order,
        orders,
        properties={
            "isopen": synonym("_isopen", map_column=True),
            "description": deferred(orders.c.description, group="foo"),
        },
    )
    sess = create_session()
    first_order = sess.query(Order).get(1)
    eq_(first_order.description, "order 1")
def create_properties(self):
    """Build the mapper property for this column (deferred or plain) and
    register it, plus an optional synonym, on the mapper."""
    if self.deferred:
        # self.deferred may be True or a group name string.
        group = None
        if isinstance(self.deferred, basestring):
            group = self.deferred
        self.property = deferred(self.column, group=group)
    elif self.name != self.colname:
        # if the property name is different from the column name, we need
        # to add an explicit property (otherwise nothing is needed as it's
        # done automatically by SA)
        self.property = self.column
    if self.property is not None:
        self.add_mapper_property(self.name, self.property)
    if self.synonym:
        self.add_mapper_property(self.synonym, synonym(self.name))
def test_proxy_descriptor_one(self):
    """A synonym proxies through aliased() with the expected repr and SQL."""

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

    self._fixture(Point, properties={"x_syn": synonym("x")})
    pa = aliased(Point)

    eq_(str(Point.x_syn), "Point.x_syn")
    eq_(str(pa.x_syn), "AliasedClass_Point.x_syn")

    session = Session()
    self.assert_compile(
        session.query(pa.x_syn).filter(pa.x_syn > Point.x_syn),
        "SELECT point_1.x AS point_1_x FROM point AS point_1, point "
        "WHERE point_1.x > point.x",
    )
def __init__(cls, classname, bases, dict_):
    """
    Map the Schematics fields to the SQLAlchemy columns using synonym
    properties.
    """
    super(SchemAlchemyModelMeta, cls).__init__(classname, bases, dict_)
    # Classes without a mapper (e.g. abstract bases) need no wiring.
    if not hasattr(cls, '__mapper__'):
        return
    mapper = cls.__mapper__
    prefix = mapper.column_prefix or ''
    for field_name in cls._fields:
        column_name = prefix + field_name
        if column_name not in mapper.all_orm_descriptors:
            continue
        descriptor = cls.__dict__.get(field_name)
        descriptor.column_name = column_name
        mapper.add_property(
            field_name, orm.synonym(column_name, descriptor=descriptor))
def MongoReference(field, ref_cls):
    """
    Reference to a MongoDB table.

    The value is cached until an assignment is made.
    """
    # Per-field cache attribute name, computed once for both closures.
    cache_attr = '_%s__cache' % field

    def _get(obj):
        # Resolve and memoize the referenced document on first access.
        if not hasattr(obj, cache_attr):
            setattr(obj, cache_attr,
                    ref_cls.objects.get(pk=getattr(obj, field)))
        return getattr(obj, cache_attr)

    def _set(obj, val):
        # BUG FIX: the original called delattr(obj, '_%s__cache') without
        # interpolating the field name, so assignments raised
        # AttributeError whenever a cached value existed and the cache was
        # never actually invalidated.
        if hasattr(obj, cache_attr):
            delattr(obj, cache_attr)
        if isinstance(val, ref_cls):
            val = val.pk
        if isinstance(val, ObjectId):
            val = str(val)
        setattr(obj, field, val)

    return synonym(field, descriptor=property(_get, _set))
class User(Base):
    """A registered user account.

    Timestamp columns are stored naive (UTC) and exposed timezone-aware
    through the ``created`` / ``password_changed`` synonyms; the password
    is stored hashed and wrapped by the ``password`` synonym.
    """

    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    salutation = Column(Unicode(10), nullable=False)
    given_name = Column(Unicode(200), nullable=False)
    surname = Column(Unicode(200), nullable=False)
    organization = Column(Unicode(200), default='', nullable=False)
    _password = Column('password', String(200))
    _password_changed = Column('password_changed', DateTime,
                               default=datetime.utcnow, nullable=False)
    resets = relationship(PasswordReset, backref='user',
                          cascade='all, delete-orphan', passive_deletes=True)
    _created = Column('created', DateTime, default=datetime.utcnow,
                      nullable=False)
    # Column width sized to the longest known timezone name.
    timezone_name = Column('timezone',
                           Unicode(max(len(t) for t in pytz.all_timezones)),
                           default='UTC', nullable=False)
    emails = relationship(EmailAddress, backref='user',
                          cascade='all, delete-orphan', passive_deletes=True)
    limits_id = Column(Unicode(20),
                       ForeignKey('user_limits.id', onupdate='RESTRICT',
                                  ondelete='RESTRICT'),
                       nullable=False)
    # limits defined as backref on UserLimit
    templates = relationship(LabelTemplate, backref='creator')
    # user_collections defined as backref on UserCollection
    collections = association_proxy(
        'user_collections', 'role',
        creator=lambda k, v: UserCollection(collection=k, role=v))
    # user_groups defined as backref on Group
    groups = association_proxy('user_groups', 'id')

    def __repr__(self):
        return ('<User: name="%s">' % ' '.join(
            (self.salutation, self.given_name, self.surname))).encode('utf-8')

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __unicode__(self):
        return ' '.join((self.salutation, self.given_name, self.surname))

    def _get_created(self):
        # Naive DB values are assumed UTC; aware values are normalized to UTC.
        if self._created is None:
            return None
        if self._created.tzinfo is None:
            return pytz.utc.localize(self._created)
        else:
            return self._created.astimezone(pytz.utc)

    def _set_created(self, value):
        # Store naive UTC in the database.
        if value.tzinfo is None:
            self._created = value
        else:
            self._created = value.astimezone(pytz.utc).replace(tzinfo=None)

    created = synonym('_created', descriptor=property(_get_created,
                                                      _set_created))

    def _get_password_changed(self):
        # Same naive-UTC convention as _get_created.
        if self._password_changed is None:
            return None
        if self._password_changed.tzinfo is None:
            return pytz.utc.localize(self._password_changed)
        else:
            return self._password_changed.astimezone(pytz.utc)

    def _set_password_changed(self, value):
        if value.tzinfo is None:
            self._password_changed = value
        else:
            self._password_changed = value.astimezone(
                pytz.utc).replace(tzinfo=None)

    password_changed = synonym('_password_changed',
                               descriptor=property(_get_password_changed,
                                                   _set_password_changed))

    @classmethod
    def by_id(cls, id):
        """return the user with id ``id``"""
        return DBSession.query(cls).filter_by(id=id).first()

    @classmethod
    def by_email(cls, email):
        """return the user with an email ``email``"""
        # Only verified addresses count.
        return DBSession.query(cls).join(EmailAddress).\
            filter(EmailAddress.email == email).\
            filter(EmailAddress.verified != None).first()

    def _get_timezone(self):
        """Return the timezone object corresponding to the name"""
        return pytz.timezone(self.timezone_name)

    def _set_timezone(self, value):
        """Set the timezone to the name of the timezone object"""
        self.timezone_name = value.zone

    timezone = synonym('timezone_name', descriptor=property(_get_timezone,
                                                            _set_timezone))

    def _set_password(self, password):
        """Store a hashed version of password"""
        self._password = PASSWORD_CONTEXT.encrypt(password)
        self.password_changed = utcnow()

    def _get_password(self):
        """Return the hashed version of the password"""
        return self._password

    password = synonym('_password', descriptor=property(_get_password,
                                                        _set_password))

    def authenticate(self, password):
        """Check the password against existing credentials"""
        # We call verify_and_update here in case we've defined any new
        # (hopefully stronger) algorithms in the context above. If so,
        # this'll take care of migrating users as they login
        (result, new_password) = PASSWORD_CONTEXT.verify_and_update(
            password, self._password)
        if result and new_password:
            self._password = new_password
        return result

    @property
    def full_name(self):
        return ' '.join((
            self.salutation,
            self.given_name,
            self.surname,
        ))

    @property
    def verified_emails(self):
        # XXX Do this with a query
        return [email for email in self.emails if email.verified]

    @property
    def editable_collections(self):
        # XXX Do this with a query
        return [
            collection
            for collection, role in self.collections.items()
            if role.id in ('editor', 'owner')
        ]

    @property
    def owned_samples(self):
        # XXX Do this with a query
        return [
            sample
            for collection in self.editable_collections
            for sample in collection.all_samples
        ]

    @property
    def storage_used(self):
        # XXX Do this with a query
        return sum(sample.attachments.storage_used
                   for sample in self.owned_samples)
def password(cls):
    """Declarative synonym exposing ``_password`` via its getter/setter."""
    descriptor = property(cls.get_password, cls.set_password)
    return synonym('_password', descriptor=descriptor)
class Sample(Base):
    """A physical sample belonging to a collection.

    Samples form a lineage graph via ``parents``/``children`` and carry an
    append-only event log.  Timestamps are stored naive (UTC) and exposed
    timezone-aware via the ``created`` / ``destroyed`` synonyms.
    """

    __tablename__ = 'samples'

    id = Column(Integer, primary_key=True)
    description = Column(Unicode(200), nullable=False)
    _created = Column('created', DateTime, default=datetime.utcnow,
                      nullable=False)
    _destroyed = Column('destroyed', DateTime)
    location = Column(Unicode(200), default='', nullable=False)
    default_attachment = Column(Unicode(200))
    notes_markup = Column(
        Unicode(8),
        CheckConstraint(
            "notes_markup IN ('text', 'html', 'md', 'rst', 'creole', 'textile')"
        ),
        default='text', nullable=False)
    notes = Column(UnicodeText, default='', nullable=False)
    collection_id = Column(Integer,
                           ForeignKey('collections.id', onupdate='RESTRICT',
                                      ondelete='CASCADE'),
                           nullable=False)
    # collection defined as backref on Collection
    parents = relationship(
        'Sample', secondary='sample_origins',
        primaryjoin='sample_origins.c.sample_id==samples.c.id',
        secondaryjoin='sample_origins.c.parent_id==samples.c.id')
    children = relationship(
        'Sample', secondary='sample_origins',
        primaryjoin='sample_origins.c.parent_id==samples.c.id',
        secondaryjoin='sample_origins.c.sample_id==samples.c.id')
    log = relationship(SampleLogEntry, backref='sample',
                       cascade='all, delete-orphan', passive_deletes=True,
                       order_by=SampleLogEntry._created)
    # sample_codes defined as backref on SampleCode
    codes = association_proxy('sample_codes', 'value',
                              creator=lambda k, v: SampleCode(name=k, value=v))
    # attachments are added by the two event listeners (load and init) below

    @property
    def status(self):
        return 'Destroyed' if self.destroyed else 'Existing'

    def __repr__(self):
        return ('<Sample: description="%s">' %
                self.description).encode('utf-8')

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __unicode__(self):
        return self.description

    @property
    def image(self):
        # The attachment flagged as default, if any.
        return DBSession.query(SampleAttachment).\
            filter(SampleAttachment.sample_id == self.id).\
            filter(SampleAttachment.default == True).first()

    def _get_created(self):
        # Naive DB values are assumed UTC; aware values are normalized to UTC.
        if self._created is None:
            return None
        if self._created.tzinfo is None:
            return pytz.utc.localize(self._created)
        else:
            return self._created.astimezone(pytz.utc)

    def _set_created(self, value):
        if value.tzinfo is None:
            self._created = value
        else:
            self._created = value.astimezone(pytz.utc).replace(tzinfo=None)

    created = synonym('_created', descriptor=property(_get_created,
                                                      _set_created))

    def _get_destroyed(self):
        if self._destroyed is None:
            return None
        if self._destroyed.tzinfo is None:
            return pytz.utc.localize(self._destroyed)
        else:
            return self._destroyed.astimezone(pytz.utc)

    def _set_destroyed(self, value):
        if value.tzinfo is None:
            self._destroyed = value
        else:
            self._destroyed = value.astimezone(pytz.utc).replace(tzinfo=None)

    destroyed = synonym('_destroyed', descriptor=property(_get_destroyed,
                                                          _set_destroyed))

    @classmethod
    def by_id(cls, id):
        """Return the permission object with id ``id``"""
        return DBSession.query(cls).filter_by(id=id).first()

    @classmethod
    def create(cls, creator, collection, **kwargs):
        """Create a new sample"""
        # Only collection owners and editors may create samples.
        assert collection.users[creator].id in ('owner', 'editor')
        sample = cls(collection_id=collection.id, **kwargs)
        sample.log.append(
            SampleLogEntry(creator_id=creator.id, event='create',
                           message='Sample created'))
        return sample

    def destroy(self, destroyer, reason):
        """Mark the sample as destroyed"""
        if self.destroyed:
            raise SampleDestroyed('Sample #%d is already destroyed' % self.id)
        self.log.append(
            SampleLogEntry(creator_id=destroyer.id, event='destroy',
                           message=reason))
        self.destroyed = utcnow()

    @classmethod
    def combine(cls, creator, collection, aliquots, **kwargs):
        """Generate a new sample out of several aliquots"""
        sample = cls.create(creator, collection, **kwargs)
        for aliquot in aliquots:
            if aliquot.destroyed:
                # BUG FIX: this raise previously referenced `self.id`,
                # which does not exist in a classmethod (NameError);
                # report the offending aliquot instead.
                raise SampleDestroyed('Sample #%d is already destroyed' %
                                      aliquot.id)
            aliquot.destroy(creator,
                            'Sample combined into sample #%d' % sample.id)
            sample.parents.append(aliquot)
        return sample

    def split(self, creator, collection, aliquots, aliquant=False, **kwargs):
        """Split this sample into several aliquots"""
        if aliquots < 1:
            raise ValueError('Cannot split a sample into less than 1 aliquot')
        if self.destroyed:
            raise SampleDestroyed('Sample #%d is already destroyed' % self.id)
        aliargs = kwargs.copy()
        if not 'location' in aliargs:
            aliargs['location'] = self.location
        reason = 'Sample destroyed to create %d aliquots%s' % (
            aliquots, ' and an aliquant' if aliquant else '')
        # `aliquots` is rebound from the requested count to the created list.
        aliquots = [
            Sample.create(creator, collection,
                          description='Aliquot %d of sample #%d' %
                          (i + 1, self.id),
                          **aliargs)
            for i in range(aliquots)
        ]
        for aliquot in aliquots:
            aliquot.parents.append(self)
        if aliquant:
            aliargs = kwargs.copy()
            if not 'description' in aliargs:
                aliargs['description'] = 'Aliquant of sample #%d' % self.id
            if not 'location' in aliargs:
                aliargs['location'] = self.location
            aliquant = Sample.create(creator, collection, **aliargs)
            aliquant.parents.append(self)
            aliquots.append(aliquant)
        self.destroy(creator, reason)
        return aliquots
def extra(cls):
    """Declarative synonym over ``_extra``; the getter/setter pair handles
    decryption/encryption of the stored value."""
    descriptor = property(cls.get_extra, cls.set_extra)
    return synonym('_extra', descriptor=descriptor)
def extra(cls):
    """Declarative synonym exposing ``_extra`` via its getter/setter pair."""
    descriptor = property(cls.get_extra, cls.set_extra)
    return synonym('_extra', descriptor=descriptor)
class PasswordReset(Base):
    """A single-use, rate-limited password reset code for a user.

    Creation enforces a minimum interval between requests and a cap on
    concurrently active resets; the ``created`` / ``expiry`` synonyms
    expose naive-UTC columns as timezone-aware datetimes.
    """

    __tablename__ = 'password_resets'

    id = Column(String(32), primary_key=True)
    _created = Column('created', DateTime, default=datetime.utcnow,
                      nullable=False)
    _expiry = Column('expiry', DateTime, nullable=False)
    user_id = Column(Integer,
                     ForeignKey('users.id', onupdate='RESTRICT',
                                ondelete='CASCADE'),
                     nullable=False)
    # user defined as backref on User

    def __init__(self, user):
        # BUG FIX: the original called super().__init__(**kwargs) but no
        # ``kwargs`` name exists in this scope, raising NameError on every
        # instantiation.
        super(PasswordReset, self).__init__()
        # Throttle: at most one request per RESET_INTERVAL seconds.
        if DBSession.query(PasswordReset).\
                filter(PasswordReset.user_id == user.id).\
                filter(PasswordReset.created >
                       (utcnow() - timedelta(seconds=RESET_INTERVAL))).\
                first():
            raise ResetError('A reset was requested for that '
                             'account less than %d seconds ago' %
                             RESET_INTERVAL)
        # Cap the number of concurrently active resets.
        if DBSession.query(PasswordReset).\
                filter(PasswordReset.expiry > utcnow()).\
                filter(PasswordReset.user_id == user.id).\
                count() >= RESET_LIMIT:
            raise ResetError('Too many active resets currently '
                             'exist for this account')
        self.user_id = user.id
        self.expiry = utcnow() + timedelta(seconds=RESET_TIMEOUT)
        # Random hex token sized to fill the id column.
        self.id = os.urandom(
            self.__table__.c.id.type.length // 2).encode('hex')

    def __repr__(self):
        return ('<PasswordReset: id="%s">' % self.id).encode('utf-8')

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __unicode__(self):
        return self.id

    def _get_created(self):
        # Naive DB values are assumed UTC; aware values are normalized to UTC.
        if self._created is None:
            return None
        if self._created.tzinfo is None:
            return pytz.utc.localize(self._created)
        else:
            return self._created.astimezone(pytz.utc)

    def _set_created(self, value):
        if value.tzinfo is None:
            self._created = value
        else:
            self._created = value.astimezone(pytz.utc).replace(tzinfo=None)

    created = synonym('_created', descriptor=property(_get_created,
                                                      _set_created))

    def _get_expiry(self):
        if self._expiry is None:
            return None
        if self._expiry.tzinfo is None:
            return pytz.utc.localize(self._expiry)
        else:
            return self._expiry.astimezone(pytz.utc)

    def _set_expiry(self, value):
        if value.tzinfo is None:
            self._expiry = value
        else:
            self._expiry = value.astimezone(pytz.utc).replace(tzinfo=None)

    expiry = synonym('_expiry', descriptor=property(_get_expiry,
                                                    _set_expiry))

    @classmethod
    def by_id(cls, id):
        """return the password reset record with id ``id``"""
        return DBSession.query(cls).filter_by(id=id).first()

    def reset_password(self, user, new_password):
        """Set ``user``'s password and consume all of their reset codes."""
        if utcnow() > self.expiry:
            raise ResetError('Reset code has expired')
        if user is not self.user:
            raise ResetError('Invalid user for reset code %s' % self.id)
        self.user.password = new_password
        DBSession.query(PasswordReset).\
            filter(PasswordReset.user_id == self.user_id).delete()
class EmailVerification(Base):
    """A single-use, rate-limited verification code for an email address.

    Creation enforces a minimum interval between requests and a cap on
    concurrently active verifications; the ``created`` / ``expiry``
    synonyms expose naive-UTC columns as timezone-aware datetimes.
    """

    __tablename__ = 'email_verifications'

    id = Column(String(32), primary_key=True)
    _created = Column('created', DateTime, default=datetime.utcnow,
                      nullable=False)
    _expiry = Column('expiry', DateTime, nullable=False)
    email_ref = Column(Unicode(200),
                       ForeignKey('email_addresses.email',
                                  onupdate='RESTRICT', ondelete='CASCADE'),
                       nullable=False)
    # email defined as backref on EmailAddress

    def __init__(self, email):
        super(EmailVerification, self).__init__()
        # Throttle: at most one request per VERIFICATION_INTERVAL seconds.
        if DBSession.query(EmailVerification).\
                filter(EmailVerification.email_ref == email).\
                filter(EmailVerification.created >
                       (utcnow() -
                        timedelta(seconds=VERIFICATION_INTERVAL))).first():
            raise VerificationTooFast(
                'A verification was requested for that '
                'email address less than %d seconds ago' %
                VERIFICATION_INTERVAL)
        # Cap the number of concurrently active verifications.
        if DBSession.query(EmailVerification).\
                filter(EmailVerification.email_ref == email).\
                filter(EmailVerification.expiry >
                       utcnow()).count() >= VERIFICATION_LIMIT:
            raise VerificationTooMany('Too many active verifications '
                                      'currently exist for this account')
        self.email_ref = email
        self.expiry = utcnow() + timedelta(seconds=VERIFICATION_TIMEOUT)
        # Random hex token sized to fill the id column.
        self.id = os.urandom(
            self.__table__.c.id.type.length // 2).encode('hex')

    def __repr__(self):
        return ('<EmailVerification: id="%s">' % self.id).encode('utf-8')

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __unicode__(self):
        return self.id

    def _get_created(self):
        # Naive DB values are assumed UTC; aware values are normalized to UTC.
        if self._created is None:
            return None
        if self._created.tzinfo is None:
            return pytz.utc.localize(self._created)
        else:
            return self._created.astimezone(pytz.utc)

    def _set_created(self, value):
        if value.tzinfo is None:
            self._created = value
        else:
            self._created = value.astimezone(pytz.utc).replace(tzinfo=None)

    created = synonym('_created', descriptor=property(_get_created,
                                                      _set_created))

    def _get_expiry(self):
        if self._expiry is None:
            return None
        if self._expiry.tzinfo is None:
            return pytz.utc.localize(self._expiry)
        else:
            return self._expiry.astimezone(pytz.utc)

    def _set_expiry(self, value):
        if value.tzinfo is None:
            self._expiry = value
        else:
            self._expiry = value.astimezone(pytz.utc).replace(tzinfo=None)

    expiry = synonym('_expiry', descriptor=property(_get_expiry,
                                                    _set_expiry))

    @classmethod
    def by_id(cls, id):
        """return the email verification record with id ``id``"""
        return DBSession.query(cls).filter_by(id=id).first()

    def verify(self):
        """Mark the address verified and consume all of its codes."""
        if utcnow() > self.expiry:
            raise VerificationError('Verification code has expired')
        self.email.verified = utcnow()
        DBSession.query(EmailVerification).\
            filter(EmailVerification.email_ref == self.email_ref).delete()
# Market group hierarchy table; parentGroupID is a deferred self-reference
# so rows can be inserted in any order within a transaction.
marketgroups_table = Table(
    "invmarketgroups", gamedata_meta,
    Column("marketGroupID", Integer, primary_key=True),
    Column("marketGroupName", String),
    Column("description", String),
    Column("hasTypes", Boolean),
    Column("parentGroupID", Integer,
           ForeignKey("invmarketgroups.marketGroupID",
                      initially="DEFERRED", deferrable=True)),
    Column("iconID", Integer, ForeignKey("icons.iconID")))

mapper(MarketGroup, marketgroups_table,
       properties={
           "items": relation(Item, backref="marketGroup"),
           # Self-referential parent/children tree.
           "parent": relation(MarketGroup, backref="children",
                              remote_side=[marketgroups_table.c.marketGroupID]),
           "icon": relation(Icon),
           # Friendly aliases for the column names.
           "ID": synonym("marketGroupID"),
           "name": synonym("marketGroupName"),
           # Rarely needed; load lazily.
           "description": deferred(marketgroups_table.c.description)
       })
"_Item__attributes": relation(Attribute, cascade='all, delete, delete-orphan', collection_class=attribute_mapped_collection('name')), "effects": relation(Effect, secondary=typeeffects_table, collection_class=attribute_mapped_collection('name')), "metaGroup": relation(MetaGroup, backref=backref("items", cascade="all,delete")), "varParent": relation(Item, backref=backref("varChildren", cascade="all,delete"), remote_side=items_table.c.typeID), "ID": synonym("typeID"), "name": synonym("typeName{}".format(eos.config.lang)), "description": synonym("_description{}".format(eos.config.lang)), "traits": relation(Traits, primaryjoin=traits_table.c.typeID == items_table.c.typeID, uselist=False), "mutaplasmids": relation( DynamicItem, primaryjoin=dynamicApplicable_table.c.applicableTypeID == items_table.c.typeID, secondaryjoin=dynamicApplicable_table.c.typeID == DynamicItem.typeID, secondary=dynamicApplicable_table,
class User(DeclarativeMappedObject, ActivityMixin):

    """
    Reasonably basic User definition.
    Probably would want additional attributes.
    """

    __tablename__ = 'tg_user'
    __table_args__ = {'mysql_engine': 'InnoDB'}
    user_id = Column(Integer, primary_key=True)
    id = synonym('user_id')
    user_name = Column(Unicode(255), unique=True)
    email_address = Column(Unicode(255), index=True)
    display_name = Column(Unicode(255))
    # Hashed login password; None means no local password is set.
    _password = Column('password', UnicodeText, nullable=True, default=None)
    # crypt()-hashed root password for provisioned systems.
    _root_password = Column('root_password', String(255), nullable=True,
            default=None)
    rootpw_changed = Column(DateTime, nullable=True, default=None)
    openstack_trust_id = Column(Unicode(4000))
    use_old_job_page = Column(Boolean, nullable=False, default=False)
    notify_job_completion = Column(Boolean, nullable=False, default=True)
    notify_broken_system = Column(Boolean, nullable=False, default=True)
    notify_system_loan = Column(Boolean, nullable=False, default=True)
    notify_group_membership = Column(Boolean, nullable=False, default=True)
    notify_reservesys = Column(Boolean, nullable=False, default=True)
    created = Column(DateTime, default=datetime.utcnow)
    disabled = Column(Boolean, nullable=False, default=False)
    removed = Column(DateTime, nullable=True, default=None)
    submission_delegates = relationship('User',
            secondary=SubmissionDelegate.__table__,
            primaryjoin=user_id == SubmissionDelegate.user_id,
            secondaryjoin=user_id == SubmissionDelegate.delegate_id)
    activity = relationship(Activity, back_populates='user')
    config_values_int = relationship(ConfigValueInt, back_populates='user')
    config_values_string = relationship(ConfigValueString,
            back_populates='user')
    user_activity = relationship(UserActivity, back_populates='object',
            primaryjoin=user_id == UserActivity.object_id)
    group_user_assocs = relationship('UserGroup', back_populates='user',
            cascade='all, delete-orphan')
    excluded_group_user_assocs = relationship('ExcludedUserGroup',
            back_populates='user', cascade='all,delete-orphan')
    sshpubkeys = relationship('SSHPubKey', back_populates='user')
    reservations = relationship('Reservation', back_populates='user',
            order_by='Reservation.start_time.desc()')
    system_access_policy_rules = relationship('SystemAccessPolicyRule',
            back_populates='user', cascade='all, delete, delete-orphan')
    notes = relationship('Note', back_populates='user')
    lab_controller = relationship('LabController', uselist=False,
            back_populates='user')
    jobs = relationship('Job', back_populates='owner', cascade_backrefs=False,
            primaryjoin='Job.owner_id == User.user_id')
    tasks = relationship('Task', back_populates='uploader')
    activity_type = UserActivity
    system_pools = relationship('SystemPool', back_populates='owning_user')

    # Matches leading whitespace, consecutive internal whitespace, or
    # trailing whitespace -- anything LDAP normalization would collapse.
    _unnormalized_username_pattern = re.compile(r'^\s|\s\s|\s$')

    @validates('user_name')
    def validate_user_name(self, key, value):
        """Reject empty or LDAP-unnormalized usernames."""
        if not value:
            raise ValueError('Username must not be empty')
        # Reject username values which would be normalized into a different
        # value according to the LDAP normalization rules [RFC4518]. For
        # sanity we always enforce this, even if LDAP is not being used.
        if self._unnormalized_username_pattern.search(value):
            # Fixed: the %r placeholder was previously never applied to a
            # value, so the message always showed a literal '%r'.
            raise ValueError('Username %r contains unnormalized whitespace'
                    % value)
        return value

    @validates('display_name')
    def validate_display_name(self, key, value):
        if not value:
            raise ValueError('Display name must not be empty')
        return value

    @validates('email_address')
    def validate_email_address(self, key, value):
        if not value:
            raise ValueError('Email address must not be empty')
        email_validator = validators.Email(not_empty=True)
        try:
            value = email_validator.to_python(value)
        except Invalid as e:
            raise ValueError('Invalid email address: %s' % e)
        return value

    def __json__(self):
        """Minimal JSON representation, safe for any authenticated caller."""
        return {
            'user_name': self.user_name,
            'display_name': self.display_name,
            'email_address': self.email_address,
            'disabled': self.disabled,
            'removed': self.removed,
        }

    def to_json(self):
        """
        Get a full list of JSON representation data.
        """
        data = self.__json__()
        data['id'] = self.user_id
        if identity.current.user:
            data['can_edit'] = self.can_edit(identity.current.user)
            if data['can_edit']:
                # Sensitive / preference fields are only exposed to users
                # allowed to edit this account.
                data['root_password'] = self._root_password
                data['root_password_changed'] = self.rootpw_changed
                data['root_password_expiry'] = self.rootpw_expiry
                data['ssh_public_keys'] = self.sshpubkeys
                data['submission_delegates'] = self.submission_delegates
                data['use_old_job_page'] = self.use_old_job_page
                data['notify_job_completion'] = self.notify_job_completion
                data['notify_broken_system'] = self.notify_broken_system
                data['notify_system_loan'] = self.notify_system_loan
                data['notify_group_membership'] = self.notify_group_membership
                data['notify_reservesys'] = self.notify_reservesys
                data['openstack_trust_id'] = self.openstack_trust_id
            data['can_change_password'] = \
                self.can_change_password(identity.current.user)
            data['can_edit_keystone_trust'] = self.can_edit_keystone_trust(
                identity.current.user)
        else:
            data['can_edit'] = False
            data['can_change_password'] = False
            data['can_edit_keystone_trust'] = False
        return data

    @property
    def permissions(self):
        """All permissions granted through this user's group memberships."""
        perms = set()
        for g in self.groups:
            perms |= set(g.permissions)
        return perms

    # XXX I would rather do this as a setter on 'submission_delegates'
    # but don't think I can with non declarative
    def add_submission_delegate(self, delegate, service=u'WEBUI'):
        """Add a submission delegate for this user, recording the change."""
        if delegate.is_delegate_for(self):
            raise NoChangeException('%s is already a'
                    ' submission delegate for %s' % (delegate, self))
        else:
            self.submission_delegates.append(delegate)
            self.record_activity(user=self, service=service,
                    field=u'Submission delegate', action=u'Added',
                    old=None, new=delegate.user_name)

    def remove_submission_delegate(self, delegate, service=u'WEBUI'):
        """Remove a submission delegate for this user, recording the change."""
        self.submission_delegates.remove(delegate)
        self.record_activity(user=self, service=service,
                field=u'Submission delegate', action=u'Removed',
                old=delegate.user_name, new=None)

    def is_delegate_for(self, user):
        """Return True if we can delegate jobs on behalf of user"""
        return SubmissionDelegate.query.filter_by(delegate_id=self.user_id,
                user_id=user.user_id).first() is not None

    @property
    def email_link(self):
        """A mailto: anchor element for this user."""
        a = Element('a', {'href': 'mailto:%s' % self.email_address})
        a.text = self.user_name
        return a

    @property
    def href(self):
        """Returns a relative URL for this user's page."""
        return (u'/users/%s' % urllib.quote(self.user_name.encode('utf8')))

    @classmethod
    def by_id(cls, user_id):
        """
        A class method that permits to search users
        based on their user_id attribute.
        """
        return cls.query.filter_by(user_id=user_id).first()

    @classmethod
    def by_user_name(cls, user_name):
        """
        A class method that permits to search users
        based on their user_name attribute.
        """
        # Try to look up the user via local DB first.
        user = cls.query.filter_by(user_name=user_name).first()
        # If user doesn't exist in DB check ldap if enabled.
        ldapenabled = get('identity.ldap.enabled', False)
        autocreate = get('identity.soldapprovider.autocreate', False)
        # Presence of '/' indicates a Kerberos service principal.
        if not user and ldapenabled and autocreate and '/' not in user_name:
            filter = ldap.filter.filter_format('(uid=%s)',
                    [user_name.encode('utf8')])
            ldapcon = ldap.initialize(get('identity.soldapprovider.uri'))
            objects = ldapcon.search_st(
                    get('identity.soldapprovider.basedn', ''),
                    ldap.SCOPE_SUBTREE, filter,
                    timeout=get('identity.soldapprovider.timeout', 20))
            # no match
            if len(objects) == 0:
                return None
            # need exact match
            elif len(objects) > 1:
                return None
            attrs = objects[0][1]
            # LDAP normalization rules means that we might have found a user
            # who doesn't actually match the username we were given.
            if attrs['uid'][0].decode('utf8') != user_name:
                return None
            user = User()
            user.user_name = attrs['uid'][0].decode('utf8')
            user.display_name = attrs['cn'][0].decode('utf8')
            user.email_address = attrs['mail'][0].decode('utf8')
            session.add(user)
            session.flush()
        return user

    @classmethod
    def list_by_name(cls, username, find_anywhere=False, find_ldap_users=True):
        """Return (user_name, display_name) pairs matching the given prefix
        (or substring, if find_anywhere), merged from the DB and LDAP."""
        ldap_users = []
        ldapenabled = get('identity.ldap.enabled', False)
        if ldapenabled and find_ldap_users is True:
            filter = ldap.filter.filter_format('(uid=%s*)',
                    [username.encode('utf8')])
            ldapcon = ldap.initialize(get('identity.soldapprovider.uri'))
            objects = ldapcon.search_st(
                    get('identity.soldapprovider.basedn', ''),
                    ldap.SCOPE_SUBTREE, filter,
                    timeout=get('identity.soldapprovider.timeout', 20))
            ldap_users = [(object[1]['uid'][0].decode('utf8'),
                    object[1]['cn'][0].decode('utf8'))
                    for object in objects]
        if find_anywhere:
            f = User.user_name.like('%%%s%%' % username)
        else:
            f = User.user_name.like('%s%%' % username)
        # Don't return Removed Users
        # They may still be listed in ldap though.
        db_users = [(user.user_name, user.display_name)
                for user in cls.query.filter(f).filter(User.removed == None)]
        return list(set(db_users + ldap_users))

    def can_edit(self, user):
        """
        Is the given user permitted to change this user's details
        (except for their username)?
        """
        if user.is_admin():
            return True
        if user == self:
            return True
        return False

    def can_rename(self, user):
        """
        Is the given user permitted to change this user's username?
        """
        if user.is_admin():
            return True
        # Users are not allowed to change their own usernames.
        return False

    def can_edit_keystone_trust(self, user):
        """
        Is the given user permitted to change this user's OpenStack Keystone
        trust?
        """
        return bool(get('openstack.identity_api_url')) and self.can_edit(user)

    _password_context = passlib.context.CryptContext(
        schemes=['pbkdf2_sha512', 'hex_sha1'],
        # unsalted SHA1 was the scheme inherited from TurboGears 1.0,
        # this allows passwords to match against the old hashes but we will
        # replace it with a new hash on successful login
        deprecated=['hex_sha1'],
    )

    def _set_password(self, raw_password):
        self._password = \
            self._password_context.encrypt(raw_password).decode('ascii')

    def _get_password(self):
        return self._password

    password = property(_get_password, _set_password)

    def can_change_password(self, user):
        """
        Is the given user permitted to reset this user's password?
        """
        if get('identity.ldap.enabled', False):
            filter = ldap.filter.filter_format('(uid=%s)',
                    [self.user_name.encode('utf8')])
            ldapcon = ldap.initialize(get('identity.soldapprovider.uri'))
            objects = ldapcon.search_st(
                    get('identity.soldapprovider.basedn', ''),
                    ldap.SCOPE_SUBTREE, filter,
                    timeout=get('identity.soldapprovider.timeout', 20))
            if len(objects) != 0:
                # LDAP user. No chance of changing password.
                return False
        if user.is_admin():
            return True
        if user == self:
            return True
        return False

    def check_password(self, raw_password):
        """Verify a password against the local hash, falling back to an
        LDAP bind if enabled. Returns True on success."""
        # Empty passwords are not accepted.
        if not raw_password:
            return False
        # If the account has a password set in Beaker, try verifying it.
        if self._password:
            verified, new_hash = self._password_context.verify_and_update(
                    raw_password, self._password)
            if verified:
                if new_hash:
                    log.info('Upgrading obsolete password hash for user %s',
                            self)
                    # replace obsolete hash with new one
                    self._password = new_hash
                return True
            else:
                return False
        # If LDAP is enabled, try an LDAP bind.
        ldapenabled = get('identity.ldap.enabled', False)
        # Presence of '/' indicates a Kerberos service principal.
        if ldapenabled and '/' not in self.user_name:
            filter = ldap.filter.filter_format('(uid=%s)',
                    [self.user_name.encode('utf8')])
            ldapcon = ldap.initialize(get('identity.soldapprovider.uri'))
            objects = ldapcon.search_st(
                    get('identity.soldapprovider.basedn', ''),
                    ldap.SCOPE_SUBTREE, filter,
                    timeout=get('identity.soldapprovider.timeout', 20))
            if len(objects) == 0:
                return False
            elif len(objects) > 1:
                return False
            dn = objects[0][0]
            try:
                rc = ldapcon.simple_bind(dn, raw_password)
                ldapcon.result(rc)
                return True
            except ldap.INVALID_CREDENTIALS:
                return False
        return False

    def can_log_in(self):
        if self.disabled:
            log.warning('Login attempt from disabled account %s',
                    self.user_name)
            return False
        if self.removed:
            log.warning('Login attempt from removed account %s',
                    self.user_name)
            return False
        return True

    def _set_root_password(self, password):
        "Set the password to be used for root on provisioned systems, hashing if necessary"
        if password:
            # A value of the form $algo$salt$hash is assumed to be
            # pre-hashed already (four '$'-separated fields).
            if len(password.split('$')) != 4:
                try:
                    cracklib.VeryFascistCheck(password)
                except ValueError as e:
                    msg = re.sub(r'^it', 'Root password', str(e))
                    raise ValueError(msg)
                salt = ''.join(
                        random.choice(string.digits + string.ascii_letters)
                        for i in range(8))
                self._root_password = crypt.crypt(password, "$1$%s$" % salt)
            else:
                self._root_password = password
            self.rootpw_changed = datetime.utcnow()
        else:
            self._root_password = None
            self.rootpw_changed = None

    def _get_root_password(self):
        """Return this user's root password hash, falling back to a freshly
        salted hash of the site-wide default when none is set."""
        if self._root_password:
            return self._root_password
        else:
            pw = ConfigItem.by_name(u'root_password').current_value()
            if pw:
                salt = ''.join(
                        random.choice(string.digits + string.ascii_letters)
                        for i in range(8))
                return crypt.crypt(pw, "$1$%s$" % salt)

    root_password = property(_get_root_password, _set_root_password)

    @property
    def rootpw_expiry(self):
        if not self._root_password:
            return
        validity = \
            ConfigItem.by_name(u'root_password_validity').current_value()
        if validity:
            return self.rootpw_changed + timedelta(days=validity)

    @property
    def rootpw_expired(self):
        if self.rootpw_expiry and self.rootpw_expiry < datetime.utcnow():
            return True
        else:
            return False

    def __repr__(self):
        return self.user_name

    def is_admin(self):
        return u'admin' in [group.group_name for group in self.groups]

    @hybrid_method
    def in_group(self, check_groups):
        my_groups = [group.group_name for group in self.groups]
        for my_g in check_groups:
            if my_g in my_groups:
                return True
        return False

    @in_group.expression
    def in_group(cls, group): #pylint: disable=E0213
        # Inverted groups contain everyone *except* the excluded users.
        if group.membership_type == GroupMembershipType.inverted:
            return not_(cls.excluded_group_user_assocs.any(
                ExcludedUserGroup.group == group))
        else:
            return cls.group_user_assocs.any(UserGroup.group == group)

    def has_permission(self, requested_permission):
        """ Check if user has requested permission """
        try:
            permission = Permission.by_name(requested_permission)
        except NoResultFound:
            permission = None
        if permission in self.permissions:
            return True
        return False

    @property
    def groups(self):
        return session.object_session(self).query(Group)\
                .filter(Group.has_member(self)).all()
class Contributor(Base):
    """Someone (usually human) who contributes to books."""

    __tablename__ = 'contributors'
    id = Column(Integer, primary_key=True)

    # Standard identifiers for this contributor.
    lc = Column(Unicode, index=True)
    viaf = Column(Unicode, index=True)

    # This is the name by which this person is known in the original
    # catalog. It is sortable, e.g. "Twain, Mark".
    _sort_name = Column('sort_name', Unicode, index=True)
    # NOTE(review): mutable Column defaults ([] / {}) are shared objects;
    # confirm SQLAlchemy usage here never mutates the default in place.
    aliases = Column(ARRAY(Unicode), default=[])

    # This is the name we will display publicly. Ideally it will be
    # the name most familiar to readers.
    display_name = Column(Unicode, index=True)

    # This is a short version of the contributor's name, displayed in
    # situations where the full name is too long. For corporate contributors
    # this value will be None.
    family_name = Column(Unicode, index=True)

    # This is the name used for this contributor on Wikipedia. This
    # gives us an entry point to Wikipedia, Wikidata, etc.
    wikipedia_name = Column(Unicode, index=True)

    # This is a short biography for this contributor, probably
    # provided by a publisher.
    biography = Column(Unicode)

    extra = Column(MutableDict.as_mutable(JSON), default={})

    contributions = relationship("Contribution", backref="contributor")

    # Types of roles
    AUTHOR_ROLE = u"Author"
    PRIMARY_AUTHOR_ROLE = u"Primary Author"
    EDITOR_ROLE = u"Editor"
    ARTIST_ROLE = u"Artist"
    PHOTOGRAPHER_ROLE = u"Photographer"
    TRANSLATOR_ROLE = u"Translator"
    ILLUSTRATOR_ROLE = u"Illustrator"
    INTRODUCTION_ROLE = u"Introduction Author"
    FOREWORD_ROLE = u"Foreword Author"
    AFTERWORD_ROLE = u"Afterword Author"
    COLOPHON_ROLE = u"Colophon Author"
    UNKNOWN_ROLE = u'Unknown'
    DIRECTOR_ROLE = u'Director'
    PRODUCER_ROLE = u'Producer'
    EXECUTIVE_PRODUCER_ROLE = u'Executive Producer'
    ACTOR_ROLE = u'Actor'
    LYRICIST_ROLE = u'Lyricist'
    CONTRIBUTOR_ROLE = u'Contributor'
    COMPOSER_ROLE = u'Composer'
    NARRATOR_ROLE = u'Narrator'
    COMPILER_ROLE = u'Compiler'
    ADAPTER_ROLE = u'Adapter'
    PERFORMER_ROLE = u'Performer'
    MUSICIAN_ROLE = u'Musician'
    ASSOCIATED_ROLE = u'Associated name'
    COLLABORATOR_ROLE = u'Collaborator'
    ENGINEER_ROLE = u'Engineer'
    COPYRIGHT_HOLDER_ROLE = u'Copyright holder'
    TRANSCRIBER_ROLE = u'Transcriber'
    DESIGNER_ROLE = u'Designer'
    AUTHOR_ROLES = set([PRIMARY_AUTHOR_ROLE, AUTHOR_ROLE])

    # Map our recognized roles to MARC relators.
    # https://www.loc.gov/marc/relators/relaterm.html
    #
    # This is used when crediting contributors in OPDS feeds.
    MARC_ROLE_CODES = {
        ACTOR_ROLE: 'act',
        ADAPTER_ROLE: 'adp',
        AFTERWORD_ROLE: 'aft',
        ARTIST_ROLE: 'art',
        ASSOCIATED_ROLE: 'asn',
        AUTHOR_ROLE: 'aut',            # Joint author: USE Author
        COLLABORATOR_ROLE: 'ctb',      # USE Contributor
        COLOPHON_ROLE: 'aft',          # Author of afterword, colophon, etc.
        COMPILER_ROLE: 'com',
        COMPOSER_ROLE: 'cmp',
        CONTRIBUTOR_ROLE: 'ctb',
        COPYRIGHT_HOLDER_ROLE: 'cph',
        DESIGNER_ROLE: 'dsr',
        DIRECTOR_ROLE: 'drt',
        EDITOR_ROLE: 'edt',
        ENGINEER_ROLE: 'eng',
        EXECUTIVE_PRODUCER_ROLE: 'pro',
        FOREWORD_ROLE: 'wpr',          # Writer of preface
        ILLUSTRATOR_ROLE: 'ill',
        INTRODUCTION_ROLE: 'win',
        LYRICIST_ROLE: 'lyr',
        MUSICIAN_ROLE: 'mus',
        NARRATOR_ROLE: 'nrt',
        PERFORMER_ROLE: 'prf',
        PHOTOGRAPHER_ROLE: 'pht',
        PRIMARY_AUTHOR_ROLE: 'aut',
        PRODUCER_ROLE: 'pro',
        TRANSCRIBER_ROLE: 'trc',
        TRANSLATOR_ROLE: 'trl',
        UNKNOWN_ROLE: 'asn',
    }

    # People from these roles can be put into the 'author' slot if no
    # author proper is given.
    AUTHOR_SUBSTITUTE_ROLES = [
        EDITOR_ROLE, COMPILER_ROLE, COMPOSER_ROLE, DIRECTOR_ROLE,
        CONTRIBUTOR_ROLE, TRANSLATOR_ROLE, ADAPTER_ROLE, PHOTOGRAPHER_ROLE,
        ARTIST_ROLE, LYRICIST_ROLE, COPYRIGHT_HOLDER_ROLE
    ]

    PERFORMER_ROLES = [ACTOR_ROLE, PERFORMER_ROLE, NARRATOR_ROLE,
                       MUSICIAN_ROLE]

    # Extra fields
    BIRTH_DATE = 'birthDate'
    DEATH_DATE = 'deathDate'

    def __repr__(self):
        # NOTE(review): 'extra' is built but not included in the returned
        # string; preserved as-is to avoid changing repr output.
        extra = ""
        if self.lc:
            extra += " lc=%s" % self.lc
        if self.viaf:
            extra += " viaf=%s" % self.viaf
        return native_string(
            u"Contributor %d (%s)" % (self.id, self.sort_name))

    @classmethod
    def author_contributor_tiers(cls):
        """Yield role groups in decreasing order of author-ness."""
        yield [cls.PRIMARY_AUTHOR_ROLE]
        yield cls.AUTHOR_ROLES
        yield cls.AUTHOR_SUBSTITUTE_ROLES
        yield cls.PERFORMER_ROLES

    @classmethod
    def lookup(cls, _db, sort_name=None, viaf=None, lc=None, aliases=None,
               extra=None, create_new=True, name=None):
        """Find or create a record (or list of records) for the given
        Contributor.

        :return: A tuple of found Contributor (or None), and a boolean flag
            indicating if a new Contributor database object has been created.
        """
        new = False
        contributors = []

        # TODO: Stop using 'name' attribute, everywhere.
        sort_name = sort_name or name
        extra = extra or dict()

        create_method_kwargs = {
            Contributor.sort_name.name: sort_name,
            Contributor.aliases.name: aliases,
            Contributor.extra.name: extra
        }

        if not sort_name and not lc and not viaf:
            raise ValueError(
                "Cannot look up a Contributor without any identifying "
                "information whatsoever!")

        if sort_name and not lc and not viaf:
            # We will not create a Contributor based solely on a name
            # unless there is no existing Contributor with that name.
            #
            # If there *are* contributors with that name, we will
            # return all of them.
            #
            # We currently do not check aliases when doing name lookups.
            q = _db.query(Contributor).filter(
                Contributor.sort_name == sort_name)
            contributors = q.all()
            if contributors:
                return contributors, new
            else:
                try:
                    contributor = Contributor(**create_method_kwargs)
                    _db.add(contributor)
                    flush(_db)
                    contributors = [contributor]
                    new = True
                except IntegrityError:
                    # Lost a creation race: re-query for the winner's row.
                    _db.rollback()
                    contributors = q.all()
                    new = False
        else:
            # We are perfectly happy to create a Contributor based solely
            # on lc or viaf.
            query = dict()
            if lc:
                query[Contributor.lc.name] = lc
            if viaf:
                query[Contributor.viaf.name] = viaf

            if create_new:
                contributor, new = get_one_or_create(
                    _db, Contributor,
                    create_method_kwargs=create_method_kwargs,
                    on_multiple='interchangeable', **query)
                if contributor:
                    contributors = [contributor]
            else:
                contributor = get_one(_db, Contributor, **query)
                if contributor:
                    contributors = [contributor]

        return contributors, new

    @property
    def sort_name(self):
        return self._sort_name

    @sort_name.setter
    def sort_name(self, new_sort_name):
        """See if the passed-in value is in the prescribed Last, First format.
        If it is, great, set self._sort_name to the new value.

        If new value is not in correct format, then attempt to re-format the
        value to look like: "Last, First Middle, Dr./Jr./etc.".

        Note: If for any reason you need to force the sort_name to an
        improper value, set it like so: contributor._sort_name="Foo Bar",
        and you'll avoid further processing.

        Note: For now, have decided to not automatically update any
        edition.sort_author that might have contributions by this
        Contributor.
        """
        if not new_sort_name:
            self._sort_name = None
            return

        # simplistic test of format, but catches the most frequent problem
        # where display-style names are put into sort name metadata by
        # third parties.
        if new_sort_name.find(",") == -1:
            # auto-magically fix syntax
            self._sort_name = display_name_to_sort_name(new_sort_name)
            return

        self._sort_name = new_sort_name

    # tell SQLAlchemy to use the sort_name setter for sort_name,
    # not _sort_name, after all.
    sort_name = synonym('_sort_name', descriptor=sort_name)

    def merge_into(self, destination):
        """Two Contributor records should be the same.

        Merge this one into the other one.

        For now, this should only be used when the exact same record comes
        in through two sources. It should not be used when two Contributors
        turn out to represent different names for the same human being,
        e.g. married names or (especially) pen names. Just because we
        haven't thought that situation through well enough.
        """
        if self == destination:
            # They're already the same.
            return
        logging.info(u"MERGING %r (%s) into %r (%s)",
                     self, self.viaf, destination, destination.viaf)

        # make sure we're not losing any names we know for the contributor
        existing_aliases = set(destination.aliases)
        new_aliases = list(destination.aliases)
        for name in [self.sort_name] + self.aliases:
            if name != destination.sort_name and name not in existing_aliases:
                new_aliases.append(name)
        if new_aliases != destination.aliases:
            destination.aliases = new_aliases

        if not destination.family_name:
            destination.family_name = self.family_name
        if not destination.display_name:
            destination.display_name = self.display_name
        # keep sort_name if one of the contributor objects has it.
        if not destination.sort_name:
            destination.sort_name = self.sort_name
        if not destination.wikipedia_name:
            destination.wikipedia_name = self.wikipedia_name

        # merge non-name-related properties
        for k, v in self.extra.items():
            if k not in destination.extra:
                destination.extra[k] = v
        if not destination.lc:
            destination.lc = self.lc
        if not destination.viaf:
            destination.viaf = self.viaf
        if not destination.biography:
            destination.biography = self.biography

        _db = Session.object_session(self)
        for contribution in self.contributions:
            # Is the new contributor already associated with this
            # Edition in the given role (in which case we delete
            # the old contribution) or not (in which case we switch the
            # contributor ID)?
            existing_record = _db.query(Contribution).filter(
                Contribution.contributor_id == destination.id,
                Contribution.edition_id == contribution.edition.id,
                Contribution.role == contribution.role)
            if existing_record.count():
                _db.delete(contribution)
            else:
                contribution.contributor_id = destination.id

        _db.commit()
        _db.delete(self)
        _db.commit()

    # Regular expressions used by default_names().
    PARENTHETICAL = re.compile(r"\([^)]*\)")
    # Fixed: was "[a-zA-z]" -- the A-z range also matched the punctuation
    # characters between 'Z' and 'a' ([, \, ], ^, _, `).
    ALPHABETIC = re.compile("[a-zA-Z]")
    NUMBERS = re.compile("[0-9]")
    DATE_RES = [re.compile(r"\(?" + x + r"\)?") for x in (
        r"[0-9?]+-",
        r"[0-9]+st cent",
        r"[0-9]+nd cent",
        r"[0-9]+th cent",
        # Fixed: was "\bcirca" in a non-raw string, i.e. a literal backspace
        # character rather than a word-boundary assertion, so "circa" dates
        # were never stripped.
        r"\bcirca",
    )]

    def default_names(self, default_display_name=None):
        """Attempt to derive a family name ("Twain") and a display name
        ("Mark Twain") from a catalog name ("Twain, Mark").

        This is full of pitfalls, which is why we prefer to use data from
        VIAF. But when there is no data from VIAF, the output of this
        algorithm is better than the input in pretty much every case.
        """
        return self._default_names(self.sort_name, default_display_name)

    @classmethod
    def _default_names(cls, name, default_display_name=None):
        """Split out from default_names to make it easy to test."""
        name = name or ""
        display_name = default_display_name

        # Decode HTML-entity ampersands, e.g.
        # "Little, Brown &amp; Co." => "Little, Brown & Co."
        # (Fixed: was replace("&", "&"), a no-op -- presumably an extraction
        # artifact of the entity string.)
        name = name.replace("&amp;", "&")

        # "Philadelphia Broad Street Church (Philadelphia, Pa.)"
        #  => "Philadelphia Broad Street Church"
        name = cls.PARENTHETICAL.sub("", name)
        name = name.strip()

        if ', ' in name:
            # This is probably a personal name.
            parts = name.split(", ")
            if len(parts) > 2:
                # The most likely scenario is that the final part
                # of the name is a date or a set of dates. If this
                # seems true, just delete that part.
                if (cls.NUMBERS.search(parts[-1])
                        or not cls.ALPHABETIC.search(parts[-1])):
                    parts = parts[:-1]

            # The final part of the name may have a date or a set
            # of dates at the end. If so, remove it from that string.
            final = parts[-1]
            for date_re in cls.DATE_RES:
                m = date_re.search(final)
                if m:
                    new_part = final[:m.start()].strip()
                    if new_part:
                        parts[-1] = new_part
                    else:
                        del parts[-1]
                    break

            family_name = parts[0]
            p = parts[-1].lower()
            if (p in ('llc', 'inc', 'inc.')
                    or p.endswith("company")
                    or p.endswith(" co.")
                    or p.endswith(" co")):
                # No, this is a corporate name that contains a comma.
                # It can't be split on the comma, so don't bother.
                family_name = None
                display_name = display_name or name
            if not display_name:
                # The fateful moment. Swap the second string and the
                # first string.
                if len(parts) == 1:
                    display_name = parts[0]
                    family_name = display_name
                else:
                    display_name = parts[1] + " " + parts[0]
                if len(parts) > 2:
                    # There's a leftover bit.
                    if parts[2] in ('Mrs.', 'Mrs', 'Sir'):
                        # "Jones, Bob, Mrs."
                        #  => "Mrs. Bob Jones"
                        display_name = parts[2] + " " + display_name
                    else:
                        # "Jones, Bob, Jr."
                        #  => "Bob Jones, Jr."
                        display_name += ", " + " ".join(parts[2:])
        else:
            # Since there's no comma, this is probably a corporate name.
            family_name = None
            display_name = name

        return family_name, display_name
def setup_mappers(cls):
    """Configure the stock mapping, then expose ``name`` on User under the
    additional attribute ``name_syn``."""
    cls._setup_stock_mapping()
    user_mapper = inspect(cls.classes.User)
    user_mapper.add_property("name_syn", synonym("name"))
class Task(Base):
    """ A job that gets executed.  Has a unique set of params within its Stage. """
    __tablename__ = 'task'
    # FIXME causes a problem with mysql?
    __table_args__ = (UniqueConstraint('stage_id', 'uid', name='_uc1'),)

    id = Column(Integer, primary_key=True)
    uid = Column(String(255), index=True)
    # Resource requests submitted to the DRM.
    mem_req = Column(Integer)
    core_req = Column(Integer)
    cpu_req = synonym('core_req')  # alias for core_req
    time_req = Column(Integer)
    NOOP = Column(Boolean, nullable=False)
    params = Column(MutableDict.as_mutable(JSONEncodedDict), nullable=False,
                    server_default='{}')
    stage_id = Column(ForeignKey('stage.id', ondelete="CASCADE"),
                      nullable=False, index=True)
    log_dir = Column(String(255))
    # output_dir = Column(String(255))
    # Backing column for the 'status' synonym declared below.
    _status = Column(Enum_ColumnType(TaskStatus),
                     default=TaskStatus.no_attempt, nullable=False)
    successful = Column(Boolean, nullable=False)
    started_on = Column(DateTime)  # FIXME this should probably be deleted.  Too hard to determine.
    submitted_on = Column(DateTime)
    finished_on = Column(DateTime)
    attempt = Column(Integer, nullable=False)
    must_succeed = Column(Boolean, nullable=False)
    drm = Column(String(255))
    queue = Column(String(255))
    max_attempts = Column(Integer)
    # Self-referential DAG edges via the TaskEdge association table.
    parents = relationship("Task",
                           secondary=TaskEdge.__table__,
                           primaryjoin=id == TaskEdge.parent_id,
                           secondaryjoin=id == TaskEdge.child_id,
                           backref="children",
                           passive_deletes=True,
                           cascade="save-update, merge, delete",
                           )
    input_map = Column(MutableDict.as_mutable(JSONEncodedDict),
                       nullable=False, server_default='{}')
    output_map = Column(MutableDict.as_mutable(JSONEncodedDict),
                        nullable=False, server_default='{}')

    @property
    def input_files(self):
        """Values of input_map (the input file paths)."""
        return self.input_map.values()

    @property
    def output_files(self):
        """Values of output_map (the output file paths)."""
        return self.output_map.values()

    # command = Column(Text)
    drm_native_specification = Column(String(255))
    drm_jobID = Column(String(255))

    # Resource-usage fields recorded after a task runs.
    profile_fields = ['wall_time', 'cpu_time', 'percent_cpu', 'user_time',
                      'system_time', 'io_read_count', 'io_write_count',
                      'io_read_kb', 'io_write_kb', 'ctx_switch_voluntary',
                      'ctx_switch_involuntary', 'avg_rss_mem_kb',
                      'max_rss_mem_kb', 'avg_vms_mem_kb', 'max_vms_mem_kb',
                      'avg_num_threads', 'max_num_threads', 'avg_num_fds',
                      'max_num_fds', 'exit_status']
    exclude_from_dict = profile_fields + ['command', 'info', 'input_files',
                                          'output_files']

    exit_status = Column(Integer)
    percent_cpu = Column(Integer)
    wall_time = Column(Integer)
    cpu_time = Column(Integer)
    user_time = Column(Integer)
    system_time = Column(Integer)
    avg_rss_mem_kb = Column(Integer)
    max_rss_mem_kb = Column(Integer)
    avg_vms_mem_kb = Column(Integer)
    max_vms_mem_kb = Column(Integer)
    io_read_count = Column(Integer)
    io_write_count = Column(Integer)
    io_wait = Column(Integer)
    io_read_kb = Column(Integer)
    io_write_kb = Column(Integer)
    ctx_switch_voluntary = Column(Integer)
    ctx_switch_involuntary = Column(Integer)
    avg_num_threads = Column(Integer)
    max_num_threads = Column(Integer)
    avg_num_fds = Column(Integer)
    max_num_fds = Column(Integer)
    extra = Column(MutableDict.as_mutable(JSONEncodedDict), nullable=False,
                   server_default='{}')

    @declared_attr
    def status(cls):
        # Synonym over _status whose setter fires a status-change signal
        # whenever the value actually changes.
        def get_status(self):
            return self._status

        def set_status(self, value):
            if self._status != value:
                self._status = value
                signal_task_status_change.send(self)

        return synonym('_status', descriptor=property(get_status, set_status))

    @property
    def workflow(self):
        return self.stage.workflow

    @property
    def log(self):
        return self.workflow.log

    @property
    def finished(self):
        """True once the task reached a terminal status."""
        return self.status in {TaskStatus.successful, TaskStatus.killed,
                               TaskStatus.failed}

    _cache_profile = None

    # Paths of the per-task log artifacts, derived from log_dir.
    output_profile_path = logplus('profile.json')
    output_command_script_path = logplus('command.bash')
    output_stderr_path = logplus('stderr.txt')
    output_stdout_path = logplus('stdout.txt')

    @property
    def stdout_text(self):
        return readfile(self.output_stdout_path)

    @property
    def stderr_text(self):
        # If the stderr file is missing and this is an LSF job, try to
        # append live output via `bpeek`; any failure is appended as text.
        # NOTE(review): drm_jobID is interpolated into a shell command --
        # assumed to always be a DRM-assigned id, not user input; confirm.
        r = readfile(self.output_stderr_path)
        if r == 'file does not exist':
            if self.drm == 'lsf' and self.drm_jobID:
                r += '\n\nbpeek %s output:\n\n' % self.drm_jobID
                try:
                    r += codecs.decode(
                        sp.check_output('bpeek %s' % self.drm_jobID,
                                        shell=True), 'utf-8')
                except Exception as e:
                    r += str(e)
        return r

    @property
    def command_script_text(self):
        # return self.command
        return readfile(self.output_command_script_path).strip() or self.command

    def descendants(self, include_self=False):
        """
        :return: (list) all stages that descend from this stage in the stage_graph
        """
        x = nx.descendants(self.workflow.task_graph(), self)
        if include_self:
            return sorted({self}.union(x),
                          key=lambda task: task.stage.number)
        else:
            return x

    @property
    def label(self):
        """Label used for the taskgraph image"""
        params = '' if len(self.params) == 0 else "\\n {0}".format(
            "\\n".join(["{0}: {1}".format(k, v)
                        for k, v in self.params.items()]))
        return "[%s] %s%s" % (self.id, self.stage.name, params)

    def args_as_query_string(self):
        # URL-encode the task params (Python 2 urllib API).
        import urllib
        return urllib.urlencode(self.params)

    def delete(self, descendants=False):
        # Delete this task (and optionally its DAG descendants) and commit.
        if descendants:
            tasks_to_delete = self.descendants(include_self=True)
            self.log.debug('Deleting %s and %s of its descendants'
                           % (self, len(tasks_to_delete) - 1))
            for t in tasks_to_delete:
                self.session.delete(t)
        else:
            self.log.debug('Deleting %s' % self)
            self.session.delete(self)
        self.session.commit()

    @property
    def url(self):
        return url_for('cosmos.task', ex_name=self.workflow.name,
                       stage_name=self.stage.name, task_id=self.id)

    @property
    def params_pretty(self):
        # e.g. "a='1', b=2" -- strings quoted, other values bare.
        return '%s' % ', '.join(
            '%s=%s' % (k, "'%s'" % v if isinstance(v, basestring) else v)
            for k, v in self.params.items())

    @property
    def params_pformat(self):
        return pprint.pformat(self.params, indent=2, width=1)

    def __repr__(self):
        return "<Task[%s] %s(uid='%s')>" % (
            self.id or 'id_%s' % id(self),
            self.stage.name if self.stage else '',
            self.uid
        )

    def __str__(self):
        return self.__repr__()
def password(cls):
    """Password. The value is decrypted/encrypted when reading/setting the value."""
    accessor = property(cls.get_password, cls.set_password)
    return synonym('_password', descriptor=accessor)
class Group(DeclarativeMappedObject, ActivityMixin): """ A group definition that records changes to the group """ __tablename__ = 'tg_group' __table_args__ = {'mysql_engine': 'InnoDB'} group_id = Column(Integer, primary_key=True) id = synonym('group_id') group_name = Column(Unicode(255), unique=True, nullable=False) display_name = Column(Unicode(255)) description = Column(Unicode(4000)) _root_password = Column('root_password', String(255), nullable=True, default=None) membership_type = Column(GroupMembershipType.db_type(), nullable=False, default=GroupMembershipType.normal, index=True) created = Column(DateTime, default=datetime.utcnow) activity = relationship(GroupActivity, back_populates='object', cascade='all, delete-orphan') permissions = relationship('Permission', back_populates='groups', secondary=group_permission_table) user_group_assocs = relationship('UserGroup', back_populates='group', cascade='all, delete-orphan') excluded_user_group_assocs = relationship('ExcludedUserGroup', back_populates='group', cascade='all, delete-orphan') system_access_policy_rules = relationship('SystemAccessPolicyRule', back_populates='group', cascade='all, delete, delete-orphan') jobs = relationship('Job', back_populates='group', cascade_backrefs=False) system_pools = relationship('SystemPool', back_populates='owning_group') activity_type = GroupActivity @classmethod def by_name(cls, name, lockmode=False): if lockmode: return cls.query.with_lockmode(lockmode).filter(cls.group_name == name).one() else: return cls.query.filter_by(group_name=name).one() @classmethod def by_id(cls, id): with convert_db_lookup_error('No group with ID: %s' % id): return cls.query.filter_by(group_id=id).one() def __unicode__(self): return self.group_name def __str__(self): return unicode(self).encode('utf8') def __repr__(self): return 'Group(group_name=%r, display_name=%r)' % (self.group_name, self.display_name) def __json__(self): data = { 'id': self.group_id, 'group_name': self.group_name, 
'display_name': self.display_name, 'description':self.description, 'membership_type': self.membership_type, } # for backwards compatibility only: if self.membership_type == GroupMembershipType.ldap: data['ldap'] = True else: data['ldap'] = False return data def to_json(self): """ Get a full list of JSON representation data. """ data = self.__json__() data.update({ 'created': self.created, 'owners': [user for user in self.owners()], 'permissions':[permission.permission_name for permission in self.permissions], }) if self.membership_type == GroupMembershipType.inverted: data['members'] = [] data['excluded_users'] = [euga.user for euga in self.excluded_user_group_assocs] else: data['members'] = [uga.user for uga in self.user_group_assocs] data['excluded_users'] = [] if identity.current.user: user = identity.current.user data['can_edit'] = self.can_edit(user) if self.can_edit(user) or user in self.users: data['root_password'] = self.root_password data['can_edit_ldap'] = self.can_edit_ldap(user) data['can_modify_membership'] = self.can_modify_membership(user) data['can_modify_ownership'] = self.can_modify_ownership(user) data['can_add_permission'] = self.can_add_permission(user) data['can_view_rootpassword'] = user in self.users or self.can_edit(user) data['can_delete'] = self.can_edit(user) and not self.is_protected_group() else: data['can_edit'] = False data['can_edit_ldap'] = False data['can_modify_membership'] = False data['can_modify_ownership'] = False data['can_add_permission'] = False data['can_view_rootpassword'] = False data['can_delete'] = False return data @classmethod def list_by_name(cls, name, find_anywhere=False): """ A class method that can be used to search groups based on the group_name """ if find_anywhere: q = cls.query.filter(Group.group_name.like('%%%s%%' % name)) else: q = cls.query.filter(Group.group_name.like('%s%%' % name)) return q @property def root_password(self): """ returns password """ return self._root_password @root_password.setter def 
root_password(self, password): """Set group job password Set the root password to be used by group jobs. """ if password: try: cracklib.VeryFascistCheck(password) except ValueError, msg: msg = re.sub(r'^it', 'Root password', str(msg)) raise ValueError(msg) else: self._root_password = password else:
def val(cls):
    """Expose the private ``_val`` column as a read/write ``val`` synonym."""
    descriptor = property(cls.get_val, cls.set_val)
    return synonym('_val', descriptor=descriptor)
def _set_password(self, password):
    """Hash *password* and store it on the private ``_password`` column.

    Surrounding whitespace is stripped before hashing so accidental
    padding does not change the stored hash.
    """
    if password:
        password = password.strip()
    self._password = generate_password_hash(password)

# Expose the hashed column as a plain ``password`` attribute, both at the
# Python level (property) and inside ORM queries (SQLAlchemy synonym).
# Fix: there is no ``self`` at class-definition time -- the descriptor must
# reference the plain functions from the class namespace (the same pattern
# the sibling User model uses), not ``self._get_password``.
password_descriptor = property(_get_password, _set_password)
password = synonym('_password', descriptor=password_descriptor)
# Classic (imperative) SQLAlchemy mapping of the EVE dogma effect tables.
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import mapper, synonym, relation, deferred

from eos.db import gamedata_meta
from eos.types import Effect, EffectInfo

# Association table: which item types carry which effects (composite PK).
typeeffects_table = Table("dgmtypeeffects", gamedata_meta,
                          Column("typeID", Integer,
                                 ForeignKey("invtypes.typeID"),
                                 primary_key=True, index=True),
                          Column("effectID", Integer,
                                 ForeignKey("dgmeffects.effectID"),
                                 primary_key=True))

# Effect definitions themselves.
effects_table = Table("dgmeffects", gamedata_meta,
                      Column("effectID", Integer, primary_key=True),
                      Column("effectName", String),
                      Column("description", String),
                      Column("published", Boolean),
                      Column("isAssistance", Boolean),
                      Column("isOffensive", Boolean))

# Map EffectInfo; ``description`` is deferred so it is only loaded on access.
mapper(EffectInfo, effects_table,
       properties={"ID": synonym("effectID"),
                   "name": synonym("effectName"),
                   "description": deferred(effects_table.c.description)})

# Map Effect onto the association row; eagerly load its EffectInfo.
mapper(Effect, typeeffects_table,
       properties={"ID": synonym("effectID"),
                   "info": relation(EffectInfo, lazy=False)})

# Convenience proxies so Effect exposes its info's fields directly.
Effect.name = association_proxy("info", "name")
Effect.description = association_proxy("info", "description")
Effect.published = association_proxy("info", "published")
class EmailAddress(Base):
    """One e-mail address belonging to a user; the address itself is the PK.

    Timestamps are stored naive in the database; the ``created`` and
    ``verified`` synonyms convert to/from timezone-aware UTC datetimes
    on access.
    """
    __tablename__ = 'email_addresses'
    email = Column(Unicode(200), primary_key=True)
    user_id = Column(Integer,
                     ForeignKey('users.id', onupdate='RESTRICT',
                                ondelete='CASCADE'),
                     nullable=False)
    # user defined as backref on Users
    _created = Column('created', DateTime, default=datetime.utcnow,
                      nullable=False)
    _verified = Column('verified', DateTime)
    verifications = relationship(EmailVerification, backref='email',
                                 cascade='all, delete-orphan',
                                 passive_deletes=True)

    def __repr__(self):
        # NOTE: Python 2 style -- returns encoded bytes.
        return ('<EmailAddress: email="%s">' % self.email).encode('utf-8')

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __unicode__(self):
        return self.email

    def _get_created(self):
        # Localize the stored naive value (assumed UTC -- consistent with
        # the ``default=datetime.utcnow`` above); convert aware values to UTC.
        if self._created is None:
            return None
        if self._created.tzinfo is None:
            return pytz.utc.localize(self._created)
        else:
            return self._created.astimezone(pytz.utc)

    def _set_created(self, value):
        # Store naive values as-is; strip tzinfo from aware values after
        # converting them to UTC so the column always holds naive UTC.
        if value.tzinfo is None:
            self._created = value
        else:
            self._created = value.astimezone(pytz.utc).replace(tzinfo=None)

    created = synonym('_created',
                      descriptor=property(_get_created, _set_created))

    def _get_verified(self):
        # Same UTC-normalizing read as ``_get_created``.
        if self._verified is None:
            return None
        if self._verified.tzinfo is None:
            return pytz.utc.localize(self._verified)
        else:
            return self._verified.astimezone(pytz.utc)

    def _set_verified(self, value):
        # Same naive-UTC write as ``_set_created``.
        if value.tzinfo is None:
            self._verified = value
        else:
            self._verified = value.astimezone(pytz.utc).replace(tzinfo=None)

    verified = synonym('_verified',
                       descriptor=property(_get_verified, _set_verified))

    @classmethod
    def by_email(cls, email):
        """return the address with email ``email``"""
        return DBSession.query(cls).filter_by(email=email).first()
class POI_address(Base):
    """Address-level POI record: location, contact data, fuel/EV-charging
    amenities and opening hours, plus links to its common POI row and city.
    """
    __tablename__ = 'poi_address'
    _plural_name_ = 'poi_address'

    # --- Identity and linkage ---
    pa_id = Column(Integer, primary_key=True, index=True)
    id = synonym('pa_id')
    poi_common_id = Column(ForeignKey('poi_common.pc_id'), index=True)
    poi_branch = Column(Unicode(128), nullable=True, index=True)

    # --- Address and location ---
    poi_addr_city = Column(ForeignKey('city.city_id'), index=True)
    poi_postcode = Column(Integer)
    poi_city = Column(Unicode(64))
    poi_addr_street = Column(Unicode(128))
    poi_addr_housenumber = Column(Unicode(16))
    poi_conscriptionnumber = Column(Unicode(16))
    # Point geometry in the project-wide default projection.
    poi_geom = Column(
        Geometry('POINT, {}'.format(config.get_geo_default_projection())))
    original = Column(Unicode(128))
    poi_website = Column(Unicode(256))
    poi_description = Column(Unicode(1024))

    # --- Fuel-station amenities ---
    poi_fuel_adblue = Column(Boolean)
    poi_fuel_octane_100 = Column(Boolean)
    poi_fuel_octane_98 = Column(Boolean)
    poi_fuel_octane_95 = Column(Boolean)
    poi_fuel_diesel_gtl = Column(Boolean)
    poi_fuel_diesel = Column(Boolean)
    poi_fuel_lpg = Column(Boolean)
    poi_fuel_e85 = Column(Boolean)
    poi_rent_lpg_bottles = Column(Boolean)
    poi_compressed_air = Column(Boolean)
    poi_restaurant = Column(Boolean)
    poi_food = Column(Boolean)
    poi_truck = Column(Boolean)

    # --- Contact / reference ---
    poi_ref = Column(Unicode(32))
    poi_phone = Column(Unicode(64))
    poi_email = Column(Unicode(64))

    # --- EV charging station attributes ---
    poi_authentication_app = Column(Boolean)
    poi_authentication_none = Column(Boolean)
    poi_authentication_membership_card = Column(Boolean)
    poi_capacity = Column(Integer)
    poi_fee = Column(Boolean)
    poi_parking_fee = Column(Boolean)
    poi_motorcar = Column(Boolean)
    # Socket counts and their power output (free-text, e.g. "50 kW").
    poi_socket_chademo = Column(Integer)
    poi_socket_chademo_output = Column(Unicode(16))
    poi_socket_type2_combo = Column(Integer)
    poi_socket_type2_combo_output = Column(Unicode(16))
    poi_socket_type2_cable = Column(Integer)
    poi_socket_type2_cable_output = Column(Unicode(16))
    poi_socket_type2 = Column(Integer)
    poi_socket_type2_output = Column(Unicode(16))
    poi_manufacturer = Column(Unicode(32))
    poi_model = Column(Unicode(32))

    # --- Opening hours: per-weekday open/close, optional summer schedule
    # and lunch break ---
    poi_opening_hours_nonstop = Column(Boolean)
    poi_opening_hours_mo_open = Column(Time)
    poi_opening_hours_tu_open = Column(Time)
    poi_opening_hours_we_open = Column(Time)
    poi_opening_hours_th_open = Column(Time)
    poi_opening_hours_fr_open = Column(Time)
    poi_opening_hours_sa_open = Column(Time)
    poi_opening_hours_su_open = Column(Time)
    poi_opening_hours_mo_close = Column(Time)
    poi_opening_hours_tu_close = Column(Time)
    poi_opening_hours_we_close = Column(Time)
    poi_opening_hours_th_close = Column(Time)
    poi_opening_hours_fr_close = Column(Time)
    poi_opening_hours_sa_close = Column(Time)
    poi_opening_hours_su_close = Column(Time)
    poi_opening_hours_summer_mo_open = Column(Time)
    poi_opening_hours_summer_tu_open = Column(Time)
    poi_opening_hours_summer_we_open = Column(Time)
    poi_opening_hours_summer_th_open = Column(Time)
    poi_opening_hours_summer_fr_open = Column(Time)
    poi_opening_hours_summer_sa_open = Column(Time)
    poi_opening_hours_summer_su_open = Column(Time)
    poi_opening_hours_summer_mo_close = Column(Time)
    poi_opening_hours_summer_tu_close = Column(Time)
    poi_opening_hours_summer_we_close = Column(Time)
    poi_opening_hours_summer_th_close = Column(Time)
    poi_opening_hours_summer_fr_close = Column(Time)
    poi_opening_hours_summer_sa_close = Column(Time)
    poi_opening_hours_summer_su_close = Column(Time)
    poi_opening_hours_lunch_break_start = Column(Time)
    poi_opening_hours_lunch_break_stop = Column(Time)
    poi_public_holiday_open = Column(Boolean)
    # Raw OSM-style opening_hours string.
    poi_opening_hours = Column(Unicode(256), nullable=True, unique=False,
                               index=True)

    # --- Bookkeeping ---
    poi_good = Column(JSON, nullable=True, index=False)
    poi_bad = Column(JSON, nullable=True, index=False)
    poi_hash = Column(Unicode(128), nullable=True, unique=False, index=True)
    poi_created = Column(DateTime(True), nullable=False,
                         server_default=func.now())
    poi_updated = Column(DateTime(True))
    poi_deleted = Column(DateTime(True))

    # Link to the shared per-brand POI metadata row.
    common = relationship(
        'POI_common',
        primaryjoin='POI_address.poi_common_id == POI_common.pc_id',
        backref='poi_address')
    # Link to the city this address belongs to.
    city = relationship(
        'City',
        primaryjoin='POI_address.poi_addr_city == City.city_id',
        backref='poi_address')
class Collection(Base):
    """A named collection of samples with an owner, a license, and
    per-user roles.

    ``created`` is stored naive in UTC and exposed timezone-aware via a
    synonym; ``license`` maps a license id string to a License object from
    the application registry.
    """
    __tablename__ = 'collections'
    id = Column(Integer, primary_key=True)
    name = Column(Unicode(200), nullable=False)
    _created = Column('created', DateTime, default=datetime.utcnow,
                      nullable=False)
    # collection_users defined as backref on UserCollection
    # Dict-like proxy: maps user -> role through UserCollection rows.
    users = association_proxy(
        'collection_users', 'role',
        creator=lambda k, v: UserCollection(user=k, role=v))
    owner = Column(Unicode(200), nullable=False)
    _license = Column('license', Unicode(30), default='notspecified',
                      nullable=False)
    all_samples = relationship(Sample, backref='collection')

    def __repr__(self):
        # NOTE: Python 2 style -- returns encoded bytes.
        return ('<Collection: name="%s">' % self.name).encode('utf-8')

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __unicode__(self):
        # NOTE(review): returns the numeric id rather than the name --
        # looks intentional but worth confirming against callers.
        return self.id

    def _get_created(self):
        # Localize the stored naive UTC value; convert aware values to UTC.
        if self._created is None:
            return None
        if self._created.tzinfo is None:
            return pytz.utc.localize(self._created)
        else:
            return self._created.astimezone(pytz.utc)

    def _set_created(self, value):
        # Always persist naive UTC.
        if value.tzinfo is None:
            self._created = value
        else:
            self._created = value.astimezone(pytz.utc).replace(tzinfo=None)

    created = synonym('_created',
                      descriptor=property(_get_created, _set_created))

    @classmethod
    def by_id(cls, id):
        """return the collection with id ``id``"""
        return DBSession.query(cls).filter_by(id=id).first()

    def _get_license(self):
        # Resolve the stored license id through the registry's license map.
        return get_current_registry()['licenses']()[self._license]

    def _set_license(self, value):
        # Accept either a License object or a plain license-id string.
        if isinstance(value, License):
            self._license = value.id
        else:
            self._license = value

    license = synonym('_license',
                      descriptor=property(_get_license, _set_license))

    @property
    def existing_samples(self):
        # XXX Do this with a query
        return [sample for sample in self.all_samples
                if not sample.destroyed]

    @property
    def destroyed_samples(self):
        # XXX Do this with a query
        return [sample for sample in self.all_samples if sample.destroyed]
class User(Base):
    """A user login, with credentials and authentication."""
    __tablename__ = 'user'

    id = Column(Integer, primary_key=True)
    created = Column(DateTime, default=datetime.now)
    modified = Column(DateTime, default=datetime.now,
                      onupdate=datetime.now)

    name = Column('name', String(200))
    email = Column(String(100), unique=True, nullable=False)
    active = Column(Boolean, default=True)

    _password = Column('password', String(100))

    def _get_password(self):
        # Raw (hashed) value from the backing column.
        return self._password

    def _set_password(self, password):
        # Strip whitespace before hashing so padding cannot alter the hash.
        # NOTE(review): collapsed source -- presumably the hash assignment is
        # outside the ``if`` so every assignment hashes; confirm upstream.
        if password:
            password = password.strip()
        self._password = generate_password_hash(password)

    # Expose the hashed column as ``password`` both as a Python property and
    # as an ORM-level synonym usable in queries.
    password_descriptor = property(_get_password, _set_password)
    password = synonym('_password', descriptor=password_descriptor)

    def check_password(self, password):
        """Return True iff *password* matches the stored hash."""
        if self.password is None:
            return False
        password = password.strip()
        if not password:
            return False
        return check_password_hash(self.password, password)

    @classmethod
    def authenticate(cls, query, email, password):
        """Look up by e-mail and verify the password.

        Returns ``(user_or_None, authenticated_bool)`` so callers can
        distinguish "unknown user" from "known but wrong password/inactive".
        """
        email = email.strip().lower()
        user = query(cls).filter(cls.email == email).first()
        if user is None:
            return None, False
        if not user.active:
            return user, False
        return user, user.check_password(password)

    # Hooks for Flask-Login.
    #
    # As methods, these are only valid for User instances, so the
    # authentication will have already happened in the view functions.
    #
    # If you prefer, you can use Flask-Login's UserMixin to get these methods.

    def get_id(self):
        return str(self.id)

    def is_active(self):
        return True

    def is_anonymous(self):
        return False

    def is_authenticated(self):
        return True

    def __repr__(self):
        return u'<{self.__class__.__name__}: {self.id}>'.format(self=self)
def create_translation_table(_table_name, foreign_class, relation_name,
                             language_class, relation_lazy='select',
                             **kwargs):
    """Creates a table that represents some kind of data attached to the
    given foreign class, but translated across several languages.  Returns
    the new table's mapped class.  It won't be declarative, but it will
    have a `__table__` attribute so you can retrieve the Table object.

    `foreign_class` must have a `__singlename__`, currently only used to
    create the name of the foreign key column.

    Also supports the notion of a default language, which is attached to
    the session.  This is English by default, for historical and practical
    reasons.

    Usage looks like this:

        class Foo(Base): ...

        create_translation_table('foo_bars', Foo, 'bars',
            name = Column(...),
        )

        # Now you can do the following:
        foo.name
        foo.name_map['en']
        foo.foo_bars['en']

        foo.name_map['en'] = "new name"
        del foo.name_map['en']

        q.options(joinedload(Foo.bars_local))
        q.options(joinedload(Foo.bars))

    The following properties are added to the passed class:

    - `(relation_name)`, a relation to the new table.  It uses a dict-based
      collection class, where the keys are language identifiers and the
      values are rows in the created tables.
    - `(relation_name)_local`, a relation to the row in the new table that
      matches the current default language.
    - `(relation_name)_table`, the class created by this function.

    Note that these are distinct relations.  Even though the former
    necessarily includes the latter, SQLAlchemy doesn't treat them as
    linked; loading one will not load the other.  Modifying both within the
    same transaction has undefined behavior.

    For each column provided, the following additional attributes are added
    to Foo:

    - `(column)_map`, an association proxy onto `foo_bars`.
    - `(column)`, an association proxy onto `foo_bars_local`.

    Pardon the naming disparity, but the grammar suffers otherwise.

    Modifying these directly is not likely to be a good idea.

    For Markdown-formatted columns, `(column)_map` and `(column)` will give
    Markdown objects.
    """
    # n.b.: language_class only exists for the sake of tests, which sometimes
    # want to create tables entirely separate from the pokedex metadata
    foreign_key_name = foreign_class.__singlename__ + '_id'

    Translations = type(
        _table_name,
        (object, ),
        {
            '_language_identifier':
                association_proxy('local_language', 'identifier'),
            'relation_name': relation_name,
            '__tablename__': _table_name,
        })

    # Create the table object
    table = Table(
        _table_name,
        foreign_class.__table__.metadata,
        Column(foreign_key_name,
               Integer,
               ForeignKey(foreign_class.id),
               primary_key=True,
               nullable=False,
               doc=u"ID of the %s these texts relate to" %
                   foreign_class.__singlename__),
        Column('local_language_id',
               Integer,
               ForeignKey(language_class.id),
               primary_key=True,
               nullable=False,
               doc=u"Language these texts are in"),
    )
    Translations.__table__ = table

    # Add ye columns
    # Column objects have a _creation_order attribute in ascending order; use
    # this to get the (unordered) kwargs sorted correctly
    kwitems = list(kwargs.items())
    kwitems.sort(key=lambda kv: kv[1]._creation_order)
    for name, column in kwitems:
        column.name = name
        table.append_column(column)

    # Construct ye mapper
    mapper(Translations, table, properties={
        'foreign_id': synonym(foreign_key_name),
        'local_language': relationship(
            language_class,
            primaryjoin=table.c.local_language_id == language_class.id,
            innerjoin=True),
    })

    # Add full-table relations to the original class
    # Foo.bars_table
    setattr(foreign_class, relation_name + '_table', Translations)
    # Foo.bars
    setattr(
        foreign_class, relation_name,
        relationship(
            Translations,
            primaryjoin=foreign_class.id == Translations.foreign_id,
            collection_class=attribute_mapped_collection('local_language'),
        ))

    # Foo.bars_local
    # This is a bit clever; it uses bindparam() to make the join clause
    # modifiable on the fly. db sessions know the current language and
    # populate the bindparam.
    # The 'dummy' value is to trick SQLA; without it, SQLA thinks this
    # bindparam is just its own auto-generated clause and everything gets
    # f****d up.
    local_relation_name = relation_name + '_local'
    setattr(
        foreign_class, local_relation_name,
        relationship(
            Translations,
            primaryjoin=and_(
                Translations.foreign_id == foreign_class.id,
                Translations.local_language_id == bindparam(
                    '_default_language_id',
                    value='dummy',
                    type_=Integer,
                    required=True),
            ),
            foreign_keys=[
                Translations.foreign_id, Translations.local_language_id
            ],
            uselist=False,
            lazy=relation_lazy,
        ))

    # Add per-column proxies to the original class
    for name, column in kwitems:
        getset_factory = None
        string_getter = column.info.get('string_getter')
        if string_getter:
            getset_factory = _getset_factory_factory(column.name,
                                                     string_getter)

        # Class.(column) -- accessor for the default language's value
        setattr(
            foreign_class, name,
            LocalAssociationProxy(local_relation_name, name,
                                  getset_factory=getset_factory))

        # Class.(column)_map -- accessor for the language dict
        # Need a custom creator since Translations doesn't have an init, and
        # these are passed as *args anyway
        # FIX: bind ``name`` as a default argument -- a plain closure would
        # capture the loop variable late-bound, so every column's creator
        # would write to the LAST column in kwitems instead of its own.
        def creator(language, value, name=name):
            row = Translations()
            row.local_language = language
            setattr(row, name, value)
            return row

        setattr(
            foreign_class, name + '_map',
            association_proxy(relation_name,
                              name,
                              creator=creator,
                              getset_factory=getset_factory))

    # Add to the list of translation classes
    foreign_class.translation_classes.append(Translations)

    # Done
    return Translations
def tableName(cls):
    """camelCase accessor mapped onto the underlying ``table_name`` column."""
    alias = synonym('table_name')
    return alias
class Fragment(Base):
    """
    Class representing a Fragment entity from CREDO.

    Attributes
    ----------
    fragment_id
    ism

    Mapped Attributes
    -----------------
    ChemCompFragments : Query
    ChemComps : Query
        Chemical components that share this fragment.
    """
    __tablename__ = '%s.fragments' % schema['pdbchem']

    # OpenBabel canonical SMILES is simply the stored ``ism`` column.
    ism_ob_can = synonym('ism')

    # Dynamic (query-returning) link to the fragment/chem-comp join rows.
    ChemCompFragments = relationship(
        "ChemCompFragment",
        primaryjoin="ChemCompFragment.fragment_id==Fragment.fragment_id",
        foreign_keys="[ChemCompFragment.fragment_id]",
        lazy='dynamic', uselist=True, innerjoin=True,
        backref=backref('Fragment', uselist=False, innerjoin=True,
                        lazy=False))

    # Chemical components sharing this fragment, via the secondary table.
    ChemComps = relationship(
        "ChemComp",
        query_class=BaseQuery,
        secondary=Base.metadata.tables['%s.chem_comp_fragments'
                                       % schema['pdbchem']],
        primaryjoin="Fragment.fragment_id==ChemCompFragment.fragment_id",
        secondaryjoin="ChemCompFragment.het_id==ChemComp.het_id",
        foreign_keys="[ChemCompFragment.fragment_id, ChemComp.het_id]",
        lazy='dynamic', uselist=True, innerjoin=True)

    # One-to-one links to the RDKit molecule and fingerprint side tables.
    RDMol = relationship(
        "FragmentRDMol",
        primaryjoin="FragmentRDMol.fragment_id==Fragment.fragment_id",
        foreign_keys="[FragmentRDMol.fragment_id]",
        uselist=False, innerjoin=True,
        backref=backref('Fragment', uselist=False, innerjoin=True))

    RDFP = relationship(
        "FragmentRDFP",
        primaryjoin="FragmentRDFP.fragment_id==Fragment.fragment_id",
        foreign_keys="[FragmentRDFP.fragment_id]",
        uselist=False, innerjoin=True,
        backref=backref('Fragment', uselist=False, innerjoin=True))

    def __repr__(self):
        """
        """
        return '<Fragment({self.fragment_id})>'.format(self=self)

    # Alternative SMILES representations live on the Synonyms relation.
    @hybrid_property
    def ism_ob_univ(self):
        return self.Synonyms.ism_ob

    @hybrid_property
    def ism_oe(self):
        return self.Synonyms.ism_oe

    @hybrid_property
    def ism_rdk(self):
        return self.Synonyms.ism_rdk

    @property
    def Children(self):
        """
        Returns all fragments that are derived from this fragment (next
        level in fragmentation hierarchy).
        """
        adaptor = FragmentAdaptor(dynamic=True)
        return adaptor.fetch_all_children(self.fragment_id)

    @property
    def Parents(self):
        """
        """
        adaptor = FragmentAdaptor(dynamic=True)
        return adaptor.fetch_all_parents(self.fragment_id)

    @property
    def Leaves(self):
        """
        Returns all terminal fragments (leaves) of this fragment.
        """
        adaptor = FragmentAdaptor(dynamic=True)
        return adaptor.fetch_all_leaves(self.fragment_id)

    @property
    def Descendants(self):
        """
        Returns all children of this fragment in the complete hierarchy.
        """
        adaptor = FragmentAdaptor(dynamic=True)
        return adaptor.fetch_all_descendants(self.fragment_id)

    # NOTE(review): declared @classmethod but the first parameter is named
    # ``self`` -- works positionally, but renaming to ``cls`` would match
    # convention; confirm no keyword callers before changing.
    @classmethod
    def like(self, smiles):
        """
        Returns an SQL function expression that uses the PostgreSQL trigram
        index to compare the SMILES strings.
        """
        # '%%' is the DBAPI-escaped form of the pg_trgm similarity
        # operator ``%`` -- presumably required by the paramstyle; verify.
        return self.ism.op('%%')(smiles)
def dateCreated(cls):
    """camelCase accessor mapped onto the underlying ``issued_at`` column."""
    alias = synonym('issued_at')
    return alias
class Team(BaseObject):
    ''' Team definition '''

    _name = Column(Unicode(64), unique=True, nullable=False)
    # Writes pass through filter_string() so names only contain
    # whitelisted characters.
    name = synonym(
        '_name',
        descriptor=property(
            lambda self: self._name,
            lambda self, name: setattr(
                self, '_name',
                self.__class__.filter_string(name, " -_"))))
    motto = Column(Unicode(255))
    members = relationship("User",
                           backref=backref("Team", lazy="joined"),
                           cascade="all, delete-orphan")
    files = relationship("FileUpload",
                         backref=backref("Team", lazy="select"))
    pastes = relationship("PasteBin",
                          backref=backref("Team", lazy="select"))
    money = Column(Integer, default=100, nullable=False)
    uuid = Column(String(36), unique=True, nullable=False,
                  default=lambda: str(uuid4()))
    flags = relationship("Flag", secondary=team_to_flag,
                         backref=backref("Team", lazy="select"))
    boxes = relationship("Box", secondary=team_to_box,
                         backref=backref("Team", lazy="select"))
    items = relationship("MarketItem", secondary=team_to_item,
                         backref=backref("Team", lazy="joined"))
    purchased_source_code = relationship(
        "SourceCode", secondary=team_to_source_code,
        backref=backref("Team", lazy="select"))
    hints = relationship("Hint", secondary=team_to_hint,
                         backref=backref("Team", lazy="select"))
    game_levels = relationship("GameLevel", secondary=team_to_game_level,
                               backref=backref("Team", lazy="select"))

    @classmethod
    def all(cls):
        ''' Returns a list of all objects in the database '''
        return dbsession.query(cls).all()

    @classmethod
    def ranks(cls):
        ''' Returns a list of all objects in the database '''
        # Sorted via __cmp__ below: most captured flags first.
        return sorted(dbsession.query(cls).all())

    @classmethod
    def by_id(cls, identifier):
        ''' Returns a the object with id of identifier '''
        return dbsession.query(cls).filter_by(id=identifier).first()

    @classmethod
    def by_uuid(cls, uuid):
        ''' Return and object based on a uuid '''
        return dbsession.query(cls).filter_by(uuid=unicode(uuid)).first()

    @classmethod
    def by_name(cls, team_name):
        ''' Return the team object based on "team_name" '''
        return dbsession.query(cls).filter_by(
            name=unicode(team_name)).first()

    @classmethod
    def filter_string(cls, string, extra_chars=''):
        # Keep only letters/digits plus extra_chars; fall back to 'foobar'
        # when nothing survives.  (Python 2: filter() returns a str here.)
        char_white_list = ascii_letters + digits + extra_chars
        clean = filter(lambda char: char in char_white_list, string)
        return clean if 0 < len(clean) else 'foobar'

    @property
    def levels(self):
        ''' Sorted game_levels '''
        return sorted(self.game_levels)

    def level_flags(self, lvl):
        ''' Given a level number return all flags captured for that level '''
        return filter(lambda flag: flag.game_level.number == lvl,
                      self.flags)

    @property
    def bot_count(self):
        return bot_manager.count_by_team_uuid(self.uuid)
        bot_manager = BotManager.Instance()

    def to_dict(self):
        ''' Use for JSON related tasks; return public data only '''
        return {
            'name': self.name,
            'motto': self.motto,
        }

    def file_by_file_name(self, file_name):
        ''' Return file object based on file_name '''
        ls = self.files.filter_by(file_name=file_name)
        return ls[0] if 0 < len(ls) else None

    def __repr__(self):
        return u'<Team - name: %s, money: %d>' % (self.name, self.money)

    def __str__(self):
        return self.name.encode('ascii', 'ignore')

    # NOTE(review): __eq__/__ne__ defined without __hash__ -- fine on
    # Python 2 (inherits identity hash) but would be unhashable on Py3.
    def __eq__(self, other):
        return self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    def __cmp__(self, other):
        # Inverted ordering: more captured flags sorts FIRST (for ranks()).
        if len(self.flags) < len(other.flags):
            return 1
        elif len(self.flags) == len(other.flags):
            return 0
        else:
            return -1
query = sqlalchemy_query qstr = '%' + querystr + '%' filters = [ cls.name.ilike(qstr), cls.fullname.ilike(qstr), ] # sysadmins can search on user emails import ckan.authz as authz if user_name and authz.is_sysadmin(user_name): filters.append(cls.email.ilike(qstr)) query = query.filter(or_(*filters)) return query @classmethod def user_ids_for_name_or_id(self, user_list=[]): ''' This function returns a list of ids from an input that can be a list of names or ids ''' query = meta.Session.query(self.id) query = query.filter( or_(self.name.in_(user_list), self.id.in_(user_list))) return [user.id for user in query.all()] meta.mapper(User, user_table, properties={'password': synonym('_password', map_column=True)}, order_by=user_table.c.name)
# along with eos. If not, see <http://www.gnu.org/licenses/>. # =============================================================================== from sqlalchemy import Column, String, Integer, Boolean, Table, ForeignKey from sqlalchemy.orm import mapper, synonym, deferred from eos.db import gamedata_meta from eos.gamedata import Effect, ItemEffect typeeffects_table = Table("dgmtypeeffects", gamedata_meta, Column("typeID", Integer, ForeignKey("invtypes.typeID"), primary_key=True, index=True), Column("effectID", Integer, ForeignKey("dgmeffects.effectID"), primary_key=True)) effects_table = Table("dgmeffects", gamedata_meta, Column("effectID", Integer, primary_key=True), Column("effectName", String), Column("description", String), Column("published", Boolean), Column("isAssistance", Boolean), Column("isOffensive", Boolean), Column("resistanceID", Integer)) mapper(Effect, effects_table, properties={ "ID" : synonym("effectID"), "name" : synonym("effectName"), "description": deferred(effects_table.c.description) }) mapper(ItemEffect, typeeffects_table)
def state(self):
    """Expose the private ``_state`` column as a ``state`` synonym whose
    reads and writes go through the instance's get/set hooks."""
    wrapped = property(self.get_state, self.set_state)
    return synonym('_state', descriptor=wrapped)
class Permission(Base):
    """A class to hold permissions.

    Permissions in Stalker defines what one can do or do not. A Permission
    instance is composed by three attributes; access, action and class_name.

    Permissions for all the classes in SOM are generally created by Stalker
    when initializing the database.

    If you created any custom classes to extend SOM you are also responsible
    to create the Permissions for it by calling
    :meth:`stalker.db.register` and passing your class to it. See the
    :mod:`stalker.db` documentation for details.

    :param str access: An Enum value which can have the one of the values of
      ``Allow`` or ``Deny``.

    :param str action: An Enum value from the list ['Create', 'Read',
      'Update', 'Delete', 'List']. Can not be None. The list can be changed
      from stalker.config.Config.default_actions.

    :param str class_name: The name of the class that this action is applied
      to. Can not be None or an empty string.

    Example: Let say that you want to create a Permission specifying a Group
    of Users are allowed to create Projects::

        from stalker import db
        from stalker.models.auth import User, Group, Permission

        # first setup the db with the default database
        #
        # stalker.db.init() will create all the Actions possible with the
        # SOM classes automatically
        #
        # What is left to you is to create the permissions
        db.setup()

        user1 = User(
            name='Test User',
            login='test_user1',
            password='secret',
            email='test1@test.com'
        )

        user2 = User(
            name='Test User',
            login='test_user2',
            password='secret',
            email='test2@test.com'
        )

        group1 = Group(name='users')
        group1.users = [user1, user2]

        # get the permission for the Project class
        project_permission = Permission.query\\
            .filter(Permission.access == 'Allow')\\
            .filter(Permission.action == 'Create')\\
            .filter(Permission.class_name == 'Project')\\
            .first()

        # now we have the permission specifying the allowance of creating a
        # Project

        # to make group1 users able to create a Project we simply add this
        # Permission to the groups permission attribute
        group1.permissions.append(project_permission)

        # and persist this information in the database
        DBSession.add(group1)
        DBSession.commit()
    """
    __tablename__ = 'Permissions'
    __table_args__ = (UniqueConstraint('access', 'action', 'class_name'), {
        "extend_existing": True
    })

    id = Column(Integer, primary_key=True)
    # Columns are kept private; read access goes through the read-only
    # synonyms defined below.
    _access = Column('access', Enum('Allow', 'Deny', name='AccessNames'))
    _action = Column('action', Enum(*defaults.actions, name='ActionNames'))
    _class_name = Column('class_name', String(32))

    def __init__(self, access, action, class_name):
        # Validate eagerly so an invalid Permission can never be constructed.
        self._access = self._validate_access(access)
        self._action = self._validate_action(action)
        self._class_name = self._validate_class_name(class_name)

    def _validate_access(self, access):
        """validates the given access value

        :raises TypeError: if access is not a string
        :raises ValueError: if access is not 'Allow' or 'Deny'
        """
        from stalker import __string_types__
        if not isinstance(access, __string_types__):
            raise TypeError(
                '%s.access should be an instance of str not %s' %
                (self.__class__.__name__, access.__class__.__name__))

        if access not in ['Allow', 'Deny']:
            raise ValueError('%s.access should be "Allow" or "Deny" not %s' %
                             (self.__class__.__name__, access))

        return access

    def _access_getter(self):
        """returns the _access value
        """
        return self._access

    # Read-only: the property has no setter, so `access` can not be
    # changed after construction.
    access = synonym('_access', descriptor=property(_access_getter))

    def _validate_class_name(self, class_name):
        """validates the given class_name value

        :raises TypeError: if class_name is not a string
        """
        from stalker import __string_types__
        if not isinstance(class_name, __string_types__):
            raise TypeError(
                '%s.class_name should be an instance of str not %s' %
                (self.__class__.__name__, class_name.__class__.__name__))

        return class_name

    def _class_name_getter(self):
        """returns the _class_name attribute value
        """
        return self._class_name

    # Read-only synonym, mirroring `access` above.
    class_name = synonym('_class_name',
                         descriptor=property(_class_name_getter))

    def _validate_action(self, action):
        """validates the given action value

        :raises TypeError: if action is not a string
        :raises ValueError: if action is not one of defaults.actions
        """
        from stalker import __string_types__
        if not isinstance(action, __string_types__):
            raise TypeError(
                '%s.action should be an instance of str not %s' %
                (self.__class__.__name__, action.__class__.__name__))

        if action not in defaults.actions:
            raise ValueError(
                '%s.action should be one of the values of %s not %s' %
                (self.__class__.__name__, defaults.actions, action))

        return action

    def _action_getter(self):
        """returns the _action value
        """
        return self._action

    # Read-only synonym, mirroring `access` above.
    action = synonym('_action', descriptor=property(_action_getter))

    def __eq__(self, other):
        """the equality of two Permissions
        """
        # NOTE(review): __eq__ without __hash__ makes instances unhashable
        # on Python 3 — confirm no caller uses Permissions in sets/dict keys.
        return isinstance(other, Permission) \
            and other.access == self.access \
            and other.action == self.action \
            and other.class_name == self.class_name
relation(Group, backref=backref("items", cascade="all,delete")), "_Item__attributes": relation(Attribute, cascade='all, delete, delete-orphan', collection_class=attribute_mapped_collection('name')), "effects": relation(Effect, secondary=typeeffects_table, collection_class=attribute_mapped_collection('name')), "metaGroup": relation( MetaType, primaryjoin=metatypes_table.c.typeID == items_table.c.typeID, uselist=False), "ID": synonym("typeID"), "name": synonym("typeName"), "description": deferred(items_table.c.description), "traits": relation(Traits, primaryjoin=traits_table.c.typeID == items_table.c.typeID, uselist=False), "mutaplasmids": relation(DynamicItem, primaryjoin=dynamicApplicable_table.c.applicableTypeID == items_table.c.typeID, secondaryjoin=dynamicApplicable_table.c.typeID == DynamicItem.typeID, secondary=dynamicApplicable_table,