def get_parent(self, attr_name):
    """Return the parent object behind the relation ``attr_name``.

    The relationship attribute is tried first; when it is unset, the
    foreign-key column is resolved manually and the related class is
    queried for a matching row.
    """
    # the relation may already be populated -- use it directly if so
    loaded = getattr(self, attr_name)
    if loaded:
        return loaded

    # relation is empty: inspect the mapping to locate the local fk column
    cls = type(self)
    rel_prop = getattr(cls, attr_name).property
    local_col, remote_col = rel_prop.local_remote_pairs[0]
    local_key = inspect(cls).get_property_by_column(local_col).key
    fk_value = getattr(self, local_key)
    if not fk_value:
        # no relation and no fk = no parent
        return None

    # look up the parent row by matching the remote column's property
    parent_cls = cls.get_related_class(attr_name)
    remote_key = inspect(parent_cls).get_property_by_column(remote_col).key
    session = ORM.get().sessionmaker()
    return session.query(parent_cls) \
                  .filter_by(**{remote_key: fk_value}) \
                  .first()
def clean_unique_field(self, key, **kwargs):
    """Validate that ``cleaned_data[key]`` is not already in use.

    Extra keyword arguments are added to the uniqueness filter. Raises
    a ValidationError when a *different* row already holds the value.
    """
    value = self.cleaned_data[key]
    if value is None:
        # nothing to check
        return value

    criteria = {key: value}
    criteria.update(kwargs)

    model = self._meta.model
    mapper = inspect(model)
    if mapper.polymorphic_on is not None:
        mapper = mapper.base_mapper
    # if all filter keys exist on the base mapper, query the base class;
    # if the base class is missing any properties, query the
    # polymorphic subclass explicitly
    if all(mapper.has_property(name) for name in criteria):
        model = mapper.class_

    session = ORM.get().sessionmaker()
    existing = (session.query(model)
                       .filter_by(**criteria)
                       .filter_by(**kwargs)
                       .first())
    if existing:
        if identity_key(instance=existing) != identity_key(instance=self.instance):
            # a different row already holds this value
            raise forms.ValidationError(_('This value is already in use'))
    return value
def save(self, commit=False):
    """Attach this object to a session and optionally commit it.

    :param commit: when True, add the object to its session (if not
        already present) and commit the transaction. When False, this
        only resolves a session and performs no writes.
    """
    session = object_session(self)
    if not session:
        # object is detached; fall back to the ORM's default session
        from baph.db.orm import ORM
        orm = ORM.get()
        session = orm.sessionmaker()
    if commit:
        # idiomatic membership test (was: `not self in session`)
        if self not in session:
            session.add(self)
        session.commit()
def has_perm(self, resource, action, filters=None):
    """Return True if this user may perform ``action`` on ``resource``.

    ``filters`` optionally identifies a concrete object via pk/fk
    values; when the protected fks are not all present, the object is
    loaded from the database before the check.
    """
    logger.debug('has_perm %s:%s called for user %s' % (action, resource, self.id))
    logger.debug('filters: %s' % filters)
    filters = filters or {}
    ctx = self.get_context()
    logger.debug('perm context: %s' % ctx)
    from baph.db.orm import ORM
    if not self.is_authenticated():
        # anonymous users hold no permissions
        return False
    perms = self.get_resource_permissions(resource, action)
    if not perms:
        # no applicable permissions at all
        return False
    for perm in perms:
        logger.debug(' perm: %s' % perm)
    orm = ORM.get()
    target_cls = orm.Base._decl_class_registry[tuple(perms)[0].resource]

    # 'add' operations carry no pk info, so a db load is never possible;
    # for other actions a load is required unless every 'protected' fk
    # (or its relation name) appears in the filters
    requires_load = False
    if action != 'add':
        for rel in target_cls._meta.permission_parents:
            fk = tuple(getattr(target_cls, rel).property.local_columns)[0].name
            if rel not in filters and fk not in filters:
                requires_load = True

    session = orm.sessionmaker()
    if requires_load:
        obj = session.query(target_cls).filter_by(**filters).first()
    else:
        # build a transient instance from the filters; keep it out of
        # the session so it is never flushed
        obj = target_cls(**filters)
        if obj in session:
            session.expunge(obj)
    return self.has_obj_perm(resource, action, obj)
def has_perm(self, resource, action, filters=None):
    """Check whether this user may perform ``action`` on ``resource``.

    ``filters`` optionally identifies a specific object via pk/fk
    values; without enough fk info the object is loaded from the db
    before the check.

    NOTE(review): this appears to duplicate another ``has_perm``
    definition in this module -- confirm which one is actually bound.
    """
    logger.debug('has_perm %s:%s called for user %s'
        % (action, resource, self.id))
    logger.debug('filters: %s' % filters)
    if not filters:
        filters = {}
    ctx = self.get_context()
    logger.debug('perm context: %s' % ctx)
    from baph.db.orm import ORM
    if not self.is_authenticated():
        # user is not logged in
        return False
    perms = self.get_resource_permissions(resource, action)
    if not perms:
        # user has no applicable permissions
        return False
    for p in perms:
        logger.debug(' perm: %s' % p)
    orm = ORM.get()
    # all returned perms share a resource name; map it to the class
    cls_name = tuple(perms)[0].resource
    cls = orm.Base._decl_class_registry[cls_name]
    requires_load = False
    if action == 'add':
        # add operations have no pk info, so we can't load from db
        pass
    else:
        # if we have all 'protected' fks, we can evaluate without a
        # load. otherwise, we need to load to validate
        for rel in cls._meta.permission_parents:
            fk = tuple(getattr(cls, rel).property.local_columns)[0].name
            if not any(key in filters for key in (rel, fk)):
                requires_load = True
    session = orm.sessionmaker()
    if requires_load:
        obj = session.query(cls).filter_by(**filters).first()
    else:
        # build a transient instance from the given filters; keep it
        # out of the session so it is never flushed
        obj = cls(**filters)
        if obj in session:
            session.expunge(obj)
    return self.has_obj_perm(resource, action, obj)
def globalize(self, commit=True):
    """Promote this object to a global by setting Meta.global_column.

    The flag cascades through every relation named in
    ``Meta.global_cascades``; the commit happens once, at the top of
    the cascade, when ``commit`` is True.
    """
    from baph.db.orm import ORM
    orm = ORM.get()
    column = self._meta.global_column
    if not column:
        raise Exception('You cannot globalize a class with no value '
                        'for Meta.global_column')
    # TODO: delete polymorphic extension, leave only the base
    setattr(self, column, True)

    # propagate the flag through cascading relations
    for field in self._meta.global_cascades:
        related = getattr(self, field, None)
        if not related:
            continue
        if isinstance(related, orm.Base):
            # single related object
            children = [related]
        elif hasattr(related, 'iteritems'):
            # dict-like collection
            children = list(related.values())
        elif hasattr(related, '__iter__'):
            # list-like collection
            children = list(related)
        else:
            children = []
        for child in children:
            child.globalize(commit=False)

    if commit:
        session = orm.sessionmaker()
        session.add(self)
        session.commit()
def get_cache_keys(self, child_updated=False, force_expire_pointers=False,
                   force=False):
    """Collect the cache keys affected by changes to this instance.

    Returns a ``(cache_keys, version_keys)`` pair of sets, each holding
    ``(cache_alias, key)`` tuples: ``cache_keys`` are keys to expire
    outright, ``version_keys`` are version counters to increment.
    """
    cache_alias = self._meta.cache_alias
    cache = self.get_cache()
    cache_keys = set()
    version_keys = set()
    if not self.is_cacheable:
        return (cache_keys, version_keys)
    orm = ORM.get()
    session = object_session(self) or orm.sessionmaker()
    deleted = self.is_deleted or self in session.deleted
    changes = self.get_changes(ignore=IGNORABLE_KEYS)
    self_updated = bool(changes) or deleted
    if not self_updated and not child_updated and not force:
        # nothing changed here or below; no keys need touching
        return (cache_keys, version_keys)
    changed_attrs = set(changes.keys())
    data = self.cache_data
    old_data = {}
    if has_identity(self):
        # snapshot pre-change values of all cached fields
        for attr in self.cache_fields():
            ins, eq, rm = get_history(self, attr)
            old_data[attr] = rm[0] if rm else eq[0]
    if 'detail' in self._meta.cache_modes:
        # we only kill primary cache keys if the object exists
        # this key won't exist during CREATE
        if has_identity(self):
            cache_key = self.cache_key
            cache_keys.add((cache_alias, cache_key))
    if 'list' in self._meta.cache_modes:
        # collections will be altered by any action, so we always
        # kill these keys
        version_key = self.cache_list_version_key
        version_keys.add((cache_alias, version_key))
        if self._meta.cache_partitions:
            # add the partition keys as well
            for pversion_key in self.cache_partition_version_keys:
                version_keys.add((cache_alias, pversion_key))
            if changed_attrs.intersection(self._meta.cache_partitions):
                # if partition field values were changed, we need to
                # increment the version keys for the previous values
                for pversion_key in self.get_cache_partition_version_keys(**old_data):
                    version_keys.add((cache_alias, pversion_key))
    if 'asset' in self._meta.cache_modes:
        # models with sub-assets need to increment the version key
        # of the parent detail
        if has_identity(self):
            key = self.cache_detail_version_key
            if deleted:
                # delete the detail version key
                cache_keys.add((cache_alias, key))
            else:
                # for updates, increment the version key
                version_keys.add((cache_alias, key))
    # pointer records contain only the id of the parent resource
    # if changed, we set the old key to False, and set the new key
    for raw_key, attrs, name in self._meta.cache_pointers:
        if not changed_attrs.intersection(attrs) and not force_expire_pointers:
            # the fields which trigger this pointer were not changed
            continue
        cache_key = raw_key % data
        _, ident = identity_key(instance=self)
        if len(ident) > 1:
            # composite pk: join the parts into a single string
            ident = ','.join(map(str, ident))
        else:
            ident = ident[0]
        if not self.is_deleted:
            cache.set(cache_key, ident)
        if force_expire_pointers:
            cache_keys.add((cache_alias, cache_key))
        # if this is a new object, we're done
        if not has_identity(self):
            continue
        # if this is an existing object, we need to handle the old key
        old_data = {}
        for attr in attrs:
            ins, eq, rm = get_history(self, attr)
            old_data[attr] = rm[0] if rm else eq[0]
        old_key = raw_key % old_data
        if old_key == cache_key and not self.is_deleted:
            # the pointer key is unchanged, nothing to do here
            continue
        old_ident = cache.get(old_key)
        if old_ident and str(old_ident) == str(ident):
            # this object is the current owner of the key. we need to remove
            # the reference to this instance
            cache.set(old_key, False)
    # cascade the cache kill operation to related objects, so parents
    # know if children have changed, in order to rebuild the cache
    for cascade in self._meta.cache_cascades:
        objs = getattr(self, cascade)
        if not objs:
            # no related objects
            continue
        if not isinstance(objs, list):
            # *-to-one relation, force into a list
            objs = [objs]
        for obj in objs:
            child_keys = obj.get_cache_keys(child_updated=True)
            cache_keys.update(child_keys[0])
            version_keys.update(child_keys[1])
    return (cache_keys, version_keys)
# Resolve the shared ORM wrapper once at import time and expose its
# declarative base for the model definitions in this module.
from baph.db import ORM

orm = ORM.get()
Base = orm.Base
def fields_for_model(model, fields=None, exclude=None, widgets=None,
                     formfield_callback=None, localized_fields=None,
                     labels=None, help_texts=None, error_messages=None,
                     field_classes=None):
    """Return a SortedDict mapping field names to form fields for ``model``.

    Mirrors django's ``fields_for_model`` but reads SQLAlchemy-backed
    field descriptors from ``model._meta``.

    :param model: the mapped model class to introspect.
    :param fields: optional whitelist of field names; relation fields
        are only included when explicitly listed here.
    :param exclude: optional blacklist of field names.
    :param widgets: per-field widget overrides, as in django.
    :param formfield_callback: optional callable invoked as
        ``formfield_callback(f, **kwargs)`` to build each form field.
    :param localized_fields: names to localize, or ALL_FIELDS.
    :param labels, help_texts, error_messages, field_classes:
        per-field form overrides, as in django.
    :raises TypeError: if ``formfield_callback`` is given but not callable.
    """
    orm = ORM.get()
    Base = orm.Base
    field_list = []
    ignored = []
    opts = model._meta
    for f in sorted(opts.fields):
        if not getattr(f, 'editable', False):
            continue
        # idiom fix: `f.name not in fields` (was `not f.name in fields`)
        if fields is not None and f.name not in fields:
            continue
        if exclude and f.name in exclude:
            continue
        # hoisted: several branches below depend on whether the field
        # points at another mapped class
        is_relation = issubclass(f.data_type, Base)
        if is_relation and (fields is None or f.name not in fields):
            # TODO: Auto-generate fields, control via 'fields' param
            # skip relations unless manually requested
            continue
        kwargs = {}
        if widgets and f.name in widgets:
            kwargs['widget'] = widgets[f.name]
        if localized_fields == ALL_FIELDS or \
                (localized_fields and f.name in localized_fields):
            kwargs['localize'] = True
        if labels and f.name in labels:
            kwargs['label'] = labels[f.name]
        if help_texts and f.name in help_texts:
            kwargs['help_text'] = help_texts[f.name]
        if error_messages and f.name in error_messages:
            kwargs['error_messages'] = error_messages[f.name]
        if field_classes and f.name in field_classes:
            kwargs['form_class'] = field_classes[f.name]
        if f.collection_class:
            kwargs['form_class'] = FIELD_MAP['collection']
            kwargs['collection_class'] = f.collection_class
        elif is_relation:
            kwargs['form_class'] = FIELD_MAP['object']
        else:
            kwargs['form_class'] = FIELD_MAP.get(f.data_type)
        if is_relation:
            kwargs['related_class'] = f.data_type
        # a field is required only when it is non-nullable, non-blank,
        # and has no default to fall back on
        kwargs['required'] = not (f.nullable or f.blank
                                  or f.default is not None)
        if f.max_length and 'collection_class' not in kwargs:
            kwargs['max_length'] = f.max_length
        if f.content_length_func:
            kwargs['content_length_func'] = f.content_length_func
        if formfield_callback is None:
            formfield = f.formfield(**kwargs)
        elif not callable(formfield_callback):
            raise TypeError('formfield_callback must be a function or '
                            'callable')
        else:
            formfield = formfield_callback(f, **kwargs)
        if formfield:
            field_list.append((f.name, formfield))
        else:
            ignored.append(f.name)
    field_dict = SortedDict(field_list)
    if fields:
        # preserve the caller-specified ordering, dropping excluded
        # and ignored names
        field_dict = SortedDict(
            [(f, field_dict.get(f)) for f in fields
             if (not exclude or f not in exclude) and f not in ignored]
        )
    return field_dict
def get_resource_filters(self, resource, action='view'):
    """
    Returns resource filters in a format appropriate for applying
    to an existing query
    """
    orm = ORM.get()
    cls = orm.Base._decl_class_registry[resource]
    if cls._meta.permission_handler:
        # permissions for this object are routed to parent object
        parent_cls = cls.get_related_class(cls._meta.permission_handler)
        if action != 'view':
            action = 'edit'
        return self.get_resource_filters(parent_cls.resource_name, action)
    ctx = self.get_context()
    perms = self.get_resource_permissions(resource, action)
    if not perms:
        # no permissions at all: caller receives False, not a filter list
        return False
    allow_filters = []
    deny_filters = []
    for p in perms:
        if not p.key:
            # this is a boolean permission, so cannot be applied as a filter
            continue
        keys = p.key.split(',')
        if p.opcode == 'in':
            # range filter
            values = [json.loads(p.value)]
        else:
            # exact filter
            values = p.value.split(',')
        data = zip(keys, values)
        filters = []
        for key, value in data:
            if key in cls._meta.filter_translations:
                # translated keys take the form 'ClassName.attr.path'
                lookup, key = cls._meta.filter_translations[key].split('.',1)
            else:
                lookup = resource
            cls_ = orm.Base._decl_class_registry[lookup]
            # walk any relation path down to the attribute's owning class
            frags = key.split('.')
            attr = frags.pop()
            for frag in frags:
                cls_ = cls_.get_related_class(frag)
            col = getattr(cls_, attr)
            if p.opcode == 'in':
                filters.append(col.in_(value))
            else:
                filters.append(col==value)
        if len(filters) == 1:
            filter_ = filters[0]
        else:
            filter_ = and_(*filters)
        if p._deny:
            # deny rules become negated mandatory clauses
            deny_filters.append(not_(filter_))
        else:
            allow_filters.append(filter_)
    final_filters = deny_filters[:]
    final_filters.append(or_(*allow_filters))
    return [and_(*final_filters)]
def get_cache_keys(self, child_updated=False, force_expire_pointers=False,
                   force=False):
    """Gather cache keys invalidated by the pending changes on this object.

    Returns ``(cache_keys, version_keys)``: sets of
    ``(cache_alias, key)`` tuples to expire and to version-bump,
    respectively.
    """
    cache_alias = self._meta.cache_alias
    cache = self.get_cache()
    cache_keys = set()
    version_keys = set()
    if not self.is_cacheable:
        return (cache_keys, version_keys)
    orm = ORM.get()
    session = object_session(self) or orm.sessionmaker()
    deleted = self.is_deleted or self in session.deleted
    changes = self.get_changes(ignore=IGNORABLE_KEYS)
    self_updated = bool(changes) or deleted
    if not self_updated and not child_updated and not force:
        # no changes anywhere; nothing to invalidate
        return (cache_keys, version_keys)
    changed_attrs = set(changes.keys())
    data = self.cache_data
    old_data = {}
    if has_identity(self):
        # capture the pre-change values of all cached fields
        for attr in self.cache_fields():
            ins, eq, rm = get_history(self, attr)
            old_data[attr] = rm[0] if rm else eq[0]
    if 'detail' in self._meta.cache_modes:
        # we only kill primary cache keys if the object exists
        # this key won't exist during CREATE
        if has_identity(self):
            cache_key = self.cache_key
            cache_keys.add((cache_alias, cache_key))
    if 'list' in self._meta.cache_modes:
        # collections will be altered by any action, so we always
        # kill these keys
        version_key = self.cache_list_version_key
        version_keys.add((cache_alias, version_key))
        if self._meta.cache_partitions:
            # add the partition keys as well
            for pversion_key in self.cache_partition_version_keys:
                version_keys.add((cache_alias, pversion_key))
            if changed_attrs.intersection(self._meta.cache_partitions):
                # if partition field values were changed, we need to
                # increment the version keys for the previous values
                for pversion_key in self.get_cache_partition_version_keys(
                        **old_data):
                    version_keys.add((cache_alias, pversion_key))
    if 'asset' in self._meta.cache_modes:
        # models with sub-assets need to increment the version key
        # of the parent detail
        if has_identity(self):
            key = self.cache_detail_version_key
            if deleted:
                # delete the detail version key
                cache_keys.add((cache_alias, key))
            else:
                # for updates, increment the version key
                version_keys.add((cache_alias, key))
    # pointer records contain only the id of the parent resource
    # if changed, we set the old key to False, and set the new key
    for raw_key, attrs, name in self._meta.cache_pointers:
        if not changed_attrs.intersection(
                attrs) and not force_expire_pointers:
            # the fields which trigger this pointer were not changed
            continue
        cache_key = raw_key % data
        _, ident = identity_key(instance=self)
        if len(ident) > 1:
            # composite primary key: flatten to a comma-joined string
            ident = ','.join(map(str, ident))
        else:
            ident = ident[0]
        if not self.is_deleted:
            cache.set(cache_key, ident)
        if force_expire_pointers:
            cache_keys.add((cache_alias, cache_key))
        # if this is a new object, we're done
        if not has_identity(self):
            continue
        # if this is an existing object, we need to handle the old key
        old_data = {}
        for attr in attrs:
            ins, eq, rm = get_history(self, attr)
            old_data[attr] = rm[0] if rm else eq[0]
        old_key = raw_key % old_data
        if old_key == cache_key and not self.is_deleted:
            # the pointer key is unchanged, nothing to do here
            continue
        old_ident = cache.get(old_key)
        if old_ident and str(old_ident) == str(ident):
            # this object is the current owner of the key. we need to remove
            # the reference to this instance
            cache.set(old_key, False)
    # cascade the cache kill operation to related objects, so parents
    # know if children have changed, in order to rebuild the cache
    for cascade in self._meta.cache_cascades:
        objs = getattr(self, cascade)
        if not objs:
            # no related objects
            continue
        if not isinstance(objs, list):
            # *-to-one relation, force into a list
            objs = [objs]
        for obj in objs:
            child_keys = obj.get_cache_keys(child_updated=True)
            cache_keys.update(child_keys[0])
            version_keys.update(child_keys[1])
    return (cache_keys, version_keys)
def contribute_to_class(self, cls, name):
    """Attach this options object to ``cls`` as ``cls._meta``.

    Mirrors django's ``Options.contribute_to_class``: applies overrides
    from the model's inner ``class Meta``, rejects unknown attributes,
    then derives plural names, cache defaults, and the polymorphic
    base model names.
    """
    cls._meta = self
    self.model = cls
    # First, construct the default values for these options.
    self.object_name = cls.__name__
    self.model_name = self.object_name.lower()
    self.verbose_name = camel_case_to_spaces(self.object_name)
    self.original_attrs = {}
    # Next, apply any overridden values from 'class Meta'.
    if self.meta:
        meta_attrs = self.meta.__dict__.copy()
        # NOTE(review): this loop rebinds the ``name`` parameter --
        # harmless here since the parameter is not used afterwards.
        for name in self.meta.__dict__:
            # Ignore any private attributes that Django doesn't care about.
            # NOTE: We can't modify a dictionary's contents while looping
            # over it, so we loop over the *original* dictionary instead.
            if name.startswith('_'):
                del meta_attrs[name]
        for attr_name in DEFAULT_NAMES:
            if attr_name in meta_attrs:
                setattr(self, attr_name, meta_attrs.pop(attr_name))
            elif hasattr(self.meta, attr_name):
                setattr(self, attr_name, getattr(self.meta, attr_name))
        # Any leftover attributes must be invalid.
        if meta_attrs != {}:
            raise TypeError("'class Meta' got invalid attribute(s): %s"
                % ','.join(meta_attrs.keys()))
    # initialize params that depend on other params being set
    if self.model_name_plural is None:
        self.model_name_plural = self.model_name + 's'
    if self.verbose_name_plural is None:
        self.verbose_name_plural = self.verbose_name + 's'
    if self.cache_alias and self.cache_timeout is None:
        self.cache_timeout = get_cache(self.cache_alias).default_timeout
    from baph.db import ORM
    Base = ORM.get().Base
    base_model_name = self.model_name
    base_model_name_plural = self.model_name_plural
    # walk the mro looking for a polymorphic base; if found, inherit
    # its base model names instead of our own
    for base in self.model.__mro__:
        if not issubclass(base, Base):
            continue
        if base in (self.model, Base):
            continue
        if not hasattr(base, '__mapper_args__'):
            continue
        if 'polymorphic_on' in base.__mapper_args__:
            base_model_name = base._meta.base_model_name
            base_model_name_plural = base._meta.base_model_name_plural
            break
    self.base_model_name = unicode(base_model_name)
    self.base_model_name_plural = unicode(base_model_name_plural)
    del self.meta
def get_resource_filters(self, resource, action='view'):
    """Build SQLAlchemy filter clauses enforcing this user's permissions.

    Returns a single-element list containing one combined clause
    suitable for ``query.filter(*result)``, or False when the user has
    no applicable permissions.
    """
    orm = ORM.get()
    cls = orm.Base._decl_class_registry[resource]
    if cls._meta.permission_handler:
        # permission checks for this class are delegated to its parent
        parent_cls = cls.get_related_class(cls._meta.permission_handler)
        if action != 'view':
            action = 'edit'
        return self.get_resource_filters(parent_cls.resource_name, action)

    ctx = self.get_context()
    perms = self.get_resource_permissions(resource, action)
    if not perms:
        return False

    allow_filters = []
    deny_filters = []
    for perm in perms:
        if not perm.key:
            # boolean permission: nothing to express as a filter
            continue
        keys = perm.key.split(',')
        if perm.opcode == 'in':
            # range filter: the stored value is a json list
            values = [json.loads(perm.value)]
        else:
            # exact filter: one value per key
            values = perm.value.split(',')

        clauses = []
        for key, value in zip(keys, values):
            if key in cls._meta.filter_translations:
                # translated keys take the form 'ClassName.attr.path'
                lookup, key = cls._meta.filter_translations[key].split('.', 1)
            else:
                lookup = resource
            target = orm.Base._decl_class_registry[lookup]
            # follow any relation path down to the owning class
            parts = key.split('.')
            attr = parts.pop()
            for frag in parts:
                target = target.get_related_class(frag)
            col = getattr(target, attr)
            if perm.opcode == 'in':
                clauses.append(col.in_(value))
            else:
                clauses.append(col == value)

        combined = clauses[0] if len(clauses) == 1 else and_(*clauses)
        if perm._deny:
            # deny rules become negated mandatory clauses
            deny_filters.append(not_(combined))
        else:
            allow_filters.append(combined)

    final_filters = deny_filters[:]
    final_filters.append(or_(*allow_filters))
    return [and_(*final_filters)]
# --- module setup: SQLAlchemy/baph imports and auth constants ---
from sqlalchemy.orm import (relationship, backref, object_session,
    RelationshipProperty, clear_mappers)
from baph.auth.mixins import UserPermissionMixin
from baph.auth.registration import settings as auth_settings
from baph.db import ORM
from baph.db.models.loading import cache
from baph.db.types import UUID, Dict, List
from baph.utils.collections import LazyDict
from baph.utils.importing import remove_class
from baph.utils.strings import random_string
import inspect, sys

orm = ORM.get()
Base = orm.Base

# column type for user primary keys: UUID unless overridden in settings
AUTH_USER_FIELD_TYPE = getattr(settings, 'AUTH_USER_FIELD_TYPE', 'UUID')
AUTH_USER_FIELD = UUID if AUTH_USER_FIELD_TYPE == 'UUID' else Integer
# table name for permission records, overridable via settings
PERMISSION_TABLE = getattr(settings, 'BAPH_PERMISSION_TABLE',
    'baph_auth_permissions')
# sentinel stored in place of a real password hash
UNUSABLE_PASSWORD = '******'


def _generate_user_id_column():
    """Return the pk Column for user rows, typed per AUTH_USER_FIELD_TYPE."""
    if AUTH_USER_FIELD_TYPE != 'UUID':
        return Column(AUTH_USER_FIELD, primary_key=True)
    return Column(UUID, primary_key=True, default=uuid.uuid4)


# NOTE(review): the following definition is truncated in this excerpt;
# its body is not visible here.
def update_last_login(sender, user, **kwargs):