def test_identity_key_1(self):
    """identity_key with a scalar primary key, positional and keyword."""
    mapper(User, users)
    for computed in (util.identity_key(User, 1),
                     util.identity_key(User, ident=1)):
        eq_(computed, (User, (1,)))
def test_identity_key_1(self):
    """identity_key builds (class, pk-tuple) from a scalar ident."""
    mapper(User, users)
    expected = (User, (1,))
    eq_(util.identity_key(User, 1), expected)
    eq_(util.identity_key(User, ident=1), expected)
def clean_unique_field(self, key, **kwargs):
    """Validate that ``self.cleaned_data[key]`` is unique among rows
    matching ``kwargs``.

    Returns the cleaned value, or raises ``forms.ValidationError`` when
    another row (different identity than ``self.instance``) already
    holds it.  ``None`` values are exempt from the uniqueness check.
    """
    orm = ORM.get()
    value = self.cleaned_data[key]
    if value is None:
        return value
    filters = {
        key: value,
    }
    filters.update(kwargs)
    model = self._meta.model
    mapper = inspect(model)
    # for polymorphic hierarchies, prefer checking against the base
    if mapper.polymorphic_on is not None:
        mapper = mapper.base_mapper
    # if all filter keys exist on the base mapper, query the base class
    # if the base class is missing any properties, query the
    # polymorphic subclass explicitly
    if all(map(mapper.has_property, filters.keys())):
        model = mapper.class_
    session = orm.sessionmaker()
    instance = session.query(model) \
        .filter_by(**filters) \
        .filter_by(**kwargs) \
        .first()
    # compare identity keys so the row being edited does not collide
    # with itself
    if instance and identity_key(instance=instance) \
            != identity_key(instance=self.instance):
        # this value is already in use
        raise forms.ValidationError(_('This value is already in use'))
    return value
def _is_pair(field1, field2):
    """True when both fields share a converter class and, for
    model-dict converters, refer to the same database identity."""
    if field1.conv.__class__ != field2.conv.__class__:
        return False
    if isinstance(field1.conv, convs.ModelDictConv):
        key1 = identity_key(instance=field1.clean_value)[1]
        key2 = identity_key(instance=field2.clean_value)[1]
        return key1 == key2
    # XXX how to implement indication in this case?
    return True
def test_identity_key_token(self):
    """identity_key carries identity_token through as the third element."""
    User, users = self.classes.User, self.tables.users
    self.mapper_registry.map_imperatively(User, users)
    expected = (User, (1,), "token")
    eq_(orm_util.identity_key(User, [1], identity_token="token"), expected)
    eq_(orm_util.identity_key(User, ident=[1], identity_token="token"),
        expected)
def test_identity_key_scalar(self):
    """A bare scalar ident is wrapped into a one-element pk tuple."""
    User, users = self.classes.User, self.tables.users
    mapper(User, users)
    for got in (orm_util.identity_key(User, 1),
                orm_util.identity_key(User, ident=1)):
        eq_(got, (User, (1,)))
def test_identity_key_scalar(self):
    """Scalar ident yields (class, (pk,), None) on this API version."""
    User, users = self.classes.User, self.tables.users
    mapper(User, users)
    expected = (User, (1,), None)
    eq_(orm_util.identity_key(User, 1), expected)
    eq_(orm_util.identity_key(User, ident=1), expected)
def test_identity_key_token(self):
    """identity_token is preserved as the key's third element."""
    User, users = self.classes.User, self.tables.users
    mapper(User, users)
    for got in (orm_util.identity_key(User, [1], identity_token="token"),
                orm_util.identity_key(User, ident=[1],
                                      identity_token="token")):
        eq_(got, (User, (1,), "token"))
def test_identity_key_1(self):
    """List idents produce the same key positionally and by keyword."""
    User, users = self.classes.User, self.tables.users
    mapper(User, users)
    expected = (User, (1,))
    eq_(orm_util.identity_key(User, [1]), expected)
    eq_(orm_util.identity_key(User, ident=[1]), expected)
def expected_changes(self, added_ann_id, changed_ann_id, deleted_ann_id):
    """Build the (identity_key, state) entries expected for one added,
    one changed and one deleted Annotation."""
    def entry(ann_id, state):
        return (identity_key(Annotation, (ann_id,)), state)

    return (
        entry(added_ann_id, ObjectState.ADDED),
        entry(changed_ann_id, ObjectState.CHANGED),
        entry(deleted_ann_id, ObjectState.DELETED),
    )
def test_identity_key_1(self):
    """identity_key from a one-element ident list, both call styles."""
    User, users = self.classes.User, self.tables.users
    mapper(User, users)
    for result in (util.identity_key(User, [1]),
                   util.identity_key(User, ident=[1])):
        eq_(result, (User, (1,)))
def test_identity_key_token(self):
    """The identity_token argument becomes the key's final member."""
    User, users = self.classes.User, self.tables.users
    mapper(User, users)
    want = (User, (1,), "token")
    eq_(orm_util.identity_key(User, [1], identity_token="token"), want)
    eq_(orm_util.identity_key(User, ident=[1], identity_token="token"),
        want)
def _is_pair(field1, field2):
    """Return True if the two fields are considered equivalent.

    Fields with different converter classes never pair.  For
    ModelDictConv fields the underlying DB identities are compared;
    unmapped values fall through to the generic ``True`` result.
    """
    if field1.conv.__class__ == field2.conv.__class__:
        if isinstance(field1.conv, convs.ModelDictConv):
            try:
                ident1 = identity_key(instance=field1.clean_value)[1]
                ident2 = identity_key(instance=field2.clean_value)[1]
                return ident1 == ident2
            except UnmappedInstanceError:
                # clean_value is not a mapped instance; fall back to
                # the generic result below
                pass
        # XXX how to implement indication in this case?
        return True
    return False
def replicate_no_merge(source, model, cache=None): '''Replicates the `source` object to `model` class and returns its reflection.''' # `cache` is used to break circular dependency: we need to replicate # attributes before merging target into the session, but replication of # some attributes may require target to be in session to avoid infinite # loop. if source is None: return None if cache is None: cache = {} elif source in cache: return cache[source] db = object_session(source) cls, ident = identity_key(instance=source) target = db.query(model).get(ident) if target is None: target = model() cache[source] = target try: replicate_attributes(source, target, cache=cache) except _PrimaryKeyIsNull: return None else: return target
def _bulk_load_column_for_instance_states(
        session: Session,
        mapper: Mapper,
        identities: Iterable[Tuple],
        attr_name: str,
        alter_query: Optional[QueryAlterator]):
    """ Load a column attribute for a list of instance states where the
    attribute is unloaded

    Args:
        session: Session whose identity map holds the target instances
        mapper: mapper for the model whose column is being loaded
        identities: primary-key tuples identifying the instances
        attr_name: name of the unloaded column attribute
        alter_query: optional hook to adjust the generated query
    """
    Model = mapper.class_
    attr: Column = mapper.columns[attr_name]
    # Using those identities (primary keys), load the missing attribute
    q = load_by_primary_keys(session, mapper, identities, attr)
    # Alter the query
    if alter_query:
        q = alter_query(q, mapper, attr_name, False)
    # Having the missing attribute's value loaded, assign it to every
    # instance in the session
    for identity, attr_value in q:
        # Build the identity key the way SqlAlchemy likes it:
        # (Model, primary-key, None)
        key = identity_key(Model, identity)
        # We do not iterate the Session to find an instance that matches
        # the primary key.
        # Instead, we take it directly using the `identity_map`
        instance = session.identity_map[key]
        # Set the value of the missing attribute.
        # This is how it immediately becomes loaded.
        # Note that this action does not overwrite any modifications
        # made to the attribute.
        set_committed_value(instance, attr_name, attr_value)
def _fill_img(self, mapper, connection, target):
    """Derive this image property's value from its `fill_from` sibling
    when unset: generate a new file name and persist a copy of the base
    file onto ``target``.  No-op when the value is already set or no
    base file is configured/available.
    """
    if self.prop.fill_from:
        # XXX Looks hacky
        value = getattr(target, self.prop.key)
        if value is None:
            base = getattr(target, self.prop.fill_from)
            if base is None:
                return
            if not os.path.isfile(base.path):  # pragma: no cover, failure case,
                                               # don't know how to test it
                logger.warn('Original file is absent %s %s %s',
                            identity_key(instance=target),
                            self.prop.fill_from,
                            base.path)
                return
            # keep the original file extension for the generated name
            ext = os.path.splitext(base.name)[1]
            session = object_session(target)
            image_attr = getattr(target.__class__, self.prop.key)
            name = session.find_file_manager(image_attr).new_file_name(
                self.prop.name_template, target, ext, '')
            setattr(target, self.prop.attribute_name, name)
            persistent = self._2persistent(target, base)
            setattr(target, self.prop.key, persistent)
def _get_pk_from_identity(obj):
    """Copied / pasted, and fixed, from WTForms_sqlalchemy due to issue
    w/ SQLAlchemy >= 1.2."""
    from sqlalchemy.orm.util import identity_key
    pk_tuple = identity_key(instance=obj)[1]
    return ":".join(text_type(part) for part in pk_tuple)
def _fill_img(self, mapper, connection, target):
    """Populate this image attribute from its `fill_from` sibling when
    it is unset: build a fresh file name from the name template and
    store a persistent copy of the base image on ``target``.
    """
    if self.prop.fill_from:
        # XXX Looks hacky
        value = getattr(target, self.prop.key)
        if value is None:
            base = getattr(target, self.prop.fill_from)
            if base is None:
                return
            if not os.path.isfile(
                    base.path):  # pragma: no cover, failure case,
                                 # don't know how to test it
                logger.warn('Original file is absent %s %s %s',
                            identity_key(instance=target),
                            self.prop.fill_from,
                            base.path)
                return
            # preserve the source file's extension in the new name
            ext = os.path.splitext(base.name)[1]
            session = object_session(target)
            image_attr = getattr(target.__class__, self.prop.key)
            name = session.find_file_manager(image_attr).new_file_name(
                self.prop.name_template, target, ext, '')
            setattr(target, self.prop.attribute_name, name)
            persistent = self._2persistent(target, base)
            setattr(target, self.prop.key, persistent)
def test_identity_key_3(self):
    """identity_key can be derived from a result-row mapping."""
    User, users = self.classes.User, self.tables.users
    mapper(User, users)
    frank_row = {users.c.id: 1, users.c.name: "Frank"}
    eq_(orm_util.identity_key(User, row=frank_row), (User, (1,)))
def test_identity_key(self):
    """InstanceState.identity_key matches util-built identity_key."""
    User = self.classes.User
    ed = User(name="ed")
    sess = Session(testing.db)
    sess.add(ed)
    sess.flush()
    eq_(inspect(ed).identity_key, identity_key(User, (ed.id,)))
def _item_version(self, version):
    """Load this row's counterpart from the model registry named by
    ``version``."""
    # XXX hacky
    version_models = getattr(AdminReplicated, version)
    model = getattr(version_models, self.__class__.__name__)
    db = object_session(self)
    ident = identity_key(instance=self)[1]
    assert ident is not None
    return db.query(model).get(ident)
def test_identity_key(self):
    """The inspected identity_key equals one built from the flushed id."""
    User = self.classes.User
    user = User(name="ed")
    session = Session(testing.db)
    session.add(user)
    session.flush()
    state = inspect(user)
    eq_(state.identity_key, identity_key(User, (user.id,)))
def pk_as_query_filters(self, force=False):
    """Return a filter expression over this instance's primary key,
    suitable for use with Query.filter().

    Returns None when any pk column is still None (unflushed instance)
    unless ``force`` is true.
    """
    cls, pk_values = identity_key(instance=self)
    if None in pk_values and not force:
        return None
    # and_() takes *clauses; the original passed a single generator
    # object, which is not a valid clause — unpack the comparisons.
    return and_(*(attr == value
                  for attr, value in zip(self.pk_attrs, pk_values)))
def pk_as_query_filters(self, force=False):
    """Return a filter expression over this instance's primary key,
    suitable for use with Query.filter().

    Returns None when any pk column is still None (unflushed instance)
    unless ``force`` is true.
    """
    cls, pk_values = identity_key(instance=self)
    if None in pk_values and not force:
        return None
    # and_() expects clause arguments, not a bare generator — unpack.
    return and_(*(attr == value
                  for attr, value in zip(self.pk_attrs, pk_values)))
def test_identity_key_2(self):
    """identity_key(instance=...) reflects the flushed primary key."""
    mapper(User, users)
    session = create_session()
    user = User(name='u1')
    session.add(user)
    session.flush()
    eq_(util.identity_key(instance=user), (User, (user.id,)))
def test_identity_key_3(self):
    """Row-based identity_key includes the trailing None token."""
    User, users = self.classes.User, self.tables.users
    mapper(User, users)
    data = {users.c.id: 1, users.c.name: "Frank"}
    eq_(orm_util.identity_key(User, row=data), (User, (1,), None))
def test_identity_key_2(self):
    """An instance's identity key is (class, (pk,)) after flush."""
    mapper(User, users)
    sess = create_session()
    u1 = User(name='u1')
    sess.add(u1)
    sess.flush()
    result = util.identity_key(instance=u1)
    eq_(result, (User, (u1.id,)))
def user_id(self):
    """Return the scalar primary key of ``self.user`` (or None)."""
    if not self.user:
        return None
    pk = identity_key(instance=self.user)[1]
    if len(pk) > 1:
        raise Exception('chown cannot used for multi-column user pks. To '
            'specify ownership for a user with a multi-column pk, add the '
            'relationship attribute key to the chown rules')
    return pk[0]
def get_edit_url(self, env, item):
    ''' Checks if item belongs to the stream, and if it's true,
    returns an url to item edit page '''
    model = self.get_model(env)
    if not isinstance(item, model):
        return None
    ident = identity_key(instance=item)[1]
    if self.item_query(env).get(ident):
        return self.url_for(env, 'item', item=item.id)
def user_id(self):
    """Scalar pk of the owning user; None when no user is set."""
    if not self.user:
        return None
    _, pk = identity_key(instance=self.user)
    if len(pk) > 1:
        raise Exception(
            'chown cannot used for multi-column user pks. To '
            'specify ownership for a user with a multi-column pk, add the '
            'relationship attribute key to the chown rules')
    return pk[0]
def get_pk_from_identity(obj):
    """Get primary key for `obj`.

    If `obj` has a compound primary key, return a string of keys
    separated by ``":"``. This is the default keygetter for used by
    `ModelSchema <marshmallow_sqlalchemy.ModelSchema>`.
    """
    key = identity_key(instance=obj)[1]
    if len(key) == 1:
        return key[0]
    # Compound primary key: join the parts
    return ':'.join(text_type(x) for x in key)
def test_identity_key_2(self):
    """instance-based identity_key carries the trailing None token."""
    users, User = self.tables.users, self.classes.User
    mapper(User, users)
    sess = create_session()
    new_user = User(name="u1")
    sess.add(new_user)
    sess.flush()
    eq_(orm_util.identity_key(instance=new_user),
        (User, (new_user.id,), None))
def test_identity_key_2(self):
    """identity_key computed from a persistent instance."""
    users, User = self.tables.users, self.classes.User
    mapper(User, users)
    db = create_session()
    obj = User(name='u1')
    db.add(obj)
    db.flush()
    eq_(orm_util.identity_key(instance=obj), (User, (obj.id,)))
def reflect(source, model, cache=None):
    '''Finds an object of class `model` with the same identifier as the
    `source` object'''
    if source is None:
        return None
    if cache and source in cache:
        return cache[source]
    ident = identity_key(instance=source)[1]
    assert ident is not None
    session = object_session(source)
    return session.query(model).get(ident)
def assert_(self, model_cls, ident=None, mode='created'):
    """Assert that instances of ``model_cls`` were touched in ``mode``.

    With ``ident`` given, additionally require an instance with that
    primary key and return it; otherwise return the whole dataset.
    """
    dataset = self.last(model_cls, mode)
    error_msg = 'No instances of %s were %s' % (model_cls, mode)
    assert dataset, error_msg
    if ident is not None:
        # normalize to a tuple: identity_key() yields tuples, and a
        # list ident would never compare equal to one (original bug)
        ident = tuple(ident) if isinstance(ident, (tuple, list)) \
            else (ident,)
        item = [i for i in dataset
                if util.identity_key(instance=i)[1] == ident]
        # fix: message previously misspelled "instances"
        assert item, 'No instances of %s with identity %r were %s' % \
            (model_cls, ident, mode)
        return item[0]
    return dataset
def _item_version(self, version, lang=None):
    """Fetch this row's counterpart from the ``version`` model registry
    for the given (or current) language; None for unknown languages."""
    lang = lang or self.models.lang
    if lang not in self._iktomi_langs:
        return None
    # XXX hacky
    registry = getattr(AdminReplicated, version)
    lang_models = getattr(registry, lang)
    model = getattr(lang_models, _get_model_name(self))
    db = object_session(self)
    ident = identity_key(instance=self)[1]
    assert ident is not None
    return db.query(model).get(ident)
def normalize_collections(self, old, new):
    """Bring ``old`` and ``new`` collections into a comparable order.

    OrderingList passes through untouched, dicts are flattened to
    key-sorted value lists, and plain lists are reordered via the clone
    registry so positions line up.  Returns the (old, new) pair.
    """
    self.assertEqual(
        type(old), type(new),
        'Collections have different classes. (initial=%s, cloned=%s)'
        % (type(old), type(new)))
    if type(old) == OrderingList:
        collection_class = OrderingList
    else:
        collection_class = duck_type_collection(old)
    if collection_class == OrderingList:
        # ordering is important here, so we pass through unmodified
        pass
    elif collection_class == dict:
        # we want to compare objects with matching keys, so we order
        # the objects by key
        self.assertItemsEqual(old.keys(), new.keys(),
                              'mapped collections have different keys')
        old = [i[1] for i in sorted(old.items())]
        new = [i[1] for i in sorted(new.items())]
    elif collection_class == list:
        # this is an unordered list, so we need to manually order
        # related items to prevent future tests from failing
        new_ = []
        for obj in old:
            ident = identity_key(instance=obj)
            clone = self.registry[ident]
            if isinstance(clone, tuple):
                # this is an identity key
                idents = {identity_key(instance=i): i for i in new}
                self.assertIn(clone, idents)
                new_.append(idents[clone])
            else:
                # this is an actual object
                self.assertIn(clone, new)
                new_.append(clone)
        new = new_
    return (old, new)
def _unflushed_changes(self):
    """ Return a map of changes which have not yet been flushed to the DB.

    In the context of the "after_flush" event handler, this returns
    changes which have just been flushed.

    If an object goes through multiple states in the same session
    (eg. added, then flushed, then changed) then only the last state
    for a given object is recorded.
    """
    state_sources = (
        (self.session.new, ObjectState.ADDED),
        (self.session.dirty, ObjectState.CHANGED),
        (self.session.deleted, ObjectState.DELETED),
    )
    changes = {}
    for collection, state in state_sources:
        for obj in collection:
            changes[identity_key(instance=obj)] = state
    return changes
def mget(cls, _ids, force=False, as_dict=False):
    """Query a list of objects by pks.

    :func:`~mget` will always use db slave, values will be stored to
    cache if cache misses. Use ``force=True`` to force load from db.

    :param _ids: a list of pks
    :param force: whether force to load from db
    :param as_dict: return dict or list
    :return: dict or list of objects
    """
    if not _ids:
        return {} if as_dict else []
    objs = {}
    if not force:
        # load from session
        if cls._db_session.identity_map:
            for i in _ids:
                ident_key = identity_key(cls, i)
                if ident_key in cls._db_session.identity_map:
                    objs[i] = cls._db_session.identity_map[ident_key]
        # load from cache
        if len(_ids) > len(objs):
            missed_ids = list(set(_ids) - set(objs))
            _objs = cls._from_cache(missed_ids, from_raw=True)
            objs.update(_objs)
    lack_ids = set(_ids) - set(objs)
    if lack_ids:
        pk = cls.pk_attribute()
        # we assume CacheMixin have pk, if not, bypass it.
        if pk:
            lack_objs = cls._db_session().using_bind('master').\
                query(cls).filter(pk.in_(lack_ids)).all()
            if lack_objs:
                cls.mset(lack_objs, nx=True)
            cls._statsd_incr("miss", len(lack_ids))
            objs.update({obj.pk: obj for obj in lack_objs})
        else:  # pragma: no cover
            # fix: "%" was applied to a single arg for two placeholders,
            # raising TypeError; pass lazy format args instead
            logger.warn("No pk found for %s, skip %s",
                        cls.__tablename__, lack_ids)
    # TODO hack to make mget return list
    return objs if as_dict else _dict2list(_ids, objs)
def normalize_collections(self, old, new):
    """Normalize two collections for element-wise comparison.

    OrderingList is kept as-is, dicts become key-sorted value lists,
    and unordered lists are rearranged using the clone registry so the
    items correspond positionally.  Returns the (old, new) pair.
    """
    self.assertEqual(type(old), type(new),
        'Collections have different classes. (initial=%s, cloned=%s)'
        % (type(old), type(new)))
    if type(old) == OrderingList:
        collection_class = OrderingList
    else:
        collection_class = duck_type_collection(old)
    if collection_class == OrderingList:
        # ordering is important here, so we pass through unmodified
        pass
    elif collection_class == dict:
        # we want to compare objects with matching keys, so we order
        # the objects by key
        self.assertItemsEqual(old.keys(), new.keys(),
                              'mapped collections have different keys')
        old = [i[1] for i in sorted(old.items())]
        new = [i[1] for i in sorted(new.items())]
    elif collection_class == list:
        # this is an unordered list, so we need to manually order
        # related items to prevent future tests from failing
        new_ = []
        for obj in old:
            ident = identity_key(instance=obj)
            clone = self.registry[ident]
            if isinstance(clone, tuple):
                # this is an identity key
                idents = {identity_key(instance=i): i for i in new}
                self.assertIn(clone, idents)
                new_.append(idents[clone])
            else:
                # this is an actual object
                self.assertIn(clone, new)
                new_.append(clone)
        new = new_
    return (old, new)
def compare_objects(self, old, new, path, extra_rules=None, data=None):
    """Assert that ``new`` is a correct clone of ``old`` according to
    the replication rules applicable at ``path``.
    """
    self.assertEqual(type(old), type(new))
    if not old:
        return
    rules = self.get_applicable_rules(type(old), path)
    if extra_rules:
        rules.update(extra_rules)
    if data:
        # data contains values applied to the objects which override the
        # default behavior
        rules['data'] = data
    # the registry maps an original's identity to either the clone's
    # identity key or the clone object itself
    ident = identity_key(instance=old)
    clone = self.registry[ident]
    if isinstance(clone, tuple):
        # this is an identity key
        self.assertEqual(clone, identity_key(instance=new))
    else:
        # this is an actual object
        self.assertEqual(clone, new)
    self.check_base_columns(old, new, rules)
    self.check_chown_rules(old, new, rules)
    self.check_exclude_rules(old, new, rules)
    self.check_preserve_rules(old, new, rules)
    self.check_relations_rules(old, new, rules, path)
def reload_object(instance):
    """ Reloads an instance with the correct polymorphic subclass """
    cls, pk_vals = identity_key(instance=instance)
    mapper = inspect(cls)
    pk_filter = {col.key: val
                 for col, val in zip(mapper.primary_key, pk_vals)}
    session = object_session(instance)
    session.expunge(instance)
    return (session.query(cls)
            .with_polymorphic('*')
            .filter_by(**pk_filter)
            .one())
def get_polymorphic_subclass(instance):
    """ Return the appropriate polymorphic subclass for an instance
    which may not have been loaded polymorphically, by checking the
    discriminator against the polymorphic map of the base class.

    for non-polymorphic classes, it returns the class """
    cls = identity_key(instance=instance)[0]
    base_mapper = inspect(cls)
    if base_mapper.polymorphic_on is None:
        # this is not a polymorphic class
        return cls
    discriminator_value = getattr(instance, base_mapper.polymorphic_on.key)
    return base_mapper.polymorphic_map[discriminator_value].class_
def get(cls, pk): ident_key = identity_key(cls, pk) if (cls._db_session.identity_map and ident_key in cls._db_session.identity_map): return cls._db_session.identity_map[identity_key] try: cached_val = cls._cache_client.get(cls.generate_key(pk)) if cached_val: return cls.from_cache(cached_val) except redis.ConnectionError as e: print e except TypeError as e: print e obj = cls._db_session().query(cls).get(pk) if obj is not None: cls.set(obj) return obj
def mget(cls, pks, force=False, as_dict=False):
    """Batch-fetch objects by pk from the session identity map, the
    cache, and finally the database; DB-loaded rows are re-cached.

    :param pks: list of primary keys
    :param force: skip session/cache lookups when true
    :param as_dict: return {pk: obj} instead of an ordered list
    """
    if not pks:
        return {} if as_dict else []
    objs = {}
    if not force:
        if cls._db_session.identity_map:
            for pk in pks:
                ident_key = identity_key(cls, pk)
                if ident_key in cls._db_session.identity_map:
                    # fix: was indexing with the identity_key function,
                    # not ident_key
                    objs[pk] = cls._db_session.identity_map[ident_key]
        if len(pks) > len(objs):
            missed_pks = list(set(pks) - set(objs))
            vals = cls._cache_client.mget(
                cls.gen_raw_key(pk) for pk in missed_pks)
            if vals:
                cached = {
                    k: cls.from_cache(v)
                    for k, v in zip(missed_pks, vals)
                    if v is not None
                }
                _hit_counts = len(cached)
                cls._statsd_incr('hit', _hit_counts)
                objs.update(cached)
    lack_pks = set(pks) - set(objs)
    if lack_pks:
        pk = cls.pk_attribute()
        if pk:
            lack_objs = cls._db_session().query(cls).\
                filter(pk.in_(lack_pks)).all()
            if lack_objs:
                cls.mset(lack_objs)
                cls._statsd_incr('miss', len(lack_objs))
                # fix: was objs.update({obj.pk: obj} for ...), a
                # generator of dicts which dict.update cannot consume
                objs.update({obj.pk: obj for obj in lack_objs})
        else:
            # fix: "%" was applied to one arg for two placeholders,
            # raising TypeError; pass lazy format args instead
            logger.warn("No pk found for %s, skip %s",
                        cls.__tablename__, lack_pks)
    return objs if as_dict else _dict2list(pks, objs)
def get(cls, pk, force=False):
    """Load one object by pk, trying the session identity map, then the
    cache, then the database."""
    if not force:
        ident_key = identity_key(cls, pk)
        if cls._db_session.identity_map and \
                ident_key in cls._db_session.identity_map:
            return cls._db_session.identity_map[ident_key]
    try:
        cached_val = cls._cache_client.get(cls.gen_raw_key(pk))
        if cached_val:
            cls._statsd_incr('hit')
            return cls.from_cache(cached_val)
    except (redis.ConnectionError, TypeError) as e:
        logger.error(e)
    cls._statsd_incr('miss')
    obj = cls._db_session().query(cls).get(pk)
    if obj is not None:
        cls.set_raw(obj.__rawdata__)
    return obj
def _get_referers(self, env, item): '''Returns a dictionary mapping referer model class to query of all objects of this class refering to current object.''' # XXX not implemented cls, ident = identity_key(instance=item) result = {} for other_class in self._get_all_classes(cls): queries = {} for prop in class_mapper(other_class).iterate_properties: if not (isinstance(prop, RelationshipProperty) and \ issubclass(cls, prop.mapper.class_)): continue query = env.db.query(prop.parent) comp = prop.comparator if prop.uselist: query = query.filter(comp.contains(item)) else: query = query.filter(comp==item) count = query.count() if count: queries[prop] = (count, query) if queries: result[other_class] = queries return result
def _front_item(self):
    """Return the front-model row sharing this object's primary key."""
    session = object_session(self)
    assert session is not None
    ident = identity_key(instance=self)[1]
    assert ident is not None
    return session.query(self._front_model).get(ident)
def get_pk_from_identity(obj):
    """Join the primary-key values of ``obj`` into a ':'-separated
    unicode string."""
    # TODO: Remove me
    key = identity_key(instance=obj)[1]
    return u':'.join(text_type(part) for part in key)
def keyfunc(self, value):
    """Get the key for this value. """
    # XXX this isn't very general...
    key_tuple = identity_key(instance=value)[1]
    return unicode(key_tuple[0])