def get_where_for_local(self, other):
    """Generate a column comparison expression for reference properties.

    The returned expression may be used to find objects of the I{local}
    type referring to C{other}.

    It handles the following cases::

        Class.reference == obj
        Class.reference == obj.id
        Class.reference == (obj.id1, obj.id2)

    Where the right-hand side is the C{other} object given.
    """
    try:
        obj_info = get_obj_info(other)
    except ClassInfoError:
        # C{other} is a raw value or a tuple of raw values rather than
        # a mapped object; normalize it to a tuple of values.
        if type(other) is not tuple:
            remote_variables = (other,)
        else:
            remote_variables = other
    else:
        # Reuse the obj_info bound above instead of calling
        # get_obj_info() a second time.  Don't use other directly, as
        # it might be security proxied or something.
        other = obj_info.get_obj()
        remote_variables = self.get_remote_variables(other)
    return compare_columns(self.local_key, remote_variables)
def test_set_get_delete_with_wrapper(Class):
    """Property descriptors must accept wrapped (proxied) instances."""
    obj = Class()
    get_obj_info(obj)  # Ensure the obj_info exists for obj.
    Class.prop1.__set__(Wrapper(obj), 10)
    assert Class.prop1.__get__(Wrapper(obj)) == 10
    Class.prop1.__delete__(Wrapper(obj))
    # Use identity comparison for None (PEP 8), not equality.
    assert Class.prop1.__get__(Wrapper(obj)) is None
def test_set_get_delete_with_wrapper(self):
    """Property descriptors must accept wrapped (proxied) instances."""
    obj = self.Class()
    get_obj_info(obj)  # Ensure the obj_info exists for obj.
    self.Class.prop1.__set__(Wrapper(obj), 10)
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(self.Class.prop1.__get__(Wrapper(obj)), 10)
    self.Class.prop1.__delete__(Wrapper(obj))
    self.assertEqual(self.Class.prop1.__get__(Wrapper(obj)), None)
def __set__(self, local, remote):
    # Descriptor setter for a reference property: link C{local} to
    # C{remote}, or unlink the current remote when C{remote} is None.
    # Don't use local here, as it might be security proxied or something.
    local = get_obj_info(local).get_obj()
    if self._cls is None:
        # Lazily record the class owning this descriptor.
        self._cls = _find_descriptor_class(local.__class__, self)
    if remote is None:
        # Clearing the reference: locate the currently linked remote
        # (if any) so the relation can be broken.
        if self._on_remote:
            remote = self.__get__(local)
            if remote is None:
                # Nothing linked; there is nothing to unlink.
                return
        else:
            remote = self._relation.get_remote(local)
        if remote is None:
            remote_info = None
        else:
            remote_info = get_obj_info(remote)
        # The trailing True asks unlink() to also update foreign keys.
        self._relation.unlink(get_obj_info(local), remote_info, True)
    else:
        # Don't use remote here, as it might be
        # security proxied or something.
        try:
            remote = get_obj_info(remote).get_obj()
        except ClassInfoError:
            pass  # It might fail when remote is a tuple or a raw value.
        self._relation.link(local, remote, True)
def get_where_for_local(self, other):
    """Generate a column comparison expression for reference properties.

    The returned expression may be used to find objects of the I{local}
    type referring to C{other}.

    It handles the following cases::

        Class.reference == obj
        Class.reference == obj.id
        Class.reference == (obj.id1, obj.id2)

    Where the right-hand side is the C{other} object given.
    """
    try:
        obj_info = get_obj_info(other)
    except ClassInfoError:
        # C{other} is a raw value or a tuple of raw values rather than
        # a mapped object; normalize it to a tuple of values.
        if type(other) is not tuple:
            remote_variables = (other,)
        else:
            remote_variables = other
    else:
        # Reuse the obj_info bound above instead of calling
        # get_obj_info() a second time.  Don't use other directly, as
        # it might be security proxied or something.
        other = obj_info.get_obj()
        remote_variables = self.get_remote_variables(other)
    return compare_columns(self.local_key, remote_variables)
def test_class_is_collectable(self):
    """A mapped instance must be garbage-collectable once unreferenced."""
    class Class(Storm):
        __storm_table__ = "table_name"
        prop = Property(primary=True)
    obj = Class()
    get_obj_info(obj)  # Build all wanted meta-information.
    obj_ref = weakref.ref(obj)
    del obj
    gc.collect()
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(obj_ref(), None)
def test_class_is_collectable(self):
    """A mapped instance must be garbage-collectable once unreferenced."""
    class Class(Storm):
        __storm_table__ = "table_name"
        prop = Property(primary=True)

    instance = Class()
    # Force creation of the meta-information bookkeeping up front.
    get_obj_info(instance)
    ref = weakref.ref(instance)
    del instance
    gc.collect()
    assert ref() is None
def test_adding_similar_obj_infos(self):
    """If __eq__ is broken, this fails."""
    obj_info1 = get_obj_info(StubClass())
    obj_info2 = get_obj_info(StubClass())
    cache = self.Cache(5)
    cache.add(obj_info1)
    cache.add(obj_info2)
    cache.add(obj_info2)
    cache.add(obj_info1)
    # The cache does not guarantee ordering, so compare the sorted
    # hashes rather than an order-dependent list (also use the
    # non-deprecated assertEqual).
    self.assertEqual(
        sorted(hash(obj_info) for obj_info in cache.get_cached()),
        sorted([hash(obj_info1), hash(obj_info2)]))
def test_adding_similar_obj_infos(self):
    """If __eq__ is broken, this fails."""
    obj_info1 = get_obj_info(StubClass())
    obj_info2 = get_obj_info(StubClass())
    cache = self.Cache(5)
    cache.add(obj_info1)
    cache.add(obj_info2)
    cache.add(obj_info2)
    cache.add(obj_info1)
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(
        sorted([hash(obj_info) for obj_info in cache.get_cached()]),
        sorted([hash(obj_info1), hash(obj_info2)]))
def __set__(self, local, remote):
    # Descriptor setter: link C{local} to C{remote}, or unlink the
    # currently linked remote when C{remote} is None.
    if self._relation is None:
        # Don't use local.__class__ here, as it might be security
        # proxied or something.
        # XXX UNTESTED!
        self._build_relation(get_obj_info(local).cls_info.cls)
    if remote is None:
        # Clearing: find whatever is currently linked; if nothing is,
        # there is nothing to unlink.
        remote = self._relation.get_remote(local)
        if remote is not None:
            # The trailing True asks unlink() to also update the
            # foreign key variables.
            self._relation.unlink(get_obj_info(local),
                                  get_obj_info(remote), True)
    else:
        self._relation.link(local, remote, True)
def test_adding_similar_obj_infos(Cache):
    """If __eq__ is broken, this fails."""
    first = get_obj_info(StubClass())
    second = get_obj_info(StubClass())
    cache = Cache(5)
    # Add each obj_info twice, interleaved, to exercise de-duplication.
    for entry in (first, second, second, first):
        cache.add(entry)
    cached_hashes = sorted(hash(entry) for entry in cache.get_cached())
    assert cached_hashes == sorted([hash(first), hash(second)])
def __get__(self, local, cls=None):
    # Class access: return the descriptor itself, recording the owner
    # class for later use by __eq__().
    if local is None:
        if self._cls is None:
            # Must set earlier, since __eq__() has no access
            # to the used class.
            self._cls = _find_descriptor_class(cls, self)
        return self
    if self._relation is None:
        # Don't use local.__class__ here, as it might be security
        # proxied or something.
        # XXX UNTESTED!
        self._build_relation(get_obj_info(local).cls_info.cls)
    # Fast path: a remote object is already cached for this relation.
    remote = self._relation.get_remote(local)
    if remote is not None:
        return remote
    # Nothing cached; a store is required to look the remote up.
    store = Store.of(local)
    if store is None:
        return None
    if self._relation.remote_key_is_primary:
        # The foreign key targets the remote primary key, so a plain
        # get() by primary key values suffices.
        remote = store.get(self._relation.remote_cls,
                           self._relation.get_local_variables(local))
    else:
        # Otherwise run a find() with the generated comparison clause.
        where = self._relation.get_where_for_remote(local)
        result = store.find(self._relation.remote_cls, where)
        remote = result.one()
    if remote is not None:
        # Cache the pair so the next access takes the fast path above.
        self._relation.link(local, remote)
    return remote
def local_variables_are_none(self, local):
    """Return true if all variables of the local key have None values."""
    variables = get_obj_info(local).variables
    columns = self._get_local_columns(local.__class__)
    return all(variables[column].get() is None for column in columns)
def _remote_variables(relation, obj):
    """A helper function to extract the foreign key values of an object."""
    try:
        obj_info = get_obj_info(obj)
    except ClassInfoError:
        # obj is a raw value or a tuple of raw values, not a mapped
        # object; normalize it to a tuple of values.
        if type(obj) is not tuple:
            remote_variables = (obj,)
        else:
            remote_variables = obj
    else:
        # Reuse the obj_info bound above instead of calling
        # get_obj_info() again.  Don't use obj directly, as it might
        # be security proxied or something.
        obj = obj_info.get_obj()
        remote_variables = relation.get_remote_variables(obj)
    return remote_variables
def __get__(self, local, cls=None):
    # Instance access: resolve the referenced remote object; class
    # access (local is None) returns the descriptor itself.
    if local is not None:
        # Don't use local here, as it might be security proxied.
        local = get_obj_info(local).get_obj()
        if self._cls is None:
            # Lazily record the class owning this descriptor.
            self._cls = _find_descriptor_class(cls or local.__class__, self)
    if local is None:
        return self
    # Fast path: a remote object is already cached for this relation.
    remote = self._relation.get_remote(local)
    if remote is not None:
        return remote
    if self._relation.local_variables_are_none(local):
        # An all-None foreign key can never match anything; avoid a
        # pointless database query.
        return None
    store = Store.of(local)
    if store is None:
        return None
    if self._relation.remote_key_is_primary:
        # The foreign key targets the remote primary key, so a plain
        # get() by primary key values suffices.
        remote = store.get(self._relation.remote_cls,
                           self._relation.get_local_variables(local))
    else:
        # Otherwise run a find() with the generated comparison clause.
        where = self._relation.get_where_for_remote(local)
        result = store.find(self._relation.remote_cls, where)
        remote = result.one()
    if remote is not None:
        # Cache the pair so the next access takes the fast path above.
        self._relation.link(local, remote)
    return remote
def test_events(prop_cls):
    # Verify that "changed" events for a mutable (list) property are
    # deferred: mutations alone fire nothing, while "flush" and object
    # deletion each emit one changed-call for the modified variable.
    class Class(object):
        __storm_table__ = "mytable"
        prop = prop_cls(default_factory=list, primary=True)
    obj = Class()
    obj_info = get_obj_info(obj)
    change_callback = Mock()
    obj_info.checkpoint()
    obj_info.event.emit("start-tracking-changes", obj_info.event)
    obj_info.event.hook("changed", change_callback)
    # Events shouldn't trigger until we flush
    assert obj.prop == []
    assert change_callback.call_args_list == []
    obj.prop.append("a")
    assert change_callback.call_args_list == []
    # Check "flush" event. Notice that the other variable wasn't
    # listed, since it wasn't changed.
    obj_info.event.emit("flush")
    assert change_callback.call_args_list == [
        call(ANY, Class.prop, None, ["a"], False),
    ]
    change_callback.reset_mock()
    # Check "object-deleted" event. Notice that the other variable
    # wasn't listed again, since it wasn't changed.
    del obj
    assert change_callback.call_args_list == [
        call(ANY, Class.prop, None, ["a"], False),
    ]
def detach(self):
    """Detach this object from its store without deleting its row.

    The store stops tracking the object (no change notification, no
    lazy resolving).  Returns self for call chaining.
    """
    obj_info = info.get_obj_info(self)
    store = obj_info.get('store')
    # A dirty object would silently lose its unflushed changes here.
    # Use the idiomatic "not in" membership test.
    assert obj_info not in store._dirty, "Can't Detach Dirty Object"
    store._remove_from_alive(obj_info)
    store._disable_change_notification(obj_info)
    store._disable_lazy_resolving(obj_info)
    return self
def remove(self, remote):
    """Delete the link rows joining our local object to C{remote}."""
    store = Store.of(self._local)
    if store is None:
        raise NoStoreError("Can't perform operation without a store")
    # Don't use remote here, as it might be security proxied or something.
    remote = get_obj_info(remote).get_obj()
    local_clause = self._relation1.get_where_for_remote(self._local)
    remote_clause = self._relation2.get_where_for_remote(remote)
    store.find(self._link_cls, local_clause & remote_clause).remove()
def get_remote(self, local):
    """Return the remote object for this relation, using the local cache.

    If the object in the cache is invalidated, we validate it again
    to check if it's still in the database.
    """
    local_info = get_obj_info(local)
    try:
        cached = local_info[self]["remote"]
    except KeyError:
        # Nothing cached for this relation yet.
        return None
    cached_info = get_obj_info(cached)
    if cached_info.get("invalidated"):
        # Re-validate against the database; a lost row means the
        # cached object is gone.
        try:
            Store.of(cached)._validate_alive(cached_info)
        except LostObjectError:
            return None
    return cached
def remove(self, remote):
    """Delete the link rows joining our local object to C{remote}."""
    store = Store.of(self._local)
    if store is None:
        raise NoStoreError("Can't perform operation without a store")
    # Unwrap a possible security proxy before building the clause.
    remote = get_obj_info(remote).get_obj()
    condition = (
        self._relation1.get_where_for_remote(self._local)
        & self._relation2.get_where_for_remote(remote))
    store.find(self._link_cls, condition).remove()
def event_key(self):
    """See `ILongPollEvent`.

    Constructs the key from the table name and primary key values of
    the Storm model object.
    """
    cls_info = get_obj_info(self.source).cls_info
    table_name = cls_info.table.name.lower()
    return generate_event_key(table_name, *gen_primary_key(self.source))
def attach(self, store):
    """Attach this object to C{store}, enabling its tracking machinery.

    Accepts either a raw store or any wrapper exposing a C{store}
    attribute.  Returns self for call chaining.
    """
    # Unwrap a store-holding wrapper, if one was passed.
    store = getattr(store, 'store', store)
    obj_info = info.get_obj_info(self)
    obj_info['store'] = store
    store._enable_change_notification(obj_info)
    store._add_to_alive(obj_info)
    store._enable_lazy_resolving(obj_info)
    return self
def remove(self, obj):
    """Remove an object from the store.

    The associated row will be deleted from the database.
    """
    # Emit our own event before anything is actually modified, so
    # listeners still see the object fully intact.
    get_obj_info(obj).event.emit("before-removed")
    super(StoqlibStore, self).remove(obj)
def __get__(self, obj, cls=None):
    """Return the column on class access, or the value on instance access."""
    if obj is None:
        return self._get_column(cls)
    obj_info = get_obj_info(obj)
    if cls is None:
        # Don't trust obj.__class__; it might be proxied or whatever.
        cls = obj_info.cls_info.cls
    return obj_info.variables[self._get_column(cls)].get()
def event_key(self):
    """See `ILongPollEvent`.

    Constructs the key from the table name and primary key values of
    the Storm model object.
    """
    source = self.source
    table = get_obj_info(source).cls_info.table
    return generate_event_key(table.name.lower(), *gen_primary_key(source))
def test_reload(self):
    # reload() loads the given objects using queries generated by
    # gen_reload_queries().
    db_object = self.factory.makeComponent()
    db_object_naked = proxy.removeSecurityProxy(db_object)
    db_object_info = get_obj_info(db_object_naked)
    IStore(db_object).flush()
    # failUnlessEqual is a long-deprecated alias; use assertEqual.
    self.assertEqual(None, db_object_info.get('invalidated'))
    IStore(db_object).invalidate(db_object)
    self.assertEqual(True, db_object_info.get('invalidated'))
    bulk.reload([db_object])
    self.assertEqual(None, db_object_info.get('invalidated'))
def _add_all(self, obj_info, local_info):
    # "added" event handler: once the triggering object joins a store,
    # add the local object and every remote linked through this
    # relation to the same store, recording flush ordering.
    store = Store.of(obj_info)
    store.add(local_info)
    # This handler has done its job for local_info; stop listening.
    local_info.event.unhook("added", self._add_all, local_info)

    def add(remote_info):
        # Unhook first so adding the remote can't re-trigger us, then
        # add it and record which side must be flushed first.
        remote_info.event.unhook("added", self._add_all, local_info)
        store.add(remote_info)
        self._add_flush_order(local_info, remote_info,
                              remote_first=(not self.on_remote))

    if self.many:
        for remote_info in local_info[self]["remote"]:
            add(remote_info)
    else:
        add(get_obj_info(local_info[self]["remote"]))
def dbify_value(col, val):
    """Convert a value into a form that Storm can compile directly."""
    if isinstance(val, SQL):
        return (val,)
    if isinstance(col, Reference):
        # References are mainly meant to be used as descriptors, so we
        # have to perform a bit of evil here to turn the (potentially
        # None) value into a sequence of primary key values.
        if val is None:
            width = len(col._relation._get_local_columns(col._cls))
            return (None,) * width
        remote = get_obj_info(val).get_obj()
        return col._relation.get_remote_variables(remote)
    return (col.variable_factory(value=val),)
def __storm_pre_flush__(self):
    """Record which kind of flush (create/delete/update) is pending."""
    obj_info = get_obj_info(self)
    pending = obj_info.get("pending")
    store = obj_info.get("store")
    if pending is PENDING_ADD:
        obj_info["stoq-status"] = _OBJ_CREATED
    elif pending is PENDING_REMOVE:
        obj_info["stoq-status"] = _OBJ_DELETED
    else:
        # This is storm's approach to check if the obj has pending
        # changes, but only makes sense if the obj is not being
        # created/deleted.
        current = obj_info.get("stoq-status")
        has_changes = store._get_changes_map(obj_info, True)
        if has_changes and current not in [_OBJ_CREATED, _OBJ_DELETED]:
            obj_info["stoq-status"] = _OBJ_UPDATED
def dbify_value(col, val):
    """Convert a value into a form that Storm can compile directly."""
    if isinstance(val, SQL):
        result = (val,)
    elif isinstance(col, Reference):
        # References are mainly meant to be used as descriptors, so we
        # have to perform a bit of evil here to turn the (potentially
        # None) value into a sequence of primary key values.
        if val is None:
            result = (None,) * len(
                col._relation._get_local_columns(col._cls))
        else:
            result = col._relation.get_remote_variables(
                get_obj_info(val).get_obj())
    else:
        result = (col.variable_factory(value=val),)
    return result
def __storm_flushed__(self):
    # Rebuild this object's full-text search row after a flush: drop
    # the old entry, then re-insert it while the object is still bound
    # to a store.
    avatar_store.execute(DELETE_SQL, (self.__class__.__name__, self.id))
    if get_obj_info(self).get("store") is not None:
        # Collect the non-None searchable values and join them into a
        # single UTF-8 encoded text blob.
        vals = [v for v in self.getSearchVals() if v is not None]
        if vals:
            text = self._searchSeparator.join(vals).encode("utf-8")
            avatar_store.execute(
                INSERT_SQL,
                (self.__class__.__name__, self.id,
                 self.getSearchLanguage(), text))
    # NOTE(review): the original source is collapsed onto one line and
    # does not show the commit's nesting; assuming it runs after the
    # conditional insert, committing the delete too — TODO confirm.
    avatar_store.commit()
def __storm_pre_flush__(self):
    """Stamp the object's stoq-status before storm flushes it."""
    obj_info = get_obj_info(self)
    pending = obj_info.get("pending")
    stoq_pending = obj_info.get('stoq-status')
    store = obj_info.get("store")
    if pending is PENDING_ADD:
        status = _OBJ_CREATED
    elif pending is PENDING_REMOVE:
        status = _OBJ_DELETED
    elif (store._get_changes_map(obj_info, True)
          and stoq_pending not in [_OBJ_CREATED, _OBJ_DELETED]):
        # This is storm's approach to check if the obj has pending
        # changes, but only makes sense if the obj is not being
        # created/deleted.
        status = _OBJ_UPDATED
    else:
        return
    obj_info['stoq-status'] = status
def __storm_pre_flush__(self):
    """Track whether the flush will create, delete or update this row."""
    obj_info = get_obj_info(self)
    pending = obj_info.get("pending")
    stoq_pending = obj_info.get('stoq-status')
    store = obj_info.get("store")
    if pending is PENDING_ADD:
        obj_info['stoq-status'] = _OBJ_CREATED
    elif pending is PENDING_REMOVE:
        obj_info['stoq-status'] = _OBJ_DELETED
    else:
        # This is storm's approach to check if the obj
        # has pending changes
        already_final = stoq_pending in [_OBJ_CREATED, _OBJ_DELETED]
        if store._get_changes_map(obj_info, True) and not already_final:
            obj_info['stoq-status'] = _OBJ_UPDATED
def autoreload_object(obj):
    """Autoreload object in any other existing store.

    This will go through every open store and see if the object is
    alive in the store. If it is, it will be marked for autoreload
    the next time its used.
    """
    for store in _stores:
        if Store.of(obj) is store:
            # Skip the store the object currently lives in.
            continue
        alive = store._alive.get((obj.__class__, (obj.id,)))
        if not alive:
            continue
        # Make sure it was not modified before reloading it; otherwise
        # we would lose the changes.
        assert not store._is_dirty(get_obj_info(obj))
        store.autoreload(alive)
def autoreload_object(obj):
    """Autoreload object in any other existing store.

    This will go through every open store and see if the object is
    alive in the store. If it is, it will be marked for autoreload
    the next time its used.
    """
    for candidate in _stores:
        # The object's own store is left untouched.
        if Store.of(obj) is candidate:
            continue
        alive = candidate._alive.get((obj.__class__, (obj.id,)))
        if alive:
            # Reloading a modified object would discard its changes,
            # so it must still be clean here.
            assert not candidate._is_dirty(get_obj_info(obj))
            candidate.autoreload(alive)
def setup(self, property, *args, **kwargs):
    """Build a two-property test class hierarchy and cache its pieces."""
    prop2_kwargs = kwargs.pop("prop2_kwargs", {})
    kwargs["primary"] = True

    class Class(object):
        __storm_table__ = "mytable"
        prop1 = property("column1", *args, **kwargs)
        prop2 = property(**prop2_kwargs)

    class SubClass(Class):
        pass

    self.Class = Class
    self.SubClass = SubClass
    self.obj = SubClass()
    self.obj_info = get_obj_info(self.obj)
    # Cache the columns and their variables for the assertions.
    self.column1 = SubClass.prop1
    self.column2 = SubClass.prop2
    self.variable1 = self.obj_info.variables[self.column1]
    self.variable2 = self.obj_info.variables[self.column2]
def __get__(self, local, cls=None):
    """Return the descriptor on class access, else a bound reference set."""
    if local is None:
        return self
    if self._relation1 is None:
        # Don't use local.__class__ here, as it might be security
        # proxied or something.
        # XXX UNTESTED!
        self._build_relations(get_obj_info(local).cls_info.cls)
    if self._relation2 is None:
        return BoundReferenceSet(self._relation1, local, self._order_by)
    return BoundIndirectReferenceSet(
        self._relation1, self._relation2, local, self._order_by)
def emit(self, event, skip_duplicate=False, **kwargs):
    """Emits an event to be run when the model's associated store commits.

    Handlers will be called like handler(model, **kwargs).

    Does nothing if "skip_duplicate" is True, and an "identical" event
    was already emitted.
    """
    store = get_obj_info(self)["store"]
    if store is None:
        raise Exception("Tried to emit event for store-less object")
    if skip_duplicate:
        for pending in store.events:
            # NTA XXX: Equality in Python is unreliable
            is_duplicate = (pending.obj == self
                            and pending.event == event
                            and pending.kwargs == kwargs)
            if is_duplicate:
                return
    store.events.append(PendingEvent(self, event, kwargs))
def test_list_unnecessary_update(self):
    """
    Flushing an object with a list variable doesn't create an unnecessary
    UPDATE statement.
    """
    self.store.execute("INSERT INTO lst1 VALUES (1, '{}')", noresult=True)
    first = self.store.find(Lst1, id=1).one()
    self.assertTrue(first)
    self.store.invalidate()
    reloaded = self.store.find(Lst1, id=1).one()
    self.assertTrue(reloaded)
    # Record every "changed" event fired during the flush.
    seen = []
    get_obj_info(reloaded).event.hook(
        "changed", lambda *args: seen.append(args))
    self.store.flush()
    self.assertEqual(seen, [])
def test_list_unnecessary_update(self):
    """
    Flushing an object with a list variable doesn't create an unnecessary
    UPDATE statement.
    """
    self.store.execute("INSERT INTO lst1 VALUES (1, '{}')", noresult=True)
    lst = self.store.find(Lst1, id=1).one()
    self.assertTrue(lst)
    self.store.invalidate()
    lst2 = self.store.find(Lst1, id=1).one()
    self.assertTrue(lst2)
    obj_info = get_obj_info(lst2)
    events = []
    obj_info.event.hook("changed", lambda *args: events.append(args))
    self.store.flush()
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(events, [])
def __get__(self, local, cls=None):
    """Return the descriptor on class access, else a bound reference set."""
    if local is not None:
        # Don't use local here, as it might be security proxied.
        local = get_obj_info(local).get_obj()
        if self._cls is None:
            self._cls = _find_descriptor_class(cls or local.__class__, self)
    if local is None:
        return self
    if self._relation2 is None:
        return BoundReferenceSet(self._relation1, local, self._order_by)
    return BoundIndirectReferenceSet(
        self._relation1, self._relation2, local, self._order_by)
def __get__(self, local, cls=None):
    """Return the descriptor on class access, else a bound reference set."""
    if local is None:
        return self
    # Don't use local here, as it might be security proxied or something.
    local = get_obj_info(local).get_obj()
    if self._relation1 is None:
        self._build_relations(local.__class__)
    if self._relation2 is not None:
        return BoundIndirectReferenceSet(
            self._relation1, self._relation2, local, self._order_by)
    return CBoundReferenceSet(self._relation1, local, self._order_by)
def autoreload_object(obj, obj_store=False):
    """Autoreload object in any other existing store.

    This will go through every open store and see if the object is
    alive in the store. If it is, it will be marked for autoreload
    the next time its used.

    :param obj_store: if we should also autoreload the current store
        of the object
    """
    # _stores is a weak collection; snapshot it so it cannot change
    # size while we iterate (specially under threaded operations).
    for store in list(_stores):
        if not obj_store and Store.of(obj) is store:
            continue
        alive = store._alive.get((obj.__class__, (obj.id,)))
        if not alive:
            continue
        # Make sure it was not modified before reloading it; otherwise
        # we would lose the changes.
        assert not store._is_dirty(get_obj_info(obj))
        store.autoreload(alive)
def _listen_to_events(self):
    """Hook this object's domain callbacks into its storm events."""
    event = get_obj_info(self).event
    for name, callback in (
            ('added', self._on_object_added),
            ('changed', self._on_object_changed),
            ('before-removed', self._on_object_before_removed)):
        event.hook(name, callback)
def _listen_to_events(self):
    """Subscribe the domain-level callbacks to storm's event system."""
    event = get_obj_info(self).event
    hooks = (
        ("changed", self._on_object_changed),
        ("before-removed", self._on_object_before_removed),
        ("before-commited", self._on_object_before_commited),
    )
    for name, handler in hooks:
        event.hook(name, handler)