def receive_before_update(mapper, connection, target):
    """listen for the 'before_update' event

    Keeps the ``ssh_password`` and ``db_password`` columns encrypted at rest:
    if the newly assigned plaintext equals the decryption of the stored
    (previous) ciphertext, the old ciphertext is kept unchanged; otherwise
    the new value is encrypted before flush.

    NOTE(review): Python 2 code (`print e` statements). The broad
    ``except`` also catches decryption failures and IndexError when the
    attribute has no added/deleted history, in which case the current value
    is (re-)encrypted — confirm that re-encrypting an already-encrypted
    value cannot occur here.
    """
    from sqlalchemy.orm.attributes import get_history
    # History triple: (added, unchanged, deleted)
    ssh_pass_hist = get_history(target, 'ssh_password')
    added, unchanged, deleted = ssh_pass_hist
    try:
        if added[0] == pwd_decryption(deleted[0]):
            # unchanged plaintext: keep the previous ciphertext
            target.ssh_password = deleted[0]
        else:
            # new plaintext: encrypt it
            target.ssh_password = pwd_encryption(target.ssh_password)
    except Exception as e:
        print e
        target.ssh_password = pwd_encryption(target.ssh_password)
    db_pass_hist = get_history(target, 'db_password')
    added, unchanged, deleted = db_pass_hist
    try:
        if added[0] == pwd_decryption(deleted[0]):
            # unchanged plaintext: keep the previous ciphertext
            target.db_password = deleted[0]
        else:
            # new plaintext: encrypt it
            target.db_password = pwd_encryption(target.db_password)
    except Exception as e:
        print e
        target.db_password = pwd_encryption(target.db_password)
def test_dict_collections(self):
    """History tracking for a dict-based (attribute_mapped) collection."""
    class Foo(fixtures.Base):
        pass
    class Bar(fixtures.Base):
        pass
    from sqlalchemy.orm.collections import attribute_mapped_collection
    attributes.register_class(Foo)
    attributes.register_attribute(Foo, 'someattr', uselist=True, useobject=True, typecallable=attribute_mapped_collection('name'))
    hi = Bar(name='hi')
    there = Bar(name='there')
    old = Bar(name='old')
    new = Bar(name='new')
    f = Foo()
    # fresh object: empty (added, unchanged, deleted) history
    self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [], []))
    f.someattr['hi'] = hi
    self.assertEquals(attributes.get_history(f._state, 'someattr'), ([hi], [], []))
    f.someattr['there'] = there
    # dict collections are unordered; compare each history group as a set
    self.assertEquals(tuple([set(x) for x in attributes.get_history(f._state, 'someattr')]), (set([hi, there]), set([]), set([])))
    f._state.commit(['someattr'])
    # after commit, the added members move to the "unchanged" group
    self.assertEquals(tuple([set(x) for x in attributes.get_history(f._state, 'someattr')]), (set([]), set([hi, there]), set([])))
def test_many_to_one_cascade(self):
    """merge() registers history on a changed many-to-one relationship."""
    mapper(Address, addresses, properties={
        'user': relationship(User)
    })
    mapper(User, users)
    u1 = User(id=1, name="u1")
    a1 = Address(id=1, email_address="a1", user=u1)
    u2 = User(id=2, name="u2")
    sess = create_session()
    sess.add_all([a1, u2])
    sess.flush()
    a1.user = u2
    sess2 = create_session()
    a2 = sess2.merge(a1)
    # loading merge: old value could not be loaded passively
    eq_(
        attributes.get_history(a2, 'user'),
        ([u2], (), [attributes.PASSIVE_NO_RESULT])
    )
    assert a2 in sess2.dirty
    sess.refresh(a1)
    sess2 = create_session()
    a2 = sess2.merge(a1, load=False)
    # non-loading merge of a clean object: value lands in "unchanged"
    eq_(
        attributes.get_history(a2, 'user'),
        ((), [u1], ())
    )
    assert a2 not in sess2.dirty
def test_lazy_backref_collections(self):
    """Lazy-loaded backref collections merge loaded and pending members."""
    class Foo(_base.BasicEntity):
        pass
    class Bar(_base.BasicEntity):
        pass
    lazy_load = []
    def lazyload(instance):
        # attribute loader closure; returns whatever lazy_load currently holds
        def load():
            return lazy_load
        return load
    attributes.register_class(Foo)
    attributes.register_class(Bar)
    attributes.register_attribute(Foo, 'bars', uselist=True, extension=attributes.GenericBackrefExtension('foo'), trackparent=True, callable_=lazyload, useobject=True)
    attributes.register_attribute(Bar, 'foo', uselist=False, extension=attributes.GenericBackrefExtension('bars'), trackparent=True, useobject=True)
    bar1, bar2, bar3, bar4 = [Bar(id=1), Bar(id=2), Bar(id=3), Bar(id=4)]
    lazy_load = [bar1, bar2, bar3]
    f = Foo()
    bar4 = Bar()
    bar4.foo = f
    # backref append is "added"; lazily loaded members are "unchanged"
    eq_(attributes.get_history(attributes.instance_state(f), 'bars'), ([bar4], [bar1, bar2, bar3], []))
    lazy_load = None
    f = Foo()
    bar4 = Bar()
    bar4.foo = f
    eq_(attributes.get_history(attributes.instance_state(f), 'bars'), ([bar4], [], []))
    lazy_load = [bar1, bar2, bar3]
    attributes.instance_state(f).expire_attributes(['bars'])
    # after expiry the pending append is gone; only the lazy load remains
    eq_(attributes.get_history(attributes.instance_state(f), 'bars'), ((), [bar1, bar2, bar3], ()))
def edit_task(name, goal, strategy, task):
    """Flask view: edit a task and maintain its completion timestamp.

    Looks up the project/goal/strategy/task chain by id, and on a valid
    POST updates ``completeDate`` based on how the ``complete`` flag
    changed before committing.

    NOTE(review): Python 2 code (print statements). ``get_history(...)[0]``
    is the "added" (new) group and ``[2]`` the "deleted" (old) group —
    the first print message appears swapped relative to its condition.
    """
    project = models.Projects.query.filter_by(id=name).first()
    pgoal = models.Goals.query.filter_by(id=goal).first()
    pstrat = models.Strategies.query.filter_by(id=strategy).first()
    ptask = models.Tasks.query.filter_by(id=task).first()
    form = task_form(obj=ptask)
    form.populate_obj(ptask)
    form.deadline.data = ptask.deadline.strftime("%m/%d/%Y")
    tform = task_form(request.values)
    if request.method == "POST" and form.validate_on_submit():
        # if it changed from True to false, set complete date to None
        if get_history(ptask, "complete")[0] == [True] and get_history(ptask, "complete")[2] == [False]:
            print "changed from false to true"
            ptask.completeDate = datetime.datetime.utcnow()
        if get_history(ptask, "complete")[0] == [False] and get_history(ptask, "complete")[2] == [True]:
            print "changed from true to false"
            ptask.completeDate = None
        # task=tform.task.data
        # strat=pstrat
        # note = tform.note.data
        # staff=tform.staff.data
        # deadline=tform.deadline.data
        # complete=tform.complete.data
        # created=datetime.datetime.utcnow()
        db.session.commit()
        return redirect(url_for("task_outline", name=name, goal=goal, strategy=strategy))
    return render_template(
        "edit_task.html", tform=tform, form=form, project=project, pgoal=pgoal, pstrat=pstrat, ptask=ptask
    )
def _assert_history(self, obj, compare, compare_passive=None):
    """Assert the relationship history of *obj* equals *compare*.

    ``compare_passive`` (defaults to ``compare``) is checked against the
    LOAD_AGAINST_COMMITTED form of the history. Autoflush is suspended
    during the check so get_history itself cannot trigger a flush.

    NOTE(review): ``attrname`` is unbound if *obj* is neither a User nor
    an Order — callers are expected to pass only those two types.
    """
    if isinstance(obj, self.classes.User):
        attrname = "addresses"
    elif isinstance(obj, self.classes.Order):
        attrname = "items"
    sess = inspect(obj).session
    if sess:
        sess.autoflush = False
    try:
        eq_(attributes.get_history(obj, attrname), compare)
        if compare_passive is None:
            compare_passive = compare
        eq_(
            attributes.get_history(
                obj, attrname, attributes.LOAD_AGAINST_COMMITTED
            ),
            compare_passive,
        )
    finally:
        # always restore autoflush, even when an assertion fails
        if sess:
            sess.autoflush = True
def before_upsert(self):
    """Refresh derived data before this row is inserted or updated.

    Externals are rebuilt (cleanup then update) only when the 'externals'
    relationship actually changed; genres likewise.
    """
    self.check_importers()
    externals_dirty = get_history(self, 'externals').has_changes()
    genres_dirty = get_history(self, 'genres').has_changes()
    if externals_dirty:
        self.cleanup_externals()
        self.update_externals()
    if genres_dirty:
        self.update_genres()
def test_many_to_one_cascade(self):
    """merge() registers history on a changed many-to-one relationship."""
    Address, addresses, users, User = (self.classes.Address,
                                       self.tables.addresses,
                                       self.tables.users,
                                       self.classes.User)
    mapper(Address, addresses, properties={'user': relationship(User)})
    mapper(User, users)
    u1 = User(id=1, name="u1")
    a1 = Address(id=1, email_address="a1", user=u1)
    u2 = User(id=2, name="u2")
    sess = create_session()
    sess.add_all([a1, u2])
    sess.flush()
    a1.user = u2
    sess2 = create_session()
    a2 = sess2.merge(a1)
    # loading merge: the new user shows up in "added"
    eq_(attributes.get_history(a2, 'user'), ([u2], (), ()))
    assert a2 in sess2.dirty
    sess.refresh(a1)
    sess2 = create_session()
    a2 = sess2.merge(a1, load=False)
    # non-loading merge of a clean object: value is "unchanged"
    eq_(attributes.get_history(a2, 'user'), ((), [u1], ()))
    assert a2 not in sess2.dirty
def get_attribute_history(self, state, key, passive=True):
    """Return the (added, unchanged, deleted) history for ``key`` on ``state``.

    Results are cached per (state, key).  The cache stores the objects
    themselves (not their states): the strong references prevent newly
    loaded objects from being garbage collected mid-flush.  A cached
    "passive" lookup is upgraded in place when a non-passive lookup is
    later requested.
    """
    cache_key = ("history", state, key)
    cached = self.attributes.get(cache_key)
    if cached is not None:
        added, unchanged, deleted, was_passive = cached
        # a passive entry is incomplete for a non-passive request: redo it
        if was_passive and not passive:
            added, unchanged, deleted = attributes.get_history(state, key, passive=False)
            self.attributes[cache_key] = (added, unchanged, deleted, passive)
    else:
        added, unchanged, deleted = attributes.get_history(state, key, passive=passive)
        self.attributes[cache_key] = (added, unchanged, deleted, passive)
    if added is None:
        return (added, unchanged, deleted)
    # convert instances to their InstanceState where available
    def _states(group):
        return [getattr(item, '_state', item) for item in group]
    return (_states(added), _states(unchanged), _states(deleted))
def test_dict_collections(self):
    """History tracking for a dict-based (attribute_mapped) collection."""
    class Foo(_base.BasicEntity):
        pass
    class Bar(_base.BasicEntity):
        pass
    from sqlalchemy.orm.collections import attribute_mapped_collection
    attributes.register_class(Foo)
    attributes.register_attribute(Foo, 'someattr', uselist=True, useobject=True, typecallable=attribute_mapped_collection('name'))
    hi = Bar(name='hi')
    there = Bar(name='there')
    old = Bar(name='old')
    new = Bar(name='new')
    f = Foo()
    # fresh object: empty (added, unchanged, deleted) history
    eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ((), [], ()))
    f.someattr['hi'] = hi
    eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], []))
    f.someattr['there'] = there
    # dict collections are unordered; compare each group as a set
    eq_(tuple([set(x) for x in attributes.get_history(attributes.instance_state(f), 'someattr')]), (set([hi, there]), set(), set()))
    attributes.instance_state(f).commit(['someattr'])
    # after commit, the added members move to the "unchanged" group
    eq_(tuple([set(x) for x in attributes.get_history(attributes.instance_state(f), 'someattr')]), (set(), set([hi, there]), set()))
def channel_visibility_change(mapper, connection, target):
    """Propagate a channel visibility change to its video instances.

    When any of the visibility-related flags changed, stamp every video
    instance of the channel so downstream consumers pick up the new
    public/hidden state.
    """
    visibility_fields = ("public", "deleted", "visible")
    if not any(get_history(target, field).has_changes() for field in visibility_fields):
        return
    instances = list(VideoInstance.query.filter(VideoInstance.channel == target.id).values("id"))
    if instances:
        instance_ids = [row[0] for row in instances]
        update_video_instance_date_updated(instance_ids, visible=_channel_is_public(target))
def before_flush(session, flush_context, instances):
    """SQLAlchemy before_flush hook keeping FirstBlood awards consistent.

    Handles three deletion cases (solve, user, team) by removing the
    affected awards, and one modification case (hidden/banned flag change)
    — in each case the impacted challenges are collected into
    ``session.requires_award_recalculation`` for later recalculation.
    """
    Model = get_model()
    for instance in session.deleted:
        if isinstance(instance, Solves):
            # A solve has been deleted - delete any awards associated with this solve
            award_ids = FirstBloodAward.query.with_entities(
                FirstBloodAward.id).filter(
                FirstBloodAward.solve_id == instance.id).subquery()
            rowcount = Awards.query.filter(
                Awards.id.in_(award_ids)).delete(synchronize_session='fetch')
            if rowcount > 0:
                # Mark the awards for this challenge for recalculation
                if not hasattr(session, 'requires_award_recalculation'):
                    session.requires_award_recalculation = set()
                session.requires_award_recalculation.add(
                    Challenges.query.get(instance.challenge_id))
        if isinstance(instance, Users):
            # A user has been deleted - mark all challenges where this user had awards for recalculation
            # NOTE: This doesn't seem to be used by CTFd - see after_bulk_delete
            for award in FirstBloodAward.query.join(
                    Users, FirstBloodAward.user_id == Users.id).filter(
                    Users.id == instance.id).all():
                if not hasattr(session, 'requires_award_recalculation'):
                    session.requires_award_recalculation = set()
                session.requires_award_recalculation.add(award.solve.challenge)
                session.delete(award)
        if isinstance(instance, Teams):
            # A team has been deleted - mark all challenges where this team had awards for recalculation
            for award in FirstBloodAward.query.join(
                    Teams, FirstBloodAward.team_id == Teams.id).filter(
                    Teams.id == instance.id).all():
                if not hasattr(session, 'requires_award_recalculation'):
                    session.requires_award_recalculation = set()
                session.requires_award_recalculation.add(award.solve.challenge)
                session.delete(award)
    for instance in session.dirty:
        if session.is_modified(instance):
            if isinstance(instance, Model):
                if get_history(instance, "hidden").has_changes() or get_history(
                        instance, "banned").has_changes():
                    # The user/team hidden state has changed - update awards on all challenges this user has solved
                    for solve in Solves.query.join(
                            Challenges, Solves.challenge_id
                            == Challenges.id).filter(
                            Solves.account_id == instance.id,
                            Challenges.type == "firstblood"):
                        if not hasattr(session, 'requires_award_recalculation'):
                            session.requires_award_recalculation = set()
                        session.requires_award_recalculation.add(
                            solve.challenge)
def test_get_history(self):
    """get_history over composite (Point-valued) attributes."""
    Edge = self.classes.Edge
    Point = self.classes.Point
    from sqlalchemy.orm.attributes import get_history
    e1 = Edge()
    e1.start = Point(1, 2)
    # assigned composite: new value in "added", empty composite in "deleted"
    eq_(get_history(e1, 'start'), ([Point(x=1, y=2)], (), [Point(x=None, y=None)]))
    # untouched composite: empty value counts as "unchanged"
    eq_(get_history(e1, 'end'), ((), [Point(x=None, y=None)], ()))
def leasing_force_expiration(mapper, connection, target):
    """Force-expire all leases of *target* when its addressing changed.

    Any pending add or delete on 'static_ip' or 'pool_subnet' marks the
    related Lease rows with force_expire=True.
    """
    addressing_changed = False
    for attr_name in ('static_ip', 'pool_subnet'):
        added, _, deleted = get_history(target, attr_name)
        if added or deleted:
            addressing_changed = True
    if addressing_changed:
        Lease.query.with_parent(target).update({'force_expire': True})
def _assert_history(self, obj, compare, compare_passive=None):
    """Assert the relationship history of *obj* equals *compare*.

    The LOAD_AGAINST_COMMITTED form is checked against *compare_passive*,
    which defaults to *compare* when not given.
    """
    if isinstance(obj, self.classes.Order):
        attrname = "items"
    elif isinstance(obj, self.classes.User):
        attrname = "addresses"
    expected_passive = compare if compare_passive is None else compare_passive
    eq_(attributes.get_history(obj, attrname), compare)
    eq_(
        attributes.get_history(obj, attrname, attributes.LOAD_AGAINST_COMMITTED),
        expected_passive,
    )
def test_get_history(self):
    """get_history over composite (Point-valued) attributes."""
    Edge = self.classes.Edge
    Point = self.classes.Point
    from sqlalchemy.orm.attributes import get_history
    e1 = Edge()
    e1.start = Point(1, 2)
    # assigned composite: new value in "added", empty composite in "deleted"
    eq_(
        get_history(e1, "start"),
        ([Point(x=1, y=2)], (), [Point(x=None, y=None)]),
    )
    # untouched composite: empty value counts as "unchanged"
    eq_(get_history(e1, "end"), ((), [Point(x=None, y=None)], ()))
def get_old_number_context(self):
    """Return the (number, context) pair as it was before pending changes.

    Falls back to the current values when the attribute has no "deleted"
    (previous) history entry.

    NOTE(review): history is read from the 'mailbox' attribute but applied
    to ``self.number`` — confirm 'mailbox' is the mapped attribute backing
    the number, otherwise this mixes two different fields.
    """
    number_history = get_history(self, 'mailbox')
    context_history = get_history(self, 'context')
    old_number = self.number
    # index 2 of the history triple is the "deleted" (previous) group
    if number_history[2]:
        old_number = number_history[2][0]
    old_context = self.context
    if context_history[2]:
        old_context = context_history[2][0]
    return old_number, old_context
def test_object_collections_set(self):
    """Bulk assignment of object collections produces correct history."""
    class Foo(fixtures.Base):
        pass
    class Bar(fixtures.Base):
        # history tracking must never rely on member truthiness
        def __nonzero__(self):
            assert False
    attributes.register_class(Foo)
    attributes.register_attribute(Foo, 'someattr', uselist=True, useobject=True)
    hi = Bar(name='hi')
    there = Bar(name='there')
    old = Bar(name='old')
    new = Bar(name='new')
    # case 1. new object
    f = Foo()
    self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [], []))
    f.someattr = [hi]
    self.assertEquals(attributes.get_history(f._state, 'someattr'), ([hi], [], []))
    f._state.commit(['someattr'])
    self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [hi], []))
    f.someattr = [there]
    self.assertEquals(attributes.get_history(f._state, 'someattr'), ([there], [], [hi]))
    f._state.commit(['someattr'])
    self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [there], []))
    f.someattr = [hi]
    self.assertEquals(attributes.get_history(f._state, 'someattr'), ([hi], [], [there]))
    f.someattr = [old, new]
    self.assertEquals(attributes.get_history(f._state, 'someattr'), ([old, new], [], [there]))
    # case 2. object with direct settings (similar to a load operation)
    f = Foo()
    collection = attributes.init_collection(f, 'someattr')
    collection.append_without_event(new)
    f._state.commit_all()
    self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [new], []))
    f.someattr = [old]
    self.assertEquals(attributes.get_history(f._state, 'someattr'), ([old], [], [new]))
    f._state.commit(['someattr'])
    self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [old], []))
def test_object_collections_set(self):
    """Bulk assignment of object collections produces correct history."""
    class Foo(_base.BasicEntity):
        pass
    class Bar(_base.BasicEntity):
        # history tracking must never rely on member truthiness
        def __nonzero__(self):
            assert False
    attributes.register_class(Foo)
    attributes.register_attribute(Foo, 'someattr', uselist=True, useobject=True)
    hi = Bar(name='hi')
    there = Bar(name='there')
    old = Bar(name='old')
    new = Bar(name='new')
    # case 1. new object
    f = Foo()
    eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ((), [], ()))
    f.someattr = [hi]
    eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], []))
    attributes.instance_state(f).commit(['someattr'])
    eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ((), [hi], ()))
    f.someattr = [there]
    eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([there], [], [hi]))
    attributes.instance_state(f).commit(['someattr'])
    eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ((), [there], ()))
    f.someattr = [hi]
    eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], [there]))
    f.someattr = [old, new]
    eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([old, new], [], [there]))
    # case 2. object with direct settings (similar to a load operation)
    f = Foo()
    collection = attributes.init_collection(attributes.instance_state(f), 'someattr')
    collection.append_without_event(new)
    attributes.instance_state(f).commit_all()
    eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ((), [new], ()))
    f.someattr = [old]
    eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([old], [], [new]))
    attributes.instance_state(f).commit(['someattr'])
    eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ((), [old], ()))
def _assert_history(self, obj, compare, compare_passive=None):
    """Verify *obj*'s relationship history in both lookup modes.

    *compare* is checked against the plain history; the
    LOAD_AGAINST_COMMITTED variant is checked against *compare_passive*
    (same as *compare* when omitted).
    """
    if isinstance(obj, self.classes.User):
        attrname = "addresses"
    elif isinstance(obj, self.classes.Order):
        attrname = "items"
    if compare_passive is None:
        compare_passive = compare
    actual = attributes.get_history(obj, attrname)
    eq_(actual, compare)
    actual_passive = attributes.get_history(
        obj, attrname, attributes.LOAD_AGAINST_COMMITTED)
    eq_(actual_passive, compare_passive)
def _session_flush(cls, session, flush_context, instances):
    """Track depot file references affected by the coming flush.

    Files on deleted objects are queued for removal; for new/dirty
    objects, newly attached files are queued as "new" and replaced files
    as "old".  The accumulators live on the session itself.
    """
    # deleted objects: every stored file becomes an orphan
    for entity in session.deleted:
        tracked = cls.mapped_entities.get(entity.__class__, tuple())
        for col in tracked:
            stored = getattr(entity, col)
            if stored is not None:
                session._depot_old = getattr(session, '_depot_old', set())
                session._depot_old.update(stored.files)
    # new/dirty objects: split the history into attached vs replaced files
    for entity in session.new.union(session.dirty):
        tracked = cls.mapped_entities.get(entity.__class__, tuple())
        for col in tracked:
            history = get_history(entity, col)
            attached = itertools.chain(*(f.files for f in history.added if f is not None))
            replaced = itertools.chain(*(f.files for f in history.deleted if f is not None))
            session._depot_new = getattr(session, '_depot_new', set())
            session._depot_new.update(attached)
            session._depot_old = getattr(session, '_depot_old', set())
            session._depot_old.update(replaced)
def test_basic(self):
    """Dynamic loader collections report loaded members as 'unchanged'."""
    mapper(User, users, properties={
        'addresses': dynamic_loader(mapper(Address, addresses))
    })
    sess = create_session()
    u1 = User(name='jack')
    u2 = User(name='ed')
    u2.addresses.append(Address(email_address='*****@*****.**'))
    u1.addresses.append(Address(email_address='*****@*****.**'))
    sess.add_all((u1, u2))
    sess.flush()
    from sqlalchemy.orm import attributes
    # flushed collection members land in the "unchanged" group
    self.assertEquals(
        attributes.get_history(attributes.instance_state(u1), 'addresses'),
        ([], [Address(email_address='*****@*****.**')], []))
    sess.clear()
    # test the test fixture a little bit
    assert User(name='jack', addresses=[Address(email_address='wrong')
                                        ]) != sess.query(User).first()
    assert User(name='jack', addresses=[Address(email_address='*****@*****.**')
                                        ]) == sess.query(User).first()
    assert [
        User(name='jack', addresses=[Address(email_address='*****@*****.**')]),
        User(name='ed', addresses=[Address(email_address='*****@*****.**')])
    ] == sess.query(User).all()
def updates(self) -> Dict[str, Any]:
    """Summarize pending ORM changes to the tracked service fields.

    Returns a dict with a human-readable ``message`` (one line per changed
    field) and a ``fields`` mapping containing the new value under the
    field name plus the previous value under ``<field>_prev``.  Returns an
    empty dict when the object has no pending modifications.
    """
    if not db.session.is_modified(self):
        return {}
    changes: Dict[str, Any] = {}
    messages: List[str] = []
    # track changes to these fields
    fields = [
        "service_id",
        "tariff",
        "utility_account_id",
        "gen_service_id",
        "gen_tariff",
        "gen_utility",
        "gen_utility_account_id",
        "provider_type",
    ]
    for field in fields:
        history = get_history(self, field)
        added = history.added[0] if history.added else None
        deleted = history.deleted[0] if history.deleted else None
        changes[field] = added
        changes["%s_prev" % field] = deleted
        # clear: History(added=[None], unchanged=(), deleted=['value'])
        # BUGFIX: report the removed value; the old code interpolated
        # `added`, which is always None in this branch.
        if deleted and not added:
            messages.append("%s: cleared %s." % (field, deleted))
        # add: History(added=['123'], unchanged=(), deleted=[None])
        if added and not deleted:
            messages.append("%s: set %s (was unset)." % (field, added))
        # update: History(added=['B-19-S'], unchanged=(), deleted=['E-19-S'])
        if added and deleted:
            messages.append("%s: updated %s (was %s)." % (field, added, deleted))
    return {
        "message": "\n".join(messages),
        "fields": changes,
    }
def test_basic(self):
    """Dynamic loader collections report loaded members as 'unchanged'."""
    mapper(User, users, properties={
        'addresses': dynamic_loader(mapper(Address, addresses))
    })
    sess = create_session()
    u1 = User(name='jack')
    u2 = User(name='ed')
    u2.addresses.append(Address(email_address='*****@*****.**'))
    u1.addresses.append(Address(email_address='*****@*****.**'))
    sess.add_all((u1, u2))
    sess.flush()
    from sqlalchemy.orm import attributes
    # flushed collection members land in the "unchanged" group
    self.assertEquals(attributes.get_history(attributes.instance_state(u1), 'addresses'), ([], [Address(email_address='*****@*****.**')], []))
    sess.clear()
    # test the test fixture a little bit
    assert User(name='jack', addresses=[Address(email_address='wrong')]) != sess.query(User).first()
    assert User(name='jack', addresses=[Address(email_address='*****@*****.**')]) == sess.query(User).first()
    assert [
        User(name='jack', addresses=[Address(email_address='*****@*****.**')]),
        User(name='ed', addresses=[Address(email_address='*****@*****.**')])
    ] == sess.query(User).all()
def after_update(self):
    """Invalidate the client_id-keyed cache entry when client_id changed.

    Reads the pending history of the 'client_id' attribute; if a previous
    value exists, the cache key built from that old value is deleted via
    the session's pipeline.
    """
    # BUGFIX: get_history takes (instance, attribute_name); the old code
    # passed the attribute *value* (self.client_id), which raises at runtime.
    client_id_hist = get_history(self, 'client_id')
    if client_id_hist.deleted:
        session = orm.Session.object_session(self)
        session.pipe.delete(self._cache_name_client_id.format(
            client_id_hist.deleted[0]
        ))
def new_version(self, session):
    """Turn this object into a brand-new row, preserving its elements.

    The instance is made transient (so the flush performs an INSERT) and
    its 'elements' collection is rebuilt from the pending history:
    newly added elements move over directly, unchanged ones are re-wrapped
    so the originals stay attached to the old row.
    """
    # convert to an INSERT
    make_transient(self)
    self.id = None
    # history of the 'elements' collection.
    # this is a tuple of groups: (added, unchanged, deleted)
    hist = attributes.get_history(self, "elements")
    # rewrite the 'elements' collection
    # from scratch, removing all history
    attributes.set_committed_value(self, "elements", {})
    # new elements in the "added" group
    # are moved to our new collection.
    for elem in hist.added:
        self.elements[elem.name] = elem
    # copy elements in the 'unchanged' group.
    # the new ones associate with the new ConfigData,
    # the old ones stay associated with the old ConfigData
    for elem in hist.unchanged:
        self.elements[elem.name] = ConfigValueAssociation(
            elem.config_value
        )
def test_lazyhistory(self):
    """tests that history functions work with lazy-loading attributes"""
    class Foo(_base.BasicEntity):
        pass
    class Bar(_base.BasicEntity):
        pass
    attributes.register_class(Foo)
    attributes.register_class(Bar)
    bar1, bar2, bar3, bar4 = [Bar(id=1), Bar(id=2), Bar(id=3), Bar(id=4)]
    def func1():
        return "this is func 1"
    def func2():
        return [bar1, bar2, bar3]
    attributes.register_attribute(Foo, 'col1', uselist=False, callable_=lambda o: func1, useobject=True)
    attributes.register_attribute(Foo, 'col2', uselist=True, callable_=lambda o: func2, useobject=True)
    attributes.register_attribute(Bar, 'id', uselist=False, useobject=True)
    x = Foo()
    attributes.instance_state(x).commit_all()
    x.col2.append(bar4)
    # the lazy-loaded members are "unchanged", the append is "added"
    eq_(attributes.get_history(attributes.instance_state(x), 'col2'), ([bar4], [bar1, bar2, bar3], []))
def process_version(session, obj, new, deleted):
    """Record a History row for *obj* capturing old/new column values.

    ``previous_value`` is None for newly created objects; ``next_value``
    is None for deleted ones.  History/Block instances and anything
    flushed while REVERTING are skipped to avoid recursion.
    """
    if REVERTING or isinstance(obj, (History, Block)):
        return
    obj_mapper = object_mapper(obj)
    history = History()
    history.block_height = session.query(func.max(Block.height)).scalar()
    history.table_name = obj.__table__.fullname
    history.next_value = None if deleted else {}
    history.previous_value = None if new else {}
    for prop in obj_mapper.iterate_properties:
        # only plain column properties are versioned
        if isinstance(prop, RelationshipProperty):
            continue
        key = prop.key
        hs = attributes.get_history(obj, key)
        old_value = hs.non_added()
        new_value = hs.non_deleted()
        if not new:
            # NOTE(review): non_added()/non_deleted() may return a tuple,
            # for which `== []` is never true and [0] would raise on an
            # empty result — confirm these are always lists here
            history.previous_value[
                key] = None if old_value == [] else old_value[0]
        if not deleted:
            history.next_value[key] = None if new_value == [] else new_value[0]
    session.add(history)
def tracker(self, mapper, connection, target, db_alias='some_object', fields=None, is_new=False):
    """Persist an audit row recording the changed *fields* of *target*.

    For new objects every listed field is recorded; otherwise only fields
    with a previous ("deleted") history value.  Decimal values are stored
    as strings so the payload stays serializable.  No row is written when
    nothing changed.
    """
    # BUGFIX: guard against fields=None — the old code iterated it directly
    # and raised TypeError whenever the caller omitted the argument.
    fields = fields or ()
    if is_new:
        fields_with_changes = fields
    else:
        fields_with_changes = [
            field for field in fields
            if get_history(target, field).deleted
        ]
    updates = {}
    for field in fields_with_changes:
        new_value = getattr(target, field)
        # isinstance instead of `type(...) is Decimal` so Decimal
        # subclasses are stringified too
        if isinstance(new_value, Decimal):
            updates[field] = str(new_value)
        else:
            updates[field] = new_value
    if updates:
        db_table = HistoryModel.__table__
        object_type = db_alias
        connection.execute(
            db_table.insert(),
            dict(
                user_id=current_user.id,
                object_type=object_type,
                object_id=target.id,
                data=updates,
            ))
def on_model_change(self, form, model, is_created):
    """Keep the stored password hash when the form leaves the password blank.

    If the submitted password is missing or shorter than 3 characters, the
    change is treated as "no new password" and the previous hash is
    restored from the attribute history.
    """
    if form.password.data is None or len(form.password.data) < 3:
        # BUGFIX: index [2][0] crashed with IndexError when the hash had
        # no pending change; only restore when a previous value exists.
        hash_history = get_history(model, 'password_hash')
        if hash_history.deleted:
            model.password_hash = hash_history.deleted[0]
            db.session.commit()
def changed_action(mapper, connection, target):
    """Remove the action from cache when an item is updated."""
    owner_attr = 'user' if isinstance(target, ActionUsers) else 'role'
    action_history = get_history(target, 'action')
    argument_history = get_history(target, 'argument')
    owner_history = get_history(target, owner_attr)
    changed = (
        action_history.has_changes()
        or argument_history.has_changes()
        or owner_history.has_changes()
    )
    if not changed:
        return
    # drop the cache entry for the current (new) state...
    current_access.delete_action_cache(
        get_action_cache_key(target.action, target.argument))
    # ...and for the previous state, falling back to the current values
    # when an attribute has no recorded old value
    previous_action = action_history.deleted[0] if action_history.deleted else target.action
    previous_argument = argument_history.deleted[0] if argument_history.deleted else target.argument
    current_access.delete_action_cache(
        get_action_cache_key(previous_action, previous_argument))
def get_ip_if_allowed():
    """
    Get the remote address (IP address) of the current Flask context, if
    the project's privacy settings allow it. Behind the scenes, this calls
    back to the FlaskPlugin from SQLAlchemy-Continuum in order to maintain
    forward compatibility
    """
    ip_logging_allowed = False
    try:
        if g.project.logging_preference == LoggingMode.RECORD_IP:
            ip_logging_allowed = True
        # If ip recording WAS enabled prior to this transaction,
        # we record the IP for this one last transaction
        # (index 2 of the history triple = "deleted"/previous values)
        old_logging_mode = get_history(g.project, "logging_preference")[2]
        if old_logging_mode and old_logging_mode[0] == LoggingMode.RECORD_IP:
            ip_logging_allowed = True
    except AttributeError:
        # g.project doesn't exist, it's being created or this action is outside
        # the scope of a project. Use the default logging mode to decide
        if LoggingMode.default() == LoggingMode.RECORD_IP:
            ip_logging_allowed = True
    if ip_logging_allowed:
        return fetch_remote_addr()
    else:
        return None
def record_history(self, clocked: 'Clocked', session: orm.Session, timestamp: dt.datetime):
    """record all history for a given clocked object

    For every temporalized property whose value changed, caps the previous
    history row and inserts a new one stamped with a fresh clock
    (vclock tick + effective timestamp).  In strict mode, a change outside
    an explicit clock tick is a hard error.
    """
    new_tick = self._get_new_tick(clocked)
    is_strict_mode = session.info[STRICT_MODE_KEY]
    vclock_history = attributes.get_history(clocked, 'vclock')
    # unchanged vclock with the same tick means no explicit clock tick occurred
    is_vclock_unchanged = vclock_history.unchanged and new_tick == vclock_history.unchanged[
        0]
    new_clock = self.make_clock(timestamp, new_tick)
    attr = {'entity': clocked}
    for prop, cls in self.history_models.items():
        value = self._get_prop_value(clocked, prop)
        if value is not NOT_FOUND_SENTINEL:
            if is_strict_mode:
                assert not is_vclock_unchanged, \
                    'flush() has triggered for a changed temporalized property outside of a clock tick'
            self._cap_previous_history_row(clocked, new_clock, cls)
            # Add new history row
            hist = attr.copy()
            hist[prop.key] = value
            session.add(
                cls(vclock=new_clock.vclock, effective=new_clock.effective, **hist))
def test_merge(self):
    """merge() computes added/unchanged/deleted for an ordered collection."""
    addresses = self.tables.addresses
    User, Address = self._user_address_fixture(
        addresses_args={"order_by": addresses.c.email_address})
    sess = fixture_session(autoflush=False)
    u1 = User(name="jack")
    a1 = Address(email_address="a1")
    a2 = Address(email_address="a2")
    a3 = Address(email_address="a3")
    u1.addresses.append(a2)
    u1.addresses.append(a3)
    sess.add_all([u1, a1])
    sess.flush()
    u1 = User(id=u1.id, name="jack")
    u1.addresses.append(a1)
    u1.addresses.append(a3)
    u1 = sess.merge(u1)
    # a1 newly added, a3 kept, a2 removed by the merge
    eq_(attributes.get_history(u1, "addresses"), ([a1], [a3], [a2]))
    sess.flush()
    eq_(list(u1.addresses), [a1, a3])
def _record(self, mapper, model, operation):
    """Capture per-column (old, new) value pairs for *model* on the session.

    Stores ``(tablename, pk, changes, operation)`` under the instance's
    primary key in the session's ``_model_changes`` map.  Relationship
    properties are skipped.  For deletes, the "old" value is the unchanged
    value; otherwise it is the deleted (previous) value.
    """
    pk = tuple(mapper.primary_key_from_instance(model))
    changes = {}
    for prop in object_mapper(model).iterate_properties:
        if isinstance(prop, RelationshipProperty):
            continue
        try:
            history = attributes.get_history(model, prop.key)
        except Exception:
            # BUGFIX: narrowed from a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt
            continue
        added, unchanged, deleted = history
        newvalue = added[0] if added else None
        if operation == 'delete':
            oldvalue = unchanged[0] if unchanged else None
        else:
            oldvalue = deleted[0] if deleted else None
        # BUGFIX: compare against None instead of truthiness so falsy
        # values (0, '', False) are still recorded as changes
        if newvalue is not None or oldvalue is not None:
            changes[prop.key] = (oldvalue, newvalue)
    orm.object_session(model)._model_changes[pk] = (
        model.__tablename__, pk[0], changes, operation)
    return EXT_CONTINUE
def test_merge(self):
    """merge() computes added/unchanged/deleted for an ordered collection."""
    addresses = self.tables.addresses
    User, Address = self._user_address_fixture(
        addresses_args={"order_by": addresses.c.email_address})
    sess = create_session()
    u1 = User(name='jack')
    a1 = Address(email_address='a1')
    a2 = Address(email_address='a2')
    a3 = Address(email_address='a3')
    u1.addresses.append(a2)
    u1.addresses.append(a3)
    sess.add_all([u1, a1])
    sess.flush()
    u1 = User(id=u1.id, name='jack')
    u1.addresses.append(a1)
    u1.addresses.append(a3)
    u1 = sess.merge(u1)
    # a1 newly added, a3 kept, a2 removed by the merge
    eq_(attributes.get_history(u1, 'addresses'), ([a1], [a3], [a2]))
    sess.flush()
    eq_(list(u1.addresses), [a1, a3])
def test_merge(self):
    """merge() computes added/unchanged/deleted for an ordered collection."""
    addresses = self.tables.addresses
    User, Address = self._user_address_fixture(
        addresses_args={"order_by": addresses.c.email_address})
    sess = create_session()
    u1 = User(name='jack')
    a1 = Address(email_address='a1')
    a2 = Address(email_address='a2')
    a3 = Address(email_address='a3')
    u1.addresses.append(a2)
    u1.addresses.append(a3)
    sess.add_all([u1, a1])
    sess.flush()
    u1 = User(id=u1.id, name='jack')
    u1.addresses.append(a1)
    u1.addresses.append(a3)
    u1 = sess.merge(u1)
    # a1 newly added, a3 kept, a2 removed by the merge
    eq_(attributes.get_history(u1, 'addresses'), (
        [a1], [a3], [a2]
    ))
    sess.flush()
    eq_(
        list(u1.addresses),
        [a1, a3]
    )
def _flush_all(self, obj):
    """Invalidate every cache entry that could reference *obj*.

    Flushes per-column keys for both old and new attribute values, the
    unkeyed (collection) entry, and the primary-key entry.
    """
    for column in self._columns():
        added, unchanged, deleted = get_history(obj, column)
        # both the previous and the pending value may be cached
        stale_values = []
        stale_values.extend(deleted)
        stale_values.extend(added)
        for value in stale_values:
            self.flush(self._cache_key(**{column: value}))
    self.flush(self._cache_key())
    self.flush(self._cache_key(getattr(obj, self.pk)))
def test_flush(self):
    """After flush, dynamic collection members appear as 'unchanged'."""
    mapper(User, users, properties={
        'addresses': dynamic_loader(mapper(Address, addresses))
    })
    sess = create_session()
    u1 = User(name='jack')
    u2 = User(name='ed')
    u2.addresses.append(Address(email_address='*****@*****.**'))
    u1.addresses.append(Address(email_address='*****@*****.**'))
    sess.add_all((u1, u2))
    sess.flush()
    from sqlalchemy.orm import attributes
    # flushed members land in the "unchanged" group
    eq_(attributes.get_history(u1, 'addresses'), ([], [Address(email_address='*****@*****.**')], []))
    sess.expunge_all()
    # test the test fixture a little bit
    ne_(User(name='jack', addresses=[Address(email_address='wrong')]), sess.query(User).first())
    eq_(User(name='jack', addresses=[Address(email_address='*****@*****.**')]), sess.query(User).first())
    eq_([
        User(name='jack', addresses=[Address(email_address='*****@*****.**')]),
        User(name='ed', addresses=[Address(email_address='*****@*****.**')])
    ], sess.query(User).all())
def _record(self, mapper, model, operation):
    """Record column-level (old, new) changes for *model* on its session.

    The result tuple ``(tablename, pk, changes, operation)`` is keyed by
    the instance's primary key in ``_model_changes``.  Relationships are
    ignored; for deletes the old value comes from the unchanged group,
    otherwise from the deleted group.
    """
    pk = tuple(mapper.primary_key_from_instance(model))
    changes = {}
    for prop in object_mapper(model).iterate_properties:
        if isinstance(prop, RelationshipProperty):
            continue
        try:
            history = attributes.get_history(model, prop.key)
        except Exception:
            # BUGFIX: was a bare `except:` that also swallowed
            # SystemExit/KeyboardInterrupt
            continue
        added, unchanged, deleted = history
        newvalue = added[0] if added else None
        if operation == 'delete':
            oldvalue = unchanged[0] if unchanged else None
        else:
            oldvalue = deleted[0] if deleted else None
        # BUGFIX: explicit None comparison so falsy values (0, '', False)
        # are still recorded as changes
        if newvalue is not None or oldvalue is not None:
            changes[prop.key] = (oldvalue, newvalue)
    orm.object_session(model)._model_changes[pk] = (
        model.__tablename__, pk[0], changes, operation)
    return EXT_CONTINUE
def changeset(obj):
    """
    Returns a humanized changeset for given SQLAlchemy declarative object.

    :param obj: SQLAlchemy declarative model object
    """
    data = {}
    session = sa.orm.object_session(obj)
    if session and obj in session.deleted:
        # deleted object: record every non-primary-key column that had a
        # value, as [None, old_value]
        for prop in obj.__mapper__.iterate_properties:
            if isinstance(prop, sa.orm.ColumnProperty):
                if not prop.columns[0].primary_key:
                    value = getattr(obj, prop.key)
                    if value is not None:
                        data[prop.key] = [None, getattr(obj, prop.key)]
    else:
        # pending/dirty object: record [new_value, old_value] per change
        for prop in obj.__mapper__.iterate_properties:
            history = get_history(obj, prop.key)
            if history.has_changes():
                old_value = history.deleted[0] if history.deleted else None
                new_value = history.added[0] if history.added else None
                # NOTE(review): truthiness check skips changes whose new
                # value is falsy (0, '', False, None) — confirm intended
                if new_value:
                    data[prop.key] = [new_value, old_value]
    return data
def test_merge(self):
    """merge() computes added/unchanged/deleted for an ordered dynamic collection."""
    mapper(User, users, properties={
        'addresses': dynamic_loader(mapper(Address, addresses),
                                    order_by=addresses.c.email_address)
    })
    sess = create_session()
    u1 = User(name='jack')
    a1 = Address(email_address='a1')
    a2 = Address(email_address='a2')
    a3 = Address(email_address='a3')
    u1.addresses.append(a2)
    u1.addresses.append(a3)
    sess.add_all([u1, a1])
    sess.flush()
    u1 = User(id=u1.id, name='jack')
    u1.addresses.append(a1)
    u1.addresses.append(a3)
    u1 = sess.merge(u1)
    # a1 newly added, a3 kept, a2 removed by the merge
    eq_(attributes.get_history(u1, 'addresses'), (
        [a1], [a3], [a2]
    ))
    sess.flush()
    eq_(
        list(u1.addresses),
        [a1, a3]
    )
def audit_update(mapper, connection, target):
    """Listen for the `after_update` event and create an AuditLog entry with before and after state changes"""
    state_before = {}
    state_after = {}
    inspr = inspect(target)
    attrs = class_mapper(target.__class__).column_attrs
    for attr in attrs:
        hist = getattr(inspr.attrs, attr.key).history
        if hist.has_changes():
            # NOTE(review): .pop() assumes a previous ("deleted") value
            # exists; a change with an empty deleted group would raise
            # IndexError — confirm that cannot happen post-update
            value_before = get_history(target, attr.key)[2].pop()
            value_after = getattr(target, attr.key)
            # skip UUIDs that differ only in type (UUID vs str), not value
            if (isinstance(value_before, UUID) or isinstance(
                    value_after, UUID)) and str(value_before) == str(value_after):
                continue
            state_before[attr.key] = value_before
            state_after[attr.key] = value_after
    # nothing meaningful changed: no audit entry
    if state_after == state_before:
        return
    target.create_audit(
        connection, target.__tablename__, target.id, ACTION_UPDATE,
        state_before=json.dumps(state_before, cls=current_app.json_encoder),
        state_after=json.dumps(state_after, cls=current_app.json_encoder))
def attrs_changed(obj, *attrs):
    """Check if the given fields have been changed since the last flush.

    :param obj: SQLAlchemy-mapped object
    :param attrs: attribute names
    :return: True if at least one named attribute has pending changes
    """
    for name in attrs:
        if get_history(obj, name).has_changes():
            return True
    return False
def attrs_changed(obj, *attrs):
    """Checks if the given fields have been changed since the last flush

    :param obj: SQLAlchemy-mapped object
    :param attrs: attribute names
    """
    # lazily pull the history for each attribute and stop at the first change
    histories = (get_history(obj, name) for name in attrs)
    return any(hist.has_changes() for hist in histories)
def create_version(obj, session, deleted = False):
    """Write a history row for *obj* into its ``__history_mapper__`` table.

    Snapshots the pre-flush attribute values via the attribute history,
    adds a history-class instance to *session*, and increments
    ``obj.version``.  Nothing is written when no column changed and the
    object is not being deleted.

    :param obj: versioned, mapper-instrumented object
    :param session: session the history row is added to
    :param deleted: force a history row even without column changes
    """
    obj_mapper = object_mapper(obj)
    history_mapper = obj.__history_mapper__
    history_cls = history_mapper.class_
    obj_state = attributes.instance_state(obj)
    attr = {}
    obj_changed = False
    # walk the object's mapper and its history mapper in parallel up the
    # inheritance chain so joined-table subclasses are covered too
    for om, hm in zip(obj_mapper.iterate_to_root(), history_mapper.iterate_to_root()):
        if hm.single:
            continue
        for hist_col in hm.local_table.c:
            if hist_col.key == 'version':
                continue
            obj_col = om.local_table.c[hist_col.key]
            # get the value of the
            # attribute based on the MapperProperty related to the
            # mapped column. this will allow usage of MapperProperties
            # that have a different keyname than that of the mapped column.
            try:
                prop = obj_mapper.get_property_by_column(obj_col)
            except UnmappedColumnError:
                # in the case of single table inheritance, there may be
                # columns on the mapped table intended for the subclass only.
                # the "unmapped" status of the subclass column on the
                # base class is a feature of the declarative module as of sqla 0.5.2.
                continue
            # expired object attributes and also deferred cols might not be in the
            # dict. force it to load no matter what by using getattr().
            if prop.key not in obj_state.dict:
                getattr(obj, prop.key)
            # a/u/d = added/unchanged/deleted value lists for this attribute
            a, u, d = attributes.get_history(obj, prop.key)
            if d:
                # attribute changed: the history row keeps the OLD value
                attr[hist_col.key] = d[0]
                obj_changed = True
            elif u:
                # unchanged: carry the current value into the history row
                attr[hist_col.key] = u[0]
            else:
                # if the attribute had no value.
                attr[hist_col.key] = a[0]
                obj_changed = True
    if not obj_changed and not deleted:
        return
    attr['version'] = obj.version
    hist = history_cls()
    # NOTE: iteritems() — this snippet targets Python 2
    for key, value in attr.iteritems():
        setattr(hist, key, value)
    session.add(hist)
    obj.version += 1
def test_accounting_for_rowswitch(self):
    """Row-switch merges keep the displaced child accounted for in the
    session, both on the first switch and on a subsequent one."""
    parent, child = self.tables.parent, self.tables.child

    class Parent(object):
        def __init__(self, id):
            self.id = id
            self.child = Child()

    class Child(object):
        pass

    mapper(Parent, parent, properties={
        'child': relationship(Child, uselist=False,
                              cascade="all, delete-orphan",
                              backref="parent")
    })
    mapper(Child, child)
    sess = create_session(autocommit=False)

    # persist an initial parent/child pair, then detach everything
    sess.add(Parent(1))
    sess.commit()
    sess.close()

    # merging a new Parent with the same PK triggers a row switch; the
    # displaced child appears in the history's "deleted" slot
    switched = sess.merge(Parent(1))
    displaced = attributes.get_history(switched, 'child')[2][0]
    assert displaced in sess
    sess.flush()
    assert switched.child._sa_instance_state.session_id == sess.hash_key
    assert switched.child in sess

    # a second row switch behaves the same way
    switched_again = sess.merge(Parent(1))
    displaced = attributes.get_history(switched_again, 'child')[2][0]
    assert displaced in sess
    sess.flush()
def create_version(obj, session, deleted=False):
    """Record a history row for *obj* in its ``__history_mapper__`` table.

    Variant of the versioning recipe that resolves column properties via
    the private ``_get_col_to_prop`` mapper API.  Snapshots pre-flush
    values, adds a history-class instance to *session*, and bumps
    ``obj.version``; skips writing when nothing changed and the object is
    not being deleted.

    :param obj: versioned, mapper-instrumented object
    :param session: session the history row is added to
    :param deleted: force a history row even without column changes
    """
    obj_mapper = object_mapper(obj)
    history_mapper = obj.__history_mapper__
    history_cls = history_mapper.class_
    obj_state = attributes.instance_state(obj)
    attr = {}
    obj_changed = False
    # walk both mapper hierarchies in parallel so joined-table
    # inheritance levels are all captured
    for om, hm in zip(obj_mapper.iterate_to_root(), history_mapper.iterate_to_root()):
        if hm.single:
            continue
        for hist_col in hm.local_table.c:
            if hist_col.key == 'version':
                continue
            obj_col = om.local_table.c[hist_col.key]
            # SLIGHT SQLA HACK #3 - get the value of the
            # attribute based on the MapperProperty related to the
            # mapped column. this will allow usage of MapperProperties
            # that have a different keyname than that of the mapped column.
            try:
                # NOTE(review): _get_col_to_prop is a private SQLAlchemy
                # API and may break across versions — confirm on upgrade
                prop = obj_mapper._get_col_to_prop(obj_col)
            except UnmappedColumnError:
                # in the case of single table inheritance, there may be
                # columns on the mapped table intended for the subclass only.
                # the "unmapped" status of the subclass column on the
                # base class is a feature of the declarative module as of sqla 0.5.2.
                continue
            # expired object attributes and also deferred cols might not be in the
            # dict. force it to load no matter what by using getattr().
            if prop.key not in obj_state.dict:
                getattr(obj, prop.key)
            # a/u/d = added/unchanged/deleted value lists for this attribute
            a, u, d = attributes.get_history(obj, prop.key)
            if d:
                # changed: the history row preserves the OLD value
                attr[hist_col.key] = d[0]
                obj_changed = True
            elif u:
                # unchanged: carry the current value forward
                attr[hist_col.key] = u[0]
            else:
                raise Exception("TODO: what makes us arrive here ?")
    if not obj_changed and not deleted:
        return
    attr['version'] = obj.version
    hist = history_cls()
    # NOTE: iteritems() — this snippet targets Python 2
    for key, value in attr.iteritems():
        setattr(hist, key, value)
    session.add(hist)
    obj.version += 1
def _filter_keys(self, obj):
    """Collect the cache keys affected by *obj*'s pending column changes
    (one key per added or deleted value of each tracked column)."""
    keys = []
    for column in self._columns:
        # PASSIVE_NO_INITIALIZE: never trigger a load just to inspect history
        hist = get_history(obj, column, passive=PASSIVE_NO_INITIALIZE)
        added, _, deleted = hist
        changed_values = itertools.chain(added or (), deleted or ())
        keys.extend(self.cache_key(**{column: value})
                    for value in changed_values)
    return keys
def before_update(mapper, connection, target):
    """Delete the previous photo file from disk when the 'photo' attribute
    changed, then delegate to BaseMixin.before_update."""
    photo_hist = get_history(target, 'photo')
    if photo_hist.has_changes():
        # index 2 of the history tuple holds the deleted (old) values
        old_values = photo_hist[2]
        if old_values and old_values[0]:
            os.remove(old_values[0])
    # continue with base method
    return BaseMixin.before_update(mapper, connection, target)
def test_collections_via_backref(self):
    """History tracking across a bidirectional (backref) relationship:
    appending to Foo.bars must populate the corresponding Bar.foo
    history on the other side."""
    class Foo(_base.BasicEntity):
        pass
    class Bar(_base.BasicEntity):
        pass
    attributes.register_class(Foo)
    attributes.register_class(Bar)
    # 'bars' is a list-valued attribute backref-linked to scalar 'foo'
    attributes.register_attribute(Foo, 'bars', uselist=True, extension=attributes.GenericBackrefExtension('foo'), trackparent=True, useobject=True)
    attributes.register_attribute(Bar, 'foo', uselist=False, extension=attributes.GenericBackrefExtension('bars'), trackparent=True, useobject=True)
    f1 = Foo()
    b1 = Bar()
    # fresh objects: no additions/deletions; scalar side reports [None]
    # as its unchanged value
    eq_(attributes.get_history(attributes.instance_state(f1), 'bars'), ((), [], ()))
    eq_(attributes.get_history(attributes.instance_state(b1), 'foo'), ((), [None], ()))
    #b1.foo = f1
    f1.bars.append(b1)
    # append on the collection side shows up as "added" on BOTH sides
    eq_(attributes.get_history(attributes.instance_state(f1), 'bars'), ([b1], [], []))
    eq_(attributes.get_history(attributes.instance_state(b1), 'foo'), ([f1], (), ()))
    b2 = Bar()
    f1.bars.append(b2)
    # second append extends the collection's "added" list and gives b2
    # its own scalar-side history
    eq_(attributes.get_history(attributes.instance_state(f1), 'bars'), ([b1, b2], [], []))
    eq_(attributes.get_history(attributes.instance_state(b1), 'foo'), ([f1], (), ()))
    eq_(attributes.get_history(attributes.instance_state(b2), 'foo'), ([f1], (), ()))
def test_collections_via_backref(self):
    """History tracking across a bidirectional (backref) relationship,
    older fixtures/assertEquals variant: appending to Foo.bars must
    populate the corresponding Bar.foo history on the other side."""
    class Foo(fixtures.Base):
        pass
    class Bar(fixtures.Base):
        pass
    attributes.register_class(Foo)
    attributes.register_class(Bar)
    # 'bars' is a list-valued attribute backref-linked to scalar 'foo'
    attributes.register_attribute(Foo, 'bars', uselist=True, extension=attributes.GenericBackrefExtension('foo'), trackparent=True, useobject=True)
    attributes.register_attribute(Bar, 'foo', uselist=False, extension=attributes.GenericBackrefExtension('bars'), trackparent=True, useobject=True)
    f1 = Foo()
    b1 = Bar()
    # fresh objects: no additions/deletions; the scalar side reports
    # [None] as its unchanged value
    self.assertEquals(attributes.get_history(f1._state, 'bars'), ([], [], []))
    self.assertEquals(attributes.get_history(b1._state, 'foo'), ([], [None], []))
    #b1.foo = f1
    f1.bars.append(b1)
    # append on the collection side shows up as "added" on BOTH sides
    self.assertEquals(attributes.get_history(f1._state, 'bars'), ([b1], [], []))
    self.assertEquals(attributes.get_history(b1._state, 'foo'), ([f1], [], []))
    b2 = Bar()
    f1.bars.append(b2)
    # second append extends the collection's "added" list and gives b2
    # its own scalar-side history
    self.assertEquals(attributes.get_history(f1._state, 'bars'), ([b1, b2], [], []))
    self.assertEquals(attributes.get_history(b1._state, 'foo'), ([f1], [], []))
    self.assertEquals(attributes.get_history(b2._state, 'foo'), ([f1], [], []))
def updated_fields(self):
    """Return the names of column attributes modified on this instance.

    NOTE(review): an attribute counts as modified only when its history
    has BOTH an added and a deleted value; a transition from a never-set
    state may therefore go unreported — confirm this is intended.
    """
    insp = inspect(type(self))
    modified = []
    for attr in insp.column_attrs:
        hist = attributes.get_history(self, attr.key)
        if hist.added and hist.deleted:
            modified.append(attr.key)
    return modified
def _flush_all(self, obj):
    """Flush every cache entry related to *obj*: the per-attribute keys
    for both old and new values, the "all" listing, and the object's own
    primary-key entry."""
    for attr in self._attrs():
        added, unchanged, deleted = get_history(obj, attr)
        # old values first, then new ones — same order as before
        for value in [*deleted, *added]:
            self.flush(self._cache_key(**{attr: value}))
    # flush "all" listing
    self.flush(self._cache_key())
    # flush the object
    self.flush(self._cache_key(getattr(obj, self.pk)))
def update(cls, user, copr):
    """Validate and stage an update of *copr* made by *user*.

    The name-change check reads the attribute history FIRST: issuing any
    other query beforehand would flush and discard the pending changes.
    """
    name_history = get_history(copr, "name")
    if name_history.has_changes():
        raise MalformedArgumentException(
            "Change name of the project is forbidden")
    users_logic.UsersLogic.raise_if_cant_update_copr(
        user, copr, "Only owners and admins may update their projects.")
    db.session.add(copr)