def test_relationship(ssn: sa.orm.Session):
    """ Test getting historical relationship values through an InstanceHistoryProxy """
    # Fixture: two users and one article authored by the first user
    ssn.add(User(id=1, name='John', age=18))
    ssn.add(User(id=2, name='Jack', age=18))
    ssn.add(Article(id=1, title='Python', author_id=1))
    ssn.commit()

    # Load both candidate authors
    original_author = ssn.query(User).get(1)
    replacement_author = ssn.query(User).get(2)

    # Load the article and snapshot its history before any change
    article: Article = ssn.query(Article).get(1)
    old_article: Article = InstanceHistoryProxy(article)  # noqa

    # Touch the relationship so it is loaded, then check the proxy sees it too
    assert article.author == original_author
    assert old_article.author == original_author

    # Reassign the relationship: the proxy must keep reporting the old value
    article.author = replacement_author
    assert old_article.author == original_author

    # Even a flush must not erase the historical relationship value
    ssn.flush()
    assert old_article.author == original_author
def test_columns(ssn: sa.orm.Session):
    """ Simple test of InstanceHistoryProxy with columns """
    # Fixture: a single user row
    ssn.add(User(id=1, name='John', age=18))
    ssn.commit()

    # Load the user and take a history snapshot before mutating anything
    user: User = ssn.query(User).get(1)
    old_user: User = InstanceHistoryProxy(user)  # noqa

    def assert_history_intact():
        # All historical column values must remain frozen at their original state
        assert (old_user.id, old_user.name, old_user.age) == (1, 'John', 18)

    # Mutate every column on the live object
    user.id = 1000
    user.name = 'CHANGED'
    user.age = 1800
    assert_history_intact()  # still good

    # Flushing the session must not clobber the history either
    ssn.flush()
    assert_history_intact()  # still good
async def handle_push(connection: Connection,
                      session: sqlalchemy.orm.Session) -> Optional[dict]:
    """Consume PushMessages from a client websocket and apply them to the DB.

    For each incoming message this:
      1. validates the client's version id against the server's latest,
      2. resolves unique-constraint conflicts,
      3. performs the pushed operations,
      4. records a new Version plus the accomplished Operations,
      5. answers the client with the new version id.

    :param connection: the websocket connection wrapper for one client.
    :param session: SQLAlchemy session used for all DB work.
    :return: ``{'new_version_id': <int>}`` when operations were applied,
        otherwise ``None`` (the original annotation said ``Optional[int]``,
        but the function in fact returns a dict — fixed here).
    :raises PushRejected: on version mismatch, bad signature, or a failed
        operation.
    :raises PullSuggested: when the client is behind and should pull first.
    """
    msgs_got = 0
    version: Optional[Version] = None
    async for msg in connection.socket:
        msgs_got += 1
        msg_json = json.loads(msg)
        pushmsg = PushMessage(msg_json)
        if not pushmsg.operations:
            # logger.warn is a deprecated alias — use warning()
            logger.warning("empty operations list in client PushMessage")
        for op in pushmsg.operations:
            logger.info(f"operation: {op}")
        logger.info(f"message key={pushmsg.key}")

        latest_version_id = core.get_latest_version_id(session=session)
        logger.info(
            f"** version on server:{latest_version_id}, version in pushmsg:{pushmsg.latest_version_id}"
        )
        if latest_version_id != pushmsg.latest_version_id:
            exc = f"version identifier isn't the latest one; " \
                  f"incoming: {pushmsg.latest_version_id}, on server:{latest_version_id}"
            if latest_version_id is None:
                # server has no versions at all yet the client claims one
                logger.warning(exc)
                raise PushRejected(exc)
            if pushmsg.latest_version_id is None:
                # client has never pulled — tell it to pull first
                logger.warning(exc)
                raise PullSuggested(exc)
            if pushmsg.latest_version_id < latest_version_id:
                # client is behind the server — pull first
                logger.warning(exc)
                raise PullSuggested(exc)
            # client claims a version *newer* than the server's: reject
            raise PushRejected(exc)

        if not pushmsg.islegit(session):
            raise PushRejected("message isn't properly signed")

        for listener in before_push:
            listener(session, pushmsg)

        # I) detect unique constraint conflicts and resolve them if possible
        unique_conflicts = find_unique_conflicts(pushmsg, session)
        conflicting_objects = set()
        for uc in unique_conflicts:
            obj = uc['object']
            conflicting_objects.add(obj)
            for key, value in zip(uc['columns'], uc['new_values']):
                setattr(obj, key, value)
        for obj in conflicting_objects:
            make_transient(obj)  # remove from session
        for model in set(type(obj) for obj in conflicting_objects):
            pk_name = get_pk(model)
            pks = [
                getattr(obj, pk_name)
                for obj in conflicting_objects
                if type(obj) is model
            ]
            # remove the conflicting rows from the database in one statement
            session.query(model).filter(getattr(model, pk_name).in_(pks)).\
                delete(synchronize_session=False)
        session.add_all(conflicting_objects)  # reinsert
        session.flush()

        # II) perform the operations
        operations = [o for o in pushmsg.operations if o.tracked_model is not None]
        post_operations: List[Tuple[Operation, SQLClass, Optional[SQLClass]]] = []
        try:
            op: Operation
            for op in operations:
                (obj, old_obj) = await op.perform_async(
                    pushmsg, session, pushmsg.node_id, connection.socket)
                if obj is not None:
                    # if the op has been skipped, it wont be appended for
                    # post_operation handling
                    post_operations.append((op, obj, old_obj))
                    resp = dict(
                        type="info",
                        op=dict(
                            row_id=op.row_id,
                            version=op.version,
                            command=op.command,
                            content_type_id=op.content_type_id,
                        ))
                    call_after_tracking_fn(session, op, obj)
                    await connection.socket.send(json.dumps(resp))
        except OperationError as e:
            logger.exception(
                "Couldn't perform operation in push from node %s.",
                pushmsg.node_id)
            raise PushRejected(
                "at least one operation couldn't be performed", *e.args)

        # III) insert a new version (only if operations have been done)
        if post_operations:
            # NOTE(review): naive local timestamp — presumably intentional for
            # this schema; confirm whether UTC is expected here.
            version = Version(created=datetime.datetime.now(),
                              node_id=pushmsg.node_id)
            session.add(version)

        # IV) insert the operations, discarding the 'order' column
        accomplished_operations = [op for (op, obj, old_obj) in post_operations]
        for op in sorted(accomplished_operations, key=attr('order')):
            new_op = Operation()
            for k in [k for k in properties_dict(op) if k != 'order']:
                setattr(new_op, k, getattr(op, k))
            session.add(new_op)
            new_op.version = version
            session.flush()

        for op, obj, old_obj in post_operations:
            op.call_after_operation_fn(session, obj, old_obj)

        for listener in after_push:
            listener(session, pushmsg)

        # return the new version id back to the client
        logger.info(f"version is: {version}")
        if version:
            await connection.socket.send(
                json.dumps(
                    dict(type="result", new_version_id=version.version_id)))
            return {'new_version_id': version.version_id}
        else:
            await connection.socket.send(
                json.dumps(dict(type="result", new_version_id=None)))
            logger.info("sent nothing message")
            await connection.socket.close()
    logger.info("push ready")
def test_does_not_lose_history(ssn: sa.orm.Session):
    """ Extensive test of InstanceHistoryProxy with query counters and lazy loads """
    assert ssn.autoflush == False, 'this test relies on Session.autoflush=False'
    engine = ssn.get_bind()

    # Fixture: one user with one article
    ssn.add(User(id=1, name='John', age=18))
    ssn.add(Article(id=1, title='Python', author_id=1))
    ssn.commit()

    # === Test 1: history survives a session flush
    ssn.expunge_all()  # got to reset; otherwise, the session might reuse loaded objects
    user = ssn.query(User).get(1)
    with ExpectedQueryCounter(engine, 0, 'Expected no queries here'):
        old_user_hist = InstanceHistoryProxy(user)  # issues no queries

    user.name = 'CHANGED'
    assert old_user_hist.name == 'John'  # history works

    # Flushing must not wipe the recorded history
    ssn.flush()
    assert old_user_hist.name == 'John'

    # A column changed *after* the flush is also tracked correctly
    user.age = 1800
    assert old_user_hist.age == 18  # correct

    ssn.rollback()  # undo

    # === Test 2: history survives lazy-loading a deferred column
    ssn.expunge_all()  # got to reset; otherwise, the session might reuse loaded objects
    user = ssn.query(User).options(load_only('name')).get(1)
    with ExpectedQueryCounter(engine, 0, 'Expected no queries here'):
        old_user_hist = InstanceHistoryProxy(user)  # issues no queries

    user.name = 'CHANGED'
    assert old_user_hist.name == 'John'

    # Trigger a lazy column load, then re-check the history
    with ExpectedQueryCounter(engine, 1, 'Expected 1 lazyload query'):
        user.age  # get an unloaded column
    assert old_user_hist.name == 'John'

    # === Test 3: history survives lazy-loading a one-to-many relationship
    ssn.expunge_all()  # got to reset; otherwise, the session might reuse loaded objects
    user = ssn.query(User).get(1)
    with ExpectedQueryCounter(engine, 0, 'Expected no queries here'):
        old_user_hist = InstanceHistoryProxy(user)

    user.name = 'CHANGED'
    assert old_user_hist.name == 'John'  # history works

    # Trigger a lazy relationship load, then re-check the history
    with ExpectedQueryCounter(engine, 1, 'Expected 1 lazyload query'):
        list(user.articles)
    assert old_user_hist.name == 'John'

    # === Test 4: history survives lazy-loading a one-to-one relationship
    ssn.expunge_all()  # got to reset; otherwise, the session might reuse loaded objects
    article = ssn.query(Article).get(1)
    with ExpectedQueryCounter(engine, 0, 'Expected no queries here'):
        old_article_hist = InstanceHistoryProxy(article)

    article.title = 'CHANGED'
    assert old_article_hist.title == 'Python'  # works

    # Trigger a lazy relationship load, then re-check the history
    with ExpectedQueryCounter(engine, 1, 'Expected 1 lazyload query'):
        article.author
    assert old_article_hist.title == 'Python'  # works