def add_object(self, obj, include_extensions=True):
    """Add *obj* to the message payload unless it is already present.

    Objects are grouped in ``self.payload`` by class name; identity
    within a group is the ``ObjectType(classname, pk)`` pair.  When
    *include_extensions* is true, extension fields registered for the
    model are loaded and stored alongside the plain column properties.

    Returns ``self`` so calls can be chained.
    """
    class_ = type(obj)
    classname = class_.__name__
    obj_set = self.payload.get(classname, set())
    # Skip duplicates: equality is keyed on (classname, primary key).
    if ObjectType(classname, getattr(obj, get_pk(class_))) in obj_set:
        return self
    properties = properties_dict(obj)
    if include_extensions:
        # BUGFIX: dict.iteritems() was removed in Python 3; items()
        # iterates correctly on both Python 2 and 3.
        for field, ext in model_extensions.get(classname, {}).items():
            _, loadfn, _, _ = ext
            properties[field] = loadfn(obj)
    obj_set.add(
        ObjectType(classname, getattr(obj, get_pk(class_)), **properties))
    self.payload[classname] = obj_set
    return self
def add_object(self, obj, include_extensions=True):
    """Add *obj* to the message, if it's not already in.

    The payload maps a class name to a set of ``ObjectType`` records;
    membership is decided by the (classname, primary key) pair.  With
    *include_extensions* enabled, each registered extension's load
    function contributes an extra property.  Returns ``self``.
    """
    class_ = type(obj)
    classname = class_.__name__
    obj_set = self.payload.get(classname, set())
    # Already recorded for this (classname, pk) — nothing to do.
    if ObjectType(classname, getattr(obj, get_pk(class_))) in obj_set:
        return self
    properties = properties_dict(obj)
    if include_extensions:
        # BUGFIX: .iteritems() is Python-2-only; .items() is portable.
        for field, ext in model_extensions.get(classname, {}).items():
            _, loadfn, _, _ = ext
            properties[field] = loadfn(obj)
    obj_set.add(ObjectType(
        classname, getattr(obj, get_pk(class_)), **properties))
    self.payload[classname] = obj_set
    return self
def add_object(self, obj, include_extensions=True):
    """Adds an object to the message, if it's not already in."""
    model = type(obj)
    model_name = model.__name__
    pk_value = getattr(obj, get_pk(model))
    bucket = self.payload.get(model_name, set())

    # Bail out early when an entry with the same identity exists.
    if ObjectType(model_name, pk_value) in bucket:
        return self

    props = properties_dict(obj)
    if include_extensions:
        # Fold in every extension field that provides a load function.
        for extension in get_model_extensions_for_obj(obj):
            for field_name, ext_field in list(extension.fields.items()):
                loader = ext_field.loadfn
                if loader:
                    props[field_name] = loader(obj)

    bucket.add(ObjectType(model_name, pk_value, **props))
    self.payload[model_name] = bucket
    return self
async def perform_async(
        operation: "Operation",
        container: "BaseMessage",
        session: Session,
        node_id=None,
        websocket: Optional[WebSocketCommonProtocol] = None
) -> Tuple[Optional[SQLClass], Optional[SQLClass]]:
    """
    Performs *operation*, looking for required data in *container*, and
    using *session* to perform it.

    *container* is an instance of dbsync.messages.base.BaseMessage.

    *node_id* is the node responsible for the operation, if known
    (else ``None``).

    If at any moment this operation fails for predictable causes, it
    will raise an *OperationError*.

    Returns ``(new_object, old_object)``; each element may be ``None``
    (e.g. a skipped operation yields ``(None, None)``).
    """
    # NOTE: the return annotation previously was a bare tuple
    # ``(Optional[...], Optional[...])``, which is not a valid type.
    from dbsync.core import mode

    model: DeclarativeMeta = operation.tracked_model
    res: Tuple[Optional[SQLClass], Optional[SQLClass]] = (None, None)
    if model is None:
        raise OperationError("no content type for this operation", operation)

    if operation.command == 'i':
        # Check if the given object is already in the database.
        obj = query_model(session, model). \
            filter(getattr(model, get_pk(model)) == operation.row_id).first()

        # Retrieve the incoming object from the message container.
        pull_obj = container.query(model). \
            filter(attr('__pk__') == operation.row_id).first()
        if pull_obj is None:
            raise OperationError(
                f"no object backing the operation in container on {mode}",
                operation)
        if obj is None:
            logger.info(
                f"insert: calling request_payloads_for_extension for: {pull_obj.id}"
            )
            try:
                operation.call_before_operation_fn(session, pull_obj)
                await request_payloads_for_extension(
                    operation, pull_obj, websocket, session)
                session.add(pull_obj)
                res = pull_obj, None
            except SkipOperation:
                logger.info(f"operation {operation} skipped")
        else:
            # Don't raise an exception if the incoming object is
            # exactly the same as the local one.
            if properties_dict(obj) == properties_dict(pull_obj):
                logger.warning("insert attempted when an identical object "
                               "already existed in local database: "
                               "model {0} pk {1}".format(
                                   model.__name__, operation.row_id))
            else:
                raise OperationError(
                    "insert attempted when the object already existed: "
                    "model {0} pk {1}".format(model.__name__,
                                              operation.row_id))

    elif operation.command == 'u':
        obj = query_model(session, model). \
            filter(getattr(model, get_pk(model)) ==
                   operation.row_id).one_or_none()
        if obj is not None:
            logger.info(
                f"update: calling request_payloads_for_extension for: {obj.id}"
            )
        else:
            # For now, the record will be created again, but it is an
            # error because nothing should be deleted without using
            # dbsync.  Addendum: this can also happen legitimately when
            # tracking of an object was suppressed and later activated
            # during a 'u' operation, so we only warn here.
            logger.warning(
                "The referenced object doesn't exist in database. "
                "Node %s. Operation %s", node_id, operation)

        # Get the new object from the PushMessage.
        pull_obj = container.query(model). \
            filter(attr('__pk__') == operation.row_id).first()
        if pull_obj is None:
            raise OperationError(
                "no object backing the operation in container", operation)
        try:
            operation.call_before_operation_fn(session, pull_obj, obj)
            await request_payloads_for_extension(operation, pull_obj,
                                                 websocket, session)
            if obj is None:
                # logger.warn is a deprecated alias of logger.warning.
                logger.warning("obj is None")
            # Snapshot the pre-merge state so the caller can diff.
            old_obj = copy(obj) if obj is not None else None
            session.merge(pull_obj)
            res = pull_obj, old_obj
        except SkipOperation:
            logger.info(f"operation {operation} skipped")

    elif operation.command == 'd':
        try:
            obj = query_model(session, model, only_pk=True). \
                filter(getattr(model, get_pk(model)) ==
                       operation.row_id).first()
        except NoSuchColumnError:
            # For joins only_pk doesn't seem to work; fall back to
            # querying the full set of columns.
            obj = query_model(session, model, only_pk=False). \
                filter(getattr(model, get_pk(model)) ==
                       operation.row_id).first()
        if obj is None:
            # The object is already deleted in the server.  The final
            # state in node and server are the same, but it's an error
            # because nothing should be deleted without using dbsync.
            logger.warning(
                "The referenced object doesn't exist in database. "
                "Node %s. Operation %s", node_id, operation)
        else:
            try:
                operation.call_before_operation_fn(session, obj)
                session.delete(obj)
                res = obj, None
            except SkipOperation:
                logger.info(f"operation {operation} skipped")

    else:
        raise OperationError(
            "the operation doesn't specify a valid command ('i', 'u', 'd')",
            operation)
    return res
async def handle_push(connection: Connection,
                      session: sqlalchemy.orm.Session
                      ) -> Optional[Dict[str, int]]:
    """
    Consume PushMessages arriving on *connection* and apply them to the
    server database through *session*.

    Returns ``{'new_version_id': <int>}`` when a new version was
    created for the first processed message, ``None`` otherwise.
    (The annotation previously claimed ``Optional[int]``, which did not
    match the dict actually returned.)

    Raises PushRejected / PullSuggested when the client's version does
    not match, the message is unsigned, or an operation fails.
    """
    msgs_got = 0
    version: Optional[Version] = None
    async for msg in connection.socket:
        msgs_got += 1
        msg_json = json.loads(msg)
        pushmsg = PushMessage(msg_json)
        if not pushmsg.operations:
            logger.warning("empty operations list in client PushMessage")
        for op in pushmsg.operations:
            logger.info(f"operation: {op}")

        logger.info(f"message key={pushmsg.key}")

        # Version check: the client must be synced up to the server's
        # latest version before its push can be accepted.
        latest_version_id = core.get_latest_version_id(session=session)
        logger.info(
            f"** version on server:{latest_version_id}, version in pushmsg:{pushmsg.latest_version_id}"
        )
        if latest_version_id != pushmsg.latest_version_id:
            exc = f"version identifier isn't the latest one; " \
                  f"incoming: {pushmsg.latest_version_id}, on server:{latest_version_id}"
            if latest_version_id is None:
                logger.warning(exc)
                raise PushRejected(exc)
            if pushmsg.latest_version_id is None:
                logger.warning(exc)
                raise PullSuggested(exc)
            if pushmsg.latest_version_id < latest_version_id:
                logger.warning(exc)
                raise PullSuggested(exc)
            raise PushRejected(exc)
        if not pushmsg.islegit(session):
            raise PushRejected("message isn't properly signed")

        for listener in before_push:
            listener(session, pushmsg)

        # I) detect unique constraint conflicts and resolve them if possible
        unique_conflicts = find_unique_conflicts(pushmsg, session)
        conflicting_objects = set()
        for uc in unique_conflicts:
            obj = uc['object']
            conflicting_objects.add(obj)
            for key, value in zip(uc['columns'], uc['new_values']):
                setattr(obj, key, value)
        for obj in conflicting_objects:
            make_transient(obj)  # remove from session
        for model in set(type(obj) for obj in conflicting_objects):
            pk_name = get_pk(model)
            pks = [
                getattr(obj, pk_name) for obj in conflicting_objects
                if type(obj) is model
            ]
            session.query(model).filter(getattr(model, pk_name).in_(pks)). \
                delete(synchronize_session=False)  # remove from the database
        session.add_all(conflicting_objects)  # reinsert
        session.flush()

        # II) perform the operations
        operations = [
            o for o in pushmsg.operations if o.tracked_model is not None
        ]
        post_operations: List[Tuple[Operation, SQLClass,
                                    Optional[SQLClass]]] = []
        try:
            op: Operation
            for op in operations:
                (obj, old_obj) = await op.perform_async(
                    pushmsg, session, pushmsg.node_id, connection.socket)
                if obj is not None:
                    # If the op has been skipped, it won't be appended
                    # for post_operation handling.
                    post_operations.append((op, obj, old_obj))

                    resp = dict(type="info",
                                op=dict(
                                    row_id=op.row_id,
                                    version=op.version,
                                    command=op.command,
                                    content_type_id=op.content_type_id,
                                ))
                    call_after_tracking_fn(session, op, obj)
                    await connection.socket.send(json.dumps(resp))

        except OperationError as e:
            logger.exception(
                "Couldn't perform operation in push from node %s.",
                pushmsg.node_id)
            raise PushRejected("at least one operation couldn't be performed",
                               *e.args)

        # III) insert a new version -- only if operations have been done
        if post_operations:
            version = Version(created=datetime.datetime.now(),
                              node_id=pushmsg.node_id)
            session.add(version)

        # IV) insert the operations, discarding the 'order' column
        accomplished_operations = [
            op for (op, obj, old_obj) in post_operations
        ]
        for op in sorted(accomplished_operations, key=attr('order')):
            new_op = Operation()
            for k in [k for k in properties_dict(op) if k != 'order']:
                setattr(new_op, k, getattr(op, k))
            session.add(new_op)
            new_op.version = version
            session.flush()

        for op, obj, old_obj in post_operations:
            op.call_after_operation_fn(session, obj, old_obj)

        for listener in after_push:
            listener(session, pushmsg)

        # Return the new version id back to the client.
        logger.info(f"version is: {version}")
        if version:
            await connection.socket.send(
                json.dumps(
                    dict(type="result", new_version_id=version.version_id)))
            return {'new_version_id': version.version_id}
        else:
            await connection.socket.send(
                json.dumps(dict(type="result", new_version_id=None)))
            logger.info("sent nothing message")
            await connection.socket.close()
    logger.info("push ready")
def to_json(self):
    """Serialize this message to a JSON-compatible dict.

    The ``node`` entry is the encoded node record, or ``None`` when no
    node is attached.
    """
    if self.node is None:
        return {'node': None}
    return {'node': encode_dict(Node)(properties_dict(self.node))}
def handle_push(data: Dict[str, Any],
                session: Optional[Session] = None) -> Dict[str, int]:
    """
    Handle the push request and return a dictionary object to be sent
    back to the node.

    If the push is rejected, this procedure will raise a
    dbsync.server.handlers.PushRejected exception.

    *data* must be a dictionary-like object, usually the product of
    parsing a JSON string.
    """
    # NOTE(review): *session* defaults to None but is used
    # unconditionally below — presumably callers always pass a live
    # Session; confirm.
    message: PushMessage
    try:
        message = PushMessage(data)
    except KeyError:
        raise PushRejected("request object isn't a valid PushMessage", data)
    # The node must be synced up to the server's latest version before
    # its push is accepted; otherwise suggest a pull or reject.
    latest_version_id = core.get_latest_version_id(session=session)
    if latest_version_id != message.latest_version_id:
        exc = "version identifier isn't the latest one; "\
              "given: %s" % message.latest_version_id
        if latest_version_id is None:
            raise PushRejected(exc)
        if message.latest_version_id is None:
            raise PullSuggested(exc)
        if message.latest_version_id < latest_version_id:
            raise PullSuggested(exc)
        raise PushRejected(exc)
    # An empty push is a no-op, not an error.
    if not message.operations:
        return {}
        # raise PushRejected("message doesn't contain operations")
    if not message.islegit(session):
        raise PushRejected("message isn't properly signed")

    for listener in before_push:
        listener(session, message)

    # I) detect unique constraint conflicts and resolve them if possible
    unique_conflicts = find_unique_conflicts(message, session)
    conflicting_objects = set()
    for uc in unique_conflicts:
        obj = uc['object']
        conflicting_objects.add(obj)
        # Overwrite the conflicting columns with the resolved values.
        for key, value in zip(uc['columns'], uc['new_values']):
            setattr(obj, key, value)
    for obj in conflicting_objects:
        make_transient(obj)  # remove from session
    # Delete the old rows so the resolved objects can be re-inserted
    # without violating the unique constraints.
    for model in set(type(obj) for obj in conflicting_objects):
        pk_name = get_pk(model)
        pks = [
            getattr(obj, pk_name) for obj in conflicting_objects
            if type(obj) is model
        ]
        session.query(model).filter(getattr(model, pk_name).in_(pks)).\
            delete(synchronize_session=False)  # remove from the database
    session.add_all(conflicting_objects)  # reinsert
    session.flush()

    # II) perform the operations
    operations = [o for o in message.operations if
                  o.tracked_model is not None]
    try:
        for op in operations:
            op.perform(message, session, message.node_id)
    except OperationError as e:
        logger.exception("Couldn't perform operation in push from node %s.",
                         message.node_id)
        raise PushRejected("at least one operation couldn't be performed",
                           *e.args)

    # III) insert a new version
    version = Version(created=datetime.datetime.now(),
                      node_id=message.node_id)
    session.add(version)

    # IV) insert the operations, discarding the 'order' column
    for op in sorted(operations, key=attr('order')):
        new_op = Operation()
        for k in [k for k in properties_dict(op) if k != 'order']:
            setattr(new_op, k, getattr(op, k))
        session.add(new_op)
        new_op.version = version
        session.flush()

    for listener in after_push:
        listener(session, message)

    # return the new version id back to the node
    return {'new_version_id': version.version_id}
def perform(operation, container, session, node_id=None):
    """
    Performs *operation*, looking for required data in *container*, and
    using *session* to perform it.

    *container* is an instance of dbsync.messages.base.BaseMessage.

    *node_id* is the node responsible for the operation, if known (else
    ``None``).

    If at any moment this operation fails for predictable causes, it
    will raise an *OperationError*.
    """
    model = operation.tracked_model
    if model is None:
        raise OperationError("no content type for this operation", operation)

    if operation.command == 'i':
        # Local object (if any) and the incoming one from the message.
        obj = query_model(session, model).\
            filter(getattr(model, get_pk(model)) == operation.row_id).first()
        pull_obj = container.query(model).\
            filter(attr('__pk__') == operation.row_id).first()
        if pull_obj is None:
            raise OperationError(
                "no object backing the operation in container", operation)
        if obj is None:
            session.add(pull_obj)
        else:
            # Don't raise an exception if the incoming object is
            # exactly the same as the local one.
            if properties_dict(obj) == properties_dict(pull_obj):
                logger.warning(u"insert attempted when an identical object "
                               u"already existed in local database: "
                               u"model {0} pk {1}".format(
                                   model.__name__, operation.row_id))
            else:
                raise OperationError(
                    u"insert attempted when the object already existed: "
                    u"model {0} pk {1}".format(model.__name__,
                                               operation.row_id))
    elif operation.command == 'u':
        obj = query_model(session, model).\
            filter(getattr(model, get_pk(model)) == operation.row_id).first()
        if obj is None:
            # For now, the record will be created again, but is an
            # error because nothing should be deleted without
            # using dbsync
            # raise OperationError(
            #     "the referenced object doesn't exist in database",
            #     operation)
            logger.warning(
                u"The referenced object doesn't exist in database. "
                u"Node %s. Operation %s", node_id, operation)
        pull_obj = container.query(model).\
            filter(attr('__pk__') == operation.row_id).first()
        if pull_obj is None:
            raise OperationError(
                "no object backing the operation in container", operation)
        # merge() inserts or updates as appropriate.
        session.merge(pull_obj)
    elif operation.command == 'd':
        obj = query_model(session, model, only_pk=True).\
            filter(getattr(model, get_pk(model)) == operation.row_id).first()
        if obj is None:
            # The object is already deleted in the server
            # The final state in node and server are the same. But
            # it's an error because nothing should be deleted
            # without using dbsync
            logger.warning(
                "The referenced object doesn't exist in database. "
                u"Node %s. Operation %s", node_id, operation)
        else:
            session.delete(obj)
    else:
        raise OperationError(
            "the operation doesn't specify a valid command ('i', 'u', 'd')",
            operation)
def handle_push(data, session=None):
    """
    Handle the push request and return a dictionary object to be sent
    back to the node.

    If the push is rejected, this procedure will raise a
    dbsync.server.handlers.PushRejected exception.

    *data* must be a dictionary-like object, usually the product of
    parsing a JSON string.
    """
    message = None
    try:
        message = PushMessage(data)
    except KeyError:
        raise PushRejected("request object isn't a valid PushMessage", data)
    # The node must be synced up to the server's latest version before
    # pushing; otherwise suggest a pull or reject outright.
    latest_version_id = core.get_latest_version_id(session=session)
    if latest_version_id != message.latest_version_id:
        exc = "version identifier isn't the latest one; "\
              "given: %s" % message.latest_version_id
        if latest_version_id is None:
            raise PushRejected(exc)
        if message.latest_version_id is None:
            raise PullSuggested(exc)
        if message.latest_version_id < latest_version_id:
            raise PullSuggested(exc)
        raise PushRejected(exc)
    if not message.operations:
        raise PushRejected("message doesn't contain operations")
    if not message.islegit(session):
        raise PushRejected("message isn't properly signed")

    for listener in before_push:
        listener(session, message)

    # I) detect unique constraint conflicts and resolve them if possible
    unique_conflicts = find_unique_conflicts(message, session)
    conflicting_objects = set()
    for uc in unique_conflicts:
        obj = uc['object']
        conflicting_objects.add(obj)
        # builtin zip works on both Python 2 and 3; itertools.izip
        # is Python-2-only.
        for key, value in zip(uc['columns'], uc['new_values']):
            setattr(obj, key, value)
    for obj in conflicting_objects:
        make_transient(obj)  # remove from session
    for model in set(type(obj) for obj in conflicting_objects):
        pk_name = get_pk(model)
        pks = [getattr(obj, pk_name)
               for obj in conflicting_objects
               if type(obj) is model]
        session.query(model).filter(getattr(model, pk_name).in_(pks)).\
            delete(synchronize_session=False)  # remove from the database
    session.add_all(conflicting_objects)  # reinsert
    session.flush()

    # II) perform the operations
    # BUGFIX: materialize to a list.  On Python 3, filter() returns a
    # one-shot iterator; consuming it in the loop below would leave
    # sorted() in step IV with an empty sequence, silently dropping
    # the persisted operation records.
    operations = [o for o in message.operations
                  if o.tracked_model is not None]
    try:
        for op in operations:
            op.perform(message, session, message.node_id)
    except OperationError as e:
        logger.exception(u"Couldn't perform operation in push from node %s.",
                         message.node_id)
        raise PushRejected("at least one operation couldn't be performed",
                           *e.args)

    # III) insert a new version
    version = Version(created=datetime.datetime.now(),
                      node_id=message.node_id)
    session.add(version)

    # IV) insert the operations, discarding the 'order' column
    for op in sorted(operations, key=attr('order')):
        new_op = Operation()
        for k in (k for k in properties_dict(op) if k != 'order'):
            setattr(new_op, k, getattr(op, k))
        session.add(new_op)
        new_op.version = version
        session.flush()

    for listener in after_push:
        listener(session, message)

    # return the new version id back to the node
    return {'new_version_id': version.version_id}