def dumps(data, excp_cls=coordination.SerializationError):
    """Serialize ``data`` with msgpack and return the byte string.

    :param data: object to serialize
    :param excp_cls: exception class raised (with the original error as
                     its cause) when serialization fails
    """
    try:
        return msgpackutils.dumps(data)
    except (msgpack.PackException, ValueError) as exc:
        # Re-raise as a coordination-level error so callers only have to
        # catch excp_cls rather than msgpack internals.
        coordination.raise_with_cause(excp_cls, exception_message(exc),
                                      cause=exc)
def dumps(data, excp_cls=SerializationError):
    """Serialize ``data`` using msgpack into a byte string.

    On failure the low-level error is wrapped in ``excp_cls`` with the
    original exception attached as the cause.
    """
    try:
        blob = msgpackutils.dumps(data)
    except (msgpack.PackException, ValueError) as exc:
        raise_with_cause(excp_cls,
                         encodeutils.exception_to_unicode(exc),
                         cause=exc)
    else:
        return blob
def _set_local_cache(self, key, value, ctx=None):
    """Cache a serialized copy of *value* on the request context.

    Subsequent calls to the memoized method can then read the value back
    from request-local storage instead of recomputing it.
    """
    ctx = ctx or self._get_request_context()
    blob = msgpackutils.dumps({'payload': value.payload,
                               'metadata': value.metadata})
    setattr(ctx, self._get_request_key(key), blob)
    ctx.update_store()
def _set_local_cache(self, key, value, ctx=None):
    # Store a serialized version of the returned value in the request
    # context so later calls to the memoized method hit the local cache.
    if not ctx:
        ctx = self._get_request_context()
    payload = {'payload': value.payload, 'metadata': value.metadata}
    cache_key = self._get_request_key(key)
    setattr(ctx, cache_key, msgpackutils.dumps(payload))
    ctx.update_store()
def _dumps(obj):
    """Serialize *obj* into a msgpack blob, raising JobFailure on error."""
    try:
        return msgpackutils.dumps(obj)
    except (msgpack.PackException, ValueError):
        # TODO(harlowja): remove direct msgpack exception access when
        # oslo.utils provides easy access to the underlying msgpack
        # pack/unpack exceptions..
        exc.raise_with_cause(exc.JobFailure,
                             "Failed to serialize object to msgpack blob")
def _dumps(obj):
    """Pack *obj* with msgpack; wrap serialization failures in JobFailure."""
    # TODO(harlowja): remove direct msgpack exception access when
    # oslo.utils provides easy access to the underlying msgpack
    # pack/unpack exceptions..
    try:
        blob = msgpackutils.dumps(obj)
    except (msgpack.PackException, ValueError):
        exc.raise_with_cause(
            exc.JobFailure,
            "Failed to serialize object to msgpack blob")
    else:
        return blob
def test_custom_register(self):
    # A custom handler registered on an unfrozen copy of the default
    # registry must round-trip a Color through dumps/loads intact.
    registry = msgpackutils.default_registry.copy(unfreeze=True)
    registry.register(ColorHandler())
    original = Color(255, 254, 253)
    restored = msgpackutils.loads(
        msgpackutils.dumps(original, registry=registry),
        registry=registry)
    self.assertEqual(255, restored.r)
    self.assertEqual(254, restored.g)
    self.assertEqual(253, restored.b)
def test_custom_register(self):
    """Round-trip a Color through a registry with a custom handler."""
    reg = msgpackutils.default_registry.copy(unfreeze=True)
    reg.register(ColorHandler())
    blob = msgpackutils.dumps(Color(255, 254, 253), registry=reg)
    color = msgpackutils.loads(blob, registry=reg)
    for expected, actual in ((255, color.r), (254, color.g), (253, color.b)):
        self.assertEqual(expected, actual)
def add_compute():
    """Generate a connection string for joining a compute node to the cluster.

    The procedure is:

    * verify this is a clustered 'control' node (the initial member of the
      cluster) — add-compute is not supported anywhere else;
    * create a Keystone application credential scoped to the service project
      with restricted capabilities (reader role, service-catalog listing
      only) and an expiration just long enough for the operator to copy the
      connection string over to the compute node;
    * pick the address the client will use to reach the clustering service;
    * msgpack-serialize the above and base64-encode it.
    """
    if config_get('config.cluster.role') != 'control':
        raise Exception('Running add-compute is only supported on a'
                        ' control node.')

    credential = _create_credential()
    # TODO: we do not use hostname verification, however, using
    # an FQDN might be useful here since the host may be behind NAT
    # with a split-horizon DNS implemented where a hostname would point
    # us to a different IP.
    payload = {
        'hostname': config_get('config.network.control-ip'),
        # Store bytes since the representation will be shorter than with hex.
        'fingerprint': bytes.fromhex(
            config_get('config.cluster.fingerprint')),
        'id': credential.id,
        'secret': credential.secret,
    }
    connection_string = base64.encode_as_text(msgpackutils.dumps(payload))

    # Expiration notice goes to stderr so the connection string itself can
    # be captured cleanly from stdout.
    print(
        'Use the following connection string to add a new compute node'
        f' to the cluster (valid for {VALIDITY_PERIOD.minutes} minutes from'
        f' this moment):',
        file=sys.stderr)
    print(connection_string)
def serialize(self, obj):
    """Return *obj*'s attribute dict packed via the handler registry."""
    state = obj.__dict__
    return msgpackutils.dumps(state, registry=self._registry)
def add_measures(self, metric, measures):
    """Persist *measures* for *metric* as a msgpack blob.

    Measures are normalized to tuples before packing so msgpack gets a
    uniform, serializable representation.
    """
    packed = msgpackutils.dumps([tuple(m) for m in measures])
    self._store_measures(metric, packed)
def add_measures(self, metric, measures):
    """Store the given measures for *metric*, serialized with msgpack."""
    # Convert each measure to a tuple so the sequence packs cleanly.
    as_tuples = list(map(tuple, measures))
    self._store_measures(metric, msgpackutils.dumps(as_tuples))
def serialize(self, obj):
    """Pack *obj.revoke_map* into bytes using the configured registry."""
    revoke_map = obj.revoke_map
    return msgpackutils.dumps(revoke_map, registry=self._registry)
def _dumps_loads(obj):
    """Round-trip *obj* through msgpack (serialize, then deserialize)."""
    return msgpackutils.loads(msgpackutils.dumps(obj))
if six.PY3: blob = blob.encode('ascii') print("color blob: ", blob) return blob @staticmethod def deserialize(data): chunks = [int(c.strip()) for c in data.split(b",")] return Color(chunks[0], chunks[1], chunks[2]) registry = msgpackutils.default_registry.copy(unfreeze=True) registry.register(ColorHandler()) c = Color(255, 254, 253) c_b = msgpackutils.dumps(c, registry=registry) print(c_b) c = msgpackutils.loads(c_b, registry=registry) print(c.r, c.g, c.b) ''' msgpackutils 用于将python中的数据对象(某种类型,如这里的UUID,Color)串行化到文件或者字符串变量中。 方法及过程为: 1. 定义对象 2. 定对象的处理逻辑,主要包含四个部分: a. handles 变量:用于表明可以处理的数据类型。 b. identity 变量:用于串行化到字符串中表明原有数据类型。 c. serialize 函数:用于串行化对象 d. deserialize 函数:反串行化数据流(怎么串行化就怎么反过程处理) 3. 获取registry 对象,相当于一个namespace,所有的处理过程的定义在一个registry下。 4. 注册串行化Handler到registry。
def dump_as_bytes(self, obj):
    """Serialize *obj* to a msgpack byte string via the local registry."""
    registry = self._registry
    return msgpackutils.dumps(obj, registry=registry)
def _dumps_loads(obj):
    """Serialize *obj* to msgpack bytes and decode the result back."""
    blob = msgpackutils.dumps(obj)
    return msgpackutils.loads(blob)
def serialize(self, obj):
    """Serialize the object's attribute dict with the handler registry."""
    instance_state = obj.__dict__
    return msgpackutils.dumps(instance_state, registry=self._registry)
def dump_as_bytes(self, obj):
    """Encode *obj* as msgpack bytes using this serializer's registry."""
    reg = self._registry
    return msgpackutils.dumps(obj, registry=reg)