def listen(self):
    """Receive msgpack updates from the ZMQ socket and dispatch each one
    to self.on_receive until self.running is cleared."""
    print("Now listenning...")
    while self.running:
        # Block until a packed update arrives, then decode it (raw=False
        # keeps msgpack strings as str, not bytes).
        payload = self.zmq_socket.recv()
        update = umsgpack.loads(payload, raw=False)
        self.on_receive(update)
    print("Listenner stopping")
def unpack(self, packed: bytes, ext_resolvers: Optional[Dict[Any, Any]] = None) -> Any:
    """Decode msgpack bytes, routing ext type 0x42 through self._unpack
    with the supplied (or empty) resolver map."""
    resolvers = ext_resolvers or {}
    handlers = {
        0x42: lambda ext: self._unpack(ext.data, resolvers),
    }
    return loads(packed, ext_handlers=handlers)
def chatting(msg_dict):
    """Send msg_dict over the module-level socket and print the server's verdict."""
    print("Sending " + msg_dict['test_date'] + " .......")
    time.sleep(1)
    sock.send(umsgpack.dumps(msg_dict))
    print("Receiving .... ")
    time.sleep(1)
    reply = sock.recv(BYTES)
    decoded = umsgpack.loads(reply)
    print(decoded["result"])
def test_api_taxon_post_works():
    """ api taxon - post works """
    points, _ = t.rand_xy_list(150)
    points = points.tolist()

    def post_pair(payload, **check_kwargs):
        # Post the payload as JSON first, then as msgpack; check both.
        json_body = json.dumps(payload, ignore_nan=True)
        res_json = client.simulate_post('/outlierstaxon', body=json_body)
        _check_outlierstaxon_result(res_json, **check_kwargs)
        res_mp = client.simulate_post(
            '/outlierstaxon', body=msgpack.dumps(payload),
            headers={'Content-Type': falcon.MEDIA_MSGPACK})
        decoded = msgpack.loads(res_mp.content)
        _check_outlierstaxon_result(res_mp, decoded, **check_kwargs)
        return res_json, decoded

    res_json, decoded = post_pair({'points': points})
    # Both transports must produce identical response bodies.
    assert res_json.json == decoded
    for return_values in [True, False]:
        post_pair({'points': points, 'returnvalues': return_values},
                  return_values=return_values)
def test_api_dataset_post_works():
    """ api dataset - post works """
    points, _ = t.rand_xy_list(150)
    coords = points.tolist()
    payload = {'points': coords}
    # JSON request first.
    json_result = client.simulate_post(
        '/outliersdataset', body=json.dumps(payload, ignore_nan=True))
    _check_outliersdataset_result(json_result)
    # Same payload again, this time msgpack-encoded.
    mp_result = client.simulate_post(
        '/outliersdataset', body=msgpack.dumps(payload),
        headers={'Content-Type': falcon.MEDIA_MSGPACK})
    decoded = msgpack.loads(mp_result.content)
    _check_outliersdataset_result(mp_result, decoded)
    # Both transports must yield the same response body.
    assert json_result.json == decoded
async def subscription_server(self, source, path):
    """Serve one websocket client: announce the connection, pump decoded
    msgpack frames into self.onMessage, and always deregister on exit.

    NOTE(review): the bare ``self.subscriptions[source]`` below is a lookup
    with no effect other than raising KeyError when absent — it looks like an
    assignment (``self.subscriptions[source] = ...``) was intended; confirm.
    """
    self.subscriptions[source]
    self.onConnected(source)
    try:
        # Each incoming frame is a msgpack payload.
        async for data in source:
            await self.onMessage(source, umsgpack.loads(data))
    except websockets.ConnectionClosedError as ex:
        # Normal client disconnect — log at debug only.
        log.debug('ConnectionClosedError')
    except Exception as ex:
        log.exception('Error with websocket connection')
    finally:
        # Deregister and notify regardless of how the loop ended.
        del self.subscriptions[source]
        self.onDisconnected(source)
def exposed_obtain_call(self, function, packed_args):
    """Invoke *function* with zlib+msgpack packed (args, kwargs) and return
    the result packed the same way."""
    if packed_args is None:
        args, kwargs = [], {}
    else:
        # Arguments travel compressed; inflate then unpack.
        args, kwargs = umsgpack.loads(zlib.decompress(packed_args))
    result = function(*args, **kwargs)
    return zlib.compress(umsgpack.dumps(result))
def safe_obtain(proxy):
    '''
    Safe version of rpyc's rpyc.utils.classic.obtain, without using pickle.
    '''
    # Exact-type check on purpose: rpyc netrefs can masquerade as these
    # types under isinstance(), so type() membership is the reliable test.
    passthrough = [list, str, bytes, dict, set, type(None)]
    if type(proxy) in passthrough:
        return proxy
    try:
        conn = object.__getattribute__(proxy, '____conn__')()
    except AttributeError:
        # Not a netref after all — hand it back untouched.
        return proxy
    if not hasattr(conn, 'obtain'):
        setattr(conn, 'obtain', conn.root.msgpack_dumps)
    packed = conn.obtain(proxy, compressed=True)
    return umsgpack.loads(zlib.decompress(packed))
def read(proc: Popen):
    """Read message from the process, returns None on failure."""
    if proc._mpipe_last == "read":
        raise RuntimeError("Consecutive read not allowed in rpc_mode")
    proc._mpipe_last = "read"
    # Length-prefixed framing: a native size_t, then the msgpack payload.
    header = proc.stdout.read(ctypes.sizeof(ctypes.c_size_t))
    length = int.from_bytes(header, sys.byteorder)
    payload = proc.stdout.read(length)
    try:
        return umsgpack.loads(payload)
    except umsgpack.InsufficientDataException as e:
        # A short read usually means the child died mid-message.
        if proc.poll() != 0:
            raise RuntimeError("The process returned %d." % proc.returncode) from e
        raise
def handle_message(self, msg: bytes):
    """Decode a msgpack message and dispatch it to the handler matching its
    "type" field; remaining keys become the handler's kwargs."""
    try:
        message = msgpack.loads(msg)
        msg_type = Message(message.pop("type"))
        logger.info(f"Handling: {msg_type}")
        dispatch: Dict[Message, Callable] = {
            Message.REQUEST_LATEST_BLOCK: self.handle_request_latest_block,
            Message.RECEIVE_LATEST_BLOCK: self.handle_receive_latest_block,
            Message.REQUEST_BLOCKCHAIN: self.handle_request_blockchain,
            Message.RECEIVE_BLOCKCHAIN: self.handle_receive_blockchain,
        }
        handler = dispatch[msg_type]
        handler(**message)
    except (UnpackException, KeyError, ValueError) as e:
        # Undecodable payload, unknown type, or bad handler arguments.
        logger.error("Unknown message received")
        logger.error(f"{e}")
def handle_message(self, msg):
    """Decode an RPC message, enforce access checks, invoke the selector on
    the target root object, and return the msgpack-packed result."""
    msg = umsgpack.loads(msg)
    cls_name = msg["_c"]
    if cls_name not in self.root_objects:
        raise security.SecurityException(
            "Class %s is not available to RPC worker %s, so you cannot send messages to it."
            % (cls_name, self))
    obj = self.root_objects[cls_name]
    selector = msg["_s"]
    # Raises if this selector may not be called on obj.
    security.selector_is_ok(obj, selector)
    kwargs = caffeine.pack.unpack(msg["_a"])
    result = getattr(obj, selector)(**kwargs)
    print("result is", result)
    dictFormat = caffeine.pack.pack(result)
    print("dict is", dictFormat)
    return umsgpack.dumps(dictFormat)
def safe_obtain(proxy):
    """
    safe version of rpyc's rpyc.utils.classic.obtain, without using pickle.
    """
    # Exact-type check on purpose: rpyc netrefs can spoof isinstance().
    if type(proxy) in [list, str, bytes, dict, set, type(None)]:
        return proxy
    try:
        conn = object.__getattribute__(proxy, "____conn__")()
    except AttributeError:
        # Not an rpyc netref — return it untouched.  Narrowed from a bare
        # ``except:`` which also swallowed unrelated failures (including
        # KeyboardInterrupt); only the missing-attribute case is expected.
        return proxy
    if not hasattr(conn, 'obtain'):
        setattr(conn, 'obtain', conn.root.msgpack_dumps)
    return umsgpack.loads(zlib.decompress(conn.obtain(
        proxy, compressed=True)))  # should prevent any code execution
async def load_key(self):
    """Read this bundle's metadata file and populate self.key / self.relpath.

    Raises InvalidBundleMetadata when the file is not a msgpack dict and
    InvalidBundleKey when the key entry is missing or empty.
    """
    async with await trio.open_file(self.path_metadata, 'rb') as md_file:
        raw = await md_file.read()  # type: ignore
    metadata = umsgpack.loads(raw)
    if not isinstance(metadata, dict):
        raise InvalidBundleMetadata()
    key = metadata.get('key')
    if not key:
        logger.warning('No or invalid key found for %s in metadata: %s',
                       self, metadata)
        raise InvalidBundleKey()
    self.key = key
    self.relpath = self.decode_path(metadata['filename'])
    assert len(self.key) == self.key_size
def on_recv (self, data):
    """Render received bytes for display according to the current mode.

    Modes: 'BIN' -> binary/hex dump via convert_binary, 'TEXT' -> raw
    passthrough, 'MSGPACK' -> decode and repr(), falling back to an error
    report plus raw and hex dumps when decoding fails.

    NOTE(review): this is Python 2 syntax (``except Exception, ex``).  The
    function also ends without returning or displaying ``text`` — the
    remainder may lie outside this view; confirm before editing.
    """
    mode = self.mode()
    if mode == 'BIN':
        text = convert_binary(data, True)
    elif mode == 'TEXT':
        text = data
    elif mode == 'MSGPACK':
        try:
            import umsgpack
            hr = umsgpack.loads(data)
            text = repr(hr)
        except Exception, ex:
            import traceback
            # Decode failed: report the error plus the raw payload twice,
            # once repr'd and once as a binary dump.
            text = "recv msgpack err: %s, %s\n" % (ex, traceback.format_exc())
            text += "rdata:%r\n" % data
            text += convert_binary(data, True)
def __call__(self, *args, **kwargs):
    """Send a packed RPC request for this class/method and return the
    unpacked reply payload."""
    client = self.client
    if not client.burned_ready:
        # Consume the one-time readiness frame before the first real call.
        client.burned_ready = True
        client.socket.recv_multipart()
    request = umsgpack.dumps({
        "_c": self.class_name,
        "_s": self.method_name,
        "_a": caffeine.pack.pack(kwargs)})
    client.socket.send(request)
    frames = client.socket.recv_multipart()
    # Router-style sockets prepend an identity frame before the payload.
    payload = frames[1] if client.router_style_messages else frames[0]
    return caffeine.pack.unpack(umsgpack.loads(payload))
def read(proc : Popen):
    """Read message from the process, returns None on failure."""
    if proc._mpipe_last == "read":
        raise RuntimeError("Consecutive read not allowed in rpc_mode")
    proc._mpipe_last = "read"
    # Wire format: <size_t byte count><msgpack payload>.
    raw_size = proc.stdout.read(ctypes.sizeof(ctypes.c_size_t))
    n_bytes = int.from_bytes(raw_size, sys.byteorder)
    packed = proc.stdout.read(n_bytes)
    try:
        return umsgpack.loads(packed)
    except umsgpack.InsufficientDataException as e:
        # Truncated payload: if the child exited abnormally, surface its
        # return code; otherwise propagate the decode error.
        if proc.poll() != 0:
            raise RuntimeError(
                "The process returned %d." % proc.returncode
            ) from e
        raise
def process_request(self, req, resp):
    """Falcon middleware hook: decode a MessagePack request body into
    ``req.context["body"]``.

    Raises falcon.HTTPBadRequest when the advertised body is empty or the
    payload is not valid MessagePack.
    """
    if req.content_length in (None, 0):
        # No body advertised — nothing to decode.
        return
    body = req.stream.read()
    if not body:
        raise falcon.HTTPBadRequest(
            title="Empty request body",
            description="A valid MessagePack document is required")
    try:
        req.context["body"] = mp.loads(body)
    except Exception as err:
        # Bug fix: user-facing title said "MassagePack".  Also chain the
        # original decode error for debuggability.
        raise falcon.HTTPBadRequest(
            title="Malformed MessagePack",
            description="Could not decode request body") from err
def deserialize_MsgPack(data: bytes) -> Any:
    """Decode msgpack *data*; an empty/falsy payload yields an empty dict."""
    if not data:
        return {}
    return umsgpack.loads(data)
def deserialize_result(res):
    """Deserialize an RPC/LDAP result.

    A dict result is already decoded: nested dicts are upgraded to ldap3
    CaseInsensitiveDict in place and the dict itself is returned.  Anything
    else is treated as a msgpack payload and decoded.

    Bug fix: the original fell through to ``umsgpack.loads(res)`` even when
    *res* was a dict, which cannot be unpacked and raised — the converted
    dict is now returned directly.
    """
    if isinstance(res, dict):
        for k in res.keys():
            if isinstance(res[k], dict):
                res[k] = ldap3.utils.ciDict.CaseInsensitiveDict(res[k])
        return res
    return umsgpack.loads(res, encoding="utf-8")
def __iter__(self):
    """Iterate (field, unpacked value) pairs from the backing redis hash.

    Bug fix: when the hash was empty/missing this implicitly returned None,
    so ``iter(obj)`` raised ``TypeError: iter() returned non-iterator`` —
    now an empty iterator is returned instead.
    """
    val = self._state.rdb.hgetall(self._key)
    if not val:
        return iter(())
    return ((k, umsgpack.loads(v)) for k, v in val.items())
def _request(self, message):
    """Round-trip one msgpack-encoded request/reply over the socket."""
    packed = umsgpack.dumps(message)
    self.socket.send(packed)
    reply = self.socket.recv()
    return umsgpack.loads(reply)
def get(self):
    """Pop a random member from the backing redis set, unpacked; None when
    the set is empty."""
    raw = self.rdb.spop(self.key)
    # Truthiness check mirrors redis returning None (or empty) when exhausted.
    return umsgpack.loads(raw) if raw else None
def loads(buf: bytes) -> object:
    """Deserialize msgpack *buf*; raw=False decodes msgpack strings to str."""
    decoded = umsgpack.loads(buf, raw=False)
    return decoded
def unpack(self, packetdata):
    """Inflate a zlib-compressed packet and decode its msgpack payload."""
    inflated = zlib.decompress(packetdata)
    return umsgpack.loads(inflated)
def receive(self):
    """Yield one (topic, decoded message) pair read from the socket."""
    frames = self._socket.recv_multipart()
    # Frame 0 carries the topic; frame 1 is the msgpack payload.
    yield frames[0], msgpack.loads(frames[1])
import socket
import umsgpack
import re

LPORT = 9999
LHOST = "127.0.0.1"
MAXCONNECTIONS = 1
BYTES = 4096
# Raw string: '\.' in a plain literal is an invalid escape sequence
# (DeprecationWarning, SyntaxWarning in newer Pythons).
REGEXP = r'[0-9]{2}\.[0-9]{2}\.[0-9]{4}'

# Accept a single client and validate each msgpack message's 'test_date'
# against the DD.MM.YYYY pattern, answering {"result": "ok"/"error"}.
sock = socket.socket()
sock.bind((LHOST, LPORT))
sock.listen(MAXCONNECTIONS)
conn, addr = sock.accept()
while True:
    try:
        data = conn.recv(BYTES)
        udata = umsgpack.loads(data)
        if re.match(REGEXP, udata['test_date']):
            conn.send(umsgpack.dumps({"result": "ok"}))
        else:
            conn.send(umsgpack.dumps({"result": "error"}))
    except Exception:
        # Narrowed from a bare except (which also swallowed
        # KeyboardInterrupt); close both sockets on any failure.
        conn.close()
        sock.close()
        break
def dataDecoded(self,arg,*args,**kwargs):
    """Decode *arg* if it is still serialized, otherwise pass it to onDecode
    and return it unchanged.

    NOTE(review): the str branch looks suspicious — msgpack payloads are
    normally bytes, not str, and that branch returns the decoded value
    WITHOUT calling self.onDecode, unlike the fall-through path.  Confirm
    whether ``isinstance(arg, bytes)`` was intended.
    """
    if isinstance(arg,str):
        return msgpack.loads(arg)
    self.onDecode(arg,*args,**kwargs)
    return arg
def unpack_message(packed_msg):
    """Decode a msgpack-serialized message back into Python objects."""
    decoded = umsgpack.loads(packed_msg)
    return decoded
def __getitem__(self, key):
    """Fetch and unpack the hash field for *key*; None when absent."""
    packed = self._state.rdb.hget(self._key, str(key))
    # redis returns None (falsy) for a missing field.
    return umsgpack.loads(packed) if packed else None
def observable_serializer():
    """Provides message serializer to test with msgpack installed via observable mocks."""
    packer = mock.MagicMock()
    # Delegate straight through to umsgpack so behavior matches the real
    # packer while call counts/arguments stay observable on the mock.
    packer.loads = mock.MagicMock(side_effect=umsgpack.loads)
    packer.dumps = mock.MagicMock(side_effect=umsgpack.dumps)
    return MessageSerializer(packer)