def on_send (self, *args, **kw):
    """Encode the entry-line text according to the current mode and send it.

    Returns 0 in all cases (Tk callback convention). Side effects: sends on
    self.sock, updates the status bar, appends to the log view, and records
    the text in a bounded history list.
    """
    # state 2 appears to mean "connected" — ignore sends otherwise (TODO confirm)
    if self.state != 2:
        return 0
    text = self.entry.get()
    mode = self.mode()
    if mode == 'BIN':
        # binary mode: input_parse turns the text into a (mask, payload) pair
        try:
            data = input_parse(text)
        except:
            #self.append('error input format !!\n', COLOR_ERROR)
            self.set_status(1, 'INPUT ERROR')
            return 0
        # input_parse signals a soft error by returning a message string
        if type(data) == type(''):
            self.append(data + '\n', COLOR_ERROR)
            return 0
        mask, output = data
    elif mode == 'TEXT':
        mask, output = self.mask, text
    elif mode == 'MSGPACK':
        mask = self.mask
        try:
            output = ''
            # NOTE(review): eval of user-typed input — acceptable only for a
            # local debug tool, never for untrusted text
            hr = eval(text)
            import umsgpack
            output = umsgpack.dumps(hr)
        except:
            self.set_status(1, 'SYNTAX ERROR')
            return 0
    else:
        # default mode: pickle the eval'ed expression
        mask = self.mask
        try:
            output = ''
            hr = eval(text)
            import pickle
            output = pickle.dumps(hr)
        except:
            #self.append('input syntax error !!\n', COLOR_ERROR)
            self.set_status(1, 'SYNTAX ERROR')
            return 0
    if len(output) == 0:
        self.set_status(1, 'EMPTY')
        return 0
    # normalize text payloads to bytes before sending
    if type(output) == type(u''):
        output = output.encode(CHARSET)
    self.sock.send(output, mask)
    self.entry_set('')
    # echo the sent bytes as hex in the status bar
    # NOTE(review): ord(ch) over the payload implies iterating yields 1-char
    # strings — this looks like Python 2 code; verify before porting
    data = [ '%02X'%ord(ch) for ch in output ]
    self.set_status(1, 'SEND: %s'%(' '.join(data)))
    self.append(text + '\n', COLOR_SEND)
    # keep a de-duplicated history, capped at the last 100 entries
    if len(self.history) == 0:
        self.history.append(text)
    elif self.history[-1] != text:
        self.history.append(text)
    if len(self.history) > 100:
        self.history = self.history[-100:]
    self.last_mode = mode
    self.entry.focus_set()
    return 0
def chatting(msg_dict):
    """Send *msg_dict* to the server over the global socket and print the
    "result" field of the msgpack-decoded reply."""
    print("Sending " + msg_dict['test_date'] + " .......")
    time.sleep(1)
    payload = umsgpack.dumps(msg_dict)
    sock.send(payload)
    print("Receiving .... ")
    time.sleep(1)
    raw_reply = sock.recv(BYTES)
    reply = umsgpack.loads(raw_reply)
    print(reply["result"])
def render_msgpack_response(data, filename = 'data'):
    """Serialize *data* as msgpack and wrap it in an attachment HttpResponse."""
    # emit old-style (compatibility-mode) msgpack
    umsgpack.compatibility = True
    body = umsgpack.dumps(data)
    response = HttpResponse(body,
                            mimetype = "application/x-msgpack",
                            content_type = "application/x-msgpack")
    disposition = "attachment; filename=%s.msgpack" % slugify(filename)
    response['Content-Disposition'] = disposition
    return response
def render_msgpack_response(data, filename='data'):
    """Return an HttpResponse carrying *data* serialized as msgpack,
    delivered as a downloadable attachment named after *filename*."""
    umsgpack.compatibility = True  # emit compatibility-mode msgpack
    packed = umsgpack.dumps(data)
    response = HttpResponse(packed,
                            mimetype="application/x-msgpack",
                            content_type="application/x-msgpack")
    response['Content-Disposition'] = (
        "attachment; filename=%s.msgpack" % slugify(filename))
    return response
def write(proc : Popen, data):
    """Serialize *data* with msgpack and push it, length-prefixed with a
    native size_t, to the child process's stdin."""
    # rpc_mode enforces strict write/read alternation
    if proc._mpipe_last == "write":
        raise RuntimeError("Consecutive write not allowed in rpc_mode")
    proc._mpipe_last = "write"
    payload = umsgpack.dumps(data)
    header = bytes(ctypes.c_size_t(len(payload)))
    stdin = proc.stdin
    stdin.write(header)
    stdin.write(payload)
    stdin.flush()
def write(proc: Popen, data):
    """Frame *data* (native size_t length prefix + msgpack body) onto the
    child's stdin, enforcing write/read alternation."""
    if proc._mpipe_last == "write":
        raise RuntimeError("Consecutive write not allowed in rpc_mode")
    proc._mpipe_last = "write"
    body = umsgpack.dumps(data)
    prefix = bytes(ctypes.c_size_t(len(body)))
    for chunk in (prefix, body):
        proc.stdin.write(chunk)
    proc.stdin.flush()
def test_api_taxon_post_works():
    """ api taxon - post works """
    points, _ = t.rand_xy_list(150)
    points = points.tolist()
    msgpack_headers = {'Content-Type': falcon.MEDIA_MSGPACK}

    # JSON request
    json_resp = client.simulate_post(
        '/outlierstaxon',
        body=json.dumps({'points': points}, ignore_nan=True))
    _check_outlierstaxon_result(json_resp)

    # msgpack request must agree with the JSON one
    mp_resp = client.simulate_post(
        '/outlierstaxon',
        body=msgpack.dumps({'points': points}),
        headers=msgpack_headers)
    decoded = msgpack.loads(mp_resp.content)
    _check_outlierstaxon_result(mp_resp, decoded)
    assert json_resp.json == decoded

    # repeat both content types with an explicit returnvalues flag
    for return_values in [True, False]:
        json_resp = client.simulate_post(
            '/outlierstaxon',
            body=json.dumps({'points': points, 'returnvalues': return_values},
                            ignore_nan=True))
        _check_outlierstaxon_result(json_resp, return_values=return_values)

        mp_resp = client.simulate_post(
            '/outlierstaxon',
            body=msgpack.dumps({'points': points,
                                'returnvalues': return_values}),
            headers=msgpack_headers)
        decoded = msgpack.loads(mp_resp.content)
        _check_outlierstaxon_result(mp_resp, decoded,
                                    return_values=return_values)
def test_api_dataset_post_works():
    """ api dataset - post works """
    points, _ = t.rand_xy_list(150)
    points = points.tolist()

    # JSON request
    json_resp = client.simulate_post(
        '/outliersdataset',
        body=json.dumps({'points': points}, ignore_nan=True))
    _check_outliersdataset_result(json_resp)

    # msgpack request must produce the identical payload
    mp_resp = client.simulate_post(
        '/outliersdataset',
        body=msgpack.dumps({'points': points}),
        headers={'Content-Type': falcon.MEDIA_MSGPACK})
    decoded = msgpack.loads(mp_resp.content)
    _check_outliersdataset_result(mp_resp, decoded)
    assert json_resp.json == decoded
def exposed_obtain_call(self, function, packed_args):
    """Invoke *function* with zlib-compressed, msgpack-encoded (args, kwargs);
    return its result compressed and encoded the same way."""
    if packed_args is None:
        args, kwargs = [], {}
    else:
        args, kwargs = umsgpack.loads(zlib.decompress(packed_args))
    result = function(*args, **kwargs)
    return zlib.compress(umsgpack.dumps(result))
def handle_message(self, msg):
    """Dispatch one msgpack-encoded RPC message to a registered root object
    and return the msgpack-encoded, packed reply."""
    request = umsgpack.loads(msg)
    class_name = request["_c"]
    # only explicitly registered classes may receive messages
    if class_name not in self.root_objects:
        raise security.SecurityException(
            "Class %s is not available to RPC worker %s, so you cannot send messages to it." % (class_name, self))
    target = self.root_objects[class_name]
    selector = request["_s"]
    security.selector_is_ok(target, selector)
    bound_method = getattr(target, selector)
    call_kwargs = caffeine.pack.unpack(request["_a"])
    result = bound_method(**call_kwargs)
    print("result is", result)
    dict_form = caffeine.pack.pack(result)
    print("dict is", dict_form)
    return umsgpack.dumps(dict_form)
def __call__(self, *args, **kwargs):
    """Send this proxied method call over the client socket and return the
    unpacked reply."""
    if not self.client.burned_ready:
        # first call: consume the pending ready frame once
        self.client.burned_ready = True
        self.client.socket.recv_multipart()
    envelope = {
        "_c": self.class_name,
        "_s": self.method_name,
        "_a": caffeine.pack.pack(kwargs)}
    self.client.socket.send(umsgpack.dumps(envelope))
    frames = self.client.socket.recv_multipart()
    # router-style sockets prepend an identity frame; payload is frame 1
    reply = frames[1] if self.client.router_style_messages else frames[0]
    return caffeine.pack.unpack(umsgpack.loads(reply))
def test_RPCCall(self):
    """Round-trip a stringLength RPC message through a worker built from a
    locally defined class."""
    import caffeine.worker
    import caffeine.pack
    import umsgpack

    @caffeine.RPC.Class
    class Foo:
        @caffeine.RPC.PublicMethod
        @classmethod
        def stringLength(self, string: str) -> int:
            return len(string)

    worker = caffeine.worker.RPCWorker({"Foo": Foo})
    packed_kwargs = caffeine.pack.pack({"string": "test123"})
    message = umsgpack.dumps(
        {"_c": "Foo", "_s": "stringLength", "_a": packed_kwargs})
    worker.handle_message(message)
async def actionMessage(self, source, data):
    """Fan a batch of messages out to subscribed websocket clients.

    A message reaches a client when one of the message's comma-separated
    deviceids intersects that client's subscriptions, or — with
    auto_subscribe_to_all_fallback set — when the client has no
    subscriptions at all.
    """
    for client, client_subscriptions in self.subscriptions.items():
        # optionally skip echoing messages back to their sender
        if not self.echo_back_to_source and client == source:
            continue
        messages_for_this_client = tuple(
            message
            for message in data
            if (self.auto_subscribe_to_all_fallback and not client_subscriptions)
            or (set(deviceid.strip() for deviceid in message.get('deviceid', '').split(',')) & client_subscriptions))
        # (isinstance(message, dict) and )
        if not messages_for_this_client:
            continue
        log.debug(f'message: {source.remote_address} {messages_for_this_client}')
        # wrap the filtered batch in a msgpack envelope and deliver it
        await client.send(umsgpack.dumps(
            {'action': 'message', 'data': messages_for_this_client}
        ))
def test_load_short_read(self):
    """umsgpack.load must tolerate read(n) returning fewer than n bytes
    (as files and sockets legitimately do)."""
    class HalfReader(object):
        # File-like object whose read(n) yields only about half of what
        # was asked for, forcing the loader to call read() repeatedly.
        def __init__(self, data):
            self._data = data

        def read(self, n=None):
            if n is None or n <= 0 or len(self._data) == 0:
                remaining, self._data = self._data, b''
                return remaining
            half = int(math.ceil(n / 2.0))
            chunk = self._data[:half]
            self._data = self._data[half:]
            return chunk

    expected = {'hello': 'world'}
    stream = HalfReader(umsgpack.dumps(expected))
    self.assertEqual(expected, umsgpack.load(stream))
def test_load_short_read(self):
    """Loading from a stream that hands back one byte per read() call must
    still reconstruct the full object."""
    class OneByteFile(object):
        # Worst-case short reader: at most a single byte per read(n).
        def __init__(self, data):
            self._data = data

        def read(self, n=None):
            if n is None or not self._data:
                rest, self._data = self._data, b''
                return rest
            first, self._data = self._data[0:1], self._data[1:]
            return first

    payload = {'hello': 'world'}
    result = umsgpack.load(OneByteFile(umsgpack.dumps(payload)))
    self.assertEqual(result, payload)
def publish(self, topic, message):
    """Broadcast *message* (msgpack-encoded) under *topic* as one multipart frame."""
    frames = (topic, msgpack.dumps(message))
    self._socket.send_multipart(frames)
def process_response(self, req, resp, resource):
    """Falcon middleware hook: msgpack-encode resp.context["result"] into
    the response body; leave the body untouched when there is no result."""
    result = resp.context["result"]
    if result:
        resp.body = mp.dumps(result)
def pack(self, obj):
    """msgpack-serialize *obj*, then zlib-compress the result."""
    packed = umsgpack.dumps(obj)
    return zlib.compress(packed)
def dumps(obj: object) -> bytes:
    """Serialize *obj* to msgpack bytes, encoding str as raw (no bin type)."""
    # TODO strict https://github.com/msgpack/msgpack-python/pull/158
    packed = umsgpack.dumps(obj, use_bin_type=False)
    return packed
def observable_serializer():
    """Provides message serializer to test with msgpack installed via observable mocks."""
    packer = mock.MagicMock()
    # MagicMock wrappers delegate to umsgpack while recording every call
    packer.loads = mock.MagicMock(side_effect=lambda bindata: umsgpack.loads(bindata))
    packer.dumps = mock.MagicMock(side_effect=lambda pydict: umsgpack.dumps(pydict))
    return MessageSerializer(packer)
def broadcast_message(self, message: dict) -> None:
    """Fire-and-forget broadcast of *message*, msgpack-encoded, to all peers."""
    payload = msgpack.dumps(message)
    asyncio.ensure_future(self.broadcast(payload))
def reply(self, data: dict) -> None:
    """Write *data*, msgpack-encoded, back over this connection's transport."""
    payload = msgpack.dumps(data)
    self.transport.write(payload)
def pack_message(msg):
    """Encode *msg* as msgpack bytes for the wire."""
    encoded = umsgpack.dumps(msg)
    return encoded
def _request(self, message):
    """Synchronous request/reply: send *message* msgpack-encoded, then block
    for and decode the response."""
    self.socket.send(umsgpack.dumps(message))
    raw = self.socket.recv()
    return umsgpack.loads(raw)
def serilize_MsgPack(data: Any) -> bytes:
    """Serialize *data* to msgpack bytes.

    NOTE(review): the misspelled name ("serilize") is kept for caller
    compatibility.
    """
    encoded = umsgpack.dumps(data)
    return encoded
def pack(self, packable: Any) -> bytes:
    """Encode *packable*, routing otherwise-unserializable objects through
    self._pack via an ext-type handler."""
    handlers = {object: self._pack}
    return dumps(packable, ext_handlers=handlers)  # type: ignore
def __setitem__(self, key, value):
    """Store *value* (msgpack-encoded) under str(key) in the Redis hash
    backing this mapping."""
    encoded = umsgpack.dumps(value)
    self._state.rdb.hset(self._key, str(key), encoded)
def dataEncoded(self, arg, *args, **kwargs):
    """Strings pass through untouched; anything else is pre-processed by
    onEncode and then msgpack-encoded."""
    if not isinstance(arg, str):
        self.onEncode(arg, *args, **kwargs)
        return msgpack.dumps(arg)
    return arg
def dumps(obj: object) -> bytes:
    """msgpack-encode *obj* with raw strings (no bin type) and strict type
    checking enabled."""
    packed = umsgpack.dumps(obj, use_bin_type=False, strict_types=True)
    return packed
import socket
import umsgpack
import re

LPORT = 9999
LHOST = "127.0.0.1"
MAXCONNECTIONS = 1
BYTES = 4096
# Raw string: in a plain literal '\.' is an invalid escape sequence
# (DeprecationWarning, a SyntaxError in future Python); the pattern value
# itself is unchanged. Matches dates shaped like DD.MM.YYYY.
REGEXP = r'[0-9]{2}\.[0-9]{2}\.[0-9]{4}'

# Single-connection msgpack echo-validator: accept one client, then answer
# each message with {"result": "ok"} when its 'test_date' field matches the
# date pattern, {"result": "error"} otherwise.
sock = socket.socket()
sock.bind((LHOST, LPORT))
sock.listen(MAXCONNECTIONS)
conn, addr = sock.accept()
while True:
    try:
        data = conn.recv(BYTES)
        udata = umsgpack.loads(data)
        if re.match(REGEXP, udata['test_date']):
            conn.send(umsgpack.dumps({"result": "ok"}))
            continue
        else:
            conn.send(umsgpack.dumps({"result": "error"}))
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C/SystemExit still
        # propagate; any decode or connection failure shuts the server down.
        sock.close()
        break
async def hello():
    """Connect to the local websocket server and send the module-level
    *data* object, msgpack-encoded."""
    uri = "ws://localhost:9873"
    async with websockets.connect(uri) as websocket:
        payload = umsgpack.dumps(data)
        await websocket.send(payload)
def put(self, item):
    """Add *item* (msgpack-encoded) to the Redis set backing this container."""
    member = umsgpack.dumps(item)
    self.rdb.sadd(self.key, member)
def serialize_result(res):
    """msgpack-encode an LDAP search result.

    ldap3's CaseInsensitiveDict values are not msgpack-serializable, so any
    such values inside a dict result are first converted to plain dicts
    (in place) before encoding.
    """
    if isinstance(res, dict):
        # isinstance instead of exact type() comparison: also handles dict
        # and CaseInsensitiveDict subclasses, and is the idiomatic check.
        for key, value in res.items():
            if isinstance(value, ldap3.utils.ciDict.CaseInsensitiveDict):
                res[key] = dict(value)
    return umsgpack.dumps(res, use_bin_type=True)