def recv(sock, **kwargs):
    msg = sock.recv()
    print "recv: ", msg, kwargs
    try:
        # validate that the payload decodes as msgpack
        msgpack.unpackb(msg)
    except Exception:
        print "Error: invalid msgpack"
def _unserializer(code, data):
    if code == 0:
        return uuid.UUID(hex=six.text_type(data, encoding='ascii'))
    if code == 1:
        return _deserialize_datetime(data)
    if code == 2:
        value = msgpack.unpackb(data)
        if not _PY26:
            return itertools.count(value[0], value[1])
        else:
            return itertools.count(value[0])
    if netaddr and code == 3:
        value = msgpack.unpackb(data)
        return netaddr.IPAddress(value)
    if code in (4, 5):
        value = loads(data)
        if code == 4:
            return set(value)
        else:
            return frozenset(value)
    if code == 6:
        dt = _deserialize_datetime(data)
        return xmlrpclib.DateTime(dt.timetuple())
    if code == 7:
        return _deserialize_date(data)
    return msgpack.ExtType(code, data)
def process_request():
    frames = socket.recv_multipart()  # killed by gsd
    i = frames.index('')
    command = frames[i + 2]
    if command == '\x02':
        global interrupted
        interrupted = True
        return
    i = frames.index('', 1)
    sequence, timestamp, expiry = msgpack.unpackb(frames[i + 1])
    method = frames[i + 2]
    params = msgpack.unpackb(frames[i + 3])
    try:
        global converter
        ret = getattr(converter, method)(*params)
    except Exception:
        ret = ''
    frames = frames[:i + 1]
    now = int(round(time.time() * 1000))
    frames.append(msgpack.packb([sequence, now, 200]))
    frames.append(msgpack.packb(ret))
    socket.send_multipart(frames)
def unserialize(self, payload):
    """
    Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.unserialize`
    """
    if self._batched:
        msgs = []
        N = len(payload)
        i = 0
        while i < N:
            ## read message length prefix
            if i + 4 > N:
                raise Exception("batch format error [1]")
            l = struct.unpack("!L", payload[i:i + 4])[0]
            ## read message data
            if i + 4 + l > N:
                raise Exception("batch format error [2]")
            data = payload[i + 4:i + 4 + l]
            ## append parsed raw message
            msgs.append(msgpack.unpackb(data, encoding='utf-8'))
            ## advance until everything consumed
            i = i + 4 + l
        if i != N:
            raise Exception("batch format error [3]")
        return msgs
    else:
        return [msgpack.unpackb(payload, encoding='utf-8')]
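For contrast, the serialize direction of this batched format would length-prefix each packed message with the same `!L` header. The following is only a sketch of the counterpart, not autobahn's actual code; `self._batched` is taken from the snippet above, and `use_bin_type=True` is assumed as the pack-side pairing of the old-style `encoding='utf-8'` unpack option:

import struct
import msgpack

def serialize(self, msgs):
    # Sketch: pack each message; in batched mode, prefix each packed
    # payload with a 4-byte big-endian length, mirroring unserialize().
    chunks = [msgpack.packb(msg, use_bin_type=True) for msg in msgs]
    if self._batched:
        return b''.join(struct.pack("!L", len(c)) + c for c in chunks)
    return chunks[0]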
def run_once(self):
    self._feed_buffer()
    channel_idx, buf, fin = self._unpack_buffer()
    if channel_idx is None:
        return
    elif channel_idx == -1:
        raise FluxUSBError("USB protocol broken")
    elif channel_idx < 0x80:
        channel = self.channels.get(channel_idx)
        if channel is None:
            raise FluxUSBError("Recv bad channel idx 0x%02x" % channel_idx)
        if fin == 0xfe:
            channel.on_object(msgpack.unpackb(buf))
        elif fin == 0xff:
            self._send_binary_ack(channel_idx)
            channel.on_binary(buf)
        elif fin == 0x80:
            channel.on_binary_ack()
        else:
            raise FluxUSBError("Recv bad fin 0x%02x" % fin)
    elif channel_idx == 0xf0:
        if fin != 0xfe:
            raise FluxUSBError("Recv bad fin 0x%02x" % fin)
        self._on_channel_ctrl_response(msgpack.unpackb(buf))
    else:
        raise FluxUSBError("Recv bad control channel 0x%02x" % channel_idx)
def decode(obj):
    if isinstance(obj, ExtType):
        if obj.code == TYPE_PSET:
            unpacked_data = unpackb(obj.data, use_list=False, encoding='utf-8')
            return pset(decode(item) for item in unpacked_data)
        if obj.code == TYPE_PLIST:
            unpacked_data = unpackb(obj.data, use_list=False, encoding='utf-8')
            return plist(decode(item) for item in unpacked_data)
        if obj.code == TYPE_PBAG:
            unpacked_data = unpackb(obj.data, use_list=False, encoding='utf-8')
            return pbag(decode(item) for item in unpacked_data)
        module_name, class_name, *data = unpackb(obj.data, use_list=False, encoding='utf-8')
        cls = getattr(sys.modules[module_name], class_name)
        return cls(*(decode(item) for item in data))
    if isinstance(obj, tuple):
        return pvector(decode(item) for item in obj)
    if isinstance(obj, dict):
        new_dict = dict()
        for key in obj.keys():
            new_dict[decode(key)] = decode(obj[key])
        return pmap(new_dict)
    return obj
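A matching `encode` hook for the decoder above might look like this sketch. It assumes the same `TYPE_PSET`/`TYPE_PLIST`/`TYPE_PBAG` ext codes from the surrounding module and is illustrative only, not the project's actual code:

from msgpack import ExtType, packb
from pyrsistent import PSet, PList, PBag

def encode(obj):
    # Sketch of the inverse of decode(): wrap pyrsistent containers in
    # ExtType payloads so decode() can rebuild them on the way back in.
    # TYPE_PSET/TYPE_PLIST/TYPE_PBAG are the assumed module-level codes.
    if isinstance(obj, PSet):
        return ExtType(TYPE_PSET, packb(list(obj), default=encode, use_bin_type=True))
    if isinstance(obj, PList):
        return ExtType(TYPE_PLIST, packb(list(obj), default=encode, use_bin_type=True))
    if isinstance(obj, PBag):
        return ExtType(TYPE_PBAG, packb(list(obj), default=encode, use_bin_type=True))
    raise TypeError("cannot encode %r" % (obj,))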
def recv(self, s):
    m = s.recv_multipart()
    if len(m) == 6:
        id, client_id, null, token, mtype, data = m
        mtype = msgpack.unpackb(mtype)
        mtype = MAP[mtype]
        return id, client_id, token.decode('utf-8'), mtype, data.decode('utf-8')
    elif len(m) == 5:
        id, null, token, mtype, data = m
        mtype = msgpack.unpackb(mtype)
        mtype = MAP[mtype]
        return id, token.decode('utf-8'), mtype, data.decode('utf-8')
    elif len(m) == 4:
        id, token, mtype, data = m
        mtype = msgpack.unpackb(mtype)
        mtype = MAP[mtype]
        return id, token.decode('utf-8'), mtype, data.decode('utf-8')
    elif len(m) == 3:
        id, mtype, data = m
        try:
            mtype = msgpack.unpackb(mtype)
            mtype = MAP[mtype]
        except msgpack.exceptions.ExtraData:
            pass
        return id, mtype, data.decode('utf-8')
    else:
        mtype, data = m
        return mtype, data.decode("utf-8")
def run_once(self):
    self._feed_buffer()
    channel_idx, buf, fin = self._unpack_buffer()
    if channel_idx is None:
        return
    elif channel_idx == -1:
        raise FluxUSBError("USB protocol broken, got zero data length.")
    elif channel_idx < 0x80:
        channel = self.channels.get(channel_idx)
        if channel is None:
            raise FluxUSBError("Recv bad channel idx 0x%02x" % channel_idx)
        if fin == 0xf0:
            channel.on_object(msgpack.unpackb(buf, encoding="utf8", unicode_errors="ignore"))
        elif fin == 0xff:
            self._send_binary_ack(channel_idx)
            channel.on_binary(buf)
        elif fin == 0xc0:
            channel.on_binary_ack()
        else:
            raise FluxUSBError("Recv bad fin 0x%02x" % fin)
    elif channel_idx == 0xa0 and fin == 0xff:
        logger.debug("Recv padding")
    elif channel_idx == 0xf1:
        if fin != 0xf0:
            raise FluxUSBError("Recv bad fin 0x%02x" % fin)
        self._on_channel_ctrl_response(msgpack.unpackb(buf))
    elif channel_idx == 0xfb:
        self._on_pong(buf)
    else:
        self.stop()
        self.close()
        raise FluxUSBError("Recv bad control channel 0x%02x" % channel_idx)
def test_call_with_multi_args(self):
    addr = get_random_ipc_socket()
    with utils.TestingServer(methods=self.get_methods(), addresses=addr):
        context = zmq.Context()
        socket = context.socket(zmq.REQ)
        socket.connect(addr)

        socket.send(msgpack.packb({
            'm': 'method3',
            'k': {'name': 'WORLD'},
        }, encoding='utf-8'))
        response = msgpack.unpackb(socket.recv(), encoding='utf-8')
        assert response == {'r': 'Hello, WORLD!'}

        socket.send(msgpack.packb({
            'm': 'method3',
            'k': {'greeting': 'HOWDY'},
        }, encoding='utf-8'))
        response = msgpack.unpackb(socket.recv(), encoding='utf-8')
        assert response == {'r': 'HOWDY, world!'}

        socket.send(msgpack.packb({
            'm': 'method3',
            'k': {'greeting': 'HOWDY', 'name': 'MAN'},
        }, encoding='utf-8'))
        response = msgpack.unpackb(socket.recv(), encoding='utf-8')
        assert response == {'r': 'HOWDY, MAN!'}

        socket.send(msgpack.packb({
            'm': 'method3',
            'a': ('hey', 'man'),
        }, encoding='utf-8'))
        response = msgpack.unpackb(socket.recv(), encoding='utf-8')
        assert response == {'r': 'hey, man!'}
def test_odict():
    seq = [(b'one', 1), (b'two', 2), (b'three', 3), (b'four', 4)]
    od = OrderedDict(seq)
    assert unpackb(packb(od), use_list=1) == dict(seq)

    def pair_hook(seq):
        return list(seq)

    assert unpackb(packb(od), object_pairs_hook=pair_hook, use_list=1) == seq
def show_raw_results(campaign_id):
    campaign = CampaignController.get(campaign_id)
    if 'lastUpdate' in request.args:
        time = datetime.datetime.utcfromtimestamp(float(request.args['lastUpdate']))
        results = ResultsController.get_all_since_time(campaign_id, time)
    else:
        results = ResultsController.get_all(campaign_id)
    if len(results) > 0:
        first_result = results[0]
        timestamp = (first_result.date_added - datetime.datetime(1970, 1, 1)).total_seconds()
    else:
        timestamp = (datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds()
    results_arr = []
    for result in results:
        result_obj = msgpack.unpackb(result.data)
        dataset_obj = msgpack.unpackb(result.dataset.data)
        results_arr.append({'dataset': dataset_obj, 'result': result_obj})
    data = msgpack.packb({'timestamp': timestamp, 'results': results_arr})
    res = current_app.make_response(data)
    filename = safe_filename(campaign.title) + '_results_' + str(math.floor(timestamp)) + '.msgpack'
    res.headers.add('Content-Disposition', 'attachment;filename=' + filename)
    res.mimetype = 'application/octet-stream'
    return res
def query(client, i):
    t = time.time()
    v = {'dd_%s' % (i): 'abc12333:' + str(t)}
    print v
    client.send(packb(v))
    while True:
        y = client.recv(10240)
        if not y:
            break
        print unpackb(y)
        print time.time() - t
        # print dir(client)
        # client.send('0')
        # y = client.recv(10240)
        # print "[%s]"%(y)
        gevent.sleep(0)
def unserialize(self, payload):
    """
    Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.unserialize`
    """

    def ensure_string_keys(d):
        """
        under python 2, with use_bin_type=True, most dict keys end up
        getting encoded as bytes (any syntax except {u"key": u"value"})
        so instead of recursively looking through everything that's
        getting serialized, we fix them up on the way out using
        msgpack's `object_hook` as there's no corresponding hook for
        serialization...
        """
        for (k, v) in six.iteritems(d):
            if not isinstance(k, six.text_type):
                newk = six.text_type(k, encoding='utf8')
                del d[k]
                d[newk] = v
        return d

    if self._batched:
        msgs = []
        N = len(payload)
        i = 0
        while i < N:
            # read message length prefix
            if i + 4 > N:
                raise Exception("batch format error [1]")
            l = struct.unpack("!L", payload[i:i + 4])[0]
            # read message data
            if i + 4 + l > N:
                raise Exception("batch format error [2]")
            data = payload[i + 4:i + 4 + l]
            # append parsed raw message
            msgs.append(
                msgpack.unpackb(
                    data,
                    encoding='utf-8',
                    object_hook=ensure_string_keys,
                )
            )
            # advance until everything consumed
            i = i + 4 + l
        if i != N:
            raise Exception("batch format error [3]")
        return msgs
    else:
        unpacked = msgpack.unpackb(
            payload,
            encoding='utf-8',
            object_hook=ensure_string_keys,
        )
        return [unpacked]
def flash_binary(fdesc, binary, base_address, device_class, destinations, page_size=2048):
    """
    Writes a full binary to the flash using the given file descriptor.

    It also takes the binary image, the base address and the device class
    as parameters.
    """
    print("Erasing pages...")
    pbar = progressbar.ProgressBar(maxval=len(binary)).start()

    # First erase all pages
    for offset in range(0, len(binary), page_size):
        erase_command = commands.encode_erase_flash_page(base_address + offset, device_class)
        res = utils.write_command_retry(fdesc, erase_command, destinations)
        failed_boards = [str(id) for id, success in res.items() if not msgpack.unpackb(success)]
        if failed_boards:
            msg = ", ".join(failed_boards)
            msg = "Boards {} failed during page erase, aborting...".format(msg)
            logging.critical(msg)
            sys.exit(2)
        pbar.update(offset)
    pbar.finish()

    print("Writing pages...")
    pbar = progressbar.ProgressBar(maxval=len(binary)).start()

    # Then write all pages in chunks
    for offset, chunk in enumerate(page.slice_into_pages(binary, page_size)):
        offset *= page_size
        command = commands.encode_write_flash(chunk, base_address + offset, device_class)
        res = utils.write_command_retry(fdesc, command, destinations)
        failed_boards = [str(id) for id, success in res.items() if not msgpack.unpackb(success)]
        if failed_boards:
            msg = ", ".join(failed_boards)
            msg = "Boards {} failed during page write, aborting...".format(msg)
            logging.critical(msg)
            sys.exit(2)
        pbar.update(offset)
    pbar.finish()

    # Finally update application CRC and size in config
    config = dict()
    config['application_size'] = len(binary)
    config['application_crc'] = crc32(binary)
    utils.config_update_and_save(fdesc, config, destinations)
def send_request_message(self, request):
    # for deep copy of Request object
    obj = msgpack.unpackb(msgpack.packb(request.packed_object()))
    rcv_request = Request.create_from_packed(obj)
    response = self.dispatcher.dispatch_request(rcv_request)
    # for deep copy of Response object
    obj = msgpack.unpackb(msgpack.packb(response.packed_object()))
    return Response.create_from_packed(obj)
def decode(obj, silent=False):
    """Decode msgpack binary data to python object."""
    if obj is None:
        return
    if silent:
        return msgpack.unpackb(obj, object_hook=msgpack_decode_silent, encoding='utf8')
    return msgpack.unpackb(obj, object_hook=msgpack_decode, encoding='utf8')
def msgpack_unpack(code, data):
    if code == 21:
        data = msgpack.unpackb(data, encoding='utf-8', ext_hook=msgpack_unpack)
        return Folder(data['name'], data['files'], data['folders'])
    elif code == 81:
        data = msgpack.unpackb(data, encoding='utf-8', ext_hook=msgpack_unpack)
        return File(data['name'], data['size'])
    raise RuntimeError('unknown msgpack extension type %i' % code)
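The pack direction for these ext types could go through msgpack's `default=` hook. The following is a hypothetical sketch, assuming `Folder` and `File` expose the attributes read above; it is not the project's actual code:

def msgpack_pack(obj):
    # Sketch: ext code 21 for folders, 81 for files, matching msgpack_unpack().
    if isinstance(obj, Folder):
        payload = {'name': obj.name, 'files': obj.files, 'folders': obj.folders}
        return msgpack.ExtType(21, msgpack.packb(payload, default=msgpack_pack, use_bin_type=True))
    if isinstance(obj, File):
        payload = {'name': obj.name, 'size': obj.size}
        return msgpack.ExtType(81, msgpack.packb(payload, default=msgpack_pack, use_bin_type=True))
    raise TypeError('cannot pack %r' % (obj,))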
def from_dict(cls, data, hash_children=False):
    """ """
    instance = cls()
    for attribute in instance._attributes:
        value = data[attribute.name]

        # Dictionary
        if attribute.type is dict:
            if attribute.children_type and hasattr(attribute.children_type, 'to_dict'):
                attribute_value = attribute.type()
                for key in value.keys():
                    object = value[key]
                    d = msgpack.unpackb(object) if hash_children else object
                    attribute_value[key] = attribute.children_type.from_dict(d)
                setattr(instance, attribute.internal_name, attribute_value)
            else:
                setattr(instance, attribute.internal_name, msgpack.unpackb(value) if hash_children else value)

        # List
        elif attribute.type is list:
            if attribute.children_type and hasattr(attribute.children_type, 'to_dict'):
                attribute_value = attribute.type()
                for object in value:
                    d = msgpack.unpackb(object) if hash_children else object
                    attribute_value.append(attribute.children_type.from_dict(d))
                setattr(instance, attribute.internal_name, attribute_value)
            else:
                setattr(instance, attribute.internal_name, msgpack.unpackb(value) if hash_children else value)

        elif value is not None:
            # Objects
            if hasattr(attribute.type, 'to_dict'):
                object = attribute.type()  # Note: Allows to use NURESTObject as well
                d = msgpack.unpackb(value) if hash_children else value
                value = object.from_dict(d)
                setattr(instance, attribute.internal_name, value if value else object)
            # Datetime
            elif attribute.type is datetime:
                setattr(instance, attribute.internal_name, datetime.strptime(value, cls.DATE_FORMAT))
            # Boolean
            elif attribute.type is bool:
                setattr(instance, attribute.internal_name, True if value == "True" else False)
            # Others
            else:
                setattr(instance, attribute.internal_name, value)

        # None
        else:
            setattr(instance, attribute.internal_name, None)

    return instance
def test_odict():
    seq = [(b"one", 1), (b"two", 2), (b"three", 3), (b"four", 4)]
    od = odict(seq)
    assert_equal(unpackb(packb(od), use_list=1), dict(seq))

    def pair_hook(seq):
        return seq

    assert_equal(unpackb(packb(od), object_pairs_hook=pair_hook, use_list=1), seq)
def mp_to_remote_iface(val):
    # hack to deal with the fact that this is for both Interface and
    # RemoteInterface going through this same function, and logging doing
    # weird stuff. Won't be in the final code.
    if isinstance(msgpack.unpackb(val), int):
        remote_id = msgpack.unpackb(val)
        return RemoteInterfaceDescriptor(remote_id, None)
    remote_id, user_id = msgpack.unpackb(val)
    return RemoteInterfaceDescriptor(remote_id, server.send_to(user_id))
def test_put_of_valid_key(self):
    message = self.request_message('PUT', ['a', '1'])
    header, content = self.handler.command(message)
    plain_header = msgpack.unpackb(header)
    plain_content = msgpack.unpackb(content)
    self.assertEqual(plain_header['status'], SUCCESS_STATUS)
    self.assertEqual(plain_content['datas'], None)
def test_mget_of_existing_keys(self):
    message = self.request_message('MGET', [['1', '2', '3']])
    header, content = self.handler.command(message)
    plain_header = msgpack.unpackb(header)
    plain_content = msgpack.unpackb(content)
    self.assertEqual(plain_header['status'], SUCCESS_STATUS)
    self.assertEqual(plain_content['datas'], ('11', '12', '13'))
def test_auto_max_array_len():
    packed = b'\xde\x00\x06zz'
    with pytest.raises(UnpackValueError):
        unpackb(packed, raw=False)

    unpacker = Unpacker(max_buffer_size=5, raw=False)
    unpacker.feed(packed)
    with pytest.raises(UnpackValueError):
        unpacker.unpack()
def test_strict_map_key():
    valid = {u"unicode": 1, b"bytes": 2}
    packed = packb(valid, use_bin_type=True)
    assert valid == unpackb(packed, raw=False, strict_map_key=True)

    invalid = {42: 1}
    packed = packb(invalid, use_bin_type=True)
    with raises(ValueError):
        unpackb(packed, raw=False, strict_map_key=True)
def test_delete(self):
    message = self.request_message('DELETE', ['9'])
    header, content = self.handler.command(message)
    plain_header = msgpack.unpackb(header)
    plain_content = msgpack.unpackb(content)
    self.assertEqual(plain_header['status'], SUCCESS_STATUS)
    self.assertEqual(plain_content['datas'], None)
def test_command_with_existing_command(self):
    message = self.request_message('GET', ['1'])
    header, content = self.handler.command(message)
    plain_header = msgpack.unpackb(header)
    plain_content = msgpack.unpackb(content)
    self.assertEqual(plain_header['status'], SUCCESS_STATUS)
    self.assertNotEqual(plain_content['datas'], None)
def test_exists_of_non_existing_key_2(self):
    message = self.request_message('EXISTS', ['non_existing'])
    header, content = self.handler.command(message)
    plain_header = msgpack.unpackb(header)
    plain_content = msgpack.unpackb(content)
    self.assertEqual(plain_header['status'], SUCCESS_STATUS)
    self.assertEqual(plain_content['datas'], False)
def test_create_valid_db(self):
    message = self.request_message('DBCREATE', ['testdb'])
    header, content = self.handler.command(message)
    plain_header = msgpack.unpackb(header)
    plain_content = msgpack.unpackb(content)
    self.assertEqual(plain_header['status'], SUCCESS_STATUS)
    self.assertEqual(plain_content['datas'], None)
def test_integer():
    x = -(2 ** 63)
    assert unpackb(packb(x)) == x
    with pytest.raises((OverflowError, ValueError)):
        packb(x - 1)

    x = 2 ** 64 - 1
    assert unpackb(packb(x)) == x
    with pytest.raises((OverflowError, ValueError)):
        packb(x + 1)
def test_serialize_default_args(self, sender):
    now = time.time()
    r1 = sender.serialize('test data')
    r2 = sender.serialize({'message': 'test'})
    r1 = msgpack.unpackb(r1, encoding='utf-8')
    r2 = msgpack.unpackb(r2, encoding='utf-8')
    assert r1[0] == r2[0] == sender.tag
    assert now < r1[1] <= r2[1] < now + 2
    assert r1[2] == {'message': 'test data'}
    assert r2[2] == {'message': 'test'}
def process(self, message):
    datagram, host, port = msgpack.unpackb(message[0])
    self.processAcct(datagram, host, port)
def decode_request(self, buffer):
    return self._request_from_object(unpackb(buffer))
def bench_msgpack_unpackb(self, *args, **kwargs):
    for weibo in self.weibos_to_decode_msgpack:
        msgpack.unpackb(weibo)
import msgpack

with open("test_pack.bin", "rb") as fp:
    a = fp.read()
l = msgpack.unpackb(a.split(b'\0', 1)[0])
print(l)
class UiRequestPlugin(object):

    def formatTableRow(self, row, class_name=""):
        back = []
        for format, val in row:
            if val is None:
                formatted = "n/a"
            elif format == "since":
                if val:
                    formatted = "%.0f" % (time.time() - val)
                else:
                    formatted = "n/a"
            else:
                formatted = format % val
            back.append("<td>%s</td>" % formatted)
        return "<tr class='%s'>%s</tr>" % (class_name.encode("utf8"), "".join(back).encode("utf8"))

    def getObjSize(self, obj, hpy=None):
        if hpy:
            return float(hpy.iso(obj).domisize) / 1024
        else:
            return 0

    # /Stats entry point
    def actionStats(self):
        import gc
        import sys
        from Ui import UiRequest
        from Db import Db
        from Crypt import CryptConnection

        hpy = None
        if self.get.get("size") == "1":  # Calc obj size
            try:
                import guppy
                hpy = guppy.hpy()
            except:
                pass
        self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            raise StopIteration

        s = time.time()
        main = sys.modules["main"]

        # Style
        yield """
        <style>
         * { font-family: monospace }
         table td, table th { text-align: right; padding: 0px 10px }
         .connections td { white-space: nowrap }
         .serving-False { opacity: 0.3 }
        </style>
        """

        # Memory
        yield "rev%s | " % config.rev
        yield "%s | " % main.file_server.ip_external_list
        yield "Port: %s | " % main.file_server.port
        yield "IP Network: %s | " % main.file_server.supported_ip_types
        yield "Opened: %s | " % main.file_server.port_opened
        yield "Crypt: %s | " % CryptConnection.manager.crypt_supported
        yield "In: %.2fMB, Out: %.2fMB | " % (
            float(main.file_server.bytes_recv) / 1024 / 1024,
            float(main.file_server.bytes_sent) / 1024 / 1024
        )
        yield "Peerid: %s | " % main.file_server.peer_id
        yield "Time correction: %.2fs" % main.file_server.getTimecorrection()

        try:
            import psutil
            process = psutil.Process(os.getpid())
            mem = process.get_memory_info()[0] / float(2 ** 20)
            yield "Mem: %.2fMB | " % mem
            yield "Threads: %s | " % len(process.threads())
            yield "CPU: usr %.2fs sys %.2fs | " % process.cpu_times()
            yield "Files: %s | " % len(process.open_files())
            yield "Sockets: %s | " % len(process.connections())
            yield "Calc size <a href='?size=1'>on</a> <a href='?size=0'>off</a>"
        except Exception:
            pass
        yield "<br>"

        # Connections
        yield "<b>Connections</b> (%s, total made: %s, in: %s, out: %s):<br>" % (
            len(main.file_server.connections), main.file_server.last_connection_id,
            main.file_server.num_incoming, main.file_server.num_outgoing
        )
        yield "<table class='connections'><tr> <th>id</th> <th>type</th> <th>ip</th> <th>open</th> <th>crypt</th> <th>ping</th>"
        yield "<th>buff</th> <th>bad</th> <th>idle</th> <th>open</th> <th>delay</th> <th>cpu</th> <th>out</th> <th>in</th> <th>last sent</th>"
        yield "<th>wait</th> <th>version</th> <th>time</th> <th>sites</th> </tr>"
        for connection in main.file_server.connections:
            if "cipher" in dir(connection.sock):
                cipher = connection.sock.cipher()[0]
                tls_version = connection.sock.version()
            else:
                cipher = connection.crypt
                tls_version = ""
            if "time" in connection.handshake and connection.last_ping_delay:
                time_correction = connection.handshake["time"] - connection.handshake_time - connection.last_ping_delay
            else:
                time_correction = 0.0
            yield self.formatTableRow([
                ("%3d", connection.id),
                ("%s", connection.type),
                ("%s:%s", (connection.ip, connection.port)),
                ("%s", connection.handshake.get("port_opened")),
                ("<span title='%s %s'>%s</span>", (cipher, tls_version, connection.crypt)),
                ("%6.3f", connection.last_ping_delay),
                ("%s", connection.incomplete_buff_recv),
                ("%s", connection.bad_actions),
                ("since", max(connection.last_send_time, connection.last_recv_time)),
                ("since", connection.start_time),
                ("%.3f", max(-1, connection.last_sent_time - connection.last_send_time)),
                ("%.3f", connection.cpu_time),
                ("%.0fkB", connection.bytes_sent / 1024),
                ("%.0fkB", connection.bytes_recv / 1024),
                ("<span title='Recv: %s'>%s</span>", (connection.last_cmd_recv, connection.last_cmd_sent)),
                ("%s", connection.waiting_requests.keys()),
                ("%s r%s", (connection.handshake.get("version"), connection.handshake.get("rev", "?"))),
                ("%.2fs", time_correction),
                ("%s", connection.sites)
            ])
        yield "</table>"

        # Trackers
        yield "<br><br><b>Trackers:</b><br>"
        yield "<table class='trackers'><tr> <th>address</th> <th>request</th> <th>successive errors</th> <th>last_request</th></tr>"
        for tracker_address, tracker_stat in sorted(sys.modules["Site.SiteAnnouncer"].global_stats.iteritems()):
            yield self.formatTableRow([
                ("%s", tracker_address),
                ("%s", tracker_stat["num_request"]),
                ("%s", tracker_stat["num_error"]),
                ("%.0f min ago", min(999, (time.time() - tracker_stat["time_request"]) / 60))
            ])
        yield "</table>"

        if "AnnounceShare" in PluginManager.plugin_manager.plugin_names:
            yield "<br><br><b>Shared trackers:</b><br>"
            yield "<table class='trackers'><tr> <th>address</th> <th>added</th> <th>found</th> <th>latency</th> <th>successive errors</th> <th>last_success</th></tr>"
            from AnnounceShare import AnnounceSharePlugin
            for tracker_address, tracker_stat in sorted(AnnounceSharePlugin.tracker_storage.getTrackers().iteritems()):
                yield self.formatTableRow([
                    ("%s", tracker_address),
                    ("%.0f min ago", min(999, (time.time() - tracker_stat["time_added"]) / 60)),
                    ("%.0f min ago", min(999, (time.time() - tracker_stat.get("time_found", 0)) / 60)),
                    ("%.3fs", tracker_stat["latency"]),
                    ("%s", tracker_stat["num_error"]),
                    ("%.0f min ago", min(999, (time.time() - tracker_stat["time_success"]) / 60)),
                ])
            yield "</table>"

        # Tor hidden services
        yield "<br><br><b>Tor hidden services (status: %s):</b><br>" % main.file_server.tor_manager.status.encode("utf8")
        for site_address, onion in main.file_server.tor_manager.site_onions.items():
            yield "- %-34s: %s<br>" % (site_address, onion.encode("utf8"))

        # Db
        yield "<br><br><b>Db</b>:<br>"
        for db in sys.modules["Db.Db"].opened_dbs:
            tables = [
                row["name"]
                for row in db.execute("SELECT name FROM sqlite_master WHERE type = 'table'").fetchall()
            ]
            table_rows = {}
            for table in tables:
                table_rows[table] = db.execute("SELECT COUNT(*) AS c FROM %s" % table).fetchone()["c"]
            db_size = os.path.getsize(db.db_path) / 1024.0 / 1024.0
            yield "- %.3fs: %s %.3fMB, table rows: %s<br>" % (
                time.time() - db.last_query_time, db.db_path.encode("utf8"), db_size,
                json.dumps(table_rows, sort_keys=True)
            )

        # Sites
        yield "<br><br><b>Sites</b>:"
        yield "<table>"
        yield "<tr><th>address</th> <th>connected</th> <th title='connected/good/total'>peers</th> <th>content.json</th> <th>out</th> <th>in</th> </tr>"
        for site in sorted(self.server.sites.values(), lambda a, b: cmp(a.address, b.address)):
            yield self.formatTableRow([
                (
                    """<a href='#' onclick='document.getElementById("peers_%s").style.display="initial"; return false'>%s</a>""",
                    (site.address, site.address)
                ),
                ("%s", [peer.connection.id for peer in site.peers.values() if peer.connection and peer.connection.connected]),
                ("%s/%s/%s", (
                    len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]),
                    len(site.getConnectablePeers(100)),
                    len(site.peers)
                )),
                ("%s (loaded: %s)", (
                    len(site.content_manager.contents),
                    len([key for key, val in dict(site.content_manager.contents).iteritems() if val])
                )),
                ("%.0fkB", site.settings.get("bytes_sent", 0) / 1024),
                ("%.0fkB", site.settings.get("bytes_recv", 0) / 1024),
            ], "serving-%s" % site.settings["serving"])
            yield "<tr><td id='peers_%s' style='display: none; white-space: pre' colspan=6>" % site.address
            for key, peer in site.peers.items():
                if peer.time_found:
                    time_found = int(time.time() - peer.time_found) / 60
                else:
                    time_found = "--"
                if peer.connection:
                    connection_id = peer.connection.id
                else:
                    connection_id = None
                if site.content_manager.has_optional_files:
                    yield "Optional files: %4s " % len(peer.hashfield)
                time_added = (time.time() - peer.time_added) / (60 * 60 * 24)
                yield "(#%4s, rep: %2s, err: %s, found: %3s min, add: %.1f day) %30s -<br>" % (
                    connection_id, peer.reputation, peer.connection_error, time_found, time_added, key
                )
            yield "<br></td></tr>"
        yield "</table>"

        # Big files
        yield "<br><br><b>Big files</b>:<br>"
        for site in self.server.sites.values():
            if not site.settings.get("has_bigfile"):
                continue
            bigfiles = {}
            yield """<a href="#" onclick='document.getElementById("bigfiles_%s").style.display="initial"; return false'>%s</a><br>""" % (
                site.address, site.address
            )
            for peer in site.peers.values():
                if not peer.time_piecefields_updated:
                    continue
                for sha512, piecefield in peer.piecefields.iteritems():
                    if sha512 not in bigfiles:
                        bigfiles[sha512] = []
                    bigfiles[sha512].append(peer)
            yield "<div id='bigfiles_%s' style='display: none'>" % site.address
            for sha512, peers in bigfiles.iteritems():
                yield "<br> - " + sha512 + " (hash id: %s)<br>" % site.content_manager.hashfield.getHashId(sha512)
                yield "<table>"
                for peer in peers:
                    yield "<tr><td>" + peer.key + "</td><td>" + peer.piecefields[sha512].tostring() + "</td></tr>"
                yield "</table>"
            yield "</div>"

        # Cmd stats
        yield "<div style='float: left'>"
        yield "<br><br><b>Sent commands</b>:<br>"
        yield "<table>"
        for stat_key, stat in sorted(main.file_server.stat_sent.items(), lambda a, b: cmp(a[1]["bytes"], b[1]["bytes"]), reverse=True):
            yield "<tr><td>%s</td><td style='white-space: nowrap'>x %s =</td><td>%.0fkB</td></tr>" % (stat_key, stat["num"], stat["bytes"] / 1024)
        yield "</table>"
        yield "</div>"

        yield "<div style='float: left; margin-left: 20%; max-width: 50%'>"
        yield "<br><br><b>Received commands</b>:<br>"
        yield "<table>"
        for stat_key, stat in sorted(main.file_server.stat_recv.items(), lambda a, b: cmp(a[1]["bytes"], b[1]["bytes"]), reverse=True):
            yield "<tr><td>%s</td><td style='white-space: nowrap'>x %s =</td><td>%.0fkB</td></tr>" % (stat_key, stat["num"], stat["bytes"] / 1024)
        yield "</table>"
        yield "</div>"
        yield "<div style='clear: both'></div>"

        # No more if not in debug mode
        if not config.debug:
            raise StopIteration

        # Object types
        obj_count = {}
        for obj in gc.get_objects():
            obj_type = str(type(obj))
            if obj_type not in obj_count:
                obj_count[obj_type] = [0, 0]
            obj_count[obj_type][0] += 1  # Count
            obj_count[obj_type][1] += float(sys.getsizeof(obj)) / 1024  # Size

        yield "<br><br><b>Objects in memory (types: %s, total: %s, %.2fkb):</b><br>" % (
            len(obj_count),
            sum([stat[0] for stat in obj_count.values()]),
            sum([stat[1] for stat in obj_count.values()])
        )

        for obj, stat in sorted(obj_count.items(), key=lambda x: x[1][0], reverse=True):  # Sorted by count
            yield " - %.1fkb = %s x <a href=\"/Listobj?type=%s\">%s</a><br>" % (stat[1], stat[0], obj, cgi.escape(obj))

        # Classes
        class_count = {}
        for obj in gc.get_objects():
            obj_type = str(type(obj))
            if obj_type != "<type 'instance'>":
                continue
            class_name = obj.__class__.__name__
            if class_name not in class_count:
                class_count[class_name] = [0, 0]
            class_count[class_name][0] += 1  # Count
            class_count[class_name][1] += float(sys.getsizeof(obj)) / 1024  # Size

        yield "<br><br><b>Classes in memory (types: %s, total: %s, %.2fkb):</b><br>" % (
            len(class_count),
            sum([stat[0] for stat in class_count.values()]),
            sum([stat[1] for stat in class_count.values()])
        )

        for obj, stat in sorted(class_count.items(), key=lambda x: x[1][0], reverse=True):  # Sorted by count
            yield " - %.1fkb = %s x <a href=\"/Dumpobj?class=%s\">%s</a><br>" % (stat[1], stat[0], obj, cgi.escape(obj))

        from greenlet import greenlet
        objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
        yield "<br>Greenlets (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj).encode("utf8")))

        from Worker import Worker
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Worker)]
        yield "<br>Workers (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))

        from Connection import Connection
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Connection)]
        yield "<br>Connections (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))

        from socket import socket
        objs = [obj for obj in gc.get_objects() if isinstance(obj, socket)]
        yield "<br>Sockets (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))

        from msgpack import Unpacker
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Unpacker)]
        yield "<br>Msgpack unpacker (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))

        from Site import Site
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Site)]
        yield "<br>Sites (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))

        objs = [obj for obj in gc.get_objects() if isinstance(obj, self.server.log.__class__)]
        yield "<br>Loggers (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj.name)))

        objs = [obj for obj in gc.get_objects() if isinstance(obj, UiRequest)]
        yield "<br>UiRequests (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))

        from Peer import Peer
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Peer)]
        yield "<br>Peers (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))

        objs = [(key, val) for key, val in sys.modules.iteritems() if val is not None]
        objs.sort()
        yield "<br>Modules (%s):<br>" % len(objs)
        for module_name, module in objs:
            yield " - %.3fkb: %s %s<br>" % (self.getObjSize(module, hpy), module_name, cgi.escape(repr(module)))

        gc.collect()  # Implicit garbage collection
        yield "Done in %.1f" % (time.time() - s)

    def actionDumpobj(self):
        import gc
        import sys

        self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            raise StopIteration

        # No more if not in debug mode
        if not config.debug:
            yield "Not in debug mode"
            raise StopIteration

        class_filter = self.get.get("class")

        yield """
        <style>
         * { font-family: monospace; white-space: pre }
         table * { text-align: right; padding: 0px 10px }
        </style>
        """

        objs = gc.get_objects()
        for obj in objs:
            obj_type = str(type(obj))
            if obj_type != "<type 'instance'>" or obj.__class__.__name__ != class_filter:
                continue
            yield "%.1fkb %s... " % (float(sys.getsizeof(obj)) / 1024, cgi.escape(str(obj)))
            for attr in dir(obj):
                yield "- %s: %s<br>" % (attr, cgi.escape(str(getattr(obj, attr))))
            yield "<br>"

        gc.collect()  # Implicit garbage collection

    def actionListobj(self):
        import gc
        import sys

        self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            raise StopIteration

        # No more if not in debug mode
        if not config.debug:
            yield "Not in debug mode"
            raise StopIteration

        type_filter = self.get.get("type")

        yield """
        <style>
         * { font-family: monospace; white-space: pre }
         table * { text-align: right; padding: 0px 10px }
        </style>
        """

        yield "Listing all %s objects in memory...<br>" % cgi.escape(type_filter)

        ref_count = {}
        objs = gc.get_objects()
        for obj in objs:
            obj_type = str(type(obj))
            if obj_type != type_filter:
                continue
            refs = [
                ref for ref in gc.get_referrers(obj)
                if hasattr(ref, "__class__") and
                ref.__class__.__name__ not in ["list", "dict", "function", "type", "frame", "WeakSet", "tuple"]
            ]
            if not refs:
                continue
            try:
                yield "%.1fkb <span title=\"%s\">%s</span>... " % (
                    float(sys.getsizeof(obj)) / 1024, cgi.escape(str(obj)), cgi.escape(str(obj)[0:100].ljust(100))
                )
            except:
                continue
            for ref in refs:
                yield " ["
                if "object at" in str(ref) or len(str(ref)) > 100:
                    yield str(ref.__class__.__name__)
                else:
                    yield str(ref.__class__.__name__) + ":" + cgi.escape(str(ref))
                yield "] "
                ref_type = ref.__class__.__name__
                if ref_type not in ref_count:
                    ref_count[ref_type] = [0, 0]
                ref_count[ref_type][0] += 1  # Count
                ref_count[ref_type][1] += float(sys.getsizeof(obj)) / 1024  # Size
            yield "<br>"

        yield "<br>Object referrer (total: %s, %.2fkb):<br>" % (len(ref_count), sum([stat[1] for stat in ref_count.values()]))

        for obj, stat in sorted(ref_count.items(), key=lambda x: x[1][0], reverse=True)[0:30]:  # Sorted by count
            yield " - %.1fkb = %s x %s<br>" % (stat[1], stat[0], cgi.escape(str(obj)))

        gc.collect()  # Implicit garbage collection

    def actionBenchmark(self):
        import sys
        import gc
        from contextlib import contextmanager

        output = self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            raise StopIteration

        @contextmanager
        def benchmark(name, standard):
            s = time.time()
            output("- %s" % name)
            try:
                yield 1
            except Exception, err:
                output("<br><b>! Error: %s</b><br>" % err)
            taken = time.time() - s
            if taken > 0:
                multiplier = standard / taken
            else:
                multiplier = 99
            if multiplier < 0.3:
                speed = "Sloooow"
            elif multiplier < 0.5:
                speed = "Ehh"
            elif multiplier < 0.8:
                speed = "Goodish"
            elif multiplier < 1.2:
                speed = "OK"
            elif multiplier < 1.7:
                speed = "Fine"
            elif multiplier < 2.5:
                speed = "Fast"
            elif multiplier < 3.5:
                speed = "WOW"
            else:
                speed = "Insane!!"
            output("%.3fs [x%.2f: %s]<br>" % (taken, multiplier, speed))
            time.sleep(0.01)

        yield """
        <style>
         * { font-family: monospace }
         table * { text-align: right; padding: 0px 10px }
        </style>
        """

        yield "Benchmarking ZeroNet %s (rev%s) Python %s on: %s...<br>" % (config.version, config.rev, sys.version, sys.platform)

        t = time.time()

        # CryptBitcoin
        yield "<br>CryptBitcoin:<br>"
        from Crypt import CryptBitcoin

        # seed = CryptBitcoin.newSeed()
        # yield "- Seed: %s<br>" % seed
        seed = "e180efa477c63b0f2757eac7b1cce781877177fe0966be62754ffd4c8592ce38"

        with benchmark("hdPrivatekey x 10", 0.7):
            for i in range(10):
                privatekey = CryptBitcoin.hdPrivatekey(seed, i * 10)
                yield "."
            valid = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
            assert privatekey == valid, "%s != %s" % (privatekey, valid)

        data = "Hello" * 1024  # 5k
        with benchmark("sign x 10", 0.35):
            for i in range(10):
                yield "."
                sign = CryptBitcoin.sign(data, privatekey)
            valid = "G1GXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOiBHB+kp4cRPZOL7l1yqK5BHa6J+W97bMjvTXtxzljp6w="
            assert sign == valid, "%s != %s" % (sign, valid)

        address = CryptBitcoin.privatekeyToAddress(privatekey)
        if CryptBitcoin.opensslVerify:  # Openssl available
            with benchmark("openssl verify x 100", 0.37):
                for i in range(100):
                    if i % 10 == 0:
                        yield "."
                    ok = CryptBitcoin.verify(data, address, sign)
                assert ok, "does not verify from %s" % address
        else:
            yield " - openssl verify x 100...not available :(<br>"

        openssl_verify_bk = CryptBitcoin.opensslVerify  # Emulate openssl not found in any way
        CryptBitcoin.opensslVerify = None
        with benchmark("pure-python verify x 10", 1.6):
            for i in range(10):
                yield "."
                ok = CryptBitcoin.verify(data, address, sign)
            assert ok, "does not verify from %s" % address
        CryptBitcoin.opensslVerify = openssl_verify_bk

        # CryptHash
        yield "<br>CryptHash:<br>"
        from Crypt import CryptHash
        from cStringIO import StringIO

        data = StringIO("Hello" * 1024 * 1024)  # 5m
        with benchmark("sha256 5M x 10", 0.6):
            for i in range(10):
                data.seek(0)
                hash = CryptHash.sha256sum(data)
                yield "."
            valid = "8cd629d9d6aff6590da8b80782a5046d2673d5917b99d5603c3dcb4005c45ffa"
            assert hash == valid, "%s != %s" % (hash, valid)

        data = StringIO("Hello" * 1024 * 1024)  # 5m
        with benchmark("sha512 5M x 10", 0.6):
            for i in range(10):
                data.seek(0)
                hash = CryptHash.sha512sum(data)
                yield "."
            valid = "9ca7e855d430964d5b55b114e95c6bbb114a6d478f6485df93044d87b108904d"
            assert hash == valid, "%s != %s" % (hash, valid)

        with benchmark("os.urandom(256) x 1000", 0.0065):
            for i in range(10):
                for y in range(100):
                    data = os.urandom(256)
                yield "."

        # Msgpack
        import msgpack
        yield "<br>Msgpack: (version: %s)<br>" % ".".join(map(str, msgpack.version))
        binary = 'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
        data = {"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}
        with benchmark("pack 5K x 10 000", 0.78):
            for i in range(10):
                for y in range(1000):
                    data_packed = msgpack.packb(data)
                yield "."
valid = """\x84\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00hellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohelloh
ellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohello\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa6binary\xda\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv""" assert data_packed == valid, "%s<br>!=<br>%s" % (repr(data_packed), repr(valid)) with benchmark("unpack 5K x 10 000", 1.2): for i in range(10): for y in range(1000): data_unpacked = msgpack.unpackb(data_packed) yield "." assert data == data_unpacked, "%s != %s" % (data_unpacked, data) with benchmark("streaming unpack 5K x 10 000", 1.4): for i in range(10): unpacker = msgpack.Unpacker() for y in range(1000): unpacker.feed(data_packed) for data_unpacked in unpacker: pass yield "." 
            assert data == data_unpacked, "%s != %s" % (data_unpacked, data)

        # Db
        from Db import Db
        import sqlite3
        yield "<br>Db: (version: %s, API: %s)<br>" % (sqlite3.sqlite_version, sqlite3.version)

        schema = {
            "db_name": "TestDb",
            "db_file": "%s/benchmark.db" % config.data_dir,
            "maps": {
                ".*": {
                    "to_table": {
                        "test": "test"
                    }
                }
            },
            "tables": {
                "test": {
                    "cols": [
                        ["test_id", "INTEGER"],
                        ["title", "TEXT"],
                        ["json_id", "INTEGER REFERENCES json (json_id)"]
                    ],
                    "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"],
                    "schema_changed": 1426195822
                }
            }
        }

        if os.path.isfile("%s/benchmark.db" % config.data_dir):
            os.unlink("%s/benchmark.db" % config.data_dir)

        with benchmark("Open x 10", 0.13):
            for i in range(10):
                db = Db(schema, "%s/benchmark.db" % config.data_dir)
                db.checkTables()
                db.close()
                yield "."

        db = Db(schema, "%s/benchmark.db" % config.data_dir)
        db.checkTables()
        import json

        with benchmark("Insert x 10 x 1000", 1.0):
            for u in range(10):  # 10 users
                data = {"test": []}
                for i in range(1000):  # 1000 lines of data
                    data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
                json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
                db.updateJson("%s/test_%s.json" % (config.data_dir, u))
                os.unlink("%s/test_%s.json" % (config.data_dir, u))
                yield "."

        with benchmark("Buffered insert x 100 x 100", 1.3):
            cur = db.getCursor()
            cur.execute("BEGIN")
            cur.logging = False
            for u in range(100, 200):  # 100 users
                data = {"test": []}
                for i in range(100):  # 100 lines of data
                    data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
                json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
                db.updateJson("%s/test_%s.json" % (config.data_dir, u), cur=cur)
                os.unlink("%s/test_%s.json" % (config.data_dir, u))
                if u % 10 == 0:
                    yield "."
            cur.execute("COMMIT")

        yield " - Total rows in db: %s<br>" % db.execute("SELECT COUNT(*) AS num FROM test").fetchone()[0]

        with benchmark("Indexed query x 1000", 0.25):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(1000):  # 1000x by test_id
                res = cur.execute("SELECT * FROM test WHERE test_id = %s" % i)
                for row in res:
                    found += 1
                if i % 100 == 0:
                    yield "."
            assert found == 20000, "Found: %s != 20000" % found

        with benchmark("Not indexed query x 100", 0.6):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(100):  # 100x by json_id
                res = cur.execute("SELECT * FROM test WHERE json_id = %s" % i)
                for row in res:
                    found += 1
                if i % 10 == 0:
                    yield "."
            assert found == 18900, "Found: %s != 18900" % found

        with benchmark("Like query x 100", 1.8):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(100):  # 100x by title
                res = cur.execute("SELECT * FROM test WHERE title LIKE '%%message %s%%'" % i)
                for row in res:
                    found += 1
                if i % 10 == 0:
                    yield "."
            assert found == 38900, "Found: %s != 38900" % found

        db.close()
        if os.path.isfile("%s/benchmark.db" % config.data_dir):
            os.unlink("%s/benchmark.db" % config.data_dir)

        gc.collect()  # Implicit garbage collection

        # Zip
        yield "<br>Compression:<br>"
        import zipfile
        test_data = "Test" * 1024
        file_name = "\xc3\x81rv\xc3\xadzt\xc5\xb0r\xc5\x91t\xc3\xbck\xc3\xb6r\xc3\xb3g\xc3\xa9p\xe4\xb8\xad\xe5\x8d\x8e%s.txt"

        with benchmark("Zip pack x 10", 0.12):
            for i in range(10):
                with zipfile.ZipFile('%s/test.zip' % config.data_dir, 'w') as archive:
                    for y in range(100):
                        zip_info = zipfile.ZipInfo(file_name % y, (1980, 1, 1, 0, 0, 0))
                        zip_info.compress_type = zipfile.ZIP_DEFLATED
                        zip_info.create_system = 3
                        archive.writestr(zip_info, test_data)
                yield "."
            hash = CryptHash.sha512sum(open("%s/test.zip" % config.data_dir, "rb"))
            valid = "f6ef623e6653883a1758db14aa593350e26c9dc53a8406d6e6defd6029dbd483"
            assert hash == valid, "Invalid hash: %s != %s<br>" % (hash, valid)

        with benchmark("Zip unpack x 10", 0.2):
            for i in range(10):
                with zipfile.ZipFile('%s/test.zip' % config.data_dir) as archive:
                    for y in range(100):
                        assert archive.read(file_name % y) == test_data
                yield "."

        if os.path.isfile("%s/test.zip" % config.data_dir):
            os.unlink("%s/test.zip" % config.data_dir)

        # Tar.gz
        import tarfile
        import struct

        # Monkey patch _init_write_gz to use fixed date in order to keep the hash independent from datetime
        def nodate_write_gzip_header(self):
            self.mtime = 0
            original_write_gzip_header(self)

        import gzip
        original_write_gzip_header = gzip.GzipFile._write_gzip_header
        gzip.GzipFile._write_gzip_header = nodate_write_gzip_header

        test_data_io = StringIO("Test" * 1024)
        with benchmark("Tar.gz pack x 10", 0.3):
            for i in range(10):
                with tarfile.open('%s/test.tar.gz' % config.data_dir, 'w:gz') as archive:
                    for y in range(100):
                        test_data_io.seek(0)
                        tar_info = tarfile.TarInfo(file_name % y)
                        tar_info.size = 4 * 1024
                        archive.addfile(tar_info, test_data_io)
                yield "."
            hash = CryptHash.sha512sum(open("%s/test.tar.gz" % config.data_dir, "rb"))
            valid = "4704ebd8c987ed6f833059f1de9c475d443b0539b8d4c4cb8b49b26f7bbf2d19"
            assert hash == valid, "Invalid hash: %s != %s<br>" % (hash, valid)

        with benchmark("Tar.gz unpack x 10", 0.2):
            for i in range(10):
                with tarfile.open('%s/test.tar.gz' % config.data_dir, 'r:gz') as archive:
                    for y in range(100):
                        assert archive.extractfile(file_name % y).read() == test_data
                yield "."

        if os.path.isfile("%s/test.tar.gz" % config.data_dir):
            os.unlink("%s/test.tar.gz" % config.data_dir)

        # Tar.bz2
        import tarfile
        test_data_io = StringIO("Test" * 1024)
        with benchmark("Tar.bz2 pack x 10", 2.0):
            for i in range(10):
                with tarfile.open('%s/test.tar.bz2' % config.data_dir, 'w:bz2') as archive:
                    for y in range(100):
                        test_data_io.seek(0)
                        tar_info = tarfile.TarInfo(file_name % y)
                        tar_info.size = 4 * 1024
                        archive.addfile(tar_info, test_data_io)
                yield "."
            hash = CryptHash.sha512sum(open("%s/test.tar.bz2" % config.data_dir, "rb"))
            valid = "90cba0b4d9abaa37b830bf37e4adba93bfd183e095b489ebee62aaa94339f3b5"
            assert hash == valid, "Invalid hash: %s != %s<br>" % (hash, valid)

        with benchmark("Tar.bz2 unpack x 10", 0.5):
            for i in range(10):
                with tarfile.open('%s/test.tar.bz2' % config.data_dir, 'r:bz2') as archive:
                    for y in range(100):
                        assert archive.extractfile(file_name % y).read() == test_data
                yield "."

        if os.path.isfile("%s/test.tar.bz2" % config.data_dir):
            os.unlink("%s/test.tar.bz2" % config.data_dir)

        yield "<br>Done. Total: %.2fs" % (time.time() - t)
def __decodeComplete(self, event):
    return msgpack.unpackb(event["data"])
path = './data_merged/ubuntu/'
files = os.listdir(path)
context_windows = {}
j = 0

# Note: the commented-out code below was used to keep context windows that
# share messages together (overlap happens within one document; only a run of
# 4 consecutive non-"Ubuntu User" messages separates them) so that they would
# not be split across train / test. However, that gave poor accuracy, roughly
# 0.57-0.58. Similarly, taking the first 80% of windows as train data gave
# about 0.58-0.59, so the split is done randomly instead.
# fw_1 = open('train_split', 'w')
# fw_2 = open('test_split', 'w')
# k = 0
for file in files:
    with open(path + file, 'rb') as handle:
        conversation = msgpack.unpackb(handle.read())
        message_list = conversation[b'messages']
        context_window = ''
        i = 1
        for message in message_list:
            context_window += str(message[b'text'], encoding='utf-8') + '^' + str(message[b'response_time']) + '\n'
            if i == 5 and str(message[b'user'], encoding='utf-8') == 'Ubuntu User':
                context_windows[j] = context_window
                context_window = context_window[context_window.find('\n') + 1:]
                # if k == 0:
                #     tmp = np.random.rand()
                #     if tmp < 0.8:
                #         fw_1.write(context_window + '\n')
info_config= str("average code length") appendfile.write(info_config+',') info_config= str('sleep time') appendfile.write(str(info_config)+'\n') info_config= str([TCP_IP , TCP_PORT]) appendfile.write(info_config+',') info_config=str(len(str(MESSAGE))) appendfile.write(info_config+',') info_config=str(sleep) appendfile.write(str(info_config)+'\n') while 1 : # TP_MESSAGE = str(['{0:08}'.format(seq) , datetime.datetime.now().strftime("%H:%M:%S.%f"), MESSAGE]) TP_MESSAGE = ['{0:08}'.format(seq) , datetime.now().strftime("%H:%M:%S.%f") , MESSAGE] MESSAGE = [random.random() * 1000 , random.random() * 1000 , random.random() * 1000 , random.random() * 1000 , random.random() * 1000 , random.random() * 1000] B_TP_MESSAGE=msgpack.packb(TP_MESSAGE,use_bin_type=True) s.sendto((B_TP_MESSAGE) ,(TCP_IP , TCP_PORT)) data = s.recv(BUFFER_SIZE) data =msgpack.unpackb(data,raw=False) rcv_time= float(datetime.now().strftime("%H:%M:%S.%f").split(':')[2]) # s_st_time = float(data.decode().split("',")[1].split(':')[2]) s_st_time =float( data[1].split(':')[2]) seq+=1 appendfile = open(save_path % filename, "a") appendfile.write(str(rcv_time-s_st_time)+'\n') print(data, rcv_time - s_st_time) # print(data.decode()) time.sleep(sleep) s.close()
def __decodeData(self, event):
    event["data"] = msgpack.unpackb(event["data"])
    return event
def loads(s, registry=None):
    """Deserialize ``s`` messagepack ``str`` into a Python object."""
    if registry is None:
        registry = default_registry
    ext_hook = functools.partial(_unserializer, registry)
    return msgpack.unpackb(s, ext_hook=ext_hook, encoding='utf-8')
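A `dumps` counterpart would go through `default=` instead of `ext_hook=`. The `_serializer` below is a hypothetical inverse of `_unserializer` that maps known types to `msgpack.ExtType` payloads; this is a sketch, not the library's actual code:

def dumps(obj, registry=None):
    """Serialize a Python object into a messagepack ``str`` (sketch)."""
    if registry is None:
        registry = default_registry
    # _serializer is an assumed inverse of _unserializer above
    default = functools.partial(_serializer, registry)
    return msgpack.packb(obj, default=default, use_bin_type=True)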
def test_unpack_bytearray():
    buf = bytearray(packb(('foo', 'bar')))
    obj = unpackb(buf, use_list=1)
    assert [b'foo', b'bar'] == obj
    expected_type = bytes
    assert all(type(s) == expected_type for s in obj)
def write_packet(self, payload, topic):
    temp = msgpack.unpackb(payload)
    # print("$$$$$$$$$$$$$$$$$$$$$$$$$", self.tx_topic, topic, payload)
    self.mqtt_class.publish(self.tx_topic + topic, payload)
def from_msgpack(self, data):
    parsed = msgpack.unpackb(data)
    self._from_parsed(parsed)
def unpack_msg(msg):
    return msgpack.unpackb(msg, use_list=False, raw=False)
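Worth noting what those two flags buy: with `raw=False` strings come back as `str`, and with `use_list=False` msgpack arrays come back as tuples. A quick round-trip check:

packed = msgpack.packb({'key': ['a', 'b']}, use_bin_type=True)
assert unpack_msg(packed) == {'key': ('a', 'b')}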
def process_message(self, message):
    message = msgpack.unpackb(message.value(), use_list=False)
    return message
    socket.connect('tcp://localhost:%i' % (rpc_port_l1_base + i))
    sockets.append(socket)

token = 1
print('Sending get_statistics requests...')
for socket in sockets:
    msg = msgpack.packb(['get_statistics', token])
    socket.send(msg)

beam_meta = []
print('Waiting for get_statistics replies...')
for i, socket in enumerate(sockets):
    hdr, msg = socket.recv_multipart()
    print('Received reply: %i bytes' % len(msg))
    reply = msgpack.unpackb(msg)
    beam_meta.append(reply)

# make plots from replies
npackets_grid = np.zeros((n_l0_nodes, n_l1_nodes), int)
l0_addrs = {}
for i in range(n_l0_nodes):
    for j in range(n_l1_nodes):
        l0_addrs['127.0.0.1:%i' % (udp_port_l0_base + i * n_l1_nodes + j)] = i

nodestats = []
for i, rep in enumerate(beam_meta):
    print('Node stats:', rep[0])
    print('Per-node packet counts:', rep[1])
    for r in rep[2:]:
        print('Beam:', r)
def _load(file):
    return msgpack.unpackb(file, object_hook=msgpack_numpy.decode, raw=False)
def deserialize(self, message):
    """Deserialize message."""
    if self.crypter:
        message = self.crypter.decrypt(message, self.expiry + 10)
    return msgpack.unpackb(message, encoding='utf8')
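The serialize direction would pack first and encrypt second, so the ciphertext covers the whole msgpack payload. A sketch, assuming the same `crypter` object whose `encrypt` mirrors the `decrypt` call above:

def serialize(self, message):
    """Serialize message (sketch mirroring deserialize above)."""
    packed = msgpack.packb(message, use_bin_type=True)
    if self.crypter:
        packed = self.crypter.encrypt(packed)  # assumed counterpart of crypter.decrypt
    return packed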
def value(self):
    data = self.session.i.get(to_bytes(self.key))
    if not data:
        return
    return msgpack.unpackb(data, raw=False)
def test_unpack_buffer():
    from array import array
    buf = array('b')
    buf.fromstring(packb((b'foo', b'bar')))
    obj = unpackb(buf, use_list=1)
    assert [b'foo', b'bar'] == obj
def unpack(s):
    return unpackb(s, raw=False)
def deserializer(buf):
    return msgpack.unpackb(buf, raw=False, use_list=False, ext_hook=ext_hook)
def _recv_msgpack(socket, flags=0):
    """ Receives an object, unpacking it with msgpack. """
    packed = socket.recv(flags)
    return msgpack.unpackb(packed, object_hook=_decode_custom)
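The matching send helper would use msgpack's `default=` hook. `_encode_custom` is assumed to be the inverse of the `_decode_custom` hook above; a sketch:

def _send_msgpack(socket, obj, flags=0):
    """ Packs an object with msgpack and sends it (sketch). """
    packed = msgpack.packb(obj, default=_encode_custom)  # assumed inverse of _decode_custom
    return socket.send(packed, flags)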
def decoding(data):
    return msgpack.unpackb(data)
def unpackKey(self, key):
    return msgpack.unpackb(base64.b64decode(key))
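The inverse helper is a one-liner; a sketch of what a `packKey` counterpart would look like:

def packKey(self, key):
    # Hypothetical inverse of unpackKey(): msgpack first, then base64.
    return base64.b64encode(msgpack.packb(key))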
def print_message(message):
    print msgpack.unpackb(message.body)
    message.finish()
def UdpRecvFromRP(udp_socket):
    global motors_que
    while True:
        recv_data = udp_socket.recv(1024)
        motors_que.append(msgpack.unpackb(recv_data))
def reply(self, result):
    data, host, port = msgpack.unpackb(result[0])
    self.transport.write(data, (host, int(port)))
def deserialize(data):
    return netaddr.IPAddress(msgpack.unpackb(data))
import zmq
import msgpack as serializer

ctx = zmq.Context()
requester = ctx.socket(zmq.REQ)
ip = 'localhost'
port = 50020
requester.connect('tcp://%s:%s' % (ip, port))

requester.send_string('SUB_PORT')
sub_port = requester.recv_string()
print("Connecting to port {}".format(sub_port))

subscriber = ctx.socket(zmq.SUB)
subscriber.connect('tcp://%s:%s' % (ip, sub_port))
id = ''
subscriber.setsockopt_string(zmq.SUBSCRIBE, 'gaze.2d.{}'.format(id))

try:
    while True:
        topic = subscriber.recv_string()
        info = serializer.unpackb(subscriber.recv(), encoding='utf-8')
        # logger.info("Received Topic - {}, Timestamp - {}, Norm_Pos - {}, Confidence - {}".format(topic, info['timestamp'], info['norm_pos'], info['confidence']))
        print(info)
except KeyboardInterrupt:
    requester.close()
    subscriber.close()
    ctx.term()
    raise