def message(db_id, coll_id, data_id):
    """Serve one message, looked up by data_id, from MongoDB.

    Args:
        db_id: database name.
        coll_id: collection name.
        data_id: value of the document's "data_id" field.

    Returns:
        Serialized protobuf bytes when the client asked for protobuf
        (request.data == "protobuf"), otherwise a JSON string.  Error
        conditions are reported as JSON-encoded message strings.
    """
    # Connect to the database MongoDB
    try:
        connection = Connection("localhost", 27017)
    except:
        return json.dumps("Unable to connect to the database!")
    db = connection[db_id]
    collection = db[coll_id]
    try:
        message_collection = collection.find_one({"data_id": data_id})
    except:
        return json.dumps("Unable to read data from the collection!")
    if message_collection is None:
        return json.dumps("No data with this ID in the collection!")
    # ObjectId is not JSON/protobuf serializable; stringify it first.
    message_collection["_id"] = str(message_collection["_id"])
    if request.data == "protobuf":
        try:
            # BUG FIX: the original also built a RawRFReadingCollection here
            # and immediately overwrote it with this Metadata message — a dead
            # store.  The Metadata conversion (the one whose result was
            # actually returned) is kept, preserving behavior.
            # NOTE(review): a sibling handler converts the same payload to
            # RawRFReadingCollection — confirm Metadata is the intended type
            # for this endpoint.
            pb_message = protobuf_json.json2pb(raw_metadata_pb2.Metadata(),
                                               message_collection)
        except:
            return json.dumps("Unable to read message from the collection!")
        pb_message_string = pb_message.SerializeToString()
        return pb_message_string
    else:
        return json.dumps(message_collection)
def __call__(self, environ, start_response):
    """WSGI entry point: decode the JSON request body, dispatch it to the
    handler method named by environ['_HANDLER'], and let that handler fill
    resp.dict.

    In "debug" mode, both the incoming and outgoing dicts are validated by
    converting them against the protobuf schemas (the converted pb is
    discarded — json2pb raises on a mismatch).
    Any exception is caught and logged; no error response is produced here.
    """
    req = Request(environ)
    resp = Response(content_type="application/json")
    resp.dict = {}
    try:
        req.dict = json.loads(req.body)
        # Debug-only schema validation of the request payload.
        if self.mode == "debug":
            protobuf_json.json2pb(taggit_pb.JsonRequest(), req.dict)
        # '_HANDLER' is expected to be injected by the routing layer —
        # presumably upstream middleware; confirm.
        handler = getattr(self, environ['_HANDLER'])
        handler(req, resp)  # handle !
        # Debug-only schema validation of the response payload.
        if self.mode == "debug":
            protobuf_json.json2pb(taggit_pb.JsonResponse(), resp.dict)
    except Exception, e:
        print '(E) JsonApp call:', e
def __call__(self, environ, start_response):
    """WSGI entry point: parse the JSON body, call the handler method named
    in environ['_HANDLER'], which populates resp.dict.

    In "debug" mode the request and response dicts are round-tripped through
    json2pb purely as schema validation (the result is discarded).
    Exceptions are swallowed after printing — no error reply is sent here.
    """
    req = Request(environ)
    resp = Response(content_type="application/json")
    resp.dict = {}
    try:
        req.dict = json.loads(req.body)
        # Debug-only validation of the incoming payload.
        if self.mode == "debug":
            protobuf_json.json2pb(taggit_pb.JsonRequest(), req.dict)
        handler = getattr(self,environ['_HANDLER'])
        handler(req,resp)  # handle !
        # Debug-only validation of the outgoing payload.
        if self.mode == "debug":
            protobuf_json.json2pb(taggit_pb.JsonResponse(), resp.dict)
    except Exception, e:
        print '(E) JsonApp call:', e
def ToProto(cursor, collection_name=None):
    """Transform a mongo result dict (e.g. collection.find_one(...)) into the
    protobuf class registered for its collection.

    Data in mongo is not exactly proto -> dictionary -> mongo (mapreduce
    results look like {_id:, value:}), so this bridges mongo and the protobuf
    data model.  Reciprocal of SaveProto, and also usable on read-only
    collections that SaveProto does not handle.

    Returns None for an empty/None cursor result.
    """
    if not cursor:
        return None
    name = collection_name
    if name is None:
        name = cursor.collection.name
    message = _COLLECTION_TO_PB_CLASS[name]()
    result = protobuf_json.json2pb(message, cursor)
    # User documents additionally carry their mongo _id on the pb.
    if name in (RW_USERS, ):
        result.id = cursor['_id']
    return result
def message(db_id, coll_id, data_id):
    """Fetch the document whose 'data_id' matches and return it either as a
    serialized RawRFReadingCollection protobuf (when request.data is
    'protobuf') or as JSON.  All failures come back as JSON message strings.
    """
    # Connect to the database MongoDB
    try:
        client = Connection(hostname, port_number)
    except:
        return json.dumps("Unable to connect to the database!")
    target = client[db_id][coll_id]
    try:
        doc = target.find_one({'data_id': data_id})
    except:
        return json.dumps("Unable to read data from the collection!")
    if doc is None:
        return json.dumps("No data with this ID in the collection!")
    # ObjectId cannot be serialized as-is; convert it to a string.
    doc['_id'] = str(doc['_id'])
    if request.data != 'protobuf':
        return json.dumps(doc)
    try:
        pb = protobuf_json.json2pb(
            raw_data_pb2.RawRFReadingCollection(), doc)
    except:
        return json.dumps("Unable to read message from the collection!")
    return pb.SerializeToString()
def login():
    """login as our user. input: LoginRequest (if POST) output: ok.

    Request: ?email=xx&password=xx[&remember_me=1] (if GET, else arguments in
    LoginRequest).  Aborts with 400 on unknown email or wrong password.
    """
    if request.method == "POST":
        creds = protobuf_json.json2pb(all_pbs.LoginRequest(),
                                      request.get_json())
    else:
        # GET: assemble the request pb from query-string arguments.
        creds = all_pbs.LoginRequest()
        creds.email = request.args.get("email", "")
        creds.password = request.args.get("password", "")
        creds.remember = request.args.get("remember", "") == "1"
    users = data_models.GetTable(data_models.RW_USERS)
    record = users.find_one({"info.email": creds.email})
    user_pb = data_models.ToProto(record, data_models.RW_USERS)
    if not user_pb or user_pb.info.password != creds.password:
        abort(400)
    flogin.login_user(user_util.FLUser(user_pb), remember=creds.remember)
    session.permanent = True
    return 'ok'
def tracker_list(request, format=None):
    """Return the requesting user's trackers as a serialized TrackerList
    protobuf (application/octet-stream)."""
    owned = Tracker.objects.filter(owner=request.user)
    pb = pbjson.json2pb(tracker_pb2.TrackerList(),
                        dict(trackers=serialize(owned)))
    payload = pb.SerializeToString()
    return HttpResponse(payload, mimetype='application/octet-stream')
def experiment(db_id, coll_id):
    """Serve the first document of an experiment collection.

    Args:
        db_id: database name.
        coll_id: collection name.

    Returns:
        Serialized Experiment protobuf bytes when request.data == 'protobuf',
        otherwise JSON.  Errors come back as JSON-encoded message strings.
    """
    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except:
        return json.dumps("Unable to connect to the database!")
    db = connection[db_id]
    collection = db[coll_id]
    try:
        message_collection = collection.find_one({})
    except:
        return json.dumps("Unable to read data from the experiment!")
    if message_collection is None:
        return json.dumps("No data with this ID in the experiment!")
    # ObjectId is not serializable; stringify it.
    message_collection['_id'] = str(message_collection['_id'])
    if request.data == 'protobuf':
        # CONSISTENCY FIX: the sibling message() handlers guard json2pb and
        # return an error string on failure; previously a malformed document
        # would raise straight out of the view here.
        try:
            pb_message = protobuf_json.json2pb(
                experiment_results_pb2.Experiment(), message_collection)
        except:
            return json.dumps("Unable to read message from the experiment!")
        return pb_message.SerializeToString()
    else:
        return json.dumps(message_collection)
def _read(self):
    """Deserialize the stored response into its protobuf message.

    Looks up the message class named by self.response['type'] on the `proto`
    module and fills it from self.response['message'].

    Returns:
        A ('protobuf', message) tuple.

    Raises:
        Exception: if no response has been stored yet.
    """
    # IDIOM FIX: identity comparison with None (`is None`), not `== None`.
    if self.response is None:
        raise Exception('No response stored')
    cls = getattr(proto, self.response['type'])
    inst = cls()
    pb = protobuf_json.json2pb(inst, self.response['message'])
    return ('protobuf', pb)
def test_json_and_back(enum_string=False):
    """Round-trip a test message through pb2json/json2pb and assert that the
    reconstructed message equals the original."""
    # convert it to JSON and back
    original = get_pb()
    pprint(original.SerializeToString())
    as_json = protobuf_json.pb2json(original, enum_string=enum_string)
    pprint(as_json)
    rebuilt = protobuf_json.json2pb(pb_test.TestMessage(), as_json)
    pprint(rebuilt.SerializeToString())
    assert original == rebuilt
def serialize(json_source: Path) -> None:
    """Convert a SmartVaccCert JSON file into its serialized protobuf.

    Reads `json_source`, populates a SmartVaccCert message from it, and
    writes the binary next to the input with a ".bin" suffix.

    Args:
        json_source: path to the JSON input file.
    """
    # NAMING FIX: the original called this instance `svc_pb2`, which reads
    # like a generated module name; it is a message object.
    cert = smart_vacc_fhir_bundle_gc_pb2.SmartVaccCert()
    out_file = json_source.with_suffix(".bin")
    # populate protobuf object from json (json.load instead of loads(read()))
    with open(json_source, "r") as f:
        cert_dict = json.load(f)
    pb2 = protobuf_json.json2pb(cert, cert_dict)
    # write out serialized protobuf
    with open(out_file, "wb") as f:
        f.write(pb2.SerializeToString())
def GetUrlByTitle(self, req_data, is_json):
    """Resolve the MP3 URL for the title in the request via the crawl service.

    Args:
        req_data: serialized GetUrlByTitleRequest — a JSON string when
            is_json is true, otherwise protobuf bytes.
        is_json: selects the wire encoding for both request and response.

    Returns:
        A GetUrlByTitleResponse, JSON-encoded or serialized to bytes to
        match the request encoding.  status 0 = success, 100 = failure.
    """
    # Auto-generated section: deserialize the request body.
    request = msec_pb2.GetUrlByTitleRequest()
    response = msec_pb2.GetUrlByTitleResponse()
    # JSON protocol handling.
    if is_json:
        req_json = json.loads(req_data)
        request = protobuf_json.json2pb(request, req_json)
    else:
        request.ParseFromString(req_data)
    # TODO: business logic implementation.
    log_info("GetUrlByTitle start....")
    attr_report("GetUrlByTitle Entry")
    # Forward the requested type to the crawl service.
    crawlreq = VOA_py_Crawl_pb2.GetMP3ListRequest()
    crawlreq.type = request.type
    # result = CallMethod("VOA_java.Crawl", "crawl.CrawlService.getMP3List", crawlreq, 20000)
    result = CallMethod("VOA_py.Crawl", "crawl.CrawlService.GetMP3List",
                        crawlreq, 20000)
    if result["ret"] != 0:
        # The RPC itself failed.
        response.status = 100
        response.msg = "CallMethod failed: " + result["errmsg"]
        log_error('callmethod error: %d %s' % (result["ret"], result["errmsg"]))
    else:
        crawlrsp = VOA_py_Crawl_pb2.GetMP3ListResponse()
        crawlrsp.ParseFromString(result["response"])
        if crawlrsp.status != 0:
            # The crawl service answered, but reported an error.
            log_error('getmp3list response error: %d %s' % (crawlrsp.status,
                                                            crawlrsp.msg))
            response.status = 100
            response.msg = "getmp3list response failed: " + crawlrsp.msg
        else:
            # Default to "not found"; overwritten when a title matches below.
            response.status = 100
            response.msg = "failed to find the url"
            for mp3 in crawlrsp.mp3s:
                if request.title == mp3.title:
                    response.url = mp3.url
                    response.status = 0
                    response.msg = "success"
                    log_info("GetUrlByTitle succ.")
                    attr_report("GetUrlByTitle succ")
                    break
    # Serialize the response body.
    if is_json:
        return json.dumps(protobuf_json.pb2json(response))
    else:
        return response.SerializeToString()
def find_AOrderPlaced(shipid):
    """Fetch and consume the stored ACommunicate message for a package.

    Selects the first `order_placed` row matching `shipid`, parses its JSON
    message into an ACommunicate protobuf, stores each aorderplaced entry via
    insert_ACommunicate_to_DB, then deletes the row.

    Args:
        shipid: package id to look up.

    Returns:
        The parsed ACommunicate message, or None when no row matched or an
        error occurred (errors are printed, not raised).
    """
    # BUG FIX: these were bound only inside the try block, so a failure in
    # psycopg2.connect() made the finally clause (and `return cmd`) raise
    # NameError instead of reporting the real error.
    connection = None
    cursor = None
    cmd = None
    try:
        connection = psycopg2.connect(user=USER,
                                      password=PASSWORD,
                                      host=HOST,
                                      port=PORT,
                                      database=DATABASE)
        cursor = connection.cursor()
        postgreSQL_select_Query = """select message from order_placed where packageid = %s"""
        print(postgreSQL_select_Query % (shipid, ))
        cursor.execute(postgreSQL_select_Query, (shipid, ))
        print(
            "Selecting message from order_topack table using cursor.fetchall")
        records = cursor.fetchall()
        print("Print each row and it's columns values")
        # Only the first matching row is used.
        for row in records:
            jsonobj = row[0]
            cmd = protobuf_json.json2pb(commu_pb2.ACommunicate(), jsonobj)
            break
        if cmd:
            for aorderplaced in cmd.aorderplaced:
                insert_ACommunicate_to_DB(cmd, aorderplaced.seqnum)
            # The message has been consumed; remove it from the queue table.
            postgreSQL_delete_Query = """delete from order_placed where packageid = %s"""
            cursor.execute(postgreSQL_delete_Query, (shipid, ))
            connection.commit()
    except (Exception, psycopg2.Error) as error:
        print("Error while fetching data from PostgreSQL", error)
    finally:
        # Closing database connection — guarded so a failed connect()/cursor()
        # does not raise here.
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
            print("PostgreSQL connection is closed")
    return cmd
def GetMP3List(self, req_data, is_json):
    """Fetch the MP3 list for the requested type via the Jsoup service.

    Args:
        req_data: serialized GetMP3ListRequest — a JSON string when is_json
            is true, otherwise protobuf bytes.
        is_json: selects the wire encoding for both request and response.

    Returns:
        A GetMP3ListResponse encoded to match the request.  status 0 =
        success, 100 = failure.
    """
    # Auto-generated section: deserialize the request body.
    request = msec_pb2.GetMP3ListRequest()
    response = msec_pb2.GetMP3ListResponse()
    # JSON protocol handling.
    if is_json:
        req_json = json.loads(req_data)
        request = protobuf_json.json2pb(request, req_json)
    else:
        request.ParseFromString(req_data)
    # TODO: business logic implementation.
    log_info("GetMP3List start")
    monitor_add('GetMP3List entry')
    # NOTE(review): this sets an error status for an invalid type but does
    # NOT return early — the Jsoup call below still runs and may overwrite
    # status/msg.  Presumably an early return was intended; confirm.
    if request.type != "special" and request.type != "standard":
        response.status = 100
        response.msg = "type field invalid"
    json_req = {
        "handleClass": "com.bison.GetMP3List",
        "requestBody": {
            "type": request.type
        }
    }
    # 10-second TCP call to the Jsoup scraping service.
    json_ret = self.callmethod_tcp("Jsoup.jsoup", json_req, self.callback,
                                   10.0)
    if json_ret["ret"] != 0:
        # Transport-level failure.
        response.status = 100
        response.msg = json_ret["errmsg"]
    else:
        if json_ret["data"]["status"] != 0:
            # Service-level failure reported by Jsoup.
            response.status = 100
            response.msg = "jsoup returns " + str(
                json_ret["data"]["status"])
        else:
            response.status = 0
            response.msg = "success"
            log_info("GetMP3List successfully")
            monitor_add("GetMP3List succ")
            # Copy each url/title pair into the repeated mp3s field.
            for mp3 in json_ret["data"]["mp3s"]:
                one_mp3 = response.mp3s.add()
                one_mp3.url = mp3["url"]
                one_mp3.title = mp3["title"]
    # Serialize the response body.
    if is_json:
        return json.dumps(protobuf_json.pb2json(response))
    else:
        return response.SerializeToString()
def select_timeout_from_ACommunicate():
    """Collect ACommunicate messages whose order_upsseq row is older than 30s.

    Each stale row's JSON message is parsed into an ACommunicate protobuf;
    the stale rows' timestamps are then refreshed to now so the messages are
    not picked up again immediately.

    Returns:
        List of ACommunicate messages (empty on error; errors are printed,
        not raised).
    """
    # BUG FIX: these were bound only inside the try block, so a failure in
    # psycopg2.connect() made the finally clause and `return cmdlist` raise
    # NameError instead of reporting the real error.
    connection = None
    cursor = None
    cmdlist = []
    try:
        connection = psycopg2.connect(user=USER,
                                      password=PASSWORD,
                                      host=HOST,
                                      port=PORT,
                                      database=DATABASE)
        cursor = connection.cursor()
        postgreSQL_select_Query = """select message from order_upsseq where time < %s - INTERVAL '30 second'"""
        print(postgreSQL_select_Query)
        dt = datetime.datetime.now()
        cursor.execute(postgreSQL_select_Query, [dt])
        print("Selecting message from wareHouse table using cursor.fetchall")
        records = cursor.fetchall()
        print("Print each row and it's columns values")
        for row in records:
            jsonobj = row[0]
            cmd = protobuf_json.json2pb(commu_pb2.ACommunicate(), jsonobj)
            cmdlist.append(cmd)
        # Refresh the stale rows' timestamps so they are not re-selected
        # on the next sweep.
        postgreSQL_update_Query = """update order_upsseq set time = %s where time < %s - INTERVAL '30 second'"""
        cursor.execute(postgreSQL_update_Query,
                       (datetime.datetime.now(), datetime.datetime.now()))
        connection.commit()
    except (Exception, psycopg2.Error) as error:
        print("Error while fetching data from PostgreSQL", error)
    finally:
        # Closing database connection — guarded so a failed connect()/cursor()
        # does not raise here.
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
            print("PostgreSQL connection is closed")
    return cmdlist
def send_unack_msg_to_amazon(amazon_fd):
    """Periodically resend every un-acked message to Amazon.

    Every 60 seconds, reads all rows from the amazon_ack table, rebuilds each
    into a UCommunicate protobuf, and writes it (varint-length-prefixed) to
    the socket `amazon_fd`.  Runs forever; never returns.

    Args:
        amazon_fd: connected socket to the Amazon side.
    """
    db_conn = psycopg2.connect(
        "dbname='postgres' user='******' password = '******'"
        "host='" + db_host + "' port='" + db_port + "'")
    db_cur = db_conn.cursor()
    while True:
        """ get all message that haven't receive ack """
        db_cur.execute("""select message from amazon_ack""")
        """send them all again"""
        msgs_json = db_cur.fetchall()
        for msg_json in msgs_json:
            """restore it back to Message and send again"""
            msg = ups_amazon_pb2.UCommunicate()
            # NOTE(review): fetchall() yields row tuples, so msg_json is a
            # tuple, not the JSON value — json2pb presumably needs
            # msg_json[0]; confirm against the table schema.
            msg = json2pb(msg, msg_json, useFieldNumber=False)
            # Varint length prefix, then the serialized payload.
            _EncodeVarint(amazon_fd.send, len(msg.SerializeToString()), None)
            amazon_fd.sendall(msg.SerializeToString())
        sleep(60)
def login():
    """login as our user. input: LoginRequest (if POST) output: ok.

    Request: ?email=xx&password=xx[&remember_me=1] (if GET, else arguments in
    LoginRequest).  Aborts with 400 when the user is unknown or the password
    does not match.
    """
    if request.method == "POST":
        # POST: the whole LoginRequest arrives as JSON in the body.
        input_pb = protobuf_json.json2pb(all_pbs.LoginRequest(),
                                         request.get_json())
    else:
        # GET: assemble the LoginRequest from query-string arguments.
        input_pb = all_pbs.LoginRequest()
        input_pb.email = request.args.get("email", "")
        input_pb.password = request.args.get("password", "")
        input_pb.remember = request.args.get("remember", "") == "1"
    table = data_models.GetTable(data_models.RW_USERS)
    user_pb = data_models.ToProto(table.find_one({"info.email": input_pb.email}), data_models.RW_USERS)
    # NOTE(review): plaintext password comparison — consider hashing.
    if not user_pb or user_pb.info.password != input_pb.password:
        abort(400)
    user = user_util.FLUser(user_pb)
    flogin.login_user(user, remember=input_pb.remember)
    session.permanent = True
    return 'ok'
# Re-export loop: list the messages in the source collection, download each
# one individually, renumber its data_id sequentially, and re-post it as a
# protobuf into the destination collection.
resp = urllib2.urlopen(req)
messages = json.loads(resp.read())
for i in messages.keys():
    iteration = iteration + 1
    ## Getting message as a json object
    data_id = str(messages[i]["data_id"])
    req = RequestWithMethod(
        apiURL + "evarilos/raw_data/v1.0/database/" + db_id + "/collection/" +
        coll + "/message/" + data_id,
        "GET",
        headers={"Content-Type": "application/json"},
        data="json",
    )
    response = urllib2.urlopen(req)
    message = json.loads(response.read())
    # Renumber: data_id becomes the 1-based position in this export run.
    message["data_id"] = str(iteration)
    raw_data = raw_data_pb2.RawRFReadingCollection()
    # json2pb fills raw_data in place (and raises on schema mismatch).
    json2pb(raw_data, message)
    try:
        obj = raw_data.SerializeToString()
        # Upload the serialized protobuf into the new collection.
        req = urllib2.Request(
            apiURL + "evarilos/raw_data/v1.0/database/" + db_id +
            "/collection/" + coll_id_new,
            headers={"Content-Type": "application/x-protobuf"},
            data=obj,
        )
        resp = urllib2.urlopen(req)
        print json.loads(resp.read())
    except:
        print "Error"
def from_string(self, s):
    """Populate self.config from the JSON string `s`.

    BUG FIX: the raw string was previously handed straight to json2pb,
    which expects a decoded dict (the sibling implementation parses with
    json.loads first); decode it here before converting.
    """
    import json  # local import: keeps this fix self-contained
    json2pb(self.config, json.loads(s))
def from_file(self, filename):
    """Populate self.config from the JSON file at `filename`.

    BUG FIXES: (1) the raw file text was previously handed straight to
    json2pb, which expects a decoded dict (the sibling from_string parses
    first) — decode it here; (2) the file handle was never closed — use a
    context manager.
    """
    import json  # local import: keeps this fix self-contained
    with open(filename, "r") as fh:
        json2pb(self.config, json.load(fh))
def from_string(self, s):
    """Decode the JSON string `s` and load it into self.config; returns the
    populated message."""
    decoded = json.loads(s)
    return json2pb(self.config, decoded)
# Training fingerprints: the four quantiles (min, 33%, 67%, max) of the RSSI
# samples seen per access point at this measurement location.
training = {}
for key in meas_aps.keys():
    training[key] = np.array(
        mquantiles(meas_aps[key]['rssi'], [0, 0.33, 0.67, 1]))
# Emit one raw measurement per quantile value per AP.
# NOTE(review): original line structure was lost; the statement nesting below
# (upload once, after both loops) is reconstructed — confirm against history.
for key in training.keys():
    for value in range(0, len(training[key])):
        raw_data_reading = raw_data_collection.raw_measurement.add()
        x = datetime.utcnow()
        raw_data_reading.timestamp_utc = int(time.mktime(x.timetuple()))
        raw_data_reading.receiver_location.coordinate_x = i[
            'receiver_location']['coordinate_x']
        raw_data_reading.receiver_location.coordinate_y = i[
            'receiver_location']['coordinate_y']
        raw_data_reading.run_nr = value + 1
        raw_data_reading.sender_bssid = key
        raw_data_reading.rssi = int(training[key][value])
        raw_data_reading.channel = meas_aps[key]['channel']
# `key` leaks from the loop above; meas_number is the quantile count.
raw_data_collection.meas_number = len(training[key])
json2pb(raw_data_collection, message)
obj = raw_data_collection.SerializeToString()
# Upload the assembled collection as a protobuf.
req = urllib2.Request(apiURL + 'etd/v1.0/database/' + db_id_training +
                      '/collection/' + coll_id_training,
                      headers={"Content-Type": "application/x-protobuf"},
                      data=obj)
resp = urllib2.urlopen(req)
print json.loads(resp.read())
# Command-line tool: read a JSON log definition (argv[1]) and write it out as
# a serialized logdef protobuf (argv[2]).
import os, sys, simplejson, log_definition_pb2
from pprint import pprint
import protobuf_json, protobuf_json_writer

if len(sys.argv) < 3:
    print "Usage: " + sys.argv[0] + " <json file in> <pb file out>"
    exit()

# Load and decode the JSON input.
f = open(sys.argv[1],'r')
json_str = simplejson.loads(f.read())
f.close()

# Convert the decoded dict into a logdef protobuf.
msg_pb = log_definition_pb2.logdef()
json_pb = protobuf_json.json2pb(msg_pb,json_str)
#print protobuf_json_writer.proto2json(json_pb)

# Write the serialized message.
fo = open(sys.argv[2],'w')
fo.write(json_pb.SerializeToString())
fo.close()
def ArrayToProto(pb_class, json_array):
    """Convert each JSON dict in `json_array` into a fresh instance of
    `pb_class`; returns the resulting list of protobufs."""
    protos = []
    for entry in json_array:
        protos.append(protobuf_json.json2pb(pb_class(), entry))
    return protos
needed coz PB dont initialize default values ! """

def ActuallyInit(obj):
    # Force-touch every uninitialized required field so its default value
    # materializes; recurse into composite fields (which cannot be assigned
    # directly and raise on __setattr__).
    err = []
    obj.IsInitialized(err)
    for field in err:
        attr = obj.__getattribute__(field)
        try:
            obj.__setattr__(field, attr)
        except:
            ActuallyInit(attr)

# create and fill test message
pb=pb_test.hotel()
pb.hn = "The Royal Marigold Hotel"
#print pb.hotel_url
ActuallyInit(pb)

# convert it to JSON and back
#pprint(pb.SerializeToString())
json_obj=protobuf_json.pb2json(pb)
with open("tab.json","w+") as j_file:
    j_file.write(json.dumps(json_obj))
# Compare in-memory footprints of the JSON dict vs the protobuf object.
print "size of json : " + str(sys.getsizeof(json_obj))
pb2=protobuf_json.json2pb(pb_test.hotel(), json_obj)
with open("tab.pb","w+") as p_file:
    p_file.write(pb2.SerializeToString())
print "size of pb : " + str(sys.getsizeof(pb2))
# Populate the test message: scalars, an enum, a singular nested message,
# a repeated nested message, repeated ints and repeated bytes.
pb.query="some text"
pb.flag=True
pb.test_enum=2
msg=pb.nested_msg
msg.id=1010
msg.title="test title"
msg.url="http://example.com/"
msgs=pb.nested_msgs.add()
msgs.id=456
msgs.title="test title"
msgs.url="http://localhost/"
pb.rep_int.append(1)
pb.rep_int.append(2)
pb.bs.append("\x00\x01\x02\x03\x04");
pb.bs.append("\x05\x06\x07\x08\x09");

# convert it to JSON and back
pprint(pb.SerializeToString())
json_obj=protobuf_json.pb2json(pb)
print json_obj
pb2=protobuf_json.json2pb(pb_test.TestMessage(), json_obj)
pprint(pb2.SerializeToString())
# The round-trip must be lossless for the test to pass.
if pb == pb2:
    print "Test passed."
else:
    print "Test FAILED!"
# training = {} # for key in meas_aps.keys(): # training[key] = np.mean(meas_aps[key]['rssi']) ## Example 2 - training fingerprint are the 4 quantile values for each AP visible at a certain ## measurement location. training = {} for key in meas_aps.keys(): training[key] = np.array(mquantiles(meas_aps[key]['rssi'], [0, 0.33, 0.67, 1])) for key in training.keys(): for value in range(0,len(training[key])): raw_data_reading = raw_data_collection.raw_measurement.add() x = datetime.utcnow() raw_data_reading.timestamp_utc = int(time.mktime(x.timetuple())) raw_data_reading.receiver_location.coordinate_x = i['receiver_location']['coordinate_x'] raw_data_reading.receiver_location.coordinate_y = i['receiver_location']['coordinate_y'] raw_data_reading.run_nr = value + 1 raw_data_reading.sender_bssid = key raw_data_reading.rssi = int(training[key][value]) raw_data_reading.channel = meas_aps[key]['channel'] raw_data_collection.meas_number = len(training[key]) json2pb(raw_data_collection, message) obj = raw_data_collection.SerializeToString() req = urllib2.Request(apiURL + 'etd/v1.0/database/' + db_id_training + '/collection/' + coll_id_training, headers={"Content-Type": "application/x-protobuf"}, data = obj) resp = urllib2.urlopen(req) print json.loads(resp.read())
def DictToProto(pb, json_data):
    """Fill the protobuf message `pb` from the dictionary `json_data` and
    return the populated message."""
    populated = protobuf_json.json2pb(pb, json_data)
    return populated
#!/usr/bin/python
"""Read a JSON file (argv[1]), convert it into a pb_test.MSG protobuf, and
print the serialized bytes."""
# CLEANUP: json, sys and os were each imported twice; duplicates removed.
import os, sys, json
from pprint import pprint
import protobuf_json
import glob
import urllib
import urllib2
import binascii
import inspect
import zlib
import time
import traceback
import msg_pb2 as pb_test

# Load and decode the JSON input; `with` closes the file (the original also
# called f.close() redundantly inside the with-block).
with open(sys.argv[1], 'r') as f:
    json_obj = json.load(f)

pb2 = protobuf_json.json2pb(pb_test.MSG(), json_obj)
pprint(pb2.SerializeToString())
pb.test_enum = 2 msg = pb.nested_msg msg.id = 1010 msg.title = 'test title' msg.url = 'http://example.com' msags = pb.nested_msgs.add() msags.id = 456 msags.title='test title22' msags.url='http://localhost/' pb.rep_int.append(1) pb.rep_int.append(22) pb.bs.append('\x00\x01\x02\x03\x04') pb.bs.append('\x05\x06\x07\x08\x09') pprint(pb.SerializeToString) json_obj = protobuf_json.pb2json(pb) print json_obj pb2=protobuf_json.json2pb(pb_test.TestMessage(),json_obj) pprint(pb2.SerializeToString()) if pb == pb2: print "test passed." else: print "Test FAILD"
def GetTitles(self, req_data, is_json):
    """Return the list of MP3 titles from the crawl service.

    Args:
        req_data: serialized GetTitlesRequest — a JSON string when is_json
            is true, otherwise protobuf bytes.
        is_json: selects the wire encoding for both request and response.

    Returns:
        A GetTitlesResponse encoded to match the request.  status 0 =
        success, 100 = failure.
    """
    # Auto-generated section: deserialize the request body.
    request = msec_pb2.GetTitlesRequest()
    response = msec_pb2.GetTitlesResponse()
    # JSON protocol handling.
    if is_json:
        req_json = json.loads(req_data)
        request = protobuf_json.json2pb(request, req_json)
    else:
        request.ParseFromString(req_data)
    # TODO: business logic implementation.
    # Demo: read the log level with both casings of the config key.
    level = get_config('LOG', 'Level')
    if level:
        log_info(level)
    level = get_config('LOG', 'level')
    if level:
        log_info(level)
    try:
        import MySQLdb  #mysql
    except ImportError:
        log_error("Fail to import mysql library.")
    else:
        route = getroute("Database.mysql")
        if route:
            # Open a database connection (connectivity smoke test only;
            # the result is just logged).
            db = MySQLdb.connect(route["ip"], "msec", "msec@anyhost",
                                 "msec_test_java_db", route["port"])
            cursor = db.cursor()
            cursor.execute("SELECT VERSION()")
            data = cursor.fetchone()
            log_info("Database version : %s" % data)
            db.close()
    # Ask the crawl service for the MP3 list of the requested type.
    crawlreq = VOA_py_Crawl_pb2.GetMP3ListRequest()
    crawlreq.type = request.type
    # result = CallMethod("VOA_java.Crawl", "crawl.CrawlService.getMP3List", crawlreq, 20000)
    result = CallMethod("VOA_py.Crawl", "crawl.CrawlService.GetMP3List",
                        crawlreq, 20000)
    if result["ret"] != 0:
        # The RPC itself failed.
        response.status = 100
        response.msg = "CallMethod failed: " + result["errmsg"]
        log_error('callmethod error: %d %s' % (result["ret"], result["errmsg"]))
    else:
        crawlrsp = VOA_py_Crawl_pb2.GetMP3ListResponse()
        crawlrsp.ParseFromString(result["response"])
        if crawlrsp.status != 0:
            # The crawl service answered, but reported an error.
            log_error('getmp3list response error: %d %s' % (crawlrsp.status,
                                                            crawlrsp.msg))
            response.status = 100
            response.msg = "getmp3list response failed: " + crawlrsp.msg
        else:
            response.status = 0
            response.msg = "success"
            # Copy every title into the repeated titles field.
            for mp3 in crawlrsp.mp3s:
                response.titles.append(mp3.title)
            log_info('GetTitles Success')
            attr_report('GetTitles success')
    # Serialize the response body.
    if is_json:
        return json.dumps(protobuf_json.pb2json(response))
    else:
        return response.SerializeToString()