def orderplaced_handler(orderplaced_handle_list, world_id, amazon_fd, world_fd,
                        truck_list):
    UCommands = world_ups_pb2.UCommands()
    UCommu = ups_amazon_pb2.UCommunicate()
    #mutex.acquire()
    # Open the database connection once, up front, so the cursor is defined
    # even when orderplaced_handle_list is empty.
    db_conn = psycopg2.connect(
        "dbname='postgres' user='******' password='******' "
        "host='" + db_host + "' port='" + db_port + "'")
    db_cur = db_conn.cursor()
    package_db_handle(orderplaced_handle_list, world_id, amazon_fd, world_fd,
                      truck_list)
    for i in range(0, len(orderplaced_handle_list)):
        UGoPickup = UCommands.pickups.add()
        UGoPickup.truckid = int(truck_list[i])
        UGoPickup.whid = orderplaced_handle_list[i].whid
        UGoPickup.seqnum = get_seqnum()
        #edited
        UOrderPlaced = UCommu.uorderplaced.add()
        UOrderPlaced.packageid = orderplaced_handle_list[i].packageid
        UOrderPlaced.truckid = int(truck_list[i])
        UOrderPlaced.seqnum = get_seqnum()
        update_truck_orderplaced(UOrderPlaced.packageid, world_id, amazon_fd,
                                 world_fd, UOrderPlaced.truckid)
        # Mark the assigned truck as en route ('E') to the warehouse.
        db_cur.execute(
            """update truck set status = 'E' where truckid = %s and worldid = %s""",
            (str(UGoPickup.truckid), str(world_id)))
        db_conn.commit()
    seqnum1 = get_seqnum()
    UCommu.acks.append(seqnum1)
    json_msg = pb2json(UCommu)
    db_cur.execute(
        """INSERT INTO amazon_ack (seqnum,message) VALUES(%s,%s);""",
        (seqnum1, json.dumps(json_msg)))
    db_conn.commit()
    send_to_amazon(UCommu, amazon_fd)
    #    if UOrderPlaced.seqnum in amazon_ack_list:
    #        break
    #while True:
    seqnum2 = get_seqnum()
    UCommands.acks.append(seqnum2)
    json_msg2 = pb2json(UCommands)
    db_cur.execute("""INSERT INTO world_ack (seqnum,message) VALUES(%s,%s);""",
                   (seqnum2, json.dumps(json_msg2)))
    db_conn.commit()
    send_to_world(UCommands, world_fd)
    #    if  UGoPickup.seqnum in world_ack_list:
    #        break
    #mutex.release()
    return
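The UPS handlers in this listing (orderplaced_handler above, completions_handler and loadingfinished_handler below) rely on a get_seqnum() helper that is not part of these excerpts. A minimal sketch of one way such a helper could be written, assuming a process-wide counter is sufficient (the actual project may derive sequence numbers differently, e.g. from the database):

import itertools
import threading

_seqnum_counter = itertools.count(1)
_seqnum_lock = threading.Lock()

def get_seqnum():
    # Hypothetical stand-in for the project's get_seqnum(); returns a
    # monotonically increasing, thread-safe sequence number.
    with _seqnum_lock:
        return next(_seqnum_counter)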
def lookup_entities(client, identifiers):
    """Search for entities by phone number, email, or gaia_id."""

    # Instantiate a GetEntityByIdRequest Protocol Buffer message describing the
    # request.

    lookup_dicts = [{'phone': sys.argv[1], 'create_offnetwork_gaia': True}]
    request = hangups.hangouts_pb2.GetEntityByIdRequest(
        request_header=client.get_request_header(),
        batch_lookup_spec=[
            hangups.hangouts_pb2.EntityLookupSpec(**d) for d in lookup_dicts
        ],
    )

    try:
        # Make the request to the Hangouts API.
        res = yield from client.get_entity_by_id(request)

        # Print the list of entities in the response.
        for entity_result in res.entity_result:
            print(json.dumps(protobuf_json.pb2json(entity_result)))

    finally:
        # Disconnect the hangups Client to make client.connect return.
        yield from client.disconnect()
Example 3
def type3_present():
    experiment = message_evarilos_engine_type2_presentation_pb2.ece_type2() 

    with open("message_type_2.pb", "rb") as f:
        experiment.ParseFromString(f.read())

    return json.dumps(protobuf_json.pb2json(experiment))
Example 4
def insert_ACommunicate_to_DB(acommu, seqnum):
    connection = None
    cursor = None
    try:
        connection = psycopg2.connect(user=USER,
                                      password=PASSWORD,
                                      host=HOST,
                                      port=PORT,
                                      database=DATABASE)
        cursor = connection.cursor()
        jsonobj = protobuf_json.pb2json(acommu)

        print(type(jsonobj))
        postgreSQL_insert_Query = """INSERT INTO order_upsseq (seqnum, message, time) VALUES(%s, %s, %s) ;"""
        records = (seqnum, json.dumps(jsonobj), datetime.datetime.now())
        print(postgreSQL_insert_Query % records)
        cursor.execute(postgreSQL_insert_Query, records)
        connection.commit()

    except (Exception, psycopg2.Error) as error:
        print("Error while inserting data into PostgreSQL", error)

    finally:
        # Closing database connection (guard against a failed connect).
        if connection:
            if cursor:
                cursor.close()
            connection.close()
            print("PostgreSQL connection is closed")
Example 5
def asmdisk_add():
    if request.headers.get('Content-Type') != "application/json":
        MESSAGE("Unsupport Content-Type, default:'application/json'")
        abort(406)

    if not request.json or 'asmdisk_path' not in request.json or 'diskgroup_name' not in request.json:
        MESSAGE("Miss asmdisk_path/diskgroup_name field")
        abort(400)

    if 'rebalance' in request.json and not str(
            request.json['rebalance']).isdigit():
        MESSAGE("Illegal rebalance field")
        abort(400)

    if 'force' in request.json and not type(
            request.json['force']) == type(True):
        MESSAGE("Illegal force field")
        abort(400)

    mds_request = MakeRequest(msg_mds.ASMDISK_ADD_REQUEST)
    mds_request.body.Extensions[
        msg_mds.
        asmdisk_add_request].asmdisk_path = request.json['asmdisk_path']
    mds_request.body.Extensions[
        msg_mds.
        asmdisk_add_request].diskgroup_name = request.json['diskgroup_name']
    if 'rebalance' in request.json:
        mds_request.body.Extensions[
            msg_mds.asmdisk_add_request].rebalance_power = int(
                request.json['rebalance'])
    if 'force' in request.json:
        mds_request.body.Extensions[msg_mds.asmdisk_add_request].force = bool(
            request.json['force'])
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
Example 6
def store_message(db_id, coll_id):

    experiment_collection = experiment_results_pb2.Experiment()

    try:
        experiment_collection.ParseFromString(request.data)
    except:
        return json.dumps('Experiment is not well defined!')

    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except:
        return json.dumps("Unable to connect to the database!")

    db_names = connection.database_names()
    if db_id in db_names:
        db = connection[db_id]
    else:
        return json.dumps("No such database!")

    coll_names = db.collection_names()
    if coll_id in coll_names:
        collection = db[coll_id]
    else:
        return json.dumps("No such experiment in the database!")

    try:
        collection.insert(protobuf_json.pb2json(experiment_collection))
    except:
        return json.dumps("Unable to store data into the database!")

    return json.dumps('Data stored!')
Example 7
def store_message(db_id, coll_id):

    try:
        raw_data_collection = raw_data_pb2.RawRFReadingCollection()
        raw_data_collection.ParseFromString(request.data)
    except:
        return json.dumps('Message is not well formated!')

    # Connect to the database MongoDB
    try:
        connection = Connection(hostname, port_number)
    except:
        return json.dumps("Unable to connect to the database!")

    db_names = connection.database_names()
    if db_id in db_names:
        db = connection[db_id]
    else:
        return json.dumps("No such database!")

    coll_names = db.collection_names()
    if coll_id in coll_names:
        collection = db[coll_id]
    else:
        return json.dumps("No such collection in the database!")

    try:
        collection.insert(protobuf_json.pb2json(raw_data_collection))
    except:
        return json.dumps("Unable to store data into the database!")

    return json.dumps('Data stored!')
Example 9
def pool_dirtythresh(pool_name):
    if request.headers.get('Content-Type') != "application/json":
        MESSAGE("Unsupport Content-Type, default:'application/json'")
        abort(406)
    if not request.json or 'dirty_thresh_lower' not in request.json:
        MESSAGE("Miss dirty_thresh_lower field")
        abort(400)
    if not request.json or 'dirty_thresh_upper' not in request.json:
        MESSAGE("Miss dirty_thresh_upper field")
        abort(400)
    if not str(request.json['dirty_thresh_lower']).isdigit() or int(
            request.json['dirty_thresh_lower']) > 100:
        MESSAGE("Param dirty_thresh_lower illegal")
        abort(400)
    if not str(request.json['dirty_thresh_upper']).isdigit() or int(
            request.json['dirty_thresh_upper']) > 100:
        MESSAGE("Param dirty_thresh_upper illegal")
        abort(400)
    mds_request = MakeRequest(msg_mds.POOL_CONFIG_REQUEST)
    mds_request.body.Extensions[
        msg_mds.pool_config_request].pool_name = pool_name
    mds_request.body.Extensions[
        msg_mds.pool_config_request].dirty_thresh.lower = int(
            request.json['dirty_thresh_lower'])
    mds_request.body.Extensions[
        msg_mds.pool_config_request].dirty_thresh.upper = int(
            request.json['dirty_thresh_upper'])
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
Example 10
def disk_add():
    if request.headers.get('Content-Type') != "application/json":
        MESSAGE("Unsupport Content-Type, default:'application/json'")
        abort(406)
    if not request.json or 'dev_name' not in request.json:
        MESSAGE("Miss dev_name field")
        abort(400)
    if 'partition_count' in request.json:
        if not str(request.json['partition_count']).isdigit():
            MESSAGE("Illegal partition_count field")
            abort(400)
        if int(request.json['partition_count']) <= 0:
            MESSAGE("Illegal partition_count field")
            abort(400)
    if 'disk_type' in request.json:
        if request.json['disk_type'] not in ['ssd', 'hdd']:
            MESSAGE("Illegal disk_type field")
            abort(400)

    mds_request = MakeRequest(msg_mds.DISK_ADD_REQUEST)
    mds_request.body.Extensions[
        msg_mds.disk_add_request].dev_name = request.json['dev_name']
    if 'partition_count' in request.json:
        mds_request.body.Extensions[
            msg_mds.disk_add_request].partition_count = int(
                str(request.json['partition_count']))
    if 'disk_type' in request.json:
        if str(request.json['disk_type']) == "ssd":
            mds_request.body.Extensions[
                msg_mds.disk_add_request].disk_type = msg_pds.DISK_TYPE_SSD
        else:
            mds_request.body.Extensions[
                msg_mds.disk_add_request].disk_type = msg_pds.DISK_TYPE_HDD
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
Example 11
def insert_AOrderPlaced_to_DB(pckid, acommu):
    connection = None
    cursor = None
    try:
        connection = psycopg2.connect(user=USER,
                                      password=PASSWORD,
                                      host=HOST,
                                      port=PORT,
                                      database=DATABASE)
        cursor = connection.cursor()
        jsonobj = protobuf_json.pb2json(acommu)

        print(jsonobj)
        postgreSQL_insert_Query = """INSERT INTO order_placed (packageid, message) VALUES(%s, %s) ;"""
        records = (pckid, json.dumps(jsonobj))
        print(postgreSQL_insert_Query % records)
        cursor.execute(postgreSQL_insert_Query, records)
        connection.commit()

    except (Exception, psycopg2.Error) as error:
        print("Error while inserting data into PostgreSQL", error)

    finally:
        # Closing database connection (guard against a failed connect).
        if connection:
            if cursor:
                cursor.close()
            connection.close()
            print("PostgreSQL connection is closed")
Example 12
def set_second_storage(second_storage_ip):
    mds_request = MakeRequest(msg_mds.SET_SECOND_STORAGE_IP_REQUEST)
    mds_request.body.Extensions[
        msg_mds.
        set_second_storage_ip_request].second_storage_ip = second_storage_ip
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
Example 13
def pcs_config(action):
    if action not in ['on', 'off', 'enable', 'disable']:
        MESSAGE("action field")
        abort(400)
    mds_request = MakeRequest(msg_mds.PCS_CONFIG_REQUEST)
    mds_request.body.Extensions[msg_mds.pcs_config_request].action = action
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
Example 14
def store_message(db_id, coll_id):

    detect_message = 0
    try:
        raw_data_collection = raw_data_pb2.RawRFReadingCollection()
        raw_data_collection.ParseFromString(request.data)
        detect_message = 1
    except:
        try:
            raw_metadata = raw_metadata_pb2.Metadata()
            raw_metadata.ParseFromString(request.data)
            detect_message = 2
        except:
            return json.dumps("Message is not well formated!")

    # Connect to the database MongoDB
    try:
        connection = Connection("localhost", 27017)
    except:
        return json.dumps("Unable to connect to the database!")

    db_names = connection.database_names()
    if db_id in db_names:
        db = connection[db_id]
    else:
        return json.dumps("No such database!")

    coll_names = db.collection_names()
    if coll_id in coll_names:
        collection = db[coll_id]
    else:
        return json.dumps("No such collection in the database!")

    if detect_message == 1:
        try:
            collection.insert(protobuf_json.pb2json(raw_data_collection))
        except:
            return json.dumps("Unable to store data into the database!")
    else:
        try:
            collection.insert(protobuf_json.pb2json(raw_metadata))
        except:
            return json.dumps("Unable to store data into the database!")

    return json.dumps("Data stored!")
Example 15
def test_json_and_back(enum_string=False):
    # convert it to JSON and back
    pb = get_pb()
    pprint(pb.SerializeToString())
    json_obj = protobuf_json.pb2json(pb, enum_string=enum_string)
    pprint(json_obj)
    pb2 = protobuf_json.json2pb(pb_test.TestMessage(), json_obj)
    pprint(pb2.SerializeToString())
    assert pb == pb2
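
test_json_and_back relies on a get_pb() helper that is not shown in this excerpt; Example 44 below constructs a similar TestMessage directly. A minimal sketch under that assumption (this get_pb() is illustrative, not the original helper):

def get_pb():
    # Illustrative builder for the round-trip test, modeled on Example 44.
    pb = pb_test.TestMessage()
    pb.id = 123
    pb.flag = True
    nested = pb.nested_msgs.add()
    nested.id = 456
    nested.title = "test title"
    nested.url = "http://localhost/"
    for i in range(10):
        pb.rep_int.append(i)
    return pb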
Example 16
    def _write(self, msg, protobuf_msg):
        cls = protobuf_msg.__class__.__name__
        msg = protobuf_json.pb2json(protobuf_msg)
        payload = '{"type": "%s", "message": %s}' % (cls, json.dumps(msg))
        r = requests.post(TREZORD_HOST + '/call/%s' % self.session, data=payload)
        if r.status_code != 200:
            raise Exception('trezord: Could not write message' + get_error(r))
        else:
            self.response = r.json()
Example 17
def getRealTimeTripUpdates(agency_name):
    try:
        request = BASE_URL+'/tripupdates?api_key=%s&agency=%s'%(TOKEN,agency_name)
        data = requests.get(request).content
        msg= gtfs_realtime_pb2.FeedMessage()
        msg.ParseFromString(data)
        return protobuf_json.pb2json(msg)['entity']
    except:
        raise NotFound(detail = 'Error '+traceback.format_exc())
Example 18
def node_config():
    if 'node_name' not in request.json:
        MESSAGE("Miss node_name field")
        abort(400)
    mds_request = MakeRequest(msg_mds.NODE_CONFIG_REQUEST)
    mds_request.body.Extensions[
        msg_mds.node_config_request].node_name = request.json['node_name']
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
Example 19
    def _write(self, msg, protobuf_msg):
        cls = protobuf_msg.__class__.__name__
        msg = protobuf_json.pb2json(protobuf_msg)
        payload = '{"type": "%s", "message": %s}' % (cls, json.dumps(msg))
        r = self.conn.post(TREZORD_HOST + '/call/%s' % self.session,
                           data=payload)
        if r.status_code != 200:
            raise Exception('trezord: Could not write message' + get_error(r))
        else:
            self.response = r.json()
Example 20
def put_license_file():
    if 'license_base64' not in request.json:
        MESSAGE("Miss license_base64 field")
        abort(400)
    mds_request = MakeRequest(msg_mds.PUT_LICENSE_FILE_REQUEST)
    mds_request.body.Extensions[
        msg_mds.put_license_file_request].license_base64 = request.json[
            'license_base64']
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
def completions_handler(completions, world_id, amazon_fd, world_fd):
    try:
        db_conn = psycopg2.connect(
            "dbname='postgres' user='******' password='******' "
            "host='" + db_host + "' port='" + db_port + "'")
        db_cur = db_conn.cursor()
    except psycopg2.OperationalError as error:
        print(error)
        return
    UCommu = ups_amazon_pb2.UCommunicate()
    #mutex.acquire()
    for completion in completions:
        if completion.status == "ARRIVE WAREHOUSE":
            UArrivedAtWarehouse = UCommu.uarrived.add()
            UArrivedAtWarehouse.truckid = completion.truckid
            UArrivedAtWarehouse.seqnum = get_seqnum()
            location_x = completion.x
            location_y = completion.y
            truckid = completion.truckid
            update_truck_completion(location_x, location_y, truckid, world_id)
            try:
                db_cur.execute(
                    "select packageid from package where worldid = %s "
                    "and truckid = %s and status = 'E'",
                    (str(world_id), str(truckid)))
                rows = db_cur.fetchall()
            except psycopg2.OperationalError as error:
                print(error)
                rows = []
            for row in rows:
                db_cur.execute(
                    "update package set status = 'W' where packageid = %s "
                    "and worldid = %s", (str(row[0]), str(world_id)))
                db_conn.commit()
        if completion.status == "IDLE":
            location_x = completion.x
            location_y = completion.y
            truckid = completion.truckid
            db_cur.execute("update truck set status = 'I', "
                           "location_x = '" + str(location_x) + "'," +
                           "location_y = '" + str(location_y) +
                           "'where truckid = '" + str(truckid) +
                           "' and worldid = '" + str(world_id) + "'")
            db_conn.commit()
    seqnum = get_seqnum()
    UCommu.acks.append(seqnum)
    json_msg = pb2json(UCommu)
    db_cur.execute(
        """INSERT INTO amazon_ack (seqnum,message) VALUES(%s,%s);""",
        (seqnum, json.dumps(json_msg)))
    db_conn.commit()
    send_to_amazon(UCommu, amazon_fd)
    #     if UPackageDelivered.seqnum in amazon_ack_list:
    #         break
    db_conn.commit()
    #mutex.release()
    return
Example 22
def diskgroup_add():
    if request.headers.get('Content-Type') != 'application/json':
        MESSAGE("Unsupport Content-Type, default:'application/json'")
        abort(406)

    if not request.json or 'diskgroup_name' not in request.json or 'asmdisk_paths' not in request.json:
        MESSAGE("Miss diskgroup_name/asmdisk_paths field")
        abort(400)

    if 'redundancy' in request.json and request.json['redundancy'] not in [
            'external', 'normal', 'high'
    ]:
        MESSAGE(
            "Param 'redundancy' is not legal, only support 'external'/'normal'/'high'"
        )
        abort(400)

    attr_items = None
    if 'attr' in request.json:
        try:
            attr_items = [(i.split('=')[0], i.split('=')[1])
                          for i in request.json['attr'].split(',')]
            for attr in attr_items:
                if attr[0] not in ['compatible.asm', 'compatible.rdbms']:
                    MESSAGE(
                        "Param 'attr' is not legal, only support 'compatible.asm'/'compatible.rdbms'"
                    )
                    abort(400)
        except:
            MESSAGE(
                "Param 'attr' parameter illegal, e.g. compatible.asm=11.2,compatible.rdbms=11.2"
            )
            abort(400)

    mds_request = MakeRequest(msg_mds.DISKGROUP_ADD_REQUEST)
    mds_request.body.Extensions[
        msg_mds.
        diskgroup_add_request].diskgroup_name = request.json['diskgroup_name']
    for path in request.json['asmdisk_paths'].split(','):
        mds_request.body.Extensions[
            msg_mds.diskgroup_add_request].asmdisk_paths.append(path)
    if 'redundancy' in request.json:
        mds_request.body.Extensions[
            msg_mds.
            diskgroup_add_request].redundancy = request.json['redundancy']
    if 'attr' in request.json:
        for attr in attr_items:
            if attr[0] == 'compatible.asm':
                mds_request.body.Extensions[
                    msg_mds.diskgroup_add_request].compatible_asm = attr[1]
            if attr[0] == "compatible.rdbms":
                mds_request.body.Extensions[
                    msg_mds.diskgroup_add_request].compatible_rdbms = attr[1]
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
Example 23
def qos_add():
    if request.headers.get('Content-Type') != "application/json":
        MESSAGE("Unsupport Content-Type, default:'application/json'")
        abort(406)

    if not request.json or 'template_name' not in request.json or 'items' not in request.json:
        MESSAGE("Miss dev_name/items field")
        abort(400)

    try:
        qos_items = [(i.split('=')[0], i.split('=')[1])
                     for i in request.json['items'].split(',')]
    except:
        MESSAGE(
            "QoS 'items' parameter illegal, e.g. read-bps=1048576,read-iops=100,write-bps=1048576,write-iops=100"
        )
        abort(400)

    for items in qos_items:
        if items[0] not in [
                'read-bps', 'read-iops', 'write-bps', 'write-iops'
        ]:
            MESSAGE("QoS 'items' parameter not support '%s'" % items[0])
            abort(400)
        if not str(items[1]).isdigit() or int(items[1]) > 1000000000:
            MESSAGE("Param '%s' is not legal" % items[0])
            abort(400)

    mds_request = MakeRequest(msg_mds.QOS_TEMPLATE_ADD_REQUEST)
    mds_request.body.Extensions[
        msg_mds.
        qos_template_add_request].template_name = request.json['template_name']

    for items in qos_items:
        if items[0] == "read-bps":
            mds_request.body.Extensions[
                msg_mds.qos_template_add_request].qos_info.read_bps = int(
                    items[1])
        elif items[0] == "read-iops":
            mds_request.body.Extensions[
                msg_mds.qos_template_add_request].qos_info.read_iops = int(
                    items[1])
        elif items[0] == "write-bps":
            mds_request.body.Extensions[
                msg_mds.qos_template_add_request].qos_info.write_bps = int(
                    items[1])
        elif items[0] == "write-iops":
            mds_request.body.Extensions[
                msg_mds.qos_template_add_request].qos_info.write_iops = int(
                    items[1])

    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
Example 24
def replace_message(db_id, coll_id, data_id):

    raw_data_collection = raw_data_pb2.RawRFReadingCollection()
    raw_metadata = raw_metadata_pb2.Metadata()

    # Connect to the database MongoDB
    try:
        connection = Connection(hostname, port_number)
    except:
        return json.dumps("Unable to connect to the database!")

    try:
        raw_data_collection.ParseFromString(request.data)
    except:
        return json.dumps('Message is not well defined!')

    db_names = connection.database_names()
    if db_id not in db_names:
        return json.dumps("Database doesn't exist!")

    db = connection[db_id]
    coll_names = db.collection_names()
    if coll_id not in coll_names:
        return json.dumps("Collection doesn't exist!")

    collection = db[coll_id]

    try:
        message_collection = collection.find_one({'data_id': data_id})
    except:
        return json.dumps("Unable to read data from the collection!")

    if message_collection is None:
        return json.dumps("No data with this ID in the collection!")

    message_collection['_id'] = str(message_collection['_id'])
    message_backup = message_collection

    try:
        collection.remove({'data_id': data_id})
    except:
        collection.insert(message_backup)
        return json.dumps("Unable to read data from the database!")

    try:
        collection.insert(protobuf_json.pb2json(raw_data_collection))
    except:
        collection.insert(message_backup)
        return json.dumps("Unable to store data into the collection!")

    return json.dumps('Message successfully replaced!')
Example 25
    def GetUrlByTitle(self, req_data, is_json):

        # Auto-generated section: deserialize the request body
        request = msec_pb2.GetUrlByTitleRequest()
        response = msec_pb2.GetUrlByTitleResponse()
        # JSON protocol handling
        if is_json:
            req_json = json.loads(req_data)
            request = protobuf_json.json2pb(request, req_json)
        else:
            request.ParseFromString(req_data)

        # TODO: implement the business logic
        log_info("GetUrlByTitle start....")
        attr_report("GetUrlByTitle Entry")
        crawlreq = VOA_py_Crawl_pb2.GetMP3ListRequest()
        crawlreq.type = request.type
        # result = CallMethod("VOA_java.Crawl", "crawl.CrawlService.getMP3List", crawlreq, 20000)
        result = CallMethod("VOA_py.Crawl", "crawl.CrawlService.GetMP3List",
                            crawlreq, 20000)
        if result["ret"] != 0:
            response.status = 100
            response.msg = "CallMethod failed: " + result["errmsg"]
            log_error('callmethod error: %d %s' %
                      (result["ret"], result["errmsg"]))
        else:
            crawlrsp = VOA_py_Crawl_pb2.GetMP3ListResponse()
            crawlrsp.ParseFromString(result["response"])
            if crawlrsp.status != 0:
                log_error('getmp3list response error: %d %s' %
                          (crawlrsp.status, crawlrsp.msg))
                response.status = 100
                response.msg = "getmp3list response failed: " + crawlrsp.msg
            else:
                response.status = 100
                response.msg = "failed to find the url"
                for mp3 in crawlrsp.mp3s:
                    if request.title == mp3.title:
                        response.url = mp3.url
                        response.status = 0
                        response.msg = "success"
                        log_info("GetUrlByTitle succ.")
                        attr_report("GetUrlByTitle succ")
                        break

        # Serialize the response body
        if is_json:
            return json.dumps(protobuf_json.pb2json(response))
        else:
            return response.SerializeToString()
Example 26
def srbd_config():
    if request.headers.get('Content-Type') != "application/json":
        MESSAGE("Unsupport Content-Type, default:'application/json'")
        abort(406)
    if 'nodeid' in request.json and 'attr' in request.json:
        attr_items = None
        try:
            attr_items = [(i.split('=')[0], i.split('=')[1])
                          for i in request.json['attr'].split(',')]
            for attr in attr_items:
                a = request.json['nodeid'] + "_" + attr[0]
                if a not in SRBD_KEY or attr[0] in SRBD_STATUS:
                    MESSAGE("key parameter illegal")
                    abort(400)
        except:
            MESSAGE("key parameter illegal")
            abort(400)
        mds_request = MakeRequest(msg_mds.SRBD_CONFIG_REQUEST)
        for attr in attr_items:
            if request.json['nodeid'] + "_" + attr[0] in SRBD_KEY and attr[
                    0] not in SRBD_STATUS:
                srbd_config = msg_pds.SrbdConfig()
                srbd_config.nodeid = str(request.json['nodeid'])
                srbd_config.srbd_key = str(request.json['nodeid'] + "_" +
                                           attr[0])
                srbd_config.srbd_value = attr[1]
                mds_request.body.Extensions[
                    msg_mds.srbd_config_request].srbd_config.CopyFrom(
                        srbd_config)
        mds_response = send(mds_request)
    elif 'role' in request.json:
        if request.json['role'] not in ROLE:
            message("key parameter illegal")
            abort(400)
        mds_request = MakeRequest(msg_mds.SRBD_CONFIG_REQUEST)
        mds_request.body.Extensions[
            msg_mds.srbd_config_request].node_role = request.json['role']
        mds_response = send(mds_request)
    elif "action" in request.json:
        if request.json['action'] not in ACTION:
            message("key parameter illegal")
            abort(400)
        mds_request = MakeRequest(msg_mds.SRBD_CONFIG_REQUEST)
        mds_request.body.Extensions[
            msg_mds.srbd_config_request].node_action = request.json['action']
        mds_response = send(mds_request)
    else:
        message("key parameter illegal")
        abort(400)
    return get_response(protobuf_json.pb2json(mds_response), 200)
Example 27
File: crawl.py Project: zyf111/MSEC
    def GetMP3List(self, req_data, is_json):
        # Auto-generated section: deserialize the request body
        request = msec_pb2.GetMP3ListRequest()
        response = msec_pb2.GetMP3ListResponse()
        # JSON protocol handling
        if is_json:
            req_json = json.loads(req_data)
            request = protobuf_json.json2pb(request, req_json)
        else:
            request.ParseFromString(req_data)

        # TODO: implement the business logic
        log_info("GetMP3List start")
        monitor_add('GetMP3List entry')
        if request.type != "special" and request.type != "standard":
            response.status = 100
            response.msg = "type field invalid"

        json_req = {
            "handleClass": "com.bison.GetMP3List",
            "requestBody": {
                "type": request.type
            }
        }
        json_ret = self.callmethod_tcp("Jsoup.jsoup", json_req, self.callback,
                                       10.0)
        if json_ret["ret"] != 0:
            response.status = 100
            response.msg = json_ret["errmsg"]
        else:
            if json_ret["data"]["status"] != 0:
                response.status = 100
                response.msg = "jsoup returns " + str(
                    json_ret["data"]["status"])
            else:
                response.status = 0
                response.msg = "success"
                log_info("GetMP3List successfully")
                monitor_add("GetMP3List succ")
                for mp3 in json_ret["data"]["mp3s"]:
                    one_mp3 = response.mp3s.add()
                    one_mp3.url = mp3["url"]
                    one_mp3.title = mp3["title"]

        # Serialize the response body
        if is_json:
            return json.dumps(protobuf_json.pb2json(response))
        else:
            return response.SerializeToString()
Example 28
def disk_replace(disk_name):
    if request.headers.get('Content-Type') != "application/json":
        MESSAGE("Unsupport Content-Type, default:'application/json'")
        abort(406)
    if not request.json or 'dev_name' not in request.json:
        MESSAGE("Miss dev_name field")
        abort(400)

    mds_request = MakeRequest(msg_mds.DISK_REPLACE_REQUEST)
    mds_request.body.Extensions[
        msg_mds.disk_replace_request].disk_name = disk_name
    mds_request.body.Extensions[
        msg_mds.disk_replace_request].dev_name = request.json['dev_name']
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
def loadingfinished_handler(loadingfinished_handle_list, world_id, amazon_fd,
                            world_fd):
    db_conn = psycopg2.connect(
        "dbname='postgres' user='******' password='******' "
        "host='" + db_host + "' port='" + db_port + "'")
    db_cur = db_conn.cursor()
    loading_status_update(loadingfinished_handle_list, world_id, amazon_fd,
                          world_fd)
    UCommands = world_ups_pb2.UCommands()
    #mutex.acquire()
    for loadingfinished in loadingfinished_handle_list:
        go_deliver = UCommands.deliveries.add()
        go_deliver.truckid = loadingfinished.truckid
        go_deliver.seqnum = get_seqnum()
        db_cur.execute("select location_x, location_y from package"
                       " where worldid = '" + str(world_id) +
                       "' and truckid = '" + str(go_deliver.truckid) +
                       "' and status = 'L'")
        rows = db_cur.fetchall()
        for row in rows:
            package = go_deliver.packages.add()
            package.packageid = loadingfinished.packageid
            package.x = int(row[0])
            package.y = int(row[1])
            db_cur.execute("update package set status = 'O' "
                           "where truckid = '" + str(go_deliver.truckid) +
                           "' and worldid = '" + str(world_id) +
                           "' and packageid = '" + str(package.packageid) +
                           "'")
            db_conn.commit()
            db_cur.execute("update truck set status = 'O'  "
                           " where truckid = '" + str(go_deliver.truckid) +
                           "' and worldid = '" + str(world_id) +
                           "' and packageid = '" + str(package.packageid) +
                           "'")
            db_conn.commit()
    #while True:
    seqnum = get_seqnum()
    UCommands.acks.append(seqnum)
    json_msg = pb2json(UCommands)
    db_cur.execute("""INSERT INTO world_ack (seqnum,message) VALUES(%s,%s);""",
                   (seqnum, json.dumps(json_msg)))
    db_conn.commit()
    send_to_world(UCommands, world_fd)
    #    if go_deliver.seqnum in world_ack_list:
    #        break
    db_conn.commit()
    return
Example 30
def qos_link(template_name):
    if request.headers.get('Content-Type') != "application/json":
        MESSAGE("Unsupport Content-Type, default:'application/json'")
        abort(406)

    if not request.json or 'lun_name' not in request.json:
        MESSAGE("Miss lun_name field")
        abort(400)

    mds_request = MakeRequest(msg_mds.LINK_QOS_TEMPLATE_REQUEST)
    mds_request.body.Extensions[
        msg_mds.link_qos_template_request].template_name = template_name
    mds_request.body.Extensions[
        msg_mds.link_qos_template_request].lun_name = request.json["lun_name"]
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
Example 31
def pool_resize(pool_name):
    if request.headers.get('Content-Type') != "application/json":
        MESSAGE("Unsupport Content-Type, default:'application/json'")
        abort(406)
    if not request.json or 'size' not in request.json:
        MESSAGE("Miss size field")
        abort(400)
    if not str(request.json['size']).isdigit():
        MESSAGE("Param size illegal")
        abort(400)
    mds_request = MakeRequest(msg_mds.POOL_RESIZE_REQUEST)
    mds_request.body.Extensions[
        msg_mds.pool_resize_request].pool_name = pool_name
    mds_request.body.Extensions[msg_mds.pool_resize_request].size = int(
        request.json['size'])
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
Example 32
def pool_synclevel(pool_name):
    if request.headers.get('Content-Type') != "application/json":
        MESSAGE("Unsupport Content-Type, default:'application/json'")
        abort(406)
    if not request.json or 'sync_level' not in request.json:
        MESSAGE("Miss sync_level field")
        abort(400)
    if not str(request.json['sync_level']).isdigit() or int(
            request.json['sync_level']) > 10:
        MESSAGE("Param sync_level illegal, support 0-10")
        abort(400)
    mds_request = MakeRequest(msg_mds.POOL_CONFIG_REQUEST)
    mds_request.body.Extensions[
        msg_mds.pool_config_request].pool_name = pool_name
    mds_request.body.Extensions[msg_mds.pool_config_request].sync_level = int(
        request.json['sync_level'])
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
Example 33
def pool_add():
    if request.headers.get('Content-Type') != "application/json":
        MESSAGE("Unsupport Content-Type, default:'application/json'")
        abort(406)
    if not request.json or 'disk_names' not in request.json:
        MESSAGE("Miss disk_names field")
        abort(400)
    if type(request.json['disk_names']) != type([]):
        MESSAGE("Illegal disk_names field")
        abort(400)
    if len(request.json['disk_names']) != 1:
        MESSAGE("Max support one disk now")
        abort(400)
    if 'extent' in request.json and not str(request.json['extent']).isdigit():
        MESSAGE("Illegal extent field")
        abort(400)
    if 'bucket' in request.json and not str(request.json['bucket']).isdigit():
        MESSAGE("Illegal bucket field")
        abort(400)
    if 'sippet' in request.json and not str(request.json['sippet']).isdigit():
        MESSAGE("Illegal sippet field")
        abort(400)
    is_variable = False
    if 'is_variable' in request.json:
        if type(request.json['is_variable']) != type(True):
            MESSAGE("Illegal is_variable field, support:'true/false'")
            abort(400)
        is_variable = request.json['is_variable']
    mds_request = MakeRequest(msg_mds.POOL_ADD_REQUEST)
    mds_request.body.Extensions[msg_mds.pool_add_request].disk_names.append(
        request.json['disk_names'][0])
    mds_request.body.Extensions[
        msg_mds.pool_add_request].is_variable = is_variable
    if 'extent' in request.json:
        mds_request.body.Extensions[msg_mds.pool_add_request].extent = int(
            str(request.json['extent']))
    if 'bucket' in request.json:
        mds_request.body.Extensions[msg_mds.pool_add_request].bucket = int(
            str(request.json['bucket']))
    if 'sippet' in request.json:
        mds_request.body.Extensions[msg_mds.pool_add_request].sippet = int(
            str(request.json['sippet']))
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
Example 34
def jsonifyAllUserEvents(protobufFilename, outputFilename):

    userEventsBatch = ue.BBMUserEventBatch()
    try:
        f = open(protobufFilename, "rb")
        userEventsBatch.ParseFromString(f.read())
        f.close()
    except IOError:
        print protobufFilename + ": Could not open file.  Exiting"
        sys.exit(1)

    try:
        f = open(outputFilename, "w")
        json_obj = protobuf_json.pb2json(userEventsBatch)
        # Write real JSON rather than the Python repr of the dict
        # (assumes the json module is imported at the top of the file).
        f.write(json.dumps(json_obj))
        f.close()
    except IOError:
        print outputFilename + ": Could not open file.  Exiting"
        sys.exit(1)
Example 35
def do_inference(hostport):
  host, port = hostport.split(':')
  channel = implementations.insecure_channel(host, int(port))
  stub = parsey_api_pb2.beta_create_ParseyService_stub(channel)

  while 1 :
    try : line = sys.stdin.readline()
    except KeyboardInterrupt : break
    if not line : break
    line = line.strip()
    request = parsey_api_pb2.ParseyRequest()
    request.text.append(line)
    response = stub.Parse(request, 5.0) # timeout 5 seconds
    print response
    json_obj=protobuf_json.pb2json(response)
    ret = json.dumps(json_obj,ensure_ascii=False,encoding='utf-8')
    print "Input : ", line
    print "Parsing :"
    print ret
Example 36
def diskgroup_alter(diskgroup_name):
    if request.headers.get('Content-Type') != 'application/json':
        MESSAGE("Unsupport Content-Type, default:'application/json'")
        abort(406)

    if not request.json or 'rebalance' not in request.json:
        MESSAGE("Miss rebalance field")
        abort(400)

    if not str(request.json['rebalance']).isdigit():
        MESSAGE("Illegal rebalance field")
        abort(400)

    mds_request = MakeRequest(msg_mds.DISKGROUP_ALTER_REQUEST)
    mds_request.body.Extensions[
        msg_mds.diskgroup_alter_request].diskgroup_name = diskgroup_name
    mds_request.body.Extensions[
        msg_mds.diskgroup_alter_request].rebalance_power = int(
            request.json['rebalance'])
    mds_response = send(mds_request)
    return get_response(protobuf_json.pb2json(mds_response), 200)
def replace_location(db_id, coll_id):

    experiment_collection = experiment_results_pb2.Experiment()

    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except:
        return json.dumps("Unable to connect to the database!")

    try:
        experiment_collection.ParseFromString(request.data)
    except:
        return json.dumps('Message is not well defined!')

    db_names = connection.database_names()
    if db_id not in db_names:
        return json.dumps("Database doesn't exist!")  
    
    db = connection[db_id]
    coll_names = db.collection_names()
    if coll_id not in coll_names:
        return json.dumps("Collection doesn't exist!")  

    collection = db[coll_id]
    try:
        collection_backup = collection.find_one({})
        collection.remove()
    except:
        return json.dumps("Unable to read data from the database!")

    try:
        collection.insert(protobuf_json.pb2json(experiment_collection))
    except:
        collection.insert(collection_backup)
        return json.dumps("Unable to store data into the database!")

    return json.dumps('Message successfully replaced!')
def lookup_entities(client, identifiers):
    """Search for entities by phone number, email, or gaia_id."""

    # Instantiate a GetEntityByIdRequest Protocol Buffer message describing the
    # request.

    lookup_dicts = [{'phone': sys.argv[1], 'create_offnetwork_gaia': True}]
    request = hangups.hangouts_pb2.GetEntityByIdRequest(
        request_header=client.get_request_header(),
        batch_lookup_spec=[hangups.hangouts_pb2.EntityLookupSpec(**d)
                           for d in lookup_dicts],
    )

    try:
        # Make the request to the Hangouts API.
        res = yield from client.get_entity_by_id(request)

        # Print the list of entities in the response.
        for entity_result in res.entity_result:
            print(json.dumps(protobuf_json.pb2json(entity_result)))

    finally:
        # Disconnect the hangups Client to make client.connect return.
        yield from client.disconnect()
Example 39
def replace_message(db_id, coll_id, data_id):

    raw_data_collection = raw_data_pb2.RawRFReadingCollection()
    raw_metadata = raw_metadata_pb2.Metadata()

    # Connect to the database MongoDB
    try:
        connection = Connection("localhost", 27017)
    except:
        return json.dumps("Unable to connect to the database!")

    detect_message = 0
    try:
        raw_data_collection.ParseFromString(request.data)
        detect_message = 1
    except:
        try:
            raw_metadata.ParseFromString(request.data)
            detect_message = 2
        except:
            return json.dumps("Message is not well defined!")

    db_names = connection.database_names()
    if db_id not in db_names:
        return json.dumps("Database doesn't exist!")

    db = connection[db_id]
    coll_names = db.collection_names()
    if coll_id not in coll_names:
        return json.dumps("Collection doesn't exist!")

    collection = db[coll_id]

    try:
        message_collection = collection.find_one({"data_id": data_id})
    except:
        return json.dumps("Unable to read data from the collection!")

    if message_collection is None:
        return json.dumps("No data with this ID in the collection!")

    message_collection["_id"] = str(message_collection["_id"])
    message_backup = message_collection

    try:
        collection.remove({"data_id": data_id})
    except:
        collection.insert(message_backup)
        return json.dumps("Unable to read data from the database!")

    if detect_message == 1:
        try:
            collection.insert(protobuf_json.pb2json(raw_data_collection))
        except:
            collection.insert(message_backup)
            return json.dumps("Unable to store data into the collection!")
    else:
        try:
            collection.insert(protobuf_json.pb2json(raw_metadata))
        except:
            collection.insert(message_backup)
            return json.dumps("Unable to store data into the collection!")

    return json.dumps("Message successfully replaced!")
def main():
    gtfs_realtime = gtfs_realtime_pb2.FeedMessage()
    gtfs_realtime.ParseFromString(urllib2.urlopen(sys.argv[1]).read())
    print json.dumps(protobuf_json.pb2json(gtfs_realtime), separators=(',',':'))
    return
import os, sys, json, log_definition_pb2
from pprint import pprint
import protobuf_json

msg = log_definition_pb2.logdef()
msg.format_name = "test"
msg.delims = " "

pprint(msg.SerializeToString())

json_obj=protobuf_json.pb2json(msg)
print json_obj
Example 42
#!/usr/bin/python
import os, sys, json
from pprint import pprint
sys.path.extend(['../protobuf-json-read-only','./pb'])
import protobuf_json
import nyct_subway_pb2 as nyct
import gtfs_realtime_pb2 as gtfs
input = sys.stdin.read()
fm = gtfs.FeedMessage()
fm.ParseFromString(input)
json_obj = protobuf_json.pb2json(fm)
print json_obj

Example 43
def ListColumns(table):
	for column in table.column:
		header = column.header.data
		print 'Column header : ', header
		for data in column.data:
			try:
				print 'Data : ', data.data
			except Exception:
				pass


if len(sys.argv) != 2:
	print "Usage: ", sys.argv[0], "TABLE_FILE"
	sys.exit(-1)

table = table_pb2.Table()

f = open(sys.argv[1], "rb")
table.ParseFromString(f.read())
f.close()

# ListColumns(table)
json_obj = protobuf_json.pb2json(table)
print 'Rows: ', len(table.column[0].data)
print 'Columns : ', len(table.column)
# print str(json_obj)
parsed = json.loads(json.dumps(json_obj))
print json.dumps(parsed, indent=4, sort_keys=True)
# print json_obj

Example 44
#!/usr/bin/python

import os, sys, json
from pprint import pprint

import protobuf_json

import test_pb2 as pb_test

# create and fill test message
pb=pb_test.TestMessage()
pb.id=123
pb.flag=True
for i in xrange(1000):
    msgs=pb.nested_msgs.add()
    msgs.id=456
    msgs.title="test title"
    msgs.url="http://localhost/"

for i in xrange(100):
    pb.rep_int.append(i)

json_obj=protobuf_json.pb2json(pb)

with open('message.proto', 'w') as f:
    f.write(pb.SerializeToString())

with open('message.json', 'w') as f:
    f.write(json.dumps(json_obj))

Example 45
experiment_results.primary_metrics.accuracy_error_2D_max = 0.0;           # Max 2D error of the geo. accuracy of all points in the experiment
experiment_results.primary_metrics.accuracy_error_2D_median = 0.0;
experiment_results.primary_metrics.accuracy_error_2D_rms = 0.0;
experiment_results.primary_metrics.accuracy_error_2D_75_percentile = 0.0;
experiment_results.primary_metrics.accuracy_error_2D_90_percentile = 0.0;
experiment_results.primary_metrics.accuracy_error_3D_average = 0.0;       # Average 3D error of the geo. accuracy of all points in the experiment
experiment_results.primary_metrics.accuracy_error_3D_variance = 0.0;      # 3D error variance of the geo. accuracy of all points in the experiment
experiment_results.primary_metrics.accuracy_error_3D_min = 0.0;           # Min 3D error of the geo. accuracy of all points in the experiment
experiment_results.primary_metrics.accuracy_error_3D_max = 0.0;           # Max 3D error of the geo. accuracy of all points in the experiment
experiment_results.primary_metrics.accuracy_error_2D_median = 0.0;
experiment_results.primary_metrics.accuracy_error_2D_rms = 0.0;
experiment_results.primary_metrics.accuracy_error_2D_75_percentile = 0.0;
experiment_results.primary_metrics.accuracy_error_2D_90_percentile = 0.0;
experiment_results.primary_metrics.room_accuracy_error_average = 0.0;     # Average room accuracy error
experiment_results.primary_metrics.latency_average = 0.0;                 # Average latency
experiment_results.primary_metrics.latency_variance = 0.0;                # Latency variance
experiment_results.primary_metrics.latency_min = 0.0;                     # Latency min
experiment_results.primary_metrics.latency_max = 0.0;                     # Latency max
experiment_results.primary_metrics.latency_median = 0.0;
experiment_results.primary_metrics.latency_rms = 0.0;
experiment_results.primary_metrics.latency_75_percentile = 0.0;
experiment_results.primary_metrics.latency_90_percentile = 0.0;


obj = json.dumps(protobuf_json.pb2json(experiment_results))

req = RequestWithMethod(apiURL + 'evarilos/metrics/v1.0/database/' + db_id  + '/experiment/' + exp_id, 'POST', headers={"Content-Type": "application/json"}, data = obj)
response = urllib2.urlopen(req)
message = json.loads(response.read())
print message
Example 46
    def jsonDump(self):
        import json
        # local files
        import protobuf_json
        self.json = json.dumps(protobuf_json.pb2json(self.feed), separators=(',', ':'))
Example 47
                    ## '\t1 ||| 0=8.86276 1=2 3\n'
                    ## N.B.: can't just strip! "\t... ||| ... ||| \n" => 2 fields instead of 3
                    tails, rule, fields = handle.readline().strip("\t\n").split(" ||| ")


                    tails = tails.split() 
                    tailnodes = []
                    
                    for x in tails:
                        if x[0]=='"': 
                            pass
                        else: 
                            tailnodes.append(int(x))

                    for t in tailnodes:
                        edge.tail_node_ids.append(t)
                    edge.label = rule.decode('UTF-8')
                               
                    edge.Extensions[edge_fv] = fields

            forest.root = node.id
            line = handle.readline()
            yield forest


if __name__ == "__main__":
    for f in load(sys.stdin):
        print f
        print json.dumps(pb2json(f), indent=2)
Example 48
def ProtoToKVS(pb, collection_name):
    kvs = protobuf_json.pb2json(pb)
    if collection_name in (RW_USERS,) and pb.id:
        kvs['_id'] = pb.id
        del kvs['id']
    return kvs
Example 49
def ToDict(pb):
    """protobuf -> dictionary.
    Wrapper around protobuf_json module so that other modules in our project don't use it."""
    return protobuf_json.pb2json(pb)
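
ToDict covers only the protobuf-to-dictionary direction. A sketch of the reverse helper, assuming the same protobuf_json module and the json2pb(pb, json_obj) call shown in Example 15 (FromDict is a hypothetical name, not part of the original module):

def FromDict(pb_class, d):
    """dictionary -> protobuf (illustrative counterpart to ToDict)."""
    # json2pb fills a freshly constructed message from a plain dictionary.
    return protobuf_json.json2pb(pb_class(), d)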