Example #1
def start():
    # Connect to MongoDB, query the tokens, and fetch the latest data
    # from etherscan by contractAddress
    client = MongoCluster().connect()
    db = client.get_database('gse-transaction')
    collection = db.get_collection('mrout_6000001-6001000')
    # collection.insert_one()

    # Connect to HDFS and read files
    from pyhdfs import HdfsClient
    client2 = HdfsClient(hosts='%s:50070' % hdfs_ip, max_tries=10)
    # Print this user's home directory
    print(client2.get_home_directory())
    # Print the currently active namenode
    print(client2.get_active_namenode())
    # List all files under the given directory
    print(client2.listdir("/user/leon/mrout_3_6000001-6001000/"))
    # Make sure the working directory exists, then open a file for reading
    client2.mkdirs("/user/leon")
    inputfile = client2.open('/user/leon/mrout_3_6000001-6001000/part-00000')
    # Print the file contents; open() yields raw bytes, so decode each line
    for r in inputfile:
        line = r.decode('utf-8')
        print(line)
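The opening comment describes writing etherscan results back into MongoDB, but the snippet only opens the collection. A minimal sketch of the missing step, streaming an HDFS part file into the collection; the tab-separated field layout and the document shape here are assumptions, not part of the original code:

def load_part_file_into_mongo(client2, collection, path):
    # Stream an HDFS part file and insert one document per parsed line.
    # ASSUMPTION: lines look like "contractAddress<TAB>value".
    for raw in client2.open(path):
        fields = raw.decode('utf-8').rstrip('\n').split('\t')
        if len(fields) >= 2:
            collection.insert_one({
                'contractAddress': fields[0],
                'value': fields[1],
            })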
Example #2
import tensorflow as tf
from tensorflow.python.saved_model import tag_constants
from pyhdfs import HdfsClient


def inference(image_path, model_path, img_w, img_h, channel, out_result, net, **kwargs):
    # Resolve the active namenode and build the hdfs:// base URL (RPC port 9000)
    client = HdfsClient(hosts='172.16.18.112,172.16.18.114', user_name='hadoop')
    active_namenode = client.get_active_namenode()
    HDFS_HOSTS = "hdfs://" + active_namenode.split(":")[0] + ":" + '9000'

    # img = Image.open(image_path)
    # image_test = np.asarray(np.resize(img, (1, img_w, img_h, channel)))

    image_path = HDFS_HOSTS + image_path + '/data/1.jpg'

    # Read and preprocess the image directly from HDFS
    image = tf.read_file(image_path)
    image = tf.image.decode_jpeg(image, 1)
    image = tf.image.resize_image_with_crop_or_pad(image, img_w, img_h)
    image = tf.image.per_image_standardization(image)
    # image = np.asarray(np.resize(image, (1, 32, 32, 1)))
    image = tf.reshape(image, [1, img_w, img_h, channel])
    with tf.Session() as sess:
        image_test = sess.run(image)

    hdfs_model_path = HDFS_HOSTS + model_path + '/model/'

    tf.reset_default_graph()  # Clear the default graph stack and reset the global default graph
    with tf.Session() as sess:
        meta_graph_def = tf.saved_model.loader.load(sess, [tag_constants.SERVING], hdfs_model_path)

        # Pull the SignatureDef map out of the MetaGraphDef
        signature = meta_graph_def.signature_def

        # Look up the concrete input and output tensor names from the SignatureDef
        images_placeholder = signature['test_signature'].inputs['input_x'].name
        embeddings = signature['test_signature'].outputs['outputs'].name

        output = sess.run(embeddings, feed_dict={images_placeholder: image_test})
        output = sess.run(tf.nn.softmax(output))

        y = tf.argmax(output, axis=1)
        y_pred = sess.run(y)
        print(y_pred)
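Calling the function only requires HDFS-relative paths and the image geometry; the namenode prefix is resolved inside. A minimal usage sketch, where the paths and sizes are hypothetical placeholders rather than values from the original (out_result and net are unused by the body, so None suffices here):

if __name__ == '__main__':
    # Hypothetical HDFS-relative job directory; the function appends
    # '/data/1.jpg' and '/model/' to these paths itself
    inference(image_path='/user/hadoop/demo',
              model_path='/user/hadoop/demo',
              img_w=32, img_h=32, channel=1,
              out_result=None, net=None)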
Example #3
import tensorflow as tf
from pyhdfs import HdfsClient
# Port configuration
cfg_http_port = '50070'
cfg_rpc_port = '9000'
user_name = 'hadoop'
hosts = '172.16.18.112,172.16.18.114'
# Dataset API URLs
url1 = 'http://test.cop.com/vbap3/dsc/dataSetDefs/updateByOther'
url2 = 'http://test.cop.com/vbap3/dsc/dataSetDefs/uploadFields'
# HDFS configuration: resolve the active namenode, then build the RPC base URL
client = HdfsClient(hosts=hosts, user_name=user_name)
active_namenode = client.get_active_namenode()
HDFS_HOSTS = "hdfs://" + active_namenode.split(":")[0] + ":" + cfg_rpc_port

# Same again for the HTTP (WebHDFS) port
client1 = HdfsClient(hosts=hosts, user_name=user_name)
active_namenode = client1.get_active_namenode()
HDFS_HOSTS1 = "hdfs://" + active_namenode.split(":")[0] + ":" + cfg_http_port
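The two base URLs cover different access paths: port 9000 is the NameNode RPC endpoint that TensorFlow's file APIs can read through, while port 50070 is WebHDFS, which pyhdfs talks to directly. A minimal sketch of using each, assuming the file path is a hypothetical placeholder and TensorFlow was built with HDFS support:

# Read a file through TensorFlow's HDFS support via the RPC base URL
with tf.Session() as sess:
    raw_tensor = tf.read_file(HDFS_HOSTS + '/user/hadoop/demo/data/1.jpg')
    raw = sess.run(raw_tensor)

# Read the same file over WebHDFS with pyhdfs; no URL prefix is needed,
# since the client already connects to the namenode's HTTP port
with client.open('/user/hadoop/demo/data/1.jpg') as f:
    raw = f.read()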