Code Example #1
import numpy as np
import tensorflow as tf  # TF 1.x graph-mode API


def graph_eval(input_graph_def, input_node, output_node):
    # Read test images and their segmentation labels
    x_test, y_test, img_file, seg_file = cnn.get_images_and_labels(
        IMG_TEST_DIR, SEG_TEST_DIR, cfg.NUM_CLASSES, cfg.WIDTH, cfg.HEIGHT)

    # load graph
    tf.import_graph_def(input_graph_def, name='')

    # Get input & output tensors
    x = tf.get_default_graph().get_tensor_by_name(input_node + ':0')
    y = tf.get_default_graph().get_tensor_by_name(output_node + ':0')

    # Run inference in a session
    with tf.Session() as sess:

        sess.run(tf.initializers.global_variables())

        feed_dict = {x: x_test}  #, labels: y_test}
        y_pred = sess.run(y, feed_dict)

    # Calculate intersection over union for each segmentation class
    y_predi = np.argmax(y_pred, axis=3)
    y_testi = np.argmax(y_test, axis=3)
    print(y_testi.shape, y_predi.shape)

    cnn.IoU(y_testi, y_predi)
    #cfg.ShowSegmentedImages(100, x_test, y_testi, y_predi, cfg.NUM_CLASSES) # steps by 20

    print('FINISHED!')
    return x_test, y_testi, y_predi, img_file, seg_file
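
Both examples expect an already-deserialized GraphDef. Below is a minimal usage sketch (not part of the original examples) of how such a graph_def could be loaded from a frozen .pb file and handed to graph_eval(); the file path and node names are placeholders and depend on how the model was exported.

# Usage sketch (assumed, not from the original source): load a frozen graph
# and run the evaluation function above. Path and node names are placeholders.
import tensorflow as tf

FROZEN_PB = 'frozen_graph.pb'               # hypothetical model file
INPUT_NODE = 'input_1'                      # hypothetical input node name
OUTPUT_NODE = 'conv2d_transpose_3/Softmax'  # hypothetical output node name

with tf.io.gfile.GFile(FROZEN_PB, 'rb') as f:
    graph_def = tf.compat.v1.GraphDef()
    graph_def.ParseFromString(f.read())

x_test, y_testi, y_predi, img_file, seg_file = graph_eval(
    graph_def, INPUT_NODE, OUTPUT_NODE)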
Code Example #2
import numpy as np
import tensorflow as tf  # uses the tf.compat.v1 graph-mode API


def graph_eval(input_graph_def, input_node, output_node):
    # Read test images and their segmentation labels
    x_test, y_test, img_file, seg_file = cnn.get_images_and_labels(
        IMG_TEST_DIR, SEG_TEST_DIR, cfg.NUM_CLASSES, cfg.WIDTH, cfg.HEIGHT)

    y_pred2 = np.zeros(y_test.shape)
    y_pred2i = np.zeros((y_test.shape[0], y_test.shape[1], y_test.shape[2]))

    # load graph
    tf.import_graph_def(input_graph_def, name='')

    # Get input & output tensors
    x = tf.compat.v1.get_default_graph().get_tensor_by_name(input_node + ':0')
    y = tf.compat.v1.get_default_graph().get_tensor_by_name(output_node + ':0')

    # Run inference in a session
    with tf.compat.v1.Session() as sess:
        sess.run(tf.compat.v1.initializers.global_variables())
        feed_dict = {x: x_test}
        #y_pred = sess.run(y, feed_dict)   # original code
        logits = sess.run(y, feed_dict)    # new code
        pred_DB = softmax_predict(logits)  # new code
        y_pred = pred_DB.eval()            # new code
        ## alternative way to compute softmax in C++ style
        #for i in range(logits.shape[0]):
        #    y_pred2[i] = cpp_softmax(logits[i], 224, 224, 12)

    # Calculate intersection over union for each segmentation class
    y_predi = np.argmax(y_pred, axis=3)
    y_testi = np.argmax(y_test, axis=3)
    print(y_testi.shape, y_predi.shape)
    cnn.IoU(y_testi, y_predi)

    ## just to check that cpp_softmax works fine
    #y_pred2i = np.argmax(y_pred2, axis=3)
    #print(y_testi.shape, y_pred2i.shape)
    #cfg.IoU(y_testi, y_pred2i)

    print('FINISHED!')
    return x_test, y_testi, y_predi, y_pred2i, img_file, seg_file
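
Example #2 calls two helpers that are not shown in the snippet, softmax_predict and cpp_softmax. Below is one possible implementation, assuming softmax_predict simply wraps tf.nn.softmax over the per-pixel logits and cpp_softmax is a plain NumPy softmax over a single H×W×C logit map, mirroring a C++-style post-processing loop; the real project code may differ.

# Hypothetical implementations of the helpers used in Example #2 (assumed,
# not from the original source).
import numpy as np
import tensorflow as tf


def softmax_predict(logits):
    # Wrap the raw logits (N, H, W, C) in a softmax op; evaluated with
    # .eval() inside the session opened in graph_eval().
    return tf.nn.softmax(logits, axis=-1)


def cpp_softmax(logits, height, width, classes):
    # Per-pixel softmax written with explicit loops, mirroring a C++-style
    # post-processing routine. logits has shape (height, width, classes).
    out = np.zeros((height, width, classes), dtype=np.float32)
    for r in range(height):
        for c in range(width):
            z = logits[r, c, :] - np.max(logits[r, c, :])  # numerical stability
            e = np.exp(z)
            out[r, c, :] = e / np.sum(e)
    return out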