Code example #1
0
# Load the MNIST t10k test set via the project reader objects and build a
# batched tf.data pipeline for quantization/inference.

need_convert = False
run_inference = True

# Create the output directory for the converted TFLite model.
# exist_ok=True replaces the isdir()-then-makedirs() pattern, which was both
# un-Pythonic (`is False`) and race-prone (TOCTOU between check and create).
os.makedirs(os.path.join(os.getcwd(), "tflite"), exist_ok=True)

model_path = "./tflite/converted_model.tflite"

base_path = os.getcwd()
base_inference_path = os.path.join(base_path, "dataset")
inference_image_path = os.path.join(base_inference_path,
                                    "t10k-images-idx3-ubyte")
inference_label_path = os.path.join(base_inference_path,
                                    "t10k-labels-idx1-ubyte")
# NOTE(review): inference_labels / inference_images are project-defined IDX
# readers; assumed to expose get_images_number()/read_images()/read_labels().
inference_label_obj = inference_labels(inference_label_path)
inference_image_obj = inference_images(inference_image_path)

raw_Data = []
raw_Label = []
# Read the full test set one record at a time; the reader API returns a
# list per call, hence the [0].
quan_steps = inference_image_obj.get_images_number()
for _ in range(quan_steps):
    raw_Data.append(inference_image_obj.read_images(1)[0])
    raw_Label.append(inference_label_obj.read_labels(1)[0])

batchsize = 128

# Convert to float32 tensors and batch; drop_remainder keeps every batch a
# fixed shape (needed by fixed-shape TFLite conversion).
train = tf.convert_to_tensor(np.array(raw_Data, dtype='float32'))
train_label = tf.convert_to_tensor(np.array(raw_Label, dtype='float32'))
dataset = tf.data.Dataset.from_tensor_slices(
    (train, train_label)).batch(batchsize, drop_remainder=True)
Code example #2
0
from tensorflow.python.platform import gfile
from tensorflow.python.client import timeline

# This script drives a TF1-style graph with tf.compat.v1.Session below, so
# eager execution must be switched off before any graph/session is built.
tf.compat.v1.disable_eager_execution()

if __name__ == "__main__":
    print("Begin inference!")
    #base_path = "/home/mnist_dataset"
    base_path = os.getcwd()
    base_inference_path = os.path.join(base_path, "dataset")
    inference_image_path = os.path.join(base_inference_path,
                                        "t10k-images-idx3-ubyte")
    inference_label_path = os.path.join(base_inference_path,
                                        "t10k-labels-idx1-ubyte")
    inference_labels = inference_labels(inference_label_path)
    inference_images = inference_images(inference_image_path)
    input_image_size = int(inference_images.get_row_number()) * int(
        inference_images.get_column_number())
    right_count = 0
    batchsize = 128
    total_time_ms = 0
    global_step = 0

    config = tf.compat.v1.ConfigProto(intra_op_parallelism_threads=28,
                                      inter_op_parallelism_threads=1)

    with tf.compat.v1.Session(config=config) as sess:
        # saver = tf.train.import_meta_graph(os.path.join(base_path,"train_data/checkPoint/trainModel.meta"))
        # saver.restore(sess, tf.train.latest_checkpoint(os.path.join(base_path,"train_data/checkPoint")))

        with gfile.FastGFile(