def TensorFlowFrozenParse(architecture_name, image_path):
    from mmdnn.conversion.examples.tensorflow.extractor import tensorflow_extractor
    from mmdnn.conversion.tensorflow.tensorflow_frozenparser import TensorflowParser2

    # get original model prediction result
    original_predict = tensorflow_extractor.inference(architecture_name, None, TestModels.cachedir, image_path, is_frozen=True)
    para = tensorflow_extractor.get_frozen_para(architecture_name)
    del tensorflow_extractor

    # original to IR
    IR_file = TestModels.tmpdir + 'tensorflow_frozen_' + architecture_name + "_converted"
    parser = TensorflowParser2(
        TestModels.cachedir + para[0], para[1], para[2].split(':')[0], para[3].split(':')[0])
    parser.run(IR_file)
    del parser
    del TensorflowParser2

    return original_predict
def TensorFlowFrozenParse(architecture_name, image_path):
    from mmdnn.conversion.examples.tensorflow.extractor import tensorflow_extractor
    from mmdnn.conversion.tensorflow.tensorflow_frozenparser import TensorflowParser2

    # get original model prediction result
    original_predict = tensorflow_extractor.inference(architecture_name, TestModels.cachedir, image_path, is_frozen=True)
    para = tensorflow_extractor.get_frozen_para(architecture_name)
    del tensorflow_extractor

    # original to IR
    IR_file = TestModels.tmpdir + 'tensorflow_frozen_' + architecture_name + "_converted"
    parser = TensorflowParser2(
        TestModels.cachedir + para[0], para[1], para[2].split(':')[0], para[3].split(':')[0])
    parser.run(IR_file)
    del parser
    del TensorflowParser2

    return original_predict
def tensorflow_parse(architecture_name, test_input_path):
    from mmdnn.conversion.examples.tensorflow.extractor import tensorflow_extractor
    from mmdnn.conversion.tensorflow.tensorflow_parser import TensorflowParser

    # get original model prediction result
    original_predict = tensorflow_extractor.inference(architecture_name, None, TestModels.cachedir, test_input_path)
    del tensorflow_extractor

    # original to IR
    IR_file = TestModels.tmpdir + 'tensorflow_' + architecture_name + "_converted"
    parser = TensorflowParser(
        TestModels.cachedir + "imagenet_" + architecture_name + ".ckpt.meta",
        TestModels.cachedir + "imagenet_" + architecture_name + ".ckpt",
        ["MMdnn_Output"])
    parser.run(IR_file)
    del parser
    del TensorflowParser

    return original_predict
def TensorFlowParse(architecture_name, image_path):
    from mmdnn.conversion.examples.tensorflow.extractor import tensorflow_extractor
    from mmdnn.conversion.tensorflow.tensorflow_parser import TensorflowParser

    # get original model prediction result
    original_predict = tensorflow_extractor.inference(architecture_name, TestModels.cachedir, image_path)
    del tensorflow_extractor

    # original to IR
    IR_file = TestModels.tmpdir + 'tensorflow_' + architecture_name + "_converted"
    parser = TensorflowParser(
        TestModels.cachedir + "imagenet_" + architecture_name + ".ckpt.meta",
        TestModels.cachedir + "imagenet_" + architecture_name + ".ckpt",
        None,
        "MMdnn_Output")
    parser.run(IR_file)
    del parser
    del TensorflowParser

    return original_predict
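
# The helper below is a hedged sketch, not part of the original test suite.
# It assumes the usual MMdnn IR layout in which parser.run(IR_file) emits
# IR_file + '.pb' (graph definition) and IR_file + '.npy' (weights); the
# helper name and the choice to raise IOError are illustrative assumptions.
def _assert_ir_files_written(IR_file):
    import os

    # Check that both halves of the intermediate representation were emitted.
    for suffix in ('.pb', '.npy'):
        path = IR_file + suffix
        if not os.path.isfile(path):
            raise IOError("expected IR artifact not found: {}".format(path))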
def TensorFlowFrozenParse(architecture_name, image_path):
    from mmdnn.conversion.examples.tensorflow.extractor import tensorflow_extractor
    from mmdnn.conversion.tensorflow.tensorflow_frozenparser import TensorflowParser2

    # get original model prediction result
    original_predict = tensorflow_extractor.inference(architecture_name, TestModels.cachedir, image_path, is_frozen=True)
    # print(original_predict)
    # assert False
    del tensorflow_extractor

    # original to IR
    IR_file = TestModels.tmpdir + 'tensorflow_' + architecture_name + "_converted"
    parser = TensorflowParser2(
        TestModels.cachedir + "inception_v1_2016_08_28_frozen.pb",
        [224, 224, 3],
        "InceptionV1/Logits/Predictions/Reshape_1:0")
    parser.run(IR_file)
    del parser
    del TensorflowParser2

    return original_predict
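
# The helper below is likewise a hedged sketch rather than original MMdnn test
# code: it assumes numpy is available and that the parse functions above return
# array-like prediction vectors. The helper name and the rtol value are
# illustrative assumptions, not values taken from the repository.
def _check_predictions_close(original_predict, converted_predict, tolerance=1e-3):
    import numpy as np

    # Flatten both outputs so shapes such as (1, N) and (N,) compare cleanly.
    original = np.asarray(original_predict).flatten()
    converted = np.asarray(converted_predict).flatten()

    # Raises an AssertionError with a detailed report when the round trip drifts.
    np.testing.assert_allclose(original, converted, rtol=tolerance)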