def main():
    """Parse converter CLI arguments and invoke the native MNN converter.

    Returns:
        0 on success, -1 when a required input file is missing or the
        framework is not recognized.
    """
    def _str2bool(value):
        # BUG FIX: the original used type=bool, but argparse feeds the raw
        # string to the type callable and bool("False") is True (any
        # non-empty string is truthy), so "--fp16 False" enabled fp16.
        # Parse the text explicitly while keeping the same call style.
        if isinstance(value, bool):
            return value
        if value.lower() in ("true", "1", "yes"):
            return True
        if value.lower() in ("false", "0", "no"):
            return False
        raise argparse.ArgumentTypeError("boolean value expected, got %r" % value)

    # Numeric codes the native converter expects for each source framework.
    framework_codes = {'TF': 0, 'CAFFE': 1, 'ONNX': 2, 'TFLITE': 4, 'MNN': 3}
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--framework", type=str,
                        choices=list(framework_codes.keys()), default='TF',
                        required=True,
                        help="model type, for example:TF/CAFFE/ONNX/TFLITE/MNN")
    parser.add_argument("--modelFile", type=str, required=True,
                        help="tensorflow Pb or caffeModel, for example:xxx.pb/xxx.caffemodel")
    parser.add_argument("--prototxt", type=str,
                        help="only used for caffe, for example: xxx.prototxt")
    parser.add_argument("--MNNModel", type=str, required=True,
                        help="MNN model, ex: xxx.mnn")
    parser.add_argument("--fp16", type=_str2bool, default=False,
                        help="{True,False} Boolean to change the mnn usage. "
                             "If True, the output model save data in half_float type")
    args = parser.parse_args()
    # Dict lookup replaces the original if/elif chain, which inconsistently
    # compared args.framework == 'TF' without .upper() while every other
    # branch used .upper().
    framework = args.framework.upper()
    if framework in framework_codes:
        framework_type = framework_codes[framework]
    else:
        usage()
        return -1
    if args.modelFile is None or not os.path.exists(args.modelFile):
        print("modelfile not exist")
        return -1
    if args.MNNModel is None:
        usage()
        return -1
    if framework == 'CAFFE':
        if args.prototxt is None or not os.path.exists(args.prototxt):
            print("prototxt file not exist")
            return -1
    else:
        ### just cheat with a not exist name ###
        args.prototxt = "NA.mnn"
    Tools.mnnconvert(args.MNNModel, args.modelFile, framework_type,
                     args.fp16, args.prototxt)
    return 0
def main():
    """Invoke the native converter, then best-effort log conversion stats.

    Always returns 0.  Telemetry is emitted only when ``mnn_logger`` is
    available and both --modelFile and --MNNModel were supplied.
    """
    Tools.mnnconvert(sys.argv)
    arg_dict = parse_args()
    # Guard clauses: bail out of telemetry early instead of nesting.
    if mnn_logger is None:
        return 0
    if "modelFile" not in arg_dict or "MNNModel" not in arg_dict:
        return 0
    # Read the guid before removing the paths from the reported args.
    model_guid = MNN.get_model_uuid(arg_dict["MNNModel"])
    src_path = arg_dict.pop("modelFile")
    dst_path = arg_dict.pop("MNNModel")
    src_model_size = os.path.getsize(src_path) / 1024.0 / 1024.0
    dst_model_size = os.path.getsize(dst_path) / 1024.0 / 1024.0
    log_dict = {
        "tool": "mnnconvert_python",
        "model_guid": model_guid,
        "detail": {
            "args": arg_dict,
            "src_model_size": src_model_size,
            "dst_model_size": dst_model_size,
            "compress_rate": src_model_size / dst_model_size,
        },
    }
    mnn_logger.put_log(log_dict, "convert")
    return 0
def main():
    """Parse converter CLI arguments and invoke the native MNN converter.

    Returns:
        0 on success, -1 when a required input file is missing.
    """
    def _str2bool(value):
        # BUG FIX: the original used type=bool, but argparse feeds the raw
        # string to the type callable and bool("False") is True (any
        # non-empty string is truthy).  Parse the text explicitly while
        # keeping the same "--flag <value>" call style.
        if isinstance(value, bool):
            return value
        if value.lower() in ("true", "1", "yes"):
            return True
        if value.lower() in ("false", "0", "no"):
            return False
        raise argparse.ArgumentTypeError("boolean value expected, got %r" % value)

    # Numeric codes the native converter expects for each source framework.
    framework_map = {'TF': 0, 'CAFFE': 1, 'ONNX': 2, 'TFLITE': 4, 'MNN': 3}
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--framework", type=str,
                        choices=list(framework_map.keys()), default='TF',
                        required=True, help="model type")
    parser.add_argument("--modelFile", type=str, required=True,
                        help="tensorflow Pb or caffeModel, for example:xxx.pb/xxx.caffemodel")
    parser.add_argument("--prototxt", type=str,
                        help="only used for caffe, for example: xxx.prototxt")
    parser.add_argument("--MNNModel", type=str, required=True,
                        help="MNN model, ex: xxx.mnn")
    parser.add_argument("--bizCode", type=str, required=True,
                        help="bizcode, ex: MNN")
    parser.add_argument("--fp16", type=_str2bool, default=False,
                        help="{True,False} Boolean to change the mnn usage. "
                             "If True, the output model save data in half_float type")
    parser.add_argument("--weightQuantBits", type=int, default=0)
    parser.add_argument("--weightQuantAsymmetric", type=_str2bool, default=False)
    parser.add_argument("--compressionParamsFile", type=str, default=None,
                        help="The path of model compression file that stores the int8 calibration \
table for quantization or auxiliary parameters for sparsity.")
    args = parser.parse_args()
    framework_type = framework_map[args.framework]
    if args.modelFile is None or not os.path.exists(args.modelFile):
        print("modelfile not exist")
        return -1
    if args.MNNModel is None:
        # BUG FIX: the original wrote parser.print_help(sys.stderr)() --
        # print_help returns None, so the trailing () raised TypeError.
        parser.print_help(sys.stderr)
        return -1
    if args.framework.upper() == 'CAFFE':
        if args.prototxt is None or not os.path.exists(args.prototxt):
            print("prototxt file not exist")
            return -1
    else:
        ### just cheat with a not exist name ###
        args.prototxt = "NA.mnn"
    if args.compressionParamsFile is not None and \
            not os.path.exists(args.compressionParamsFile):
        print("Compression params file not exist.")
        return -1
    if args.compressionParamsFile is None:
        args.compressionParamsFile = ""
    Tools.mnnconvert(args.MNNModel, args.modelFile, framework_type,
                     args.fp16, args.prototxt, args.weightQuantBits,
                     args.weightQuantAsymmetric, args.compressionParamsFile,
                     args.bizCode)
    return 0
def main():
    """Parse converter CLI arguments and invoke the native MNN converter.

    Returns:
        0 on success, -1 when a required input file is missing or the
        framework is not recognized.
    """
    def _str2bool(value):
        # BUG FIX: the original used type=bool, but argparse feeds the raw
        # string to the type callable and bool("False") is True (any
        # non-empty string is truthy), so "--fp16 False" enabled fp16.
        # Parse the text explicitly while keeping the same call style.
        if isinstance(value, bool):
            return value
        if value.lower() in ("true", "1", "yes"):
            return True
        if value.lower() in ("false", "0", "no"):
            return False
        raise argparse.ArgumentTypeError("boolean value expected, got %r" % value)

    # Numeric codes the native converter expects for each source framework.
    framework_codes = {'TF': 0, 'CAFFE': 1, 'ONNX': 2, 'TFLITE': 4, 'MNN': 3}
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--framework", type=str,
                        choices=list(framework_codes.keys()), default='TF',
                        required=True,
                        help="model type, for example:TF/CAFFE/ONNX/TFLITE/MNN")
    parser.add_argument("--modelFile", type=str, required=True,
                        help="tensorflow Pb or caffeModel, for example:xxx.pb/xxx.caffemodel")
    parser.add_argument("--prototxt", type=str,
                        help="only used for caffe, for example: xxx.prototxt")
    parser.add_argument("--MNNModel", type=str, required=True,
                        help="MNN model, ex: xxx.mnn")
    parser.add_argument("--fp16", type=_str2bool, default=False,
                        help="{True,False} Boolean to change the mnn usage. "
                             "If True, the output model save data in half_float type")
    parser.add_argument("--weightQuantBits", type=int, default=0)
    parser.add_argument("--compressionParamsFile", type=str, default=None,
                        help="The path of model compression file that stores the int8 calibration \
table for quantization or auxiliary parameters for sparsity.")
    args = parser.parse_args()
    # Dict lookup replaces the original if/elif chain, which inconsistently
    # compared args.framework == 'TF' without .upper() while every other
    # branch used .upper().
    framework = args.framework.upper()
    if framework in framework_codes:
        framework_type = framework_codes[framework]
    else:
        usage()
        return -1
    if args.modelFile is None or not os.path.exists(args.modelFile):
        print("modelfile not exist")
        return -1
    if args.MNNModel is None:
        usage()
        return -1
    if framework == 'CAFFE':
        if args.prototxt is None or not os.path.exists(args.prototxt):
            print("prototxt file not exist")
            return -1
    else:
        ### just cheat with a not exist name ###
        args.prototxt = "NA.mnn"
    if args.compressionParamsFile is not None and \
            not os.path.exists(args.compressionParamsFile):
        print("Compression params file not exist.")
        return -1
    if args.compressionParamsFile is None:
        args.compressionParamsFile = ""
    Tools.mnnconvert(args.MNNModel, args.modelFile, framework_type,
                     args.fp16, args.prototxt, args.weightQuantBits,
                     args.compressionParamsFile)
    return 0
def main():
    """Entry point: forward the raw CLI argument vector to the native
    MNN converter and report success."""
    argv = sys.argv
    Tools.mnnconvert(argv)
    return 0