Example #1
def convert(conf, output, enable_micro=False):
    # read the optional global quantize_stat flag (defaults to False)
    quantize_stat = conf.get(ModelKeys.quantize_stat, False)
    for model_name, model_conf in conf["models"].items():
        model_output = output + "/" + model_name + "/model"
        org_model_dir = output + "/" + model_name + "/org_model"
        util.mkdir_p(model_output)
        util.mkdir_p(org_model_dir)

        model_conf = normalize_model_config(model_conf)

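        # download (or reuse) the original model file, verifying its sha256 checksum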
        model_file = util.download_or_get_model(
            model_conf[ModelKeys.model_file_path],  # noqa
            model_conf[ModelKeys.model_sha256_checksum],  # noqa
            output + "/" + model_name + "/org_model")
        model_conf[ModelKeys.model_file_path] = model_file
        if ModelKeys.weight_file_path in model_conf:
            weight_file = util.download_or_get_model(
                model_conf[ModelKeys.weight_file_path],
                model_conf[ModelKeys.weight_sha256_checksum], "/tmp/")
            model_conf[ModelKeys.weight_file_path] = weight_file

        # TODO: remove the following after quantize tool is made
        if ModelKeys.quantize_range_file in model_conf:
            range_file = util.download_or_get_model(
                model_conf[ModelKeys.quantize_range_file],
                "", model_output)
            model_conf[ModelKeys.quantize_range_file] = range_file

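        # translate the original framework graph into MACE's internal representation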
        mace_model = convert_model(model_conf, quantize_stat)

        try:
            visualizer = visualize_model.ModelVisualizer(model_name,
                                                         mace_model,
                                                         model_output)
            visualizer.save_html()
        except:  # noqa
            print("Failed to visualize model:", sys.exc_info())

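        # strip the weights out of the graph into a flat parameter buffer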
        model, params = merge_params(mace_model,
                                     model_conf[ModelKeys.data_type])
        if enable_micro:
            micro_converter = MicroConverter(model_conf, copy.deepcopy(model),
                                             copy.deepcopy(params), model_name)
            micro_converter.gen_code()
            micro_converter.package(model_output + "/" +
                                    model_name + "_micro.tar.gz")
        output_model_file = model_output + "/" + model_name + ".pb"
        output_params_file = model_output + "/" + model_name + ".data"
        with open(output_model_file, "wb") as f:
            f.write(model.SerializeToString())
        with open(output_params_file, "wb") as f:
            f.write(bytearray(params))
        with open(output_model_file + "_txt", "w") as f:
            f.write(str(model))
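
A hypothetical call could look like the sketch below. The literal key strings behind ModelKeys, the output directory name, and the config values are illustrative assumptions, not a verbatim MACE deployment file:

# hypothetical invocation sketch; key names and values are assumptions chosen
# to match the lookups in convert() above
conf = {
    "models": {
        "mobilenet_v1": {
            "platform": "tensorflow",
            "model_file_path": "https://example.com/mobilenet_v1.pb",
            "model_sha256_checksum": "0" * 64,  # placeholder checksum
            "subgraphs": [{
                "input_tensors": ["input"],
                "input_shapes": ["1,224,224,3"],
                "output_tensors": ["MobilenetV1/Predictions/Reshape_1"],
                "output_shapes": ["1,1001"],
            }],
            "runtime": "cpu",
            "data_type": "fp16_fp32",
        },
    },
}
convert(conf, "build", enable_micro=False)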
Example #2
def convert(conf, output, enable_micro=False):
    for model_name, model_conf in conf["models"].items():
        model_output = output + "/" + model_name + "/model"
        org_model_dir = output + "/" + model_name + "/org_model"
        util.mkdir_p(model_output)
        util.mkdir_p(org_model_dir)

        model_conf = normalize_model_config(model_conf, model_output,
                                            org_model_dir)
        conf["models"][model_name] = model_conf
        net_confs = model_conf[ModelKeys.subgraphs]

        model = mace_pb2.MultiNetDef()
        add_input_output_tensor(model, model_conf)

        model_params = []
        for net_name, net_conf in net_confs.items():
            if "quantize_stat" in conf:
                net_conf["quantize_stat"] = conf["quantize_stat"]
            net_def_with_Data = convert_net(net_name, net_conf, enable_micro)
            try:
                visualizer = visualize_model.ModelVisualizer(
                    net_name, net_def_with_Data, model_output)
                visualizer.save_html()
            except:  # noqa
                print("Failed to visualize graph:", sys.exc_info())
            net_def, params = merge_params(net_def_with_Data,
                                           net_conf[ModelKeys.data_type])
            if enable_micro:
                convert_micro(
                    model_name,
                    net_confs,
                    net_def,
                    params,
                    model_output,
                )

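            # record this net's slice of the merged parameter buffer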
            net_def.data_offset = len(model_params)
            net_def.data_size = len(params)
            model.net_def.extend([net_def])
            model_params.extend(params)
        # store model and weight to files
        output_model_file = model_output + "/" + model_name + ".pb"
        output_params_file = model_output + "/" + model_name + ".data"
        with open(output_model_file, "wb") as f:
            f.write(model.SerializeToString())
        with open(output_params_file, "wb") as f:
            f.write(bytearray(model_params))
        with open(output_model_file + "_txt", "w") as f:
            f.write(str(model))
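
Because each NetDef only records data_offset and data_size into the shared parameter buffer, a loader can slice the merged .data file back per net. A minimal sketch of that bookkeeping; the import path is an assumption, while the field names are taken from the snippet above:

from py_proto import mace_pb2  # import path is an assumption; adjust to your checkout

def load_net_params(model_dir, model_name):
    # parse the serialized MultiNetDef written by convert()
    multi_net = mace_pb2.MultiNetDef()
    with open(model_dir + "/" + model_name + ".pb", "rb") as f:
        multi_net.ParseFromString(f.read())
    # read back the merged parameter buffer
    with open(model_dir + "/" + model_name + ".data", "rb") as f:
        all_params = f.read()
    # slice out each net's parameters using its offset/size bookkeeping
    return [
        all_params[net.data_offset:net.data_offset + net.data_size]
        for net in multi_net.net_def
    ]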
Example #3
def convert(conf, output):
    if not os.path.exists(output):
        os.mkdir(output)

    for model_name, model_conf in conf["models"].items():
        model_output = output + "/" + model_name
        if not os.path.exists(model_output):
            os.mkdir(model_output)

        subgraph = model_conf["subgraphs"][0]
        del model_conf["subgraphs"]
        model_conf.update(subgraph)

        model_file = util.download_or_get_file(
            model_conf["model_file_path"], model_conf["model_sha256_checksum"],
            model_output)
        model_conf["model_file_path"] = model_file
        if "weight_file_path" in model_conf:
            weight_file = util.download_or_get_file(
                model_conf["weight_file_path"],
                model_conf["weight_sha256_checksum"], model_output)
            model_conf["weight_file_path"] = weight_file
        # TODO: remove the following after quantize tool is made
        if "quantize_range_file" in model_conf:
            range_file = util.download_or_get_file(
                model_conf["quantize_range_file"], "", model_output)
            model_conf["quantize_range_file"] = range_file

        mace_model = convert_model(model_conf)

        try:
            visualizer = visualize_model.ModelVisualizer(
                model_name, mace_model, model_output)
            visualizer.save_html()
        except:  # noqa
            print("Failed to visualize model:", sys.exc_info()[0])

        model, params = merge_params(mace_model)

        output_model_file = model_output + "/" + model_name + ".pb"
        output_params_file = model_output + "/" + model_name + ".data"
        with open(output_model_file, "wb") as f:
            f.write(model.SerializeToString())
        with open(output_params_file, "wb") as f:
            f.write(bytearray(params))
        with open(output_model_file + "_txt", "w") as f:
            f.write(str(model))
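
To sanity-check the artifacts written above, the serialized graph can be parsed back with the generated protobuf classes. A minimal sketch, assuming a mace_pb2.NetDef message and an importable py_proto package (both inferred from the naming in Example #2, not shown in this snippet):

from py_proto import mace_pb2  # import path is an assumption; adjust to your checkout

def check_output(model_output, model_name):
    # parse the serialized graph back; NetDef is assumed to be the message
    # type produced by convert_model/merge_params in this variant
    net_def = mace_pb2.NetDef()
    with open(model_output + "/" + model_name + ".pb", "rb") as f:
        net_def.ParseFromString(f.read())
    with open(model_output + "/" + model_name + ".data", "rb") as f:
        params = f.read()
    print("ops:", len(net_def.op), "param bytes:", len(params))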