Example #1
def run_models_for_device(flags, args, dev):
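    # Parse the config file and run each model it lists on the given
    # device, restricted to a single model when --model_name is set.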
    conf = config_parser.parse(flags.config)
    for name, model_conf in conf["models"].items():
        if not flags.model_name or name == flags.model_name:
            MaceLogger.info("Run model %s" % name)
            model_conf = config_parser.normalize_model_config(model_conf)
            run_model_for_device(flags, args, dev, name, model_conf)
Example #2
def main() -> None:
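    # Load the config, create the database worker and a Redis client,
    # and abort early if Redis is unreachable.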
    config = config_parser.parse('config.json')
    db_worker = DBWorker(config)
    redis_client = redis.Redis(password=config['redis_pass'])
    try:
        redis_client.get('1')
    except redis.exceptions.ConnectionError:
        print('Error connecting to Redis.')
        exit()
    args = parse_args()

    if args.s:  # start
        start(config, db_worker, redis_client)
Example #3
    parser.add_argument("--backend",
                        type=str,
                        default="tensorflow",
                        help="onnx backend framework")
    parser.add_argument("--build",
                        action="store_true",
                        help="if build before run")
    parser.add_argument('--output',
                        type=str,
                        default="build",
                        help="output dir")
    parser.add_argument('--vlog_level',
                        type=int,
                        default=0,
                        help="vlog level")

    return parser.parse_known_args()


if __name__ == "__main__":
    flags, args = parse_args()
    conf = config_parser.parse(flags.config)
    if flags.build or flags.validate:
        micro_conf = config_parser.normalize_model_config(
            conf[ModelKeys.models][flags.model_name])
        build_engine(flags.model_name, micro_conf[ModelKeys.data_type])
    if flags.validate and flags.layers != "-1":
        run_layers_validate(flags, args, conf)
    else:
        run_model(flags, args, conf)
Example #4
def convert_func(flags):
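    # Prepare clean output/codegen directories, convert every model in the
    # YAML config, encrypt the generated artifacts, and optionally build
    # the model library when the graph format is "code".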
    configs = config_parser.parse(flags.config)
    print(configs)
    library_name = configs[YAMLKeyword.library_name]
    if not os.path.exists(BUILD_OUTPUT_DIR):
        os.makedirs(BUILD_OUTPUT_DIR)
    elif os.path.exists(os.path.join(BUILD_OUTPUT_DIR, library_name)):
        sh.rm("-rf", os.path.join(BUILD_OUTPUT_DIR, library_name))
    os.makedirs(os.path.join(BUILD_OUTPUT_DIR, library_name))
    if not os.path.exists(BUILD_DOWNLOADS_DIR):
        os.makedirs(BUILD_DOWNLOADS_DIR)

    model_output_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_OUTPUT_DIR_NAME)
    model_header_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_HEADER_DIR_PATH)
    # clear output dir
    if os.path.exists(model_output_dir):
        sh.rm("-rf", model_output_dir)
    os.makedirs(model_output_dir)
    if os.path.exists(model_header_dir):
        sh.rm("-rf", model_header_dir)

    if os.path.exists(MODEL_CODEGEN_DIR):
        sh.rm("-rf", MODEL_CODEGEN_DIR)
    if os.path.exists(ENGINE_CODEGEN_DIR):
        sh.rm("-rf", ENGINE_CODEGEN_DIR)

    if flags.quantize_stat:
        configs[YAMLKeyword.quantize_stat] = flags.quantize_stat

    if flags.model_data_format:
        model_data_format = flags.model_data_format
    else:
        model_data_format = configs.get(YAMLKeyword.model_data_format, "file")
    embed_model_data = model_data_format == ModelFormat.code

    if flags.model_graph_format:
        model_graph_format = flags.model_graph_format
    else:
        model_graph_format = configs.get(YAMLKeyword.model_graph_format,
                                         "file")
    embed_graph_def = model_graph_format == ModelFormat.code
    if flags.enable_micro:
        mace_check((not embed_model_data) and (not embed_graph_def),
                   ModuleName.YAML_CONFIG,
                   "You should specify file mode to convert micro model.")
    if embed_graph_def:
        os.makedirs(model_header_dir)
        sh_commands.gen_mace_engine_factory_source(
            configs[YAMLKeyword.models].keys(), embed_model_data)
        sh.cp("-f", glob.glob("mace/codegen/engine/*.h"), model_header_dir)

    convert.convert(configs, MODEL_CODEGEN_DIR, flags.enable_micro)

    for model_name, model_config in configs[YAMLKeyword.models].items():
        if flags.enable_micro:
            data_type = model_config.get(YAMLKeyword.data_type, "")
            mace_check(
                data_type == FPDataType.fp32_fp32.value
                or data_type == FPDataType.bf16_fp32.value,
                ModuleName.YAML_CONFIG,
                "You should specify fp32_fp32 or bf16_fp32 data type "
                "to convert micro model.")
        model_codegen_dir = "%s/%s" % (MODEL_CODEGEN_DIR, model_name)
        encrypt.encrypt(
            model_name, "%s/model/%s.pb" % (model_codegen_dir, model_name),
            "%s/model/%s.data" % (model_codegen_dir, model_name),
            config_parser.parse_device_type(model_config[YAMLKeyword.runtime]),
            model_codegen_dir, bool(model_config.get(YAMLKeyword.obfuscate,
                                                     1)),
            model_graph_format == "code", model_data_format == "code")

        if model_graph_format == ModelFormat.file:
            sh.mv("-f", '%s/model/%s.pb' % (model_codegen_dir, model_name),
                  model_output_dir)
            sh.mv("-f", '%s/model/%s.data' % (model_codegen_dir, model_name),
                  model_output_dir)
            if flags.enable_micro:
                sh.mv(
                    "-f", '%s/model/%s_micro.tar.gz' %
                    (model_codegen_dir, model_name), model_output_dir)
        else:
            if not embed_model_data:
                sh.mv("-f",
                      '%s/model/%s.data' % (model_codegen_dir, model_name),
                      model_output_dir)

            sh.cp("-f", glob.glob("mace/codegen/models/*/code/*.h"),
                  model_header_dir)

        MaceLogger.summary(
            StringFormatter.block("Model %s converted" % model_name))

    if model_graph_format == ModelFormat.code:
        build_model_lib(configs, flags.address_sanitizer, flags.debug_mode)

    print_library_summary(configs)
Example #5
def convert_func(flags):
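    # Convert and encrypt every model in the YAML config, move the results
    # into the library output directories, and build the model library
    # when the graph format is "code".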
    configs = config_parser.parse(flags.config)
    print(configs)
    library_name = configs[YAMLKeyword.library_name]
    if not os.path.exists(BUILD_OUTPUT_DIR):
        os.makedirs(BUILD_OUTPUT_DIR)
    elif os.path.exists(os.path.join(BUILD_OUTPUT_DIR, library_name)):
        sh.rm("-rf", os.path.join(BUILD_OUTPUT_DIR, library_name))
    os.makedirs(os.path.join(BUILD_OUTPUT_DIR, library_name))
    if not os.path.exists(BUILD_DOWNLOADS_DIR):
        os.makedirs(BUILD_DOWNLOADS_DIR)

    model_output_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_OUTPUT_DIR_NAME)
    model_header_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_HEADER_DIR_PATH)
    # clear output dir
    if os.path.exists(model_output_dir):
        sh.rm("-rf", model_output_dir)
    os.makedirs(model_output_dir)
    if os.path.exists(model_header_dir):
        sh.rm("-rf", model_header_dir)

    if os.path.exists(MODEL_CODEGEN_DIR):
        sh.rm("-rf", MODEL_CODEGEN_DIR)
    if os.path.exists(ENGINE_CODEGEN_DIR):
        sh.rm("-rf", ENGINE_CODEGEN_DIR)

    if flags.model_data_format:
        model_data_format = flags.model_data_format
    else:
        model_data_format = configs.get(YAMLKeyword.model_data_format, "file")
    embed_model_data = model_data_format == ModelFormat.code

    if flags.model_graph_format:
        model_graph_format = flags.model_graph_format
    else:
        model_graph_format = configs.get(YAMLKeyword.model_graph_format,
                                         "file")
    if model_graph_format == ModelFormat.code:
        os.makedirs(model_header_dir)
        sh_commands.gen_mace_engine_factory_source(
            configs[YAMLKeyword.models].keys(), embed_model_data)
        sh.cp("-f", glob.glob("mace/codegen/engine/*.h"), model_header_dir)

    convert.convert(configs, MODEL_CODEGEN_DIR)

    for model_name, model_config in configs[YAMLKeyword.models].items():
        model_codegen_dir = "%s/%s" % (MODEL_CODEGEN_DIR, model_name)
        encrypt.encrypt(
            model_name, "%s/model/%s.pb" % (model_codegen_dir, model_name),
            "%s/model/%s.data" % (model_codegen_dir, model_name),
            config_parser.parse_device_type(model_config[YAMLKeyword.runtime]),
            model_codegen_dir, bool(model_config.get(YAMLKeyword.obfuscate,
                                                     1)),
            model_graph_format == "code", model_data_format == "code")

        if model_graph_format == ModelFormat.file:
            sh.mv("-f", '%s/model/%s.pb' % (model_codegen_dir, model_name),
                  model_output_dir)
            sh.mv("-f", '%s/model/%s.data' % (model_codegen_dir, model_name),
                  model_output_dir)
        else:
            if not embed_model_data:
                sh.mv("-f",
                      '%s/model/%s.data' % (model_codegen_dir, model_name),
                      model_output_dir)

            sh.cp("-f", glob.glob("mace/codegen/models/*/code/*.h"),
                  model_header_dir)

        MaceLogger.summary(
            StringFormatter.block("Model %s converted" % model_name))

    if model_graph_format == ModelFormat.code:
        build_model_lib(configs, flags.address_sanitizer, flags.debug_mode)

    print_library_summary(configs)
Example #6
#
# graph = tf.Graph()
# with tf.compat.v1.Session(graph=graph) as sess:
#    loader = tf.compat.v1.train.import_meta_graph(trained_checkpoint_prefix +
#                                                  '.meta')
#    loader.restore(sess, trained_checkpoint_prefix)
#
#    builder = tf.compat.v1.saved_model.builder.SavedModelBuilder(export_directory)
#    builder.add_meta_graph_and_variables(sess,
#                                        [tf.saved_model.TRAINING,
#                                         tf.saved_model.SERVING],
#                                        strip_default_attrs=True)
#    builder.save()

model_config_file = "cfg/darknet.cfg"
parsed_config = config_parser.parse(model_config_file)

# Build the detector from the parsed darknet config and freeze its first layer.
detection_model = Detector(parsed_config, 1)
detection_model = detection_model.build_model((416, 416, 3), 1)
detection_model.layers[0].trainable = False

# Restore the latest training checkpoint into the model.
checkpoint = tf.train.Checkpoint(model=detection_model)
latest_checkpoint_path = tf.train.latest_checkpoint("/home/karan/Checkpoint/")
checkpoint.restore(latest_checkpoint_path)

# shape = (1, 416, 416, 3)
# sample_image = np.random.randint(0, 255, size=(1, 416, 416, 3), dtype=np.uint8)
# input_image = np.array(sample_image / 255, dtype=np.float32)

# model = tf.keras.models.load_model("/home/karan/Checkpoint/")
# model.build_model(shape[1:], shape[0])