Example #1
def prepare_omz_model(openvino_ref, model, omz_repo, omz_cache_dir, tmpdir):
    """
    Download and convert Open Model Zoo model to Intermediate Representation,
    get path to model XML.
    """
    # Step 1: downloader
    omz_log = logging.getLogger("prepare_omz_model")

    python_executable = sys.executable
    downloader_path = omz_repo / "tools" / "downloader" / "downloader.py"
    model_path_root = tmpdir

    cmd = [
        f'{python_executable}', f'{downloader_path}', '--name',
        f'{model["name"]}', f'--precisions={model["precision"]}',
        '--num_attempts', f'{OMZ_NUM_ATTEMPTS}', '--output_dir',
        f'{model_path_root}'
    ]

    if omz_cache_dir:
        cmd.append('--cache_dir')
        cmd.append(f'{omz_cache_dir}')

    return_code, output = cmd_exec(cmd, log=omz_log)
    assert return_code == 0, "Downloading OMZ models has failed!"

    # Step 2: converter
    converter_path = omz_repo / "tools" / "downloader" / "converter.py"
    ir_path = model_path_root / "_IR"
    # Note: remove --precisions if both precisions (FP32 & FP16) are required
    cmd = [
        f'{python_executable}', f'{converter_path}', '--name',
        f'{model["name"]}', '-p', f'{python_executable}',
        f'--precisions={model["precision"]}', '--output_dir', f'{ir_path}',
        '--download_dir', f'{model_path_root}', '--mo',
        f'{openvino_ref / "tools" / "model_optimizer" / "mo.py"}'
    ]

    return_code, output = cmd_exec(cmd,
                                   env=get_openvino_environment(openvino_ref),
                                   log=omz_log)
    assert return_code == 0, "Converting OMZ models has failed!"

    # Step 3: info_dumper
    info_dumper_path = omz_repo / "tools" / "downloader" / "info_dumper.py"
    cmd = [
        f'{python_executable}', f'{info_dumper_path}', '--name',
        f'{model["name"]}'
    ]

    return_code, output = cmd_exec(cmd, log=omz_log)
    assert return_code == 0, "Getting information about OMZ models has failed!"
    model_info = json.loads(output)[0]

    # Step 4: form model_path
    model_path = (ir_path / model_info["subdirectory"] / model["precision"] /
                  f'{model_info["name"]}.xml')

    return model_path
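
A possible call site for the helper above, sketched with placeholder values: the fixture names (openvino_ref, omz_repo, omz_cache_dir) and the model entry are assumptions for illustration, not part of the example itself.

def test_prepare_omz_model(openvino_ref, omz_repo, omz_cache_dir, tmp_path):
    # Placeholder model entry; any dict with "name" and "precision" keys
    # matches the interface used by prepare_omz_model above.
    model = {"name": "resnet-50-pytorch", "precision": "FP32"}
    model_xml = prepare_omz_model(openvino_ref, model, omz_repo,
                                  omz_cache_dir, tmp_path)
    # prepare_omz_model returns the path to the generated IR XML.
    assert model_xml.exists(), f"Expected IR file at {model_xml}"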
Example #2
def run_infer(model, out_file, install_dir):
    """ Function running inference
    """
    return_code, output = cmd_exec(
        [sys.executable,
         infer_tool,
         "-d=CPU", f"-m={model}", f"-r={out_file}"
         ],
        env=get_openvino_environment(install_dir),
    )
    return return_code, output
Example #3
def test_infer(test_id, model, artifacts):
    """ Test inference with conditional compiled binaries
    """
    install_prefix = artifacts / test_id / "install_pkg"
    exe_suffix = ".exe" if sys.platform == "win32" else ""
    benchmark_app = install_prefix / "bin" / f"benchmark_app{exe_suffix}"
    returncode, _ = cmd_exec(
        [str(benchmark_app), "-d=CPU", f"-m={model}", "-niter=1", "-nireq=1"],
        env=get_openvino_environment(install_prefix),
    )
    assert returncode == 0, f"Command exited with non-zero status {returncode}"
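
A hedged sketch of how run_infer and test_infer might be combined into a test that also checks the produced results file; the directory layout and file name below are placeholders, not taken from the examples.

def test_infer_result_file(test_id, model, artifacts):
    # Reuse the install layout from test_infer above (placeholder paths).
    install_prefix = artifacts / test_id / "install_pkg"
    out_file = artifacts / test_id / "infer_result.npz"  # hypothetical name
    out_file.parent.mkdir(parents=True, exist_ok=True)
    return_code, output = run_infer(model, out_file, install_prefix)
    assert return_code == 0, f"Inference exited with {return_code}:\n{output}"
    assert out_file.exists(), f"Result file was not written: {out_file}"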
Example #4
def run_infer(models, out_dir, install_dir):
    """ Function running inference
    """
    out_dir.mkdir(parents=True, exist_ok=True)
    return_code, output = cmd_exec(
        [
            sys.executable, infer_tool, "-d=CPU",
            *[f"-m={model}" for model in models], f"-r={out_dir}"
        ],
        env=get_openvino_environment(install_dir),
    )
    return return_code, output
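
Example #4 extends run_infer to accept several models in one invocation (one -m flag per model). A minimal usage sketch with placeholder fixture names and paths:

def test_infer_many(test_id, models, artifacts):
    # Placeholder layout: one results directory per test id.
    install_prefix = artifacts / test_id / "install_pkg"
    out_dir = artifacts / test_id / "infer_results"
    return_code, output = run_infer(models, out_dir, install_prefix)
    assert return_code == 0, f"Inference exited with {return_code}:\n{output}"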
Example #5
def run_infer(model, out_file, install_dir):
    """ Function running inference
    """
    sys_executable = os.path.join(sys.prefix, 'python.exe') if sys.platform == "win32" \
        else os.path.join(sys.prefix, 'bin', 'python')
    return_code, output = cmd_exec(
        [
            sys_executable, infer_tool, "-d=CPU", f"-m={model}",
            f"-r={out_file}"
        ],
        env=get_openvino_environment(install_dir),
    )
    return return_code, output
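
Example #5 differs from the earlier run_infer variants only in how the Python interpreter is located: it rebuilds the path from sys.prefix instead of using sys.executable. A small comparison sketch; the two paths are not guaranteed to be identical, and this is an illustration rather than something the example itself asserts.

import os
import sys

# Interpreter currently running this process.
running_interpreter = sys.executable
# Interpreter reconstructed from the installation prefix, as in Example #5.
prefix_interpreter = (os.path.join(sys.prefix, 'python.exe')
                      if sys.platform == "win32"
                      else os.path.join(sys.prefix, 'bin', 'python'))
print(running_interpreter, prefix_interpreter)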
Example #6
def prepare_omz_model(openvino_ref, model, omz_repo, omz_cache_dir, tmpdir):
    """
    Download and convert Open Model Zoo model to Intermediate Representation,
    get path to model XML.
    """
    # Step 1: downloader
    omz_log = logging.getLogger("prepare_omz_model")

    python_executable = sys.executable
    downloader_path = omz_repo / "tools" / "downloader" / "downloader.py"
    model_path_root = tmpdir

    cmd = f'{python_executable} {downloader_path} --name {model["name"]}' \
          f' --precisions={model["precision"]}' \
          f' --num_attempts {OMZ_NUM_ATTEMPTS}' \
          f' --output_dir {model_path_root}'

    if omz_cache_dir:
        cmd += f' --cache_dir {omz_cache_dir}'

    cmd_exec(cmd, log=omz_log)

    # Step 2: converter
    converter_path = omz_repo / "tools" / "downloader" / "converter.py"
    ir_path = model_path_root / "_IR"
    # Note: remove --precisions if both precisions (FP32 & FP16) are required
    cmd = f'{python_executable} {converter_path} --name {model["name"]}' \
          f' -p {python_executable}' \
          f' --precisions={model["precision"]}' \
          f' --output_dir {ir_path}' \
          f' --download_dir {model_path_root}' \
          f' --mo {Path("../../model-optimizer/mo.py").resolve()}'

    cmd_exec(cmd, env=get_openvino_environment(openvino_ref), log=omz_log)

    # Step 3: info_dumper
    info_dumper_path = omz_repo / "tools" / "downloader" / "info_dumper.py"
    cmd = f'"{python_executable}" "{info_dumper_path}" --name {model["name"]}'

    return_code, output = cmd_exec(cmd, log=omz_log)
    model_info = json.loads(output)[0]

    # Step 4: form model_path
    model_path = (ir_path / model_info["subdirectory"] / model["precision"] /
                  f'{model_info["name"]}.xml')

    return model_path
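
Unlike Example #1, Example #6 assembles each command as a single string rather than an argument list. If such a string ever needs to be turned back into an argument list (for example, for a runner that only accepts lists), shlex handles the quoting; a minimal sketch, independent of how cmd_exec itself treats string commands:

import shlex

cmd = '/usr/bin/python3 downloader.py --name "some model" --num_attempts 6'
args = shlex.split(cmd)  # splits on whitespace while honouring quotes
# -> ['/usr/bin/python3', 'downloader.py', '--name', 'some model', '--num_attempts', '6']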