예제 #1
0
def max_available_cores() -> int:
    """
    :return: the maximum number of physical cores detected on the system
    """
    if cpu_details is None:
        # deepsparse's native cpu_details is unavailable; fall back to psutil
        _LOGGER.debug("retrieving physical core count using psutil")
        core_count = psutil.cpu_count(logical=False)
        # psutil may return None when the count cannot be determined;
        # report -1 as the "unknown" sentinel in that case
        return core_count or -1

    # preferred path: ask the deepsparse engine directly
    _LOGGER.debug(
        "retrieving physical core count per socket "
        "from deepsparse.cpu.cpu_details()"
    )
    return cpu_details()[0]
예제 #2
0
    def __init__(
        self,
        model_info: ModelInfo,
        batch_size: int = 1,
        num_cores: Optional[int] = None,
        iterations_per_check: int = 10,
        warmup_iterations_per_check: int = 5,
    ):
        """
        :param model_info: model metadata forwarded to the parent class
        :param batch_size: batch size used for each check, default 1
        :param num_cores: cores to run with; defaults to the system's
            physical core count per socket when deepsparse is importable
        :param iterations_per_check: timed iterations per check, default 10
        :param warmup_iterations_per_check: untimed warmup iterations per
            check, default 5
        """
        self._batch_size = batch_size
        self._iterations_per_check = iterations_per_check
        self._warmup_iterations_per_check = warmup_iterations_per_check

        # best-effort default for num_cores (tracking only): ask deepsparse
        # for the per-socket physical core count; on any failure keep the
        # caller-supplied value (possibly None)
        resolved_cores = num_cores
        try:
            from deepsparse.cpu import cpu_details

            resolved_cores = num_cores or cpu_details()[0]
        except Exception:
            pass
        self._num_cores = resolved_cores

        super().__init__(model_info)
예제 #3
0
except Exception:
    Model = object
    File = object

# import the deepsparse python bindings; re-raise any ImportError with a
# support-oriented message so users know the install (not their code) failed
try:
    # flake8: noqa
    from deepsparse.cpu import cpu_details
    from deepsparse.lib import init_deepsparse_lib
    from deepsparse.version import *
except ImportError:
    raise ImportError("Unable to import deepsparse python apis. "
                      "Please contact [email protected]")

__all__ = ["Engine", "compile_model", "benchmark_model", "analyze_model"]

# cache system CPU capabilities once at import time:
# physical cores per socket, AVX instruction set type, and VNNI support
CORES_PER_SOCKET, AVX_TYPE, VNNI = cpu_details()

# handle to the compiled deepsparse engine library, initialized once
LIB = init_deepsparse_lib()


def _model_to_path(model: Union[str, Model, File]) -> str:
    if not model:
        raise ValueError(
            "model must be a path, sparsezoo.Model, or sparsezoo.File")

    if isinstance(model, str):
        pass
    elif Model is not object and isinstance(model, Model):
        # default to the main onnx file for the model
        model = model.onnx_file.downloaded_path()
    elif File is not object and isinstance(model, File):
예제 #4
0
import time

import onnxruntime

from deepsparse import compile_model, cpu
from deepsparse.benchmark import BenchmarkResults
from deepsparse.utils import (
    generate_random_inputs,
    get_input_names,
    get_output_names,
    override_onnx_batch_size,
    verify_outputs,
)


# benchmark defaults derived from the host CPU; the VNNI flag is unused here
CORES_PER_SOCKET, AVX_TYPE, _ = cpu.cpu_details()


def parse_args():
    parser = argparse.ArgumentParser(
        description=(
            "Benchmark an ONNX model, comparing between DeepSparse and ONNXRuntime"
        )
    )

    parser.add_argument(
        "onnx_filepath",
        type=str,
        help="The full filepath of the ONNX model file being benchmarked",
    )