Example #1
# Imports assumed from the package layout used elsewhere in this snippet
# (exact module paths may differ between neuropod versions).
from neuropod.backends.neuropod_native.executor import NativeNeuropodExecutor
from neuropod.utils import config_utils, zip_loader


def load_neuropod(neuropod_path, _always_use_native=True, **kwargs):
    """
    Load a neuropod package. Returns a NeuropodExecutor.

    :param  neuropod_path:      The path to a neuropod package.
    :param  visible_gpu:        The index of the GPU that this Neuropod should run on (if any).
                                This is either `None` or a nonnegative integer. Setting this
                                to `None` will attempt to run this model on CPU.
    :param  load_custom_ops:    Whether or not to load custom ops included in the model.

    `visible_gpu` and `load_custom_ops` are forwarded to the underlying executor
    via `**kwargs`.
    """
    # By default, delegate to the native executor regardless of platform;
    # the per-platform Python executors below are only used when this is disabled.
    if _always_use_native:
        return NativeNeuropodExecutor(neuropod_path, **kwargs)

    # If we were given a zipfile, extract it to a temp dir and use it
    neuropod_path = zip_loader.extract_neuropod_if_necessary(neuropod_path)

    # Figure out what type of neuropod this is
    neuropod_config = config_utils.read_neuropod_config(neuropod_path)
    platform = neuropod_config["platform"]

    if platform == "python":
        from neuropod.backends.python.executor import PythonNeuropodExecutor

        return PythonNeuropodExecutor(neuropod_path, **kwargs)
    elif platform == "torchscript":
        from neuropod.backends.torchscript.executor import TorchScriptNeuropodExecutor

        return TorchScriptNeuropodExecutor(neuropod_path, **kwargs)
    elif platform == "tensorflow":
        from neuropod.backends.tensorflow.executor import TensorflowNeuropodExecutor

        return TensorflowNeuropodExecutor(neuropod_path, **kwargs)
    else:
        raise ValueError(
            "Invalid platform found in neuropod config: {}".format(platform))

The `NeuropodExecutor` base class that `load_neuropod` returns reads the neuropod
config at construction time and builds a mapping from each input tensor name to the
device it should be placed on:

class NeuropodExecutor(object):
    def __init__(self, neuropod_path):
        # Read the neuropod config
        self.neuropod_config = config_utils.read_neuropod_config(neuropod_path)

        # Generate the tensor to device mapping.
        # `self.inputs` (the list of input tensor specs from the config) is
        # expected to be provided elsewhere in the class.
        self.input_device_mapping = {
            tensor["name"]: self.neuropod_config["input_tensor_device"][tensor["name"]]
            for tensor in self.inputs
        }
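
To make the mapping concrete, here is a standalone sketch of the same comprehension
with a hypothetical config fragment and input spec (in the real class these come from
`config_utils.read_neuropod_config`):

# Hypothetical values; the real ones are read from the neuropod config.
neuropod_config = {
    "input_tensor_device": {"x": "GPU", "y": "CPU"},
}
inputs = [{"name": "x"}, {"name": "y"}]

# Same comprehension as in __init__ above.
input_device_mapping = {
    tensor["name"]: neuropod_config["input_tensor_device"][tensor["name"]]
    for tensor in inputs
}

print(input_device_mapping)  # {'x': 'GPU', 'y': 'CPU'}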