Example 1
def check_model(model: Union[ModelProto, str, bytes],
                full_check: bool = False) -> None:
    """Check the consistency of a model. An exception is raised if the test fails.

    Arguments:
        model (ModelProto): model to check
        full_check (bool): if True, the function also checks that shapes can be inferred
    """
    # If model is a path instead of ModelProto
    if isinstance(model, str):
        C.check_model_path(model)
        if full_check:
            onnx.shape_inference.infer_shapes_path(model,
                                                   check_type=True,
                                                   strict_mode=True)
    else:
        protobuf_string = model if isinstance(
            model, bytes) else model.SerializeToString()
        # If the protobuf is larger than 2GB,
        # remind users to check with the model path instead
        if sys.getsizeof(protobuf_string) > MAXIMUM_PROTOBUF:
            raise ValueError(
                'This protobuf of onnx model is too large (>2GB). Call check_model with model path instead.'
            )
        C.check_model(protobuf_string)
        if full_check:
            onnx.shape_inference.infer_shapes(model,
                                              check_type=True,
                                              strict_mode=True)
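
A minimal usage sketch for this version (hedged: it assumes the onnx package is installed, "model.onnx" is a placeholder path to an existing file, and the installed release exposes the full_check argument):

import onnx

# Path-based check: the checker reads the file itself, which also works
# for models whose serialized protobuf would exceed the 2GB in-memory limit.
onnx.checker.check_model("model.onnx", full_check=True)

# In-memory check: load the ModelProto first, then validate it.
model = onnx.load("model.onnx")
onnx.checker.check_model(model, full_check=True)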
Example 2
def check_model(model, full_check=False):  # type: (Union[ModelProto, Text], bool) -> None
    if isinstance(model, string_types):
        # Model given as a path: check it on disk, then load it for shape inference.
        C.check_model_path(model)
        m = onnx.load(model)
    else:
        # Model given as a ModelProto: serialize and check it in memory.
        C.check_model(model.SerializeToString())
        m = model
    if full_check:
        onnx.shape_inference.infer_shapes(m, True)
Example 3
def check_model(model, full_check=False):  # type: (Union[ModelProto, Text], bool) -> None
    if isinstance(model, string_types):
        C.check_model_path(model)
        m = onnx.load(model)
    else:
        # If the protobuf is larger than 2GB,
        # remind users to check with the model path instead
        protobuf_string = model.SerializeToString()
        if sys.getsizeof(protobuf_string) > MAXIMUM_PROTOBUF:
            raise ValueError('This protobuf of onnx model is too large (>2GB). Call check_model with model path instead.')
        C.check_model(protobuf_string)
        m = model
    if full_check:
        onnx.shape_inference.infer_shapes(m, True)
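
The size guard above points callers at the path-based entry point. A hedged sketch of that fallback ("big_model.onnx" is a placeholder path, and the model is assumed to already exist on disk):

import onnx

model = onnx.load("big_model.onnx")
try:
    # In-memory check; raises ValueError when the serialized protobuf exceeds 2GB.
    onnx.checker.check_model(model)
except ValueError:
    # Path-based check avoids serializing the whole model in Python memory.
    onnx.checker.check_model("big_model.onnx")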
Example 4
def check_model(model: Union[ModelProto, Text, bytes],
                full_check: bool = False) -> None:
    # If model is a path instead of ModelProto
    if isinstance(model, str):
        C.check_model_path(model)
        if full_check:
            onnx.shape_inference.infer_shapes_path(model,
                                                   check_type=True,
                                                   strict_mode=True)
    else:
        protobuf_string = model if isinstance(
            model, bytes) else model.SerializeToString()
        # If the protobuf is larger than 2GB,
        # remind users to check with the model path instead
        if sys.getsizeof(protobuf_string) > MAXIMUM_PROTOBUF:
            raise ValueError(
                'This protobuf of onnx model is too large (>2GB). Call check_model with model path instead.'
            )
        C.check_model(protobuf_string)
        if full_check:
            onnx.shape_inference.infer_shapes(model,
                                              check_type=True,
                                              strict_mode=True)
Example 5
def check_model(model):  # type: (Union[ModelProto, Text]) -> None
    if isinstance(model, string_types):
        # Model given as a path: check it directly on disk.
        C.check_model_path(model)
    else:
        # Model given as a ModelProto: serialize and check it in memory.
        C.check_model(model.SerializeToString())