def test_guess_type(self):
        """Check that ``_guess_type`` maps each numpy dtype to the expected
        tensor-type class, and that it can be called on float64 input."""
        expected_pairs = [
            (np.int32, Int32TensorType),
            (np.int64, Int64TensorType),
            (np.float32, FloatTensorType),
            (np.str_, StringTensorType),
            (np.bool_, BooleanTensorType),
            (np.int8, Int8TensorType),
            (np.uint8, UInt8TensorType),
        ]
        # Complex tensor types may be unavailable in older versions of the
        # converter library; only test them when the classes exist.
        if Complex64TensorType is not None:
            expected_pairs.append((np.complex64, Complex64TensorType))
        if Complex128TensorType is not None:
            expected_pairs.append((np.complex128, Complex128TensorType))

        for np_dtype, tensor_cls in expected_pairs:
            if np_dtype == np.str_:
                # np.zeros is not meaningful for string dtypes: build an
                # empty array and fill it with an explicit string value.
                sample = np.empty((3, 3), dtype=np_dtype)
                sample[:, :] = ""
            else:
                sample = np.zeros((3, 3), dtype=np_dtype)
            guessed = _guess_type(sample)
            assert isinstance(guessed, tensor_cls)

        # float64 input: only check that the call itself goes through.
        sample = np.zeros((3, 3), dtype=np.float64)
        _guess_type(sample)
# --- Example #2 (score: 0) ---
def guess_initial_types(X, initial_types):
    """
    Guesses initial types from an array or a dataframe.

    @param      X               array or dataframe
    @param      initial_types   hints about X
    @return                     data types
    """
    # Explicit hints always win; only guess when none were provided.
    if initial_types is not None:
        return initial_types
    if X is None:
        raise NotImplementedError(  # pragma: no cover
            "Initial types must be specified.")
    # A single row is enough to inspect the element types.
    if isinstance(X, (numpy.ndarray, pandas.DataFrame)):
        X = X[:1]
    if not isinstance(X, pandas.DataFrame):
        return [('X', _guess_type(X))]
    # One (name, type) pair per column; string columns get a dedicated type.
    guessed = []
    for col in X.columns:
        if isinstance(X[col].values[0], (str, numpy.str_)):
            col_type = StringTensorType()
        else:
            col_type = _guess_type(X[col].values)
        col_type.shape = [None, 1]
        guessed.append((col, col_type))
    return guessed
def _display_intermediate_steps(model_onnx, inputs):
    """Debug helper: prints every initializer, then truncates the model at
    each intermediate output, loads the truncated model with onnxruntime and
    prints its inputs/outputs (and run results when *inputs* is given)."""
    import onnxruntime
    print("[_display_intermediate_steps] BEGIN")
    if isinstance(model_onnx, str):
        # A path was given instead of a model proto; load it first.
        import onnx
        model_onnx = onnx.load(model_onnx)

    for init_name, init in enumerate_model_initializers(
            model_onnx, add_node=True):
        print("INIT: {} - {}".format(init_name, _guess_type(init)))

    for out_name, producer in enumerate_model_node_outputs(
            model_onnx, add_node=True):
        print('-')
        print("OUTPUT: {} from {}".format(out_name, producer.name))
        truncated = select_model_inputs_outputs(model_onnx, out_name)
        try:
            sess = onnxruntime.InferenceSession(truncated.SerializeToString())
        except Exception as e:
            # Identify the node whose addition broke the model.
            raise RuntimeError("Unable to load ONNX model with onnxruntime. "
                               "Last added node is:\n{}".format(producer)) from e
        for meta in sess.get_inputs():
            print("IN :", meta)
        for meta in sess.get_outputs():
            print("OUT: ", meta)
        if inputs:
            print(sess.run(inputs))
    print("[_display_intermediate_steps] END")
def _display_intermediate_steps(model_onnx, inputs, disable_optimisation):
    """Debug helper: prints every initializer, then truncates the model at
    each intermediate output, loads the truncated model with onnxruntime and
    prints its inputs/outputs (and run results when *inputs* is given).
    With *disable_optimisation* set and a recent onnxruntime, graph
    optimisations are turned off when loading each truncated model."""
    import onnxruntime
    print("[_display_intermediate_steps] BEGIN")
    if isinstance(model_onnx, str):
        # A path was given instead of a model proto; load it first.
        import onnx
        model_onnx = onnx.load(model_onnx)

    for init_name, init in enumerate_model_initializers(
            model_onnx, add_node=True):
        print("INIT: {} - {}".format(init_name, _guess_type(init)))

    for out_name, producer in enumerate_model_node_outputs(
            model_onnx, add_node=True):
        print('-')
        print("OUTPUT: {} from {}".format(out_name, producer.name))
        truncated = select_model_inputs_outputs(model_onnx, out_name)
        # GraphOptimizationLevel only exists in recent onnxruntime releases.
        if (disable_optimisation
                and hasattr(onnxruntime, 'GraphOptimizationLevel')):
            sess_opts = onnxruntime.SessionOptions()
            sess_opts.graph_optimization_level = (
                onnxruntime.GraphOptimizationLevel.ORT_DISABLE_ALL)
        else:
            sess_opts = None
        try:
            sess = onnxruntime.InferenceSession(
                truncated.SerializeToString(), sess_options=sess_opts)
        except Exception as e:
            # Identify the node whose addition broke the model.
            raise RuntimeError("Unable to load ONNX model with onnxruntime. "
                               "Last added node is:\n{}".format(producer)) from e
        for meta in sess.get_inputs():
            print("IN :", meta)
        for meta in sess.get_outputs():
            print("OUT: ", meta)
        if inputs:
            print(sess.run(inputs))
    print("[_display_intermediate_steps] END")
# --- Example #5 (score: 0) ---
    def test_guess_type(self):
        """``_guess_type`` must map each numpy dtype to the matching tensor
        type; float64 input is only checked to be callable here."""
        # FIX: np.str was deprecated in NumPy 1.20 and removed in 1.24;
        # use the canonical np.str_ alias so the test runs on modern numpy.
        dtypes = [(np.int32, Int32TensorType), (np.int64, Int64TensorType),
                  (np.float32, FloatTensorType), (np.str_, StringTensorType)]
        for dtype, exp in dtypes:
            if dtype == np.str_:
                # np.zeros is not meaningful for string dtypes: build an
                # empty array and fill it with an explicit string value.
                mat = np.empty((3, 3), dtype=dtype)
                mat[:, :] = ""
            else:
                mat = np.zeros((3, 3), dtype=dtype)
            res = _guess_type(mat)
            assert isinstance(res, exp)

        dtypes = [np.float64]
        for dtype in dtypes:
            mat = np.zeros((3, 3), dtype=dtype)
            _guess_type(mat)
    def test_guess_type(self):
        """``_guess_type`` must map each numpy dtype to the matching tensor
        type, and raise NotImplementedError for unsupported float64 input."""
        # FIX: np.str was deprecated in NumPy 1.20 and removed in 1.24;
        # use the canonical np.str_ alias so the test runs on modern numpy.
        dtypes = [(np.int32, Int32TensorType), (np.int64, Int64TensorType),
                  (np.float32, FloatTensorType), (np.str_, StringTensorType)]
        for dtype, exp in dtypes:
            if dtype == np.str_:
                # np.zeros is not meaningful for string dtypes: build an
                # empty array and fill it with an explicit string value.
                mat = np.empty((3, 3), dtype=dtype)
                mat[:, :] = ""
            else:
                mat = np.zeros((3, 3), dtype=dtype)
            res = _guess_type(mat)
            assert isinstance(res, exp)

        dtypes = [np.float64]
        for dtype in dtypes:
            mat = np.zeros((3, 3), dtype=dtype)
            try:
                _guess_type(mat)
                raise AssertionError("It should fail for type "
                                     "{}".format(dtype))
            except NotImplementedError:
                pass