Code example #1
0
    def test_caffe2_to_onnx_value_info(self):
        """Check that caffe2-to-onnx conversion requires --value-info for
        external inputs, and succeeds once the value info is supplied.

        A one-op (Relu) Caffe2 net is serialized to a temp file; the first
        conversion attempt (no value info for input "X") must raise, the
        second (with dtype/shape for "X") must produce a valid ONNX model.
        """
        # NamedTemporaryFile keeps the files alive for the whole test;
        # they are deleted automatically when garbage-collected.
        caffe2_net = tempfile.NamedTemporaryFile()
        output = tempfile.NamedTemporaryFile()

        model = ModelHelper(name='caffe2-to-onnx-test')
        brew.relu(model, ["X"], "Y")
        caffe2_net.write(model.net.Proto().SerializeToString())
        caffe2_net.flush()

        args = [caffe2_net.name, '--output', output.name]
        # assertRaisesRegexp is deprecated (removed in Python 3.12);
        # assertRaisesRegex is the supported spelling.
        self.assertRaisesRegex(Exception,
                               'value info',
                               self._run_command, caffe2_to_onnx, args)

        # Retry with explicit dtype/shape for the external input "X".
        args.extend([
            '--value-info',
            json.dumps({
                'X': (onnx_pb2.TensorProto.FLOAT, (2, 2)),
            })])
        self._run_command(caffe2_to_onnx, args)

        # The converted graph should contain exactly the Relu node and,
        # since no init net was given, no initializers.
        onnx_model = onnx_pb2.ModelProto()
        onnx_model.ParseFromString(output.read())
        self.assertEqual(len(onnx_model.graph.node), 1)
        self.assertEqual(onnx_model.graph.node[0].op_type, 'Relu')
        self.assertEqual(len(onnx_model.graph.initializer), 0)
Code example #2
0
    def test_caffe2_to_onnx(self):
        """Convert a Caffe2 predict net plus init net to ONNX and verify
        that the resulting graph has the Relu node and one initializer
        whose name matches the graph input.
        """
        caffe2_net = tempfile.NamedTemporaryFile()
        caffe2_init_net = tempfile.NamedTemporaryFile()
        output = tempfile.NamedTemporaryFile()

        # Predict net: a single Relu X -> Y.
        predict_helper = ModelHelper(name='caffe2-to-onnx-test')
        brew.relu(predict_helper, ["X"], "Y")
        caffe2_net.write(predict_helper.net.Proto().SerializeToString())
        caffe2_net.flush()

        # Init net: fills X with a 2x2 zero tensor, so the converter can
        # emit it as an ONNX initializer.
        init_helper = ModelHelper(name='caffe2-to-onnx-init-test')
        init_helper.net.GivenTensorFill(
            [], 'X', shape=[2, 2],
            values=np.zeros((2, 2)).flatten().astype(float))
        caffe2_init_net.write(init_helper.net.Proto().SerializeToString())
        caffe2_init_net.flush()

        cli_args = [
            caffe2_net.name,
            '--caffe2-init-net', caffe2_init_net.name,
            '--output', output.name,
        ]
        self._run_command(caffe2_to_onnx, cli_args)

        onnx_model = onnx_pb2.ModelProto()
        onnx_model.ParseFromString(output.read())
        self.assertEqual(len(onnx_model.graph.node), 1)
        self.assertEqual(onnx_model.graph.node[0].op_type, 'Relu')
        self.assertEqual(len(onnx_model.graph.initializer), 1)
        self.assertEqual(onnx_model.graph.initializer[0].name,
                         onnx_model.graph.input[0].name)
Code example #3
0
def onnx_to_caffe2(onnx_model, output, init_net_output):
    """Convert a serialized ONNX model into Caffe2 init and predict nets.

    ``onnx_model`` is a readable binary file object holding a serialized
    ModelProto; the converted nets are serialized to the writable file
    objects ``init_net_output`` and ``output`` respectively.
    """
    proto = onnx_pb2.ModelProto()
    proto.ParseFromString(onnx_model.read())
    init_net, predict_net = c2.onnx_graph_to_caffe2_net(proto.graph)
    # Persist both halves of the Caffe2 model.
    init_net_output.write(init_net.SerializeToString())
    output.write(predict_net.SerializeToString())
Code example #4
0
File: onnx_parser.py  Project: zzzzzzrc/MMdnn
    def _load_model(model_file):
        """Deserialize an ONNX ``ModelProto`` from a protobuf file on disk.

        Parameters
        ----------
        model_file: str
            Path to the serialized ONNX model (protobuf file).

        Returns
        -------
        model: An ONNX ``ModelProto`` message populated from the file.
        """
        # Imported lazily so the module can load without onnx installed.
        from onnx import onnx_pb2
        from mmdnn.conversion.common.IR.IR_graph import load_protobuf_from_file

        proto = onnx_pb2.ModelProto()
        load_protobuf_from_file(proto, model_file)

        print("ONNX model file [%s] loaded successfully." % model_file)
        return proto
Code example #5
0
File: convert.py  Project: hlu1/onnx-coreml
def onnx_to_coreml(onnx_model, output):
    """Convert a serialized ONNX model to Core ML and save it.

    ``onnx_model`` is a readable file object containing a serialized
    ``ModelProto``; ``output`` is the destination path for the .mlmodel.
    """
    proto = onnx_pb2.ModelProto()
    proto.ParseFromString(onnx_model.read())
    convert(proto).save(output)
Code example #6
0
def onnx_to_coreml(onnx_model, output):  # type: (IO[str], str) -> None
    """Read a serialized ONNX ModelProto from ``onnx_model`` and write the
    Core ML conversion to the path ``output``."""
    serialized = onnx_model.read()
    model_proto = onnx_pb2.ModelProto()
    model_proto.ParseFromString(serialized)
    mlmodel = convert(model_proto)
    mlmodel.save(output)