Example #1
0
def make_sequence_value_info(
        name,  # type: Text
        elem_type,  # type: int
        shape,  # type: Optional[Sequence[Union[Text, int]]]
        doc_string="",  # type: Text
        elem_shape_denotation=None,  # type: Optional[List[Text]]
):  # type: (...) -> ValueInfoProto
    """Build a ValueInfoProto describing a Sequence whose elements are tensors
    of the given element type and (optional) shape."""
    info = ValueInfoProto()
    info.name = name
    if doc_string:
        info.doc_string = doc_string

    seq_type = info.type.sequence_type
    seq_type.elem_type.tensor_type.elem_type = elem_type

    # Delegate shape construction to the tensor helper, then copy the
    # resulting shape onto the sequence's element type.
    tensor_info = make_tensor_value_info(
        name, elem_type, shape, doc_string, elem_shape_denotation)

    if shape is not None:
        seq_type.elem_type.tensor_type.shape.CopyFrom(
            tensor_info.type.tensor_type.shape)

    return info
Example #2
0
def make_tensor_value_info(name, elem_type, shape, doc_string=""):
    """Makes a ValueInfoProto based on the data type and shape.

    Args:
        name: name of the value.
        elem_type: element type of the tensor (a TensorProto.DataType value).
        shape: iterable of dimensions; each entry is an int (static size)
            or a string (symbolic dimension name).
        doc_string: optional human-readable documentation.

    Returns:
        A populated ValueInfoProto.

    Raises:
        ValueError: if a shape entry is neither an integer nor a string.
    """
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    if doc_string:
        value_info_proto.doc_string = doc_string

    tensor_type_proto = value_info_proto.type.tensor_type
    tensor_type_proto.elem_type = elem_type

    tensor_shape_proto = tensor_type_proto.shape

    # You might think this is a no-op (extending a normal Python list by []
    # certainly is), but protobuf lists work a little differently; if a field is never
    # set, it is omitted from the resulting protobuf; a list that is explicitly
    # set to be empty will get an (empty) entry in the protobuf. This difference
    # is visible to our consumers, so make sure we emit an empty shape!
    tensor_shape_proto.dim.extend([])

    for d in shape:
        dim = tensor_shape_proto.dim.add()
        if isinstance(d, integer_types):
            dim.dim_value = d
        elif isinstance(d, text_type):
            dim.dim_param = d
        else:
            # Fixed grammar in the error message ("Needs to of" -> "Needs to be of").
            raise ValueError(
                'Invalid item in shape: {}. '
                'Needs to be of integer_types or text_type.'.format(d))

    return value_info_proto
Example #3
0
 def _make_value_info(variable):
     """Create a ValueInfoProto (name, ONNX type, optional doc string) for *variable*."""
     info = ValueInfoProto()
     info.name = variable.full_name
     onnx_type = variable.type.to_onnx_type()  # pylint: disable=E1101
     info.type.CopyFrom(onnx_type)  # pylint: disable=E1101
     if variable.type.doc_string:  # pylint: disable=E0611
         info.doc_string = variable.type.doc_string  # pragma: no cover
     return info
Example #4
0
def make_value_info(
        name: Text,
        type_proto: TypeProto,
        doc_string: Text = "",
) -> ValueInfoProto:
    """Wrap the given type_proto in a ValueInfoProto named *name*."""
    info = ValueInfoProto()
    info.name = name
    if doc_string:
        info.doc_string = doc_string
    info.type.CopyFrom(type_proto)
    return info
Example #5
0
def make_value_info(
        name,  # type: Text
        type_proto,  # type: TypeProto
        doc_string="",  # type: Text
):  # type: (...) -> ValueInfoProto
    """Construct a ValueInfoProto carrying the supplied type_proto."""
    result = ValueInfoProto()
    result.name = name
    if doc_string:
        result.doc_string = doc_string
    result.type.CopyFrom(type_proto)
    return result
Example #6
0
def make_tensor_value_info(
        name,  # type: Text
        elem_type,  # type: TensorProto.DataType
        shape,  # type: Optional[Sequence[int]]
        doc_string="",  # type: Text
        shape_denotation=None,  # type: Optional[List[Text]]
):  # type: (...) -> ValueInfoProto
    """Makes a ValueInfoProto based on the data type and shape.

    Args:
        name: name of the value.
        elem_type: element type of the tensor.
        shape: dimensions, or None for an unknown rank. Each entry may be an
            int (static size), a string (symbolic dimension), or None
            (anonymous unknown dimension).
        doc_string: optional human-readable documentation.
        shape_denotation: optional per-dimension denotation strings; must have
            the same length as shape when both are given.

    Returns:
        A populated ValueInfoProto.

    Raises:
        ValueError: if shape_denotation's length differs from shape's, or if
            a shape entry is not an int, string, or None.
    """
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    if doc_string:
        value_info_proto.doc_string = doc_string

    tensor_type_proto = value_info_proto.type.tensor_type
    tensor_type_proto.elem_type = elem_type

    tensor_shape_proto = tensor_type_proto.shape

    if shape is not None:
        # You might think this is a no-op (extending a normal Python
        # list by [] certainly is), but protobuf lists work a little
        # differently; if a field is never set, it is omitted from the
        # resulting protobuf; a list that is explicitly set to be
        # empty will get an (empty) entry in the protobuf. This
        # difference is visible to our consumers, so make sure we emit
        # an empty shape!
        tensor_shape_proto.dim.extend([])

        if shape_denotation:
            if len(shape_denotation) != len(shape):
                raise ValueError(
                    'Invalid shape_denotation. '
                    'Must be of the same length as shape.')

        for i, d in enumerate(shape):
            dim = tensor_shape_proto.dim.add()
            if d is None:
                # Leave the dim unset: an anonymous unknown dimension.
                pass
            elif isinstance(d, integer_types):
                dim.dim_value = d
            elif isinstance(d, text_type):
                dim.dim_param = d
            else:
                # Fixed grammar in the error message ("Needs to of" -> "Needs to be of").
                raise ValueError(
                    'Invalid item in shape: {}. '
                    'Needs to be of integer_types or text_type.'.format(d))

            if shape_denotation:
                dim.denotation = shape_denotation[i]

    return value_info_proto
Example #7
0
def make_tensor_value_info(
        name,  # type: Text
        elem_type,  # type: TensorProto.DataType
        shape,  # type: Optional[Sequence[int]]
        doc_string="",  # type: Text
        shape_denotation=None,  # type: Optional[List[Text]]
):  # type: (...) -> ValueInfoProto
    """Build a ValueInfoProto from an element type and an optional shape,
    with optional per-dimension denotation strings."""
    info = ValueInfoProto()
    info.name = name
    if doc_string:
        info.doc_string = doc_string

    tensor_type = info.type.tensor_type
    tensor_type.elem_type = elem_type
    shape_proto = tensor_type.shape

    if shape is None:
        # No shape given: leave the shape field entirely unset (unknown rank).
        return info

    # Explicitly touch the dim list so that a rank-0 (empty) shape is still
    # serialized: protobuf omits fields that were never set, and a list that
    # was explicitly set to empty does appear in the output. Consumers can
    # see that difference, so force the empty entry here.
    shape_proto.dim.extend([])

    if shape_denotation and len(shape_denotation) != len(shape):
        raise ValueError(
            'Invalid shape_denotation. '
            'Must be of the same length as shape.')

    for idx, entry in enumerate(shape):
        dim = shape_proto.dim.add()
        if isinstance(entry, integer_types):
            dim.dim_value = entry
        elif isinstance(entry, text_type):
            dim.dim_param = entry
        elif entry is not None:
            raise ValueError(
                'Invalid item in shape: {}. '
                'Needs to of integer_types or text_type.'.format(entry))
        # entry is None: leave the dim unset (anonymous unknown dimension).

        if shape_denotation:
            dim.denotation = shape_denotation[idx]

    return info
Example #8
0
def make_sparse_tensor_value_info(
        name: Text,
        elem_type: int,
        shape: Optional[Sequence[Union[Text, int, None]]],
        doc_string: Text = "",
        shape_denotation: Optional[List[Text]] = None,
) -> ValueInfoProto:
    """Build a ValueInfoProto for a SparseTensor with the given type and shape."""
    info = ValueInfoProto()
    info.name = name
    if doc_string:
        info.doc_string = doc_string

    # Build the sparse tensor type via the shared type-proto helper and copy
    # its sparse_tensor_type field into this value info.
    type_proto = make_sparse_tensor_type_proto(elem_type, shape, shape_denotation)
    info.type.sparse_tensor_type.CopyFrom(type_proto.sparse_tensor_type)
    return info
Example #9
0
def make_tensor_value_info(
        name,  # type: Text
        elem_type,  # type: int
        shape,  # type: Optional[Sequence[Union[Text, int, None]]]
        doc_string="",  # type: Text
        shape_denotation=None,  # type: Optional[List[Text]]
):  # type: (...) -> ValueInfoProto
    """Create a ValueInfoProto for a tensor of the given type and shape."""
    info = ValueInfoProto()
    info.name = name
    if doc_string:
        info.doc_string = doc_string

    # The heavy lifting (shape validation, denotation) lives in the
    # shared type-proto helper.
    info.type.CopyFrom(
        make_tensor_type_proto(elem_type, shape, shape_denotation))
    return info
Example #10
0
def make_tensor_value_info(
    name: str,
    elem_type: int,
    shape: Optional[Sequence[Union[str, int, None]]],
    doc_string: str = "",
    shape_denotation: Optional[List[str]] = None,
) -> ValueInfoProto:
    """Build a ValueInfoProto for a tensor from its element type and shape."""
    info = ValueInfoProto()
    info.name = name
    if doc_string:
        info.doc_string = doc_string

    # Delegate type construction (including shape/denotation validation)
    # to the shared helper.
    type_proto = make_tensor_type_proto(elem_type, shape, shape_denotation)
    info.type.CopyFrom(type_proto)
    return info
Example #11
0
def strip_weights(model):
    """Return a copy of *model* with every initializer turned into a graph input.

    The initializer tensors (the actual weight data) are dropped; each one is
    replaced by an input ValueInfoProto with the same name, element type and
    shape, tagged with the doc string "MPC_MODEL_WEIGHTS" so downstream code
    can identify which inputs the server must supply.

    Args:
        model: an onnx ModelProto.

    Returns:
        A new ModelProto with no initializers and the extra weight inputs.
    """
    graph = model.graph

    # Outputs and nodes are carried over unchanged.
    new_outputs = list(graph.output)
    new_nodes = list(graph.node)

    # Deliberately empty: all weights are stripped from the new model.
    new_initializers = []

    # Replace every initializer with an equivalent input node.
    # (Renamed the loop variable from `input`, which shadowed the builtin.)
    new_inputs = list(graph.input)
    for initializer in graph.initializer:
        weight_input = ValueInfoProto()
        weight_input.name = initializer.name
        # Magic keyword for input nodes belonging to server
        weight_input.doc_string = "MPC_MODEL_WEIGHTS"
        weight_input.type.tensor_type.elem_type = initializer.data_type
        for size in initializer.dims:
            dim = TensorShapeProto.Dimension()
            dim.dim_value = size
            weight_input.type.tensor_type.shape.dim.append(dim)
        new_inputs.append(weight_input)

    new_graph = helper.make_graph(
        new_nodes,
        graph.name,
        new_inputs,
        new_outputs,
        initializer=new_initializers,
        doc_string=graph.doc_string,
        value_info=graph.value_info,
    )
    new_model = helper.make_model(
        new_graph,
        ir_version=model.ir_version,
        doc_string=model.doc_string,
        model_version=model.model_version,
        domain=model.domain,
        producer_name="MPCWeightStripper",
    )
    new_model.metadata_props.extend(model.metadata_props)
    # Replace the opset entry with the original model's opset imports.
    # NOTE(review): this assumes helper.make_model appended exactly one
    # default opset entry to pop — confirm against the onnx version in use.
    new_model.opset_import.pop()
    new_model.opset_import.extend(model.opset_import)
    return new_model
Example #12
0
def make_tensor_value_info(name, elem_type, shape, doc_string=""):
    """Build a ValueInfoProto for a tensor from its element type and shape.

    Each shape entry must be an int (static size) or a string (symbolic
    dimension name); anything else raises ValueError.
    """
    info = ValueInfoProto()
    info.name = name
    if doc_string:
        info.doc_string = doc_string

    tensor_type = info.type.tensor_type
    tensor_type.elem_type = elem_type

    dims = tensor_type.shape.dim
    for entry in shape:
        new_dim = dims.add()
        if isinstance(entry, integer_types):
            new_dim.dim_value = entry
        elif isinstance(entry, text_type):
            new_dim.dim_param = entry
        else:
            raise ValueError(
                'Invalid item in shape: {}. '
                'Needs to of integer_types or text_type.'.format(entry))

    return info