def make_sequence_value_info(
        name,  # type: Text
        elem_type,  # type: int
        shape,  # type: Optional[Sequence[Union[Text, int]]]
        doc_string="",  # type: Text
        elem_shape_denotation=None,  # type: Optional[List[Text]]
):  # type: (...) -> ValueInfoProto
    """Makes a ValueInfoProto based on the data type and shape for Sequence."""
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    if doc_string:
        value_info_proto.doc_string = doc_string
    sequence_type_proto = value_info_proto.type.sequence_type
    sequence_type_proto.elem_type.tensor_type.elem_type = elem_type
    tensor_value_info = make_tensor_value_info(name, elem_type, shape,
                                               doc_string, elem_shape_denotation)
    if shape is not None:
        sequence_type_proto.elem_type.tensor_type.shape.CopyFrom(
            tensor_value_info.type.tensor_type.shape)
    return value_info_proto
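# A minimal usage sketch for the helper above, assuming the standard onnx
# package for TensorProto; the names 'seq_in' and 'N' are illustrative.
from onnx import TensorProto

seq_info = make_sequence_value_info(
    'seq_in', TensorProto.FLOAT, [3, 'N'],
    doc_string='sequence of 3 x N float tensors')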
def make_tensor_value_info(name, elem_type, shape, doc_string=""):
    """Makes a ValueInfoProto based on the data type and shape."""
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    if doc_string:
        value_info_proto.doc_string = doc_string
    tensor_type_proto = value_info_proto.type.tensor_type
    tensor_type_proto.elem_type = elem_type
    tensor_shape_proto = tensor_type_proto.shape
    # You might think this is a no-op (extending a normal Python list by []
    # certainly is), but protobuf lists work a little differently; if a field
    # is never set, it is omitted from the resulting protobuf; a list that is
    # explicitly set to be empty will get an (empty) entry in the protobuf.
    # This difference is visible to our consumers, so make sure we emit an
    # empty shape!
    tensor_shape_proto.dim.extend([])
    for d in shape:
        dim = tensor_shape_proto.dim.add()
        if isinstance(d, integer_types):
            dim.dim_value = d
        elif isinstance(d, text_type):
            dim.dim_param = d
        else:
            raise ValueError(
                'Invalid item in shape: {}. '
                'Needs to be of integer_types or text_type.'.format(d))
    return value_info_proto
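# A short usage sketch (assuming onnx's TensorProto for the element type).
# The rank-0 case illustrates the empty-shape behavior described in the
# comment above: the shape field is present but has zero dims.
from onnx import TensorProto

vec = make_tensor_value_info('vec', TensorProto.FLOAT, [4, 'N'])
scalar = make_tensor_value_info('s', TensorProto.FLOAT, [])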
def _make_value_info(variable):
    value_info = ValueInfoProto()
    value_info.name = variable.full_name
    value_info.type.CopyFrom(  # pylint: disable=E1101
        variable.type.to_onnx_type())  # pylint: disable=E1101
    if variable.type.doc_string:  # pylint: disable=E0611
        value_info.doc_string = variable.type.doc_string  # pragma: no cover
    return value_info
def make_value_info(
        name,  # type: Text
        type_proto,  # type: TypeProto
        doc_string="",  # type: Text
):  # type: (...) -> ValueInfoProto
    """Makes a ValueInfoProto with the given type_proto."""
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    if doc_string:
        value_info_proto.doc_string = doc_string
    value_info_proto.type.CopyFrom(type_proto)
    return value_info_proto
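# A usage sketch, assuming onnx.helper.make_tensor_type_proto (used by the
# make_tensor_value_info variants further down) to build the TypeProto first.
from onnx import TensorProto
from onnx.helper import make_tensor_type_proto

t = make_tensor_type_proto(TensorProto.INT64, [2, 3])
vi = make_value_info('indices', t, doc_string='int64 tensor of shape (2, 3)')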
def test_get_inputs(self):
    model = OnnxModel(
        model_proto=ModelProto(
            graph=GraphProto(
                initializer=[TensorProto(name='y')],
                input=[
                    ValueInfoProto(name='x'),
                    ValueInfoProto(name='y'),
                    ValueInfoProto(name='z')
                ])),
        input_data_formats=[None, None])
    self.assertEqual(model.get_inputs(),
                     [ValueInfoProto(name='x'), ValueInfoProto(name='z')])
def make_value_info(
    name: Text,
    type_proto: TypeProto,
    doc_string: Text = "",
) -> ValueInfoProto:
    """Makes a ValueInfoProto with the given type_proto."""
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    if doc_string:
        value_info_proto.doc_string = doc_string
    value_info_proto.type.CopyFrom(type_proto)
    return value_info_proto
def make_tensor_value_info(
        name,  # type: Text
        elem_type,  # type: TensorProto.DataType
        shape,  # type: Optional[Sequence[int]]
        doc_string="",  # type: Text
        shape_denotation=None,  # type: Optional[List[Text]]
):  # type: (...) -> ValueInfoProto
    """Makes a ValueInfoProto based on the data type and shape."""
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    if doc_string:
        value_info_proto.doc_string = doc_string
    tensor_type_proto = value_info_proto.type.tensor_type
    tensor_type_proto.elem_type = elem_type
    tensor_shape_proto = tensor_type_proto.shape
    if shape is not None:
        # You might think this is a no-op (extending a normal Python
        # list by [] certainly is), but protobuf lists work a little
        # differently; if a field is never set, it is omitted from the
        # resulting protobuf; a list that is explicitly set to be
        # empty will get an (empty) entry in the protobuf. This
        # difference is visible to our consumers, so make sure we emit
        # an empty shape!
        tensor_shape_proto.dim.extend([])
        if shape_denotation:
            if len(shape_denotation) != len(shape):
                raise ValueError(
                    'Invalid shape_denotation. '
                    'Must be of the same length as shape.')
        for i, d in enumerate(shape):
            dim = tensor_shape_proto.dim.add()
            if d is None:
                pass
            elif isinstance(d, integer_types):
                dim.dim_value = d
            elif isinstance(d, text_type):
                dim.dim_param = d
            else:
                raise ValueError(
                    'Invalid item in shape: {}. '
                    'Needs to be of integer_types or text_type.'.format(d))
            if shape_denotation:
                dim.denotation = shape_denotation[i]
    return value_info_proto
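# A sketch exercising the denotation path above (assuming onnx's TensorProto;
# DATA_BATCH/DATA_CHANNEL/DATA_FEATURE are standard ONNX dimension
# denotations). 'N' becomes dim_param, 3 becomes dim_value, and None dims
# stay unknown while still carrying a denotation.
from onnx import TensorProto

img = make_tensor_value_info(
    'image', TensorProto.FLOAT, ['N', 3, None, None],
    shape_denotation=['DATA_BATCH', 'DATA_CHANNEL',
                      'DATA_FEATURE', 'DATA_FEATURE'])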
def make_tensor_value_info(name: str, shape: Sequence[int]) -> IValueInfoProto:
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    tensor_type_proto = value_info_proto.type.tensor_type
    tensor_type_proto.elem_type = DataType.FLOAT
    tensor_shape_proto = tensor_type_proto.shape.dim
    for d in shape:
        dim = tensor_shape_proto.add()
        dim.dim_value = d
    return value_info_proto
def make_sparse_tensor_value_info(
    name: Text,
    elem_type: int,
    shape: Optional[Sequence[Union[Text, int, None]]],
    doc_string: Text = "",
    shape_denotation: Optional[List[Text]] = None,
) -> ValueInfoProto:
    """Makes a SparseTensor ValueInfoProto based on the data type and shape."""
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    if doc_string:
        value_info_proto.doc_string = doc_string
    sparse_tensor_type_proto = make_sparse_tensor_type_proto(
        elem_type, shape, shape_denotation)
    value_info_proto.type.sparse_tensor_type.CopyFrom(
        sparse_tensor_type_proto.sparse_tensor_type)
    return value_info_proto
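# Usage mirrors the dense variant; a minimal sketch assuming onnx's
# TensorProto. The type ends up under type.sparse_tensor_type rather than
# type.tensor_type.
from onnx import TensorProto

sp = make_sparse_tensor_value_info('weights', TensorProto.FLOAT, [1024, 1024])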
def make_tensor_value_info(
        name,  # type: Text
        elem_type,  # type: int
        shape,  # type: Optional[Sequence[Union[Text, int, None]]]
        doc_string="",  # type: Text
        shape_denotation=None,  # type: Optional[List[Text]]
):  # type: (...) -> ValueInfoProto
    """Makes a ValueInfoProto based on the data type and shape."""
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    if doc_string:
        value_info_proto.doc_string = doc_string
    tensor_type_proto = make_tensor_type_proto(elem_type, shape, shape_denotation)
    value_info_proto.type.CopyFrom(tensor_type_proto)
    return value_info_proto
def make_tensor_value_info(
    name: str,
    elem_type: int,
    shape: Optional[Sequence[Union[str, int, None]]],
    doc_string: str = "",
    shape_denotation: Optional[List[str]] = None,
) -> ValueInfoProto:
    """Makes a ValueInfoProto based on the data type and shape."""
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    if doc_string:
        value_info_proto.doc_string = doc_string
    tensor_type_proto = make_tensor_type_proto(elem_type, shape, shape_denotation)
    value_info_proto.type.CopyFrom(tensor_type_proto)
    return value_info_proto
def strip_weights(model):
    graph = model.graph
    # Outputs remain the same
    new_outputs = list(graph.output)
    # Nodes remain the same
    new_nodes = list(graph.node)
    # We replace all initializers with input nodes.
    new_initializers = []
    new_inputs = list(graph.input)
    for node in graph.initializer:
        input = ValueInfoProto()
        input.name = node.name
        # Magic keyword for input nodes belonging to the server
        input.doc_string = "MPC_MODEL_WEIGHTS"
        input.type.tensor_type.elem_type = node.data_type
        for size in node.dims:
            dim = TensorShapeProto.Dimension()
            dim.dim_value = size
            input.type.tensor_type.shape.dim.append(dim)
        new_inputs.append(input)
    new_graph = helper.make_graph(
        new_nodes,
        graph.name,
        new_inputs,
        new_outputs,
        initializer=new_initializers,
        doc_string=graph.doc_string,
        value_info=graph.value_info,
    )
    new_model = helper.make_model(
        new_graph,
        ir_version=model.ir_version,
        doc_string=model.doc_string,
        model_version=model.model_version,
        domain=model.domain,
        producer_name="MPCWeightStripper",
    )
    new_model.metadata_props.extend(model.metadata_props)
    new_model.opset_import.pop()
    new_model.opset_import.extend(model.opset_import)
    return new_model
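# A hedged usage sketch for strip_weights; the file names are illustrative.
# The stripped model declares one extra graph input per former initializer,
# each tagged with doc_string "MPC_MODEL_WEIGHTS".
import onnx

model = onnx.load('model_with_weights.onnx')
onnx.save(strip_weights(model), 'model_stripped.onnx')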
def make_tensor_value_info(name, elem_type, shape, doc_string=""):
    """Makes a ValueInfoProto based on the data type and shape."""
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    if doc_string:
        value_info_proto.doc_string = doc_string
    tensor_type_proto = value_info_proto.type.tensor_type
    tensor_type_proto.elem_type = elem_type
    tensor_shape_proto = tensor_type_proto.shape.dim
    for d in shape:
        dim = tensor_shape_proto.add()
        if isinstance(d, integer_types):
            dim.dim_value = d
        elif isinstance(d, text_type):
            dim.dim_param = d
        else:
            raise ValueError(
                'Invalid item in shape: {}. '
                'Needs to be of integer_types or text_type.'.format(d))
    return value_info_proto
def _add_value_info(self, value_info: onnx.ValueInfoProto):
    if not value_info.HasField("name"):
        raise ValueError("Got value without name")
    name = value_info.name
    if not _nested_HasField(value_info, "type.tensor_type.shape"):
        raise ValueError(
            "Value '{}' does not have a shape in this graph."
            " Please run shape inference before importing.".format(name))
    tensor_type = value_info.type.tensor_type
    if not tensor_type.HasField("elem_type"):
        raise ValueError(
            "Value '{}' does not have a type in this graph."
            " Please run type inference before importing.".format(name))
    shape = []
    for d in tensor_type.shape.dim:
        if d.HasField("dim_value"):
            shape.append(d.dim_value)
        elif d.HasField("dim_param"):
            parsed = pystr_to_symbolic(d.dim_param)
            for sym in parsed.free_symbols:
                if clean_onnx_name(str(sym)) not in self.sdfg.symbols:
                    self.sdfg.add_symbol(clean_onnx_name(str(sym)), stype=int)
                parsed = parsed.subs(
                    sym, dace.symbol(clean_onnx_name(str(sym))))
            shape.append(parsed)
        else:
            raise ValueError(
                "Value '{}' does not have a shape in this graph."
                " Please run shape inference before importing.".format(name))
    transient = name not in self.inputs and name not in self.outputs
    if len(shape) == 0:
        self.sdfg.add_scalar(clean_onnx_name(name),
                             dtype=onnx_tensor_type_to_typeclass(
                                 tensor_type.elem_type),
                             transient=transient)
    else:
        self.sdfg.add_array(clean_onnx_name(name),
                            shape=shape,
                            dtype=onnx_tensor_type_to_typeclass(
                                tensor_type.elem_type),
                            transient=transient)
def make_empty_tensor_value_info(name):  # type: (Text) -> ValueInfoProto
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    return value_info_proto
new_initializers = [
    init for init in model.graph.initializer
    if init.name not in nodes_to_remove and init.name not in inputs_to_remove
]

# 3. Remove nodes
new_nodes = [n for n in model.graph.node if n.name not in nodes_to_remove]

# Get output tensor types to create ValueInfo for output info
# by running the model on a dummy input
temp_model = ModelProto()
temp_model.CopyFrom(model)
for i in new_output_names:
    op = ValueInfoProto()
    op.name = i
    temp_model.graph.output.append(op)
onnx.save(temp_model, "__temp.onnx")
sess = onnxruntime.InferenceSession("__temp.onnx")
sess_inps = sess.get_inputs()
input_dict = {}
for i in sess_inps:
    shape = fix_shape(i.shape, batch_size)
    typ = get_np_type_from_onnxruntime(i.type)
    input_dict[i.name] = np.random.rand(*shape).astype(typ)
output_tensors = sess.run(new_output_names, input_dict)
if os.path.exists("__temp.onnx"):
    os.remove("__temp.onnx")
def make_empty_tensor_value_info(name: Text) -> ValueInfoProto:
    value_info_proto = ValueInfoProto()
    value_info_proto.name = name
    return value_info_proto