def addSoftMax(inp, out, graph_def):
    """Append a Softmax node named `out` consuming `inp` to `graph_def`."""
    node = NodeDef()
    node.name = out
    node.op = 'Softmax'
    node.input.append(inp)
    # axis -1: softmax over the innermost dimension
    text_format.Merge('i: -1', node.attr['axis'])
    graph_def.node.extend([node])
def addConcatNode(name, inputs):
    """Append a ConcatV2 node named `name` joining `inputs`.

    NOTE(review): relies on module-level `concatAxis` (axis const node) and
    `graph_def`.
    """
    node = NodeDef()
    node.name = name
    node.op = 'ConcatV2'
    node.input.extend(inputs)
    node.input.append(concatAxis.name)
    graph_def.node.extend([node])
def const_node(dtype, value, shape):
    """Build a Const NodeDef of `dtype`/`shape` filled with `value`.

    Only np.float32 and np.int32 payloads are supported.
    """
    node = NodeDef()
    node.op = 'Const'
    enum_dtype = dtype.as_datatype_enum
    node.attr['dtype'].type = enum_dtype
    tensor = node.attr['value'].tensor
    tensor.dtype = enum_dtype
    tensor.tensor_shape.CopyFrom(as_shape(shape).as_proto())
    if value.dtype == np.float32:
        _extend(tensor.float_val, value)
    elif value.dtype == np.int32:
        _extend(tensor.int_val, value)
    else:
        raise Exception('const_node, unknown dtype {}'.format(value.dtype))
    return node
def build(self):
    """Register every model layer, stub unknown inputs, and wire edges."""
    for layer in self.model.node:
        name = layer.name
        self.layer_map[name] = TensorflowGraphNode(layer)
        self.layer_name_map[name] = name
        for pred in layer.input:
            if pred not in self.layer_map:
                # Predecessor not (yet) registered: stand in a NoOp node so
                # the connection can still be recorded.
                stub = NodeDef()
                stub.name = pred
                stub.op = "NoOp"
                self.layer_map[pred] = TensorflowGraphNode(stub)
                self.layer_name_map[pred] = pred
            self._make_connection(pred, name)
    super(TensorflowGraph, self).build()
def make_op_node(op_name: Text, inputs: Inputs, name: Text = None) -> NodeDef:
    """
    Create a TF graph node given the operation, input, and a name.

    The resulting node definition won't include any operation-specific
    attributes. It returns a valid node for most operations, though.

    Args:
        op_name: Native TF operation name (e.g. "MatMul")
        inputs: Input node, node name, or list of inputs nodes or node names
        name: Node name in the graph, must be unique and defaults to the
            operation name

    Returns:
        TF graph node definition for the given operation, inputs, and name
    """
    # Normalise scalar input to a list; COPY the list so the caller's list is
    # never mutated (the previous version rewrote the caller's items in place).
    if isinstance(inputs, list):
        input_list = list(inputs)
    else:
        input_list = [inputs]

    # Convert node objects to their names
    input_list = [item.name if hasattr(item, 'name') else item
                  for item in input_list]

    # Generate node definition; 'T' defaults to float32
    dtype = dtypes.float32.as_datatype_enum
    node_def = NodeDef(op=op_name, name=name or op_name,
                       attr={'T': AttrValue(type=dtype)})
    node_def.input.extend(input_list)
    return node_def
def make_const_node(data: Tensor, name: str = None) -> NodeDef:
    """
    Build a Const graph node wrapping a numpy array.

    The resulting node is equivalent to using `tf.constant` on the
    default graph.

    Args:
        data: Numpy-array containing the data, shape, and datatype
        name: Optional name of the node

    Returns:
        Graph node for adding to a TF Graph instance
    """
    dtype = as_dtype(data.dtype).as_datatype_enum
    shape_proto = TensorShapeProto(
        dim=[TensorShapeProto.Dim(size=d) for d in data.shape])
    tensor_proto = TensorProto(dtype=dtype,
                               tensor_shape=shape_proto,
                               tensor_content=data.tobytes())
    return NodeDef(op='Const', name=name or 'Const',
                   attr={
                       'dtype': AttrValue(type=dtype),
                       'value': AttrValue(tensor=tensor_proto)
                   })
def build(self):
    # First pass: register every real layer of the model so that the
    # membership test below can tell real layers apart from external inputs.
    for i, layer in enumerate(self.model.node):
        self.layer_map[layer.name] = TensorflowGraphNode(layer)
    # Second pass: record name mappings and wire up edges from each layer's
    # inputs.
    # NOTE(review): re-creating TensorflowGraphNode(layer) here overwrites the
    # wrapper built in the first pass - this looks redundant, but whether it is
    # safe to remove depends on whether _make_connection holds references to
    # the node objects; confirm before changing.
    for i, layer in enumerate(self.model.node):
        self.layer_map[layer.name] = TensorflowGraphNode(layer)
        self.layer_name_map[layer.name] = layer.name
        for pred in layer.input:
            if pred not in self.layer_map:
                # Input not produced by any model layer: stub it with a NoOp
                # node so the connection can still be recorded.
                new_node = NodeDef()
                new_node.name = pred
                new_node.op = "NoOp"
                self.layer_map[pred] = TensorflowGraphNode(new_node)
                self.layer_name_map[pred] = pred
            self._make_connection(pred, layer.name)
    super(TensorflowGraph, self).build()
def addReshape(inp, out, shape, graph_def):
    """Append a Const node holding `shape` and a Reshape node named `out`."""
    constNode = NodeDef()
    constNode.name = out + '/shape'
    constNode.op = 'Const'
    text_format.Merge(tensorMsg(shape), constNode.attr["value"])
    graph_def.node.extend([constNode])

    node = NodeDef()
    node.name = out
    node.op = 'Reshape'
    node.input.extend([inp, constNode.name])
    graph_def.node.extend([node])
def update_graph_def(input_graph_def: GraphDef,
                     nodes_to_remap: Dict[Text, List[NodeDef]],
                     inputs_to_replace: Dict[Text, Text]) -> GraphDef:
    """
    Update a TF graph_def by replacing nodes and node inputs.

    There will be no consistency check in this function. Callers have to
    make sure the given remappings and input replacements result in a
    valid graph.

    Args:
        input_graph_def: TF graph_def with nodes or node inputs to replace
        nodes_to_remap: `dict` that maps node names to a list of replacement
            nodes. Nodes whose name map to an empty list, will be removed from
            the returned graph. Nodes that are not in the input graph_def but
            have an entry in the remap dict, will be ignored.
        inputs_to_replace: `dict` that maps node names to replacement names.
            Nodes that have been removed need to be replaced in all referenced
            graph nodes. This mapping can be used to make sure this happens.

    Returns:
        An updated copy of the input graph_def. The original inputs remains
        unchanged.
    """
    result_graph_def = GraphDef()
    for node in input_graph_def.node:
        if node.name in nodes_to_remap:
            replacements = nodes_to_remap[node.name]
            # An empty (or None) replacement list drops the node entirely.
            # (Was `replacements and len(replacements) > 0` - redundant.)
            if replacements:
                result_graph_def.node.extend(replacements)
            continue
        # Copy the node and rewrite any inputs that point at replaced nodes.
        new_node = NodeDef()
        new_node.CopyFrom(node)
        for i, input_node in enumerate(new_node.input):
            if input_node in inputs_to_replace:
                new_node.input[i] = inputs_to_replace[input_node]
        result_graph_def.node.extend([new_node])
    result_graph_def.versions.CopyFrom(input_graph_def.versions)
    return result_graph_def
def addSlice(inp, out, begins, sizes, graph_def):
    """Append Const begins/sizes nodes plus a Slice node named `out`."""
    def _add_const(suffix, values):
        # One Const node per slice parameter, named relative to `out`.
        node = NodeDef()
        node.name = out + suffix
        node.op = 'Const'
        text_format.Merge(tensorMsg(values), node.attr["value"])
        graph_def.node.extend([node])
        return node.name

    beginsName = _add_const('/begins', begins)
    sizesName = _add_const('/sizes', sizes)

    sliceNode = NodeDef()
    sliceNode.name = out
    sliceNode.op = 'Slice'
    sliceNode.input.extend([inp, beginsName, sizesName])
    graph_def.node.extend([sliceNode])
# Reshape first-stage class predictions to [N, -1, 2] and apply softmax to
# obtain background/object scores per anchor.
addReshape('FirstStageBoxPredictor/ClassPredictor/BiasAdd',
           'FirstStageBoxPredictor/ClassPredictor/reshape_1', [0, -1, 2], graph_def)
addSoftMax('FirstStageBoxPredictor/ClassPredictor/reshape_1',
           'FirstStageBoxPredictor/ClassPredictor/softmax', graph_def)  # Compare with Reshape_4

addFlatten('FirstStageBoxPredictor/ClassPredictor/softmax',
           'FirstStageBoxPredictor/ClassPredictor/softmax/flatten', graph_def)

# Compare with FirstStageBoxPredictor/BoxEncodingPredictor/BiasAdd
addFlatten('FirstStageBoxPredictor/BoxEncodingPredictor/BiasAdd',
           'FirstStageBoxPredictor/BoxEncodingPredictor/flatten', graph_def)

# PriorBox layer generating anchor proposals from the box encodings and the
# network input image.
proposals = NodeDef()
proposals.name = 'proposals'  # Compare with ClipToWindow/Gather/Gather (NOTE: normalized)
proposals.op = 'PriorBox'
proposals.input.append('FirstStageBoxPredictor/BoxEncodingPredictor/BiasAdd')
proposals.input.append(graph_def.node[0].name)  # image_tensor
text_format.Merge('b: false', proposals.attr["flip"])
text_format.Merge('b: true', proposals.attr["clip"])
text_format.Merge('f: %f' % args.features_stride, proposals.attr["step"])
text_format.Merge('f: 0.0', proposals.attr["offset"])
text_format.Merge(tensorMsg([0.1, 0.1, 0.2, 0.2]), proposals.attr["variance"])

# Enumerate anchor widths/heights for every aspect-ratio/scale pair.
# NOTE(review): the loop body continues beyond this chunk.
widths = []
heights = []
for a in args.aspect_ratios:
    for s in args.scales:
def addFlatten(inp, out, graph_def):
    """Append a Flatten node named `out` consuming `inp` to `graph_def`."""
    node = NodeDef()
    node.name = out
    node.op = 'Flatten'
    node.input.append(inp)
    graph_def.node.extend([node])
    # (tail of addConcatNode(name, inputs, axisNodeName) - the def line is in
    # a preceding chunk)
    concat = NodeDef()
    concat.name = name
    concat.op = 'ConcatV2'
    for inp in inputs:
        concat.input.append(inp)
    concat.input.append(axisNodeName)
    graph_def.node.extend([concat])

# Axis constants used by the concat nodes below.
addConstNode('concat/axis_flatten', [-1])
addConstNode('PriorBox/concat/axis', [-2])

# Flatten per-layer class and box predictions, then concatenate them across
# all feature-map layers.
for label in ['ClassPredictor', 'BoxEncodingPredictor']:
    concatInputs = []
    for i in range(args.num_layers):
        # Flatten predictions
        flatten = NodeDef()
        inpName = 'BoxPredictor_%d/%s/BiasAdd' % (i, label)
        flatten.input.append(inpName)
        flatten.name = inpName + '/Flatten'
        flatten.op = 'Flatten'
        concatInputs.append(flatten.name)
        graph_def.node.extend([flatten])
    addConcatNode('%s/concat' % label, concatInputs, 'concat/axis_flatten')

# Add layers that generate anchors (bounding boxes proposals).
# Evenly spaced scales between min_scale and max_scale, plus 1.0 for the
# last layer.
scales = [args.min_scale + (args.max_scale - args.min_scale) * i / (args.num_layers - 1)
          for i in range(args.num_layers)] + [1.0]
priorBoxes = []
addConstNode('reshape_prior_boxes_to_4d', [1, 2, -1, 1])
del node.input[i] # Connect input node to the first layer assert (graph_def.node[0].op == 'Placeholder') # assert(graph_def.node[1].op == 'Conv2D') weights = graph_def.node[1].input[0] for i in range(len(graph_def.node[1].input)): graph_def.node[1].input.pop() graph_def.node[1].input.append(graph_def.node[0].name) graph_def.node[1].input.append(weights) # Create SSD postprocessing head ############################################### # Concatenate predictions of classes, predictions of bounding boxes and proposals. concatAxis = NodeDef() concatAxis.name = 'concat/axis_flatten' concatAxis.op = 'Const' text_format.Merge( 'tensor {' ' dtype: DT_INT32' ' tensor_shape { }' ' int_val: -1' '}', concatAxis.attr["value"]) graph_def.node.extend([concatAxis]) def addConcatNode(name, inputs): concat = NodeDef() concat.name = name concat.op = 'ConcatV2'
def addConstNode(name, values):
    """Append a Const node named `name` holding `values`.

    NOTE(review): relies on module-level `graph_def`.
    """
    const = NodeDef()
    const.name = name
    const.op = 'Const'
    text_format.Merge(tensorMsg(values), const.attr["value"])
    graph_def.node.extend([const])
    # (body of a node clean-up loop - its header is in a preceding chunk)
    if graph_def.node[i].op in ['Const', 'Dequantize']:
        # NOTE(review): after this del, graph_def.node[i] refers to the NEXT
        # node in the attr loop below - confirm this is intended.
        del graph_def.node[i]
    # Strip TensorFlow-specific attributes that the importer does not need.
    for attr in ['T', 'data_format', 'Tshape', 'N', 'Tidx', 'Tdim',
                 'use_cudnn_on_gpu', 'Index', 'Tperm', 'is_training',
                 'Tpaddings', 'Tblock_shape', 'Tcrops']:
        if attr in graph_def.node[i].attr:
            del graph_def.node[i].attr[attr]

# Append prior box generators
# Per-layer SSD anchor parameters (sizes and steps, presumably in pixels -
# TODO confirm against the training config).
min_sizes = [30, 60, 111, 162, 213, 264]
max_sizes = [60, 111, 162, 213, 264, 315]
steps = [8, 16, 32, 64, 100, 300]
aspect_ratios = [[2], [2, 3], [2, 3], [2, 3], [2], [2]]
layers = [conv4_3_norm, fc7, conv6_2_h, conv7_2_h, conv8_2_h, conv9_2_h]
for i in range(6):
    priorBox = NodeDef()
    priorBox.name = 'PriorBox_%d' % i
    priorBox.op = 'PriorBox'
    # Strip the ':0' tensor suffix to reference the producing node.
    priorBox.input.append(layers[i].name[:layers[i].name.find(':')])
    priorBox.input.append(inp_nodes[0])  # data
    text_format.Merge('i: %d' % min_sizes[i], priorBox.attr["min_size"])
    text_format.Merge('i: %d' % max_sizes[i], priorBox.attr["max_size"])
    text_format.Merge('b: true', priorBox.attr["flip"])
    text_format.Merge('b: false', priorBox.attr["clip"])
    text_format.Merge(tensorMsg(aspect_ratios[i]), priorBox.attr["aspect_ratio"])
    text_format.Merge(tensorMsg([0.1, 0.1, 0.2, 0.2]), priorBox.attr["variance"])
    text_format.Merge('f: %f' % steps[i], priorBox.attr["step"])
    text_format.Merge('f: 0.5', priorBox.attr["offset"])
    graph_def.node.extend([priorBox])
    # (tail of addConcatNode - its def line and `concat = NodeDef()` are in a
    # preceding chunk)
    concat.name = name
    concat.op = 'ConcatV2'
    for inp in inputs:
        concat.input.append(inp)
    concat.input.append(axisNodeName)
    graph_def.node.extend([concat])

# Axis constants used by the concat nodes below.
addConstNode('concat/axis_flatten', [-1])
addConstNode('PriorBox/concat/axis', [-2])

# Flatten per-layer class and box predictions, then concatenate them across
# all feature-map layers.
for label in ['ClassPredictor', 'BoxEncodingPredictor']:
    concatInputs = []
    for i in range(args.num_layers):
        # Flatten predictions
        flatten = NodeDef()
        inpName = 'BoxPredictor_%d/%s/BiasAdd' % (i, label)
        flatten.input.append(inpName)
        flatten.name = inpName + '/Flatten'
        flatten.op = 'Flatten'
        concatInputs.append(flatten.name)
        graph_def.node.extend([flatten])
    addConcatNode('%s/concat' % label, concatInputs, 'concat/axis_flatten')

# Add layers that generate anchors (bounding boxes proposals).
# Evenly spaced scales between min_scale and max_scale, plus 1.0 for the
# last layer.
scales = [
    args.min_scale + (args.max_scale - args.min_scale) * i / (args.num_layers - 1)
    for i in range(args.num_layers)
] + [1.0]
def quantize_graph_def(graph_def, skip=None, output_nodes=None, rel_tol=None, only=None):
    """Quantize large constant tensors of a graph.

    Large constants are replaced by Placeholder nodes and emitted as
    QuantizedItem entries; everything else is copied through unchanged.

    :type graph_def: GraphDef
    :type skip: set|list
    :type output_nodes: list
    :type output_nodes: when given, the graph is first trimmed to the
        sub-graph reachable from these nodes
    :type rel_tol: float
    :type only: str
    :return: QuantizedGraph
    """
    # (Was `output_nodes is not None and len(output_nodes) > 0` - redundant.)
    if output_nodes:
        graph_def = extract_sub_graph(graph_def, output_nodes)
    nodes = []
    items = []
    for node in graph_def.node:
        # Keep nodes the caller explicitly asked to skip.
        if should_skip(node, skip):
            nodes.append(node)
            continue
        # Only constant tensors can be quantized; MakeNdarray raises
        # TypeError for nodes without a usable 'value' tensor.
        try:
            value = MakeNdarray(node.attr['value'].tensor)  # type: np.ndarray
        except TypeError:
            nodes.append(node)
            continue
        # A tensor whose elements are all (approximately) equal collapses to
        # a one-element const node instead of being quantized.
        same_value = all_same_value(value, rel_tol)
        if same_value is not None:
            nodes.append(
                const_node(node.attr['dtype'].type,
                           np.array([same_value], dtype=value.dtype),
                           value.shape))
            continue
        # Small tensors are not worth quantizing.
        if value.size < 4096:
            nodes.append(node)
            continue
        # Replace the constant with a placeholder of the same dtype/shape...
        placeholder = NodeDef()
        placeholder.name = node.name
        placeholder.op = 'Placeholder'
        placeholder.attr['dtype'].type = node.attr['dtype'].type
        placeholder.attr['shape'].shape.CopyFrom(
            as_shape(value.shape).as_proto())
        nodes.append(placeholder)
        # ...and emit the quantized payload as a separate item.
        item = QuantizedItem()
        item.name = node.name
        item.dtype = node.attr['dtype'].type
        item.shape.extend(value.shape)
        print('quantize {}'.format(node.name))
        _fill(item, value, only=only)
        items.append(item)
    graph = QuantizedGraph()
    graph.graph.versions.CopyFrom(graph_def.versions)
    graph.graph.library.CopyFrom(graph_def.library)
    graph.graph.node.extend(nodes)
    graph.items.extend(items)
    return graph
del node.input[i] # Connect input node to the first layer assert(graph_def.node[0].op == 'Placeholder') # assert(graph_def.node[1].op == 'Conv2D') weights = graph_def.node[1].input[0] for i in range(len(graph_def.node[1].input)): graph_def.node[1].input.pop() graph_def.node[1].input.append(graph_def.node[0].name) graph_def.node[1].input.append(weights) # Create SSD postprocessing head ############################################### # Concatenate predictions of classes, predictions of bounding boxes and proposals. concatAxis = NodeDef() concatAxis.name = 'concat/axis_flatten' concatAxis.op = 'Const' text_format.Merge( 'tensor {' ' dtype: DT_INT32' ' tensor_shape { }' ' int_val: -1' '}', concatAxis.attr["value"]) graph_def.node.extend([concatAxis]) def addConcatNode(name, inputs): concat = NodeDef() concat.name = name concat.op = 'ConcatV2' for inp in inputs:
def addSoftMax(inp, out):
    # Append a Softmax node named `out` over the innermost axis.
    # NOTE(review): reads module-level `graph_def`.
    softmax = NodeDef()
    softmax.name = out
    softmax.op = 'Softmax'
    text_format.Merge('i: -1', softmax.attr['axis'])
    softmax.input.append(inp)
    graph_def.node.extend([softmax])

# Reshape first-stage class predictions to [N, -1, 2] and convert logits to
# per-anchor scores.
addReshape('FirstStageBoxPredictor/ClassPredictor/BiasAdd',
           'FirstStageBoxPredictor/ClassPredictor/reshape_1', [0, -1, 2])
addSoftMax('FirstStageBoxPredictor/ClassPredictor/reshape_1',
           'FirstStageBoxPredictor/ClassPredictor/softmax')  # Compare with Reshape_4

flatten = NodeDef()
flatten.name = 'FirstStageBoxPredictor/BoxEncodingPredictor/flatten'
# Compare with FirstStageBoxPredictor/BoxEncodingPredictor/BiasAdd
flatten.op = 'Flatten'
flatten.input.append('FirstStageBoxPredictor/BoxEncodingPredictor/BiasAdd')
graph_def.node.extend([flatten])

# PriorBox layer generating anchor proposals from the box encodings and the
# network input image.
proposals = NodeDef()
proposals.name = 'proposals'  # Compare with ClipToWindow/Gather/Gather (NOTE: normalized)
proposals.op = 'PriorBox'
proposals.input.append('FirstStageBoxPredictor/BoxEncodingPredictor/BiasAdd')
proposals.input.append(graph_def.node[0].name)  # image_tensor
text_format.Merge('b: false', proposals.attr["flip"])
text_format.Merge('b: true', proposals.attr["clip"])
text_format.Merge('f: %f' % args.features_stride, proposals.attr["step"])
text_format.Merge('f: 0.0', proposals.attr["offset"])
    # (body of a node clean-up loop - its header is in a preceding chunk)
    # Strip TensorFlow-specific attributes that the importer does not need.
    for attr in [
        'T', 'data_format', 'Tshape', 'N', 'Tidx', 'Tdim', 'use_cudnn_on_gpu',
        'Index', 'Tperm', 'is_training', 'Tpaddings', 'Tblock_shape', 'Tcrops'
    ]:
        if attr in graph_def.node[i].attr:
            del graph_def.node[i].attr[attr]

# Append prior box generators
# Per-layer SSD anchor parameters (sizes and steps, presumably in pixels -
# TODO confirm against the training config).
min_sizes = [30, 60, 111, 162, 213, 264]
max_sizes = [60, 111, 162, 213, 264, 315]
steps = [8, 16, 32, 64, 100, 300]
aspect_ratios = [[2], [2, 3], [2, 3], [2, 3], [2], [2]]
layers = [conv4_3_norm, fc7, conv6_2_h, conv7_2_h, conv8_2_h, conv9_2_h]
# NOTE(review): the loop body continues beyond this chunk.
for i in range(6):
    priorBox = NodeDef()
    priorBox.name = 'PriorBox_%d' % i
    priorBox.op = 'PriorBox'
    # Strip the ':0' tensor suffix to reference the producing node.
    priorBox.input.append(layers[i].name[:layers[i].name.find(':')])
    priorBox.input.append(inp_nodes[0])  # data
    text_format.Merge('i: %d' % min_sizes[i], priorBox.attr["min_size"])
    text_format.Merge('i: %d' % max_sizes[i], priorBox.attr["max_size"])
    text_format.Merge('b: true', priorBox.attr["flip"])
    text_format.Merge('b: false', priorBox.attr["clip"])
    text_format.Merge(tensorMsg(aspect_ratios[i]), priorBox.attr["aspect_ratio"])
    text_format.Merge(tensorMsg([0.1, 0.1, 0.2, 0.2]), priorBox.attr["variance"])
    text_format.Merge('f: %f' % steps[i], priorBox.attr["step"])
    text_format.Merge('f: 0.5', priorBox.attr["offset"])
def addConstNode(name, values, graph_def):
    """Append a Const node named `name` holding `values` to `graph_def`."""
    const = NodeDef()
    const.name = name
    const.op = 'Const'
    text_format.Merge(tensorMsg(values), const.attr["value"])
    graph_def.node.extend([const])
        # (inside a loop whose header is in a preceding chunk)
        break

# Reshape first-stage class predictions to [N, -1, 2] and apply softmax to
# obtain background/object scores per anchor.
addReshape('FirstStageBoxPredictor/ClassPredictor/BiasAdd',
           'FirstStageBoxPredictor/ClassPredictor/reshape_1', [0, -1, 2], graph_def)
addSoftMax('FirstStageBoxPredictor/ClassPredictor/reshape_1',
           'FirstStageBoxPredictor/ClassPredictor/softmax', graph_def)  # Compare with Reshape_4

addFlatten('FirstStageBoxPredictor/ClassPredictor/softmax',
           'FirstStageBoxPredictor/ClassPredictor/softmax/flatten', graph_def)

# Compare with FirstStageBoxPredictor/BoxEncodingPredictor/BiasAdd
addFlatten('FirstStageBoxPredictor/BoxEncodingPredictor/BiasAdd',
           'FirstStageBoxPredictor/BoxEncodingPredictor/flatten', graph_def)

# PriorBox layer generating anchor proposals from the box encodings and the
# network input image.
proposals = NodeDef()
proposals.name = 'proposals'  # Compare with ClipToWindow/Gather/Gather (NOTE: normalized)
proposals.op = 'PriorBox'
proposals.input.append('FirstStageBoxPredictor/BoxEncodingPredictor/BiasAdd')
proposals.input.append(graph_def.node[0].name)  # image_tensor
text_format.Merge('b: false', proposals.attr["flip"])
text_format.Merge('b: true', proposals.attr["clip"])
text_format.Merge('f: %f' % args.features_stride, proposals.attr["step"])
text_format.Merge('f: 0.0', proposals.attr["offset"])
text_format.Merge(tensorMsg([0.1, 0.1, 0.2, 0.2]), proposals.attr["variance"])

# Enumerate anchor widths/heights for every aspect-ratio/scale pair.
# NOTE(review): the loop body continues beyond this chunk.
widths = []
heights = []
for a in args.aspect_ratios:
    for s in args.scales:
def _replace_input_nodes(inputs_to_replace: Dict[Text, Text],
                         new_node: NodeDef) -> None:
    """Rewrite any of the node's inputs that appear in the replacement map."""
    for idx in range(len(new_node.input)):
        original_name = new_node.input[idx]
        if original_name in inputs_to_replace:
            new_node.input[idx] = inputs_to_replace[original_name]