def find_and_replace_pattern(self, graph: Graph):
    """Cut the graph at user-requested outputs and clean up fake output nodes.

    Fake outputs are attached during model loading solely to keep tensor-name
    information for output nodes. Each such node (marked `needs_removal`) is
    erased here, and its 'fw_tensor_debug_info' edge attribute is re-attached
    to the producer's surviving outgoing edges (the output->Result edges).
    """
    add_output_ops(graph, graph.graph['packed_outputs'], inputs=graph.graph['user_shapes'])

    for fake_output in graph.get_op_nodes(needs_removal=True):
        debug_info = None
        producer = None
        # Find the incoming edge that carries tensor-name debug info.
        for port in fake_output.in_edges():
            idx = fake_output.in_edge(port)['in']
            if idx not in fake_output.in_nodes():
                continue
            producer = fake_output.in_node(idx)
            candidate = get_edge_attribute_between_nodes(producer, fake_output,
                                                         'fw_tensor_debug_info')
            if candidate:
                debug_info = candidate
                break
        graph.erase_node(fake_output)

        # Re-attach the tensor-name info to every surviving producer->consumer edge.
        if debug_info is not None and producer is not None:
            for out_idx in producer.out_nodes():
                set_edge_attribute_between_nodes(producer, producer.out_node(out_idx),
                                                 'fw_tensor_debug_info', debug_info)
def replace_sub_graph(self, graph: Graph, match: dict):
    """Remove the Flatten layer following a _contrib_MultiBoxPrior node.

    IE does not expect the MultiBoxPrior output to be flattened, so every
    matched Flatten node is simply erased from the graph (its producer is
    reconnected to its consumers by `erase_node`).

    Parameters
    ----------
    graph : Graph
        Graph with loaded model.
    match : dict
        Patterns which were found in graph structure.
    """
    graph.erase_node(match['flatten'])
def replace_sub_graph(graph: Graph, match: dict, **kwargs):
    r"""Fold a TF FIFOQueueV2 input pattern into a single Parameter node.

    Usually graph looks like:

       main_graph
            ...             Result
             |                 |
       image_batch      label_batch
              \        /
               batch_join
              /        \
      placeholder      fifo_queue

    Replacer works for both cases (that's why we have loop - 68 line):
        label_batch was marked as output
        there is no label_batch node
    """
    user_shape = match['placeholder'].shape
    queue_shape = match['fifo_queue'].shapes[0]
    queue_type = match['fifo_queue'].types[0]

    # In the OOB-conversion case the batch_size placeholder shape is not
    # required, so the shape from the FIFOQueueV2 'shapes' attribute is used.
    assert user_shape is None or user_shape.ndim <= 1
    if user_shape is not None and user_shape.ndim == 1 and len(user_shape) > 1:
        log.warning(
            'Placeholder \'{}\' got non 0-dimensional shape {} in FIFOQueue pattern. Placeholder will have the '
            'same shape after folding the pattern instead of {} shape which is original for the network.'
            ''.format(match['placeholder'].id, user_shape, queue_shape))
        queue_shape = user_shape

    new_name = match['fifo_queue'].name
    graph.erase_node(match['fifo_queue'])
    graph.erase_node(match['placeholder'])

    # Drop the label branch of batch_join (and its Result node, if that
    # branch was marked as an output).
    for _, consumer in match['batch_join'].out_nodes().items():
        if consumer.id == match['image_batch'].id:
            continue
        if consumer.out_node().op == 'Result':
            graph.remove_node(consumer.out_node().id)
        graph.remove_node(consumer.id)
    graph.remove_node(match['batch_join'].id)

    new_placeholder = Parameter(graph, {
        'name': new_name,
        'shape': queue_shape,
        'data_type': queue_type,
    }).create_node()
    graph.create_edge(new_placeholder, match['image_batch'])
    log.info(
        "FIFOQueueV2 pattern was detected. New shape of placeholder {} is {}. Use -b to set batch size if "
        "needed".format(new_placeholder.id, new_placeholder['shape']))
def replace_sub_graph(self, graph: Graph, match: dict):
    """Replace a frozen Placeholder with a Const node holding the user value.

    The value comes from graph.graph['freeze_placeholder'] (the
    --freeze_placeholder_with_value command-line option). The string value is
    cast to the placeholder's data type, reshaped to the placeholder's shape,
    and a Const node is wired to all former consumers of the placeholder.

    Raises
    ------
    Error
        If the value cannot be cast to the target data type or cannot be
        reshaped to the placeholder shape.
    """
    ph = match['placeholder']
    if ph.name not in graph.graph['freeze_placeholder']:
        return
    name = ph.name
    if ph.has_and_set('data_type'):
        data_type = ph.data_type
    else:
        data_type = SUPPORTED_DATA_TYPES[graph.graph['cmd_params'].data_type][0]
    string_value = graph.graph['freeze_placeholder'][name]
    try:
        # 'np.bool' was removed in NumPy 1.24; the builtin 'bool' is exactly
        # the type that alias referred to.
        if data_type != bool:
            value = mo_array(string_value, dtype=data_type)
        # TODO: investigate why boolean type is allowed only for TensorFlow
        elif data_type == bool and graph.graph['fw'] == 'tf':
            from openvino.tools.mo.front.tf.common import tf_data_type_cast
            if isinstance(string_value, list):
                casted_list = [tf_data_type_cast[ph.data_type](v)
                               for v in mo_array(string_value)]
                # Build the array from the casted values. Previously the raw
                # strings were re-used here, which made any non-empty string
                # (e.g. "False") truthy under dtype=bool.
                value = mo_array(casted_list, dtype=data_type)
            else:
                value = tf_data_type_cast[ph.data_type](string_value)
        else:
            raise Error("Cannot cast value {} to {} data_type".format(
                string_value, data_type))
    except Error:
        raise
    except Exception:
        # Narrowed from a bare 'except' so KeyboardInterrupt/SystemExit
        # propagate instead of being converted into a conversion Error.
        raise Error("Cannot cast value {} to {} data_type".format(
            string_value, data_type))
    try:
        value = np.reshape(a=value, newshape=ph.shape)
    except Exception:
        raise Error("Can not reshape value {} to shape {}".format(
            value, ph.shape))
    out_edges = list(graph.out_edges(ph.id, data=True))
    new_node = Const(graph).create_node(
        attrs={
            'value': value,
            # NOTE(review): 'type(value)' yields np.ndarray, not a dtype —
            # looks suspicious, but kept as-is for backward compatibility.
            'data_type': type(value),
            'name': name + '/const_placeholder',
            'shape': ph.shape,
        })
    graph.erase_node(ph)
    graph.add_edges_from([(new_node.id, v, attrs)
                          for u, v, attrs in out_edges])
    log.info(
        "Placeholder node \"{}\" was replaced with Const node \"{}\" with value \"{}\""
        .format(name, new_node.name, value))
def replace_pattern(graph: Graph, match: dict):
    """Erase a matched Result node that has no inputs (a dangling output)."""
    result_node = match['result']
    if not result_node.in_nodes():
        graph.erase_node(result_node)
def replace_sub_graph(graph: Graph, match: dict):
    """Erase a matched NoOp node together with its fake output node."""
    noop = match['noop']
    # Remove the fake output first, then the NoOp itself (original order).
    graph.erase_node(match['output'])
    graph.erase_node(noop)
    log.info("NoOp node \"{}\" was removed from the graph".format(noop.id))