def common_pool_extender(op: Node):
    """Extend a Pooling operation restored from IR with legacy MO attributes.

    Converts the IR string attributes to lists, builds full-rank
    'stride'/'window'/'pad' arrays with batch and channel positions
    prepended, and derives the spatial-dims bookkeeping used by MO
    shape inference.

    :param op: Pooling node restored from IR.
    """
    for attr in ['strides', 'pads_begin', 'pads_end', 'kernel']:
        Extender.attr_to_list(op, attr)

    op['stride'] = int64_array([1, 1] + op.strides)
    op['window'] = int64_array([1, 1] + op.kernel)
    op['kernel_spatial'] = op.kernel
    op['output_spatial_shape'] = None

    # Fixed: stray trailing commas previously made these 1-element TUPLES
    # wrapping the arrays rather than the arrays themselves, inconsistent
    # with the Convolution extender which assigns plain arrays.
    op['batch_dims'] = int64_array([0])
    op['channel_dims'] = int64_array([1])

    dim = len(op.pads_begin)
    assert dim in (1, 2, 3), '{}D {} not supported! Node name: {}'.format(
        dim, op.soft_get('type'), op.soft_get('name', op.id))

    # Full pad layout: [[batch], [channel], [spatial_0], ...].
    pad = [[0, 0], [0, 0]]
    pad.extend([[op.pads_begin[i], op.pads_end[i]] for i in range(dim)])
    op['pad'] = int64_array(pad)

    op['spatial_dims'] = [i + 2 for i in range(dim)]

    # MXNet-style convention used by legacy pooling infer for ceil rounding.
    if op.has_valid('rounding_type') and op.rounding_type == 'ceil':
        op['pooling_convention'] = 'full'
def extend(op: Node):
    """Restore legacy MO attributes on a Convolution node read from IR.

    Normalizes IR string attributes to lists and derives the full-rank
    'stride'/'dilation'/'pad' arrays plus dimension bookkeeping.

    :param op: Convolution node restored from IR.
    """
    list_attrs = ['strides', 'dilations', 'pads_begin', 'pads_end', 'output_padding']
    for name in list_attrs:
        Extender.attr_to_list(op, name)

    op['stride'] = int64_array([1, 1] + op.strides)
    op['dilation'] = int64_array([1, 1] + op.dilations)

    op['batch_dims'] = int64_array([0])
    op['channel_dims'] = int64_array([1])

    # Be VERY careful with these attributes!
    op['input_feature_channel'] = 1
    op['output_feature_channel'] = 0

    num_spatial = len(op.pads_begin)
    assert num_spatial in (1, 2, 3), '{}D Convolution not supported!'.format(num_spatial)

    # Full pad layout: [[batch], [channel], [spatial_0], ...].
    full_pad = [[0, 0], [0, 0]]
    full_pad += [[begin, end] for begin, end in zip(op.pads_begin, op.pads_end)]
    op['pad'] = int64_array(full_pad)

    op['spatial_dims'] = list(range(2, num_spatial + 2))
def common_backpropdata_extender(op: Node):
    """Extend a (Group)ConvolutionBackpropData node restored from IR.

    Normalizes list attributes, builds full-rank 'pad'/'dilation'/'stride'
    arrays, and installs the IR-shape-based infer function.

    :param op: backprop-data node restored from IR.
    """
    for attr in ['strides', 'output_padding', 'pads_begin', 'pads_end', 'dilations']:
        Extender.attr_to_list(op, attr)

    if op.has_valid('output_padding'):
        op.output_padding = int64_array([0, 0] + op.output_padding)

    # NOTE(review): dim is taken from 'strides' BEFORE the
    # has_valid('strides') fallback below — if 'strides' were genuinely
    # absent this line would fail first. Presumably attr_to_list guarantees
    # the attribute exists by this point; TODO confirm.
    dim = len(op.strides)

    if op.has_valid('pads_begin') and op.has_valid('pads_end'):
        # Full pad layout: [[batch], [channel], [spatial_0], ...].
        pad = [[0, 0], [0, 0]]
        pad.extend([[op.pads_begin[i], op.pads_end[i]] for i in range(dim)])

        op['pad'] = int64_array(pad)

    op['spatial_dims'] = [i + 2 for i in range(dim)]

    # Default to identity dilations/strides when the IR omitted them.
    if not op.has_valid('dilations'):
        op['dilations'] = [1 for _ in range(dim)]
    if not op.has_valid('strides'):
        op['strides'] = [1 for _ in range(dim)]

    op['dilation'] = int64_array([1, 1] + op.dilations)
    op['stride'] = int64_array([1, 1] + op.strides)

    op['infer'] = backpropdata_infer
def attr_restore(node: Node, attribute: str, value=None):
    """Restore a possibly-missing list attribute on a node.

    Used for PriorBox & PriorBoxClustered layers: a missing attribute is
    initialized to [] (or [value]); an empty-string attribute becomes [];
    anything else is normalized to a list.

    :param node: node to patch.
    :param attribute: attribute name to restore.
    :param value: optional default wrapped into a one-element list.
    """
    if not node.has_valid(attribute):
        default = [] if value is None else [value]
        node[attribute] = default

    current = node[attribute]
    if isinstance(current, str):
        node[attribute] = []
    else:
        Extender.attr_to_list(node, attribute)
def extend(op: Node):
    """Restore Parameter node attributes read from IR.

    Maps the IR element_type string onto a numpy data type and normalizes
    the 'shape' attribute ('' denotes a scalar/empty shape).

    :param op: Parameter node restored from IR.
    """
    assert op.has_valid('element_type'), \
        'Parameter node {} has missed element_type attr!'.format(op.name)
    op['data_type'] = destination_type_to_np_data_type(op.element_type)

    if op.shape != '':
        Extender.attr_to_list(op, 'shape')
    else:
        # Empty string in IR means an empty (scalar) shape.
        op.shape = int64_array([])
def extend(op: Node):
    """Normalize StridedSlice-style mask attributes restored from IR.

    Converts each mask to a list, then flips begin/end mask bits
    (IR and internal MO representations use opposite polarity).

    :param op: node restored from IR.
    """
    for mask_name in ('shrink_axis_mask', 'new_axis_mask', 'ellipsis_mask',
                      'begin_mask', 'end_mask'):
        Extender.attr_to_list(op, mask_name)

    # Invert each bit: IR polarity -> MO-internal polarity.
    op.begin_mask = int64_array([1 - bit for bit in op.begin_mask])
    op.end_mask = int64_array([1 - bit for bit in op.end_mask])
def backpropdata_infer(op: Node):
    """Verify input shapes are unchanged, then reuse IR output shapes.

    Compares the current input shapes against the ones recorded when the
    graph was restored ('old_input_shapes') and delegates output shapes to
    the constant infer based on IR data.

    :param op: backprop-data node under shape inference.
    """
    op['new_input_shapes'] = [op.in_node(idx).shape for idx in op.in_nodes()]
    assert len(op.new_input_shapes) == len(op.old_input_shapes)

    for fresh, recorded in zip(op.new_input_shapes, op.old_input_shapes):
        assert np.array_equal(fresh, recorded), 'Something wrong happened while ' \
            '{} shape infer with type {}!'.format(op.name, op.type)

    Extender.const_shape_infer(op)
def extend(op: Node):
    """Restore StridedSlice mask attributes from IR, with defaults.

    Masks present in the IR are converted to lists; absent optional masks
    default to [0]. begin_mask/end_mask are mandatory. Finally begin/end
    mask bits are inverted to match MO-internal polarity.

    :param op: StridedSlice node restored from IR.
    """
    for mask_name in StridedSlice.get_mask_names():
        # We can not use op.has_and_set(attr) here as a condition, because it will return False if begin/end is
        # 1D tensor and begin_mask/end_mask is equal to 0
        if op.has(mask_name) and op[mask_name] != '':
            Extender.attr_to_list(op, mask_name)
        else:
            assert mask_name not in ['begin_mask', 'end_mask'], \
                '{} is not defined for the node {}'.format(mask_name, op.soft_get('name', op.id))
            op[mask_name] = int64_array([0])

    # Invert each bit: IR polarity -> MO-internal polarity.
    op.begin_mask = int64_array([1 - bit for bit in op.begin_mask])
    op.end_mask = int64_array([1 - bit for bit in op.end_mask])
def backpropdata_infer(op: Node):
    """Shape infer stub that reuses the output shapes recorded in the IR.

    :param op: backprop-data node under shape inference.
    """
    Extender.use_shapes_from_ir(op)
def copy_graph_with_ops(graph: Graph) -> Graph:
    """
    Function to copy graph and apply extenders to appropriate nodes.
    :param graph: Graph to copy
    :return: Copied graph with applied extenders
    """
    new_graph = Graph()
    new_graph.stage = 'back'
    # Share the graph-level (meta) attributes with the source graph.
    new_graph.graph = graph.graph

    node_connections = dict()
    # old node id -> id of the corresponding node in new_graph
    mapping_of_old_idx_into_new = dict()

    restore_correct_ports(graph)

    # Nodes preprocessing stage in source graph
    # Firstly propagate values only for Const nodes, because other preprocessings
    # assumes Const nodes are already preprocessed.
    for op in graph.get_op_nodes(type='Const'):
        preprocessing_op_nodes[op.type](op)

    for op in graph.get_op_nodes():
        if op.soft_get('type') != 'Const' and op.soft_get('type') in preprocessing_op_nodes:
            preprocessing_op_nodes[op.type](op)

    # Create a new copy of graph with correct attributes (shape & type infer, backend attrs etc.)
    for op in graph.get_op_nodes():

        # Apply extenders to nodes in source graph
        if op.type in Extender.registered_ops:
            Extender.get_extender_class_by_name(op.type).extend(op)
        else:
            log.debug('Extender for node {} with type={} not found, please note.'.format(op.name, op.type))

        # Add node with necessary type and extended attrs in new graph
        op_type = op.soft_get('type_to_create', op.type)

        if op_type in custom_ops:
            # User-registered op classes take precedence over MO's registry.
            node = custom_ops[op_type](new_graph, op.attrs()).create_node()
        else:
            assert op_type in Op.registered_ops, 'Operation {} not found in MO operations, ' \
                                                 'please check it!'.format(op_type)
            node = Op.get_op_class_by_name(op_type)(new_graph, op.attrs()).create_node()

        if op.has_and_set('need_copy_input_blobs'):
            copy_input_blobs(op, node)

        # Collect node connections
        mapping_of_old_idx_into_new[op.id] = node.id
        node_connections[op.id] = collect_node_outputs(op)

    # Restore connections in new graph
    for input_node_idx, its_outputs in list(node_connections.items()):
        for out_port_idx, out_port_dest in its_outputs.items():
            for dest_in_port_idx, dest_node_idx in out_port_dest:
                src = Node(new_graph, mapping_of_old_idx_into_new[input_node_idx])
                dst = Node(new_graph, mapping_of_old_idx_into_new[dest_node_idx])
                src.out_port(out_port_idx).connect(dst.in_port(dest_in_port_idx))

    # Nodes postprocessing stage in new graph
    for op in new_graph.get_op_nodes():
        if op.soft_get('type') in postprocessing_op_nodes:
            postprocessing_op_nodes[op.type](op)

    # clean up graph to shape inference
    new_graph.clean_up()

    return new_graph
def extend(op: Node):
    """Convert StridedSlice mask attributes to lists and invert begin/end.

    :param op: StridedSlice node restored from IR.
    """
    for name in StridedSlice.get_mask_names():
        Extender.attr_to_list(op, name)

    # Invert each bit: IR polarity -> MO-internal polarity.
    op.begin_mask = int64_array([1 - bit for bit in op.begin_mask])
    op.end_mask = int64_array([1 - bit for bit in op.end_mask])
def extend(op: Node):
    """Normalize the 'pyramid_scales' IR attribute to a list.

    :param op: node restored from IR.
    """
    Extender.attr_to_list(op, 'pyramid_scales')
def copy_graph_with_ops(graph: Graph) -> Graph:
    """
    Function to copy graph and apply extenders to appropriate nodes.
    :param graph: Graph to copy
    :return: Copied graph with applied extenders
    """
    new_graph = Graph()
    new_graph.stage = 'back'
    # Share the graph-level (meta) attributes with the source graph.
    new_graph.graph = graph.graph

    node_connections = dict()
    # old node id -> id of the corresponding node in new_graph
    mapping_of_old_idx_into_new = dict()

    restore_correct_ports(graph)

    # Nodes preprocessing stage in source graph
    # Firstly propagate values only for Const nodes, because other preprocessings
    # assumes Const nodes are already preprocessed.
    for op in graph.get_op_nodes(type='Const'):
        preprocessing_op_nodes[op.type](op)

    for op in graph.get_op_nodes():
        if op.soft_get('type') != 'Const' and op.soft_get('type') in preprocessing_op_nodes:
            preprocessing_op_nodes[op.type](op)

    # Create a new copy of graph with correct attributes (shape & type infer, backend attrs etc.)
    for op in graph.get_op_nodes():

        # Save input shapes restored from IR (checked later by infer functions).
        op['old_input_shapes'] = list()
        for n in op.in_nodes():
            op.old_input_shapes.append(int64_array(op.in_node(n).shape))

        # Apply extenders to nodes in source graph
        if op.type in Extender.registered_ops:
            Extender.get_extender_class_by_name(op.type).extend(op)
        else:
            log.debug('Extender for node {} with type={} not found, please note.'.format(op.name, op.type))

        # Add node with necessary type and extended attrs in new graph
        op_type = op.soft_get('type_to_create', op.type)

        if op_type in custom_ops:
            # User-registered op classes take precedence over MO's registry.
            node = custom_ops[op_type](new_graph, op.attrs()).create_node()
        else:
            if op_type not in Op.registered_ops:
                # Unknown op: fall back to a generic Op that reuses shapes from IR.
                log.warning('Operation {} is not found in MO operations, please check it! '
                            'Simple shape infer function is used'.format(op_type))
                node = Op(new_graph, op.attrs()).create_node()
                assert 'type' in node, 'Operation {} have no `type` attribute.'.format(node.soft_get('name'))
                node['op'] = node.type
                node['infer'] = Extender.use_shapes_from_ir
                if 'ir_data_attrs' in op:
                    # Preserve the backend-emission schema of the original layer.
                    node['IE'] = [('layer',
                                   [('id', lambda node: node.node), 'name', 'type', 'version'],
                                   [('data',
                                     list(op.ir_data_attrs.keys()),
                                     []),
                                    '@ports',
                                    '@consts'])]
            else:
                node = Op.get_op_class_by_name(op_type)(new_graph, op.attrs()).create_node()

            # Fill out_ports_count attribute
            if 'out_ports_count' not in node and node.soft_get('type') != 'Result':
                node['out_ports_count'] = len(op.out_edges())

        # This attribute is no longer needed and we can delete it
        if 'ir_data_attrs' in node:
            del node['ir_data_attrs']

        if op.has_and_set('need_copy_input_blobs'):
            copy_input_blobs(op, node)

        # Collect node connections
        mapping_of_old_idx_into_new[op.id] = node.id
        node_connections[op.id] = collect_node_outputs(op)

    # Restore connections in new graph
    for input_node_idx, its_outputs in list(node_connections.items()):
        for out_port_idx, out_port_dest in its_outputs.items():
            for dest_in_port_idx, dest_node_idx in out_port_dest:
                src = Node(new_graph, mapping_of_old_idx_into_new[input_node_idx])
                dst = Node(new_graph, mapping_of_old_idx_into_new[dest_node_idx])
                src.out_port(out_port_idx).connect(dst.in_port(dest_in_port_idx))

    # Nodes postprocessing stage in new graph
    for op in new_graph.get_op_nodes():
        # Call normalize node outputs for restored operations to connect temporary Result operations for disconnected
        # output ports. We need to do that for correct shape inference. These Result operations will be removed during
        # IR emitting. For TopK operation outputs normalizing we should use specific
        # function TopKNormalizer.normalize_outputs.
        if op.soft_get('type') != 'TopK':
            Op.normalize_outputs(op)

        # Set correct_data_type attribute to Const data nodes to correct processing of restored values
        if op.soft_get('type') == 'Const':
            assert len(op.out_nodes()) == 1 and op.out_node(0).soft_get('kind') == 'data', \
                'Const node {} not properly corrected to appropriate data node'.format(op.soft_get('name'))
            op.out_node(0)['correct_data_type'] = True

        restore_tensor_names(op)

        # operations postprocessing with some special types
        if op.soft_get('type') in postprocessing_op_nodes:
            postprocessing_op_nodes[op.type](op)

    # clean up graph to shape inference
    new_graph.clean_up()

    return new_graph
def extend(op: Node):
    """Normalize the 'axes' IR attribute to a list.

    :param op: node restored from IR.
    """
    Extender.attr_to_list(op, 'axes')