def placeholder_scales(self, placeholder: Node):
    """
    Helper function to get scales for prior boxes out of input image size:
        [1 / im_width, 1 / im_height, 1 / im_width, 1 / im_height]

    Builds a small sub-graph (ShapeOf -> StridedSlice -> Pow(-1) -> Gather-reverse ->
    Concat with itself) and returns the Concat node producing the 4-element scale vector.
    """
    graph = placeholder.graph
    name = placeholder.soft_get('name', placeholder.id)

    # The placeholder must carry a statically known 4D shape (N, ..., H, W assumed —
    # the slice below takes dims 1:3; confirm layout against the producing framework).
    shape_value = placeholder.soft_get('shape', None)
    assert shape_value is not None, \
        "[ {} replacer ] Placeholder `{}` should have shape attribute".format(self.replacement_id, name)
    assert isinstance(shape_value, np.ndarray), \
        "[ {} replacer ] Placeholder `{}` shape attribute should be np.ndarray".format(self.replacement_id, name)
    assert shape_value.size == 4, \
        "[ {} replacer ] Placeholder `{}` should be 4D. Shape: {}".format(self.replacement_id, name, shape_value)

    # Runtime shape of the input image tensor.
    shape = Shape(graph, {'name': 'input_image_shape'}).create_node()
    shape.in_port(0).connect(placeholder.out_port(0))

    # Slice dims [1:3) out of the shape vector.
    begin = Const(graph, {'value': int64_array([1])}).create_node()
    end = Const(graph, {'value': int64_array([3])}).create_node()
    stride = Const(graph, {'value': int64_array([1])}).create_node()
    spatial = StridedSlice(graph, {'name': name + '/get_h_w', 'begin_mask': int64_array([1]),
                                   'end_mask': int64_array([1]), 'new_axis_mask': int64_array([0]),
                                   'shrink_axis_mask': int64_array([0]),
                                   'ellipsis_mask': int64_array([0])}).create_node()

    spatial.in_port(0).connect(shape.out_port(0))
    spatial.in_port(1).connect(begin.out_port(0))
    spatial.in_port(2).connect(end.out_port(0))
    spatial.in_port(3).connect(stride.out_port(0))

    # Reciprocal of the spatial dims: x ** -1.
    power = Const(graph, {'value': float32_array([-1.])}).create_node()
    spatial_scale = Pow(graph, {}).create_node()

    spatial_scale.in_port(0).connect(spatial.out_port(0))
    spatial_scale.in_port(1).connect(power.out_port(0))

    # Power `type_infer` requires inputs to have equal data type
    convert_to_fp32 = Cast(graph, {'dst_type': np.float32}).create_node()
    spatial_scale.in_port(0).get_connection().insert_node(convert_to_fp32)

    # Swap the two elements (order [1, 0]) via Gather along axis 0.
    order = Const(graph, {'value': int64_array([1, 0])}).create_node()
    axis_const = Const(graph, {'value': int64_array(0)}).create_node()
    reverse = Gather(graph, {}).create_node()

    reverse.in_port(0).connect(spatial_scale.out_port(0))
    reverse.in_port(1).connect(order.out_port(0))
    axis_const.out_port(0).connect(reverse.in_port(2))

    # Concatenate the 2-element vector with itself to get the 4-element scales.
    priors_scale_node = Concat(graph, {'axis': 0, 'in_ports_count': 2}).create_node()
    # FIXME: add_input_port is not applicable for create_node() case
    priors_scale_node.add_input_port(0, skip_if_exist=True)
    priors_scale_node.add_input_port(1, skip_if_exist=True)

    priors_scale_node.in_port(0).connect(reverse.out_port(0))
    priors_scale_node.in_port(1).connect(reverse.out_port(0))
    return priors_scale_node
def replace_op(self, graph: Graph, node: Node):
    """
    Replace the matched op with an explicit sub-graph computing
        rows = in0 / dim1   (integer division on the original inputs)
        cols = in0 mod dim1 (computed in fp32, then cast back to int64)
    packed together along axis 0.

    dim1 is element [1:2) sliced from the second input (presumably a shape
    vector — TODO confirm against the caller).
    Returns the id of the resulting Pack node, as the replace_op contract requires.
    """
    inp0 = node.in_port(0).get_source().node
    inp1 = node.in_port(1).get_source().node
    begin_id = Const(graph, {"value": int64_array([1])}).create_node()
    end_id = Const(graph, {"value": int64_array([2])}).create_node()
    dim1 = StridedSlice(
        graph,
        dict(
            name=inp0.name + "/dim1",
            begin_mask=[1],
            end_mask=[1],
            shrink_axis_mask=[0],
            new_axis_mask=[0],
            ellipsis_mask=[0],
        ),
    ).create_node([inp1, begin_id, end_id])
    # Division is done on the original (uncast) inputs.
    rows = Div(graph, dict(name=node.name + "/rows")).create_node([inp0, dim1])
    # FloorMod inputs are cast to fp32 (note: inp0/dim1 are rebound to the Cast nodes here).
    inp0 = Cast(
        graph, dict(name=inp0.name + "/fp32", dst_type=np.float32)
    ).create_node([inp0])
    dim1 = Cast(
        graph, dict(name=dim1.name + "/fp32", dst_type=np.float32)
    ).create_node([dim1])
    cols = FloorMod(graph, dict(name=node.name + "/cols")).create_node([inp0, dim1])
    # Bring the modulo result back to integer domain.
    cols = Cast(
        graph, dict(name=cols.name + "/i64", dst_type=np.int64)
    ).create_node([cols])
    concat = PackOp(graph, dict(name=node.name + "/merged", axis=0)).create_node(
        [rows, cols]
    )
    return [concat.id]
def replace_sub_graph(self, graph: Graph, match: dict):
    """
    Replace a Crop-like node (with crop_begin / crop_end and optional step attrs)
    by an equivalent StridedSlice fed by Const begin/end (and stride, when steps
    are provided) inputs.
    """
    node = match['op']
    # begin/end masks are all ones (use the provided values on every axis);
    # no new-axis / shrink / ellipsis behavior.
    strided_slice_node = StridedSlice(graph,
                                      dict(name=node.id + '/strided_slice_',
                                           shrink_axis_mask=np.array(
                                               np.zeros(len(node.crop_begin), dtype=np.int64)),
                                           new_axis_mask=np.array(
                                               np.zeros(len(node.crop_begin), dtype=np.int64)),
                                           ellipsis_mask=np.array(
                                               np.zeros(len(node.crop_begin), dtype=np.int64)),
                                           begin_mask=np.array(
                                               np.ones(len(node.crop_begin), dtype=np.int64)),
                                           end_mask=np.array(
                                               np.ones(len(node.crop_end), dtype=np.int64)))).create_node()
    # Re-route data input and output of the original node to the new StridedSlice.
    node.in_port(0).get_connection().set_destination(strided_slice_node.in_port(0))
    node.out_port(0).get_connection().set_source(strided_slice_node.out_port(0))

    crop_begin_node = Const(
        graph,
        dict(value=node.crop_begin,
             symbol_dict={'name': node.id + '/crop_begin_const'})).create_node()
    crop_end_node = Const(
        graph,
        dict(value=node.crop_end,
             symbol_dict={'name': node.id + '/crop_end_const'})).create_node()

    strided_slice_node.in_port(1).get_connection().set_source(crop_begin_node.out_port(0))
    strided_slice_node.in_port(2).get_connection().set_source(crop_end_node.out_port(0))

    # Stride input is attached only when the original node carried explicit steps.
    if len(node.step) > 0:
        stride_node = Const(
            graph,
            dict(value=node.step,
                 symbol_dict={'name': node.id + '/steps_const'})).create_node()
        strided_slice_node.in_port(3).get_connection().set_source(stride_node.out_port(0))
def normalize_strided_slice(graph: Graph, node: Node):
    """
    Normalize a StridedSlice node so its masks and begin/end/stride inputs match
    the input rank: aligns masks with the slice rank, unrolls a single ellipsis,
    and pads inputs/masks with blanks when the slice is shorter than the input.
    """
    input_shape = node.in_port(0).data.get_shape()
    input_rank = len(input_shape)
    begin = node.in_port(1).data.get_value()
    if begin is not None:
        slice_rank = len(begin)
    else:
        # Without a constant `begin`, derive slice rank from the masks:
        # new axes add dimensions, shrink axes remove them.
        slice_rank = input_rank + np.count_nonzero(
            node.new_axis_mask) - np.count_nonzero(node.shrink_axis_mask)

    StridedSlice.align_mask_with_slice_rank(
        node, slice_rank)  # if StridedSlice is created after partial_infer
    StridedSliceNormalizer.normalize_slices_attr(node)

    # Number of blank slice entries required to cover all input dims.
    num_insertions = input_rank - slice_rank + np.count_nonzero(
        node.new_axis_mask)
    assert num_insertions >= 0, 'slice_rank - num_new_axis must <= input rank. Got instead: ' \
                                'input_rank = {}, slice_rank = {}, num_new_axis = {}'. \
        format(input_rank, slice_rank, np.count_nonzero(node.new_axis_mask))

    if np.any(node.ellipsis_mask):
        assert np.count_nonzero(
            node.ellipsis_mask
        ) == 1, 'only one ellipsis_mask nonzero value is allowed'
        ellipsis_start = np.nonzero(node.ellipsis_mask)[0][0]
        # since we don't expect values in begin and end: take the whole range along ellipsis_start
        node.begin_mask[ellipsis_start] = 0
        node.end_mask[ellipsis_start] = 0
        node.ellipsis_mask[ellipsis_start] = 0
        insertion_start_idx = ellipsis_start + 1
        StridedSliceNormalizer.unroll_ellipsis_for_inputs(
            graph, node, ellipsis_start, num_insertions)
    elif num_insertions > 0:
        insertion_start_idx = slice_rank  # insert blank values to mask ends
        StridedSliceNormalizer.extend_inputs(node, num_insertions)

    if num_insertions > 0:
        # insert blank values for ellipsis unrolling and extending
        for mask_name in StridedSlice.get_mask_names():
            node[mask_name] = np.insert(node[mask_name], insertion_start_idx,
                                        [0] * num_insertions).astype(int)
def extract(cls, node):
    """
    Extract StridedSlice attributes from the framework protobuf node.

    The framework's begin/end mask bits are flipped (1 - bit) to match the
    internal mask convention; the remaining masks are taken as-is.
    """
    pb = node.pb

    # Flip begin/end mask bits to the internal (inverted) convention.
    begin_mask = mo_array(
        [1 - bit for bit in int_to_array_bit_mask(pb.attr["begin_mask"].i)],
        dtype=np.int32)
    end_mask = mo_array(
        [1 - bit for bit in int_to_array_bit_mask(pb.attr["end_mask"].i)],
        dtype=np.int32)

    StridedSlice.update_node_stat(node, {
        'begin_mask': begin_mask,
        'end_mask': end_mask,
        'ellipsis_mask': int_to_array_bit_mask(pb.attr["ellipsis_mask"].i),
        'new_axis_mask': int_to_array_bit_mask(pb.attr["new_axis_mask"].i),
        'shrink_axis_mask': int_to_array_bit_mask(pb.attr["shrink_axis_mask"].i),
    })
    return cls.enabled
def extend(op: Node):
    """
    Normalize StridedSlice mask attributes restored from serialized IR:
    convert each present mask to a list, default absent optional masks to [0],
    and flip begin/end mask bits back to the internal convention.
    """
    for mask in StridedSlice.get_mask_names():
        # We can not use op.has_and_set(mask) here as a condition, because it will return False if begin/end is
        # 1D tensor and begin_mask/end_mask is equal to 0
        if not op.has(mask) or op[mask] == '':
            # begin_mask/end_mask are mandatory; the other masks default to [0].
            assert mask not in ['begin_mask', 'end_mask'], \
                '{} is not defined for the node {}'.format(mask, op.soft_get('name', op.id))
            op[mask] = int64_array([0])
        else:
            Extender.attr_to_list(op, mask)

    # Invert begin/end mask bits (serialized form uses the opposite convention).
    op.begin_mask = int64_array([1 - bit for bit in op.begin_mask])
    op.end_mask = int64_array([1 - bit for bit in op.end_mask])
def replace_op(self, graph: Graph, node: Node):
    """
    Replace a torch FFT-like module call with DFT/IDFT sub-graphs.

    Inverse case: IDFT over axes [2, num_axes - 1), then slice out the real
    component and squeeze the trailing complex axis.
    Forward case: build a complex tensor by packing the input with a zero
    imaginary part, then apply DFT over axes [2, num_axes).
    Returns the id(s) of the resulting output node, per the replace_op contract.
    """
    if node.module.inverse:
        axes = Const(
            graph, {
                'value': int64_array(range(2, node.module.num_axes - 1))
            }).create_node()
        dft_node = IDFT(graph, dict(name=node.name,
                                    in_ports_count=2)).create_node(
                                        [node.in_node(0), axes])

        # Slice a real part
        begin_id = Const(graph, {
            'value': int64_array([0, 0])
        }).create_node()
        end_id = Const(graph, {'value': int64_array([0, 1])}).create_node()
        # ellipsis on the first slice dim keeps all leading axes; the last
        # axis is sliced [0:1) to select the real component.
        real = StridedSlice(
            graph,
            dict(name=node.name + '/real',
                 begin_mask=[0, 0],
                 end_mask=[0, 1],
                 shrink_axis_mask=[0, 0],
                 new_axis_mask=[0],
                 ellipsis_mask=[1, 0])).create_node(
                     [dft_node, begin_id, end_id])
        # Drop the now size-1 complex axis.
        squeeze_axis = Const(graph, {'value': -1}).create_node()
        res = Squeeze(graph, dict(name=node.name + '/squeeze')).create_node(
            [real, squeeze_axis])
        return [res.id]
    else:
        # imag = input * 0 keeps shape/dtype while producing a zero tensor.
        zero = Const(graph, {'value': 0.0}).create_node()
        imag = Mul(graph, dict(name=node.name + '/imag')).create_node(
            [node.in_node(0), zero])
        # Pack (real, imag) along a new trailing axis to form the complex input.
        cmplx = PackOp(graph, dict(name=node.name + '/complex',
                                   axis=-1)).create_node([node.in_node(0), imag])
        axes = Const(graph, {
            'value': int64_array(range(2, node.module.num_axes))
        }).create_node()
        dft_node = DFT(graph, dict(name=node.name,
                                   in_ports_count=2)).create_node([cmplx, axes])
        return [dft_node.id]
def append_variances(priors_scale_node: Node, variance: list):
    """
    Append variance values to scaled prior boxes.

    Reshapes the priors to [-1, 4], broadcasts the 4-element `variance` vector
    to a tensor with matching leading dimension (taken from dim -2 of the priors'
    shape), concatenates priors and variances along axis 0, and reshapes the
    result to the 3D layout [1, 2, -1]. Returns the final Reshape node.
    """
    graph = priors_scale_node.graph
    name = priors_scale_node.name

    sp_shape = Shape(graph, {'name': name + '/shape'}).create_node()
    priors_scale_node.out_port(0).connect(sp_shape.in_port(0))

    # Slice dim [-2:-1) from the priors' shape — the number of prior rows.
    begin = Const(graph, {'value': int64_array([-2])}).create_node()
    end = Const(graph, {'value': int64_array([-1])}).create_node()
    stride = Const(graph, {'value': int64_array([1])}).create_node()
    shape_part_for_tiling = StridedSlice(graph, {'name': name + '/get_-2_dim',
                                                 'begin_mask': int64_array([1]),
                                                 'end_mask': int64_array([1]),
                                                 'new_axis_mask': int64_array([0]),
                                                 'shrink_axis_mask': int64_array([0]),
                                                 'ellipsis_mask': int64_array([0])}).create_node()

    sp_shape.out_port(0).connect(shape_part_for_tiling.in_port(0))
    begin.out_port(0).connect(shape_part_for_tiling.in_port(1))
    end.out_port(0).connect(shape_part_for_tiling.in_port(2))
    stride.out_port(0).connect(shape_part_for_tiling.in_port(3))

    # Target broadcast shape: [num_priors_dim, 4].
    shape_concat = create_op_node_with_second_input(graph, Concat, int64_array([4]),
                                                    {'name': name + '/shape_for_tiling',
                                                     'in_ports_count': 2,
                                                     'axis': int64_array(0)},
                                                    shape_part_for_tiling)

    variance = Const(graph, {'name': name + '/variance',
                             'value': float32_array(variance)}).create_node()
    tile = Broadcast(graph, {'name': name + '/variance_tile'}).create_node()
    variance.out_port(0).connect(tile.in_port(0))
    shape_concat.out_port(0).connect(tile.in_port(1))

    # Flatten priors to rows of 4 box coordinates.
    reshape_dim = Const(graph, {'value': int64_array([-1, 4])}).create_node()
    sp_reshape = Reshape(graph, {'name': name + '/reshape'}).create_node()
    sp_reshape.in_port(0).connect(priors_scale_node.out_port(0))
    sp_reshape.in_port(1).connect(reshape_dim.out_port(0))

    # Stack priors on top of the broadcast variances.
    concat = Concat(graph,
                    {'name': name + '/priors_concat', 'axis': int64_array(0),
                     'in_ports_count': 2}).create_node()
    sp_reshape.out_port(0).connect(concat.in_port(0))
    tile.out_port(0).connect(concat.in_port(1))

    # [1, 2, -1]: batch dim, (priors | variances) dim, flattened values.
    output_dims = Const(graph, {'value': int64_array([1, 2, -1])}).create_node()
    output_node = Reshape(graph, {'name': name + '/3D_priors_wth_variances'}).create_node()
    concat.out_port(0).connect(output_node.in_port(0))
    output_dims.out_port(0).connect(output_node.in_port(1))

    return output_node
def find_and_replace_pattern(self, graph: Graph):
    """
    Replace every Slice node with an equivalent StridedSlice.

    For each Slice: canonicalize its axes (defaulting to all input dims),
    convert begin/end into full-rank StridedSlice borders, build full-rank
    begin/end masks and strides (1 on untouched axes), then rewire data and
    output connections and transfer the original node's name to the new one.
    """
    for node in graph.get_op_nodes(op='Slice'):
        node_name = node.soft_get('name', node.id)

        input_shape = node.in_port(0).data.get_shape()
        if node.is_in_port_connected(3):
            # Copy: axes values are normalized in place below.
            axes = node.in_port(3).data.get_value().copy()
            assert axes is not None, \
                'The input with axes is not constant for node {}'.format(node_name)
            for i, val in enumerate(axes):
                axes[i] = get_canonical_axis_index(input_shape, val)
        else:
            # No axes input: the slice applies to every input dimension.
            axes = int64_array(range(len(input_shape)))

        ss_begin = create_ss_interval_border(graph, node.in_port(1).get_source(),
                                             input_shape, axes, node_name)
        ss_end = create_ss_interval_border(graph, node.in_port(2).get_source(),
                                           input_shape, axes, node_name)
        node.in_port(1).disconnect()
        node.in_port(2).disconnect()
        rename_nodes([(ss_begin, node_name + '/Begin'), (ss_end, node_name + '/End')])

        if node.is_in_port_connected(4):
            steps = node.in_port(4).data.get_value()
            assert steps is not None, \
                'The input with steps is not constant for node {}'.format(node_name)
        else:
            steps = np.ones([axes.size])

        # Masks default to 0 / stride 1 on axes the Slice does not touch.
        ss_begin_mask = np.zeros(len(input_shape), dtype=np.int64)
        ss_end_mask = np.zeros(len(input_shape), dtype=np.int64)
        ss_step = np.ones(len(input_shape), dtype=np.int64)

        for i, axis in enumerate(axes):
            ss_begin_mask[axis] = 1
            ss_end_mask[axis] = 1
            ss_step[axis] = steps[i]

        ss_strides = Const(
            graph, dict(name=node_name + '/Strides', value=ss_step)).create_node()

        # Temporary name 'ss' — renamed to the original node's name below.
        ss = StridedSlice(
            graph,
            dict(name='ss',
                 new_axis_mask=np.zeros(len(input_shape), dtype=np.int64),
                 shrink_axis_mask=np.zeros(len(input_shape), dtype=np.int64),
                 ellipsis_mask=np.zeros(len(input_shape), dtype=np.int64),
                 begin_mask=ss_begin_mask,
                 end_mask=ss_end_mask)).create_node()

        node.in_port(0).get_connection().set_destination(ss.in_port(0))
        ss.in_port(1).connect(ss_begin.out_port(0))
        ss.in_port(2).connect(ss_end.out_port(0))
        ss.in_port(3).connect(ss_strides.out_port(0))
        node.out_port(0).get_connection().set_source(ss.out_port(0))
        rename_nodes([(node, node_name + '/ShouldBeDeleted'), (ss, node_name)])
def generate_sub_graph(self, graph: Graph, match: SubgraphMatch):
    """
    Build a DetectionOutput sub-graph for a RetinaNet-like model.

    Takes matched inputs: (0) box regressions, (1) class scores, (2) prior boxes.
    Slices the first batch of priors, scales them to [0, 1] using the input image
    size, appends variances, applies width/height regression scaling, and feeds
    everything into a DetectionOutput node whose nms_threshold is retrieved from
    the original NonMaxSuppression node.

    :raises Error: if `variance` is missing from the replacement config, or if
                   `iou_threshold` cannot be retrieved from the graph.
    :return: dict mapping 'detection_output_node' to the created node.
    """
    reshape_classes_node = create_op_node_with_second_input(graph, Reshape, int64_array([0, -1]),
                                                            dict(name='do_reshape_classes'),
                                                            match.single_input_node(1)[0])

    initial_priors_node = match.single_input_node(2)[0]
    priors_name = initial_priors_node.soft_get('name', initial_priors_node.id)
    # model calculates identical prior boxes for each batch, so we take first slice of them
    begin = Const(graph, {'value': mo_array([0, 0, 0], dtype=np.int32)}).create_node()
    end = Const(graph, {'value': mo_array([1, 0, 0], dtype=np.int32)}).create_node()
    stride = Const(graph, {'value': mo_array([1, 1, 1], dtype=np.int32)}).create_node()

    # end_mask [1, 0, 0]: only the batch dim is limited by `end`; other dims are taken fully.
    priors_node = StridedSlice(graph, {'name': priors_name + '/0_batch_slice',
                                       'begin_mask': int64_array([1, 1, 1]),
                                       'end_mask': int64_array([1, 0, 0]),
                                       'new_axis_mask': int64_array([0]),
                                       'shrink_axis_mask': int64_array([0]),
                                       'ellipsis_mask': int64_array([0])}).create_node()

    initial_priors_node.out_port(0).connect(priors_node.in_port(0))
    begin.out_port(0).connect(priors_node.in_port(1))
    end.out_port(0).connect(priors_node.in_port(2))
    stride.out_port(0).connect(priors_node.in_port(3))

    placeholders = graph.get_op_nodes(type='Parameter')
    assert len(placeholders) == 1, "{} replacer requires model to have one Placeholder, but current model has " \
                                   "{} placeholders".format(self.replacement_id, len(placeholders))
    placeholder = placeholders[0]

    # scale prior boxes to the [0, 1] interval
    node_with_scales_for_prior_boxes = self.placeholder_scales(placeholder)
    priors_scale_node = Mul(graph, {'name': 'scale_priors'}).create_node()

    # Broadcast the 4-element scale vector to the priors' shape before multiplying.
    broadcast = Broadcast(graph, {'name': 'scales_broadcast'}).create_node()
    shape_of_priors = Shape(graph, {'name': 'priors_shape'}).create_node()
    priors_node.out_port(0).connect(shape_of_priors.in_port(0))
    broadcast.in_port(1).connect(shape_of_priors.out_port(0))
    broadcast.in_port(0).connect(node_with_scales_for_prior_boxes.out_port(0))

    priors_scale_node.in_port(0).connect(priors_node.out_port(0))
    priors_scale_node.in_port(1).connect(broadcast.out_port(0))

    try:
        variance = match.custom_replacement_desc.custom_attributes['variance']
    except KeyError:
        # Narrowed from a bare `except:` so that only a genuinely missing key is
        # reported as a configuration error; other failures propagate unchanged.
        raise Error('There is no variance attribute in {} replacement config file `custom_attributes`'
                    ''.format(self.replacement_id))

    priors = self.append_variances(priors_scale_node, variance)

    # calculate prior boxes widths and heights
    split_node = create_op_with_const_inputs(
        graph, VariadicSplit, {1: int64_array(2), 2: int64_array([1, 1, 1, 1])}, {'out_ports_count': 4},
        priors_scale_node)

    # width = x_max - x_min (outputs 2 and 0); height = y_max - y_min (outputs 3 and 1)
    # — assumed coordinate order, TODO confirm against the model.
    priors_width_node = Sub(graph, dict(name=split_node.name + '/sub_2-0_')
                            ).create_node([(split_node, 2), (split_node, 0)])
    priors_height_node = Sub(graph, dict(name=split_node.name + '/sub_3-1_')
                             ).create_node([(split_node, 3), (split_node, 1)])

    # concat weights and heights into a single tensor and multiple with the box coordinates regression values
    # WA with 3 Concats instead of 1 for keeping model reshapable
    # concat_width_height_node = Concat(graph, {'name': 'concat_priors_width_height', 'axis': -1,
    #                                           'in_ports_count': 4}).create_node(
    # [priors_width_node, priors_height_node, priors_width_node, priors_height_node])
    concat_1 = Concat(graph, {'name': 'concat_width_height',
                              'axis': -1, 'in_ports_count': 2}).create_node(
        [priors_width_node, priors_height_node])
    concat_2 = Concat(graph, {'name': 'concat_width_height_width',
                              'axis': -1, 'in_ports_count': 2}).create_node(
        [concat_1, priors_width_node])
    concat_width_height_node = Concat(graph, {'name': 'concat_priors_width_height', 'axis': -1,
                                              'in_ports_count': 2}
                                      ).create_node([concat_2, priors_height_node])

    applied_width_height_regressions_node = Mul(graph, {'name': 'final_regressions'}).create_node(
        [concat_width_height_node, match.single_input_node(0)[0]])

    # reshape to 2D tensor as Inference Engine Detection Output layer expects
    reshape_regression_node = create_op_node_with_second_input(graph, Reshape, int64_array([0, -1]),
                                                               dict(name='reshape_regression'),
                                                               applied_width_height_regressions_node)

    detection_output_op = DetectionOutput(graph, match.custom_replacement_desc.custom_attributes)
    # get nms from the original network
    iou_threshold = None
    nms_nodes = graph.get_op_nodes(op='NonMaxSuppression')
    if len(nms_nodes) > 0:
        # it is highly unlikely that for different classes NMS has different
        # moreover DetectionOutput accepts only scalar values for iou_threshold (nms_threshold)
        iou_threshold = nms_nodes[0].in_node(3).value
    if iou_threshold is None:
        raise Error('During {} `iou_threshold` was not retrieved from RetinaNet graph'.format(self.replacement_id))

    detection_output_node = detection_output_op.create_node(
        [reshape_regression_node, reshape_classes_node, priors],
        dict(name=detection_output_op.attrs['type'], nms_threshold=iou_threshold, clip_after_nms=1,
             normalized=1, variance_encoded_in_target=0, background_label_id=1000))

    # As outputs are replaced with a postprocessing node, outgoing tensor names are no longer
    # correspond to original tensors and should be removed from output->Result edges
    out_nodes = []
    for out in range(match.outputs_count()):
        out_nodes.append(match.output_node(out)[0])
    clear_tensor_names_info(out_nodes)

    return {'detection_output_node': detection_output_node}
def replace_sub_graph(self, graph: Graph, match: dict):
    """
    Normalize an Interpolate-like node so its second input (output spatial size)
    is always connected.

    When input 1 is absent/disconnected, the target size is computed from the
    input's spatial dims (ShapeOf sliced to dims [2:4)) combined with the node's
    `factor`, `pads`, `shrink_factor`, `zoom_factor`, or explicit width/height
    attributes. When input 1 is already connected (caffe only), it is replaced
    by the spatial slice of that input's shape.
    """
    node = match['op']

    if 1 not in node.in_ports() or node.in_port(1).disconnected():

        if node.has_valid('factor') and not node.has_valid('width') and not node.has_valid('height'):
            # Target size = spatial dims of the input * factor.
            factor = Const(graph, {'value': np.array(node.factor)}).create_node()

            shape = Shape(graph, {'name': node.name + '/shape'}).create_node()

            # Slice dims [2:4) of the input shape (spatial H, W in NCHW — assumed layout).
            begin = Const(graph, {'value': np.array([2])}).create_node()
            end = Const(graph, {'value': np.array([4])}).create_node()
            stride = Const(graph, {'value': np.array([1])}).create_node()
            ss = StridedSlice(graph, {'name': node.name + '/ss_0_port',
                                      'begin_mask': np.array([1]),
                                      'end_mask': np.array([0]),
                                      'new_axis_mask': np.array([0]),
                                      'shrink_axis_mask': np.array([0]),
                                      'ellipsis_mask': np.array([0])}).create_node()

            mul = Mul(graph, {'name': node.name + '/factor_mul_'}).create_node()

            source = node.in_port(0).get_connection().get_source()
            source.connect(shape.in_port(0))
            shape.out_port(0).connect(ss.in_port(0))
            begin.out_port(0).connect(ss.in_port(1))
            end.out_port(0).connect(ss.in_port(2))
            stride.out_port(0).connect(ss.in_port(3))
            ss.out_port(0).connect(mul.in_port(0))
            factor.out_port(0).connect(mul.in_port(1))

            node.add_input_port(1, skip_if_exist=True)
            assert node.in_port(1).disconnected()
            mul.out_port(0).connect(node.in_port(1))

        else:
            # Base value for all factor-driven branches: padded spatial size.
            shape = Shape(graph, {'name': node.name + '/shape'}).create_node()

            begin = Const(graph, {'value': np.array([2])}).create_node()
            end = Const(graph, {'value': np.array([4])}).create_node()
            stride = Const(graph, {'value': np.array([1])}).create_node()
            ss = StridedSlice(graph, {'name': node.name + '/ss_0_port',
                                      'begin_mask': np.array([1]),
                                      'end_mask': np.array([0]),
                                      'new_axis_mask': np.array([0]),
                                      'shrink_axis_mask': np.array([0]),
                                      'ellipsis_mask': np.array([0])}).create_node()

            source = node.in_port(0).get_connection().get_source()
            source.connect(shape.in_port(0))
            shape.out_port(0).connect(ss.in_port(0))
            begin.out_port(0).connect(ss.in_port(1))
            end.out_port(0).connect(ss.in_port(2))
            stride.out_port(0).connect(ss.in_port(3))

            pads_value = node.pads_begin + node.pads_end
            pads_const = Const(graph, {'value': np.array(pads_value)}).create_node()
            add = Add(graph, {'name': node.name + '/pad_add'}).create_node()
            ss.out_port(0).connect(add.in_port(0))
            add.in_port(1).connect(pads_const.out_port(0))

            if node.soft_get('shrink_factor') != 1 and node.soft_get('zoom_factor') == 1:
                # Shrink only: out = (padded - 1) / shrink_factor + 1.
                shrink_factor = node.shrink_factor
                if shrink_factor < 1:
                    log.error('Shrink factor should be positive in node {}'.format(node.id))
                    return None

                const = Const(graph, {'name': node.name + '/pre_shrink_sub_const',
                                      'value': np.array(-1)}).create_node()
                sub = Add(graph, {'name': node.name + '/pre_shrink_sub'}).create_node()
                add.out_port(0).connect(sub.in_port(0))
                sub.in_port(1).connect(const.out_port(0))

                # Division expressed as multiplication by the reciprocal.
                const = Const(graph, {'value': np.array(1 / shrink_factor),
                                      'name': node.name + 'shrink_factor_div_const'}).create_node()
                div = Mul(graph, {'name': node.name + 'shrink_factor_div'}).create_node()
                sub.out_port(0).connect(div.in_port(0))
                div.in_port(1).connect(const.out_port(0))

                const = Const(graph, {'name': node.name + '/shrink_factor_add_one_const',
                                      'value': np.array(1)}).create_node()
                add = Add(graph, {'name': node.name + '/shrink_factor_add_one'}).create_node()
                div.out_port(0).connect(add.in_port(0))
                const.out_port(0).connect(add.in_port(1))

                node.add_input_port(1, skip_if_exist=True)
                assert node.in_port(1).disconnected()
                add.out_port(0).connect(node.in_port(1))

            elif node.soft_get('shrink_factor') == 1 and node.soft_get('zoom_factor') != 1:
                # Zoom only: out = padded * zoom_factor.
                zoom_factor = node.zoom_factor
                if zoom_factor < 1:
                    log.error('Zoom factor should be positive in node {}'.format(node.id))
                    return None

                node['debug_message'] = 'Interpolate layer replacer may be wrong, please, try to update it in the' \
                                        ' file (openvino/tools/mo/front/InterpolateNormalizer.py at the line {}).' \
                                        ''.format(inspect.currentframe().f_lineno) + refer_to_faq_msg(100)
                # Reshape methods can be different in some cases
                # Commented out section represents reshape that used in deeplab-caffe
                # Uncomment the following lines, if your model was trained with deeplab-caffe
                # or have the same reshape method
                # const = Const(graph, {'value': np.array(-1),
                #                       'name': node.name + 'zoom_factor_deeplab-caffe_sub_const'}).create_node()
                # sub = Add(graph, {'name': node.name + 'zoom_factor_deeplab-caffe_sub'}).create_node()
                # add.out_port(0).connect(sub.in_port(0))
                # const.out_port(0).connect(sub.in_port(1))
                #
                # const = Const(graph, {'value': np.array(zoom_factor - 1),
                #                       'name': node.name + 'zoom_factor_deeplab-caffe_mul_const'}).create_node()
                # mul = Mul(graph, {'name': node.name + 'zoom_factor_deeplab-caffe_mul'}).create_node()
                # sub.out_port(0).connect(mul.in_port(0))
                # const.out_port(0).connect(mul.in_port(1))
                #
                # sum = Add(graph, {'name': node.name + 'zoom_factor_deeplab-caffe_sum'}).create_node()
                # add.out_port(0).connect(sum.in_port(0))
                # mul.out_port(0).connect(sum.in_port(1))
                #
                # node.add_input_port(1, skip_if_exist=True)
                # assert node.in_port(1).disconnected()
                # sum.out_port(0).connect(node.in_port(1))

                # Comment out the following lines if you use the reshape method from previous section
                const = Const(graph, {'value': np.array(zoom_factor),
                                      'name': node.name + '/zoom_factor_mul_const'}).create_node()
                mul = Mul(graph, {'name': node.name + '/zoom_factor_mul'}).create_node()

                add.out_port(0).connect(mul.in_port(0))
                const.out_port(0).connect(mul.in_port(1))

                node.add_input_port(1, skip_if_exist=True)
                assert node.in_port(1).disconnected()
                mul.out_port(0).connect(node.in_port(1))

            elif node.soft_get('width') != 0 and node.soft_get('height') != 0:
                # Explicit output size provided as attributes.
                const = Const(graph, {'value': np.array([node.height, node.width])}).create_node()
                node.add_input_port(1, skip_if_exist=True)
                assert node.in_port(1).disconnected()
                const.out_port(0).connect(node.in_port(1))

            elif node.soft_get('shrink_factor') != 1 and node.soft_get('zoom_factor') != 1:
                # Combined shrink + zoom:
                # div = (padded - 1) / (shrink_factor + 1); out = div + (div - 1) * (zoom_factor - 1)
                shrink_factor = node.shrink_factor
                zoom_factor = node.zoom_factor
                if shrink_factor < 1:
                    log.error('Shrink factor should be positive in node {}'.format(node.id))
                    return None
                if zoom_factor < 1:
                    log.error('Zoom factor should be positive in node {}'.format(node.id))
                    return None

                const = Const(graph, {'value': np.array(-1)}).create_node()
                sub = Add(graph, {'name': node.name + '/shrink_zoom_factor_sub'}).create_node()
                add.out_port(0).connect(sub.in_port(0))
                const.out_port(0).connect(sub.in_port(1))

                # NOTE(review): the shrink-only branch above divides by shrink_factor,
                # while this branch divides by (shrink_factor + 1) — confirm against the
                # source framework's Interp output-size formula.
                const = Const(graph, {'value': np.array(1 / (shrink_factor + 1))}).create_node()
                div = Mul(graph, {'name': node.name + '/shrink_factor_div'}).create_node()
                sub.out_port(0).connect(div.in_port(0))
                const.out_port(0).connect(div.in_port(1))

                const = Const(graph, {'value': np.array(-1),
                                      'name': node.name + 'shrink_zoom_factor_sum_const'}).create_node()
                sum = Add(graph, {'name': node.name + '/shrink_zoom_factor_sum'}).create_node()
                div.out_port(0).connect(sum.in_port(0))
                const.out_port(0).connect(sum.in_port(1))

                const = Const(graph, {'value': np.array(zoom_factor - 1)}).create_node()
                mul = Mul(graph, {'name': node.name + '/zoom_factor_mul'}).create_node()
                sum.out_port(0).connect(mul.in_port(0))
                const.out_port(0).connect(mul.in_port(1))

                sum = Add(graph, {'name': node.name + '/final_shrink_zoom_factor_sum'}).create_node()
                div.out_port(0).connect(sum.in_port(0))
                mul.out_port(0).connect(sum.in_port(1))

                node.add_input_port(1, skip_if_exist=True)
                assert node.in_port(1).disconnected()
                sum.out_port(0).connect(node.in_port(1))
    else:
        if node.soft_get('fw') == 'caffe':
            # Input 1 is connected: replace it with the spatial slice [2:4) of its shape.
            shape = Shape(graph, {'name': node.name + '/shape'}).create_node()

            begin = Const(graph, {'value': np.array([2])}).create_node()
            end = Const(graph, {'value': np.array([4])}).create_node()
            stride = Const(graph, {'value': np.array([1])}).create_node()
            ss = StridedSlice(graph, {'name': node.name + '/ss_0_port',
                                      'begin_mask': np.array([1]),
                                      'end_mask': np.array([0]),
                                      'new_axis_mask': np.array([0]),
                                      'shrink_axis_mask': np.array([0]),
                                      'ellipsis_mask': np.array([0])}).create_node()

            source = node.in_port(1).get_connection().get_source()
            node.in_port(1).disconnect()
            source.connect(shape.in_port(0))
            shape.out_port(0).connect(ss.in_port(0))
            begin.out_port(0).connect(ss.in_port(1))
            end.out_port(0).connect(ss.in_port(2))
            stride.out_port(0).connect(ss.in_port(3))
            ss.out_port(0).connect(node.in_port(1))
def replace_pattern(self, graph: Graph, match: dict):
    """
    Make a PriorBox-like node shape-driven: both of its inputs are replaced by
    the [2:4) spatial slice of the corresponding source tensor's runtime shape,
    and the node's 3D output is wrapped with an Unsqueeze(axis=0) that takes
    over the original node name.
    """
    node = match['pb']
    name = node.soft_get('name', node.id)

    # Shapes are now taken at runtime, so static-shape mode must be disabled.
    graph.graph['cmd_params'].static_shape = False

    assert len(node.in_ports()) == 2

    # Shared begin/end/stride constants for both StridedSlice nodes: dims [2:4).
    begin = Const(graph, {
        'value': mo_array([2], dtype=np.int32),
        'name': name + '/ss_begin'
    }).create_node()
    end = Const(graph, {
        'value': mo_array([4], dtype=np.int32),
        'name': name + '/ss_end'
    }).create_node()
    stride = Const(graph, {
        'value': mo_array([1], dtype=np.int32),
        'name': name + '/ss_stride'
    }).create_node()

    shape_0 = Shape(graph, {'name': name + '/0_port'}).create_node()
    ss_0 = StridedSlice(
        graph, {
            'name': name + '/ss_0_port',
            'begin_mask': mo_array([1], dtype=np.int32),
            'end_mask': mo_array([0], dtype=np.int32),
            'new_axis_mask': mo_array([0], dtype=np.int32),
            'shrink_axis_mask': mo_array([0], dtype=np.int32),
            'ellipsis_mask': mo_array([0], dtype=np.int32)
        }).create_node()

    shape_0.out_port(0).connect(ss_0.in_port(0))
    begin.out_port(0).connect(ss_0.in_port(1))
    end.out_port(0).connect(ss_0.in_port(2))
    stride.out_port(0).connect(ss_0.in_port(3))

    # Re-route input 0 through ShapeOf + StridedSlice.
    source = node.in_port(0).get_connection().get_source()
    node.in_port(0).disconnect()
    source.connect(shape_0.in_port(0))
    ss_0.out_port(0).connect(node.in_port(0))

    shape_1 = Shape(graph, {'name': name + '/1_port'}).create_node()
    ss_1 = StridedSlice(
        graph, {
            'name': name + '/ss_1_port',
            'begin_mask': mo_array([1], dtype=np.int32),
            'end_mask': mo_array([0], dtype=np.int32),
            'new_axis_mask': mo_array([0], dtype=np.int32),
            'shrink_axis_mask': mo_array([0], dtype=np.int32),
            'ellipsis_mask': mo_array([0], dtype=np.int32)
        }).create_node()

    shape_1.out_port(0).connect(ss_1.in_port(0))
    begin.out_port(0).connect(ss_1.in_port(1))
    end.out_port(0).connect(ss_1.in_port(2))
    stride.out_port(0).connect(ss_1.in_port(3))

    # Same re-routing for input 1.
    source = node.in_port(1).get_connection().get_source()
    node.in_port(1).disconnect()
    source.connect(shape_1.in_port(0))
    ss_1.out_port(0).connect(node.in_port(1))

    ss_0['force_precision_in_ports'] = {1: 'int64', 2: 'int64', 3: 'int64'}
    ss_1['force_precision_in_ports'] = {1: 'int64', 2: 'int64', 3: 'int64'}

    node['need_shape_inference'] = True
    node['override_output_shape'] = True
    node['V10_infer'] = True

    # Wrap the output with Unsqueeze(axis=0); the wrapper inherits the original name.
    unsqueeze = create_op_node_with_second_input(
        graph, Unsqueeze, int64_array([0]), {'name': name + '/unsqueeze'})
    naked_priorbox_name = name + '/naked_not_unsqueezed'
    rename_nodes([(node, naked_priorbox_name), (unsqueeze, name)])

    node.out_port(0).get_connection().set_source(unsqueeze.out_port(0))
    node.out_port(0).connect(unsqueeze.in_port(0))
def replace_pattern(self, graph: Graph, match: dict):
    """
    Replace a Crop node with an equivalent StridedSlice.

    Three Crop flavors are supported:
      Type 1: two inputs + `offset`  -> end = ShapeOf(input 1) + begin
      Type 2: `dim` + `offset`       -> constant begin/end (end = offset + dim)
      Type 3: `crop_begin`/`crop_end`-> end = ShapeOf(input 0) - crop_end
    Masks select only the axes listed in the node's `axis` attribute.
    """
    node = match['crop']
    assert node.has_valid('axis')
    node_axis = self.list_to_ndarray(node.axis)

    in_shape = node.in_port(0).data.get_shape()
    shape_rank = in_shape.size
    # 1 on cropped axes, 0 elsewhere — used for both begin_mask and end_mask.
    axis_mask = int64_array(
        [1 if i in node_axis else 0 for i in range(shape_rank)])
    begin_mask = axis_mask.copy()
    end_mask = axis_mask.copy()

    ss = StridedSlice(
        graph, {
            'name': node.soft_get('name', node.id) + '/strided_slice',
            'begin_mask': begin_mask,
            'end_mask': end_mask,
            'new_axis_mask': np.zeros(len(end_mask)),
            'shrink_axis_mask': np.zeros(len(end_mask)),
            'ellipsis_mask': np.zeros(len(end_mask))
        }).create_node()

    if len(node.in_nodes()) == 2 and node.has_valid('offset'):
        # Crop Type 1
        begin = Const(
            graph, {
                'value': self.mask_normalizer(shape_rank, node_axis, node.offset),
                'name': ss.name + '/begin'
            }).create_node()
        # end = shape of the second (reference) input + begin offsets.
        shape = Shape(graph, {
            'name': ss.name + '/shape_of_crop'
        }).create_node()
        end = Add(graph, {'name': ss.name + '/end'}).create_node()
        node.in_port(1).get_connection().get_source().connect(shape.in_port(0))
        node.in_port(1).disconnect()
        shape.out_port(0).connect(end.in_port(0))
        begin.out_port(0).connect(end.in_port(1))
    elif node.has_valid('dim') and node.has_valid('offset'):
        # Crop Type 2
        node_dim = self.list_to_ndarray(node.dim)
        node_offset = self.list_to_ndarray(node.offset)
        assert node_dim.size == node_offset.size == node_axis.size

        begin = Const(
            graph, {
                'value': self.mask_normalizer(shape_rank, node_axis, node_offset),
                'name': ss.name + '/begin'
            }).create_node()
        # end = offset + dim on every cropped axis.
        end_values = mo_array(
            [node_offset[i] + node_dim[i] for i in range(len(node_dim))])
        end = Const(
            graph, {
                'value': self.mask_normalizer(shape_rank, node_axis, end_values),
                'name': ss.name + '/end'
            }).create_node()
    elif node.has_valid('crop_begin') and node.has_valid('crop_end'):
        # Crop Type 3
        node_crop_begin = self.list_to_ndarray(node.crop_begin)
        node_crop_end = self.list_to_ndarray(node.crop_end)
        assert len(node_crop_begin) == len(node_crop_end) == len(node_axis)

        begin = Const(
            graph, {
                'value': self.mask_normalizer(shape_rank, node_axis, node_crop_begin),
                'name': ss.name + '/begin'
            }).create_node()
        # end = ShapeOf(input) + (-crop_end), i.e. trim crop_end from each axis.
        shape = Shape(graph, {'name': ss.name + '/shape'}).create_node()
        end = Add(graph, {'name': ss.name + '/end'}).create_node()
        const = Const(
            graph, {
                'value': -1 * self.mask_normalizer(shape_rank, node_axis, node_crop_end),
                'name': ss.name + '/const'
            }).create_node()
        node.in_port(0).get_connection().get_source().connect(shape.in_port(0))
        shape.out_port(0).connect(end.in_port(0))
        const.out_port(0).connect(end.in_port(1))
    else:
        raise Exception("Unknown type of Crop")

    source = node.in_port(0).get_connection().get_source()

    stride = Const(
        graph, {
            'value': np.ones(shape_rank, dtype=np.int64),
            'name': ss.name + '/stride'
        }).create_node()

    source.connect(ss.in_port(0))
    begin.out_port(0).connect(ss.in_port(1))
    end.out_port(0).connect(ss.in_port(2))
    stride.out_port(0).connect(ss.in_port(3))

    node.in_port(0).disconnect()
    node.out_port(0).get_connection().set_source(ss.out_port(0))

    ss['force_precision_in_ports'] = {1: 'int64', 2: 'int64', 3: 'int64'}