def backend_attrs(self):
    """Return backend attribute serializers; only opset6 is supported."""
    opset = self.get_opset()
    if opset != 'opset6':
        raise Error('Unknown opset version "{}"'.format(opset))
    return [
        ('classes_index_type', lambda node: np_data_type_to_destination_type(node.classes_index_type)),
        ('sequence_length_type', lambda node: np_data_type_to_destination_type(node.sequence_length_type)),
        ('merge_repeated', lambda node: bool_to_str(node, 'merge_repeated')),
    ]
def backend_attrs(self):
    """Return backend attribute serializers for the supported opsets.

    opset1 has no 'output_type'; opset3/4/5 add it. Any other opset is an error.
    """
    opset = self.get_opset()
    common = ['sort_result_descending', 'box_encoding']
    if opset == 'opset1':
        return common
    if opset in ('opset3', 'opset4', 'opset5'):
        return common + [('output_type', lambda node: np_data_type_to_destination_type(node.output_type))]
    raise Error('Unsupported operation opset version "{}"'.format(opset))
def backend_attrs(self):
    """Return backend attribute serializers.

    The "extension" opset serializes only 'with_right_bound'; every other
    opset additionally serializes 'output_type'.
    """
    attrs = ['with_right_bound']
    if self.get_opset() != "extension":
        attrs.append(('output_type', lambda node: np_data_type_to_destination_type(node.output_type)))
    return attrs
def backend_attrs(self):
    """Return backend attribute serializers; opset3 adds 'index_element_type'."""
    opset = self.get_opset()
    base = ['axis', 'mode', 'sort']
    if opset == 'opset1':
        return base
    if opset == 'opset3':
        return base + [('index_element_type', lambda node: np_data_type_to_destination_type(node.index_element_type))]
    raise Error('Unknown opset version "{}"'.format(opset))
def backend_attrs(self):
    """Serialize the cast target type.

    IR v10 uses the 'destination_type' attribute; older IR versions used
    'precision' with a different type-name mapping.
    """
    if self.ir_version != 10:
        return [('precision', lambda node: np_data_type_to_precision(node.dst_type))]
    return [('destination_type', lambda node: np_data_type_to_destination_type(node.dst_type))]
def supported_attrs(self):
    """Return serializers for a constant's binary-blob attributes.

    'element_type' prefers an explicitly forced type over the dtype of the
    stored value.
    """
    def _shape(node):
        return ','.join(map(str, node.shape))

    def _element_type(node):
        if node.has_valid('force_type'):
            return precision_to_destination_type(node.force_type)
        return np_data_type_to_destination_type(node.value.dtype)

    return ['offset', 'size', ('shape', _shape), ('element_type', _element_type)]
def supported_attrs(self):
    """Return 'shape'/'element_type' serializers for IR v10, else nothing."""
    if self.ir_version != 10:
        return []
    return [
        ('shape', lambda node: ','.join(str(dim) for dim in node.shape)),
        ('element_type', lambda node: np_data_type_to_destination_type(node.data_type)),
    ]
def backend_attrs(self):
    """Return backend attribute serializers; opset4 adds 'output_type'."""
    opset = self.get_opset()
    if opset == 'opset1':
        return []
    if opset == 'opset4':
        return [('output_type', lambda node: np_data_type_to_destination_type(node.output_type))]
    raise Error('Unknown opset version "{}"'.format(opset))
def serialize_old_api_map_for_result(self, node) -> Dict:
    """Serialize the old-API transpose order for a Result node.

    Returns an empty dict when no 'order' is recorded. 'element_type' is the
    recorded input-port precision when available, 'undefined' otherwise.
    """
    if 'order' not in self.info:
        return {}
    element_type = 'undefined'
    if node.has_port('in', 0) and node.has_valid('_in_port_precision'):
        element_type = np_data_type_to_destination_type(node.soft_get('_in_port_precision')[0])
    return {
        'element_type': element_type,
        'order': ','.join(str(axis) for axis in self.info['order']),
    }
def supported_attrs(self):
    """Return supported attributes.

    Pre-v10 IRs only expose 'center_point_box'. For IR v10 the result depends
    on the opset: opset1 lacks 'output_type', opset3 has it.
    """
    if self.ir_version < 10:
        return ['center_point_box']
    opset = self.get_opset()
    if opset == 'opset1':
        return ['sort_result_descending', 'box_encoding']
    if opset == 'opset3':
        return ['sort_result_descending', 'box_encoding',
                ('output_type', lambda node: np_data_type_to_destination_type(node.output_type))]
    raise Error('Unsupported operation opset version "{}"'.format(opset))
def dequantize_data(fake_quantize: Node, dst_type: type, quantized_type: type) -> None:
    """Replace a FakeQuantize over compressed (quantized) weights with an explicit
    dequantization subgraph: Cast -> Subtract(zero_point) -> Multiply(scale).

    :param fake_quantize: FakeQuantize node whose input 0 is a Convert that
                          compressed the weights to ``quantized_type``
    :param dst_type: numpy type to decompress the weights to
    :param quantized_type: numpy type the weights are currently stored as
    :return: None — the graph is modified in place. (The original ``-> Node``
             annotation was wrong: there is no return statement.)
    """
    graph = fake_quantize.graph
    quantized_data = fake_quantize.in_port(0).get_source().node
    name = fake_quantize.soft_get('name', fake_quantize.id)

    # The compression pass must already have inserted a Convert to the
    # quantized storage type; anything else means the weights were not
    # compressed the way this pass expects.
    assert quantized_data.soft_get('type') == 'Convert' and quantized_data.dst_type == quantized_type, \
        'Weights aren`t compressed as expected for node {}'.format(fake_quantize.soft_get('name', fake_quantize.id))

    # Cast the stored (quantized) values back to dst_type. stop_value_propagation
    # keeps constant folding from collapsing the decompression at build time.
    dequantizing_cast = Cast(graph, dict(
        name=quantized_data.name + "/to_{}".format(np_data_type_to_destination_type(dst_type)),
        dst_type=dst_type, stop_value_propagation=True)).create_node()
    fake_quantize.in_port(0).get_connection().set_destination(dequantizing_cast.in_port(0))

    # limits of dequantize
    in_low = fake_quantize.in_port(1).get_source()
    in_high = fake_quantize.in_port(2).get_source()
    out_low = fake_quantize.in_port(3).get_source()
    out_high = fake_quantize.in_port(4).get_source()

    # scale calculation: scale = (out_high - out_low) / (in_high - in_low)
    output_range = Sub(graph, {'name': name + '/output_range'}).create_node()
    output_range.in_port(0).connect(out_high)
    output_range.in_port(1).connect(out_low)

    input_range = Sub(graph, {'name': name + '/input_range'}).create_node()
    input_range.in_port(0).connect(in_high)
    input_range.in_port(1).connect(in_low)

    scale = Div(graph, {'name': name + '/scale'}).create_node()
    scale.in_port(0).connect(output_range.out_port(0))
    scale.in_port(1).connect(input_range.out_port(0))

    # shift calculation: zero_point = in_low - out_low / scale
    descaled_output_low = Div(graph, {'name': name + '/descaled_output_low'}).create_node()
    descaled_output_low.in_port(0).connect(out_low)
    descaled_output_low.in_port(1).connect(scale.out_port(0))

    shift = Sub(graph, {'name': name + '/zero_point'}).create_node()
    shift.in_port(0).connect(in_low)
    shift.in_port(1).connect(descaled_output_low.out_port(0))

    # DeQuantize(x) == Mul(Sub(x, zero_point), scale)
    sub_zp = Sub(graph, {'name': name + '/minus_zp'}).create_node()
    sub_zp.in_port(0).connect(dequantizing_cast.out_port(0))
    sub_zp.in_port(1).connect(shift.out_port(0))

    # NOTE(review): 'mulpiply' typo is preserved — it is only a node name, but
    # renaming could break anything that matches nodes by name.
    mul_scale = Mul(graph, {'name': name + '/mulpiply_by_scale'}).create_node()
    mul_scale.in_port(0).connect(sub_zp.out_port(0))
    mul_scale.in_port(1).connect(scale.out_port(0))

    # Reroute all consumers of the FakeQuantize output to the dequantized value,
    # then drop the FakeQuantize and its output data node.
    # NOTE(review): the list mixes a node id (fake_quantize.id) with a Node
    # object (fake_quantize.out_node(0)) — presumably remove_nodes_from accepts
    # both here; verify against the Graph implementation.
    fake_quantize.out_port(0).get_connection().set_source(mul_scale.out_port(0))
    graph.remove_nodes_from([fake_quantize.id, fake_quantize.out_node(0)])
def serialize_old_api_map_for_parameter(self, node) -> Dict:
    """Serialize the old-API element type and transpose order for a Parameter.

    Returns an empty dict when neither 'legacy_type' nor 'inverse_order' is
    recorded. Otherwise both 'order' and 'element_type' keys are always present
    (defaulting to '' and 'undefined').
    """
    result = {}
    if 'legacy_type' not in self.info and 'inverse_order' not in self.info:
        return result
    result['order'] = ''
    result['element_type'] = 'undefined'
    if 'legacy_type' in self.info:
        result['element_type'] = np_data_type_to_destination_type(self.info['legacy_type'])
    else:
        if node.has_port('out', 0) and not node.out_port(0).disconnected():
            result['element_type'] = np_data_type_to_destination_type(node.out_port(0).get_data_type())
    if 'inverse_order' in self.info:
        result['order'] = ','.join(map(str, self.info['inverse_order']))
    else:
        if node.has_port('out', 0) and not node.out_port(0).disconnected():
            # Bug fix: this branch used to assign a raw Python list
            # (list(range(...))) while every other serialization path — the
            # branch above and serialize_old_api_map_for_result — emits a
            # comma-separated string. Emit the identity order in the same
            # string format so 'order' always has one type.
            result['order'] = ','.join(map(str, range(len(node.out_port(0).data.get_shape()))))
    return result
def backend_attrs(self):
    """Return backend attribute serializers for a pooling-style operation.

    Spatial vectors (strides/kernel/dilations) and pads are serialized as
    comma-separated strings restricted to the node's spatial dimensions.
    """
    def _spatial(attr):
        # Serializer for a per-dimension attribute, spatial dims only.
        return lambda node: ','.join(map(str, node[attr][node.spatial_dims]))

    def _pads(side):
        # side 0 -> pads_begin, side 1 -> pads_end.
        return lambda node: ','.join(map(str, get_backend_pad(node.pad, node.spatial_dims, side)))

    return [
        ('strides', _spatial('stride')),
        ('kernel', _spatial('window')),
        ('pads_begin', _pads(0)),
        ('pads_end', _pads(1)),
        ('exclude-pad', lambda node: bool_to_str(node, 'exclude_pad')),
        'rounding_type',
        ('auto_pad', lambda node: node.auto_pad if node.has_valid('auto_pad') else 'explicit'),
        ('dilations', _spatial('dilation')),
        'axis',
        ('index_element_type', lambda node: np_data_type_to_destination_type(node.index_element_type)),
    ]
def supported_attrs(self):
    """Return serializers for 'shape' (unmasked, comma-joined) and 'element_type'."""
    def _shape(node):
        return ','.join(str(dim) for dim in unmask_shape(node.shape))

    def _element_type(node):
        return np_data_type_to_destination_type(node.data_type)

    return [('shape', _shape), ('element_type', _element_type)]
def backend_attrs(self):
    """Serialize the cast target type as the 'destination_type' attribute."""
    def _destination_type(node):
        return np_data_type_to_destination_type(node.dst_type)

    return [('destination_type', _destination_type)]
def serialize(self, node) -> Dict:
    """Serialize the recorded legacy type, or an empty dict when absent."""
    if 'legacy_type' in self.info:
        return {'value': np_data_type_to_destination_type(self.info['legacy_type'])}
    return {}
def create_ref_net_in_scales_mode(precision, input_shape, output_shape, sizes_value, scales_value, attrs):
    """Build the reference graph for an Interpolate operation in 'scales' mode.

    The reference computes the target spatial sizes dynamically:
    ShapeOf -> Convert(float) -> Multiply(scales) -> Add(eps) -> Floor ->
    Convert(i64) -> StridedSlice(spatial dims), and feeds sizes (port 1),
    spatial scales (port 2) and spatial axes (port 3) into the Interpolate
    node described by ``attrs``.

    :param precision: precision string, converted to the IR destination type
    :param input_shape: full input shape (rank defines the slice end)
    :param output_shape: expected shape of the Interpolate output data node
    :param sizes_value: unused here (NOTE(review): kept for signature parity
                        with the sibling 'sizes'-mode builder, presumably)
    :param scales_value: per-dimension scale factors; spatial entries are
                         selected via spatial_dimensions(input_shape)
    :param attrs: attribute dict used verbatim as the 'interpolate' node
    :return: graph built by build_graph(nodes_attrs, edges)
    """
    input_data_type = np_data_type_to_destination_type(data_type_str_to_np(precision))
    input_rank = len(input_shape)
    # eps guards the Floor against float rounding just below an integer.
    epsilon = np.array([1.0e-5])
    spatial_dims = spatial_dimensions(input_shape)
    begin_dim = spatial_dims[0]
    end_dim = input_rank
    spatial_scales_value = scales_value[spatial_dims]

    nodes_attrs = {
        'input': {'kind': 'op', 'type': 'Parameter'},
        'input_data': {'shape': input_shape, 'kind': 'data'},
        # Dynamic size computation: shape -> float -> * scales -> + eps -> floor -> i64.
        'shape_of': {'kind': 'op', 'type': 'ShapeOf'},
        'shape_of_data': {'shape': int64_array([input_rank]), 'kind': 'data'},
        'shape_to_float': {'kind': 'op', 'type': 'Convert', 'destination_type': input_data_type},
        'shape_to_float_data': {'shape': int64_array([input_rank]), 'kind': 'data'},
        'mul': {'kind': 'op', 'type': 'Multiply'},
        'mul_scales_const_data': {'kind': 'data', 'value': scales_value},
        'mul_scales_const': {'kind': 'op', 'type': 'Const'},
        'mul_scales_data': {'shape': int64_array([input_rank]), 'kind': 'data'},
        'mul_data': {'shape': int64_array([input_rank]), 'kind': 'data'},
        'eps_const_data': {'kind': 'data', 'value': epsilon},
        'eps_const': {'kind': 'op', 'type': 'Const'},
        'eps_data': {'shape': int64_array([1]), 'kind': 'data'},
        'add': {'kind': 'op', 'type': 'Add'},
        'add_data': {'shape': int64_array([input_rank]), 'kind': 'data'},
        'floor': {'type': 'Floor', 'kind': 'op'},
        'floor_data': {'shape': int64_array([input_rank]), 'kind': 'data'},
        'to_int': {'kind': 'op', 'type': 'Convert', 'destination_type': 'i64'},
        'to_int_data': {'shape': int64_array([input_rank]), 'kind': 'data'},
        # Slice out only the spatial entries of the computed sizes.
        'strided_slice': {
            'kind': 'op', 'type': 'StridedSlice', 'begin_mask': 0, 'end_mask': 0,
            'new_axis_mask': 0, 'shrink_axis_mask': 0, 'ellipsis_mask': 0
        },
        'strided_slice_data': {'shape': int64_array([len(spatial_scales_value)]), 'kind': 'data'},
        'begin_const_data': {'kind': 'data', 'value': int64_array([begin_dim])},
        'begin_const': {'kind': 'op', 'type': 'Const'},
        'begin_data': {'shape': int64_array([1]), 'kind': 'data'},
        'end_const_data': {'kind': 'data', 'value': int64_array([end_dim])},
        'end_const': {'kind': 'op', 'type': 'Const'},
        'end_data': {'shape': int64_array([1]), 'kind': 'data'},
        'stride_const_data': {'kind': 'data', 'value': int64_array([1])},
        'stride_const': {'kind': 'op', 'type': 'Const'},
        'stride_data': {'shape': int64_array([1]), 'kind': 'data'},
        # Direct Interpolate inputs: spatial scales and spatial axes.
        'scales_const_data': {'kind': 'data', 'value': spatial_scales_value},
        'scales_const': {'kind': 'op', 'type': 'Const'},
        'scales_data': {'shape': int64_array([len(spatial_scales_value)]), 'kind': 'data'},
        'axes_const_data': {'kind': 'data', 'value': spatial_dims},
        'axes_const': {'kind': 'op', 'type': 'Const'},
        'axes_data': {'shape': int64_array([len(spatial_dims)]), 'kind': 'data'},
        'interpolate': attrs,
        'interpolate_data': {'shape': output_shape, 'kind': 'data'},
        'result': {'kind': 'op', 'type': 'Result'},
    }

    edges = [
        ('input', 'input_data'),
        ('input_data', 'interpolate', {'in': 0, 'out': 0}),
        ('input_data', 'shape_of', {'in': 0, 'out': 0}),
        ('shape_of', 'shape_of_data'),
        ('shape_of_data', 'shape_to_float'),
        ('shape_to_float', 'shape_to_float_data'),
        ('shape_to_float_data', 'mul', {'in': 0}),
        ('mul_scales_const_data', 'mul_scales_const'),
        ('mul_scales_const', 'mul_scales_data'),
        ('mul_scales_data', 'mul', {'in': 1}),
        ('mul', 'mul_data'),
        ('eps_const_data', 'eps_const'),
        ('eps_const', 'eps_data'),
        ('mul_data', 'add', {'in': 0}),
        ('eps_data', 'add', {'in': 1}),
        ('add', 'add_data'),
        ('add_data', 'floor'),
        ('floor', 'floor_data'),
        ('floor_data', 'to_int'),
        ('to_int', 'to_int_data'),
        ('to_int_data', 'strided_slice', {'in': 0}),
        ('strided_slice', 'strided_slice_data'),
        ('begin_const_data', 'begin_const'),
        ('begin_const', 'begin_data'),
        ('begin_data', 'strided_slice', {'in': 1}),
        ('end_const_data', 'end_const'),
        ('end_const', 'end_data'),
        ('end_data', 'strided_slice', {'in': 2}),
        ('stride_const_data', 'stride_const'),
        ('stride_const', 'stride_data'),
        ('stride_data', 'strided_slice', {'in': 3}),
        ('strided_slice_data', 'interpolate', {'in': 1}),
        ('scales_const_data', 'scales_const'),
        ('scales_const', 'scales_data'),
        ('scales_data', 'interpolate', {'in': 2}),
        ('axes_const_data', 'axes_const'),
        ('axes_const', 'axes_data'),
        ('axes_data', 'interpolate', {'in': 3}),
        ('interpolate', 'interpolate_data'),
        ('interpolate_data', 'result')
    ]

    return build_graph(nodes_attrs, edges)
def backend_attrs(self):
    """Serialize 'output_type' plus the two seed attributes."""
    def _output_type(node):
        return np_data_type_to_destination_type(node.output_type)

    return [('output_type', _output_type), 'global_seed', 'op_seed']