def extract(cls, node):
    """Populate *node* with Reshape attributes read from the wrapped module.

    The op name comes from the defining extractor class; the target shape
    is taken from ``node.module.shape``.
    """
    Reshape.update_node_stat(node, {
        'op': __class__.op,
        'dim': node.module.shape,
    })
    return cls.enabled
def extract(cls, node):
    """Extract an ONNX Reshape node.

    The 'shape' attribute is optional: newer opsets pass the target shape
    as a second input instead, in which case no 'dim' attribute is set.
    """
    shape = onnx_attr(node, 'shape', 'ints', None)
    if shape is None:
        Reshape.update_node_stat(node)
    else:
        Reshape.update_node_stat(node, {'dim': np.array(shape, dtype=np.int64)})
    return cls.enabled
def onnx_reshape_ext(node):
    """Extract ONNX Reshape op of different versions.

    Supports both the latest Reshape (two inputs, shape as second input)
    and Reshape-1 (single input, shape encoded in the 'shape' attribute).
    """
    shape = onnx_attr(node, 'shape', 'ints', None)
    if shape is None:
        Reshape.update_node_stat(node)
    else:
        Reshape.update_node_stat(node, {'dim': np.array(shape, dtype=np.int64)})
    return node.graph.node[node.id]
def extract(cls, node):
    """Extract an MXNet Reshape node.

    Shapes that use the extended MXNet codes -2/-3/-4, or the ``reverse``
    flag, cannot be expressed by a plain Reshape and are routed to
    MXReshape instead.
    """
    attrs = get_mxnet_layer_attrs(node.symbol_dict)
    dim = attrs.tuple("shape", int, None)
    reverse = attrs.bool("reverse", False)
    update_attrs = {'dim': int64_array(dim), 'reverse': reverse}

    # 'reverse' alone forces the MXReshape path regardless of the shape
    # values. The previous code tested it per shape element inside the
    # loop, so reverse=True combined with an empty shape tuple silently
    # fell through to the plain Reshape branch.
    if reverse or any(d in (-2, -3, -4) for d in dim):
        MXReshape.update_node_stat(node, update_attrs)
        return cls.enabled

    # update the attributes of the node
    Reshape.update_node_stat(node, update_attrs)
    return cls.enabled
def extract(node):
    """Extract a plain Reshape; reject MXNet's extended shape codes.

    Values -2/-3/-4 in the 'shape' attribute carry MXNet-specific
    semantics that a generic Reshape cannot express, so extraction
    fails with an error for them.
    """
    attrs = get_mxnet_layer_attrs(node.symbol_dict)
    dim = attrs.tuple("shape", int, None)
    for value in dim:
        if value not in (-2, -3, -4):
            continue
        log.error(
            'The attribute "shape" of the operation "{}" contains value "{}" which is not supported.'
            .format(node.soft_get('name'), value))
        return False
    # update the attributes of the node
    Reshape.update_node_stat(node, {'dim': np.array(dim)})
    return __class__.enabled
def extract(cls, node):
    """Extract a Caffe Reshape node.

    Only the default ``axis`` (0) and ``num_axes`` (-1) are supported;
    any other value aborts extraction with an error and returns False.
    """
    param = node.pb.reshape_param

    if param.axis != 0:
        # Bug fix: `param` is a protobuf message, so the old
        # `param['axis']` in the format call raised TypeError instead of
        # logging the offending value. Attribute access is correct.
        log.error('The operation "Reshape" has attribute "axis" with unsupported value "{}"'.format(param.axis))
        return False

    if param.num_axes != -1:
        # Same fix as above: `param['num_axes']` -> `param.num_axes`.
        log.error('The operation "Reshape" has attribute "num_axes" with unsupported value "{}"'.format(
            param.num_axes))
        return False

    Reshape.update_node_stat(node, {
        'dim': list(param.shape.dim),
    })
    return cls.enabled
def replace_pattern(graph: Graph, match: dict):
    # Materialize the already-inferred output shape of the matched node as
    # a constant data node and feed it in as input port 1, turning the
    # single-input op into a two-input Reshape.
    flatten = match['reshape']
    out_shape = flatten.out_port(0).data.get_shape()

    # Fresh data node carrying the target shape values.
    const_id = graph.unique_id(flatten.id + '/DimData')
    graph.add_node(const_id,
                   **{
                       'kind': 'data',
                       'value': out_shape,
                       'shape': np.array(out_shape.shape),
                       'name': flatten.id + '/DimData'
                   })
    # Connect the shape constant to the node's (possibly new) port 1.
    flatten.add_input_port(1, skip_if_exist=True)
    graph.add_edge(const_id, flatten.id, **{'in': 1})
    flatten['force_precision_in_ports'] = {1: 'int64'}
    # TODO workaround for nGraph only!!!
    flatten.in_node(1)['value'] = flatten.out_node(0)['shape']
    Reshape.update_node_stat(flatten)
    # NOTE(review): this re-applies the same per-port precision override
    # set above — presumably because update_node_stat can reset node
    # attributes; confirm the first assignment is really clobbered before
    # removing either one.
    flatten['force_precision_in_ports'] = {1: 'int64'}
def extract(cls, node: Node):
    """Mark the node as a Reshape in which a 0 in the target shape is a
    literal zero, not a copy-from-input placeholder."""
    attrs = {'special_zero': False}
    Reshape.update_node_stat(node, attrs)
    return cls.enabled
def extract(node):
    """Extract the node as a plain Reshape with no extra attributes."""
    attrs = {}
    Reshape.update_node_stat(node, attrs)
    return __class__.enabled