Example #1
    def _common(cls, node, scales, sizes, nearest_mode='round_prefer_ceil', **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] if inp else None for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape
        x_rank = len(x_shape)
        spatial_size = x_rank - 2
        in_c = x_shape[1]
        in_w = x_shape[-1]
        if scales is not None:
            sizes = [None if dim is None else int(round(dim * scale))
                     for dim, scale in zip(x_shape, scales)]
        sizes = [None if x_shape[idx] is None else dim
                 for idx, dim in enumerate(sizes)]
        if spatial_size == 1:
            sizes.insert(-1, 1)

        if nearest_mode != 'round_prefer_ceil':
            logger.warning('only round_prefer_ceil is supported for nearest mode')

        if spatial_size != 2 and spatial_size != 1:
            raise ValueError('resize only supports 4D tensor in NCHW mode or 3D tensor in NCF mode'
                             f' - input shape is {x_shape} sizes is {sizes}')

        if not all(x_dim == size_dim for x_dim, size_dim in zip(x_shape[:2:], sizes[:2:])):
            raise ValueError('resize only supports 4D tensor in NCHW mode or 3D tensor in NCF mode'
                             f' - input shape is {x_shape} sizes is {sizes}')

        mode = node.attrs.get('mode', 'nearest')
        if mode != 'nearest' and mode != 'linear':
            raise ValueError('resize only supports nearest and linear modes')

        params_class = BilinearResizerParameters if mode == 'linear' else NearestNeighborResizerParameters

        params = params_class(valid_name,
                              new_shape=tuple(sizes[2::]),
                              align_corners=False,
                              halfpixel_centers=False,
                              in_dims_hint=[['c', 'h', 'w']],
                              out_dims_hint=[['c', 'h', 'w']])

        if spatial_size == 1:
            r1_params = ReshapeParameters(f'{valid_name}_reshape2d',
                                          old_shape=Dim.unnamed([in_c, in_w]),
                                          shape=Dim.unnamed([in_c, 1, in_w]))
            r2_params = ReshapeParameters(f'{valid_name}_reshape1d',
                                          old_shape=Dim.unnamed([in_c, 1, sizes[-1]]),
                                          shape=Dim.unnamed([in_c, sizes[-1]]))
            G.add_edge(NNEdge(from_node=x[0], to_node=r1_params, from_idx=x[1], to_idx=0))
            G.add_edge(NNEdge(from_node=r1_params, to_node=params, from_idx=0, to_idx=0))
            G.add_edge(NNEdge(from_node=params, to_node=r2_params, from_idx=0, to_idx=0))
            pout_dims = ProvisionalDim(sizes[:-2:] + sizes[-1::])
            params = r2_params
        else:
            pout_dims = ProvisionalDim(sizes)
            G.add_edge(NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))

        all_nodes[node.output[0]] = (params, 0, pout_dims)
        return params
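A standalone sketch (illustrative only, not part of the importer; the shapes are assumptions) of how output sizes are derived from scales while unknown dimensions stay unknown:

# illustrative only: derive resize output sizes from per-axis scales,
# keeping unknown (None) dimensions unknown
x_shape = [None, 3, 32, 32]          # N, C, H, W with unknown batch
scales = [1.0, 1.0, 2.0, 2.0]
sizes = [None if dim is None else int(round(dim * scale))
         for dim, scale in zip(x_shape, scales)]
assert sizes == [None, 3, 64, 64]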
Example #2
def add_reshape(G,
                tensors,
                name,
                subgraph,
                _,
                op,
                load_tensors=False,
                dequantize=False):
    reshape_opts = ReshapeOptions.ReshapeOptions()
    reshape_opts.Init(op.BuiltinOptions().Bytes, op.BuiltinOptions().Pos)
    inp = get_input_size(tensors, subgraph, op, 0)
    set_shape = get_tensor(G.model, tensors, subgraph, op, 1)
    # TODO - Which to use? Attribute or input? TFLITE seems to set both
    del set_shape
    new_shape = list(reshape_opts.NewShapeAsNumpy())
    if -1 in new_shape:
        new_shape_size = reduce(lambda x, y: x * 1
                                if y == -1 else x * y, new_shape, 1)
        inp_size = reduce(lambda x, y: x * y, inp, 1)
        new_shape[new_shape.index(-1)] = inp_size // new_shape_size

    old_shape = Dim.unnamed(remove_batch_dim(inp), is_ordered=True)
    new_shape = Dim.unnamed(remove_batch_dim(new_shape), is_ordered=True)
    node = ReshapeParameters(name, old_shape=old_shape, shape=new_shape)
    return add_node(G, node)
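The -1 wildcard resolution above can be checked in isolation; a minimal sketch with a hypothetical helper name:

from functools import reduce

def resolve_wildcard(in_shape, new_shape):
    # -1 absorbs whatever dimension keeps the element count constant
    new_shape = list(new_shape)
    if -1 in new_shape:
        known = reduce(lambda x, y: x if y == -1 else x * y, new_shape, 1)
        total = reduce(lambda x, y: x * y, in_shape, 1)
        new_shape[new_shape.index(-1)] = total // known
    return new_shape

assert resolve_wildcard([2, 3, 4], [6, -1]) == [6, 4]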
Example #3
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape
        axes = cls._resolve_negative_ranks(kwargs['axes'], len(x_shape))
        if axes:
            if any(x_shape[axis] != 1 for axis in axes):
                raise ValueError("axis parameter in node %s is invalid %s" % (valid_name, axes))
            new_shape = [dim for idx, dim in enumerate(x_shape) if idx not in axes]
        else:
            new_shape = [dim for dim in x_shape if dim != 1]

        pshape = ProvisionalDim(new_shape)
        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            x_val = cls.get_constant(x)
            params = ConstantInputParameters(valid_name, value=x_val.reshape(new_shape),
                                             constant_store=G.constant_store)
        else:
            old_shape = cls._get_real_dim(x_shape)
            shape = cls._get_real_dim(new_shape)
            params = ReshapeParameters(valid_name, old_shape=old_shape, shape=shape)
            G.add_edge(NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape)
        return params
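A minimal sketch (hypothetical squeeze_shape helper) of the shape rule the handler above implements:

def squeeze_shape(shape, axes=None):
    # with explicit axes every squeezed dim must be 1; with no axes,
    # all 1-sized dims are dropped (mirrors the two branches above)
    if axes:
        if any(shape[a] != 1 for a in axes):
            raise ValueError(f'cannot squeeze non-1 axis in {shape}')
        return [d for i, d in enumerate(shape) if i not in axes]
    return [d for d in shape if d != 1]

assert squeeze_shape([1, 3, 1, 4], [0, 2]) == [3, 4]
assert squeeze_shape([1, 3, 1, 4]) == [3, 4]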
Example #4
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape
        axes = cls._resolve_negative_ranks(kwargs['axes'], len(x_shape))
        if len(x_shape) == 0:
            assert len(axes) == 1 and axes[0] == 0
            new_shape = [1]
        else:
            new_shape = [
                item for sublist in [[1, dim] if idx in axes else [dim]
                                     for idx, dim in enumerate(x_shape)]
                for item in sublist
            ]

        pshape = ProvisionalDim(new_shape)
        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            x_val = cls.get_constant(x)
            params = ConstantInputParameters(valid_name,
                                             value=x_val.reshape(new_shape))
        else:
            old_shape = cls._get_real_dim(x_shape)
            shape = cls._get_real_dim(new_shape)
            params = ReshapeParameters(valid_name,
                                       old_shape=old_shape,
                                       shape=shape)
            G.add_edge(
                NNEdge(from_node=x[0], to_node=params, from_idx=x[1],
                       to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape)
        return params
Example #5
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape
        out_rank = len(x_shape) + len(kwargs['axes'])
        axes = cls._resolve_negative_ranks(kwargs['axes'], out_rank)

        old_shape = x_shape.copy()
        new_shape = [
            1 if new_idx in axes else old_shape.pop(0)
            for new_idx in range(out_rank)
        ]

        pshape = ProvisionalDim(new_shape)
        if cls.is_constant(x):
            x_val = cls.get_constant(x)
            logger.info(
                f"reducing {valid_name} to a constant {cls.print_small(x_val)}"
            )
            params = ConstantInputParameters(valid_name,
                                             value=x_val.reshape(new_shape))
        else:
            old_shape = cls._get_real_dim(x_shape)
            shape = cls._get_real_dim(new_shape)
            params = ReshapeParameters(valid_name,
                                       old_shape=old_shape,
                                       shape=shape)
            G.add_edge(
                NNEdge(from_node=x[0], to_node=params, from_idx=x[1],
                       to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape, x[3])
        return params
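A minimal sketch (hypothetical unsqueeze_shape helper) of the output-rank construction used above:

def unsqueeze_shape(shape, axes):
    # axes index the *output* rank; remaining input dims fill the other
    # slots in order, exactly as the pop() construction above does
    out_rank = len(shape) + len(axes)
    axes = [a + out_rank if a < 0 else a for a in axes]
    src = list(shape)
    return [1 if i in axes else src.pop(0) for i in range(out_rank)]

assert unsqueeze_shape([3, 4], [0, 2]) == [1, 3, 1, 4]
assert unsqueeze_shape([3, 4], [-1]) == [3, 4, 1]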
Example #6
    def _common(cls, node: TFLiteNode, **kwargs):
        node_opts = node.get_options(ReshapeOptions)
        G = kwargs['G']
        opts = kwargs['opts']
        all_nodes = kwargs['all_nodes']

        inputs = [all_nodes[t] for t in node.input]
        x = inputs[0]
        x_shape = x[2].shape

        # TF2 seems to use the second input whereas TF1 uses the opts
        new_shape = None
        if node_opts:
            new_shape = list(node_opts.NewShapeAsNumpy())
        elif len(inputs) > 1:
            set_shape_tensor = list(cls._verify_constant(inputs[1]))
            node.input[1].used = True
            new_shape = list(set_shape_tensor)
        else:
            raise ValueError(
                f"Cannot assess new_shape for Reshape parameter: {node.name}")

        if -1 in new_shape:
            new_shape_size = reduce(lambda x, y: x * 1
                                    if y == -1 else x * y, new_shape, 1)
            inp_size = reduce(lambda x, y: x * y
                              if y is not None else x, x_shape, 1)
            new_shape[new_shape.index(-1)] = inp_size // new_shape_size

        if None in x_shape:
            if 1 in new_shape:
                old_batch_dim = x_shape.index(None)
                new_batch_dim = new_shape.index(1)
                if old_batch_dim != new_batch_dim:
                    LOG.info(
                        "node %s moved batch dimension for axis %s to axis %s",
                        node.name, old_batch_dim, new_batch_dim)
                new_shape[new_batch_dim] = None
            else:
                raise ValueError(
                    "unable to determine movement of unspcified axis in node %s"
                    % node.name)

        pnew_shape = ProvisionalDim(new_shape)
        old_shape = Dim.unnamed(cls.remove_unspecified_dim(x_shape),
                                is_ordered=True)
        new_shape = Dim.unnamed(cls.remove_unspecified_dim(new_shape),
                                is_ordered=True)

        params = ReshapeParameters(node.name,
                                   old_shape=old_shape,
                                   shape=new_shape)

        if opts.get('load_quantization'):
            G.quantization[NodeId(params)] = cls.load_tf_quantization(
                [node.input[0]], node.output)
        G.add_edge(
            NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pnew_shape)
        return params
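A short illustration (not importer code; shapes are assumptions) of the batch-dimension tracking above, where the unknown batch axis must reappear as a 1 in the requested shape:

# illustrative only: an unknown (None) batch axis must map onto a
# 1-sized axis in the requested shape, where it is re-marked as None
x_shape = [None, 4, 6]
new_shape = [1, 24]
assert None in x_shape and 1 in new_shape
new_shape[new_shape.index(1)] = None
assert new_shape == [None, 24]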
Example #7
    def _match(self, G: GraphView, set_identity: bool = True, **kwargs):
        rnn_nodes = [
            self.find_unpack(G, node) for node in G.nodes()
            if isinstance(node, RNNBaseParameters) and node.n_output_cells > 1
        ]
        rnn_nodes_by_slice = self.validate_slices(G, rnn_nodes)
        rnn_nodes_by_slice = self.validate_multi_branch(G, rnn_nodes_by_slice)
        if not rnn_nodes_by_slice:
            return False

        for unpack_node, rnn_unpacks in rnn_nodes_by_slice.items():
            modified_nodes = set()
            for rnn_unpack in rnn_unpacks:
                self.process_path(G, rnn_unpack, modified_nodes)
            # since process path will have removed all unnecessary nodes the edges will be correct here
            out_edges = G.out_edges(unpack_node.name)
            in_edges = G.in_edges(unpack_node.name)
            assert len(in_edges) == 1, "expecting unpack node to have only one in edge"
            in_edge = in_edges[0]
            changes_shape = unpack_node.changes_shape if isinstance(
                unpack_node, StridedSliceParameters) else False

            LOG.info("Eliminating last cell unpack: %s", unpack_node.name)
            G.remove(unpack_node)

            # Here the strided slice can change the output shape of the RNN
            # so insert a reshape to do the shape change
            if changes_shape:
                reshape = ReshapeParameters(
                    unpack_node.name + '_reshape',
                    old_shape=Dim.unnamed(unpack_node.post_slice_shape),
                    shape=Dim.unnamed(unpack_node.out_shape))
                G.add_edge(
                    NNEdge(from_node=in_edge.from_node,
                           to_node=reshape,
                           from_idx=in_edge.from_idx))
                for out_edge in out_edges:
                    G.add_edge(
                        NNEdge(from_node=reshape,
                               to_node=out_edge.to_node,
                               to_idx=out_edge.to_idx))
                if G.quantization:
                    G.quantization[NodeId(reshape)] = G.quantization[NodeId(unpack_node)]
            else:
                for out_edge in out_edges:
                    G.add_edge(
                        NNEdge(from_node=in_edge.from_node,
                               to_node=out_edge.to_node,
                               from_idx=in_edge.from_idx,
                               to_idx=out_edge.to_idx))
            if G.quantization:
                del G.quantization[NodeId(unpack_node)]

        if set_identity:
            self.set_identity(G)

        return True
Example #8
    def _common(cls, node: TFLiteNode, **kwargs):
        node_opts = node.get_options(StridedSliceOptions)
        G = kwargs['G']
        opts = kwargs['opts']
        all_nodes = kwargs['all_nodes']

        inputs = [all_nodes[t] for t in node.input]
        x = inputs[0]
        x_shape = x[2].shape

        # begin end stride
        vec_begin = list(cls._verify_constant(inputs[1]))
        vec_end = list(cls._verify_constant(inputs[2]))
        vec_stride = list(cls._verify_constant(inputs[3]))
        for i in range(1, 4):
            node.input[i].used = True
        if any([vec is None for vec in [vec_begin, vec_end, vec_stride]]):
            raise NotImplementedError(
                "strided slice with variable begin end or stride is not supported")
        spec = zip(vec_begin, vec_end, vec_stride)
        begin_mask = node_opts.BeginMask()
        ellipsis_mask = node_opts.EllipsisMask()
        end_mask = node_opts.EndMask()
        new_axis_mask = node_opts.NewAxisMask()
        shrink_axis_mask = node_opts.ShrinkAxisMask()

        act_slice, out_shape, can_reshape = StridedSliceParameters.get_slice(
            x_shape, spec,
            begin_mask,
            end_mask, ellipsis_mask,
            new_axis_mask, shrink_axis_mask)

        if cls.is_constant(x):
            LOG.info("reducing %s to a constant", node.name)
            x_val = cls.get_constant(x)
            params = StridedSliceParameters(node.name, act_slice=act_slice, out_shape=out_shape)
            x_val = params.numpy_slice(x_val)
            params = ConstantInputParameters(node.name, value=x_val)
        else:
            if can_reshape:
                if list(x_shape) == list(out_shape):
                    LOG.info("converting strided slice %s to a noop", node.name)
                    params = NoOPParameters(node.name)
                else:
                    LOG.info("converting strided slice %s to a reshape", node.name)
                    in_shape = Dim.unnamed(x[2].known_shape, is_ordered=True)
                    out_shape = Dim.unnamed(out_shape, is_ordered=True)
                    params = ReshapeParameters(node.name, old_shape=in_shape, shape=out_shape)
            else:
                params = StridedSliceParameters(node.name, act_slice=act_slice, out_shape=out_shape)
            G.add_edge(NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))

        if opts.get('load_quantization'):
            G.quantization[NodeId(params)] = cls.load_tf_quantization([node.input[0]], node.output)
        all_nodes[node.output[0]] = (params, 0, x[2].infer_mapping(out_shape, allow_bad_length=True))
        return params
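A hedged sketch (assumed slice layout of (begin, end, stride) per axis) of the lowering decision above: a slice that keeps every element moves no data, so it can become a reshape or, if the shape is also unchanged, a no-op:

# illustrative only: a slice with begin 0, end == dim and stride 1 on
# every axis keeps all elements, so only the shape can differ
x_shape = [1, 4, 6]
act_slice = [(0, 1, 1), (0, 4, 1), (0, 6, 1)]
keeps_all = all(b == 0 and e == d and s == 1
                for d, (b, e, s) in zip(x_shape, act_slice))
assert keeps_all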
Example #9
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]

        if cls.SINCE_VERSION == 1:
            shape = np.array(node.attrs["shape"])
        else:  # since_version >= 5
            shape = cls.get_constant(inputs[1])

        input_shape = np.array(inputs[0][2].shape)
        shape = [
            dim if dim != 0 else input_shape[idx]
            for idx, dim in enumerate(shape)
        ]
        if -1 in shape:
            wild_index = shape.index(-1)
            in_size = prod([1 if dim is None else dim for dim in input_shape])
            shape_size = prod(
                [1 if dim is None or dim <= 0 else dim for dim in shape])
            if in_size % shape_size != 0:
                raise ValueError('invalid reshape')
            shape[wild_index] = in_size // shape_size
        shape = np.array(shape)

        if cls.is_constant(inputs[0]):
            logger.info("reducing %s to a constant", valid_name)
            params = ConstantInputParameters(valid_name,
                                             value=cls.get_constant(
                                                 inputs[0]).reshape(shape),
                                             dims=Dim.unnamed(shape),
                                             constant_store=G.constant_store)
            pshape = ProvisionalDim(shape)
            all_nodes[node.output[0]] = (params, 0, pshape)
            return params

        # TODO - There must be a better way of doing this
        # This hacks around the fact that the batch dimension will be in the reshape
        if input_shape[0] is None and shape[0] == 1:
            shape = np.array([None] + list(shape[1::]))

        pshape = ProvisionalDim(shape)
        # pylint: disable=singleton-comparison
        old_shape = Dim.unnamed(list(input_shape[input_shape != None]))
        shape = Dim.unnamed(list(shape[shape != None]))
        params = ReshapeParameters(valid_name,
                                   old_shape=old_shape,
                                   shape=shape)
        inp = inputs[0]
        G.add_edge(
            NNEdge(from_node=inp[0], to_node=params, from_idx=inp[1],
                   to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape)
        return params
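A minimal sketch (hypothetical onnx_resolve_shape helper) of the ONNX Reshape semantics handled above, where 0 copies an input dimension and a single -1 is inferred:

import numpy as np

def onnx_resolve_shape(in_shape, shape):
    # ONNX Reshape: a 0 copies the input dimension, a single -1 is
    # inferred from the remaining element count
    shape = [in_shape[i] if d == 0 else d for i, d in enumerate(shape)]
    if -1 in shape:
        known = int(np.prod([d for d in shape if d > 0]))
        shape[shape.index(-1)] = int(np.prod(in_shape)) // known
    return shape

assert onnx_resolve_shape([2, 3, 4], [0, -1]) == [2, 12]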
Example #10
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        input_shapes = [inp[2].shape for inp in inputs]
        axis = node.attrs['axis']
        new_axis = node.attrs.get('new_axis', 0)
        # if new_axis is false this is the same as concat
        if not new_axis:
            return cls.gen_concat(node, inputs, axis)
        # if it is true then it's different
        if not all(shape == input_shapes[0] for shape in input_shapes[1::]):
            raise ValueError(
                'all shapes must be the same in ConcatFromSequence with new axis'
            )

        # reduce to a constant if we can
        if all(cls.is_constant(inp) for inp in inputs):
            logger.info("reducing %s to a constant", valid_name)
            value = np.concatenate([cls.get_constant(inp) for inp in inputs],
                                   axis=axis)
            params = ConstantInputParameters(valid_name, value=value)
            all_nodes[node.output[0]] = (params, 0,
                                         ProvisionalDim(value.shape),
                                         inputs[0][3])
            return params

        # add the axis into the shape
        new_shape = input_shapes[0].copy()
        new_shape = new_shape[:axis:] + [1] + new_shape[axis::]
        old_shape = cls._get_real_dim(input_shapes[0])
        shape = cls._get_real_dim(new_shape)
        # create a reshape on each input and pass the outputs to the concat mixin
        #pylint: disable=consider-using-enumerate
        for idx in range(len(inputs)):
            inp = inputs[idx]
            rparams = ReshapeParameters("%s_reshape_%s" % (valid_name, idx),
                                        old_shape=old_shape,
                                        shape=shape)
            G.add_edge(
                NNEdge(from_node=inp[0],
                       to_node=rparams,
                       from_idx=inp[1],
                       to_idx=0))
            inputs[idx] = (rparams, 0, ProvisionalDim(new_shape), inp[3])

        return cls.gen_concat(node, inputs, axis)
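An illustration (plain NumPy, not importer code) of why the inserted reshapes emulate a stack when new_axis is set:

import numpy as np

# illustrative only: with new_axis set, ConcatFromSequence behaves like
# np.stack, which is why each input gets a reshape inserting a 1-sized
# axis before the concat
a, b = np.zeros((2, 3)), np.ones((2, 3))
axis = 1
stacked = np.concatenate([x.reshape(2, 1, 3) for x in (a, b)], axis=axis)
assert np.array_equal(stacked, np.stack([a, b], axis=axis))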
Example #11
    def _common(cls, node: TFLiteNode, **kwargs):
        node_opts = node.get_options(SqueezeOptions)
        G = kwargs['G']
        opts = kwargs['opts']
        all_nodes = kwargs['all_nodes']

        inputs = [all_nodes[t] for t in node.input]
        x = inputs[0]
        x_shape = x[2].shape

        if node_opts.SqueezeDimsIsNone():
            new_shape = [dim for dim in x_shape if dim != 1]
        else:
            axes = node_opts.SqueezeDimsAsNumpy()
            axes = np.where(axes < 0, axes + len(x_shape), axes)
            if any(x_shape[axis] not in (None, 1) for axis in axes):
                raise ValueError(f'cannot squeeze axis with dimension != 1 in {node.name}')
            new_shape = [
                dim for idx, dim in enumerate(x_shape) if idx not in axes
            ]

        if cls.is_constant(x):
            LOG.info("reducing %s to a constant", node.name)
            val = np.reshape(cls.get_constant(x), new_shape)
            params = ConstantInputParameters(node.name,
                                             value=val,
                                             dims=Dim.unnamed(val.shape))
        else:
            pnew_shape = ProvisionalDim(new_shape)
            old_shape = Dim.unnamed(cls.remove_unspecified_dim(x_shape),
                                    is_ordered=True)
            new_shape = Dim.unnamed(cls.remove_unspecified_dim(new_shape),
                                    is_ordered=True)
            params = ReshapeParameters(node.name,
                                       old_shape=old_shape,
                                       shape=new_shape)

        if opts.get('load_quantization'):
            G.quantization[NodeId(params)] = cls.load_tf_quantization(
                [node.input[0]], node.output)
        G.add_edge(
            NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pnew_shape)
        return params
Example #12
def add_reshape(G,
                name,
                subgraph,
                _,
                op,
                load_tensors=False,
                dequantize=False):
    reshape_opts = ReshapeOptions.ReshapeOptions()
    reshape_opts.Init(op.BuiltinOptions().Bytes, op.BuiltinOptions().Pos)
    inp = get_input_size(subgraph, op, 0)
    new_shape = list(reshape_opts.NewShapeAsNumpy())
    if -1 in new_shape:
        new_shape_size = reduce(lambda x, y: x * 1
                                if y == -1 else x * y, new_shape, 1)
        inp_size = reduce(lambda x, y: x * y, inp, 1)
        new_shape[new_shape.index(-1)] = inp_size // new_shape_size
    new_shape = Dim.unnamed(new_shape, is_ordered=True)
    node = ReshapeParameters(name, new_shape)
    return add_node(G, node)
Example #13
    def remove_known_batch_dimension(cls, G, x, node, batch_axis=0):
        x_shape = x[2].shape
        if x_shape[batch_axis] is not None:
            if x_shape[batch_axis] > 1:
                raise ValueError(
                    f'multi batch (n={x_shape[batch_axis]}) operations are not supported by {node.name}')
            rparams = ReshapeParameters(
                f'{node.name}_batch',
                old_shape=Dim.unnamed(x_shape),
                shape=Dim.unnamed(x_shape[0:batch_axis:]+x_shape[batch_axis+1::]))
            if G.quantization:
                qrec = G.quantization[NodeId(x[0])]
                G.quantization[NodeId(rparams)] = QRec.copy_ktype(
                    qrec,
                    in_qs=[qrec.out_qs[0]],
                    out_qs=[qrec.out_qs[0]])
            G.add_edge(
                NNEdge(from_node=x[0], to_node=rparams, from_idx=x[1], to_idx=0))
            return (rparams, 0, ProvisionalDim(x_shape[0:batch_axis:]+[None]+x_shape[batch_axis+1::]))
        else:
            return x
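A short illustration (assumed shapes) of the bookkeeping above: a known batch of 1 is reshaped away before the kernel, and the provisional shape keeps None as a placeholder:

# illustrative only
x_shape = [1, 8, 16]                  # batch, c, w (hypothetical)
kernel_shape = x_shape[1:]            # [8, 16] seen by the kernel
provisional = [None] + x_shape[1:]    # [None, 8, 16] reported upstream
assert provisional == [None, 8, 16]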
Example #14
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape
        axes = cls._resolve_negative_ranks(kwargs['axes'], len(x_shape))
        if len(x_shape) == 0:
            assert len(axes) == 1 and axes[0] == 0
            new_shape = [1]
        else:
            new_shape = []
            old_shape = x_shape.copy()
            axes_copy = axes.copy()
            idx = 0
            while axes_copy or old_shape:
                if idx in axes_copy:
                    axes_copy.remove(idx)
                    new_shape.append(1)
                else:
                    if not old_shape:
                        raise ValueError(f'error in unsqueeze inshape {x_shape} axes {axes}')
                    new_shape.append(old_shape.pop(0))
                idx += 1

        pshape = ProvisionalDim(new_shape)
        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            x_val = cls.get_constant(x)
            params = ConstantInputParameters(valid_name, value=x_val.reshape(new_shape),
                                             constant_store=G.constant_store)
        else:
            old_shape = cls._get_real_dim(x_shape)
            shape = cls._get_real_dim(new_shape)
            params = ReshapeParameters(valid_name, old_shape=old_shape, shape=shape)
            G.add_edge(NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape)
        return params
Example #15
    def _match(self, G: GraphView, set_identity: bool = True, **kwargs):
        nodes = list(G.nodes(node_classes=GlobalPoolingParameters))
        modified_graph = False
        while nodes:
            node = nodes.pop()
            node_group = self.reductions(G, node)
            if len(node_group) <= 1:
                continue
            modified_graph = True
            reduction_axes, new_shape, has_keepdims, _ = reduce(
                reduce_reduction, node_group, None)
            new_node = node_group[0]
            new_node.axis = sorted(list(reduction_axes))
            new_node.keep_dims = has_keepdims
            out_edges = G.out_edges(node_group[-1].name)
            if G.quantization:
                last_qrec = G.quantization[NodeId(node_group[-1])]
                G.quantization[NodeId(new_node)].out_qs = last_qrec.out_qs
            for node in node_group[1::]:
                G.remove(node.name)
                nid = NodeId(node)
                if G.quantization and nid in G.quantization:
                    del G.quantization[nid]
            if has_keepdims and len(new_shape) != len(
                    new_node.in_dims[0].shape):
                rparams = ReshapeParameters(
                    G.unique_name(f'{new_node.name}_reshape'),
                    shape=Dim.unnamed(new_shape))
                if G.quantization:
                    G.quantization.copy_qrec(last_qrec, 'out', 0, rparams)
                G.add_edge(NNEdge(new_node, rparams))
                new_node = rparams
            for edge in out_edges:
                G.add_edge(NNEdge(new_node, edge.to_node, to_idx=edge.to_idx))

        if set_identity:
            self.set_identity(G)

        return modified_graph
Example #16
    def _common(cls, node: TFLiteNode, **kwargs):
        G = kwargs['G']
        opts = kwargs['opts']
        all_nodes = kwargs['all_nodes']

        inputs = [all_nodes[t] for t in node.input]
        x = inputs[0]
        x_shape = x[2].shape

        exp_dim = int(cls._verify_constant(inputs[1]))
        if exp_dim < 0:
            exp_dim += len(x_shape)
        if x_shape[exp_dim] is None:
            exp_dim += 1
        new_shape = x_shape[:exp_dim:] + [1] + x_shape[exp_dim::]

        if cls.is_constant(x):
            LOG.info("reducing %s to a constant", node.name)
            val = np.reshape(cls.get_constant(x), new_shape)
            params = ConstantInputParameters(node.name,
                                             value=val,
                                             dims=Dim.unnamed(val.shape))
        else:
            pnew_shape = ProvisionalDim(new_shape)
            old_shape = Dim.unnamed(cls.remove_unspecified_dim(x_shape),
                                    is_ordered=True)
            new_shape = Dim.unnamed(cls.remove_unspecified_dim(new_shape),
                                    is_ordered=True)
            params = ReshapeParameters(node.name,
                                       old_shape=old_shape,
                                       shape=new_shape)

        if opts.get('load_quantization'):
            G.quantization[NodeId(params)] = cls.load_tf_quantization(
                [node.input[0]], node.output)
        G.add_edge(
            NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pnew_shape)
        return params
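An illustration (plain NumPy) that ExpandDims is a pure reshape, mirroring the new_shape computation above:

import numpy as np

# ExpandDims inserts a 1 at the (possibly negative) requested axis
x = np.zeros((3, 4))
axis = -1
axis = axis + x.ndim + 1 if axis < 0 else axis
new_shape = list(x.shape[:axis]) + [1] + list(x.shape[axis:])
assert tuple(new_shape) == np.expand_dims(x, -1).shape == (3, 4, 1)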
Example #17
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape

        axis = node.attrs.get('axis', 1)
        if axis < 0:
            axis += len(x_shape)

        old_shape = cls._get_real_dim(x_shape)
        if axis == 0:
            shape = [1, prod(old_shape)]
            pshape = shape
        else:
            start = x_shape[:axis:]
            end = x_shape[axis::]
            pshape = list(start) + [prod(cls._get_real_dim(end))]
            shape = cls._get_real_dim(pshape)

        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            params = ConstantInputParameters(
                valid_name,
                value=cls.get_constant(x).reshape(shape),
                constant_store=G.constant_store
            )
            pshape = ProvisionalDim(shape)
        else:
            params = ReshapeParameters(valid_name, old_shape=Dim.unnamed(
                old_shape), shape=Dim.unnamed(shape))
            pshape = ProvisionalDim(pshape)
            G.add_edge(NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))

        all_nodes[node.output[0]] = (params, 0, pshape)
        return params
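A minimal sketch (hypothetical flatten_shape helper) of the ONNX Flatten rule implemented above:

from math import prod

def flatten_shape(shape, axis=1):
    # ONNX Flatten always yields a 2D result:
    # (prod(shape[:axis]), prod(shape[axis:])), with axis == 0 giving
    # (1, total) as in the special case above
    return (prod(shape[:axis]), prod(shape[axis:]))

assert flatten_shape([2, 3, 4]) == (2, 12)
assert flatten_shape([2, 3, 4], axis=0) == (1, 24)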
Example #18
    def do_remove(self, args: argparse.Namespace):
        """Removes all the edges and nodes between two node. Will only work if nodes do not affect shape of tensor."""
        self._check_graph()
        if any(node not in self.G for node in args.nodes):
            self.perror("node not found in graph")
            return
        node_from = self.G[args.nodes[0]]
        if len(args.nodes) == 1:
            if args.up:
                nodes_above = self.G.nodes_above(node_from)
                out_edges = self.G.indexed_out_edges(node_from)
                nodes_above.add(node_from)
                input_names = sorted([
                    node.name for node in nodes_above
                    if isinstance(node, InputParameters)
                ])
                self.G.remove_all(nodes_above | {node_from})
                for idx, edge_group in enumerate(out_edges):
                    name = input_names.pop(0) if input_names else None
                    in_node = self.G.add_input(node_from.out_dims[idx],
                                               name=name)
                    self.pfeedback(f'adding input {in_node.name}')
                    for edge in edge_group:
                        self.G.add_edge(
                            NNEdge(from_node=in_node,
                                   to_idx=edge.to_idx,
                                   to_node=edge.to_node))
            else:
                nodes_below = self.G.nodes_below(node_from)
                for node in list(nodes_below):
                    nodes_below.update(
                        edge.from_node for edge in self.G.in_edges(node)
                        if isinstance(edge.from_node, ConstantInputParameters))
                if self.G.is_vertex_cut(nodes_below):
                    self.perror(
                        f'removing everything below {node_from.name} would split the graph which is not permitted'
                    )
                    return
                nodes_below.add(node_from)
                in_edges = self.G.in_edges(node_from.name)
                output_names = sorted([
                    node.name for node in nodes_below
                    if isinstance(node, OutputParameters)
                ])
                self.G.remove_all(nodes_below)
                for edge in in_edges:
                    name = output_names.pop(0) if output_names else None
                    out_node = self.G.add_output(name=name)
                    self.pfeedback(f'adding output {out_node.name}')
                    self.G.add_edge(
                        NNEdge(from_node=edge.from_node,
                               from_idx=edge.from_idx,
                               to_node=out_node))
        else:
            node_to = self.G[args.nodes[1]]
            nodes_between = self.G.nodes_between(node_from, node_to)
            if not nodes_between:
                self.perror(
                    f'there are no nodes between {node_from.name} and {node_to.name}'
                )
                return
            if not self.G.nodes_between_in(node_from, node_to, nodes_between):
                self.perror(
                    f'all paths from {node_from.name} must lead to {node_to.name}'
                )
                return

            edges_from = self.G.indexed_out_edges(node_from)
            edges_to = self.G.indexed_in_edges(node_to.name)
            if len(edges_from) != len(edges_to):
                self.perror(
                    f"{node_from.name} has a different number of outputs than {node_to.name}'s inputs"
                )
                return
            for idx, _ in enumerate(edges_from):
                if node_from.out_dims[idx].size() != node_to.in_dims[idx].size():
                    self.perror(
                        f"{node_from.name} output {idx} has a different size to {node_to.name}'s input"
                    )
                    return
            self.G.remove_all(nodes_between)
            for idx, _ in enumerate(edges_from):
                if node_from.out_dims[idx].shape != node_to.in_dims[idx].shape:
                    reshape = ReshapeParameters(
                        self.G.unique_name(f'{node_from.name}_reshape{idx}'),
                        old_shape=node_from.out_dims[idx],
                        shape=node_to.in_dims[idx])
                    self.G.add_edge(
                        NNEdge(from_node=node_from,
                               from_idx=idx,
                               to_node=reshape))
                    self.G.add_edge(
                        NNEdge(from_node=reshape, to_node=node_to, to_idx=idx))
                else:
                    self.G.add_edge(
                        NNEdge(from_node=node_from,
                               from_idx=idx,
                               to_node=node_to,
                               to_idx=idx))
        self.G.add_dimensions()
Example #19
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        # input N x C x H x W
        x = inputs[0]
        x_rank = len(x[2].shape)
        x_shape = x[2].shape
        real_in_shape = deepcopy(x_shape)
        #conv_shape = [x if idx > 0 and x is not None else 1 for idx, x in enumerate(x_shape)]
        conv_shape = x_shape
        if None in x_shape:
            real_in_shape.remove(None)
        spatial_size = x_rank - 2
        assert spatial_size == 2 or spatial_size == 1, "only 1D and 2D convolutions supported"

        # M x C/group x kH x kW
        weights_node = inputs[1][0]
        weights_node.name = f'{valid_name}_weights'
        weights = cls.get_constant(inputs[1])
        out_c = weights.shape[1]
        group = node.attrs.get("group", 1)
        in_c = conv_shape[-spatial_size-1] if conv_shape[-spatial_size-1] is not None else 1
        filt_out_c = out_c // group
        if in_c != weights.shape[0]:
            raise ValueError(f'node {valid_name} has incorrect input channel '
                             f'dimension {in_c} expecting {weights.shape[0]}')
        if spatial_size == 1:
            filt_w = weights.shape[-1]
            filt_h = 1
            # create a new constant node since we are changing the shape
            weights = np.reshape(weights, (in_c, filt_out_c, filt_h, filt_w))
            weights_node = ConstantInputParameters(f'{valid_name}_weights', value=weights,
                                                   dims=Dim.unnamed(
                                                       weights.shape))
            cls.record_constant_qrec(inputs[1], weights_node, **kwargs)
        else:
            filt_h = weights.shape[-2]
            filt_w = weights.shape[-1]

        h = 1 if spatial_size == 1 else (conv_shape[-2] if conv_shape[-2] is not None else 1)
        w = conv_shape[-1] if conv_shape[-1] is not None else 1

        filt_dim = Conv2DFilterDim(filt_h, filt_w,
                                   filt_out_c, in_c=in_c)
        filt_dim = filt_dim.impose_order(cls.ONNX_TRANSFILTER_ORDER)

        if len(inputs) > 2:
            biases_node = inputs[2][0]
            biases = cls.get_constant(inputs[2])
        else:
            biases = np.zeros([out_c], dtype=np.float32)
            biases_node = ConstantInputParameters(f'{valid_name}_biases', value=biases,
                                                  dims=Dim.unnamed(
                                                      biases.shape))

        padding, dilations, strides, output_padding = cls.calc_shapes(node, spatial_size, Dim2D((h, w)), Dim2D((filt_h, filt_w)))

        params = TransposeConv2DParameters(valid_name,
                                           filt=filt_dim,
                                           stride=strides,
                                           dilation=dilations,
                                           groups=group,
                                           padding=padding,
                                           has_bias=True,
                                           in_dims_hint=[['c', 'h', 'w'],
                                                         cls.ONNX_TRANSFILTER_ORDER, ['c']],
                                           out_dims_hint=[['c', 'h', 'w']])

        in_dim = Dim.named_ordered(c=in_c, h=h, w=w)
        w_dim = Dim.named_ordered(
            out_c=filt_out_c, in_c=in_c, h=filt_h, w=filt_w)
        b_dim = Dim.named_ordered(c=out_c)
        out_dims = params.get_output_size([in_dim, w_dim, b_dim])
        G.add_edge(NNEdge(from_node=weights_node,
                          to_node=params, from_idx=0, to_idx=1))
        G.add_edge(NNEdge(from_node=biases_node,
                          to_node=params, from_idx=0, to_idx=2))
        if conv_shape != real_in_shape:
            # insert reshape from [xx,None,xx,xx] -> [None, xx, xx, xx]
            rbatch_params = ReshapeParameters(G.unique_name(f'{valid_name}_reshape_batchdim'),
                                              old_shape=Dim.unnamed(conv_shape),
                                              shape=Dim.unnamed(real_in_shape))
            G.add_edge(
                NNEdge(from_node=x[0], to_node=rbatch_params, from_idx=x[1], to_idx=0))
            prev_node = rbatch_params
            prev_idx = 0
        else:
            prev_node = x[0]
            prev_idx = x[1]

        if spatial_size == 1:
            oned_in_shape = [in_c, w]
            twod_in_shape = [in_c, 1, w]
            oned_out_shape = [out_dims[0].c, out_dims[0].w]
            r1_params = ReshapeParameters(f'{valid_name}_reshape2d',
                                          old_shape=Dim.unnamed(oned_in_shape),
                                          shape=Dim.unnamed(twod_in_shape))
            r2_params = ReshapeParameters(f'{valid_name}_reshape1d',
                                          old_shape=out_dims[0],
                                          shape=Dim.unnamed(oned_out_shape))
            G.add_edge(
                NNEdge(from_node=prev_node, to_node=r1_params, from_idx=prev_idx, to_idx=0))
            G.add_edge(NNEdge(from_node=r1_params,
                              to_node=params, from_idx=0, to_idx=0))
            G.add_edge(NNEdge(from_node=params,
                              to_node=r2_params, from_idx=0, to_idx=0))
            pout_dims = ProvisionalDim([conv_shape[0]] + oned_out_shape)
            all_nodes[node.output[0]] = (r2_params, 0, pout_dims, None)
            return r2_params
        else:
            pout_dims = ProvisionalDim([conv_shape[0]] + out_dims[0].shape)
            G.add_edge(
                NNEdge(from_node=prev_node, to_node=params, from_idx=prev_idx, to_idx=0))
            all_nodes[node.output[0]] = (params, 0, pout_dims, None)
            return params
Example #20
    def _common(cls, node, **kwargs):
        node_opts = node.get_options(FullyConnectedOptions)
        G = kwargs['G']
        opts = kwargs['opts']
        all_nodes = kwargs['all_nodes']

        keep_dims = node_opts.KeepNumDims()
        # check(not keep_dims,
        #       f'keep dims on Fully Connected {node.name} is not supported')

        inputs = [all_nodes[t] if t is not None else None for t in node.input]

        x = inputs[0]
        x_shape = x[2]
        x_known_shape = x_shape.known_shape
        inp_sz = np.prod(np.array(x_known_shape))
        weights = inputs[1]
        weights_node = weights[0]
        weights_shape = weights[2].shape
        check(
            len(weights_shape) == 2,
            f'bad filter shape {weights_shape} in {node.name}')
        out_c = weights_shape[0]
        batch_size = inp_sz // weights_shape[1]

        if keep_dims:
            if x_shape.shape[-1] != weights_shape[1]:
                raise ValueError(
                    f'Keep dims set on {node.name} but last input dimension does not match weights'
                )
            out_shape = x_shape.shape.copy()
            out_shape[-1] = out_c
        elif batch_size > 1:
            out_shape = (batch_size, out_c)
        else:
            out_shape = (None, out_c)
        real_out_shape = tuple(dim for dim in out_shape if dim is not None)

        filt_dim = FcFilterDim(weights_shape[0], weights_shape[1])

        node.input[1].used = True
        check(filt_dim.sz * batch_size == inp_sz,
              "filter doesn't match input size")

        if len(inputs) > 2 and inputs[2] is not None:
            bias = inputs[2]
            bias_node = bias[0]
        else:
            bias_node = ConstantInputParameters(f'{node.name}_bias',
                                                dims=Dim.unnamed([out_c]),
                                                value=np.zeros(
                                                    [out_c], dtype=np.float32))

        if batch_size > 1:
            # add a reshape to force the size of the input to batch * in_c
            input_shape = (batch_size, weights_shape[1])
            if x_known_shape != input_shape:
                rparams = ReshapeParameters(
                    G.unique_name(f'{node.name}_batch'),
                    old_shape=Dim.unnamed(x_known_shape),
                    shape=Dim.unnamed(input_shape))
                G.add_edge(
                    NNEdge(from_node=x[0],
                           to_node=rparams,
                           from_idx=x[1],
                           to_idx=0))
                link = (rparams, 0)
            else:
                link = x

            # the batched linear is ([NxM] . [MxK]) + [K]
            params = MatMulTransposedParameters(node.name)
            cls.new_load_filter_parameters(G, params, weights_shape, 0,
                                           node.input[0], weights_node,
                                           bias_node, node.output[0], opts)
            trans2 = TransposeParameters(G.unique_name(f'{node.name}_tin2'),
                                         transpose=(1, 0))
            G.add_edge(
                NNEdge(from_node=link[0], to_node=params, from_idx=link[1]))
            G.add_edge(NNEdge(from_node=weights_node, to_node=params,
                              to_idx=1))
            #G.add_edge(NNEdge(from_node=trans2, to_node=params, to_idx=1))
            G.add_edge(NNEdge(from_node=bias_node, to_node=params, to_idx=2))
            fc_shape = (batch_size, out_c)
        else:
            ker_in_order = None
            ker_out_order = None
            link = (x[0], x[1])

            params = FcParameters(node.name,
                                  filt=filt_dim,
                                  has_bias=True,
                                  ker_in_order=ker_in_order,
                                  ker_out_order=ker_out_order,
                                  batch_size=batch_size,
                                  keep_dims=keep_dims)
            cls.new_load_filter_parameters(
                G, params, params.filter.actual_shape,
                params.filter.get_order_idx('out_c'), node.input[0],
                weights_node, bias_node, node.output[0], opts)

            G.add_edge(NNEdge(from_node=weights_node, to_node=params,
                              to_idx=1))
            G.add_edge(NNEdge(from_node=bias_node, to_node=params, to_idx=2))
            G.add_edge(
                NNEdge(from_node=link[0],
                       to_node=params,
                       from_idx=link[1],
                       to_idx=0))
            fc_shape = (out_c, )

        pout_dims = ProvisionalDim(out_shape)
        aparams = cls.fuse_activation(node_opts, node.name, params, **kwargs)

        if real_out_shape != fc_shape:
            rparams = ReshapeParameters(G.unique_name(f'{node.name}_keepdims'),
                                        old_shape=fc_shape,
                                        shape=real_out_shape)
            G.add_edge(NNEdge(from_node=aparams, to_node=rparams))
            aparams = rparams

        all_nodes[node.output[0]] = (aparams, 0, pout_dims)
        return params
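An illustration (plain NumPy, assumed sizes) of the batched lowering above: a fully connected layer with batch > 1 becomes the transposed matmul ([N x M] . [M x K]) + [K]:

import numpy as np

# illustrative only: TFLite FC weight layout is [out_c, in_c]
batch, in_c, out_c = 4, 8, 16
x = np.random.rand(batch, in_c)
w = np.random.rand(out_c, in_c)
b = np.random.rand(out_c)
y = x @ w.T + b
assert y.shape == (batch, out_c)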
Example #21
    def _match(self, G: GraphView, set_identity: bool = True, **kwargs):
        modified_graph = False
        concats = set(G.nodes(node_classes=ConcatParameters))
        while concats:
            concat = concats.pop()
            if concat.axis != 0:
                continue
            subgraph = find_concats_up(G, concat)
            found = set(subgraph.nodes(node_classes=ConcatParameters))
            if len(found) <= 1:
                continue
            LOG.info(
                f"Combining concats {','.join([node.name for node in found])}")
            modified_graph = True
            concats -= found

            in_edges = [inp.edge for inp in subgraph.inputs()]
            in_dims = [
                edge.from_node.out_dims[edge.from_idx] for edge in in_edges
            ]
            nodes_to_remove = [
                node for node in subgraph.nodes()
                if node != concat and not isinstance(node, DummyInput)
            ]
            for edge in in_edges:
                G.remove_edge(edge)
            for node in nodes_to_remove:
                if node.name in G:
                    G.remove(node)
                nid = NodeId(node)
                if G.quantization and nid in G.quantization:
                    del G.quantization[nid]

            # remove_internal_graph(G, subgraph)
            out_dim = concat.out_dims[0]
            in_qs = []
            for idx, edge in enumerate(in_edges):
                from_node = edge.from_node
                from_idx = edge.from_idx
                if len(in_dims[idx]) > 1:
                    reshape = ReshapeParameters(
                        G.unique_name(f'{concat.name}_flat{idx}'),
                        old_shape=in_dims[idx],
                        shape=Dim.unnamed([in_dims[idx].size()]))
                    G.add_edge(
                        NNEdge(from_node=from_node,
                               from_idx=from_idx,
                               to_node=reshape))
                    from_node = reshape
                    from_idx = 0
                G.add_edge(
                    NNEdge(from_node=from_node,
                           from_idx=from_idx,
                           to_node=concat,
                           to_idx=idx))
                if in_qs is not None and G.quantization:
                    nid = NodeId(edge.from_node)
                    if nid in G.quantization:
                        qrec = G.quantization[nid]
                        in_qs.append(qrec.out_qs[edge.from_idx])
                    else:
                        in_qs = None
                else:
                    in_qs = None
            if in_qs is not None and G.quantization:
                nid = NodeId(concat)
                if nid in G.quantization:
                    G.quantization[nid].in_qs = in_qs
            reshape = ReshapeParameters(G.unique_name(f'{concat.name}_expand'),
                                        old_shape=Dim.unnamed([out_dim.size()]),
                                        shape=out_dim)
            G.insert_node_after(concat, reshape, edge_class=NNEdge)

        if set_identity:
            self.set_identity(G)

        return modified_graph
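An illustration (plain NumPy) of why axis-0 concats can be done on flattened buffers, which is what the _flat reshapes before the concat and the _expand reshape after it rely on:

import numpy as np

# illustrative only: row-major axis-0 concat is a concat of flat buffers
a, b = np.arange(6).reshape(2, 3), np.arange(12).reshape(4, 3)
flat = np.concatenate([a.reshape(-1), b.reshape(-1)])
assert np.array_equal(flat.reshape(6, 3), np.concatenate([a, b], axis=0))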
Example #22
    def conv(cls, node, quantized=False, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        # input N x C x H x W
        x = inputs[0]
        x_rank = len(x[2].shape)
        x_shape = x[2].shape

        if x_shape[0] is not None:
            real_in_shape = tuple(x_shape.copy())
            if x_shape[0] > 1:
                # support for multi batch is very limited
                batch = x_shape[0]
                logger.warning(
                    f"{valid_name} has a non 1 batch dimension of {batch} -"
                    " this is not supported by nntool or autotiler kernels")
            else:
                # if the batch is specified but is 1 then the input will be reshaped
                # and the output will have the batch dim set as unknown.
                batch = None
        else:
            real_in_shape = tuple(x_shape[1:])
            batch = None

        spatial_size = x_rank - 2
        assert spatial_size == 2 or spatial_size == 1, "only 1D and 2D convolutions supported"

        # Input error checking
        undefined = []
        if x_shape[1] is None:
            # cope with swapped batch and channel due to bad initial reshape
            if x_shape[0] == 1:
                batch = None
                x_shape = [x_shape[1], x_shape[0]] + list(x_shape[2:])
                real_in_shape = x_shape[1:]
            else:
                undefined.append(f"input channel size of filter {valid_name} must be defined.")

        if not all(dim is not None for dim in x_shape[-spatial_size:]):
            undefined.append(f"input spatial size {x_shape} of filter {valid_name} must be defined.")
        if undefined:
            raise ValueError(f"{' '.join(undefined)}. You may need to override input dimensions.")

        # M x C/group x kH x kW
        weights_idx = 3 if quantized else 1
        weights_node = inputs[weights_idx][0]
        weights_node.name = f'{valid_name}_weights'
        weights = cls.get_constant(inputs[weights_idx])
        out_c = weights.shape[0]
        group = node.attrs.get("group", 1)
        in_c = x_shape[1]
        filt_in_c = in_c // group
        if in_c != weights.shape[1] * group:
            raise ValueError(f'node {valid_name} has incorrect input channel '
                             f'dimension {in_c} expecting {weights.shape[1] * group}')
        if spatial_size == 1:
            filt_w = weights.shape[-1]
            filt_h = h = 1
            w = x_shape[-1]
            # create a new constant node since we are changing the shape
            weights = np.reshape(weights, (out_c, filt_in_c, filt_h, filt_w))
            weights_node = ConstantInputParameters(f'{valid_name}_weights', value=weights,
                                                   dims=Dim.unnamed(
                                                       weights.shape))
            cls.record_constant_qrec(inputs[weights_idx], weights_node, **kwargs)
        else:
            filt_h = weights.shape[-2]
            filt_w = weights.shape[-1]
            h = x_shape[-2]
            w = x_shape[-1]

        conv_in_shape = (in_c, h, w)

        # h = 1 if spatial_size == 1 else (
        #     x_shape[-2] if x_shape[-2] is not None else 1)
        # w = x_shape[-1] if x_shape[-1] is not None else 1

        filt_dim = Conv2DFilterDim(filt_h, filt_w,
                                   out_c, in_c=filt_in_c)
        filt_dim = filt_dim.impose_order(cls.ONNX_FILTER_ORDER)

        biases_idx = 8 if quantized else 2
        if len(inputs) > biases_idx:
            biases_node = inputs[biases_idx][0]
            biases = cls.get_constant(inputs[biases_idx])
        else:
            biases = np.zeros([out_c], dtype=np.float32)
            biases_node = ConstantInputParameters(f'{valid_name}_biases', value=biases,
                                                  dims=Dim.unnamed(
                                                      biases.shape))

        dilations = cls.pad_start_with(node.attrs.get("dilations", []), [1], 2)
        strides = cls.pad_start_with(node.attrs.get("strides", []), [1], 2)
        pad_dim = cls.calc_pad_dim(node, 4)

        if batch is not None:
            in_hint = ['n', 'c', 'h', 'w']
            out_hint = ['n', 'c', 'h', 'w']
            in_dim = Dim.named_ordered(n=batch, c=in_c, h=h, w=w)
            ker_in_order = [
                ['n', 'c', 'h', 'w'],
                ['out_c', 'in_c', 'h', 'w'],
                ['out_c']]
            ker_out_order = [['n', 'c', 'h', 'w']]
        else:
            in_hint = ['c', 'h', 'w']
            out_hint = ['c', 'h', 'w']
            in_dim = Dim.named_ordered(c=in_c, h=h, w=w)
            ker_in_order = [
                ['c', 'h', 'w'],
                ['out_c', 'in_c', 'h', 'w'],
                ['out_c']]
            ker_out_order = [['c', 'h', 'w']]
        params = Conv2DParameters(valid_name,
                                  filt=filt_dim,
                                  stride=StrideDim(strides[0],
                                                   strides[1]),
                                  dilation=DilationDim(dilations[0],
                                                       dilations[1]),
                                  batch=batch,
                                  groups=group,
                                  padding=pad_dim,
                                  ker_in_order=ker_in_order,
                                  ker_out_order=ker_out_order,
                                  has_bias=True,
                                  in_dims_hint=[in_hint,
                                                cls.ONNX_FILTER_ORDER, ['c']],
                                  out_dims_hint=[out_hint])

        if quantized:
            qrecs = kwargs['qrecs']
            x_zp = cls.get_constant(inputs[2])
            x_scale = cls.get_constant(inputs[1])
            x_qtype = QType(dtype=x_zp.dtype, scale=x_scale, zero_point=x_zp)
            w_zp = cls.get_constant(inputs[5])
            w_scale = cls.get_constant(inputs[4])
            weights_node.qtype = w_qtype = QType(
                dtype=w_zp.dtype, scale=w_scale,
                zero_point=w_zp, quantized_dimension=0 if len(w_scale) > 1 else None)
            o_zp = cls.get_constant(inputs[7])
            o_scale = cls.get_constant(inputs[6])
            o_qtype = QType(dtype=o_zp.dtype, scale=o_scale, zero_point=o_zp)
            biases_node.qtype = b_qtype = QType(
                dtype=biases.dtype, scale=w_scale*x_scale)
            qrecs[NodeId(params)] = QRec.scaled(
                in_qs=[x_qtype, w_qtype, b_qtype],
                out_qs=[o_qtype],
            )
        else:
            o_qtype = None

        w_dim = Dim.named_ordered(
            out_c=out_c, in_c=filt_in_c, h=filt_h, w=filt_w)
        b_dim = Dim.named_ordered(c=out_c)
        out_dims = params.get_output_size([in_dim, w_dim, b_dim])
        G.add_edge(NNEdge(from_node=weights_node,
                          to_node=params, from_idx=0, to_idx=1))
        G.add_edge(NNEdge(from_node=biases_node,
                          to_node=params, from_idx=0, to_idx=2))

        # check if input needs a reshape
        if conv_in_shape != real_in_shape:
            r1_params = ReshapeParameters(f'{valid_name}_reshape_in',
                                          old_shape=Dim.unnamed(real_in_shape),
                                          shape=Dim.unnamed(conv_in_shape))
            G.add_edge(
                NNEdge(from_node=x[0], to_node=r1_params, from_idx=x[1], to_idx=0))
            G.add_edge(NNEdge(from_node=r1_params,
                              to_node=params, from_idx=0, to_idx=0))
        else:
            G.add_edge(
                NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))

        # check if output needs a reshape
        if spatial_size == 1:
            if batch is not None:
                oned_out_shape = [batch, out_dims[0].c, out_dims[0].w]
                pout_dims = ProvisionalDim(oned_out_shape)
            else:
                oned_out_shape = [out_dims[0].c, out_dims[0].w]
                pout_dims = ProvisionalDim([None] + oned_out_shape)

            r2_params = ReshapeParameters(f'{valid_name}_reshape_out',
                                          old_shape=out_dims[0],
                                          shape=Dim.unnamed(oned_out_shape))
            G.add_edge(NNEdge(from_node=params,
                              to_node=r2_params, from_idx=0, to_idx=0))
            params = r2_params
        else:
            pout_dims = ProvisionalDim([batch] + out_dims[0].shape)

        all_nodes[node.output[0]] = (params, 0, pout_dims, o_qtype)
        return params
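Aside (not part of the original handler): the spatial_size == 1 path above runs a 1D convolution through the 2D kernel by inserting a dummy height axis and squeezing it back out afterwards. A minimal numpy sketch of that equivalence, with all shapes and names invented for illustration:

import numpy as np

# invented shapes, for illustration only
c, w, out_c, filt_w = 3, 10, 4, 3
x = np.random.rand(c, w).astype(np.float32)
weights = np.random.rand(out_c, c, filt_w).astype(np.float32)

# view the 1D input as a height-1 2D input (the reshape_in node above)
x_2d = x.reshape(c, 1, w)
# a 1D filter (out_c, in_c, kw) becomes a 2D filter (out_c, in_c, 1, kw)
weights_2d = weights.reshape(out_c, c, 1, filt_w)

# naive unpadded 2D cross-correlation with a height-1 kernel
out_w = w - filt_w + 1
out = np.zeros((out_c, 1, out_w), dtype=np.float32)
for oc in range(out_c):
    for ow in range(out_w):
        out[oc, 0, ow] = np.sum(x_2d[:, :, ow:ow + filt_w] * weights_2d[oc])

# squeeze the dummy height back out (the reshape_out node above)
out_1d = out.reshape(out_c, out_w)
assert out_1d.shape == (out_c, out_w)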
Example No. 23
    def match(self, G: GraphView, set_identity: bool = True):
        rnn_nodes = [
            self.find_unpack(G, node) for node in G.nodes()
            if isinstance(node, RNNBaseParameters)
        ]
        has_modified_graph = False
        for rnn_unpack in rnn_nodes:
            if not rnn_unpack:
                continue
            unpack_node = rnn_unpack[-1]
            rnn_node = rnn_unpack[0]
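            # the unpack consumes the RNN output, whose time (cell) axis is assumed leading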
            time_axis = 0
            if isinstance(unpack_node, StridedSliceParameters):
                if unpack_node.act_slice[time_axis][1] != rnn_node.n_cells:
                    LOG.debug("can't remove %s. Slice not equal to cells",
                              unpack_node.name)
                    continue
                if unpack_node.act_slice[time_axis][2] != 1:
                    LOG.debug("can't remove %s. Slice not of length 1",
                              unpack_node.name)
                    continue
                if unpack_node.act_slice[time_axis][0] != rnn_node.n_cells - 1:
                    LOG.debug("can't remove %s. Slice isn't last cell",
                              unpack_node.name)
                    continue
                out_edge = G.out_edges(unpack_node.name)[0]
                changes_shape = unpack_node.changes_shape
            elif isinstance(unpack_node, SplitParameters):
                out_edges = G.out_edges(unpack_node.name)
                if len(out_edges) > 1:
                    LOG.debug("can't remove %s. More than one output edge",
                              unpack_node.name)
                    continue
                out_edge = out_edges[0]
                if out_edge.from_idx != len(unpack_node.act_slices) - 1:
                    LOG.debug("can't remove %s. Not last output",
                              unpack_node.name)
                    continue
                act_slice = unpack_node.act_slices[-1]
                if act_slice[time_axis][1] != rnn_node.n_cells:
                    LOG.debug("can't remove %s. Slice not equal to cells",
                              unpack_node.name)
                    continue
                if act_slice[time_axis][0] != rnn_node.n_cells - 1:
                    LOG.debug("can't remove %s. Slice isn't last cell",
                              unpack_node.name)
                    continue
                changes_shape = False
            else:
                continue

            has_modified_graph = True
            LOG.info("Eliminating last cell unpack: %s", unpack_node.name)
            for node in rnn_unpack[1:-1:]:
                LOG.info("Eliminating others: %s", node.name)
                if G.quantization:
                    del G.quantization[NodeId(node)]
                G.remove(node)
            G.remove(unpack_node)
            rnn_node.n_output_cells = 1
            rnn_node.out_dims[0] = unpack_node.out_dims[out_edge.from_idx]
            if unpack_node.out_dims_hint and unpack_node.out_dims_hint[
                    out_edge.from_idx]:
                rnn_node.out_dims_hint = [
                    unpack_node.out_dims_hint[out_edge.from_idx]
                ]
            else:
                rnn_node.out_dims_hint = None
            # Here the strided slice can change the output shape of the RNN
            # so insert a reshape to do the shape change
            if changes_shape:
                reshape = ReshapeParameters(
                    unpack_node.name + '_reshape',
                    old_shape=Dim.unnamed(unpack_node.post_slice_shape),
                    shape=Dim.unnamed(unpack_node.out_shape))
                G.add_edge(NNEdge(rnn_node, reshape))
                G.add_edge(
                    NNEdge(reshape, out_edge.to_node, to_idx=out_edge.to_idx))
                if G.quantization:
                    G.quantization[NodeId(reshape)] = G.quantization[NodeId(
                        unpack_node)]
            else:
                G.add_edge(
                    NNEdge(rnn_node, out_edge.to_node, to_idx=out_edge.to_idx))
            if G.quantization:
                del G.quantization[NodeId(unpack_node)]

        if set_identity:
            self.set_identity(G)

        return has_modified_graph
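Aside (illustrative only): the pattern this matcher folds away is a stride-1, length-1 slice selecting exactly the last cell, which is what the RNN emits directly when configured with n_output_cells = 1. In plain numpy, with invented shapes:

import numpy as np

n_cells, feat = 5, 8  # invented sequence length and feature size
full_seq = np.random.rand(n_cells, feat)  # an RNN emitting every cell

# what the StridedSlice/Split selects: start=n_cells-1, stop=n_cells, step=1
last_by_slice = full_seq[n_cells - 1:n_cells:1]

# what an RNN with n_output_cells = 1 would emit
last_direct = full_seq[-1:]
assert np.array_equal(last_by_slice, last_direct)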
Example No. 24
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]

        if cls.SINCE_VERSION == 1:
            shape = np.array(node.attrs["shape"])
        else:  # since_version >= 5
            shape = cls.get_constant(inputs[1])

        input_shape = np.array(inputs[0][2].shape)
        shape = [
            dim if dim != 0 else input_shape[idx]
            for idx, dim in enumerate(shape)
        ]
        # special case: if inp is e.g. [None, 2, 4] and shape is [2, -1, 4],
        # the -1 is the unknown (None) dimension changing position, so keep it None
        if cls.moves_unknown(input_shape, shape):
            shape = np.array([None if dim == -1 else dim for dim in shape])
        else:
            if -1 in shape:
                # product of the known dims (ignore None and the -1 wildcard)
                known_shape_size = prod(
                    [1 if dim is None or dim == -1 else dim for dim in shape])
                in_size = prod(
                    [1 if dim is None else dim for dim in input_shape])
                if in_size % known_shape_size != 0:
                    raise ValueError('invalid reshape')
                shape[shape.index(-1)] = in_size // known_shape_size
            shape = np.array(shape)
            # TODO - There must be a better way of doing this
            # This hacks around the fact that the batch dimension will be in the reshape
            if input_shape[0] is None and shape[0] == 1:
                shape = np.array([None] + list(shape[1::]))

        inp = inputs[0]
        if cls.is_constant(inp):
            # there should be no None in shape since a constant always has known size
            logger.info("reducing %s to a constant", valid_name)
            params = ConstantInputParameters(
                valid_name,
                value=cls.get_constant(inp).reshape(shape),
                dims=Dim.unnamed(shape))
            pshape = ProvisionalDim(shape)
            all_nodes[node.output[0]] = (params, 0, pshape, inp[3])
            return params

        pshape = ProvisionalDim(shape)
        # pylint: disable=singleton-comparison
        old_shape = Dim.unnamed(list(input_shape[input_shape != None]))
        shape = Dim.unnamed(list(shape[shape != None]))
        params = ReshapeParameters(valid_name,
                                   old_shape=old_shape,
                                   shape=shape)
        G.add_edge(
            NNEdge(from_node=inp[0], to_node=params, from_idx=inp[1],
                   to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape, inp[3])
        return params
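Aside (function name and scope are mine, not the handler's): resolving the -1 wildcard reduces to dividing the input element count by the product of the known output dims. A standalone sketch of the same rule for fully known input shapes:

from math import prod

def resolve_wildcard(input_shape, shape):
    # replace a single -1 so the element counts match
    in_size = prod(input_shape)
    known = prod(dim for dim in shape if dim != -1)
    if in_size % known != 0:
        raise ValueError('invalid reshape')
    return [in_size // known if dim == -1 else dim for dim in shape]

assert resolve_wildcard([2, 4, 3], [2, -1, 4]) == [2, 3, 4]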
Example No. 25
def test_reshape1():
    params = ReshapeParameters("test", shape=(2, 2, 2))
    out_size = params.get_output_size([Dim.unnamed([1, 8])])[0]
    assert out_size.shape == [2, 2, 2]
Example No. 26
    def _match(self,
               G: GraphView,
               set_identity: bool = True,
               **kwargs) -> bool:
        has_modified_graph = False
        slices_by_origin = {}
        for slice_node in [
                node for node in G.nodes()
                if isinstance(node, StridedSliceParameters)
        ]:
            in_edge = G.in_edges(slice_node.name)[0]
            group = slices_by_origin.setdefault(
                (in_edge.from_node, in_edge.from_idx), [])
            group.append(slice_node)
        for origin, slice_nodes in slices_by_origin.items():
            slices = list(zip(*[node.act_slice for node in slice_nodes]))
            if len(slice_nodes) == 1:
                self.slice_to_split(G, slice_nodes, slices)
                continue

            # strides must be one
            if any(sl[2] != 1 for sl_axis in slices for sl in sl_axis):
                continue

            diff_axes = [
                idx for idx, elems in enumerate(slices)
                if not all(elems[0] == elem for elem in elems[1::])
            ]
            not_diff_axes = [
                idx for idx in range(len(slices)) if idx not in diff_axes
            ]
            diff_slices = [
                sl for idx, sl in enumerate(slices) if idx in diff_axes
            ]
            axis_lengths = origin[0].out_dims[origin[1]].shape
            if not_diff_axes and min(not_diff_axes) < max(diff_axes):
                transpose_from = tuple(range(len(slices)))
                transpose_to = tuple(diff_axes + not_diff_axes)
                axis_lengths = [axis_lengths[idx] for idx in transpose_to]
            else:
                transpose_from = transpose_to = None
            diff_axis_lengths = axis_lengths[0:len(diff_axes):]

            diff_slices = combine_slices(diff_axis_lengths, diff_slices,
                                         slice_nodes)
            if diff_slices is None:
                continue

            if len(diff_axes) > 1:
                reshape_from = axis_lengths
                reshape_to = [np.prod(diff_axis_lengths)] + \
                    axis_lengths[len(diff_axes)::]
            else:
                reshape_from = None
                reshape_to = slice_nodes[0].in_dims[0].shape
                if transpose_from:
                    reshape_to = [reshape_to[idx] for idx in transpose_to]

            sizes, shapes, sorted_nodes = slices_to_sizes(
                diff_slices, axis_lengths[len(diff_axes)::])

            name_prefix = sorted_nodes[0].name

            in_edge = G.in_edges(sorted_nodes[0].name)[0]
            in_node = in_edge.from_node
            in_idx = in_edge.from_idx

            if transpose_from:
                params = TransposeParameters(G.unique_name(name_prefix +
                                                           '_tin'),
                                             transpose=transpose_to)
                G.add_edge(
                    NNEdge(from_node=in_node, to_node=params, from_idx=in_idx))
                in_node = params
                in_idx = 0

            if reshape_from:
                params = ReshapeParameters(G.unique_name(name_prefix +
                                                         '_reshape'),
                                           old_shape=Dim.unnamed(reshape_from),
                                           shape=Dim.unnamed(reshape_to))
                G.add_edge(
                    NNEdge(from_node=in_node, to_node=params, from_idx=in_idx))
                in_node = params
                in_idx = 0

            act_slices, out_shapes, axis = SplitParameters.get_splits(
                reshape_to, 0, splits=sizes)
            split_node = SplitParameters(G.unique_name(name_prefix + '_split'),
                                         act_slices=act_slices,
                                         out_shapes=out_shapes,
                                         axis=axis)

            G.add_edge(
                NNEdge(from_node=in_node, from_idx=in_idx, to_node=split_node))

            sub_names = []
            for idx, node in enumerate(sorted_nodes):
                sub_names.append(node.name)
                out_edges = G.out_edges(node.name)
                G.remove(node)
                for out_edge in out_edges:
                    params = split_node
                    out_idx = idx
                    if reshape_from:
                        from_node = params
                        params = ReshapeParameters(
                            G.unique_name(name_prefix + f'_reshape{idx}'),
                            shape=Dim.unnamed(shapes[idx]))
                        G.add_edge(
                            NNEdge(from_node=from_node,
                                   to_node=params,
                                   from_idx=out_idx))
                        out_idx = 0
                    if transpose_from:
                        from_node = params
                        params = TransposeParameters(
                            G.unique_name(name_prefix + f'_tout{idx}'),
                            transpose=reverse_transpose(transpose_to))
                        G.add_edge(
                            NNEdge(from_node=from_node,
                                   to_node=params,
                                   from_idx=out_idx))
                        out_idx = 0

                    G.add_edge(
                        NNEdge(from_node=params,
                               to_node=out_edge.to_node,
                               from_idx=out_idx,
                               to_idx=out_edge.to_idx))
            if G.quantization:
                G.add_dimensions()
                quantizer = NewQuantizer.from_quantized_graph(G)
                quantizer.quantize()
                RemoveUnnecessaryQuantizeOperators().match(G)

            LOG.info(
                f'replaced slice nodes {",".join(sub_names)} with split node {split_node.name}'
            )

            has_modified_graph = True

        if set_identity:
            self.set_identity(G)

        return has_modified_graph
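Aside (an illustrative numpy check, not the matcher's code): the rewrite rests on the fact that several stride-1 slices tiling an axis are equivalent to a single split at the slice boundaries:

import numpy as np

x = np.arange(24).reshape(6, 4)  # invented input
# three stride-1 slices along axis 0 that together tile it
slices = [x[0:2], x[2:5], x[5:6]]

# one split at the accumulated boundaries produces the same pieces
sizes = [2, 3, 1]
splits = np.split(x, np.cumsum(sizes)[:-1], axis=0)
assert all(np.array_equal(a, b) for a, b in zip(slices, splits))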
Example No. 27
    def _common(cls, node: TFLiteNode, **kwargs):
        node_opts = node.get_options(PackOptions)
        G = kwargs['G']
        opts = kwargs['opts']
        all_nodes = kwargs['all_nodes']

        inputs = [all_nodes[t] for t in node.input]
        inp_shapes = [inp[2].shape for inp in inputs]

        values_count = node_opts.ValuesCount()
        check(len(inputs) == values_count, "invalid tflite file - values count not equal to inputs")

        buffer_idxes = [tensor.buffer_idx for tensor in node.input]
        if any(not cls.is_constant(inp) for inp in inputs):
            check(len(set(buffer_idxes)) == len(buffer_idxes),
                  ("packs with multiple versions of the same input are not supported. "
                   "This is normally a graph design problem."))

        axis = node_opts.Axis()
        dimension_size = len(inp_shapes)
        if axis < 0:
            axis += dimension_size

        check(all(shape == inp_shapes[0] for shape in inp_shapes[1::]),
              "invalid tflite file - pack inputs not the same")

        # prepare shapes of all tensors
        pconcat_out_shape = inp_shapes[0].copy()
        pconcat_out_shape.insert(axis, values_count)

        pconcat_in_shape = inp_shapes[0].copy()
        pconcat_in_shape.insert(axis, 1)

        preshape_in_shape = inp_shapes[0].copy()

        # remove nones from constants
        cls.remove_none_from_constants(inputs, preshape_in_shape)

        # remove nones from reshape shapes
        reshape_in_shape = cls.remove_unspecified_dim(preshape_in_shape)
        concat_in_shape = cls.remove_unspecified_dim(pconcat_in_shape)

        if all(cls.is_constant(inp) for inp in inputs):
            LOG.info("reducing %s to a constant", node.name)
            value = np.stack([cls.get_constant(inp) for inp in inputs], axis=axis)
            params = ConstantInputParameters(node.name, value=value)
        elif len(inputs) == 1:
            params = ReshapeParameters(node.name,
                                       old_shape=reshape_in_shape,
                                       shape=concat_in_shape)
            G.add_edge(NNEdge(from_node=inputs[0][0], to_node=params, from_idx=inputs[0][1]))
        else:
            axis -= sum(1 if dim is None else 0 for dim in pconcat_out_shape[:axis:])
            params = ConcatParameters(node.name, axis=axis, axis_hint=None)

            # insert reshapes on each input to add concat axis
            for idx, inp in enumerate(inputs):
                rparams = ReshapeParameters(node.name + "_%s" % idx,
                                            old_shape=reshape_in_shape,
                                            shape=concat_in_shape)
                G.add_edge(NNEdge(from_node=inp[0], to_node=rparams, from_idx=inp[1]))
                G.add_edge(NNEdge(from_node=rparams, to_node=params, to_idx=idx))
                if opts.get('load_quantization'):
                    G.quantization[NodeId(rparams)] = cls.load_tf_quantization([node.input[idx]], [node.input[idx]])

        if opts.get('load_quantization'):
            G.quantization[NodeId(params)] = cls.load_tf_quantization(node.input, node.output)

        all_nodes[node.output[0]] = (params, 0, ProvisionalDim(pconcat_out_shape))
        return params
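Aside (illustration only): the reshape-plus-concat graph built above mirrors the identity that stacking along a new axis equals inserting a size-1 axis on each input and concatenating on it:

import numpy as np

axis = 1
inps = [np.random.rand(2, 3) for _ in range(4)]  # invented inputs

packed = np.stack(inps, axis=axis)  # what TFLite PACK computes
# per-input reshape adding the concat axis, as the handler inserts
expanded = [a.reshape(a.shape[:axis] + (1,) + a.shape[axis:]) for a in inps]
concatenated = np.concatenate(expanded, axis=axis)
assert np.array_equal(packed, concatenated)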