def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape
        out_rank = len(x_shape) + len(kwargs['axes'])
        axes = cls._resolve_negative_ranks(kwargs['axes'], out_rank)

        old_shape = x_shape.copy()
        new_shape = [
            1 if new_idx in axes else old_shape.pop(0)
            for new_idx in range(out_rank)
        ]

        pshape = ProvisionalDim(new_shape)
        if cls.is_constant(x):
            x_val = cls.get_constant(x)
            logger.info(
                f"reducing {valid_name} to a constant {cls.print_small(x_val)}"
            )
            params = ConstantInputParameters(valid_name,
                                             value=x_val.reshape(new_shape))
        else:
            old_shape = cls._get_real_dim(x_shape)
            shape = cls._get_real_dim(new_shape)
            params = ReshapeParameters(valid_name,
                                       old_shape=old_shape,
                                       shape=shape)
            G.add_edge(
                NNEdge(from_node=x[0], to_node=params, from_idx=x[1],
                       to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape, x[3])
        return params
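A minimal numpy sketch (illustration only, not nntool code) of the shape arithmetic the handler above performs: a 1 is inserted at every requested axis, which matches ONNX Unsqueeze semantics.

import numpy as np

def unsqueeze_shape(in_shape, axes):
    # insert a 1 at each (already positive) axis, keeping the other dims in order
    out_rank = len(in_shape) + len(axes)
    axes = [a if a >= 0 else a + out_rank for a in axes]
    remaining = list(in_shape)
    return [1 if i in axes else remaining.pop(0) for i in range(out_rank)]

assert unsqueeze_shape([3, 4], [0, 2]) == [1, 3, 1, 4]
assert np.zeros((3, 4)).reshape(unsqueeze_shape([3, 4], [0, 2])).shape == (1, 3, 1, 4)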
Example #2
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape
        y = inputs[1]
        indices = cls.get_constant(y)
        axis = node.attrs.get('axis', 0)

        pshape = ProvisionalDim(x_shape[:axis:] + list(indices.shape) +
                                x_shape[axis + 1:])
        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            x_val = cls.get_constant(x)
            params = ConstantInputParameters(valid_name,
                                             value=np.take(x_val,
                                                           indices,
                                                           axis=axis))
        else:
            axis = cls._trim_axis(axis, x_shape)
            params = GatherParameters(valid_name, axis=axis, indices=indices)
            G.add_edge(
                NNEdge(from_node=x[0], to_node=params, from_idx=x[1],
                       to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape)
        return params
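A small numpy check (illustration only) of why the constant path above works: np.take produces exactly the provisional shape x_shape[:axis] + indices.shape + x_shape[axis + 1:] that the handler computes.

import numpy as np

x = np.arange(24).reshape(2, 3, 4)
indices = np.array([2, 0])
axis = 1
out = np.take(x, indices, axis=axis)          # gather along axis 1
assert out.shape == x.shape[:axis] + indices.shape + x.shape[axis + 1:]   # (2, 2, 4)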
Example #3
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape

        transpose = node.attrs.get('perm', list(range(len(x_shape) - 1, -1, -1)))
        transpose = tuple(transpose)
        pout_shape = [x_shape[i] for i in transpose]

        new_axes = {}
        for idx, dim in enumerate(x_shape):
            if dim is not None:
                new_axes[idx] = len(new_axes)
        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            x_val = cls.get_constant(x)
            params = ConstantInputParameters(valid_name, value=x_val.transpose(transpose))
        else:
            transpose = [new_axes[axis] for axis in transpose if x_shape[axis] is not None]
            if transpose == sorted(transpose):
                params = NoOPParameters(valid_name, desc="transpose does nothing")
            else:
                params = TransposeParameters(valid_name, transpose=transpose)
            G.add_edge(NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))
        all_nodes[node.output[0]] = (params, 0, ProvisionalDim(pout_shape))
        return params
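An illustrative sketch (assumption, not nntool code): when 'perm' is absent ONNX Transpose reverses all axes, and a permutation that is already sorted changes nothing, which is the no-op case the handler above detects.

import numpy as np

x = np.arange(6).reshape(1, 2, 3)
default_perm = tuple(range(x.ndim - 1, -1, -1))        # (2, 1, 0) when perm is missing
assert np.transpose(x, default_perm).shape == (3, 2, 1)
assert list((0, 1, 2)) == sorted((0, 1, 2))            # identity permutation -> no-op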
Example #4
 def _common(cls, node, **kwargs):
     all_nodes = kwargs['all_nodes']
     valid_name = kwargs['valid_name']
     G = kwargs['G']
     constant_operation = kwargs.get('constant_operation')
     inputs = [all_nodes[inp] for inp in node.input]
     # may have more than one input i.e. clip
     x = inputs[0]
     if cls.is_constant(x) and constant_operation:
         res = constant_operation(cls.get_constant(x))
         if res.size < 10:
             logger.info("reducing %s to a constant %s", valid_name, res)
         else:
             logger.info("reducing %s to a constant", valid_name)
         params = ConstantInputParameters(valid_name,
                                          value=res,
                                          constant_store=G.constant_store)
     else:
         params_args = kwargs.get('params_args', {})
         params = kwargs['params_class'](valid_name, **params_args)
         G.add_edge(
             NNEdge(from_node=x[0], to_node=params, from_idx=x[1],
                    to_idx=0))
     all_nodes[node.output[0]] = (params, 0, copy.deepcopy(x[2]))
     return params
Example #5
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape
        axes = cls._resolve_negative_ranks(kwargs['axes'], len(x_shape))
        if len(x_shape) == 0:
            assert len(axes) == 1 and axes[0] == 0
            new_shape = [1]
        else:
            new_shape = [
                item for sublist in [[1, dim] if idx in axes else [dim]
                                     for idx, dim in enumerate(x_shape)]
                for item in sublist
            ]

        pshape = ProvisionalDim(new_shape)
        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            x_val = cls.get_constant(x)
            params = ConstantInputParameters(valid_name,
                                             value=x_val.reshape(new_shape))
        else:
            old_shape = cls._get_real_dim(x_shape)
            shape = cls._get_real_dim(new_shape)
            params = ReshapeParameters(valid_name,
                                       old_shape=old_shape,
                                       shape=shape)
            G.add_edge(
                NNEdge(from_node=x[0], to_node=params, from_idx=x[1],
                       to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape)
        return params
 def _common(cls, node, **kwargs):
     all_nodes = kwargs['all_nodes']
     G = kwargs['G']
     valid_name = kwargs['valid_name']
     inputs = [all_nodes[inp] for inp in node.input]
     x = inputs[0]
     x_shape = x[2].shape
     to_dtype = node.attrs['to']
     if cls.is_constant(x):
         x_val = cls.get_constant(x)
         x_val = x_val.astype(to_dtype)
         if x_val.size < 10:
             logger.info("reducing %s to a constant %s", valid_name, x_val)
         else:
             logger.info("reducing %s to a constant", valid_name)
         params = ConstantInputParameters(valid_name,
                                          dims=Dim.unnamed(x_val.shape),
                                          value=x_val)
     else:
         params = QuantizeParameters(valid_name,
                                     to_qtype=QType(dtype=to_dtype))
         G.add_edge(
             NNEdge(from_node=x[0], to_node=params, from_idx=x[1],
                    to_idx=0))
     all_nodes[node.output[0]] = (params, 0, ProvisionalDim(x_shape), None)
     return params
Example #7
 def _common(cls, node, v13=False, **kwargs):
     all_nodes = kwargs['all_nodes']
     valid_name = kwargs['valid_name']
     G = kwargs['G']
     inputs = [all_nodes[inp] for inp in node.input]
     axis = node.attrs.get('axis', None)
     # may have more than one input i.e. clip
     x = inputs[0]
     x_shape = x[2].shape
     if axis and axis < 0:
         axis += len(x_shape)
     axis = cls._trim_axis(axis, x_shape)
     if axis != 0 and not v13:
          raise ValueError(
             'LogSoftmax does not support ONNX version < 13 with axis not first'
         )
     if cls.is_constant(x):
         logger.info("reducing %s to a constant", valid_name)
         params = ConstantInputParameters(
             valid_name,
             value=np.log(softmax_func(cls.get_constant(x), axis=axis)))
     else:
         softmax_params = SoftMaxParameters(f'{valid_name}_softmax',
                                            axis=axis)
         G.add_edge(
             NNEdge(from_node=x[0],
                    to_node=softmax_params,
                    from_idx=x[1],
                    to_idx=0))
         params = LogOpParameters(f'{valid_name}_log')
         G.add_edge(NNEdge(from_node=softmax_params, to_node=params))
     all_nodes[node.output[0]] = (params, 0, copy.deepcopy(x[2]), None)
     return params
Example #8
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape
        axes = cls._resolve_negative_ranks(kwargs['axes'], len(x_shape))
        if axes:
            if any(x_shape[axis] != 1 for axis in axes):
                raise ValueError("axis parameter in node %s is invalid %s" % (valid_name, axes))
            new_shape = [dim for idx, dim in enumerate(x_shape) if idx not in axes]
        else:
            new_shape = [dim for dim in x_shape if dim != 1]

        pshape = ProvisionalDim(new_shape)
        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            x_val = cls.get_constant(x)
            params = ConstantInputParameters(valid_name, value=x_val.reshape(new_shape),
                                             constant_store=G.constant_store)
        else:
            old_shape = cls._get_real_dim(x_shape)
            shape = cls._get_real_dim(new_shape)
            params = ReshapeParameters(valid_name, old_shape=old_shape, shape=shape)
            G.add_edge(NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape)
        return params
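A minimal sketch (illustration only) of the Squeeze shape rule applied above: listed axes must have size 1 and are removed; with no axes given, every size-1 dimension is dropped.

def squeeze_shape(shape, axes=None):
    if axes:
        if any(shape[a] != 1 for a in axes):
            raise ValueError('cannot squeeze a non-unit axis')
        return [d for i, d in enumerate(shape) if i not in axes]
    return [d for d in shape if d != 1]

assert squeeze_shape([1, 3, 1, 4], [0, 2]) == [3, 4]
assert squeeze_shape([1, 3, 1, 4]) == [3, 4]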
Example #9
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']

        inputs = [all_nodes[inp] for inp in node.input]

        if not all(cls.is_constant(inp) for inp in inputs):
            raise NotImplementedError(
                "nntool does not support import of graphs with evaluated loops"
            )

        importer = kwargs['importer']
        sub_G = NNGraph()
        all_nodes_clone = all_nodes.copy()
        importer.import_subgraph(sub_G,
                                 node.attrs['body'], {},
                                 all_nodes=all_nodes_clone)
        if not all(
                isinstance(inp, (InputParameters, ConstantInputParameters))
                for inp in sub_G.inputs()):
            raise NotImplementedError(
                "nntool does not support import of graphs with evaluated loops"
            )
        sub_G.add_dimensions()
        for idx, inp in enumerate(sub_G.inputs()):
            inp.index = idx

        logger.info(f"reducing loop {valid_name} to a constant")
        count = inputs[0][0].value
        keep_going = inputs[1][0].value
        loop_carried = [inp[0].value for inp in inputs[2:]]
        outputs = [np.array([])] * len(node.output)
        while keep_going and count > 0:
            executer = GraphExecuter(sub_G)
            output_tensors = executer.execute([count, keep_going] +
                                              loop_carried,
                                              silent=True)
            outp_vals = [
                output_tensors[node.step_idx][0] for node in sub_G.outputs()
                if not isinstance(node, InputParameters)
            ]
            keep_going = outp_vals[0]
            for idx, val in enumerate(outp_vals[1:]):
                if idx < len(loop_carried):
                    loop_carried[idx] = outputs[idx] = val
                elif outputs[idx] is None:
                    outputs[idx] = val
                else:
                    outputs[idx] = np.concatenate((outputs[idx], val))
            count -= 1
        for idx, outp in enumerate(node.output):
            params = ConstantInputParameters(
                G.unique_name(f'{valid_name}_out{idx}'),
                value=outputs[idx],
                dims=Dim.unnamed(outputs[idx].shape))
            all_nodes[outp] = (params, 0, ProvisionalDim(outputs[idx].shape),
                               None)

        return None
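A simplified sketch (assumption, plain Python rather than GraphExecuter) of the ONNX Loop semantics the constant-folding code above replays: the body runs at most `count` times while the condition output stays true, with loop-carried values fed back each iteration.

def run_loop(count, cond, carried, body):
    i = 0
    while cond and i < count:
        cond, *carried = body(i, cond, *carried)
        i += 1
    return carried

# hypothetical body: accumulate the iteration index while the running sum stays below 10
result = run_loop(100, True, [0], lambda i, c, acc: (acc + i < 10, acc + i))
assert result == [10]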
Example #10
 def gen_concat(cls, node, inputs, axis, **kwargs):
     all_nodes = kwargs['all_nodes']
     G = kwargs['G']
     valid_name = kwargs['valid_name']
     inputs = [all_nodes[inp] for inp in node.input]
     input_shapes = [inp[2].shape for inp in inputs]
     axis_sum = sum(shape[axis] for shape in input_shapes)
     axis = axis if axis >= 0 else len(input_shapes[0]) + axis
     output_shape = [
         axis_sum if idx == axis else dim
         for idx, dim in enumerate(input_shapes[0])
     ]
     pout_dim = ProvisionalDim(output_shape)
     none_dims = sum(
         [1 if dim is None else 0 for dim in output_shape[:axis:]])
     if all(cls.is_constant(inp) for inp in inputs):
         value = np.concatenate([cls.get_constant(inp) for inp in inputs],
                                axis=axis)
         logger.info(
             f"reducing {valid_name} to a constant {print_small(value)}")
         params = ConstantInputParameters(valid_name, value=value)
     else:
         params = ConcatParameters(valid_name, axis=axis - none_dims)
         for idx, inp in enumerate(inputs):
             G.add_edge(
                 NNEdge(from_node=inp[0],
                        to_node=params,
                        from_idx=inp[1],
                        to_idx=idx))
     all_nodes[node.output[0]] = (params, 0, pout_dim, inputs[0][3])
     return params
Example #11
 def _common(cls, node, **kwargs):
     all_nodes = kwargs['all_nodes']
     valid_name = kwargs['valid_name']
     G = kwargs['G']
     constant_operation = kwargs.get('constant_operation')
     inputs = [all_nodes[inp] for inp in node.input]
     assert len(inputs) == 2
     if all(cls.is_constant(inp) for inp in inputs) and constant_operation:
         logger.info("reducing %s to a constant", valid_name)
         values = [cls.get_constant(inp) for inp in inputs]
         outputs = cls.implied_broadcast(inputs)
         params = ConstantInputParameters(valid_name, value=constant_operation(*values),
                                          dims=Dim.unnamed(outputs[0].known_shape),
                                          constant_store=G.constant_store)
     else:
         params_args = kwargs.get('params_args', {})
         params = kwargs['params_class'](valid_name, **params_args)
         outputs = cls.implied_broadcast(inputs)
         shapes = []
         for idx, inp in enumerate(inputs):
             G.add_edge(NNEdge(from_node=inp[0], to_node=params, from_idx=inp[1], to_idx=idx))
             shapes.append(inp[2].known_shape)
         if isinstance(params, Broadcastable):
             params.set_broadcast(shapes)
     all_nodes[node.output[0]] = (params, 0, outputs[0])
     return params
Example #12
    def _common(cls, node, starts, ends, axes, steps, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        x = all_nodes[node.input[0]]
        x_shape = np.array(x[2].shape)
        x_rank = len(x_shape)
        axes = cls._resolve_negative_ranks(
            axes, len(x_shape)) if axes else tuple(range(x_rank))
        axes_rank = len(axes)
        steps = steps if steps else [1] * axes_rank
        slices = np.stack([starts, ends, steps]).transpose((1, 0))
        p_slices = []
        p_shape = []
        for idx, dim in enumerate(x_shape):
            try:
                if dim is None:
                    p_slices.append(None)
                    p_shape.append(None)
                else:
                    slice_idx = axes.index(idx)
                    begin, end, step = slices[slice_idx]
                    begin = max(min(begin if begin >= 0 else dim + begin, dim),
                                0)
                    end = max(min(end if end >= 0 else dim + end, dim), -1)
                    # -sys.maxsize is used to indicate 0 in the reverse slice direction
                    # this makes it compatible with the numpy slice
                    p_slices.append(
                        (begin, -sys.maxsize if end == -1 else end, step))
                    if step < 0:
                        p_shape.append((begin - end) // -step)
                    else:
                        p_shape.append((end - begin) // step)

            except ValueError:
                p_slices.append((0, dim, 1))
                p_shape.append(dim)
        slices = cls._get_real_dim(p_slices)
        shape = cls._get_real_dim(p_shape)

        params = StridedSliceParameters(valid_name,
                                        act_slice=slices,
                                        out_shape=shape)
        if cls.is_constant(x):
            x_val = cls.get_constant(x)
            x_val = params.numpy_slice(x_val)
            if x_val.size < 10:
                logger.info("reducing %s to a constant %s", valid_name, x_val)
            else:
                logger.info("reducing %s to a constant", valid_name)
            params = ConstantInputParameters(valid_name,
                                             dims=Dim.unnamed(x_val.shape),
                                             value=x_val,
                                             constant_store=G.constant_store)
        else:
            G.add_edge(
                NNEdge(from_node=x[0], to_node=params, from_idx=x[1],
                       to_idx=0))
        all_nodes[node.output[0]] = (params, 0, ProvisionalDim(p_shape))
        return params
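A small numpy check (illustration only) of the slice normalisation above: negative begin/end values are wrapped, everything is clamped to the dimension, and for reverse slices a very negative end (the -sys.maxsize sentinel) stands for "past element 0" just like in numpy.

import sys
import numpy as np

x = np.arange(10)
assert list(x[2:8:3]) == [2, 5]                          # forward slice with step
assert list(x[4:-sys.maxsize:-1]) == [4, 3, 2, 1, 0]     # reverse slice down to index 0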
    def _common(cls, node, copy_qtype=False, quantized_args=None, **kwargs):
        all_nodes = kwargs['all_nodes']
        valid_name = kwargs['valid_name']
        G = kwargs['G']
        constant_operation = kwargs.get('constant_operation')
        constant_int_operation = kwargs.get('constant_int_operation')
        inputs = [all_nodes[inp] for inp in node.input]
        if quantized_args:
            args = [inputs[quantized_args[0][0]], inputs[quantized_args[1][0]]]
            inp_qtypes = [
                cls.get_qtype(inputs[quantized_args[0][1]], inputs[quantized_args[0][2]]),
                cls.get_qtype(inputs[quantized_args[1][1]], inputs[quantized_args[1][2]])
            ]
            out_qtype = cls.get_qtype(inputs[quantized_args[2][0]], inputs[quantized_args[2][1]])
        else:
            args = inputs
            assert len(args) == 2
            out_qtype = None

        if all(cls.is_constant(inp) for inp in args) and constant_operation:
            values = [cls.get_constant(inp) for inp in args]
            if quantized_args:
                values = [inp_qtype.dequantize(val) for inp_qtype, val in zip(inp_qtypes, values)]
            outputs = cls.implied_broadcast(inputs)
            if constant_int_operation and all(np.issubdtype(val.dtype, np.integer) for val in values):
                res = constant_int_operation(*values)
            else:
                res = constant_operation(*values)
            if quantized_args:
                res = out_qtype.quantize(res)
            if res.size < 10:
                logger.info("reducing %s to a constant %s", valid_name, res)
            else:
                logger.info("reducing %s to a constant", valid_name)
            params = ConstantInputParameters(valid_name, value=res,
                                             dims=Dim.unnamed(outputs[0].known_shape),
                                             qtype=out_qtype)
        else:
            params_args = kwargs.get('params_args', {})
            params = kwargs['params_class'](valid_name, **params_args)
            outputs = cls.implied_broadcast(inputs)
            shapes = []
            for idx, inp in enumerate(args):
                G.add_edge(NNEdge(from_node=inp[0], to_node=params, from_idx=inp[1], to_idx=idx))
                shapes.append(inp[2].known_shape)
            if isinstance(params, Broadcastable):
                params.set_broadcast(shapes)
            if quantized_args:
                for qtype, inp in zip(inp_qtypes, args):
                    if cls.is_constant(inp):
                        inp[0].qtype = qtype
                qrecs = kwargs['qrecs']
                qrecs[NodeId(params)] = QRec.scaled(in_qs=inp_qtypes, out_qs=[out_qtype])

        if copy_qtype:
            out_qtype = inputs[0][3] if inputs[0][3] is not None else inputs[1][3]

        all_nodes[node.output[0]] = (params, 0, outputs[0], out_qtype)
        return params
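An illustrative sketch (assumption, using plain scale/zero-point arithmetic rather than nntool's QType) of the constant path above: both quantized inputs are dequantized, the float operation is applied, and the result is requantized with the output qtype.

import numpy as np

def dequant(q, scale, zp):
    return (q.astype(np.float32) - zp) * scale

def quant(x, scale, zp):
    return np.clip(np.round(x / scale) + zp, 0, 255).astype(np.uint8)

a = dequant(np.array([10, 20], dtype=np.uint8), scale=0.5, zp=0)   # [5.0, 10.0]
b = dequant(np.array([4, 6], dtype=np.uint8), scale=0.25, zp=0)    # [1.0, 1.5]
res = quant(a + b, scale=0.5, zp=0)
assert res.tolist() == [12, 23]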
Example #14
    def _common(cls,
                node,
                mode='constant',
                pads=None,
                constant_value=0,
                **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape

        apads = np.array(pads).reshape((-1, 2))
        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            val = cls.get_constant(x)
            if mode == 'constant':
                val = np.pad(val,
                             apads,
                             mode=mode,
                             constant_values=constant_value)
            else:
                val = np.pad(val, apads, mode=mode)
            params = ConstantInputParameters(valid_name,
                                             value=val,
                                             constant_store=G.constant_store)
            all_nodes[node.output[0]] = (params, 0, ProvisionalDim(x_shape))
            return params

        if mode != 'constant':
            raise ValueError('%s - pad mode %s is not supported' %
                             (valid_name, mode))
        if constant_value != 0:
            raise ValueError('%s - only zero padding is supported' %
                             valid_name)

        trimmed_pads = tuple(
            [pad for idx, pad in enumerate(apads) if x_shape[idx] is not None])

        if all(sum(trimmed_pad) == 0 for trimmed_pad in trimmed_pads):
            params = NoOPParameters(valid_name, desc="eliminated pad of 0")
            pshape = x_shape
        else:
            pshape = [
                dim + sum(apads[idx]) if dim is not None else None
                for idx, dim in enumerate(x_shape)
            ]
            # pshape = [None if dim is None else dim + sum(apads[idx]) for idx, dim in enumerate(x_shape)]
            padvals = [(constant_value, constant_value)] * len(trimmed_pads)
            params = PadParameters(valid_name,
                                   padding=trimmed_pads,
                                   pad_vals=padvals)
        G.add_edge(
            NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))
        all_nodes[node.output[0]] = (params, 0, ProvisionalDim(pshape))
        return params
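A minimal numpy check (illustration only): np.pad wants one (before, after) pair per axis, which is why the handler above reshapes the flat pads list into pairs before padding.

import numpy as np

pads = [1, 0, 0, 2]                         # flat list read here as per-axis pairs
apads = np.array(pads).reshape((-1, 2))     # [[1, 0], [0, 2]]
out = np.pad(np.ones((2, 3)), apads, mode='constant', constant_values=0)
assert out.shape == (2 + 1 + 0, 3 + 0 + 2)  # each axis grows by before + after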
Example #15
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]

        if cls.SINCE_VERSION == 1:
            shape = np.array(node.attrs["shape"])
        else:  # since_version >= 5
            shape = cls.get_constant(inputs[1])

        input_shape = np.array(inputs[0][2].shape)
        shape = [
            dim if dim != 0 else input_shape[idx]
            for idx, dim in enumerate(shape)
        ]
        if -1 in shape:
            wild_index = shape.index(-1)
            in_size = prod([1 if dim is None else dim for dim in input_shape])
            shape_size = prod(
                [1 if dim is None or dim <= 0 else dim for dim in shape])
            if in_size % shape_size != 0:
                raise ValueError('invalid reshape')
            shape[wild_index] = in_size // shape_size
        shape = np.array(shape)

        if cls.is_constant(inputs[0]):
            logger.info("reducing %s to a constant", valid_name)
            params = ConstantInputParameters(valid_name,
                                             value=cls.get_constant(
                                                 inputs[0]).reshape(shape),
                                             dims=Dim.unnamed(shape),
                                             constant_store=G.constant_store)
            pshape = ProvisionalDim(shape)
            all_nodes[node.output[0]] = (params, 0, pshape)
            return params

        # TODO - There must be a better way of doing this
        # This hacks around the fact that the batch dimension will be in the reshape
        if input_shape[0] is None and shape[0] == 1:
            shape = np.array([None] + list(shape[1::]))

        pshape = ProvisionalDim(shape)
        # pylint: disable=singleton-comparison
        old_shape = Dim.unnamed(list(input_shape[input_shape != None]))
        shape = Dim.unnamed(list(shape[shape != None]))
        params = ReshapeParameters(valid_name,
                                   old_shape=old_shape,
                                   shape=shape)
        inp = inputs[0]
        G.add_edge(
            NNEdge(from_node=inp[0], to_node=params, from_idx=inp[1],
                   to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape)
        return params
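A minimal sketch (illustration only) of the ONNX Reshape shape fix-up performed above: 0 means "copy the input dimension" and -1 means "infer from the remaining size".

from math import prod

def resolve_reshape(in_shape, shape):
    shape = [in_shape[i] if d == 0 else d for i, d in enumerate(shape)]
    if -1 in shape:
        known = prod(d for d in shape if d != -1)
        shape[shape.index(-1)] = prod(in_shape) // known
    return shape

assert resolve_reshape([2, 3, 4], [0, -1]) == [2, 12]
assert resolve_reshape([2, 3, 4], [4, 0, -1]) == [4, 3, 2]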
 def _common(cls, node, **kwargs):
     all_nodes = kwargs['all_nodes']
     G = kwargs['G']
     valid_name = kwargs['valid_name']
     inputs = [all_nodes[inp] for inp in node.input]
     x = inputs[0]
     logger.info("reducing %s to a constant", valid_name)
     x_shape = [dim if dim else 1 for dim in x[2].shape]
     sz = np.array(prod(x_shape))
     params = ConstantInputParameters(valid_name, value=sz)
     all_nodes[node.output[0]] = (params, 0, ProvisionalDim([]), None)
     return params
 def _common(cls, node, **kwargs):
     all_nodes = kwargs['all_nodes']
     G = kwargs['G']
     valid_name = kwargs['valid_name']
     inputs = [all_nodes[inp] for inp in node.input]
     x = inputs[0]
     # this process can leave dangling nodes without outputs
     # we clean them after we have finished loading
     x_shape = [dim if dim else 1 for dim in x[2].shape]
     params = ConstantInputParameters(valid_name, value=np.array(x_shape))
     logger.info("reducing %s to a constant %s", valid_name, x_shape)
     all_nodes[node.output[0]] = (params, 0, ProvisionalDim([len(x_shape)]), None)
     return params
Example #18
 def _common(cls, node, **kwargs):
     all_nodes = kwargs['all_nodes']
     valid_name = kwargs['valid_name']
     inputs = [all_nodes[inp] for inp in node.input]
     x = inputs[0]
     if not cls.is_constant(x):
         # TODO - clear unique paths to this node here
         pass
     logger.info("reducing %s to a constant", valid_name)
     x_shape = [dim if dim else 1 for dim in x[2].shape]
     params = ConstantInputParameters(valid_name,
                                      value=np.array(prod(x_shape)))
     all_nodes[node.output[0]] = (params, 0, ProvisionalDim([]))
     return params
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        input_shapes = [inp[2].shape for inp in inputs]
        axis = node.attrs['axis']
        new_axis = node.attrs.get('new_axis', 0)
        # if new_axis is false this is the same as concat
        if not new_axis:
            return cls.gen_concat(node, inputs, axis)
        # if it is true then it's different
        if not all(shape == input_shapes[0] for shape in input_shapes[1::]):
            raise ValueError(
                'all shapes must be the same in ConcatFromSequence with new axis'
            )

        # reduce to a constant if we can
        if all(cls.is_constant(inp) for inp in inputs):
            logger.info("reducing %s to a constant", valid_name)
            value = np.concatenate([cls.get_constant(inp) for inp in inputs],
                                   axis=axis)
            params = ConstantInputParameters(valid_name, value=value)
            all_nodes[node.output[0]] = (params, 0,
                                         ProvisionalDim(value.shape),
                                         inputs[0][3])
            return params

        # add the axis into the shape
        new_shape = input_shapes[0].copy()
        new_shape = new_shape[:axis:] + [1] + new_shape[axis::]
        old_shape = cls._get_real_dim(input_shapes[0])
        shape = cls._get_real_dim(new_shape)
        # create a reshape on each input and pass the outputs to the concat mixin
        #pylint: disable=consider-using-enumerate
        for idx in range(len(inputs)):
            inp = inputs[idx]
            rparams = ReshapeParameters("%s_reshape_%s" % (valid_name, idx),
                                        old_shape=old_shape,
                                        shape=shape)
            G.add_edge(
                NNEdge(from_node=inp[0],
                       to_node=rparams,
                       from_idx=inp[1],
                       to_idx=0))
            inputs[idx] = (rparams, 0, ProvisionalDim(new_shape), inp[3])

        return cls.gen_concat(node, inputs, axis)
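A small numpy check (illustration only): giving each input a unit axis at `axis` and then concatenating, as the reshape-plus-concat construction above does, is equivalent to np.stack on that axis.

import numpy as np

a, b = np.ones((2, 3)), np.zeros((2, 3))
axis = 1
stacked = np.stack([a, b], axis=axis)
via_reshape = np.concatenate([v.reshape(v.shape[:axis] + (1,) + v.shape[axis:])
                              for v in (a, b)], axis=axis)
assert np.array_equal(stacked, via_reshape) and stacked.shape == (2, 2, 3)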
Example #20
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        y = inputs[1]
        shape = cls.get_constant(y)

        pshape = cls.broadcast_to(x, shape)
        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            x_val = cls.get_constant(x)
            params = ConstantInputParameters(valid_name, value=x_val * np.ones(shape))
        else:
            raise ValueError("Expand is only implemented on constants")
        all_nodes[node.output[0]] = (params, 0, pshape)
        return params
Example #21
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        if not all(cls.is_constant(inp) for inp in inputs):
            raise NotImplementedError("Tile is only implemented on constants")

        inp_vals = [cls.get_constant(inp) for inp in inputs]
        out_val = np.tile(inp_vals[0], inp_vals[1])
        logger.info("reducing %s to a constant", valid_name)
        params = ConstantInputParameters(
            valid_name,
            value=out_val
        )
        pshape = ProvisionalDim(out_val.shape)
        all_nodes[node.output[0]] = (params, 0, pshape, inputs[0][3])
        return params
Example #22
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape
        y = inputs[1]
        indices = cls.get_constant(y)
        axis = node.attrs.get('axis', 0)

        pshape = ProvisionalDim(x_shape[:axis:] + list(indices.shape) +
                                x_shape[axis + 1:])
        if cls.is_constant(x):
            x_val = cls.get_constant(x)
            logger.info(
                f"reducing {valid_name} to a constant {cls.print_small(x_val)}"
            )
            params = ConstantInputParameters(valid_name,
                                             value=np.take(x_val,
                                                           indices,
                                                           axis=axis))
        else:
            if np.ndim(indices) <= 1:
                idx = np.asscalar(indices)
                act_slice = tuple([(0, dim, 1) if i != axis else
                                   (idx, idx + 1, 1)
                                   for i, dim in enumerate(x_shape)
                                   if dim is not None])
                out_shape = pshape.known_shape.copy()
                params = StridedSliceParameters(valid_name,
                                                act_slice=act_slice,
                                                out_shape=out_shape)
            else:
                axis = cls._trim_axis(axis, x_shape)
                params = GatherParameters(valid_name,
                                          axis=axis,
                                          indices=indices)
            G.add_edge(
                NNEdge(from_node=x[0], to_node=params, from_idx=x[1],
                       to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape, x[3])
        return params
Example #23
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape

        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            x_val = cls.get_constant(x)
            params = ConstantInputParameters(valid_name,
                                             dims=Dim.unnamed(x_val.shape),
                                             value=np.floor(x_val),
                                             constant_store=G.constant_store)
        else:
            raise ValueError("ONNX floor operator is not implemented")
        all_nodes[node.output[0]] = (params, 0, ProvisionalDim(x_shape))
        return params
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        valid_name = kwargs['valid_name']
        G = kwargs['G']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        y = inputs[1]
        shape = cls.get_constant(y)

        pshape = cls.broadcast_to(x, shape)
        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            x_val = cls.get_constant(x)
            params = ConstantInputParameters(valid_name, value=x_val * np.ones(shape))
        else:
            params = ExpandParameters(valid_name, shape=shape)
            G.add_edge(NNEdge(x[0], params, from_idx=x[1]))

        all_nodes[node.output[0]] = (params, 0, pshape, x[3])
        return params
Example #25
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape
        axes = cls._resolve_negative_ranks(kwargs['axes'], len(x_shape))
        if len(x_shape) == 0:
            assert len(axes) == 1 and axes[0] == 0
            new_shape = [1]
        else:
            new_shape = []
            old_shape = x_shape.copy()
            axes_copy = axes.copy()
            idx = 0
            while axes_copy or old_shape:
                if idx in axes_copy:
                    axes_copy.remove(idx)
                    new_shape.append(1)
                else:
                    if not old_shape:
                        raise ValueError(f'error in unsqueeze inshape {x_shape} axes {axes}')
                    new_shape.append(old_shape.pop(0))
                idx += 1

        pshape = ProvisionalDim(new_shape)
        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            x_val = cls.get_constant(x)
            params = ConstantInputParameters(valid_name, value=x_val.reshape(new_shape),
                                             constant_store=G.constant_store)
        else:
            old_shape = cls._get_real_dim(x_shape)
            shape = cls._get_real_dim(new_shape)
            params = ReshapeParameters(valid_name, old_shape=old_shape, shape=shape)
            G.add_edge(NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape)
        return params
Example #26
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape

        axis = node.attrs.get('axis', 1)
        if axis < 0:
            axis += len(x_shape)

        old_shape = cls._get_real_dim(x_shape)
        if axis == 0:
            shape = [1, prod(old_shape)]
            pshape = shape
        else:
            start = x_shape[:axis:]
            end = x_shape[axis::]
            pshape = list(start) + [prod(cls._get_real_dim(end))]
            shape = cls._get_real_dim(pshape)

        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            params = ConstantInputParameters(
                valid_name,
                value=cls.get_constant(x).reshape(shape),
                constant_store=G.constant_store
            )
            pshape = ProvisionalDim(shape)
        else:
            params = ReshapeParameters(valid_name, old_shape=Dim.unnamed(
                old_shape), shape=Dim.unnamed(shape))
            pshape = ProvisionalDim(pshape)
            G.add_edge(NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))

        all_nodes[node.output[0]] = (params, 0, pshape)
        return params
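A minimal sketch (illustration only) of the shape computed above: every dimension from `axis` onwards is collapsed into one, while the leading dimensions are kept as they are.

from math import prod

def flatten_from(shape, axis):
    if axis == 0:
        return [1, prod(shape)]
    return list(shape[:axis]) + [prod(shape[axis:])]

assert flatten_from([2, 3, 4], 1) == [2, 12]
assert flatten_from([2, 3, 4], 0) == [1, 24]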
Example #27
 def _common(cls, node, **kwargs):
     all_nodes = kwargs['all_nodes']
     valid_name = kwargs['valid_name']
     G = kwargs['G']
     constant_operation = kwargs.get('constant_operation')
     inputs = [all_nodes[inp] for inp in node.input]
     assert len(inputs) == 2
     if all(cls.is_constant(inp) for inp in inputs) and constant_operation:
         logger.info("reducing %s to a constant", valid_name)
         values = [cls.get_constant(inp) for inp in inputs]
         params = ConstantInputParameters(valid_name,
                                          value=constant_operation(*values))
     else:
         params_args = kwargs.get('params_args', {})
         params = kwargs['params_class'](valid_name, **params_args)
         for idx, inp in enumerate(inputs):
             G.add_edge(
                 NNEdge(from_node=inp[0],
                        to_node=params,
                        from_idx=inp[1],
                        to_idx=idx))
     outputs = cls.implied_broadcast(inputs)
     all_nodes[node.output[0]] = (params, 0, outputs[0])
     return params
Example #28
    def _common(cls,
                node,
                mode='constant',
                pads=None,
                constant_value=0,
                **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]
        x = inputs[0]
        x_shape = x[2].shape

        ndim = len(x_shape)
        npad = len(pads) // 2
        if npad != ndim:
            if all(not pad for pad in pads):
                logger.warning(
                    f'Pad {valid_name} has {npad} pad values and {ndim} input rank. '
                    'Since pad is zero this is ignored but it probably indicates a bug in the ONNX graph.'
                )
            else:
                raise ValueError(
                    f'Error in ONNX graph - pad {valid_name} has {npad} pad values and {ndim} input rank.'
                )
        apads = np.array([[pads[idx], pads[idx + ndim]]
                          for idx in range(ndim)])
        # apads = np.array(pads).reshape((-1, 2))
        if cls.is_constant(x):
            logger.info("reducing %s to a constant", valid_name)
            val = cls.get_constant(x)
            if mode == 'constant':
                val = np.pad(val,
                             apads,
                             mode=mode,
                             constant_values=constant_value)
            else:
                val = np.pad(val, apads, mode=mode)
            params = ConstantInputParameters(valid_name, value=val)
            pshape = [
                dim + sum(apads[idx]) if dim is not None else None
                for idx, dim in enumerate(x_shape)
            ]
            all_nodes[node.output[0]] = (params, 0, ProvisionalDim(pshape),
                                         x[3])
            return params

        if mode != 'constant':
            raise ValueError('%s - pad mode %s is not supported' %
                             (valid_name, mode))

        if any(
                sum(pad) > 0 and x_shape[idx] is None
                for idx, pad in enumerate(apads)):
            raise ValueError(
                f'unknown/batch axis is being padded in {valid_name}. Manipulation of '
                'unknown/batch axis is not supported')
        trimmed_pads = tuple(
            [pad for idx, pad in enumerate(apads) if x_shape[idx] is not None])

        if all(sum(trimmed_pad) == 0 for trimmed_pad in trimmed_pads):
            params = NoOPParameters(valid_name, desc="eliminated pad of 0")
            pshape = x_shape
        else:
            pshape = [
                dim + sum(apads[idx]) if dim is not None else None
                for idx, dim in enumerate(x_shape)
            ]
            # pshape = [None if dim is None else dim + sum(apads[idx]) for idx, dim in enumerate(x_shape)]
            padvals = [(constant_value, constant_value)] * len(trimmed_pads)
            params = PadParameters(valid_name,
                                   padding=trimmed_pads,
                                   pad_vals=padvals)
        G.add_edge(
            NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))
        all_nodes[node.output[0]] = (params, 0, ProvisionalDim(pshape), x[3])
        return params
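A small numpy check (illustration only) of the pads layout handled above: ONNX Pad stores all the "begin" values followed by all the "end" values, so pads[i] is paired with pads[i + ndim] for each axis before calling np.pad.

import numpy as np

pads = [1, 0, 2, 3]                         # begins for 2 axes, then ends for 2 axes
ndim = len(pads) // 2
apads = np.array([[pads[i], pads[i + ndim]] for i in range(ndim)])   # [[1, 2], [0, 3]]
out = np.pad(np.ones((2, 3)), apads, mode='constant', constant_values=0)
assert out.shape == (2 + 1 + 2, 3 + 0 + 3)   # (5, 6)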
Example #29
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]

        x = inputs[0]
        x_shape = x[2].shape
        axis = node.attrs.get('axis', 0)

        if axis < 0:
            axis += len(x_shape)

        assert axis < len(x_shape) and axis >= 0,\
            "axis %s is out of bounds - input dims %s in node %s" % (axis, x_shape, valid_name)

        split_dim = x_shape[axis]
        assert split_dim is not None, "split dimension must be defined"

        split = None
        if cls.SINCE_VERSION >= 13:
            if len(inputs) > 1:
                split = cls.get_constant(inputs[1])
        else:
            split = node.attrs.get('split')
            if split:
                split = np.array(split)
                assert sum(
                    split
                ) == split_dim, "split sizes should add up to total size %s" % valid_name
                assert np.all(
                    split > 0
                ), "split sizes should be greater than zero %s" % valid_name
            else:
                num_outputs = len(node.output)
                assert split_dim % num_outputs == 0,\
                    "no split attribute or value and dimension is not divisible by number of outputs %s" % valid_name
                split = np.array([split_dim // num_outputs] * num_outputs)

        split = split.tolist()
        act_slices = []
        out_shapes = []
        out_pshapes = []
        cur = 0
        for idx, split_dim in enumerate(split):
            act_slices.append(
                tuple(
                    (cur, cur + split_dim, 1) if didx == axis else (0, dim, 1)
                    for didx, dim in enumerate(x_shape) if dim is not None))
            out_pshape = tuple(split_dim if didx == axis else dim
                               for didx, dim in enumerate(x_shape))
            out_shapes.append(
                tuple(dim for dim in out_pshape if dim is not None))
            out_pshapes.append(ProvisionalDim(out_pshape))
            cur += split_dim
        axis -= sum(1 if dim is None else 0 for dim in x_shape[:axis:])
        params = SplitParameters(valid_name,
                                 act_slices=act_slices,
                                 out_shapes=out_shapes,
                                 axis=axis)
        if cls.is_constant(x):
            logger.info("reducing %s to %s constant(s)", valid_name,
                        len(out_shapes))
            values = params.numpy_split(cls.get_constant(x))
            for idx, out_pshape in enumerate(out_pshapes):
                cparams = ConstantInputParameters(
                    valid_name,
                    value=values[idx],
                    constant_store=G.constant_store)
                all_nodes[node.output[idx]] = (cparams, 0, out_pshape)
            return None

        G.add_edge(
            NNEdge(from_node=x[0], to_node=params, from_idx=x[1], to_idx=0))
        for idx, out_pshape in enumerate(out_pshapes):
            all_nodes[node.output[idx]] = (params, idx, out_pshape)
        return params
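A minimal numpy sketch (illustration only) of the per-output slices built above: each Split output takes the next `split[i]` elements along `axis` and the full range on every other axis.

import numpy as np

x = np.arange(12).reshape(2, 6)
split, axis = [2, 4], 1
cur, parts = 0, []
for size in split:
    slices = tuple(slice(cur, cur + size) if d == axis else slice(None)
                   for d in range(x.ndim))
    parts.append(x[slices])
    cur += size
assert [p.shape for p in parts] == [(2, 2), (2, 4)]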
    def _common(cls, node, **kwargs):
        all_nodes = kwargs['all_nodes']
        G = kwargs['G']
        valid_name = kwargs['valid_name']
        inputs = [all_nodes[inp] for inp in node.input]

        if cls.SINCE_VERSION == 1:
            shape = np.array(node.attrs["shape"])
        else:  # since_version >= 5
            shape = cls.get_constant(inputs[1])

        input_shape = np.array(inputs[0][2].shape)
        shape = [
            dim if dim != 0 else input_shape[idx]
            for idx, dim in enumerate(shape)
        ]
        # this catches a special case where inp is something like [None, 2, 4] and shape is [2, -1, 4]
        # The -1 is clearly the None moving so move it
        if cls.moves_unknown(input_shape, shape):
            shape = np.array([None if dim == -1 else dim for dim in shape])
        else:
            if -1 in shape:
                new_shape_size = reduce(
                    lambda x, y: x * 1
                    if y is None or y == -1 else x * y, shape, 1)
                inp_size = reduce(lambda x, y: x * y
                                  if y is not None else x, input_shape, 1)
                in_size = prod(
                    [1 if dim is None else dim for dim in input_shape])
                shape_size = prod([1 if dim is None else dim for dim in shape])
                if in_size % shape_size != 0:
                    raise ValueError('invalid reshape')
                shape[shape.index(-1)] = inp_size // new_shape_size
            shape = np.array(shape)
            # TODO - There must be a better way of doing this
            # This hacks around the fact that the batch dimension will be in the reshape
            if input_shape[0] is None and shape[0] == 1:
                shape = np.array([None] + list(shape[1::]))

        inp = inputs[0]
        if cls.is_constant(inp):
            # there should be no None in shape since a constant always has known size
            logger.info("reducing %s to a constant", valid_name)
            params = ConstantInputParameters(
                valid_name,
                value=cls.get_constant(inp).reshape(shape),
                dims=Dim.unnamed(shape))
            pshape = ProvisionalDim(shape)
            all_nodes[node.output[0]] = (params, 0, pshape, inp[3])
            return params

        pshape = ProvisionalDim(shape)
        # pylint: disable=singleton-comparison
        old_shape = Dim.unnamed(list(input_shape[input_shape != None]))
        shape = Dim.unnamed(list(shape[shape != None]))
        params = ReshapeParameters(valid_name,
                                   old_shape=old_shape,
                                   shape=shape)
        G.add_edge(
            NNEdge(from_node=inp[0], to_node=params, from_idx=inp[1],
                   to_idx=0))
        all_nodes[node.output[0]] = (params, 0, pshape, inp[3])
        return params