Example #1
def _split_pooling_2d(graph: Graph, op: Pooling2D, v: Variable,
                      v_pair: Sequence[Variable], axis: Axis):
    s1 = v_pair[0].shape_dict[axis]
    x = op.inputs["x"]
    y = op.outputs["y"]
    op.remove_all()

    if v == x:
        x_0, x_1 = v_pair
        s, k, p = (op.SH, op.KH, op.PH) if axis == Axis.H else (op.SW, op.KW,
                                                                op.PW)

        raise NotImplementedError

    elif v == y:
        y_0, y_1 = v_pair
        s, k, p = (op.SH, op.KH, op.PH) if axis == Axis.H else (op.SW, op.KW,
                                                                op.PW)

        x_0_range = (0 * s - k // 2, (y_0.shape_dict[axis] - 1) * s + k)
        x_1_range = (y_0.shape_dict[axis] * s - k // 2,
                     (y.shape_dict[axis] - 1) * s + k)

        indices = AxisKeyDict(OrderNHWC.axes,
                              [slice(None) for _ in OrderNHWC.axes])

        indices_0 = AxisKeyDict(indices)
        indices_0[axis] = slice(max(x_0_range[0], 0),
                                min(x_0_range[1], x.shape_dict[axis]))

        indices_1 = AxisKeyDict(indices)
        indices_1[axis] = slice(max(x_1_range[0], 0),
                                min(x_1_range[1], x.shape_dict[axis]))

        x_0, = Slice(None, indices=indices_0)(x)
        x_1, = Slice(None, indices=indices_1)(x)

        if p > 0:
            data = ConstantVariable(
                np.zeros([
                    p if a == axis else x.shape_dict[a] for a in x.order.axes
                ]), x.order)
            x_0, = Concat(None, axis=axis)(data, x_0)
            x_1, = Concat(None, axis=axis)(x_1, data)

        op_0, op_1 = op.copy(), op.copy()
        new_padding = (0, op.PW) if axis == Axis.H else (op.PH, 0)
        op_0.parameters["padding"] = new_padding
        op_1.parameters["padding"] = new_padding

        y_0_new, = op_0(x_0)
        y_1_new, = op_1(x_1)

        OptimizeRule.replace_variable(graph, y_0_new.transpose_like(y_0), y_0)
        OptimizeRule.replace_variable(graph, y_1_new.transpose_like(y_1), y_1)

    else:
        raise UnexpectedAndPleaseReportError()
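
A quick arithmetic check of the input ranges computed above (plain Python, not part of webdnn): with stride s, kernel size k, and the first output half covering y_0 rows, the two ranges bound the input region each half reads, using k // 2 as the padding offset; the code then clamps them to the valid input extent with max/min before slicing.

# Illustrative check of the range arithmetic used above.
s, k = 2, 3            # stride and kernel size along the split axis
y0_len, y_len = 3, 6   # rows of the first output half and of the whole output

x_0_range = (0 * s - k // 2, (y0_len - 1) * s + k)      # (-1, 7)
x_1_range = (y0_len * s - k // 2, (y_len - 1) * s + k)  # (5, 13)
print(x_0_range, x_1_range)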
Example #2
    def change_order(self, order: Order) -> "ConstantVariable":
        """change_order_statement(order)

        Change variable order.

        When number of dimension will be increased, axes whose size is one are created.
        Conversely when number of dimension will be decreased, the size of axes which will be removed must be one.

        Not only order attribute, the data attribute is also modified.

        Args:
            order: new order
        """
        old_order = self.order
        old_shape_dict = AxisKeyDict(self.shape_dict.keys(),
                                     self.shape_dict.values())

        super().change_order(order)

        new_order = self.order
        new_shape_dict = AxisKeyDict(self.shape_dict.keys(),
                                     self.shape_dict.values())

        #
        # `old_order_common` and `new_order_common` are the axis orders restricted to the axes included in both `old_order` and `new_order`
        #
        # ex) old_order = OrderCHWN
        #     new_order = OrderNTC
        #
        # =>  old_order_common = OrderCN
        #     new_order_common = OrderNC
        #
        # Data is transformed in the following sequence:
        #
        #     +-----------+                +------------------+                  +------------------+                +-----------+
        #     | old_order |                | old_order_common |                  | new_order_common |                | new_order |
        #     |-----------|  -[reshape]->  |------------------|  -[transpose]->  |------------------|  -[reshape]->  |-----------|
        #     | OrderCHWN |                | OrderCN          |                  | OrderNC          |                | OrderNTC  |
        #     +-----------+                +------------------+                  +------------------+                +-----------+
        #

        old_order_common = Order(
            [axis for axis in old_order.axes if axis in new_order.axes])
        new_order_common = Order(
            [axis for axis in new_order.axes if axis in old_order.axes])

        data = self.data.reshape(
            [old_shape_dict[axis] for axis in old_order_common.axes])
        data = np.transpose(
            data,
            tuple(old_order_common.axes_dict[axis]
                  for axis in new_order_common.axes))
        data = data.reshape([new_shape_dict[axis] for axis in new_order.axes])

        self.data = data

        return self
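
The comment block above can be checked with plain NumPy. The following sketch reproduces the reshape → transpose → reshape sequence for the CHWN → NTC example (the H and W axes that disappear must have size one, and the new T axis gets size one):

import numpy as np

data = np.arange(2 * 1 * 1 * 3).reshape(2, 1, 1, 3)  # old_order = CHWN, shape C=2, H=1, W=1, N=3

data = data.reshape(2, 3)           # reshape:   CHWN -> old_order_common = CN
data = np.transpose(data, (1, 0))   # transpose: CN   -> new_order_common = NC
data = data.reshape(3, 1, 2)        # reshape:   NC   -> new_order = NTC (T has size 1)

print(data.shape)  # (3, 1, 2)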
Example #3
def template(x_shape=[3, 6],
             x_order=OrderNC,
             y_order=OrderNC,
             indices=AxisKeyDict([Axis.N, Axis.C],
                                 [slice(None), slice(None)]),
             description: str = ""):
    # vx = np.random.rand(*x_shape)
    vx = np.arange(np.prod(x_shape)).reshape(x_shape)
    x_remain_axes = [a for a in x_order.axes if a in y_order.axes]
    vy = np.transpose(
        vx[tuple(indices[a] for a in x_order.axes)],
        [x_remain_axes.index(a) for a in y_order.axes if a in x_remain_axes])
    for i, a in enumerate(y_order.axes):
        if a not in x_order.axes:
            vy = np.expand_dims(vy, i)

    x = Variable(vx.shape, order=x_order)
    y, = Slice(None, indices=indices)(x)

    y.change_order(y_order)

    assert list(vy.shape) == list(y.shape)

    generate_kernel_test_case(
        description=f"Slice {description}",
        graph=Graph([x], [y]),
        backend=["webgpu", "webgl", "webassembly"],
        inputs={x: vx},
        expected={y: vy},
    )
Example #4
def test_reverse_index():
    template(x_shape=[3, 6],
             x_order=OrderNC,
             y_order=OrderNC,
             indices=AxisKeyDict([Axis.N, Axis.C],
                                 [slice(0, 2), slice(5, 1, -1)]),
             description="reverse index")
Example #5
def _convert_slice(converter: ONNXConverter, onnx_op: INodeProto):
    # ONNX "Slice" (attribute-based form): 'starts', 'ends' and the optional 'axes'
    # attribute give the slice range per axis; axes not listed are left untouched.
    # https://mil-tokyo.github.io/webdnn/docs/_modules/webdnn/graph/operators/slice.html
    x = converter.get_variable(onnx_op.input[0])
    attrs = attribute_dict(onnx_op)

    starts = list(attrs["starts"].ints)
    ends = list(attrs["ends"].ints)
    # 'axes' is optional in the ONNX spec; when omitted it defaults to [0, ..., len(starts) - 1].
    axes = list(attrs["axes"].ints) if "axes" in attrs else list(range(len(starts)))

    # Start from full slices and narrow only the axes named in the attribute.
    indices = AxisKeyDict(x.order.axes, [slice(None) for _ in x.order.axes])
    for axis_index, start, end in zip(axes, starts, ends):
        indices[x.order.axes[axis_index]] = slice(start, end)

    y, = Slice(None, indices=indices)(x)

    converter.set_variable(onnx_op.output[0], y)
Example #6
    def __call__(self, *xs: "variable.Variable"):
        y_axes = []
        y_shape_dict = AxisKeyDict()
        for i, x in enumerate(xs):
            for axis in x.order.axes:
                if axis in y_axes:
                    if y_shape_dict[axis] == 1:
                        # broadcast
                        y_shape_dict[axis] = x.shape_dict[axis]
                else:
                    y_axes.append(axis)
                    y_shape_dict[axis] = x.shape_dict[axis]

                if Placeholder.check_resolved(x.shape_dict[axis]):
                    if Placeholder.check_resolved(y_shape_dict[axis]):
                        assert y_shape_dict[axis] == x.shape_dict[axis] or x.shape_dict[axis] == 1, \
                            "All input variables of elementwise operator should be same shape: " \
                            f"y.shape_dict[{axis}]={y_shape_dict[axis]}, " \
                            f"x{i}.shape_dict[{axis}]={x.shape_dict[axis]}"
                    else:
                        y_shape_dict[axis] = x.shape_dict[axis]

            self.append_input(f"x{i}", x)

        y = variable.Variable([y_shape_dict[axis] for axis in y_axes],
                              Order(y_axes))
        self.append_output("y", y)
        return y,
Example #7
    def __call__(self, *xs: "variable.Variable"):
        y_axes = []
        y_shape_dict = AxisKeyDict()

        # Check variables in descending order of their number of dimensions.
        # Without this, when x0.order=C and x1.order=NC the output order would be CN; the expected result is NC.
        xs_order = [(i, x) for i, x in enumerate(xs)]
        xs_order.sort(key=lambda d: d[1].ndim, reverse=True)

        for i, x in xs_order:
            for axis in x.order.axes:
                if axis in y_axes:
                    if y_shape_dict[axis] == 1:
                        # broadcast
                        y_shape_dict[axis] = x.shape_dict[axis]
                else:
                    y_axes.append(axis)
                    y_shape_dict[axis] = x.shape_dict[axis]

                if Placeholder.check_resolved(x.shape_dict[axis]):
                    if Placeholder.check_resolved(y_shape_dict[axis]):
                        assert y_shape_dict[axis] == x.shape_dict[axis] or x.shape_dict[axis] == 1, \
                            "All input variables of elementwise operator should be same shape: " \
                            f"y.shape_dict[{axis}]={y_shape_dict[axis]}, " \
                            f"x{i}.shape_dict[{axis}]={x.shape_dict[axis]}"
                    else:
                        y_shape_dict[axis] = x.shape_dict[axis]

        y = variable.Variable([y_shape_dict[axis] for axis in y_axes], Order(y_axes))
        ChannelMode.set(y, ChannelModeEnum.R)

        for i, x in enumerate(xs):
            self.append_input(f"x{i}", x)
        self.append_output("y", y)
        return y,
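
The effect of the sort can be shown with a tiny stand-alone sketch (axis names as plain strings, no webdnn imports): visiting x0.order=C first yields CN, while visiting the higher-rank input first yields the expected NC.

def collect_axes(orders):
    # Same axis-collection loop as above, reduced to lists of axis names.
    y_axes = []
    for axes in orders:
        for axis in axes:
            if axis not in y_axes:
                y_axes.append(axis)
    return y_axes

print(collect_axes([["C"], ["N", "C"]]))                                 # ['C', 'N']
print(collect_axes(sorted([["C"], ["N", "C"]], key=len, reverse=True)))  # ['N', 'C']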
Example #8
    def __getitem__(self, slices) -> "Variable":
        slices = list(slices) if isinstance(slices, Sequence) else [slices]

        if Ellipsis in slices:
            ellipsis_position = slices.index(Ellipsis)
            slices.remove(Ellipsis)
        else:
            ellipsis_position = len(slices)

        num_new_axis = slices.count(None)
        while len(slices) - num_new_axis < self.ndim:
            slices.insert(ellipsis_position, slice(None))

        x_axis_index = 0
        indices = AxisKeyDict()
        for index in slices:
            if isinstance(index, (slice, int)):
                indices[self.order.axes[x_axis_index]] = index
                x_axis_index += 1

            elif index is None:
                indices[Axis()] = None

            else:
                raise TypeError(
                    "Variable indices must be sequence of integers, slices, ellipsis, or None"
                )

        return webdnn.graph.operators.slice.Slice(None,
                                                  indices=indices)(self)[0]
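
A minimal usage sketch of this __getitem__ (the import paths are assumed from webdnn's source layout): a slice narrows an axis, an integer index removes it, Ellipsis fills the untouched axes, and None appends a new size-1 axis.

from webdnn.graph.order import OrderNHWC
from webdnn.graph.variable import Variable

x = Variable([2, 3, 4, 5], OrderNHWC)
y = x[0:1, ..., 0, None]   # slice N, keep H and W, drop C (int index), append a new axis

print(y.shape)  # expected: [1, 3, 4, 1]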
Example #9
def test_large_step():
    template(x_shape=[3, 6],
             x_order=OrderNC,
             y_order=OrderNC,
             indices=AxisKeyDict([Axis.N, Axis.C],
                                 [slice(0, 2), slice(0, 6, 2)]),
             description="large step")
Example #10
    def combine_axes(self, axes: Sequence[Axis], axis: Axis) -> "Variable":
        """combine_axes(shape, axes, axis)
        Combine some axes into one axis. Combined axes must be adjacent

        .. code::

            x = Variable([2, 3, 4, 5], OrderNHWC)

            y = x.combine_axes([Axis.W, Axis.H], Axis.W)
            # # same as the following code. Note that in_order is OrderNWHC, not OrderNHWC, because the "axes" parameter is [W, H].
            # y, = Reshape(None, in_order=OrderNWHC, out_order=OrderNWC, out_shape=[2, 12, 5])(x)

            print(y.shape, y.order)
            >>> "[2, 12, 5]", "[N, H, C]"

        Args:
            axes (sequence of :class:`~Axis`): Axes to be combined. All of them must be contained in the original variable.
            axis (:class:`~Axis`): Axis created from `axes`. If a new axis is specified, it is inserted at the position of the combined axes.

        Returns:
            (:class:`~Variable`) reshaped variable
        """
        # check if all combined axes are adjacent
        i_max = 0
        i_min = self.ndim
        for combined_axis in axes:
            i = self.order.axes_dict[combined_axis]
            i_max = max(i_max, i)
            i_min = min(i_min, i)

        assert i_max - i_min + 1 == len(axes), f"""
All combined axes must be adjacent:
    (order) = {self.order}
    (combined axes) = {axes}"""

        # construct in_order, out_order and out_shape
        in_order = Order(self.order.axes[:i_min] + tuple(axes) +
                         self.order.axes[i_max + 1:])

        out_axes = list(self.order.axes)
        out_shape_dict = AxisKeyDict(self.shape_dict)
        if axis not in out_shape_dict:
            out_shape_dict[axis] = 1
            out_axes.insert(i_min, axis)

        for combined_axis in axes:
            if combined_axis == axis:
                continue

            out_shape_dict[axis] *= out_shape_dict[combined_axis]
            out_axes.remove(combined_axis)

        out_order = Order(out_axes)

        return webdnn.graph.operators.reshape.Reshape(
            None,
            in_order=in_order,
            out_order=out_order,
            out_shape=[out_shape_dict[a] for a in out_axes])(self)[0]
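
The note about in_order being OrderNWHC has a direct NumPy analogue: because the combined axes are given as [W, H], the data is first laid out as NWHC and only then flattened, which is what the Reshape with in_order=OrderNWHC does.

import numpy as np

x = np.arange(2 * 3 * 4 * 5).reshape(2, 3, 4, 5)  # NHWC

y = x.transpose(0, 2, 1, 3).reshape(2, 12, 5)     # NHWC -> NWHC -> NWC
print(y.shape)  # (2, 12, 5)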
Example #11
def test_global():
    template(ksize=(3, 4),
             stride=1,
             pad=0,
             height=3,
             width=4,
             expected_dict=AxisKeyDict([Axis.N, Axis.H, Axis.W, Axis.C],
                                       [2, 1, 1, 4]))
Example #12
def test_projection():
    template(ksize=1,
             stride=1,
             pad=0,
             height=(2 - 1) * 1 + 1 - 2 * 0,
             width=(3 - 1) * 1 + 1 - 2 * 0,
             expected_dict=AxisKeyDict([Axis.N, Axis.H, Axis.W, Axis.C],
                                       [2, 2, 3, 4]))
Example #13
def test_remove_axis():
    template(
        x_shape=[3, 4, 5, 6],
        x_order=OrderNHWC,
        y_order=OrderNC,
        indices=AxisKeyDict([Axis.N, Axis.H, Axis.W, Axis.C],
                            [slice(0, 2), 2, 4, slice(3, 5)]),
        description="remove axis")
Example #14
def test_insert_axis():
    template(x_shape=[3, 6],
             x_order=OrderNC,
             y_order=OrderNHWC,
             indices=AxisKeyDict([Axis.N, Axis.H, Axis.W, Axis.C],
                                 [slice(0, 2), None, None,
                                  slice(3, 5)]),
             description="insert axis")
Example #15
def optimize_loop_structure(variables: List[Variable], key_variable: Variable):
    """
    Optimize loop structure to iterate each element in variables

    Returns:
        (tuple): two elements are returned

        - First one is shape dictionary of all variables.
        - Second one is stride dictionary of all variables.
    """
    orders, shape_dicts = _simplify_orders(
        variables
    )  # type: Dict[Variable, Order], Dict[Variable, AxisKeyDict[List[int]]]
    shapes = {
        v: [shape_dicts[v][a] for a in orders[v].axes]
        for v in variables
    }
    strides = {
        v:
        [mul(shapes[v][orders[v].axes_dict[a] + 1:]) for a in orders[v].axes]
        for v in variables
    }
    stride_dicts = {
        v: AxisKeyDict(orders[v].axes, strides[v])
        for v in variables
    }

    # re-ordering
    axes = []
    for v in sorted(variables, key=lambda v: orders[v].ndim):
        axes += [axis for axis in orders[v].axes if axis not in axes]

    orders = {
        v: Order(list(filter(lambda x: x in orders[v].axes, axes)))
        for v in variables
    }
    shapes = {
        v: [shape_dicts[v][a] for a in orders[v].axes]
        for v in variables
    }
    strides = {
        v: [stride_dicts[v][a] for a in orders[v].axes]
        for v in variables
    }

    key_order = orders[key_variable]
    if key_order.ndim > 4:
        raise NotImplementedError(
            'Currently, loop nest depth larger than 4 is not supported')

    for v in variables:
        shape = shapes[v]
        stride = strides[v]
        while len(shape) < 4:
            stride.append(1)
            shape.append(1)

    return shapes, strides
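
The stride computation in the middle of this function is the usual row-major rule: the stride of an axis is the product of the sizes of the axes that follow it. A stand-alone check (with a local mul standing in for webdnn's helper):

from functools import reduce

def mul(xs):
    return reduce(lambda a, b: a * b, xs, 1)

shape = [2, 3, 4]
strides = [mul(shape[i + 1:]) for i in range(len(shape))]
print(strides)  # [12, 4, 1]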
Example #16
File: core.py Project: zhangaz1/webdnn
def _convert_repeat_vector(converter: KerasConverter, k_op: "keras.layers.RepeatVector"):
    x = converter.get_variable(converter.get_input_tensor(k_op)[0])
    new_axis = Axis()
    multiplier = AxisKeyDict(x.order.axes, [1, 1])
    multiplier[new_axis] = k_op.n

    x = x.reshape(shape=(x.shape[0], 1, x.shape[1]), order=Order([x.order.axes[0], new_axis, x.order.axes[1]]))
    y, = Tile(None, multiplier=multiplier)(x)
    converter.set_variable(converter.get_output_tensor(k_op)[0], y)
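
What this converter does can be reproduced in NumPy: RepeatVector(n) turns an (N, C) input into (N, n, C) by inserting a size-1 axis and tiling it n times.

import numpy as np

x = np.arange(6).reshape(2, 3)               # (N, C)
y = np.tile(x[:, np.newaxis, :], (1, 4, 1))  # RepeatVector(4) -> (N, 4, C)
print(y.shape)  # (2, 4, 3)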
Example #17
    def exec(self):
        x = self.inputs["x"]

        y_shape_dict = AxisKeyDict()
        for axis, index in self.indices.items():
            if isinstance(index, slice):
                index = normalize_slice(index, x.shape_dict[axis])
                y_shape_dict[axis] = ((abs(index.stop - index.start) - 1) // abs(index.step)) + 1

            elif isinstance(index, int):
                pass  # Remove axis

            elif index is None:
                y_shape_dict[axis] = 1  # Insert axis

        y = Variable(list(y_shape_dict.values()), Order(list(y_shape_dict.keys())))
        self.append_output("y", y)
        return y,
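
The output-length formula for a normalized slice agrees with Python's own slicing; for the slice(5, 1, -1) case used by the "reverse index" test above:

start, stop, step = 5, 1, -1
length = ((abs(stop - start) - 1) // abs(step)) + 1
print(length, len(range(6)[5:1:-1]))  # 4 4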
Example #18
    def exec(self):
        A = self.inputs["A"]
        B = self.inputs["B"]
        c_shape_dict = AxisKeyDict()

        for axis in A.order.axes:
            if axis not in self.axes[0]:
                c_shape_dict[axis] = A.shape_dict[axis]

        for axis in B.order.axes:
            if axis not in self.axes[1]:
                c_shape_dict[axis] = B.shape_dict[axis]

        C = Variable(list(c_shape_dict.values()), Order(list(c_shape_dict.keys())))
        self.append_output("C", C)
        for axis in C.order.axes:
            self.attributes.add(Tensorwise(self, axis=axis))
        return C,
Example #19
def test():
    template(
        x_shape=[3, 4, 5, 6],
        x_order=OrderNHWC,
        y_order=OrderNHWC,
        indices=AxisKeyDict(
            [Axis.N, Axis.H, Axis.W, Axis.C],
            [slice(0, 2), slice(1, 3),
             slice(2, 4), slice(3, 5)]),
        description="normal")
Example #20
def test_change_order():
    template(
        x_shape=[3, 4, 5, 6],
        x_order=OrderNHWC,
        y_order=OrderNCHW,
        indices=AxisKeyDict(
            [Axis.N, Axis.H, Axis.W, Axis.C],
            [slice(0, 2), slice(1, 3),
             slice(2, 4), slice(3, 5)]),
        description="change order")
Example #21
def template(N=2, T=3, vocabulary_size=4, feature_size=5, order_x=OrderNT, order_w=OrderNC):
    x = Variable([N, T], OrderNT)
    w = Variable([feature_size, vocabulary_size], OrderNC)

    x.change_order(order_x)
    w.change_order(order_w)

    y, = Embedding(None)(x, w)

    assert_shape(y, AxisKeyDict([Axis.N, Axis.T, Axis.C], [N, T, feature_size]))
Example #22
def test_no_slice():
    template(
        x_shape=[3, 4, 5, 6],
        x_order=OrderNHWC,
        y_order=OrderNHWC,
        indices=AxisKeyDict(
            [Axis.N, Axis.H, Axis.W, Axis.C],
            [slice(None), slice(None),
             slice(None), slice(None)]),
        description="no slice")
Example #23
def convert_r_to_rgba(op: ConvertRtoRGBA) -> List[Kernel]:
    x = op.inputs["x0"]
    y = op.outputs["y"]

    assert ChannelMode.get(x) == ChannelModeEnum.R
    assert ChannelMode.get(y) == ChannelModeEnum.RGBA

    orders, shape_dicts = simplify_orders([x, y])
    shapes = {v: [shape_dicts[v][a] for a in orders[v].axes] for v in [x, y]}
    strides = {
        v:
        [mul(shapes[v][orders[v].axes_dict[a] + 1:]) for a in orders[v].axes]
        for v in [x, y]
    }
    stride_dicts = {v: AxisKeyDict(orders[v].axes, strides[v]) for v in [x, y]}

    # Reorder x's shape and strides to match y's order
    shapes[x] = [
        shape_dicts[x][a] if a in orders[x].axes else 1 for a in orders[y].axes
    ]
    strides[x] = [
        stride_dicts[x][a] if a in orders[x].axes else 1
        for a in orders[y].axes
    ]

    # Padding shapes and strides to 4D
    if orders[y].ndim > 4:
        raise NotImplementedError(f"Too large number of dimension: {y}")

    for v in [x, y]:
        shape = shapes[v]
        stride = strides[v]
        while len(shape) < 4:
            stride.append(1)
            shape.append(1)

    name_injector = KernelNameInjector(op)
    uniform_injector = UniformInjector()
    uniform_injector.register({
        "sampler_x": x,
        "texture_stride_y": texture_stride(y),
        "variable_shape_y": shapes[y],
        "variable_stride_y": strides[y],
        "texture_shape_x": texture_shape(x),
        "texture_stride_x": texture_stride(x),
        "variable_shape_x": shapes[x],
        "variable_stride_x": strides[x],
    })
    source = template
    source = uniform_injector.inject(source)
    source = name_injector.inject(source)
    kernel = Kernel(source, name_injector.name, uniform_injector.samplers,
                    uniform_injector.uniforms, y)

    return [kernel]
Example #24
    def __init__(self, batch_size: int = 1):
        if not FLAG_KERAS_INSTALLED:
            raise ImportError(
                "Keras and TensorFlow are required but could not be imported."
            )

        self._input_index_dict = defaultdict(lambda: 0)
        self._output_index_dict = defaultdict(lambda: 0)
        self._placeholders = AxisKeyDict(
            [Axis.N], [Placeholder(label=Axis.N.name, value=batch_size)])
        self._input_tensor_cache = None  # type: List[tf.Tensor]
        self._output_tensor_cache = None  # type: List[tf.Tensor]
Example #25
File: array.py Project: VislaLabs/webdnn-1
def _convert_tile(converter: ChainerConverter, c_op: "chainer.functions.Tile"):
    x = converter.get_variable(c_op.inputs[0])
    reps = c_op.reps

    if x.ndim > len(reps):
        reps = (1, ) * (x.ndim - len(reps)) + reps

    else:
        while x.ndim < len(c_op.reps):
            x = x.expand_dims(Axis(), 0)

    y, = Tile(None, AxisKeyDict(x.order.axes, reps))(x)
    converter.set_variable(c_op.outputs[0](), y)
Example #26
File: util.py Project: zhangaz1/webdnn
def template_test_unary_operator(OperatorClass,
                                 operator_kwargs=None,
                                 test1d=True,
                                 test2d=True,
                                 test3d=True,
                                 test4d=True,
                                 axes=None,
                                 orders=None,
                                 shape_dict=None,
                                 expected_dict=None):
    """
    Test template for unary operator

    Args:
        OperatorClass: Target operator class
        operator_kwargs: Operator keyword arguments
        test1d: If True, the test with a 1D input tensor is run
        test2d: If True, the test with a 2D input tensor is run
        test3d: If True, the test with a 3D input tensor is run
        test4d: If True, the test with a 4D input tensor is run
        orders: Orders for the test input variable. If :code:`None`, all combinations of axes are tested.
        axes: If specified and :code:`orders` is not specified, all combinations of the axes in :code:`axes` are tested.
        shape_dict: Input variable's shape
        expected_dict: Expected shape of output variable
    """

    if operator_kwargs is None:
        operator_kwargs = {}

    if axes is None:
        axes = [Axis.N, Axis.H, Axis.W, Axis.C, Axis.T]

    if orders is None:
        orders = []
        for ndim, flag in {1: test1d, 2: test2d, 3: test3d, 4: test4d}.items():
            if not flag:
                continue

            for axis in permutations(axes, ndim):
                orders.append(Order(axis))

    if shape_dict is None:
        shape_dict = AxisKeyDict()
        for i, axis in enumerate(axes):
            shape_dict[axis] = i + 5

    for order in orders:
        x = Variable([shape_dict[a] for a in order.axes], order)
        y, = OperatorClass(None, **operator_kwargs)(x)
        assert_shape(y,
                     x.shape_dict if expected_dict is None else expected_dict)
Example #27
def reduce_kernel(op: Reduce):
    x = op.inputs["x"]
    y = op.outputs["y"]
    axis = op.axis

    orders, shape_dicts = simplify_orders([x, y], keep_axes=[axis])

    # Padding shapes and strides to 4D
    if orders[y].ndim > 4:
        raise NotImplementedError(f"Too large number of dimension: {y}")

    shapes = {v: [shape_dicts[v][a] for a in orders[v].axes] for v in [x, y]}
    strides = {
        v:
        [mul(shapes[v][orders[v].axes_dict[a] + 1:]) for a in orders[v].axes]
        for v in [x, y]
    }
    stride_dicts = {v: AxisKeyDict(orders[v].axes, strides[v]) for v in [x, y]}

    # Reorder x's shape and strides to match y's order
    x_virtual_shape = [
        shape_dicts[x][a] if a in orders[x].axes else 1 for a in orders[y].axes
    ]
    x_virtual_stride = [
        stride_dicts[x][a] if a in orders[x].axes else 1
        for a in orders[y].axes
    ]
    while len(x_virtual_shape) < 3:
        x_virtual_stride.append(1)
        x_virtual_shape.append(stride_dicts[x][axis])
    x_virtual_shape.append(shape_dicts[x][axis])
    x_virtual_stride.append(stride_dicts[x][axis])

    y_virtual_shape = shapes[y]
    y_virtual_stride = strides[y]
    while len(y_virtual_shape) < 4:
        y_virtual_stride.append(1)
        y_virtual_shape.append(1)

    code = _generate_template(op,
                              reduction_size=shape_dicts[x][axis],
                              shapes={
                                  y: y_virtual_shape,
                                  x: x_virtual_shape
                              },
                              strides={
                                  y: y_virtual_stride,
                                  x: x_virtual_stride
                              })
    source = code.generate()
    return [Kernel(source, code.name, code.samplers, code.uniforms, y)]
Example #28
def main(col_shape=[1, 5, 5, 3, 3, 6],
         col_order=OrderNHWKKC,
         ksize=3,
         stride=1,
         padding=1,
         expected_shape_dict: AxisKeyDict[int] = AxisKeyDict(
             OrderNHWC.axes, [1, 5, 5, 6])):
    op = Col2Im(None, ksize=ksize, stride=stride, padding=padding)

    x = Variable(col_shape, col_order)
    y, = op(x)

    for axis in y.order.axes:
        assert y.shape_dict[axis] == expected_shape_dict[axis]
Example #29
def tile_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    x = converter.get_variable(tf_op.inputs[0])
    multiplier = converter.get_variable(tf_op.inputs[1])

    if not isinstance(multiplier, ConstantVariable):
        raise NotImplementedError(
            "[TensorFlowConverter] Operator 'Tile' with dynamic multiplier is not supported yet."
        )

    multiplier = AxisKeyDict(x.order.axes,
                             multiplier.data.astype(int).flatten().tolist())
    y, = Tile(None, multiplier=multiplier)(x)

    converter.set_variable(tf_op.outputs[0], y)
Example #30
def pad_v2_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    x = converter.get_variable(tf_op.inputs[0])

    paddings = converter.get_variable(tf_op.inputs[1])
    if not isinstance(paddings, ConstantVariable):
        raise NotImplementedError(
            '[TensorFlowConverter] PadV2 with dynamic padding size is not supported'
        )

    paddings = paddings.data.astype(int).tolist()

    constant_values = converter.get_variable(tf_op.inputs[2]).change_order(
        x.order)

    for axis, (pad_begin, pad_end) in zip(x.order.axes, paddings):
        xs = []

        if pad_begin > 0:
            multiplier = AxisKeyDict(x.order.axes, [
                pad_begin if a == axis else x.shape_dict[a]
                for a in x.order.axes
            ])
            xs.append(Tile(None, multiplier)(constant_values)[0])

        xs.append(x)

        if pad_end > 0:
            multiplier = AxisKeyDict(x.order.axes, [
                pad_end if a == axis else x.shape_dict[a] for a in x.order.axes
            ])
            xs.append(Tile(None, multiplier)(constant_values)[0])

        if len(xs) > 1:
            x, = Concat(None, axis=axis)(*xs)

    converter.set_variable(tf_op.outputs[0], x)
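
The Tile + Concat loop implements constant padding one axis at a time; a NumPy analogue (illustrative only) of a single-axis step:

import numpy as np

x = np.arange(6, dtype=float).reshape(2, 3)
pad_begin, pad_end, value = 1, 2, 9.0

block = np.full((x.shape[0], 1), value)
padded = np.concatenate(
    [np.tile(block, (1, pad_begin)), x, np.tile(block, (1, pad_end))], axis=1)

assert np.array_equal(padded, np.pad(x, ((0, 0), (pad_begin, pad_end)), constant_values=value))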