def _convert_min(converter: ChainerConverter, c_op: "chainer.functions.Min"):
    x = converter.get_variable(c_op.inputs[0])

    # Reduce every axis when c_op.axis is None, otherwise only the requested axes.
    for axis in list(x.order.axes) if c_op.axis is None else [x.order.axes[i] for i in c_op.axis]:
        x, = Min(None, axis=axis)(x)

        # Without keepdims, drop the reduced length-1 axis (but never the last remaining axis).
        if not c_op.keepdims and x.ndim > 1:
            x = x.squeeze(axis)

    # c_op.outputs holds weak references, hence the call to dereference the output node.
    converter.set_variable(c_op.outputs[0](), x)
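# The handler above (presumably registered elsewhere in the module with the converter's
# handler-registration decorator) lowers a multi-axis min to a chain of single-axis
# reductions, each optionally followed by a squeeze. A minimal NumPy-only sketch of that
# strategy, independent of WebDNN internals:
import numpy as np

def chained_min(v, axes, keepdims=False):
    for a in axes:
        # Reduce one axis at a time, keeping it as a length-1 dimension for now.
        v = np.min(v, axis=a, keepdims=True)
    if not keepdims:
        # Drop the reduced length-1 axes, mirroring x.squeeze(axis) above.
        v = np.squeeze(v, axis=tuple(axes))
    return v

_x = np.arange(24).reshape(2, 3, 4)
assert np.array_equal(chained_min(_x, (0, 2)), np.min(_x, axis=(0, 2)))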
def _convert_reduce_min(converter: ONNXConverter, onnx_op: INodeProto):
    x = converter.get_variable(onnx_op.input[0])

    attrs = attribute_dict(onnx_op)
    axes = attrs["axes"].ints
    # ONNX defines keepdims with a default of 1 (true), so a missing attribute means keep.
    keepdims = (attrs["keepdims"].i if "keepdims" in attrs else 1) == 1

    for a in axes:
        x, = Min(None, axis=x.order.axes[a])(x)

    if not keepdims:
        x = x.squeeze(axis=[x.order.axes[i] for i in axes])

    converter.set_variable(onnx_op.output[0], x)
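# The final squeeze above reuses the original integer indices in `axes`, which only works if
# each per-axis reduction leaves a length-1 dimension in place so later indices do not shift
# (as the handler's structure suggests). The same reasoning, checked in plain NumPy:
import numpy as np

_x = np.random.rand(2, 3, 4, 5)
_axes = [1, 3]

_v = _x
for _a in _axes:
    # Keep the reduced axis as length 1 so the indices in _axes stay valid.
    _v = np.min(_v, axis=_a, keepdims=True)

# keepdims == 0: remove every reduced axis at once, as in the handler.
_v = np.squeeze(_v, axis=tuple(_axes))
assert np.array_equal(_v, np.min(_x, axis=tuple(_axes)))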
def min_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    x = converter.get_variable(tf_op.inputs[0])
    axis = converter.get_variable(tf_op.inputs[1])
    assert isinstance(axis, ConstantVariable), \
        "[TensorFlowConverter] Operation 'Min' with dynamic axis is not supported yet."

    # The axis list is fully built before the loop starts, so rebinding `axis` as the loop
    # variable does not affect the indices being iterated.
    for axis in [x.order.axes[i] for i in axis.data.astype(int).flatten().tolist()]:
        x, = Min(None, axis=axis)(x)

        if not tf_op.get_attr("keep_dims") and x.ndim > 1:
            x = x.squeeze(axis)

    converter.set_variable(tf_op.outputs[0], x)
def template(x_order=OrderNHWC, y_order=OrderNHW, axis=Axis.C, description: str = ""):
    vx = np.arange(120).reshape(2, 3, 4, 5)
    vy = np.min(vx, axis=OrderNHWC.axes_dict[axis])

    x = Variable(vx.shape, order=OrderNHWC)
    y, = Min(None, axis=axis)(x)

    x.change_order(x_order)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"Min {description}",
        graph=Graph([x], [y]),
        backend=["webgl"],
        inputs={x: np.transpose(vx, [OrderNHWC.axes_dict[a] for a in x.order.axes])},
        expected={y: np.transpose(vy, [OrderNHW.axes_dict[a] for a in y.order.axes])},
    )
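# Hypothetical invocations of the template above (the test names and permuted order are
# illustrative assumptions, not taken from the source): the default call checks the NHWC
# layout, the second one exercises a transposed output layout.
def test_min_webgl_default():
    template(description="default orders")

def test_min_webgl_transposed_output():
    template(y_order=Order([Axis.W, Axis.N, Axis.H]), description="y_order=WNH")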
def min_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    x = converter.get_variable(tf_op.inputs[0])
    axis = converter.get_variable(tf_op.inputs[1])
    v = x
    assert isinstance(axis, ConstantVariable), \
        "[TensorFlowConverter] Operation 'Min' with dynamic axis is not supported yet."

    # Reduce from the highest axis index downward so that the indices of axes still to be
    # reduced are not shifted by earlier reductions.
    for i_axis in sorted(axis.data.astype(int).flatten().tolist(), reverse=True):
        axis = v.order.axes[i_axis]
        v, = Min(None, axis=axis)(v)

    if tf_op.get_attr("keep_dims") or x.ndim == 1:
        # Restore a length-1 dimension for every reduced axis so the output keeps x's rank.
        v = v.reshape(order=x.order,
                      shape=[v.shape_dict[a] if a in v.order.axes else 1 for a in x.order.axes])

    converter.set_variable(tf_op.outputs[0], v)
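# A NumPy-only sketch of the same keep_dims handling, assuming nothing about WebDNN: reduce
# in descending axis order, then reshape length-1 dimensions back in for the reduced axes.
import numpy as np

_x = np.random.rand(2, 3, 4, 5)
_reduce_axes = [1, 3]

_v = _x
for _i in sorted(_reduce_axes, reverse=True):
    # Descending order keeps the remaining integer indices valid as axes disappear.
    _v = np.min(_v, axis=_i)

_keep_shape = [1 if _i in _reduce_axes else _n for _i, _n in enumerate(_x.shape)]
assert np.array_equal(_v.reshape(_keep_shape),
                      np.min(_x, axis=tuple(_reduce_axes), keepdims=True))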
def template(x_shape=[2, 3, 4, 5], x_order=OrderNHWC, y_order=OrderNHWC, axis=Axis.C, description: str = ""):
    vx = np.random.rand(*x_shape)
    vy = np.min(vx, axis=x_order.axes_dict[axis], keepdims=True)

    x = Variable(vx.shape, order=x_order)
    y, = Min(None, axis=axis)(x)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"Min {description}",
        graph=Graph([x], [y]),
        backend=["webgpu", "webgl", "webassembly"],
        inputs={x: vx},
        expected={y: np.transpose(vy, [x.order.axes_dict[a] for a in y.order.axes])},
    )
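# Hypothetical invocations of the keepdims-style template above (names and argument choices
# are illustrative, not from the source): the reduced axis stays at length 1, so y_order may
# be any permutation of the input axes.
def test_min_axis_c():
    template(axis=Axis.C, description="axis=C")

def test_min_axis_h_permuted_output():
    template(axis=Axis.H, y_order=OrderNCHW, description="axis=H, y_order=NCHW")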
def test():
    x = Variable([2, 3, 4, 5], OrderNHWC)
    y, = Min(None, axis=Axis.C)(x)

    assert y.order == Order([Axis.N, Axis.H, Axis.W])
    assert y.shape == [2, 3, 4]