Example #1
def test_general():
    # "condition_default" is a module-level dict in the original test file; it supplies
    # defaults for the "backend", "x_order", and "y_order" keys read below.
    for condition_custom in [
        {},
        {"x_order": OrderNCHW}
    ]:
        condition = dict(condition_default)
        condition.update(condition_custom)

        vx = np.random.rand(2, 3, 4, 5) - 0.5
        vy = vx.copy()
        vy[vx < 0] = np.exp(vy[vx < 0]) - 1

        x = Variable(vx.shape, order=OrderNHWC)
        y, = Elu(None)(x)

        x.change_order(condition["x_order"])
        y.change_order(condition["y_order"])

        generate_kernel_test_case(
            description=f"Elu: " + (", ".join([f"{k}={v}" for k, v in condition_custom.items()])),
            backend=condition["backend"],
            graph=Graph([x], [y]),
            inputs={x: ConstantVariable(vx, OrderNHWC).change_order(x.order).data},
            expected={y: ConstantVariable(vy, OrderNHWC).change_order(y.order).data},
            raise_skip=False
        )

    raise SkipTest
Example #2
def _convert_elu(converter: ChainerConverter, c_op: "chainer.functions.ELU"):
    x = converter.get_variable(c_op.inputs[0])
    if c_op.alpha == 0:
        y, = Relu(None)(x)

    elif c_op.alpha == 1:
        y, = Elu(None)(x)

    else:
        y1, = Elu(None)(x)
        y2, = Relu(None)(x)
        y = (y1 * c_op.alpha) + y2 * (1 - c_op.alpha)

    converter.set_variable(c_op.outputs[0](), y)
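Examples 2, 3, and 9 build arbitrary-alpha ELU out of webdnn's fixed-alpha operators using the identity ELU_alpha(x) = alpha * ELU_1(x) + (1 - alpha) * ReLU(x): for x >= 0 both sides equal x, and for x < 0 the ReLU term vanishes and alpha * (exp(x) - 1) remains. A minimal NumPy sketch checking the identity (the elu1/relu/elu helpers below are local to the sketch, not webdnn operators):

import numpy as np

def elu1(x):
    # ELU with alpha = 1: x for x >= 0, exp(x) - 1 for x < 0
    return np.where(x >= 0, x, np.exp(x) - 1)

def relu(x):
    return np.maximum(x, 0)

def elu(x, alpha):
    # reference ELU with arbitrary alpha
    return np.where(x >= 0, x, alpha * (np.exp(x) - 1))

x = np.linspace(-3, 3, 101)
for alpha in [0.0, 0.3, 1.0, 2.5]:
    decomposed = alpha * elu1(x) + (1 - alpha) * relu(x)
    assert np.allclose(decomposed, elu(x, alpha))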
Example #3
def _convert_elu(converter: KerasConverter, k_op: "keras.layers.ELU"):
    x = converter.get_variable(converter.get_input_tensor(k_op)[0])
    alpha = float(k_op.alpha)

    if alpha == 1.0:
        y, = Elu(None)(x)

    elif alpha == 0.0:
        y, = Relu(None)(x)

    else:
        y1, = Elu(None)(x)
        y2, = Relu(None)(x)
        y = y1 * alpha + y2 * (1 - alpha)

    converter.set_variable(converter.get_output_tensor(k_op)[0], y)
Example #4
def do_activation(activation: Any, x: Variable) -> Variable:  # Any comes from the typing module
    if activation is keras.activations.relu:
        return Relu(None)(x)[0]

    elif activation is keras.activations.sigmoid:
        return Sigmoid(None)(x)[0]

    elif activation is keras.activations.hard_sigmoid:
        return HardSigmoid(None)(x)[0]

    elif activation is keras.activations.softplus:
        return Softplus(None, beta=1.0)(x)[0]

    elif activation is keras.activations.softsign:
        return Softsign(None)(x)[0]

    elif activation is keras.activations.softmax:
        return Softmax(None, axis=x.order.axes[-1])(x)[0]

    elif activation is keras.activations.elu:
        return Elu(None)(x)[0]

    elif activation is keras.activations.tanh:
        return Tanh(None)(x)[0]

    elif activation is keras.activations.linear:
        return x

    else:
        raise NotImplementedError(
            f"[KerasConverter] Unknown activation: {activation}")
Example #5
def template(x_order=OrderNHWC, y_order=OrderNHWC, description: str = ""):
    vx = np.random.rand(2, 3, 4, 5) - 0.5
    vy = vx.copy()
    vy[vx < 0] = np.exp(vy[vx < 0]) - 1

    x = Variable(vx.shape, order=OrderNHWC)
    y, = Elu(None)(x)

    x.change_order(x_order)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"Elu {description}",
        graph=Graph([x], [y]),
        inputs={x: np.transpose(vx, [OrderNHWC.axes_dict[a] for a in x.order.axes])},
        expected={y: np.transpose(vy, [OrderNHWC.axes_dict[a] for a in y.order.axes])},
    )
Example #6
def _convert_elu(converter: ONNXConverter, onnx_op: INodeProto):
    x0 = converter.get_variable(onnx_op.input[0])

    attrs = attribute_dict(onnx_op)
    alpha = attrs["alpha"].f
    if alpha != 1:
        raise NotImplementedError(
            "[ONNXConverter] Operator \"Elu\" is supported only the case when parameter \"alpha\" is 1."
        )

    y, = Elu(None)(x0)
    converter.set_variable(onnx_op.output[0], y)
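The NotImplementedError above reflects that webdnn's Elu operator has no alpha parameter (it implements alpha = 1). A hedged sketch of how the converter could accept other alpha values by reusing the Elu/Relu decomposition from Examples 2, 3, and 9; this is illustrative and not part of the webdnn ONNX converter:

def _convert_elu_any_alpha(converter: ONNXConverter, onnx_op: INodeProto):
    # hypothetical variant of _convert_elu that handles arbitrary "alpha"
    x0 = converter.get_variable(onnx_op.input[0])

    attrs = attribute_dict(onnx_op)
    alpha = attrs["alpha"].f if "alpha" in attrs else 1.0  # ONNX defaults alpha to 1.0

    if alpha == 1.0:
        y, = Elu(None)(x0)
    elif alpha == 0.0:
        y, = Relu(None)(x0)
    else:
        # alpha * ELU_1(x) + (1 - alpha) * ReLU(x), as in Examples 2, 3, and 9
        y1, = Elu(None)(x0)
        y2, = Relu(None)(x0)
        y = y1 * alpha + y2 * (1 - alpha)

    converter.set_variable(onnx_op.output[0], y)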
Example #7
def test_general():
    vx = np.random.rand(2, 3, 4, 5) - 0.5
    vy = vx.copy()
    vy[vx < 0] = np.exp(vy[vx < 0]) - 1

    x = Variable(vx.shape, order=OrderNHWC)
    y, = Elu(None)(x)

    generate_kernel_test_case(description=f"Elu",
                              backend=["webgpu", "webassembly"],
                              graph=Graph([x], [y]),
                              inputs={x: vx},
                              expected={y: vy})
Example #8
def __call__(self, inputs: List[Variable]) -> Tuple[Variable]:
    # wraps the source function's label into a uniquely named webdnn Elu operator
    assert len(inputs) == 1
    opr = Elu(generate_unique_name(self.cfunc.label))
    return opr(inputs[0])
Example #9
def _convert_elu(converter: ChainerConverter, c_op: chainer.functions.ELU):
    x = converter.get_variable(c_op.inputs[0])
    y1, = Elu(None)(x)
    y2, = Relu(None)(x)
    y = y1 * c_op.alpha + y2 * (1 - c_op.alpha)
    converter.set_variable(c_op.outputs[0](), y)