def test_general():
    for condition_custom in [
        {},
        {"x_order": OrderNCHW},
    ]:
        # Merge the per-case overrides into the shared defaults.
        condition = dict(condition_default)
        condition.update(condition_custom)

        vx = np.random.rand(2, 3, 4, 5) - 0.5
        vy = vx / (np.abs(vx) + 1.0)  # softsign: x / (|x| + 1)

        x = Variable(vx.shape, order=OrderNHWC)
        y, = Softsign(None)(x)

        x.change_order(condition["x_order"])
        y.change_order(condition["y_order"])

        generate_kernel_test_case(
            description="Softsign: " + ", ".join([f"{k}={v}" for k, v in condition_custom.items()]),
            backend=condition["backend"],
            graph=Graph([x], [y]),
            inputs={x: ConstantVariable(vx, OrderNHWC).change_order(x.order).data},
            expected={y: ConstantVariable(vy, OrderNHWC).change_order(y.order).data},
            raise_skip=False
        )

    # Each case above was registered with raise_skip=False, so skip the collector test itself.
    raise SkipTest
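# NOTE: `condition_default` is referenced above but defined elsewhere in the original
# test module. The sketch below is an assumption about its shape (keys inferred from
# the lookups above; the backend list is hypothetical) and is only meant to make the
# excerpt self-contained.
condition_default = {
    "x_order": OrderNHWC,
    "y_order": OrderNHWC,
    "backend": ["webgpu", "webassembly", "fallback"],
}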
def do_activation(activation: Any, x: Variable) -> Variable:
    # Map a Keras activation function to the corresponding WebDNN operator
    # and apply it to the variable `x`.
    if activation is keras.activations.relu:
        return Relu(None)(x)[0]
    elif activation is keras.activations.sigmoid:
        return Sigmoid(None)(x)[0]
    elif activation is keras.activations.hard_sigmoid:
        return HardSigmoid(None)(x)[0]
    elif activation is keras.activations.softplus:
        return Softplus(None, beta=1.0)(x)[0]
    elif activation is keras.activations.softsign:
        return Softsign(None)(x)[0]
    elif activation is keras.activations.softmax:
        # Softmax is applied along the innermost axis of the variable's order.
        return Softmax(None, axis=x.order.axes[-1])(x)[0]
    elif activation is keras.activations.elu:
        return Elu(None)(x)[0]
    elif activation is keras.activations.tanh:
        return Tanh(None)(x)[0]
    elif activation is keras.activations.linear:
        # Linear activation is the identity; return the input unchanged.
        return x
    else:
        raise NotImplementedError(f"[KerasConverter] Unknown activation: {activation}")
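# Usage sketch (hypothetical, not part of the original source): a layer handler
# would apply the layer's configured activation to the WebDNN variable it has
# produced for that layer's output, e.g.
#
#     y = do_activation(k_op.activation, y)
#
# where `k_op` is the Keras layer object being converted and `y` is the converted
# output variable. The names `k_op` and `y` are illustrative assumptions.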
def template(x_order=OrderNHWC, y_order=OrderNHWC, description: str = ""):
    vx = np.random.rand(2, 3, 4, 5) - 0.5
    vy = vx / (np.abs(vx) + 1.0)

    x = Variable(vx.shape, order=OrderNHWC)
    y, = Softsign(None)(x)

    x.change_order(x_order)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"Softsign {description}",
        graph=Graph([x], [y]),
        inputs={x: np.transpose(vx, [OrderNHWC.axes_dict[a] for a in x.order.axes])},
        expected={y: np.transpose(vy, [OrderNHWC.axes_dict[a] for a in y.order.axes])},
    )
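# Example of how concrete tests would invoke the template above. The test names
# and the specific order combinations are illustrative assumptions; the actual
# suite may exercise different cases.
def test():
    template()


def test_different_order():
    template(x_order=OrderNCHW, y_order=OrderNCHW, description="x_order=NCHW, y_order=NCHW")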
def _convert_softsign(converter: ONNXConverter, onnx_op: INodeProto):
    # Convert an ONNX "Softsign" node into a WebDNN Softsign operator.
    x = converter.get_variable(onnx_op.input[0])
    converter.set_variable(onnx_op.output[0], Softsign(None)(x)[0])
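# Assumption: like other ONNX operator handlers in this converter, this function
# would normally be registered for the "Softsign" op type, e.g. via a decorator
# such as @ONNXConverter.register_handler("Softsign"). The registration is not
# shown in this excerpt, so the exact mechanism may differ.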