def test_general():
    for condition_custom in [{}, {"x_order": OrderNCHW}]:
        condition = dict(condition_default)
        condition.update(condition_custom)

        vx = np.random.rand(2, 3, 4, 5) - 0.5
        vy = 1 / (1 + np.exp(-vx))

        x = Variable(vx.shape, order=OrderNHWC)
        y, = Sigmoid(None)(x)

        x.change_order(condition["x_order"])
        y.change_order(condition["y_order"])

        generate_kernel_test_case(
            description="Sigmoid: " + ", ".join(f"{k}={v}" for k, v in condition_custom.items()),
            backend=condition["backend"],
            graph=Graph([x], [y]),
            inputs={x: ConstantVariable(vx, OrderNHWC).change_order(x.order).data},
            expected={y: ConstantVariable(vy, OrderNHWC).change_order(y.order).data},
            raise_skip=False)

    raise SkipTest
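# `condition_default` is referenced above but defined elsewhere in the test
# module. A minimal sketch with hypothetical values (the real defaults live in
# the WebDNN test suite):
#
#     condition_default = {
#         "backend": ["webgpu", "webassembly", "fallback"],
#         "x_order": OrderNHWC,
#         "y_order": OrderNHWC,
#     }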
from typing import Any


def do_activation(activation: Any, x: Variable) -> Variable:
    # Keras canonicalizes activations to function objects, so identity
    # comparison against keras.activations.* is safe here.
    if activation is keras.activations.relu:
        return Relu(None)(x)[0]
    elif activation is keras.activations.sigmoid:
        return Sigmoid(None)(x)[0]
    elif activation is keras.activations.hard_sigmoid:
        return HardSigmoid(None)(x)[0]
    elif activation is keras.activations.softplus:
        return Softplus(None, beta=1.0)(x)[0]
    elif activation is keras.activations.softsign:
        return Softsign(None)(x)[0]
    elif activation is keras.activations.softmax:
        return Softmax(None, axis=x.order.axes[-1])(x)[0]
    elif activation is keras.activations.elu:
        return Elu(None)(x)[0]
    elif activation is keras.activations.tanh:
        return Tanh(None)(x)[0]
    elif activation is keras.activations.linear:
        # Linear activation is the identity; pass the variable through.
        return x
    else:
        raise NotImplementedError(f"[KerasConverter] Unknown activation: {activation}")
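# Usage sketch (hypothetical call site, not from the converter source): a Keras
# layer stores its activation as the canonical function object resolved through
# keras.activations.get(), which is what makes the `is` comparisons above work.
#
#     y = do_activation(keras.activations.get("sigmoid"), x)
#     y = do_activation(k_op.activation, x)   # k_op: a Keras layer being converted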
def template(x_order=OrderNHWC, y_order=OrderNHWC, description: str = ""):
    vx = np.random.rand(2, 3, 4, 5) - 0.5
    vy = 1 / (1 + np.exp(-vx))

    x = Variable(vx.shape, order=OrderNHWC)
    y, = Sigmoid(None)(x)

    x.change_order(x_order)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"Sigmoid {description}",
        graph=Graph([x], [y]),
        inputs={x: np.transpose(vx, [OrderNHWC.axes_dict[a] for a in x.order.axes])},
        expected={y: np.transpose(vy, [OrderNHWC.axes_dict[a] for a in y.order.axes])},
    )
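# Hypothetical call sites for template, following the parametrization style of
# the surrounding tests (test names assumed, not from the source):
def test():
    template(description="default NHWC order")


def test_different_order():
    template(x_order=OrderNCHW, y_order=OrderNCHW, description="x_order=NCHW, y_order=NCHW")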
def template(r=1.0, x_order=OrderNHWC, y_order=OrderNHWC, description: str = ""):
    vx = (np.random.rand(2, 3, 4, 5) - 0.5) * r
    vy = 1 / (1 + np.exp(-vx))
    # vy contains very small positive values (< 1e-7) where vx is a large
    # negative number. The actual implementation computes
    # tanh(0.5f * x0) * 0.5f + 0.5f, and with tanh the result saturates to 0.0
    # for large negative vx. ABS_EPS is set to tolerate that case.

    x = Variable(vx.shape, order=OrderNHWC)
    y, = Sigmoid(None)(x)

    x.change_order(x_order)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"Sigmoid {description}",
        graph=Graph([x], [y]),
        inputs={x: np.transpose(vx, [OrderNHWC.axes_dict[a] for a in x.order.axes])},
        expected={y: np.transpose(vy, [OrderNHWC.axes_dict[a] for a in y.order.axes])},
        ABS_EPS=1e-7
    )
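# Self-contained sketch verifying the identity behind the comment above:
# sigmoid(x) = tanh(0.5 * x) * 0.5 + 0.5 exactly in real arithmetic, but the
# tanh form saturates to 0.0 in float32 for large negative x, motivating
# ABS_EPS=1e-7. (Standalone example, not part of the test module.)
import numpy as np

xs = np.array([-20.0, -1.0, 0.0, 1.0, 20.0], dtype=np.float32)
reference = 1 / (1 + np.exp(-xs))           # ~2.06e-09 at xs = -20
via_tanh = np.tanh(0.5 * xs) * 0.5 + 0.5    # exactly 0.0 at xs = -20: tanh(-10) rounds to -1.0f
assert np.max(np.abs(reference - via_tanh)) < 1e-7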
def _convert_sigmoid(converter: ONNXConverter, onnx_op: INodeProto):
    x0 = converter.get_variable(onnx_op.input[0])

    y, = Sigmoid(None)(x0)
    converter.set_variable(onnx_op.output[0], y)
def _convert_sigmoid(converter: ChainerConverter, c_op: "chainer.functions.Sigmoid"):
    x = converter.get_variable(c_op.inputs[0])

    y, = Sigmoid(None)(x)
    # Chainer stores function outputs as weak references; the trailing call
    # dereferences c_op.outputs[0] to the actual variable node.
    converter.set_variable(c_op.outputs[0](), y)
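# Registration sketch: in WebDNN, converter handlers like the two functions
# above are typically attached to their converter with a decorator, e.g.:
#
#     @ONNXConverter.register_handler("Sigmoid")
#     def _convert_sigmoid(converter: ONNXConverter, onnx_op: INodeProto): ...
#
#     @ChainerConverter.register_handler("Sigmoid")
#     def _convert_sigmoid(converter: ChainerConverter, c_op: "chainer.functions.Sigmoid"): ...
#
# (register_handler is assumed from the WebDNN codebase; verify the exact name
# against the converter base class you are targeting.)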