def test_general():
    # Legacy condition-dict style test: each custom condition overrides the shared
    # defaults, and raise_skip=False defers the SkipTest until after the loop.
    for condition_custom in [
        {},
        {"x_order": OrderNCHW},
    ]:
        condition = dict(condition_default)
        condition.update(condition_custom)

        beta = condition["beta"]
        vx = np.random.rand(2, 3, 4, 5) - 0.5
        vy = np.log(np.exp(vx * beta) + 1.0) / beta

        x = Variable(vx.shape, order=OrderNHWC)
        y, = Softplus(None, beta=beta)(x)

        x.change_order(condition["x_order"])
        y.change_order(condition["y_order"])

        generate_kernel_test_case(
            description="Softplus: " + ", ".join(f"{k}={v}" for k, v in condition_custom.items()),
            backend=condition["backend"],
            graph=Graph([x], [y]),
            inputs={x: ConstantVariable(vx, OrderNHWC).change_order(x.order).data},
            expected={y: ConstantVariable(vy, OrderNHWC).change_order(y.order).data},
            raise_skip=False
        )

    raise SkipTest
def do_activation(activation: any, x: Variable) -> Variable:
    if activation is keras.activations.relu:
        return Relu(None)(x)[0]

    elif activation is keras.activations.sigmoid:
        return Sigmoid(None)(x)[0]

    elif activation is keras.activations.hard_sigmoid:
        return HardSigmoid(None)(x)[0]

    elif activation is keras.activations.softplus:
        return Softplus(None, beta=1.0)(x)[0]

    elif activation is keras.activations.softsign:
        return Softsign(None)(x)[0]

    elif activation is keras.activations.softmax:
        return Softmax(None, axis=x.order.axes[-1])(x)[0]

    elif activation is keras.activations.elu:
        return Elu(None)(x)[0]

    elif activation is keras.activations.tanh:
        return Tanh(None)(x)[0]

    elif activation is keras.activations.linear:
        return x

    else:
        raise NotImplementedError(f"[KerasConverter] Unknown activation: {activation}")
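# Usage sketch (not from the original source): do_activation dispatches on the Keras
# activation object itself, so a layer handler can pass its layer's `activation`
# attribute straight through. The function name, variable shape, and order below are
# illustrative assumptions, not part of the converter.
def apply_layer_activation_example():
    h = Variable([1, 10], OrderNC)                      # hypothetical pre-activation output
    h = do_activation(keras.activations.softplus, h)    # resolves to Softplus(beta=1.0)
    return h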
def template(x_order=OrderNHWC, y_order=OrderNHWC, beta=1.0, description: str = ""):
    vx = np.random.rand(2, 3, 4, 5) - 0.5
    vy = np.log(np.exp(vx * beta) + 1.0) / beta

    x = Variable(vx.shape, order=OrderNHWC)
    y, = Softplus(None, beta=beta)(x)

    x.change_order(x_order)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"Softplus {description}",
        graph=Graph([x], [y]),
        inputs={
            # Transpose the NHWC reference data into whatever order x now has.
            x: np.transpose(vx, [OrderNHWC.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y: np.transpose(vy, [OrderNHWC.axes_dict[a] for a in y.order.axes])
        },
    )
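# Usage sketch (an assumption about how the template above is driven; the test names
# and parameter values are illustrative, not from the original tests): each case
# reruns the template with a different memory order or beta.
def test_general_example():
    template()

def test_different_order_example():
    template(x_order=OrderNCHW, y_order=OrderNCHW, description="x_order=NCHW, y_order=NCHW")

def test_beta_example():
    template(beta=0.5, description="beta=0.5")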
def softplus_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    x = converter.get_variable(tf_op.inputs[0])
    y, = Softplus(None, beta=1)(x)
    converter.set_variable(tf_op.outputs[0], y)
def _convert_softplus(converter: ONNXConverter, onnx_op: INodeProto):
    x = converter.get_variable(onnx_op.input[0])
    converter.set_variable(onnx_op.output[0], Softplus(None, beta=1.0)(x)[0])
def _convert_softplus(converter: ChainerConverter, c_op: "chainer.functions.Softplus"):
    x = converter.get_variable(c_op.inputs[0])
    y, = Softplus(None, beta=c_op.beta)(x)
    # c_op.outputs holds weak references, so call the element to obtain the variable node.
    converter.set_variable(c_op.outputs[0](), y)
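# Reference sketch (not part of the converter handlers above): the elementwise
# computation every handler maps to, matching the expectation arrays built in the
# tests: y = log(exp(beta * x) + 1) / beta. With beta=1 this is the standard
# softplus requested by the Keras, TensorFlow, and ONNX handlers.
import numpy as np

def softplus_reference(vx: np.ndarray, beta: float = 1.0) -> np.ndarray:
    return np.log(np.exp(vx * beta) + 1.0) / beta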