Example #1
def test_general():
    for condition_custom in [{}, {"x_order": OrderNCHW}]:
        condition = dict(condition_default)
        condition.update(condition_custom)

        vx = np.random.rand(2, 3, 4, 5) - 0.5
        vy = vx * (vx > 0)

        x = Variable(vx.shape, order=OrderNHWC)
        y, = Relu(None)(x)

        x.change_order(condition["x_order"])
        y.change_order(condition["y_order"])

        generate_kernel_test_case(
            description=f"Relu: " +
            (", ".join([f"{k}={v}" for k, v in condition_custom.items()])),
            backend=condition["backend"],
            graph=Graph([x], [y]),
            inputs={
                x: ConstantVariable(vx, OrderNHWC).change_order(x.order).data
            },
            expected={
                y: ConstantVariable(vy, OrderNHWC).change_order(y.order).data
            },
            raise_skip=False)

    raise SkipTest
Example #2
def _convert_crelu(converter: ChainerConverter,
                   c_op: "chainer.functions.CReLU"):
    x = converter.get_variable(c_op.inputs[0])
    y1, = Relu(None)(x)
    y2, = Relu(None)(-x)
    y, = Concat(None, axis=x.order.axes[c_op.axis])(y1, y2)
    converter.set_variable(c_op.outputs[0](), y)
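
Example #2 builds CReLU out of two Relu applications and a Concat along the channel axis. A minimal NumPy sketch of the same identity (plain NumPy, independent of the WebDNN API; the shape is illustrative):

import numpy as np

# CReLU(x) concatenates relu(x) and relu(-x) along the chosen axis,
# doubling that axis. Plain-NumPy sketch, not WebDNN code.
relu = lambda v: np.maximum(v, 0)

x = np.random.randn(2, 3, 4)                         # hypothetical (N, C, W) input
crelu = np.concatenate([relu(x), relu(-x)], axis=1)  # channel axis is doubled
assert crelu.shape == (2, 6, 4)
assert np.allclose(relu(x) - relu(-x), x)            # the two halves jointly recover x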
Example #3
def _convert_elu(converter: ChainerConverter, c_op: "chainer.functions.ELU"):
    x = converter.get_variable(c_op.inputs[0])
    if c_op.alpha == 0:
        y, = Relu(None)(x)

    elif c_op.alpha == 1:
        y, = Elu(None)(x)

    else:
        y1, = Elu(None)(x)
        y2, = Relu(None)(x)
        y = (y1 * c_op.alpha) + y2 * (1 - c_op.alpha)

    converter.set_variable(c_op.outputs[0](), y)
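
The general branch above rewrites ELU with an arbitrary alpha as a blend of the alpha = 1 ELU and a plain ReLU. A short NumPy check of that identity (a sketch, not WebDNN code):

import numpy as np

def elu(v, alpha):
    # reference ELU: v for v > 0, alpha * (exp(v) - 1) otherwise
    return np.where(v > 0, v, alpha * (np.exp(v) - 1))

x = np.linspace(-3, 3, 13)
alpha = 0.7                                          # arbitrary test value
blended = alpha * elu(x, 1.0) + (1 - alpha) * np.maximum(x, 0)
assert np.allclose(blended, elu(x, alpha))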
Example #4
    def convert_layer_dense(self, layer_config: Dict[str, object], inputs: List[Variable]) -> List[Variable]:
        assert len(inputs) == 1
        input = inputs[0]
        name: str = layer_config["name"]
        weight_array = self.weights[f"{name}/{name}/kernel:0"].value
        weight_var = ConstantVariable(weight_array, OrderCN)  # shape: (in, out)
        linear_opr = Linear(name)
        y, = linear_opr(input, weight_var)

        if layer_config["use_bias"]:
            bias_array = self.weights[f"{name}/{name}/bias:0"].value
            bias_var = ConstantVariable(bias_array, OrderC)
            bias_opr = AxiswiseBias(name + "_bias", Axis.C)
            y, = bias_opr(y, bias_var)

        act_opr: Optional[Operator] = None
        activation_type: str = layer_config["activation"]
        if activation_type == "relu":
            act_opr = Relu(name + "_activation")
        elif activation_type == "softmax":
            warn("omitting softmax activation")
        else:
            raise NotImplementedError(f"Unknown activation {activation_type}")

        if act_opr is not None:
            y, = act_opr(y)

        return [y]
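
For reference, the converted Dense layer is a plain affine map followed by the optional activation. A NumPy sketch under the shape convention noted in the comment above (kernel of shape (in, out), bias of shape (out,)); all names and sizes here are illustrative:

import numpy as np

batch, in_features, out_features = 4, 8, 3
x = np.random.randn(batch, in_features)
kernel = np.random.randn(in_features, out_features)  # the "kernel:0" weight, shape (in, out)
bias = np.random.randn(out_features)                 # the "bias:0" weight

y = x @ kernel + bias                                # Linear followed by AxiswiseBias
y = np.maximum(y, 0)                                 # the optional "relu" activation
assert y.shape == (batch, out_features)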
Example #5
    def convert_layer_activation(self, layer_config: Dict[str, object], inputs: List[Variable]) -> List[Variable]:
        """
        Example:
 {'class_name': 'Activation',
  'config': {'activation': 'relu', 'name': 'activation_2', 'trainable': True},
  'inbound_nodes': [[['bn2a_branch2a', 0, 0, {}]]],
  'name': 'activation_2'},
        :param layer_config: 
        :param inputs: 
        :return: 
        """
        assert len(inputs) == 1
        input = inputs[0]
        name: str = layer_config["name"]

        act_opr: Optional[Operator] = None
        activation_type: str = layer_config["activation"]
        if activation_type == "relu":
            act_opr = Relu(name + "_activation")
        else:
            raise NotImplementedError(f"Unknown activation {activation_type}")

        y, = act_opr(input)

        return [y]
Example #6
def _convert_elu(converter: KerasConverter, k_op: "keras.layers.ELU"):
    x = converter.get_variable(converter.get_input_tensor(k_op)[0])
    alpha = float(k_op.alpha)

    if alpha == 1.0:
        y, = Elu(None)(x)

    elif alpha == 0.0:
        y, = Relu(None)(x)

    else:
        y1, = Elu(None)(x)
        y2, = Relu(None)(x)
        y = y1 * alpha + y2 * (1 - alpha)

    converter.set_variable(converter.get_output_tensor(k_op)[0], y)
Example #7
def do_activation(activation: Any, x: Variable) -> Variable:
    if activation is keras.activations.relu:
        return Relu(None)(x)[0]

    elif activation is keras.activations.sigmoid:
        return Sigmoid(None)(x)[0]

    elif activation is keras.activations.hard_sigmoid:
        return HardSigmoid(None)(x)[0]

    elif activation is keras.activations.softplus:
        return Softplus(None, beta=1.0)(x)[0]

    elif activation is keras.activations.softsign:
        return Softsign(None)(x)[0]

    elif activation is keras.activations.softmax:
        return Softmax(None, axis=x.order.axes[-1])(x)[0]

    elif activation is keras.activations.elu:
        return Elu(None)(x)[0]

    elif activation is keras.activations.tanh:
        return Tanh(None)(x)[0]

    elif activation is keras.activations.linear:
        return x

    else:
        raise NotImplementedError(
            f"[KerasConverter] Unknown activation: {activation}")
Example #8
def _convert_leaky_relu(converter: KerasConverter,
                        k_op: "keras.layers.LeakyReLU"):
    x = converter.get_variable(converter.get_input_tensor(k_op)[0])
    if k_op.alpha == 0:
        y, = Relu(None)(x)
    else:
        y, = LeakyRelu(None, slope=k_op.alpha)(x)

    converter.set_variable(converter.get_output_tensor(k_op)[0], y)
Example #9
def _convert_thresholded_relu(converter: KerasConverter,
                              k_op: "keras.layers.ThresholdedReLU"):
    x = converter.get_variable(converter.get_input_tensor(k_op)[0])

    if k_op.theta == 0:
        y, = Relu(None)(x)
    else:
        y, = ThresholdRelu(None, threshold=k_op.theta)(x)

    converter.set_variable(converter.get_output_tensor(k_op)[0], y)
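
Examples #8 and #9 fall back to a plain Relu when the parameter is zero. A NumPy sketch of the two reference formulas and of why the zero cases collapse to ReLU (not WebDNN code):

import numpy as np

def leaky_relu(v, slope):
    return np.where(v > 0, v, slope * v)             # slope == 0 gives plain ReLU

def thresholded_relu(v, theta):
    return np.where(v > theta, v, 0.0)               # theta == 0 gives plain ReLU

x = np.linspace(-2, 2, 9)
assert np.allclose(leaky_relu(x, 0.0), np.maximum(x, 0))
assert np.allclose(thresholded_relu(x, 0.0), np.maximum(x, 0))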
Example #10
def template(x_order=OrderNHWC,
             x_shape=(2, 3, 4, 5),
             y_order=OrderNHWC,
             description: str = ""):
    vx = np.random.rand(*x_shape) - 0.5
    vy = vx * (vx > 0)

    x = Variable(vx.shape, order=x_order)
    y, = Relu(None)(x)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"Relu {description}",
        graph=Graph([x], [y]),
        inputs={x: vx},
        expected={
            y: np.transpose(vy, [x_order.axes_dict[a] for a in y_order.axes])
        },
    )
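
The expected output above is produced by permuting the NHWC test data into the axis order of y. A small sketch of that permutation for the concrete case x_order = NHWC, y_order = NCHW, with the axis dictionaries written out by hand (plain NumPy, not the WebDNN Order objects):

import numpy as np

nhwc_axes_dict = {"N": 0, "H": 1, "W": 2, "C": 3}    # stands in for x_order.axes_dict
nchw_axes = ["N", "C", "H", "W"]                     # stands in for y_order.axes

perm = [nhwc_axes_dict[a] for a in nchw_axes]        # -> [0, 3, 1, 2]
v = np.random.rand(2, 3, 4, 5)                       # data laid out as NHWC
assert np.transpose(v, perm).shape == (2, 5, 3, 4)   # same data, NCHW layout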
Example #11
    def convert_layer_conv2d(self, layer_config: Dict[str, object], inputs: List[Variable]) -> List[Variable]:
        """
        Example:
           {'class_name': 'Conv2D',
   'config': {'activation': 'relu',
    'activity_regularizer': None,
    'bias_constraint': None,
    'bias_initializer': {'class_name': 'Zeros', 'config': {}},
    'bias_regularizer': None,
    'data_format': 'channels_last',
    'dilation_rate': [1, 1],
    'filters': 64,
    'kernel_constraint': None,
    'kernel_initializer': {'class_name': 'VarianceScaling',
     'config': {'distribution': 'uniform',
      'mode': 'fan_avg',
      'scale': 1.0,
      'seed': None}},
    'kernel_regularizer': None,
    'kernel_size': [3, 3],
    'name': 'conv2d_2',
    'padding': 'valid',
    'strides': [1, 1],
    'trainable': True,
    'use_bias': True}},
        :param layer_config: 
        :param inputs: 
        :return: 
        """
        assert len(inputs) == 1
        input = inputs[0]
        name: str = layer_config["name"]
        weight_array = self.weights[f"{name}/{name}/kernel:0"].value
        assert layer_config["data_format"] == "channels_last"
        weight_var = ConstantVariable(weight_array, OrderHWCN)  # order does not depend on data_format
        ksize: Tuple[int, int] = tuple(layer_config["kernel_size"])
        stride: Tuple[int, int] = tuple(layer_config["strides"])
        padding_keras: str = layer_config["padding"]  # valid or same
        if isinstance(padding_keras, tuple):
            # preprocess_zeropadding2d
            padding = padding_keras
        elif padding_keras == "valid":
            padding = (0, 0)
        elif padding_keras == "same":
            padding = (ksize[0] // 2, ksize[1] // 2)
        else:
            raise ValueError("Unknown padding")

        conv2d_opr = Convolution2D(name,
                                   ksize=ksize,
                                   stride=stride,
                                   padding=padding)
        y, = conv2d_opr(input, weight_var)

        if layer_config["use_bias"]:
            bias_array = self.weights[f"{name}/{name}/bias:0"].value
            bias_var = ConstantVariable(bias_array, OrderC)
            bias_opr = AxiswiseBias(name + "_bias", Axis.C)
            y, = bias_opr(y, bias_var)

        act_opr: Optional[Operator] = None
        activation_type: str = layer_config["activation"]
        if activation_type == "relu":
            act_opr = Relu(name + "_activation")
        elif activation_type == "softmax":
            warn("omitting softmax activation")
        elif activation_type == "linear":
            pass
        else:
            raise NotImplementedError(f"Unknown activation {activation_type}")

        if act_opr is not None:
            y, = act_opr(y)

        return [y]
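
The "same" branch above approximates Keras padding with ksize // 2 per side. A quick arithmetic sketch showing that, for odd kernel sizes, this reproduces Keras' "same" output size of ceil(in / stride) (illustrative values; not WebDNN code):

def conv_out_size(in_size, ksize, stride, pad):
    # standard convolution output-size formula
    return (in_size + 2 * pad - ksize) // stride + 1

for in_size, ksize, stride in [(28, 3, 1), (28, 3, 2), (28, 5, 2)]:
    pad = ksize // 2                                 # the "same" branch above
    out = conv_out_size(in_size, ksize, stride, pad)
    assert out == -(-in_size // stride)              # ceil(in_size / stride)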
Example #12
def _convert_relu(converter: ONNXConverter, onnx_op: INodeProto):
    x0 = converter.get_variable(onnx_op.input[0])

    y, = Relu(None)(x0)
    converter.set_variable(onnx_op.output[0], y)
Example #13
def _convert_relu(converter: ChainerConverter, c_op: "chainer.functions.ReLU"):
    x = converter.get_variable(c_op.inputs[0])
    y, = Relu(None)(x)
    converter.set_variable(c_op.outputs[0](), y)
Example #14
    def __call__(self, inputs: List[Variable]) -> Tuple[Variable]:
        assert len(inputs) == 1
        opr = Relu(generate_unique_name(self.cfunc.label))
        return opr(inputs[0])
Example #15
def _convert_elu(converter: ChainerConverter, c_op: chainer.functions.ELU):
    x = converter.get_variable(c_op.inputs[0])
    y1, = Elu(None)(x)
    y2, = Relu(None)(x)
    y = y1 * c_op.alpha + y2 * (1 - c_op.alpha)
    converter.set_variable(c_op.outputs[0](), y)