Example #1
def test_op_capturing():
    x = ng.constant(0)
    with ng.Op.captured_ops() as ops1:
        y = -x
        with ng.Op.all_ops() as ops2:
            z = x + y
            with ng.Op.captured_ops() as ops3:
                ng.exp(z)
    # negate and add
    assert len(ops1) == 2
    # add and exp
    assert len(ops2) == 2
    # exp
    assert len(ops3) == 1
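As the assertion counts imply, `captured_ops` appears to claim ops for the innermost capturing scope (the inner `exp` never reaches `ops1`), while `all_ops` records every op created in its scope without hiding it from enclosing captures; this reading is inferred from the test itself rather than from API documentation.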
Example #2
def unary_op(op_str, a):
    if op_str == "Abs":
        return ng.abs(a)
    elif op_str == "Acos":
        return ng.acos(a)
    elif op_str == "Asin":
        return ng.asin(a)
    elif op_str == "Atan":
        return ng.atan(a)
    elif op_str == "Ceiling":
        return ng.ceiling(a)
    elif op_str == "Cos":
        return ng.cos(a)
    elif op_str == "Cosh":
        return ng.cosh(a)
    elif op_str == "Floor":
        return ng.floor(a)
    elif op_str == "log":
        return ng.log(a)
    elif op_str == "exp":
        return ng.exp(a)
    elif op_str == "negative":
        return ng.negative(a)
    elif op_str == "Sign":
        return ng.sign(a)
    elif op_str == "Sin":
        return ng.sin(a)
    elif op_str == "Sinh":
        return ng.sinh(a)
    elif op_str == "Sqrt":
        return ng.sqrt(a)
    elif op_str == "Tan":
        return ng.tan(a)
    elif op_str == "Tanh":
        return ng.tanh(a)
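The `elif` chain above maps test-case names onto ngraph's unary API one call at a time. As a sketch only (the same `ng` namespace is assumed, and `unary_op_dispatch` is a hypothetical name, not part of the source), a dictionary makes the same mapping table-driven:

_UNARY_OPS = {
    'Abs': ng.abs, 'Acos': ng.acos, 'Asin': ng.asin, 'Atan': ng.atan,
    'Ceiling': ng.ceiling, 'Cos': ng.cos, 'Cosh': ng.cosh, 'Floor': ng.floor,
    'log': ng.log, 'exp': ng.exp, 'negative': ng.negative, 'Sign': ng.sign,
    'Sin': ng.sin, 'Sinh': ng.sinh, 'Sqrt': ng.sqrt, 'Tan': ng.tan, 'Tanh': ng.tanh,
}

def unary_op_dispatch(op_str, a):
    # Raises KeyError for names the table does not cover.
    return _UNARY_OPS[op_str](a)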
Example #3
def Selu(onnx_node, ng_inputs):  # type: (NodeWrapper, List[TensorOp]) -> Op
    # f(x) = gamma * (alpha * exp(x) - alpha) for x <= 0, f(x) = gamma * x for x > 0
    x = ng_inputs[0]
    alpha = onnx_node.get_attribute_value('alpha', 1.6732)
    gamma = onnx_node.get_attribute_value('gamma', 1.0507)

    return gamma * (ng.maximum(x, 0) + alpha *
                    (ng.exp(-ng.maximum(-x, 0)) - 1))
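The branch-free form can be checked against the piecewise definition in the comment. A minimal NumPy sketch (NumPy stands in for ngraph here, and the constants are the defaults from the code, so this is an illustration rather than part of the bridge):

import numpy as np

alpha, gamma = 1.6732, 1.0507
x = np.linspace(-3.0, 3.0, 7)

# Branch-free form: for x > 0 the exp term vanishes; for x <= 0 the max term vanishes.
branch_free = gamma * (np.maximum(x, 0) + alpha * (np.exp(-np.maximum(-x, 0)) - 1))
piecewise = np.where(x <= 0, gamma * alpha * (np.exp(x) - 1), gamma * x)

assert np.allclose(branch_free, piecewise)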
Example #4
def ReduceLogSumExp(
        onnx_node,
        ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Compute the log sum exponent of the input tensor's element' along the provided axes."""
    op = ng.exp(ng_inputs[0])
    op = make_reduction_op(ng.sum, onnx_node, op)
    op = ng.log(op)
    return op
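The three steps compose to log(sum(exp(x))) over the reduced axes. One caveat worth noting: the literal exp can overflow for large inputs, so a NumPy reference (an aside, not what this bridge emits) usually shifts by the maximum first:

import numpy as np

def reduce_log_sum_exp(x, axis):
    # Shifting by the max keeps exp's argument <= 0, avoiding overflow.
    m = np.max(x, axis=axis, keepdims=True)
    return np.squeeze(m, axis=axis) + np.log(np.sum(np.exp(x - m), axis=axis))

x = np.array([[1000.0, 1000.0], [0.0, 0.0]])
print(reduce_log_sum_exp(x, axis=1))  # ~[1000.693, 0.693]; the naive formula overflows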
Example #5
def Softplus(onnx_node,
             ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Apply Softplus function, f(x) = ln(exp(x) + 1) to the input tensor element-wise.

    :param onnx_node: The ONNX node representing this operation.
    :param ng_inputs: The input tensors.
    :return: The tensor with applied Softplus operation.
    """
    return ng.log(ng.exp(ng_inputs[0]) + 1)
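Softplus is a smooth approximation of max(x, 0): f(x) tends to 0 for large negative inputs and to x for large positive ones.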
Example #6
def get_simple_graph():
    ax = ng.make_axes([ng.make_axis(name='C', length=1)])
    base_op = ng.constant(5.0, ax).named("weird_name#@$")
    base_op.metadata["string"] = "stringval"
    simple_graph = ng.log(ng.exp(base_op))
    simple_graph.metadata.update(string_val="foo",
                                 bool_val=True,
                                 float_val=6.5,
                                 int_val=2)
    return base_op, simple_graph
Example #7
def Selu(onnx_node, ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Apply the scaled exponential linear unit function to the input tensor elementwise.

    f(x) = gamma * (alpha * exp(x) - alpha) for x <= 0, f(x) = gamma * x for x > 0
    """
    x = ng_inputs[0]
    alpha = onnx_node.get_attribute_value('alpha', 1.6732)
    gamma = onnx_node.get_attribute_value('gamma', 1.0507)

    return (gamma * (ng.maximum(x, 0) + alpha * (ng.exp(ng.negative(ng.maximum(ng.negative(x), 0))) - 1)))
Example #8
    def __call__(self, x):
        """
        Returns the Exponential Linear activation

        Arguments:
            x (Tensor or optree): input value

        Returns:
            Tensor or optree: output activation
        """
        return ng.maximum(x, 0) + self.alpha * (ng.exp(ng.minimum(x, 0)) - 1)
Example #9
def Elu(onnx_node, ng_inputs):  # type: (NodeWrapper, List[TensorOp]) -> Op
    # f(x) = alpha * (exp(x) - 1.) for x < 0, f(x) = x for x >= 0
    x = ng_inputs[0]
    alpha = onnx_node.get_attribute_value('alpha', 1)

    if alpha < 0:
        logger.warning(
            'Elu node (%s): alpha value should be positive, but is: %s',
            onnx_node.name, alpha)

    return ng.maximum(x, 0) + alpha * (ng.exp(-ng.maximum(-x, 0)) - 1)
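The `-ng.maximum(-x, 0)` term is just min(x, 0), so the exp factor is exp(x) - 1 for negative inputs and exactly 0 otherwise, reproducing the piecewise definition without a select op; Example #8 writes the same thing directly as `ng.exp(ng.minimum(x, 0))`.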
Example #10
    def Exp(self, cntk_op, inputs):
        """
        Returns element-wise exp of inputs[0].

        Arguments:
            inputs: List of inputs to this node.

        Returns:
            An ngraph Op.
        """
        return ng.exp(inputs[0]).named(cntk_op.uid)
Example #11
    def Exp(self, cntk_op, inputs):
        """
        Returns element-wise exp of inputs[0].

        Arguments:
            cntk_op: CNTK operation to be imported.
            inputs: List of inputs to this node.

        Returns:
            An ngraph Op.
        """
        return ng.exp(inputs[0]).named(cntk_op.uid)
Example #12
    def ReduceElements(self, cntk_op, inputs):
        """
        Returns a reduction operation (max, min, mean, sum, prod) or a calculation which matches
        CNTK's LogSum reduction (`reduce_log_sum_exp` function).

        Arguments:
            cntk_op: CNTK operation to be imported.
            inputs: List of inputs to this node.

        Returns:
            An ngraph Op.
        """
        assert len(inputs) == 1

        reduction_op_name = cntk_op.attributes.get('reductionOpName')
        # CNTK API defines a reductionKeepDimensions flag, but we currently don't use it
        # keep_dimensions = cntk_op.attributes.get('reductionKeepDimensions', False)

        cntk_op_attribute_axes = []
        if cntk_op.attributes.get('axisVec'):
            cntk_op_attribute_axes.extend(cntk_op.attributes.get('axisVec'))
        elif cntk_op.attributes.get('axis'):
            cntk_op_attribute_axes.append(cntk_op.attributes.get('axis'))

        # CNTK axes are numbered in reverse order: the last axis is labeled 0, the previous 1, etc.
        reduction_axes_indexes = [len(inputs[0].axes) - 1 - i
                                  for (_, _, i) in cntk_op_attribute_axes]
        reduction_ng_axes_list = [axis for (i, axis) in enumerate(inputs[0].axes)
                                  if i in reduction_axes_indexes]
        reduction_ng_axes = ng.Axes(axes=reduction_ng_axes_list)

        if reduction_op_name == 'Max':
            return ng.max(inputs[0], reduction_axes=reduction_ng_axes).named(cntk_op.uid)

        if reduction_op_name == 'Min':
            return ng.min(inputs[0], reduction_axes=reduction_ng_axes).named(cntk_op.uid)

        if reduction_op_name == 'Mean':
            return ng.mean(inputs[0], reduction_axes=reduction_ng_axes).named(cntk_op.uid)

        if reduction_op_name == 'Sum':
            return ng.sum(inputs[0], reduction_axes=reduction_ng_axes).named(cntk_op.uid)

        if reduction_op_name == 'Prod':
            return ng.prod(inputs[0], reduction_axes=reduction_ng_axes).named(cntk_op.uid)

        if reduction_op_name == 'LogSum':
            return ng.log(ng.sum(ng.exp(inputs[0]), reduction_axes=reduction_ng_axes))\
                .named(cntk_op.uid)

        raise NotImplementedError('CNTKImporter: ReduceElements does not support operation %s'
                                  % reduction_op_name)
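The reverse numbering is the subtle part: CNTK labels the last axis 0, so the importer flips each index with `len(axes) - 1 - i`. A tiny standalone sketch (the 3-tuples are placeholders matching the `(_, _, i)` unpacking above; their first two fields are assumptions):

num_axes = 4                                    # rank of the hypothetical input
cntk_axes = [(None, None, 0), (None, None, 2)]  # CNTK's last and third-from-last axes

reduction_axes_indexes = [num_axes - 1 - i for (_, _, i) in cntk_axes]
print(reduction_axes_indexes)  # [3, 1] -- CNTK axis 0 maps to ngraph's last axis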
Example #13
def Elu(onnx_node, ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Apply the exponential linear unit function to the input tensor elementwise.

    f(x) = alpha * (exp(x) - 1.) for x < 0, f(x) = x for x >= 0
    """
    x = ng_inputs[0]
    alpha = onnx_node.get_attribute_value('alpha', 1)

    if alpha < 0:
        logger.warning('Elu node (%s): alpha value should be positive, but is: %s',
                       onnx_node.name, alpha)

    return (ng.maximum(x, 0) + alpha * (ng.exp(ng.negative(ng.maximum(ng.negative(x), 0))) - 1))
Example #14
    def Exp(self, c2_op, inputs):
        """
        Computes element-wise exp: `exp(x)`

        Arguments:
            c2_op: NodeDef object, the caffe2 node to convert.
            inputs: List of ngraph Ops as inputs to this node.

        Returns:
            An ngraph Op corresponding to the caffe2 node.
        """
        assert len(inputs) == 1
        return ng.exp(inputs[0]).named(c2_op.name)
Example #15
def test_op_capturing():
    x = ng.constant(0)
    with ng.Op.captured_ops() as ops1:
        y = -x
        with ng.Op.all_ops() as ops2:
            z = x + y
            with ng.Op.captured_ops() as ops3:
                w = ng.exp(z)
    # negate and add
    assert y in ops1
    assert y.args[0] in ops1
    # add and exp
    assert z in ops2
    assert w in ops2
    # exp
    assert w in ops3
    assert z not in ops3
Example #16
def unary_op(op_str, a):
    if op_str == 'Abs':
        return ng.abs(a)
    elif op_str == 'Acos':
        return ng.acos(a)
    elif op_str == 'Asin':
        return ng.asin(a)
    elif op_str == 'Atan':
        return ng.atan(a)
    elif op_str == 'Ceiling':
        return ng.ceiling(a)
    elif op_str == 'Cos':
        return ng.cos(a)
    elif op_str == 'Cosh':
        return ng.cosh(a)
    elif op_str == 'Floor':
        return ng.floor(a)
    elif op_str == 'log':
        return ng.log(a)
    elif op_str == 'exp':
        return ng.exp(a)
    elif op_str == 'negative':
        return ng.negative(a)
    elif op_str == 'Reverse':
        return ng.reverse(a, np.array([1]), 'index')
    elif op_str == 'Sign':
        return ng.sign(a)
    elif op_str == 'Sin':
        return ng.sin(a)
    elif op_str == 'Sinh':
        return ng.sinh(a)
    elif op_str == 'Sqrt':
        return ng.sqrt(a)
    elif op_str == 'Tan':
        return ng.tan(a)
    elif op_str == 'Tanh':
        return ng.tanh(a)
Example #17
def get_simple_graph():
    base_op = ng.constant(5.0)
    simple_graph = ng.log(ng.exp(base_op))
    return base_op, simple_graph
Example #18
def Exp(onnx_node, ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Calculate the exponential of the input tensor elementwise."""
    return ng.exp(ng_inputs[0])
Example #19
def Softplus(onnx_node, ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Apply Softplus function, f(x) = ln(exp(x) + 1) to the input tensor elementwise."""
    return ng.log(ng.exp(ng_inputs[0]) + 1)
Example #20
def ReduceLogSumExp(onnx_node,
                    ng_inputs):  # type: (NodeWrapper, List[TensorOp]) -> Op
    op = ng.exp(ng_inputs[0])
    op = make_reduction_op(ng.sum, onnx_node, op)
    op = ng.log(op)
    return op
Example #21
def Sigmoid(onnx_node, ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Apply the sigmoid function, f(x) = 1 / (1 + exp(-x)) to the input tensor elementwise."""
    return 1 / (1 + ng.exp(ng.negative(ng_inputs[0])))
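This is the standard logistic function; Examples #24 and #25 below reuse the same 1 / (1 + exp(-x)) pattern for a learning-rate schedule and a standalone helper.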
Example #22
def Exp(onnx_node, ng_inputs):  # type: (NodeWrapper, List[TensorOp]) -> Op
    return ng.exp(ng_inputs[0])
Example #23
    def configure(self, input_op):
        return ng.exp(input_op)
Example #24
    def __call__(self, iteration):
        return self.base_lr * (1 / (1 + ng.exp(-self.gamma *
                                               (iteration - self.step_size))))
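Example #24 is a sigmoid ramp centered at `step_size`. A quick NumPy sketch with made-up hyperparameters (the `base_lr`, `gamma`, and `step_size` values here are illustrative, not from the source) shows the shape:

import numpy as np

base_lr, gamma, step_size = 0.1, 0.05, 100  # illustrative values only

def lr(iteration):
    return base_lr * (1 / (1 + np.exp(-gamma * (iteration - step_size))))

for it in (0, 100, 200):
    print(it, lr(it))  # ramps from ~0.0007 through 0.05 (the midpoint) toward ~0.1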
Example #25
def sigmoid(x):
    return 1. / (1. + ng.exp(-x))
Example #26
def exp(x, name=None):
    return ng.exp(x).named(name)
Example #27
def get_simple_graph():
    ax = ng.make_axes([ng.make_axis(name='C', length=1)])
    base_op = ng.constant(5.0, ax)
    simple_graph = ng.log(ng.exp(base_op))
    return base_op, simple_graph