Code example #1
def exp(node, name=None):  # type: (NodeInput, str) -> Node
    """Return a node which applies exp to the input node element-wise.

    :param node: The node providing data for the operation.
    :param name: The optional name for the new output node.
    :return: The new node performing the natural exponential operation.
    """
    return Exp(node)
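A minimal usage sketch (not from the listed projects; it assumes the wrapper above is exposed as ngraph.exp and that an ngraph.parameter helper exists for creating input nodes):

import numpy as np
import ngraph as ng

# Build a graph node that computes e**x element-wise over a 2x2 input.
x = ng.parameter([2, 2], dtype=np.float32, name='x')
y = ng.exp(x)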
Code example #2
File: test_ops.py Project: tomdol/ngraph
def unary_op(op_str, a):
    """Build the unary ngraph op named by op_str over node a.

    Note: a few names ('log', 'exp', 'negative') are lowercase in the
    test suite, unlike the rest; unknown names raise KeyError.
    """
    ops = {
        'Abs': Abs,
        'Acos': Acos,
        'Asin': Asin,
        'Atan': Atan,
        'Ceiling': Ceiling,
        'Cos': Cos,
        'Cosh': Cosh,
        'Floor': Floor,
        'log': Log,
        'exp': Exp,
        'negative': Negative,
        'Reverse': lambda node: Reverse(node, AxisSet({1})),  # reverses axis 1
        'Sign': Sign,
        'Sin': Sin,
        'Sinh': Sinh,
        'Sqrt': Sqrt,
        'Tan': Tan,
        'Tanh': Tanh,
    }
    return ops[op_str](a)
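A hypothetical call, assuming a is a Parameter node built in the test fixture; the string key selects the op class, and 'Reverse' additionally fixes the reversed axis to 1:

node = unary_op('Tanh', a)  # builds Tanh(a)
node = unary_op('exp', a)   # note the lowercase key used by the test suite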
Code example #3
File: mnist_mlp.py Project: shyamalschandra/ngraph-1
X2 = X1 / make_float32_constant_like(255., X1)  # scale raw pixel values from [0, 255] to [0, 1]

# Affine 1: 784 -> 100
W1 = Parameter(float_element_type, Shape([784, 100]))
b1 = Parameter(float_element_type, Shape([100]))
X3 = Dot(X2, W1) + Broadcast(b1, Shape([bz, 100]), AxisSet({0}))  # Broadcast tiles the bias across the batch axis
X4 = relu(X3)

# Affine 2: 100 -> 10
W2 = Parameter(float_element_type, Shape([100, 10]))
b2 = Parameter(float_element_type, Shape([10]))
X5 = Dot(X4, W2) + Broadcast(b2, Shape([bz, 10]), AxisSet({0}))

# Softmax
Logits = X5
ExpLogits = Exp(Logits)  # bound to a new name so the Exp op class is not shadowed
SumExp = Sum(ExpLogits, AxisSet({1}))                               # row-wise sum of exponentials
SumExpBroadcast = Broadcast(SumExp, Shape([bz, 10]), AxisSet({1}))
Softmax = ExpLogits / SumExpBroadcast                               # rows now sum to 1

# Loss: mean cross-entropy over the batch
LogSoftmax = Log(Softmax)
Loss = Negative(Sum(LogSoftmax * LabelOneHot, AxisSet({0, 1}))) / make_float32_constant(
    float(bz), [], set())

# Derivatives (hand-written backward pass)
dLogits = Softmax - LabelOneHot  # gradient of the cross-entropy w.r.t. the logits (up to the 1/bz factor)
dX5 = dLogits

dX4 = Dot(dX5, transpose(W2, Shape([1, 0])))  # backprop through Dot: dX4 = dX5 . W2^T
dW2 = Dot(transpose(X4, Shape([1, 0])), dX5)  # dW2 = X4^T . dX5
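As a sanity check on the hand-derived gradient, here is a NumPy mirror of the softmax/cross-entropy part of the graph (a standalone sketch, independent of the ngraph API; shapes follow the [bz, 10] logits above):

import numpy as np

def softmax_xent(logits, onehot):
    """Return (loss, dLogits) for mean cross-entropy, mirroring the graph above."""
    e = np.exp(logits - logits.max(axis=1, keepdims=True))  # numerically stable exponentials
    p = e / e.sum(axis=1, keepdims=True)                    # Softmax
    loss = -np.sum(np.log(p) * onehot) / logits.shape[0]    # Loss
    return loss, p - onehot                                 # dLogits, as in the graph (1/bz omitted)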