Example #1
    def MatMul(self, tf_node, inputs):
        """
        Multiplies matrix `a` by matrix `b`. The inputs must be two-dimensional,
        and the inner dimensions must match (possibly after transpose).

        Arguments:
            tf_node: NodeDef object, the TensorFlow node to convert.
            inputs: List of ngraph Ops as inputs to this node.

        Returns:
            An ngraph Op corresponding to the TensorFlow node.

        Inputs to tf_node:
            a, b, transpose_a, transpose_b, a_is_sparse, b_is_sparse, name
        """
        # get inputs
        left, right = inputs
        if tf_node.attr['transpose_a'].b:
            left = ng.Transpose(left)
        if tf_node.attr['transpose_b'].b:
            right = ng.Transpose(right)

        # check shape
        assert len(left.axes) == len(right.axes) == 2
        assert left.axes[1].length == right.axes[0].length

        # cast left's second axis to the dual of right's first axis so that
        # ng.dot contracts that axis pair
        left_casted = ng.cast_axes(left, [left.axes[0], right.axes[0] - 1])

        # result op
        result_op = ng.dot(left_casted, right, name=tf_node.name)

        # return
        return result_op
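
Note on the dual-axis cast: `right.axes[0] - 1` uses the older ngraph dual-offset Axis API, re-labeling the left operand's second axis as the dual of the right operand's first so that `ng.dot` knows which pair to contract. A minimal sketch of the same pattern on hypothetical placeholders (the axis names M, K, N are illustrative, not from the importer):

import ngraph as ng

# hypothetical 2-D operands: (2, 3) dot (3, 4) -> (2, 4)
M = ng.make_axis(length=2)
K = ng.make_axis(length=3)
N = ng.make_axis(length=4)
a = ng.placeholder([M, ng.make_axis(length=3)])
b = ng.placeholder([K, N])

# recast a's second axis to the dual of b's first so ng.dot contracts it
a = ng.cast_axes(a, [M, K - 1])
c = ng.dot(a, b)  # result axes: (M, N)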
Example #2
def matmul(left, right, transpose_a=False, transpose_b=False, name=None):
    """
    Only supports 2-D matmul for now.
    """
    # Transpose
    if transpose_a:
        left = ng.Transpose(left)
    if transpose_b:
        right = ng.Transpose(right)

    # Check shape
    assert len(left.axes) == len(right.axes) == 2
    assert left.axes[1].length == right.axes[0].length

    # step 1: cast left (pos_1, pos_0), right (pos_1, pos_0) =>
    #              left (temp , pos_1), right (pos_1, pos_0)
    # step 2: perform left dot right, result
    #         (temp, pos_0)
    # step 3: cast back to (pos_1, pos_0)
    left_temp_axes = ng.make_axes(
        [ng.make_axis(left.axes[0].length), right.axes[0]])
    left = ng.cast_axes(left, axes=left_temp_axes)

    # Result op
    result_op = ng.dot(left, right)
    result_op = cast_to_pos_axes(result_op)

    # Return
    return result_op.named(name)
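
For reference, the plain NumPy semantics this helper mirrors, minus the axis bookkeeping (a hypothetical matmul_reference, not part of the importer):

import numpy as np

def matmul_reference(left, right, transpose_a=False, transpose_b=False):
    # NumPy equivalent of the 2-D matmul above
    if transpose_a:
        left = left.T
    if transpose_b:
        right = right.T
    assert left.ndim == right.ndim == 2
    assert left.shape[1] == right.shape[0]
    return left @ right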
Example #3
def Gemm(onnx_node, ng_inputs):  # type: (NodeWrapper, List[TensorOp]) -> Op
    # Y = alpha * (A @ B) + beta * C
    input_a, input_b, input_c = ng_inputs
    alpha = onnx_node.get_attribute_value('alpha', 1)  # scalar multiplier for A @ B
    beta = onnx_node.get_attribute_value('beta', 1)  # scalar multiplier for input tensor C
    broadcast = onnx_node.get_attribute_value('broadcast', 1)  # should C be broadcast?
    trans_a = onnx_node.get_attribute_value('transA', False)  # should A be transposed?
    trans_b = onnx_node.get_attribute_value('transB', False)  # should B be transposed?

    if not broadcast:
        logger.warning(
            'Gemm node (%s): import does not support broadcast value %s',
            onnx_node.name, broadcast)

    if trans_a:
        input_a = ng.Transpose(input_a)

    if trans_b:
        input_b = ng.Transpose(input_b)

    input_a, input_b = cast_axes_for_matmul(input_a, input_b)
    a_dot_b = ng.dot(input_a, input_b)
    a_dot_b = cast_to_pos_axes(a_dot_b)
    return alpha * a_dot_b + beta * input_c
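
The ONNX Gemm contract the importer reproduces, as a NumPy sketch (a hypothetical gemm_reference for comparison only; it leans on NumPy's own broadcasting for C):

import numpy as np

def gemm_reference(a, b, c, alpha=1.0, beta=1.0, trans_a=False, trans_b=False):
    # Y = alpha * (A @ B) + beta * C, matching the formula above
    a = a.T if trans_a else a
    b = b.T if trans_b else b
    return alpha * (a @ b) + beta * c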
Example #4
def create_loss_and_learner(model,
                            labels,
                            learning_rate,
                            momentum_coef=0.0,
                            wdecay=0.0,
                            nesterov=False,
                            gradient_clip_norm=None,
                            gradient_clip_value=None):
    """
    Auxiliary function to create the loss function (softmax with cross-entropy)
    and a trainer using stochastic gradient descent with momentum.

    Arguments:
        model - imported model
        labels - placeholder for one-hot labels array
        learning_rate - learning rate for trainer
        momentum_coef - coefficient of momentum (default 0.0)
        wdecay - amount of weight decay (default 0.0)
        nesterov - use Nesterov accelerated gradient (default False)
        gradient_clip_norm - target gradient norm (default None)
        gradient_clip_value - value to element-wise clip gradients (default None)

    Returns:
        Loss function (mean over the batch), evaluated after the optimizer update
    """
    if model.axes.lengths != labels.axes.lengths:
        labels = ng.Transpose(labels)
    assert model.axes.lengths == labels.axes.lengths
    model = ng.cast_axes(model, axes=labels.axes)

    loss = ng.cross_entropy_multi(ng.softmax(model), labels)
    optimizer = GradientDescentMomentum(learning_rate, momentum_coef, wdecay,
                                        gradient_clip_norm,
                                        gradient_clip_value, nesterov)
    return ng.sequential([optimizer(loss), ng.mean(loss, out_axes=())])
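
For reference, a sketch of the per-parameter update rule behind a GradientDescentMomentum-style optimizer (classic and Nesterov momentum with L2 weight decay; gradient clipping omitted, and the library's exact formulation may differ):

import numpy as np

def sgd_momentum_step(w, grad, velocity, lr, momentum=0.0, wdecay=0.0,
                      nesterov=False):
    # one momentum-SGD update with weight decay, for reference
    grad = grad + wdecay * w
    velocity = momentum * velocity - lr * grad
    if nesterov:
        w = w + momentum * velocity - lr * grad
    else:
        w = w + velocity
    return w, velocity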
Example #5
def cross_entropy_with_softmax(model, labels):
    """
    Auxiliary function to add softmax with cross-entropy (the loss function)
    to an imported model for training.

    Arguments:
        model - imported model
        labels - placeholder for one-hot labels array

    Returns:
        Loss function (mean over the batch)
    """
    if model.axes.lengths != labels.axes.lengths:
        model = ng.Transpose(model)
    assert model.axes.lengths == labels.axes.lengths
    model = ng.cast_axes(model, axes=labels.axes)

    loss = ng.cross_entropy_multi(ng.softmax(model), labels)
    return ng.mean(loss, out_axes=())
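
The same loss in NumPy terms, assuming a (classes, batch) layout (a sketch; the actual axis order depends on the imported model):

import numpy as np

def softmax_xent_reference(logits, onehot):
    # mean softmax cross-entropy over the batch, mirroring the loss above
    e = np.exp(logits - logits.max(axis=0, keepdims=True))
    p = e / e.sum(axis=0, keepdims=True)
    return float(np.mean(-np.sum(onehot * np.log(p), axis=0)))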
Example #6
def Transpose(onnx_node, ng_inputs):  # type: (NodeWrapper, List[TensorOp]) -> Op
    """Transpose the input tensor similar to numpy.transpose.

    By default, reverse the dimensions; if the `perm` attribute is specified,
    permute the axes according to the values given.
    """
    data = ng_inputs[0]
    permute_axes = onnx_node.get_attribute_value('perm')

    if permute_axes:
        input_template = ''.join(ascii_letters[i] for i in range(len(data.axes)))
        output_template = ''.join(ascii_letters[i] for i in permute_axes)
        ng_op = reorder_axes(data, input_template, output_template)
    else:
        ng_op = ng.Transpose(data)

    return cast_to_pos_axes(ng_op)
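
The template strings are just one letter per axis; for example, with a 3-D input and a hypothetical perm = [2, 0, 1], reorder_axes is asked to map 'abc' to 'cab':

from string import ascii_letters

ndim, perm = 3, [2, 0, 1]  # hypothetical values
input_template = ''.join(ascii_letters[i] for i in range(ndim))  # 'abc'
output_template = ''.join(ascii_letters[i] for i in perm)        # 'cab'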
Example #7
    def CrossEntropyWithSoftmax(self, cntk_op, inputs):
        """
        Computes the softmax cross entropy between inputs[0] and inputs[1].

        Arguments:
            cntk_op: CNTK operation to be imported.
            inputs: List of inputs to this node.

        Returns:
            An ngraph Op.
        """
        cast_0, cast_1 = squeeze_axes(inputs)

        if cast_0.axes.lengths != cast_1.axes.lengths:
            cast_0 = ng.Transpose(cast_0)
        assert cast_0.axes.lengths == cast_1.axes.lengths

        cast_0 = ng.cast_axes(cast_0, axes=cast_1.axes)
        loss = ng.cross_entropy_multi(ng.softmax(cast_0), cast_1)

        return ng.mean(loss, out_axes=()).named(cntk_op.uid)