from mxnet import symbol


def _normalize_sequence(length, inputs, layout, merge, in_layout=None):
    assert inputs is not None, \
        "unroll(inputs=None) has been deprecated. " \
        "Please create input variables outside unroll."

    axis = layout.find('T')
    in_axis = in_layout.find('T') if in_layout is not None else axis
    if isinstance(inputs, symbol.Symbol):
        if merge is False:
            assert len(inputs.list_outputs()) == 1, \
                "unroll doesn't allow grouped symbol as input. Please convert " \
                "to list with list(inputs) first or let unroll handle splitting."
            inputs = list(
                symbol.split(inputs,
                             axis=in_axis,
                             num_outputs=length,
                             squeeze_axis=1))
    else:
        assert length is None or len(inputs) == length
        if merge is True:
            inputs = [symbol.expand_dims(i, axis=axis) for i in inputs]
            inputs = symbol.Concat(*inputs, dim=axis)
            in_axis = axis

    if isinstance(inputs, symbol.Symbol) and axis != in_axis:
        inputs = symbol.swapaxes(inputs, dim1=axis, dim2=in_axis)

    return inputs, axis
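
# Usage sketch (illustrative, not part of the original snippet): merging three
# per-step symbols into a single "NTC" tensor, then splitting a tensor back
# into per-step outputs. The variable names below are assumptions.
#
#   steps = [symbol.Variable('s%d' % i, shape=(2, 4)) for i in range(3)]
#   merged, axis = _normalize_sequence(3, steps, 'NTC', merge=True)
#   # merged is one symbol with the time axis at position axis == 1
#   per_step, _ = _normalize_sequence(3, merged, 'NTC', merge=False)
#   # per_step is a list of 3 symbols, one per time step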
Example #2
    def add_loss(self, splits: sym.Variable):
        """Add loss functions.

        Below, we split the network output to compute losses for the
        following:

            1. Bounding box attributes
            2. Class probabilities
            3. IOUs as "confidence scores"

        The split-and-concat calls below stand in for a reshape: the output
        is split along a dimension into multiple chunks, which are then
        restacked in a consistent order.

        Due to a quirk in MXNet, we create a placeholder label_score. The true
        labels are actually the IOUs computed from pred_box and label_box,
        which are then compared with pred_score.
        """
        num_splits = int(NUM_OUT_CHANNELS / ANCHORS_PER_GRID)
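        # Worked example with hypothetical values (not from the original
        # project): if ANCHORS_PER_GRID were 9, NUM_BBOX_ATTRS 4, NUM_CLASSES 3
        # and NUM_OUT_CHANNELS 9 * (4 + 3 + 1) = 72, num_splits would be 8:
        # splits[:4] carry box attributes, splits[4:7] class scores, and
        # splits[7] the confidence score.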
        splits = list(sym.split(splits, num_outputs=num_splits))

        # Compute loss for bounding box
        pred_box = sym.concat(*splits[:NUM_BBOX_ATTRS])
        loss_box = mx.sym.Custom(
            data=pred_box,
            label=self.label_box,
            op_type='LinearRegressionOutputWithMask')

        # Compute loss for class probabilities
        cidx = NUM_BBOX_ATTRS + NUM_CLASSES
        pred_class = reformat(sym.concat(*splits[NUM_BBOX_ATTRS:cidx]), pkg=sym)
        label_class = reformat(self.label_class, pkg=sym)
        loss_class = sym.SoftmaxOutput(data=pred_class, label=label_class)

        # Compute loss for confidence scores - see doc above for explanation
        pred_score = splits[cidx]
        loss_iou = mx.sym.Custom(
            data=pred_score,
            label=sym.concat(self.label_score, pred_box, self.label_box),
            op_type='IOURegressionOutputWithMask')

        return mx.sym.Group([loss_box, loss_class, loss_iou])
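
# Usage sketch (illustrative; the class name, variable names, and label names
# below are assumptions, not taken from the original snippet):
#
#   model = SqueezeDetLoss()
#   loss = model.add_loss(sym.Variable('conv_out'))
#   # `loss` is a grouped symbol with one output per loss term; it can be
#   # passed to mx.mod.Module(symbol=loss, ...) as the training symbol.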
Example #3
from mxnet import ndarray, symbol
from mxnet.base import _as_list as as_list

# Tensor types accepted below (mirrors mxnet/gluon/rnn/rnn_cell.py).
tensor_types = (symbol.Symbol, ndarray.NDArray)


def format_sequence(length, inputs, layout, merge, in_layout=None):
    """
    `Original Code <https://github.com/apache/incubator-mxnet/blob/master/python/mxnet/gluon/rnn/rnn_cell.py#L52>`_

    Parameters
    ----------
    length : int or None
        Expected number of time steps; used to validate or split ``inputs``.
    inputs : Symbol, NDArray, or list of Symbol/NDArray
        The input sequence, either as a single tensor or as per-step tensors.
    layout : str
        Output layout, e.g. ``"NTC"``; the position of ``'T'`` marks the time
        axis and ``'N'`` the batch axis.
    merge : bool or None
        If False, return a list of per-step tensors; if True, return a single
        merged tensor; if None, leave ``inputs`` in its given form.
    in_layout : str, optional
        Layout of ``inputs`` if it differs from ``layout``.

    Returns
    -------
    inputs : Symbol, NDArray, or list
        The normalized sequence.
    axis : int
        Index of the time axis in ``layout``.
    F : module
        ``mxnet.symbol`` or ``mxnet.ndarray``, matching the input type.
    batch_size : int
        Batch size inferred from NDArray inputs (0 for symbolic inputs).

    Examples
    --------
    >>> import mxnet.ndarray as nd
    >>> seq = [[[0] * 4, [2] * 4, [4] * 4], [[1] * 4, [3] * 4, [5] * 4]]
    >>> seq1, axis, _, batch_size = format_sequence(3, nd.array(seq), "NTC", False)
    >>> seq1   # doctest: +NORMALIZE_WHITESPACE
    [
    [[0. 0. 0. 0.]
     [1. 1. 1. 1.]]
    <NDArray 2x4 @cpu(0)>,
    [[2. 2. 2. 2.]
     [3. 3. 3. 3.]]
    <NDArray 2x4 @cpu(0)>,
    [[4. 4. 4. 4.]
     [5. 5. 5. 5.]]
    <NDArray 2x4 @cpu(0)>]
    >>> axis
    1
    >>> batch_size
    2
    >>> seq2, _, _, _ = format_sequence(3, nd.array(seq), "NTC", True)
    >>> seq2   # doctest: +NORMALIZE_WHITESPACE
    <BLANKLINE>
    [[[0. 0. 0. 0.]
      [2. 2. 2. 2.]
      [4. 4. 4. 4.]]
    <BLANKLINE>
     [[1. 1. 1. 1.]
      [3. 3. 3. 3.]
      [5. 5. 5. 5.]]]
    <NDArray 2x3x4 @cpu(0)>
    >>> import mxnet.symbol as sym
    >>> seq3, _, _, _ = format_sequence(3, sym.Variable("s", shape=(2, 3, 4)), "NTC", False)
    >>> seq3
    [<Symbol split0>, <Symbol split0>, <Symbol split0>]
    >>> seq4 = [nd.array([[0] * 4, [1] * 4]), nd.array([[2] * 4, [3] * 4]), nd.array([[4] * 4, [5] * 4])]
    >>> seq5, _, _, _ = format_sequence(3, seq4, "NTC", True)
    >>> seq5   # doctest: +NORMALIZE_WHITESPACE
    <BLANKLINE>
    [[[0. 0. 0. 0.]
      [2. 2. 2. 2.]
      [4. 4. 4. 4.]]
    <BLANKLINE>
     [[1. 1. 1. 1.]
      [3. 3. 3. 3.]
      [5. 5. 5. 5.]]]
    <NDArray 2x3x4 @cpu(0)>
    >>> seq6 = [sym.Variable("1", shape=(2, 4)), sym.Variable("2", shape=(2, 4)), sym.Variable("3", shape=(2, 4))]
    >>> seq7, _, _, _ = format_sequence(3, seq6, "NTC", True)
    >>> seq7
    <Symbol stack0>
    """
    assert inputs is not None, \
        "unroll(inputs=None) has been deprecated. " \
        "Please create input variables outside unroll."

    axis = layout.find('T')
    batch_axis = layout.find('N')
    batch_size = 0
    in_axis = in_layout.find('T') if in_layout is not None else axis
    if isinstance(inputs, symbol.Symbol):
        F = symbol
        if merge is False:
            assert len(inputs.list_outputs()) == 1, \
                "unroll doesn't allow grouped symbol as input. Please convert " \
                "to list with list(inputs) first or let unroll handle splitting."
            inputs = list(
                symbol.split(inputs,
                             axis=in_axis,
                             num_outputs=length,
                             squeeze_axis=1))
    elif isinstance(inputs, ndarray.NDArray):
        F = ndarray
        batch_size = inputs.shape[batch_axis]
        if merge is False:
            assert length is None or length == inputs.shape[in_axis]
            inputs = as_list(
                ndarray.split(inputs,
                              axis=in_axis,
                              num_outputs=inputs.shape[in_axis],
                              squeeze_axis=1))
    else:
        assert length is None or len(inputs) == length
        if isinstance(inputs[0], symbol.Symbol):
            F = symbol
        else:
            F = ndarray
            batch_size = inputs[0].shape[batch_axis]
        if merge is True:
            inputs = F.stack(*inputs, axis=axis)
            in_axis = axis

    if isinstance(inputs,
                  tensor_types) and axis != in_axis:  # pragma: no cover
        # todo: find the test case
        inputs = F.swapaxes(inputs, dim1=axis, dim2=in_axis)

    return inputs, axis, F, batch_size
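
# Usage sketch (illustrative; the per-step computation below is an assumption,
# not part of the original function): format_sequence lets unrolling code stay
# agnostic to symbol vs. ndarray inputs, since the returned module `F` is
# either mxnet.symbol or mxnet.ndarray.
#
#   seq, axis, F, batch_size = format_sequence(3, inputs, 'NTC', merge=False)
#   outputs = [F.relu(step) for step in seq]     # any per-step computation
#   merged, _, _, _ = format_sequence(3, outputs, 'NTC', merge=True)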