Ejemplo n.º 1
0
    def replacement(m):
        # type: (matcher.Match)->TFOperation
        """Build the single fused TF op (conv with dilation) that replaces the
        matched space_to_batch -> conv -> batch_to_space pattern."""
        stb_attribs = m[stb].attribs
        if m[stb].name.endswith("_nd"):
            block_shape = stb_attribs["block_shape"]
        else:
            # Non-_nd variant carries a scalar block_size; expand it to one
            # entry per padded dimension.
            block_shape = ([stb_attribs["block_size"]]
                           * len(stb_attribs["paddings"]))

        if m[conv].name == "_conv":
            # Forward conv: the in-op padding is decided by whether
            # space_to_batch actually padded anything.
            has_padding = utils.recursive_any(stb_attribs["paddings"],
                                              lambda x: x > 0)
            new_attribs = utils.updated_dict(
                m[conv].attribs,
                dilation=block_shape,
                padding="SAME" if has_padding else "VALID")
        else:
            # Deconv: padding comes from the batch_to_space crops, and the
            # declared output_shape must be rescaled by the block shape.
            has_crops = utils.recursive_any(m[bts].attribs["crops"],
                                            lambda x: x > 0)
            new_attribs = utils.updated_dict(
                m[conv].attribs,
                dilation=block_shape,
                padding="SAME" if has_crops else "VALID",
                output_shape=_apply_block_shape(
                    shape=m[conv].attribs["output_shape"],
                    block_shape=block_shape,
                    data_format=m[conv].attribs["data_format"],
                    crops=m[bts].attribs["crops"]))

        return TFOperation(graph=g,
                           name=m[conv].name,
                           inputs=(m[input], m[filter]),
                           attribs=new_attribs,
                           outputs=m[bts].outputs)
Ejemplo n.º 2
0
def get_paddings(nnefop):
    # type: (NNEFOperation)->_InOpAndOutOfOpPadding
    """Map an NNEF op's padding to TF terms.

    Returns a _InOpAndOutOfOpPadding(in_op, out_of_op) pair, where in_op is
    InOpPadding.SAME or InOpPadding.VALID and out_of_op is either None or an
    explicit per-dimension [(before, after), ...] list that must be applied
    by a separate pad op before the TF operation.
    """
    nnefpadding = nnefop.attribs["padding"]
    nnefborder = nnefop.attribs["border"].lower()

    # Empty padding means "auto" in NNEF; with a constant/ignore border this
    # maps directly onto TF's in-op SAME padding.
    if len(nnefpadding) == 0 and (nnefborder == "constant"
                                  or nnefborder == "ignore"):
        return _InOpAndOutOfOpPadding(InOpPadding.SAME, None)
    # Explicit all-zero padding: plain VALID, nothing to pad outside the op.
    # BUG FIX: the empty-padding ("auto") case must NOT take this branch
    # (recursive_any([]) is False), otherwise auto padding with any other
    # border is silently dropped and the explicit-padding computation below
    # becomes unreachable; require a non-empty padding list here.
    elif len(nnefpadding) != 0 and not utils.recursive_any(
            nnefpadding, lambda x: x > 0):
        return _InOpAndOutOfOpPadding(InOpPadding.VALID, None)
    else:
        if len(nnefpadding) == 0:
            # Auto padding with a border TF cannot express in-op: compute the
            # concrete padding amounts ourselves. NNEF conv tensors are laid
            # out as [batch, channel, spatial...], hence the [2:] slices.
            if nnefop.name == "conv":
                input, filter = tuple(nnefop.inputs)[:2]
                output = nnefop.output

                nnefpadding = calculate_padding(
                    # BUG FIX: slice the tensors' shapes, not the tensor
                    # objects (consistent with filter.shape[2:] below and
                    # nnefop.input.shape in the pooling branch).
                    upscaled_shape=input.shape[2:],
                    downscaled_shape=output.shape[2:],
                    filter_shape=filter.shape[2:],
                    strides=nnefop.attribs["stride"],
                    dilations=nnefop.attribs["dilation"])
            elif nnefop.name == "deconv":
                input, filter = tuple(nnefop.inputs)[:2]
                output = nnefop.output

                # Deconv is the transpose of conv: up/downscaled roles swap.
                nnefpadding = calculate_padding(
                    upscaled_shape=output.shape[2:],
                    downscaled_shape=input.shape[2:],
                    filter_shape=filter.shape[2:],
                    strides=nnefop.attribs["stride"],
                    dilations=nnefop.attribs["dilation"])
            else:
                # Pooling-like ops pad over the full shape with a window size.
                nnefpadding = calculate_padding(
                    upscaled_shape=nnefop.input.shape,
                    downscaled_shape=nnefop.output.shape,
                    filter_shape=nnefop.attribs["size"],
                    strides=nnefop.attribs["stride"],
                    dilations=nnefop.attribs["dilation"])

        if utils.recursive_any(nnefpadding, lambda x: x > 0):
            if nnefop.name in ["conv", "deconv"]:
                # Conv paddings cover only spatial dims; prepend zero padding
                # for the batch and channel dimensions.
                return _InOpAndOutOfOpPadding(InOpPadding.VALID,
                                              [(0, 0), (0, 0)] + nnefpadding)
            else:
                return _InOpAndOutOfOpPadding(InOpPadding.VALID, nnefpadding)
        else:
            return _InOpAndOutOfOpPadding(InOpPadding.VALID, None)
Ejemplo n.º 3
0
def _ensure_tensor(g, value, op_name):
    # type: (TFGraph, typing.Any, str)->typing.Optional[TFTensor]
    """Coerce `value` into a TFTensor of graph `g` (None passes through).

    A 1-element list/tuple of tensors becomes an expand_dims of that tensor;
    any other plain Python value becomes a constant tensor.
    """
    if value is None or isinstance(value, TFTensor):
        return value

    is_singleton_tensor_seq = (isinstance(value, (list, tuple))
                               and len(value) == 1
                               and isinstance(value[0], TFTensor))
    if is_singleton_tensor_seq:
        # [tensor] -> tf.expand_dims(tensor, axis=0), adding a leading dim.
        source = value[0]
        expanded = TFTensor(graph=g,
                            name=None,
                            shape=[1] + source.shape,
                            dtype=source.dtype)
        TFOperation(graph=g,
                    name="tf.expand_dims",
                    inputs=source,
                    attribs=dict(axis=0),
                    outputs=expanded)
        return expanded

    # Plain (possibly nested) Python value: turn it into a constant tensor.
    # Tensors buried inside a longer sequence indicate a proto declaration
    # error upstream, not something we can convert.
    assert not utils.recursive_any(value, lambda x: isinstance(x, TFTensor)), \
        "Missing [] after T in op proto of '{}'?".format(op_name)
    arr = np.array(value)
    if str(arr.dtype).startswith('int'):
        # NOTE(review): integer constants are deliberately rebuilt as
        # float32 — presumably to match TF's default float dtype; confirm.
        arr = np.array(value, dtype=np.float32)
    return TFTensor(graph=g,
                    name=None,
                    shape=list(arr.shape),
                    dtype=str(arr.dtype),
                    data=arr.tolist())
Ejemplo n.º 4
0
    def action(m):
        # type: (matcher.Match)->None
        """Fuse the matched space_to_batch into the conv-grad-filter op,
        then unlink and drop the original grad op from the graph."""
        stb_attribs = m[stb1].attribs
        if m[stb1].name.endswith("_nd"):
            block_shape = stb_attribs["block_shape"]
        else:
            # Scalar block_size: expand to one entry per padded dimension.
            block_shape = ([stb_attribs["block_size"]]
                           * len(stb_attribs["paddings"]))

        has_padding = utils.recursive_any(stb_attribs["paddings"],
                                          lambda x: x > 0)

        TFOperation(graph=g,
                    name=m[cgf].name,
                    inputs=(m[orig_input], m[output_grad]),
                    attribs=utils.updated_dict(
                        m[cgf].attribs,
                        dilation=block_shape,
                        padding="SAME" if has_padding else "VALID"),
                    outputs=m[cgf].outputs)
        g.remove_operation(m[cgf], unlink=True)
Ejemplo n.º 5
0
    def __init__(
        self,
        graph,  # type: _GraphT
        name=None,  # type: typing.Optional[str]
        inputs=None,  # type: typing.Union[None, Tensor, _TensorListOrTuple]
        outputs=None,  # type: typing.Union[None, Tensor, _TensorListOrTuple]
        attribs=None  # type: typing.Dict[str, typing.Any]
    ):
        # type: (...)->None
        """Create an operation in `graph`, validating `name` and `attribs`."""
        super(BaseOperation, self).__init__(graph, inputs, outputs)

        assert name is None or isinstance(name, str)
        if attribs is not None:
            assert isinstance(attribs, dict)
            # Attribute keys must be strings; values must not contain
            # Tensors, even nested inside lists/dicts.
            assert all(isinstance(key, str) for key in six.iterkeys(attribs))
            assert not utils.recursive_any(attribs,
                                           lambda x: isinstance(x, Tensor))

        self.name = name  # type: typing.Optional[str]
        # Fresh dict when not given, so instances never share attrib storage.
        self.attribs = {} if attribs is None else attribs  # type: typing.Dict[str, typing.Any]
Ejemplo n.º 6
0
def _unify_shape(shape):
    """Return `shape` unchanged if fully known, else None.

    A shape containing any None (even in nested lists) counts as unknown;
    a known shape must consist solely of ints.
    """
    if utils.recursive_any(shape, lambda dim: dim is None):
        return None
    assert all(isinstance(dim, int) for dim in shape)
    return shape
Ejemplo n.º 7
0
def _eliminate_identities(invocations):
    """Drop invocations whose tensor result is just one of their own args
    (identity ops that add nothing to the trace)."""
    def _is_identity(inv):
        # Identity check is by object identity (`is`), not value equality.
        return (isinstance(inv.result, tf.Tensor)
                and utils.recursive_any(inv.args,
                                        lambda x: x is inv.result))

    return [inv for inv in invocations if not _is_identity(inv)]