def test_copy_op_attrs(self):
        """copy_op_attrs should only copy attrs supported by the target node"""
        # copy_op_attrs is used to transfer attrs from a fused op node
        # (e.g. _FusedConv2D) to a standalone op (e.g. Conv2D)
        # any additional attrs of the fused op need to be ignored
        fused_op_str = '{"name":"model/conv2d/BiasAdd",'\
            + '"op":"_FusedConv2D","input":["input",'\
            + '"model/conv2d/Conv2D/ReadVariableOp",'\
            + '"model/conv2d/BiasAdd/ReadVariableOp",'\
            + '"model/p_re_lu/Neg"],"device":"/device:CPU:0",' \
            + '"attr":{"dilations":{"list":{"i":["1","1","1","1"]}},'\
            + '"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},'\
            + '"strides":{"list":{"i":["1","1","1","1"]}},'\
            + '"use_cudnn_on_gpu":{"b":true},'\
            + '"explicit_paddings":{"list":{}},'\
            + '"num_args":{"i":"2"},"epsilon":{"f":0},'\
            + '"padding":{"s":"VkFMSUQ="},'\
            + '"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UHJlbHU="]}}}}'
        fused_op = testutils.node_proto_from_json(fused_op_str)
        node = rewrite.make_op_node('Conv2D', fused_op.input[0:2])
        rewrite.copy_op_attrs(source=fused_op, target=node)

        op_def = rewrite.get_op_def(node.op)
        allowed = {attr.name for attr in op_def.attr}
        forbidden = any(attr not in allowed for attr in node.attr)

        self.assertFalse(forbidden)
        # spot-check a few of the expected attributes
        self.assertTrue('padding' in node.attr)
        self.assertTrue('strides' in node.attr)
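For reference, the behaviour this test exercises can be pictured as a minimal sketch (an assumption about the approach, not the library's actual implementation):

def copy_op_attrs_sketch(source, target):
    # collect the attr names the target op legally supports
    op_def = rewrite.get_op_def(target.op)
    allowed = {attr.name for attr in op_def.attr}
    # copy only supported attrs; fused-op extras such as 'fused_ops',
    # 'num_args', and 'epsilon' are dropped
    for name, value in source.attr.items():
        if name in allowed:
            target.attr[name].CopyFrom(value)
    return target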
Example 2
def _split_fused_op(node: util.NodeDef, input_node_map: util.NameToNode,
                    _) -> util.NodeList:
    # Possible fused Conv2D patterns are:
    # • Conv2D + BiasAdd
    # • Conv2D + BiasAdd + <Activation>
    # • Conv2D + FusedBatchNorm + <Activation>
    # • Conv2D + Squeeze + BiasAdd
    #
    # Fused MatMul only has one pattern:
    # • MatMul + BiasAdd + <Activation>
    #
    # FusedBatchNorm and Squeeze are not relevant for inference and are thus
    # never present in (optimised) frozen graphs generated by the tfjs
    # converter. This leaves Conv2D|MatMul + BiasAdd + <Activation> as the
    # only remaining variant.
    #
    # For compatibility reasons with quantised TFLite models, we optionally
    # split Conv2D + BiasAdd as well.
    #
    # We return [Conv2D|MatMul, BiasAdd|BiasAddV1, <Activation>].
    # Unsupported <Activation> nodes are dealt with in a separate step.
    fused_op_name = node.op[6:]  # strip the '_Fused' prefix
    fused_ops = [s.decode('utf-8') for s in node.attr['fused_ops'].list.s]
    inputs = list(node.input)
    names_used = set()

    def node_name(node_index):
        name = generate_name_from(inputs[node_index], input_node_map)
        if name in names_used:
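            # append the fused-op name to disambiguate; inputs 0..1 feed the
            # op itself, so input node_index maps to fused_ops[node_index - 2]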
            name = generate_name_from(name,
                                      input_node_map,
                                      suffix=fused_ops[node_index - 2])
        names_used.add(name)
        return name

    fused_op = util.make_op_node(fused_op_name, inputs[0:2], node_name(1))
    fused_op = util.copy_op_attrs(source=node, target=fused_op)
    bias_add = util.make_op_node(fused_ops[0], [fused_op, inputs[2]],
                                 node_name(2))
    bias_add = util.copy_op_attrs(source=node, target=bias_add)

    have_activation = len(fused_ops) > 1
    if have_activation:
        # PReLU has an extra alpha input; derive the name from it if present,
        # otherwise reuse the fused node's original name
        name = node_name(3) if len(inputs) > 3 else node.name
        activation = util.make_op_node(fused_ops[1], [bias_add] + inputs[3:],
                                       name)
        return [fused_op, bias_add, activation]
    else:
        return [fused_op, bias_add]
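A hypothetical invocation (illustrative only; graph_def is assumed to be an optimised frozen graph containing a _FusedConv2D node):

input_node_map = {n.name: n for n in graph_def.node}
fused = next(n for n in graph_def.node if n.op == '_FusedConv2D')
for replacement in _split_fused_op(fused, input_node_map, None):
    print(replacement.op, replacement.name)  # e.g. Conv2D, BiasAdd, Prelu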
def _split_fused_depthwise(node: util.NodeDef, input_node_map: util.NameToNode,
                           weight_mods: util.WeightModifiers) -> util.NodeList:
    """Decompose fused op into DepthwiseConv2dNative + BiasAdd [+ Activation]
    """
    fused_ops = [s.decode('utf-8') for s in node.attr['fused_ops'].list.s]
    inputs = node.input
    names_used = set()

    def node_name(node_index):
        """Return unique node names for sub-operations by appending fused-op"""
        i = min(node_index, len(inputs)-1)  # PReLU has 4 inputs, others only 3
        name = generate_name_from(inputs[i], input_node_map)
        if name in names_used:
            name = generate_name_from(name, input_node_map,
                                      suffix=fused_ops[node_index-2])
        names_used.add(name)
        return name

    op = 'DepthwiseConv2dNative'
    depthwise = util.make_op_node(op, inputs[0:2], node_name(1))
    depthwise = util.copy_op_attrs(source=node, target=depthwise)
    op = fused_ops[0]
    bias_add = util.make_op_node(op, [depthwise, inputs[2]], node_name(2))
    bias_add = util.copy_op_attrs(source=node, target=bias_add)
    node_list = [depthwise, bias_add]
    if len(fused_ops) > 1:
        # we have an activation function
        op = fused_ops[1]
        input_nodes = [bias_add] + inputs[3:]
        if util.get_op_def(op) is None:
            # unsupported activation function - just copy type attribute
            dtype = depthwise.attr['T'].type
            activation = util.make_op_node(op, input_nodes, node_name(3),
                                           dtype)
        else:
            # supported activation function - copy applicable attributes
            activation = util.make_op_node(op, input_nodes, node_name(3))
            activation = util.copy_op_attrs(source=node, target=activation)
        node_list.append(activation)
    return node_list
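For illustration, a fused depthwise node whose fused_ops decode to ['BiasAdd', 'Relu6'] would yield the following ops (sketch; node, input_node_map, and weight_mods are assumed to come from the surrounding rewrite pass):

nodes = _split_fused_depthwise(node, input_node_map, weight_mods)
print([n.op for n in nodes])
# ['DepthwiseConv2dNative', 'BiasAdd', 'Relu6']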
Example 4
def _split_fused_op(node: util.NodeDef, input_node_map: util.NameToNode,
                    _) -> util.NodeList:
    # Possible fused Conv2D patterns are:
    # • Conv2D + BiasAdd
    # • Conv2D + BiasAdd + <Activation>
    # • Conv2D + FusedBatchNorm + <Activation>
    # • Conv2D + Squeeze + BiasAdd
    #
    # Fused MatMul only has one pattern:
    # • MatMul + BiasAdd + <Activation>
    #
    # FusedBatchNorm and Squeeze are not relevant for inference and are thus
    # never present in (optimised) frozen graphs generated by the tfjs
    # converter. This leaves Conv2D|MatMul + BiasAdd + <Activation> as the
    # only remaining variant, since Conv2D + BiasAdd doesn't need to be
    # split.
    #
    # We return [Conv2D|MatMul, BiasAdd|BiasAddV1, <Activation>].
    # Unsupported <Activation> nodes are dealt with in a separate step.
    fused_op_name = node.op[6:]  # strip the '_Fused' prefix
    fused_ops = [s.decode('utf-8') for s in node.attr['fused_ops'].list.s]
    inputs = list(node.input)
    names_used = set()

    def node_name(i):
        name = util.generate_name_from(inputs[i], input_node_map)
        if name in names_used:
            # avoid name collisions by adding the name of the fused operation
            # (the first name is always unique)
            name = util.generate_name_from(name,
                                           input_node_map,
                                           suffix=fused_ops[i - 2])
        names_used.add(name)
        return name

    fused_op = util.make_op_node(fused_op_name, inputs[0:2], node_name(1))
    fused_op = util.copy_op_attrs(source=node, target=fused_op)
    bias_add = util.make_op_node(fused_ops[0], [fused_op, inputs[2]],
                                 node_name(2))
    activation = util.make_op_node(fused_ops[1], [bias_add] + inputs[3:],
                                   node_name(3))
    return [fused_op, bias_add, activation]
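These splitters are presumably dispatched per fused op type; a hypothetical wiring (names are illustrative, not the library's actual registry):

_FUSED_OP_SPLITTERS = {
    '_FusedConv2D': _split_fused_op,
    '_FusedMatMul': _split_fused_op,
    '_FusedDepthwiseConv2dNative': _split_fused_depthwise,
}

def split_node(node, input_node_map, weight_mods):
    # keep the node unchanged if it isn't a known fused op
    splitter = _FUSED_OP_SPLITTERS.get(node.op)
    return splitter(node, input_node_map, weight_mods) if splitter else [node]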