Example #1
def _split_prelu(node: util.NodeDef, input_node_map: util.NameToNode,
                 weight_modifiers: util.WeightModifiers) -> util.NodeList:
    # Prelu activation is not supported by TF, so this function generates an
    # equivalent formulation:
    # f(x) = alpha*x if x < 0, x if x >= 0
    #      = pos(x)+neg(x), where pos(x)=relu(x) and neg(x)=-alpha*relu(-x)
    #
    # We return the sub-graph
    # [pos=Relu(x), Neg(x), Relu(-x), neg=Mul(-alpha, Relu(-x)), Add(pos, neg)]
    inputs = list(node.input)

    def _get_name(suffix):
        return generate_name_from(node.name, input_node_map, suffix=suffix)

    # node names in the sub-graph must be kept unique manually, since we
    # cannot register them in input_node_map before the nodes exist
    pos = util.make_op_node('Relu', inputs[0], _get_name('Relu'))
    neg_x = util.make_op_node('Neg', inputs[0], _get_name('Neg'))
    neg_relu = util.make_op_node('Relu', neg_x, _get_name('Relu_1'))
    neg_alpha = inputs[1]
    neg = util.make_op_node('Mul', [neg_alpha, neg_relu], _get_name('Mul'))
    add = util.make_op_node('Add', [pos, neg], _get_name('Add'))
    # convert alpha to -alpha by registering a weight modifier function
    weight_modifiers[neg_alpha] = lambda tensor: -tensor
    return [pos, neg_x, neg_relu, neg, add]
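A quick NumPy check (not part of the library; names and values are illustrative) of the decomposition described in the comments above: prelu(x) equals relu(x) plus (-alpha)*relu(-x).

import numpy as np

x = np.array([-2.0, -0.5, 0.0, 1.5])
alpha = 0.25
prelu = np.where(x < 0, alpha * x, x)                         # reference PReLU
decomposed = np.maximum(x, 0) + (-alpha) * np.maximum(-x, 0)  # pos(x) + neg(x)
assert np.allclose(prelu, decomposed)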
 def test_generate_name_from_given_duplicate_name(self):
     """generate_name_from should return new name if base name is taken"""
     node_map = {}
     # case #1: name without slashes -> return name + count
     base_name = 'name_without_slashes'
     node_map[base_name] = rewrite.make_op_node('Neg', 'x', name='_')
     name = rewrite.generate_name_from(base_name, node_map)
     self.assertEqual(name, base_name + '_1')
     # case #2: count needs to increment
     node_map[name] = rewrite.make_op_node('Neg', 'y', name='_')
     name = rewrite.generate_name_from(base_name, node_map)
     self.assertEqual(name, base_name + '_2')
     # case #3: name + suffix -> return name + suffix + count
     suffix = 'suffix'
     node_map[base_name + '/' + suffix] = rewrite.make_op_node('Inv',
                                                               'x',
                                                               name='_')
     name = rewrite.generate_name_from(base_name, node_map, suffix=suffix)
     self.assertEqual(name, f'{base_name}/{suffix}_1')
     # case #4: count needs to increment
     node_map[name] = rewrite.make_op_node('Inv', 'y', name='_')
     name = rewrite.generate_name_from(base_name, node_map, suffix=suffix)
     self.assertEqual(name, f'{base_name}/{suffix}_2')
     # case #5: slashes in name, name in map -> return name + count
     base_name = 'name_without_slashes/suffix'
     name = rewrite.generate_name_from(base_name, node_map)
     self.assertEqual(name, 'name_without_slashes_2')
     # case #6: slashes in name, name + suffix in map -> name+suffix+count
     base_name = 'name_without_slashes/suffix'
     name = rewrite.generate_name_from(base_name, node_map, suffix=suffix)
     self.assertEqual(name, f'{base_name}_2')
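The counting behaviour exercised by the assertions above can be illustrated with a naive stand-alone sketch; this is not the library implementation and it ignores the slash and suffix handling covered by the other cases.

def uniquify(name, taken):
    """Append '_<count>' until the name is no longer taken."""
    if name not in taken:
        return name
    count = 1
    while f'{name}_{count}' in taken:
        count += 1
    return f'{name}_{count}'

assert uniquify('conv', {'conv'}) == 'conv_1'
assert uniquify('conv', {'conv', 'conv_1'}) == 'conv_2'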
 def test_make_op_node_given_mixed_inputs(self):
     """make_op_node should support list of mixed types for inputs"""
     node_a = rewrite.make_op_node('Neg', ['x'], name='node_a')
     node = rewrite.make_op_node('Add', [node_a, 'node_b'])
     self.assertEqual(node.name, 'Add')
     self.assertEqual(node.op, 'Add')
     self.assertEqual(node.input, ['node_a', 'node_b'])
 def test_get_input_node_map_given_duplicates(self):
     """get_input_node_map should raise ValueError given duplicate names"""
     graph_def = testutils.get_sample_graph_def()
     relu = _get_node_by_name(graph_def, 'model/conv3/Relu')
     neg = rewrite.make_op_node('Neg', list(relu.input), name='kate')
     dup = rewrite.make_op_node('Exp', neg, name='model/conv3/BiasAdd')
     replace_nodes = {
         'model/conv3/Relu': [neg, dup],
     }
     updated_graph = rewrite.update_graph_def(graph_def, replace_nodes, {})
     self.assertRaises(ValueError,
                       lambda: rewrite.get_input_node_map(updated_graph))
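The duplicate-name check the test expects to raise can be sketched as follows; this is assumed behaviour for illustration, not the actual get_input_node_map implementation.

def build_node_map(nodes):
    """Map node names to nodes, rejecting duplicate names."""
    mapping = {}
    for node in nodes:
        if node.name in mapping:
            raise ValueError(f'duplicate node name: {node.name}')
        mapping[node.name] = node
    return mapping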
Example #5
def _split_fused_op(node: util.NodeDef, input_node_map: util.NameToNode,
                    _) -> util.NodeList:
    # Possible fused Conv2D patterns are:
    # • Conv2D + BiasAdd
    # • Conv2D + BiasAdd + <Activation>
    # • Conv2D + FusedBatchNorm + <Activation>
    # • Conv2D + Squeeze + BiasAdd
    #
    # Fused MatMul only has one pattern:
    # • MatMul + BiasAdd + <Activation>
    #
    # FusedBatchNorm and Squeeze are not relevant for inference and thus never
    # appear in (optimised) frozen graphs generated by the tfjs converter.
    # This leaves us with Conv2D|MatMul + BiasAdd + <Activation> as the only
    # remaining possible variants.
    #
    # For compatibility reasons with quantised TFLite models, we optionally
    # split Conv2D + BiasAdd as well.
    #
    # We return [Conv2D|MatMul, BiasAdd|BiasAddV1, <Activation>].
    # Unsupported <Activation>-nodes will be dealt with in a separate step
    fused_op_name = node.op[6:]  # remove the '_Fused'-prefix
    fused_ops = list(s.decode('utf-8') for s in node.attr['fused_ops'].list.s)
    inputs = list(node.input)
    names_used = set()

    def node_name(node_index):
        name = generate_name_from(inputs[node_index], input_node_map)
        if name in names_used:
            name = generate_name_from(name,
                                      input_node_map,
                                      suffix=fused_ops[node_index - 2])
        names_used.add(name)
        return name

    fused_op = util.make_op_node(fused_op_name, inputs[0:2], node_name(1))
    fused_op = util.copy_op_attrs(source=node, target=fused_op)
    bias_add = util.make_op_node(fused_ops[0], [fused_op, inputs[2]],
                                 node_name(2))
    bias_add = util.copy_op_attrs(source=node, target=bias_add)

    have_activation = len(fused_ops) > 1
    if have_activation:
        # Prelu has an extra (alpha) input, so derive a fresh name from it;
        # all other activations reuse the original node name
        name = node_name(3) if len(inputs) > 3 else node.name
        activation = util.make_op_node(fused_ops[1], [bias_add] + inputs[3:],
                                       name)
        return [fused_op, bias_add, activation]
    else:
        return [fused_op, bias_add]
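A minimal sketch (assuming TensorFlow is installed; the node built below is made up) of how the op name and the 'fused_ops' attribute are read from a _FusedConv2D NodeDef:

from tensorflow.core.framework import node_def_pb2

node = node_def_pb2.NodeDef(name='model/conv/BiasAdd', op='_FusedConv2D')
node.attr['fused_ops'].list.s.extend([b'BiasAdd', b'Relu'])

fused_op_name = node.op[6:]      # strip the '_Fused' prefix -> 'Conv2D'
fused_ops = [s.decode('utf-8') for s in node.attr['fused_ops'].list.s]
print(fused_op_name, fused_ops)  # Conv2D ['BiasAdd', 'Relu']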
 def test_update_graph_def_given_replaced_nodes(self):
     """update_graph_def should replace nodes mapped to new sub-graph"""
     graph_def = testutils.get_sample_graph_def()
     # let's replace the conv1 activation with log-sigmoid
     relu = _get_node_by_name(graph_def, 'model/conv1/Relu')
     neg = rewrite.make_op_node('Neg', list(relu.input), 'model/conv1/Neg')
     exp = rewrite.make_op_node('Exp', neg, 'model/conv1/Exp')
     add = rewrite.make_op_node('Add', [exp, 'one'], 'model/conv1/Add')
     inv = rewrite.make_op_node('Inv', add, 'model/conv1/Inv')
     replace_nodes = {'model/conv1/Relu': [neg, exp, add, inv]}
     updated_graph = rewrite.update_graph_def(graph_def, replace_nodes, {})
     for node_name in replace_nodes.keys():
         self.assertIsNone(_get_node_by_name(updated_graph, node_name))
     for node in list(replace_nodes.values())[0]:
         self.assertIsNotNone(_get_node_by_name(updated_graph, node.name))
    def test_copy_op_attrs(self):
        """copy_op_attrs should only copy attrs supported by the target node"""
        # copy_op_attrs is used to transfer attrs from a fused op node
        # (e.g. _FusedConv2D) to a standalone op (e.g. Conv2D)
        # any additional attrs of the fused op need to be ignored
        fused_op_str = '{"name":"model/conv2d/BiasAdd",'\
            + '"op":"_FusedConv2D","input":["input",'\
            + '"model/conv2d/Conv2D/ReadVariableOp",'\
            + '"model/conv2d/BiasAdd/ReadVariableOp",'\
            + '"model/p_re_lu/Neg"],"device":"/device:CPU:0",' \
            + '"attr":{"dilations":{"list":{"i":["1","1","1","1"]}},'\
            + '"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},'\
            + '"strides":{"list":{"i":["1","1","1","1"]}},'\
            + '"use_cudnn_on_gpu":{"b":true},'\
            + '"explicit_paddings":{"list":{}},'\
            + '"num_args":{"i":"2"},"epsilon":{"f":0},'\
            + '"padding":{"s":"VkFMSUQ="},'\
            + '"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UHJlbHU="]}}}}'
        fused_op = testutils.node_proto_from_json(fused_op_str)
        node = rewrite.make_op_node('Conv2D', fused_op.input[0:2])
        rewrite.copy_op_attrs(source=fused_op, target=node)

        op_def = rewrite.get_op_def(node.op)
        allowed = set(attr.name for attr in op_def.attr)
        forbidden = any(attr for attr in node.attr if attr not in allowed)

        self.assertFalse(forbidden)
        # spot-check some of the expected attributes
        self.assertTrue('padding' in node.attr)
        self.assertTrue('strides' in node.attr)
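The protobuf JSON above stores string attributes base64-encoded; decoding a few of them shows the underlying values:

import base64

for enc in ('TkhXQw==', 'VkFMSUQ=', 'Qmlhc0FkZA==', 'UHJlbHU='):
    print(base64.b64decode(enc).decode('utf-8'))   # NHWC, VALID, BiasAdd, Prelu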
Example #8
def rename_output_nodes(graph_def: GraphDef, name_mapping: dict) -> GraphDef:
    """Rename output nodes - the update is performed in-place.

        Args:
            graph_def: GraphDef proto containing the model
            name_mapping: dict that maps output names to new names

        Returns:
            Updated GraphDef proto - same as input since the update is
            performed in-place.
    """
    output_nodes = get_output_nodes(graph_def)
    _validate_mapping(graph_def, output_nodes, name_mapping)
    nodes_to_rename = name_mapping.keys()
    target_nodes = filter(lambda n: n.name in nodes_to_rename, graph_def.node)
    for node in target_nodes:
        new_name = name_mapping[node.name]
        if node.op == 'Identity':
            # dummy nodes can just be renamed
            node.name = new_name
        else:
            # append an identity node that takes the original output as input
            identity = rewrite.make_op_node('Identity',
                                            node,
                                            name=new_name,
                                            dtype=_dtype(node))
            graph_def.node.append(identity)
    return graph_def
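A small proto-level sketch (made-up node names, assuming TensorFlow is installed) of the non-Identity branch above: the original node keeps its name, and a new Identity node carrying the requested output name is appended to the graph.

from tensorflow.core.framework import graph_pb2, node_def_pb2, types_pb2

graph = graph_pb2.GraphDef()
softmax = graph.node.add(name='model/Softmax', op='Softmax', input=['logits'])

# alias the original output under the new name via an Identity node
alias = node_def_pb2.NodeDef(name='scores', op='Identity', input=[softmax.name])
alias.attr['T'].type = types_pb2.DT_FLOAT
graph.node.append(alias)

print([n.name for n in graph.node])   # ['model/Softmax', 'scores']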
    def test_make_op_node_accepts_dtype(self):
        """make_op_node should accept dtype parameter and default to float32"""
        def dtype(node):
            return rewrite.dtypes.as_dtype(node.attr['T'].type)

        # from default
        node = rewrite.make_op_node('Neg', 'x')
        self.assertEqual(dtype(node), rewrite.dtypes.float32)
        # from dtype
        node = rewrite.make_op_node('Neg', 'x', dtype=rewrite.dtypes.float16)
        self.assertEqual(dtype(node), rewrite.dtypes.float16)
        # from enum
        node = rewrite.make_op_node('Neg', 'x', dtype=6)
        self.assertEqual(dtype(node), rewrite.dtypes.int8)
        # from string
        node = rewrite.make_op_node('Neg', 'x', dtype='half')
        self.assertEqual(dtype(node), rewrite.dtypes.float16)
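The flexibility this test relies on comes from tf.dtypes.as_dtype, which accepts DType objects, DataType enum values and type names alike (assuming a standard TensorFlow install):

import tensorflow as tf

assert tf.dtypes.as_dtype(6) == tf.int8          # DataType enum value DT_INT8
assert tf.dtypes.as_dtype('half') == tf.float16  # type name
assert tf.dtypes.as_dtype(tf.float32) == tf.float32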
Example #10
def _split_fused_op(node: util.NodeDef, input_node_map: util.NameToNode,
                    _) -> util.NodeList:
    # Possible fused Conv2D patterns are:
    # • Conv2D + BiasAdd
    # • Conv2D + BiasAdd + <Activation>
    # • Conv2D + FusedBatchNorm + <Activation>
    # • Conv2D + Squeeze + BiasAdd
    #
    # Fused MatMul only has one pattern:
    # • MatMul + BiasAdd + <Activation>
    #
    # FusedBatchNorm and Squeeze are not relevant for inference and thus never
    # appear in (optimised) frozen graphs generated by the tfjs converter.
    # This leaves us with Conv2D|MatMul + BiasAdd + <Activation> as the only
    # remaining possible variants, since Conv2D + BiasAdd doesn't need to be
    # split.
    #
    # We return [Conv2D|MatMul, BiasAdd|BiasAddV1, <Activation>].
    # Unsupported <Activation>-nodes will be dealt with in a separate step
    fused_op_name = node.op[6:]  # remove the '_Fused'-prefix
    fused_ops = list(s.decode('utf-8') for s in node.attr['fused_ops'].list.s)
    inputs = list(node.input)
    names_used = set()

    def node_name(i):
        name = util.generate_name_from(inputs[i], input_node_map)
        if name in names_used:
            # avoid name collisions by adding the name of the fused operation
            # (the first name is always unique)
            name = util.generate_name_from(name,
                                           input_node_map,
                                           suffix=fused_ops[i - 2])
        names_used.add(name)
        return name

    fused_op = util.make_op_node(fused_op_name, inputs[0:2], node_name(1))
    fused_op = util.copy_op_attrs(source=node, target=fused_op)
    bias_add = util.make_op_node(fused_ops[0], [fused_op, inputs[2]],
                                 node_name(2))
    activation = util.make_op_node(fused_ops[1], [bias_add] + inputs[3:],
                                   node_name(3))
    return [fused_op, bias_add, activation]
 def test_generate_name_from_given_unique_name(self):
     """generate_name_from should return base name if name is unique"""
     # case #1: no slashes in name, name is unique -> return as-is
     node_map = {}
     base_name = 'name_without_slashes'
     name = rewrite.generate_name_from(base_name, node_map)
     self.assertEqual(name, base_name)
     # case #2: no slashes in name, name in map, name + suffix unique
     node_map[base_name] = rewrite.make_op_node('Neg', 'x', name=base_name)
     suffix = 'uniq'
     name = rewrite.generate_name_from(base_name, node_map, suffix=suffix)
     self.assertEqual(name, f'{base_name}/{suffix}')
     # case #3: slashes in name, name with slashes in map -> return 1st part
     base_name = 'name/suffix'
     node_map[base_name] = rewrite.make_op_node('Neg', 'y', name=base_name)
     name = rewrite.generate_name_from(base_name, node_map)
     self.assertEqual(name, 'name')
     # case #4: slashes in name, name in map, name + suffix unique
     base_name = 'name/suffix/op'
     name = rewrite.generate_name_from(base_name, node_map, suffix='uniq')
     self.assertEqual(name, 'name/suffix/uniq')
def _split_fused_depthwise(node: util.NodeDef, input_node_map: util.NameToNode,
                           weight_mods: util.WeightModifiers) -> util.NodeList:
    """Decompose fused op into DepthwiseConv2dNative + BiasAdd [+ Activation]
    """
    fused_ops = list(s.decode('utf-8') for s in node.attr['fused_ops'].list.s)
    inputs = node.input
    names_used = set()

    def node_name(node_index):
        """Return unique node names for sub-operations by appending fused-op"""
        i = min(node_index, len(inputs)-1)  # PReLU has 4 inputs, others only 3
        name = generate_name_from(inputs[i], input_node_map)
        if name in names_used:
            name = generate_name_from(name, input_node_map,
                                      suffix=fused_ops[node_index-2])
        names_used.add(name)
        return name

    op = 'DepthwiseConv2dNative'
    depthwise = util.make_op_node(op, inputs[0:2], node_name(1))
    depthwise = util.copy_op_attrs(source=node, target=depthwise)
    op = fused_ops[0]
    bias_add = util.make_op_node(op, [depthwise, inputs[2]], node_name(2))
    bias_add = util.copy_op_attrs(source=node, target=bias_add)
    node_list = [depthwise, bias_add]
    if len(fused_ops) > 1:
        # we have an activation function
        op = fused_ops[1]
        input_nodes = [bias_add] + inputs[3:]
        if util.get_op_def(op) is None:
            # unsupported activation function - just copy type attribute
            dtype = depthwise.attr['T'].type
            activation = util.make_op_node(op, input_nodes, node_name(3),
                                           dtype)
        else:
            # supported activation function - copy applicable attributes
            activation = util.make_op_node(op, input_nodes, node_name(3))
            activation = util.copy_op_attrs(source=node, target=activation)
        node_list.append(activation)
    return node_list
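The `util.get_op_def(op) is None` check above distinguishes ops known to TensorFlow from unsupported activations. Under the assumption that a recent TF 2.x with `op_def_registry.get` is available, the lookup can be illustrated like this:

from tensorflow.python.framework import op_def_registry

print(op_def_registry.get('Relu') is not None)   # True: registered TF op
print(op_def_registry.get('Prelu') is None)      # True: no such raw TF op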
        def _convert_to_log_sigmoid(node, input_map, modifiers):
            """replace Relu with logarithmic sigmoid 1/(1+exp(-x))"""
            def _get_name(suffix):
                return rewrite.generate_name_from(node.name, input_map,
                                                  f'logSigmoid/{suffix}')

            nonlocal new_name_of_replaced_node
            # -x
            neg = rewrite.make_op_node('Neg',
                                       list(node.input),
                                       name=_get_name('Neg'))
            # exp(-x)
            exp = rewrite.make_op_node('Exp', neg, name=_get_name('Exp'))
            # constant tensor holding "1"
            res = rewrite.make_const_node(np.array([1], dtype=np.float32),
                                          name=_get_name('Var/resource'))
            # variable holding "1"
            one = rewrite.make_op_node('Identity', res, _get_name('Var'))
            # 1+exp(-x)
            add = rewrite.make_op_node('Add', [one, exp], _get_name('Add'))
            # 1/(1+exp(-x))
            inv = rewrite.make_op_node('Inv', add, _get_name('Inv'))
            new_name_of_replaced_node = inv.name  # remember the output name
            return [neg, exp, res, one, add, inv]
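A NumPy sketch (illustrative only) of what the node chain above computes, step by step: Neg, Exp, Add with the constant 1, then Inv.

import numpy as np

x = np.array([-2.0, 0.0, 2.0])
neg = -x                       # Neg
exp = np.exp(neg)              # Exp
add = 1.0 + exp                # Add(one, exp)
inv = 1.0 / add                # Inv
assert np.allclose(inv, 1.0 / (1.0 + np.exp(-x)))   # logistic sigmoid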
 def test_make_op_node_given_input_as_scalar(self):
     """make_op_node should accept scalar input of type string and node"""
     node_from_str = rewrite.make_op_node('Neg', 'x', name='node_a')
     self.assertEqual(node_from_str.input, ['x'])
     node_from_node = rewrite.make_op_node('Neg', node_from_str)
     self.assertEqual(node_from_node.input, ['node_a'])
 def test_make_op_node_given_str_inputs(self):
     """make_op_node should support list of strings for inputs"""
     node = rewrite.make_op_node('Add', ['x', 'y'])
     self.assertEqual(node.name, 'Add')
     self.assertEqual(node.op, 'Add')
     self.assertEqual(node.input, ['x', 'y'])