Example #1
    def find_and_replace_pattern(self, graph: Graph):
        graph.stage = 'front'
        for node_id in graph.nodes(data=False):
            node = Node(graph, node_id)
            inputs = node.get_sorted_inputs()
            outputs = node.get_sorted_outputs()

            in_ports_count = node.in_ports_count if node.has_valid(
                'in_ports_count') else len(inputs)
            out_ports_count = node.out_ports_count if node.has_valid(
                'out_ports_count') else len(outputs)

            if len(outputs) > out_ports_count > 1:
                raise Error("Node {} has more children than it should: "
                            "should be {} but there are {}".format(
                                node_id, out_ports_count, len(outputs)))

            node['_in_ports'] = {}
            node['_out_ports'] = {}
            if in_ports_count is not None:
                for idx in range(in_ports_count):
                    node.add_input_port(idx=idx)

            if out_ports_count is not None:
                for idx in range(out_ports_count):
                    node.add_output_port(idx=idx)

            idx = 0
            for in_node_id, edge_attrs in inputs:
                graph.remove_edge(in_node_id, node_id)
                in_node = Node(graph, in_node_id)
                if len(in_node.out_ports()) == 0:
                    in_node.add_output_port(0)
                in_node.out_port(edge_attrs['out']).connect(node.in_port(idx))
                # need to keep this attribute in edge for correct .mapping file generation and
                # for generation of "names" field in IR
                in_node.out_edge(
                    edge_attrs['out']
                )['fw_tensor_debug_info'] = edge_attrs['fw_tensor_debug_info']
                if idx < in_ports_count - 1:
                    idx += 1

            idx = 0
            for out_node_id, edge_attrs in outputs:
                graph.remove_edge(node_id, out_node_id)
                out_node = Node(graph, out_node_id)
                if len(out_node.in_ports()) == 0:
                    out_node.add_input_port(0)
                node.out_port(idx).connect(out_node.in_port(edge_attrs['in']))
                # need to keep this attribute in edge for correct .mapping file generation and
                # for generation of "names" field in IR
                node.out_edge(idx)['fw_tensor_debug_info'] = edge_attrs[
                    'fw_tensor_debug_info']
                if idx < out_ports_count - 1:
                    idx += 1
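
The same port-restoration idea, reduced to a minimal sketch on a bare networkx MultiDiGraph (this is not the Model Optimizer API; the node names and the 'in'/'out' edge-attribute convention are taken from the snippet above): sorted input edges are renumbered into sequential input ports.

import networkx as nx

g = nx.MultiDiGraph()
# 'in' is a Python keyword, so edge attributes are passed via a dict
g.add_edge('w', 'conv', **{'out': 0, 'in': 1})
g.add_edge('x', 'conv', **{'out': 0, 'in': 0})

# Sort input edges by their 'in' attribute and renumber ports 0..N-1,
# mirroring what get_sorted_inputs() plus the reconnection loop achieve.
inputs = sorted(g.in_edges('conv', data=True), key=lambda e: e[2]['in'])
for idx, (src, dst, attrs) in enumerate(inputs):
    attrs['in'] = idx
print([(src, attrs['in']) for src, _, attrs in inputs])  # [('x', 0), ('w', 1)]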
Example #2
def copy_input_blobs(op: Node, copy_op: Node):
    """
    Function copy input blob data nodes from restored graph to copied one
    :param op: Node from restored graph
    :param copy_op: Node from copied graph
    :return:
    """
    for u, d in op.get_sorted_inputs():
        if 'bin' in d:
            Op.create_and_connect_input_data_node(
                copy_op.graph, copy_op, {
                    'value': op.in_node(d['in']).value,
                    'shape': op.in_node(d['in']).shape
                }, d)
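
A dependency-light sketch of the same filter (plain dicts stand in for MO data nodes; the 'bin', 'in', 'value' and 'shape' keys mirror the snippet above): only input edges tagged with 'bin' carry constant blobs, and only those blobs are duplicated.

import numpy as np

# (source name, edge attrs) pairs, as get_sorted_inputs() would return them
src_inputs = [
    ('data', {'in': 0}),  # activation input: no blob to copy
    ('weights', {'in': 1, 'bin': 'weights',
                 'value': np.ones((3, 3)), 'shape': (3, 3)}),
]

copied_blobs = {}
for u, d in src_inputs:
    if 'bin' in d:  # binary (constant) inputs only
        copied_blobs[d['in']] = {'value': d['value'].copy(),
                                 'shape': d['shape']}
print(copied_blobs[1]['shape'])  # (3, 3)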
Example #3
def replace_input_edges(graph: Graph, input_edges_match: dict):
    """
    Replaces existing input edges with new ones that point into the new sub-graph.
    :param graph: NetworkX graph to operate on.
    :param input_edges_match: match of input edges between the old and the new sub-graph.
    :return: None
    """
    for old_name_port, new_name_port in input_edges_match.items():
        # extract_port() splits a 'node_name:port' string; it is defined on the
        # enclosing replacement class (this function is a method excerpt).
        old_node_name, old_in_port = __class__.extract_port(old_name_port)
        new_node_name, new_in_port = __class__.extract_port(new_name_port)
        old_node = Node(graph, old_node_name)
        src_node_name = old_node.get_sorted_inputs()[old_in_port][0]
        edge_attrs = graph[src_node_name][old_node_name][0].copy()
        edge_attrs['in'] = new_in_port
        graph.add_edge(src_node_name, new_node_name, **edge_attrs)
        log.debug("Created edge from {} to {} with attrs: {}".format(
            src_node_name, new_node_name, edge_attrs))
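
The core rewiring step in isolation, again on plain networkx (names are illustrative): the old edge's attributes are copied, the 'in' port is rewritten, and the edge is re-attached to the new node, which is exactly what the last three statements above do.

import networkx as nx

g = nx.MultiDiGraph()
g.add_edge('src', 'old', **{'out': 0, 'in': 0})

edge_attrs = g['src']['old'][0].copy()  # copy attrs of the existing edge
edge_attrs['in'] = 2                    # retarget to the new node's input port
g.add_edge('src', 'new', **edge_attrs)

print(sorted(g.edges(data=True)))
# [('src', 'new', {'out': 0, 'in': 2}), ('src', 'old', {'out': 0, 'in': 0})]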
Example #4
def add_placeholders_to_subgraph(node: Node):
    """
    Adds placeholders to the node's list of protobufs based on the subgraph's input nodes
    (the value of the 'internal_input_node_name' property).
    The function also updates input tensors for nodes that consume the output of nodes
    replaced with placeholders.
    :param node: the node to add placeholders to.
    :return: None
    """
    inputs_replacements = list()
    for index, (in_data_node, edge_attrs) in enumerate(node.get_sorted_inputs()):
        # control flow edges do not carry tensors, so no placeholder is needed
        if edge_attrs.get('control_flow_edge'):
            continue

        if 'internal_input_node_name' in edge_attrs:
            input_tensor_name = edge_attrs['internal_input_node_name']
        else:
            input_tensor_name = node['pb'].input[index]

        input_node_name, port = get_tf_node_port(input_tensor_name)

        placeholder_name = placeholder_name_for_node(input_node_name, port)
        edge_attrs['placeholder_name'] = placeholder_name
        in_node = node.in_node(index)

        assert in_node.shape is not None

        if placeholder_name not in node['pbs']:
            placeholder = tf_v1.placeholder(determine_data_type(in_node),
                                            in_node.shape, placeholder_name)
            inputs_replacements.append((input_tensor_name, placeholder_name))
            add_node_def_to_subgraph(node,
                                     placeholder.op.node_def,
                                     is_input=True)
            log.debug(
                "Added placeholder with name '{}'".format(placeholder_name))

    # update the original input tensor names to the new placeholder names
    for old_input_tensor_name, new_name in inputs_replacements:
        update_input_in_pbs(node, old_input_tensor_name, new_name)
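
A hedged sketch of just the placeholder-creation step with the stock TensorFlow 1.x API (tf.compat.v1 stands in for the tf_v1 alias above; the name string only imitates what placeholder_name_for_node might produce):

import tensorflow.compat.v1 as tf_v1
tf_v1.disable_eager_execution()

# Build a graph-mode placeholder and read back its NodeDef, which is the
# object add_node_def_to_subgraph() receives in the snippet above.
placeholder = tf_v1.placeholder(tf_v1.float32, shape=(1, 224, 224, 3),
                                name='input_node_port_0')
print(placeholder.op.node_def.name)  # input_node_port_0
print(placeholder.shape.as_list())   # [1, 224, 224, 3]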
Example #5
def eltwise_infer(node: Node, op=None, **kwargs):
    def broadcast_dims(dim1, dim2):
        if dim1 is not dynamic_dimension and dim2 is not dynamic_dimension:
            mind = min(dim1, dim2)
            maxd = max(dim1, dim2)
            if mind == 1:
                return maxd
            elif mind != maxd:
                raise Error('Input shapes mismatch for node {}: {}'.format(
                    node_name, shapes))
            return mind
        elif dim1 is dynamic_dimension and dim2 is dynamic_dimension:
            return dynamic_dimension_value
        elif dim1 is dynamic_dimension and dim2 is not dynamic_dimension:
            return broadcast_dims(dim2, dim1)
        else:  # dim1 is static, dim2 is dynamic
            if dim1 != 1:
                return dim1
            else:
                return dim2

    raw_inputs = [
        (inp, attr) for inp, attr in node.get_sorted_inputs()
        if 'control_flow_edge' not in attr or not attr['control_flow_edge']
    ]
    shapes = [node.graph.node[inp]['shape'] for inp, attr in raw_inputs]
    values = [node.graph.node[inp]['value'] for inp, attr in raw_inputs]
    node_name = node.soft_get('name', node.id)

    if any(s is None for s in shapes):
        raise Error(
            'One of the input shapes for node "{}" is None'.format(node_name))

    # the maximum rank among all input shapes
    max_dims = max(len(s) for s in shapes)

    # Make all input shapes of the same size by adding 1's
    axis = node.axis if node.has_valid('axis') else None
    for i, shape in enumerate(shapes):
        if len(shape) != max_dims and len(shape) > 0 and axis is not None:
            new_shape = shape

            # Extend shape with 1's
            for cnt in range(axis + len(shape), max_dims):
                new_shape = np.ma.append(new_shape, 1)

            shapes[i] = new_shape

            # Reshape value to correctly calculate output shape
            if values[i] is not None:
                values[i] = np.ma.reshape(values[i], new_shape)

    extended_shapes = [
        np.ma.concatenate((np.ma.ones(max_dims - len(s), dtype=np.int64), s))
        for s in shapes
    ]
    output_shape = extended_shapes[0]
    for si in range(1, len(extended_shapes)):
        for ei in range(max_dims):
            output_shape[ei] = broadcast_dims(output_shape[ei],
                                              extended_shapes[si][ei])

    node.out_port(0).data.set_shape(output_shape)

    if node.has_and_set('stop_value_propagation'):
        return

    if op is None or any(v is None for v in values):
        return

    # constant folding: apply op pairwise when there are more than two inputs
    if len(values) <= 2:
        node.out_port(0).data.set_value(op(*values, **kwargs))
    else:
        node.out_port(0).data.set_value(values[0])
        for value in values[1:]:
            node.out_port(0).data.set_value(op(node.out_node().value, value))
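
For fully static dimensions, broadcast_dims follows ordinary numpy broadcasting, so the rule can be sanity-checked against numpy itself (np.broadcast_shapes needs NumPy 1.20+; the shapes are arbitrary examples):

import numpy as np

# a dim of 1 stretches to the other dim; unequal non-1 dims are an error,
# matching the Error raised by broadcast_dims above
print(np.broadcast_shapes((1, 3, 1), (4, 1, 5)))  # (4, 3, 5)

try:
    np.broadcast_shapes((2, 3), (4, 3))
except ValueError as err:
    print('shapes mismatch, as eltwise_infer would report:', err)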