def load_components(file_descr, graph, component_layer_map=None):
    """Read all components from a Kaldi model stream and attach them to the graph.

    :param file_descr: binary file-like object positioned at the start of the
                       components section of a Kaldi nnet file
    :param graph: graph object to populate with component nodes
    :param component_layer_map: for nnet3 models, a mapping from component name
                                to the list of layer ids that use it; None for nnet1/nnet2
    :return: list of layer ids that were loaded
    :raises Error: if an nnet3 component name is not present in component_layer_map
    """
    num_components = collect_until_token_and_read(file_descr, b'<NumComponents>')
    log.debug('Network contains {} components'.format(num_components))
    # nnet3 models provide a component->layers map; nnet1/nnet2 do not.
    is_nnet3 = component_layer_map is not None

    if not is_nnet3:
        collect_until_token(file_descr, b'<Components>')

    all_components = []
    name = ""
    for _ in range(num_components):
        if is_nnet3:
            # NOTE: np.bytes_ replaces the deprecated alias np.string_ (removed
            # in NumPy 2.0); both refer to the same type on older NumPy.
            name = collect_until_token_and_read(file_descr, b'<ComponentName>', np.bytes_)
        component_type = find_next_component(file_descr)
        if component_type == end_of_nnet_tag.lower()[1:-1]:
            break

        start_index = file_descr.tell()
        end_tag, end_index = find_end_of_component(file_descr, component_type)

        # read dim info where possible to simplify shape calculation for MemoryOffset
        # shape calculation for MemoryOffset can't be done through shape of previous layer because
        # it is separated in 2 parts to remove cycle from graph
        file_descr.seek(start_index)
        dim = 0
        dim_words = {b'<Dim>', b'<InputDim>'}
        for dim_word in dim_words:
            try:
                collect_until_token(file_descr, dim_word,
                                    size_search_zone=end_index - start_index)
                cur_index = file_descr.tell()
                # only accept a dim token found strictly inside this component's span
                if start_index < cur_index < end_index:
                    dim = read_binary_integer32_token(file_descr)
                    break
                else:
                    file_descr.seek(start_index)
            except Error:
                # dim token not found within the search zone; rewind and try the next token
                file_descr.seek(start_index)

        if is_nnet3:
            if name in component_layer_map:
                layer_id = component_layer_map[name][0]
                # the same component may back several layers; copy parameters to each
                for layer in component_layer_map[name]:
                    node = Node(graph, layer)
                    node['parameters'] = get_parameters(file_descr, start_index, end_index)
                    node['op'] = component_type
                    # Propagate dim info to downstream MemoryOffset nodes to simplify
                    # their shape calculation (see comment above).
                    for o_n_name, params in node.get_outputs():
                        o_n = Node(graph, o_n_name)
                        if o_n['op'] == 'MemoryOffset' and dim != 0:
                            o_n['parameters']['element_size'] = int64_array([1, dim])
            else:
                raise Error("Something wrong with layer {}".format(name))
        else:
            layer_id = graph.unique_id(prefix=component_type)
            graph.add_node(layer_id,
                           parameters=get_parameters(file_descr, start_index, end_index),
                           op=component_type,
                           kind='op')
            if hasattr(graph, 'op_names_statistic'):
                graph.op_names_statistic[component_type] += 1

        all_components.append(layer_id)
        log.debug('{} (type is {}) was loaded'.format(layer_id, component_type))

    return all_components
def read_node(file_descr, graph, component_layer_map, layer_node_map):
    """Parse one line of a Kaldi nnet3 topology description and add node(s) to the graph.

    Handles the line kinds ``input-node``, ``component-node``, ``output-node``
    and ``dim-range-node``.

    :param file_descr: binary file-like object to read one line from
    :param graph: graph object to add nodes/edges to
    :param component_layer_map: mapping component name -> list of layer node ids (updated in place)
    :param layer_node_map: mapping layer name -> graph node id (updated in place)
    :return: False when an empty line (end of section) is read, True otherwise
    :raises Error: on an unsupported node specifier or objective-type
    """
    s = file_descr.readline()
    if s == b'\n':
        return False
    tokens = s.split(b' ')
    if tokens[0] == b'input-node':
        in_name = s[s.find(b'name=') + len(b'name='):].split(b' ')[0]
        in_name = str(in_name).strip('b').replace('\'', "")
        # np.int64 replaces np.int, which was deprecated in NumPy 1.20 and removed
        # in 1.24 (it was an alias for the builtin int). NumPy parses the ASCII
        # bytes token into an integer during conversion.
        in_shape = np.array([1, s[s.find(b'dim=') + len(b'dim='):].split(b' ')[0]],
                            dtype=np.int64)

        if in_name not in layer_node_map:
            graph.add_node(in_name, name=in_name, kind='op', op='Parameter',
                           parameters=None, shape=in_shape)
            layer_node_map[in_name] = in_name
        else:
            # node was created earlier as a placeholder; fill in its real attributes
            Node(graph, in_name)['op'] = 'Parameter'
            Node(graph, in_name)['shape'] = in_shape
    elif tokens[0] == b'component-node':
        layer_name = s[s.find(b'name=') + len(b'name='):].split(b' ')[0]
        layer_name = str(layer_name).strip('b').replace('\'', "")

        component_name = s[s.find(b'component=') + len(b'component='):].split(b' ')[0]
        if layer_name not in layer_node_map:
            node_name = graph.unique_id(prefix=layer_name)
            graph.add_node(node_name, parameters=None, op=None, kind='op')
            layer_node_map[layer_name] = node_name
        else:
            node_name = layer_node_map[layer_name]

        if component_name in component_layer_map:
            component_layer_map[component_name].append(node_name)
        else:
            component_layer_map[component_name] = [node_name]

        # parse input
        in_node_id = parse_input_for_node(s[s.find(b'input=') + 6:], graph, layer_node_map)
        # don't create cyclic edges node to itself to avoid removing later
        if in_node_id != node_name:
            out_port = len(Node(graph, in_node_id).out_nodes())
            in_port = len(Node(graph, node_name).in_nodes())

            Node(graph, node_name).add_input_port(in_port)
            Node(graph, in_node_id).add_output_port(out_port, skip_if_exist=True)

            graph.add_edge(in_node_id, node_name,
                           **create_edge_attrs(in_node_id, node_name, in_node_id,
                                               in_port, out_port))
    elif tokens[0] == b'output-node':
        layer_name = s[s.find(b'name=') + len(b'name='):].split(b' ')[0]
        layer_name = str(layer_name).strip('b').replace('\'', "")
        node_name = graph.unique_id(prefix=layer_name)
        # output is modeled as Identity followed by a Result node
        graph.add_node(node_name, parameters=None, op='Identity', kind='op')
        out_name = graph.unique_id(prefix=node_name + "_out")
        graph.add_node(out_name, parameters=None, op='Result', kind='op')
        Node(graph, node_name).add_input_port(0)
        Node(graph, node_name).add_output_port(0)
        Node(graph, out_name).add_input_port(0)
        graph.add_edge(node_name, out_name,
                       **create_edge_attrs(node_name, out_name, node_name))

        # parse input
        in_node_id = parse_input_for_node(s[s.find(b'input=') + len(b'input='):],
                                          graph, layer_node_map)

        out_port = len(Node(graph, in_node_id).out_nodes())
        Node(graph, in_node_id).add_output_port(out_port)
        graph.create_edge(Node(graph, in_node_id), Node(graph, node_name), out_port, 0,
                          create_edge_attrs(in_node_id, node_name, in_node_id, 0, out_port))

        objective_type = s[s.find(b'objective=') + 10:].split(b' ')[0].split(b'\n')[0]
        if objective_type != b'linear':
            raise Error("Unsupported objective-type for output {}".format(node_name))
    elif tokens[0] == b'dim-range-node':
        layer_name = s[s.find(b'name=') + len(b'name='):].split(b' ')[0]
        layer_name = str(layer_name).strip('b').replace('\'', "")
        offset = int(s[s.find(b'dim-offset=') + len(b'dim-offset='):].split(b' ')[0])
        dim = int(s[s.find(b'dim=') + len(b'dim='):].split(b' ')[0])

        # dim-range selects a slice of the input vector -> modeled as a Crop op
        if layer_name in layer_node_map:
            node_name = layer_node_map[layer_name]
            node = Node(graph, node_name)
            node['parameters'] = {'offset': np.array([offset]),
                                  'dim': np.array([dim]),
                                  'axis': np.array([1])}
            node['op'] = 'Crop'
        else:
            node_name = graph.unique_id(prefix=layer_name)
            graph.add_node(node_name,
                           parameters={'offset': np.array([offset]),
                                       'dim': np.array([dim]),
                                       'axis': np.array([1])},
                           op='Crop',
                           kind='op')
            layer_node_map[layer_name] = node_name
            node = Node(graph, node_name)

        in_node_id = parse_input_for_node(s[s.find(b'input-node=') + len(b'input-node='):],
                                          graph, layer_node_map)
        out_port = len(Node(graph, in_node_id).out_nodes())
        in_port = len(Node(graph, node_name).in_nodes())

        node.add_input_port(in_port)
        Node(graph, in_node_id).add_output_port(out_port)

        graph.create_edge(Node(graph, in_node_id), node, out_port, in_port,
                          create_edge_attrs(in_node_id, node_name, in_node_id,
                                            in_port, out_port))

        # read dim info where possible to simplify shape calculation for MemoryOffset
        # shape calculation for MemoryOffset can't be done through shape of previous layer because
        # it is separated in 2 parts to remove cycle from graph
        for o_n_name, params in node.get_outputs():
            o_n = Node(graph, o_n_name)
            if o_n['op'] == 'MemoryOffset':
                o_n['parameters']['element_size'] = int64_array([1, dim])
    else:
        raise Error("Unsupported node specifier {}".format(tokens[0]))
    return True
def compare_graphs(graph: Graph, graph_ref: Graph, last_node: str, last_node_ref=None,
                   check_op_attrs=False):
    """Compare two graphs by traversing them backwards from the given end nodes.

    Performs a lockstep BFS from ``last_node`` / ``last_node_ref``, comparing
    node counts, kinds, ports and (optionally) op attributes, shapes and values.

    :param graph: graph under test
    :param graph_ref: reference graph
    :param last_node: id of the end node in ``graph`` to start the traversal from
    :param last_node_ref: id of the end node in ``graph_ref``; defaults to ``last_node``
    :param check_op_attrs: when True, compare op-node attributes as well
    :return: tuple ``(is_equal, errors)`` where ``errors`` is a list of messages
    """
    stderr = []
    if last_node_ref is None:
        last_node_ref = last_node

    if 'statistics' in graph.graph and 'statistics' in graph_ref.graph:
        assert graph.graph['statistics'] == graph_ref.graph['statistics'], \
            "int8 statistics comparison failed"

    q = deque([last_node])
    q_ref = deque([last_node_ref])

    checked_nodes = []
    checked_nodes_ref = []

    while len(q_ref) != 0:
        if len(q) == 0:
            stderr.append('Graphs have different number of nodes')
            return (False, stderr)
        node = Node(graph, q.popleft())
        node_ref = Node(graph_ref, q_ref.popleft())

        checked_nodes.append(node.id)
        checked_nodes_ref.append(node_ref.id)

        # Check that nodes has same amount of output nodes
        if len(node_ref.out_nodes()) != len(node.out_nodes()):
            stderr.append('Current node "{}" and reference node "{}" have different amount of output nodes: {} vs {}'.
                          format(node.id, node_ref.id, len(node.out_nodes()), len(node_ref.out_nodes())))
            continue

        # Check that nodes has same amount of input nodes
        if len(node_ref.in_nodes()) != len(node.in_nodes()):
            stderr.append('Current node "{}" and reference node "{}" have different amount of input nodes: {} vs {}'.
                          format(node.id, node_ref.id, len(node.in_nodes()), len(node_ref.in_nodes())))
            continue

        # Check that nodes has same 'kind'
        if node_ref.kind != node.kind:
            stderr.append('Current node "{}" and reference node "{}" have different kind parameter'.
                          format(node.id, node_ref.id))
            return (False, stderr)

        # Check can_be_fused attr (mismatch is recorded but traversal continues)
        if node_ref.has_valid('can_be_fused'):
            if node_ref.soft_get('can_be_fused') != node.soft_get('can_be_fused'):
                stderr.append('Current node "{}" and reference node "{}" have different "can_be_fused" parameter '
                              '{} and {}'.format(node.id, node_ref.id, node.soft_get('can_be_fused'),
                                                 node_ref.soft_get('can_be_fused')))

        if node_ref.kind == 'op':
            # Check that nodes has same operation
            if check_op_attrs:
                cur_node_type = node.type if node.has_valid("type") else None
                ref_node_type = node_ref.type if node_ref.has_valid("type") else None
                for attr in graph_ref.node[node_ref.id]:
                    # skip attributes that are expected to differ (identity, ports, infra data)
                    if graph_ref.node[node_ref.id][attr] is None or attr in \
                            ['name', 'id', '_in_ports', '_out_ports', 'infer', 'IE', 'biases', 'weights', 'custom',
                             'offset', 'ir_data_attrs', 'rt_info']:
                        continue
                    if attr not in graph.node[node.id]:
                        stderr.append('Current node "{}" with type {} has missing attribute {}'
                                      ''.format(node.id, cur_node_type, attr))
                        continue

                    if attr == 'value':
                        if not values_are_equal(node.value, node_ref.value):
                            # BUGFIX: the second "with type" placeholder was missing, which
                            # shifted the remaining format args and dropped node_ref.value.
                            stderr.append('Current node "{}" with type {} and reference node "{}" with type {} have '
                                          'different values \n{} \nand \n{}'.format(
                                              node.id, cur_node_type, node_ref.id, ref_node_type,
                                              node.value, node_ref.value))
                        continue

                    compare_node(node_ref, node, graph_ref.node[node_ref.id][attr],
                                 graph.node[node.id][attr], attr, stderr)
        else:
            if node_ref.has_valid('shape') and not node.has_valid('shape'):
                stderr.append('{} has None shape'.format(node.id))
            if node_ref.has_valid('value') and not node.has_valid('value'):
                stderr.append('{} has None value'.format(node.id))

            # Check that nodes has same shape and value
            if node_ref.has_valid('shape') and node_ref.shape is not None and \
                    not np.array_equal(node_ref.shape, node.shape):
                stderr.append('Current node "{}" and reference node "{}" have different shapes {} and {}'.
                              format(node.id, node_ref.id, node.shape, node_ref.shape))
                continue

            if node_ref.has_valid('value') and node_ref.value is not None and \
                    not values_are_equal(node.value, node_ref.value):
                stderr.append('Current node "{}" and reference node "{}" have different values \n{} \nand \n{}'.
                              format(node.id, node_ref.id, node.value, node_ref.value))

        # enqueue inputs of the current node (op nodes traverse in sorted port order)
        ports = sorted(node.in_nodes().keys()) if node.kind == 'op' else None
        in_nodes = [node.in_node(k) for k in ports] if node.kind == 'op' else node.in_nodes()
        for in_node in in_nodes:
            if in_node.id not in checked_nodes and in_node.id not in q:
                q.append(in_node.id)

        ports_ref = sorted(node_ref.in_nodes().keys()) if node_ref.kind == 'op' else None
        if ports != ports_ref:
            stderr.append('Current node "{}" and reference node "{}" have different ports'.
                          format(node.id, node_ref.id))
            return (False, stderr)

        # ports == ports_ref here, so it is safe to index the reference node with `ports`
        in_nodes = [node_ref.in_node(k) for k in ports] if node_ref.kind == 'op' else node_ref.in_nodes()
        for in_node in in_nodes:
            if in_node.id not in checked_nodes_ref and in_node.id not in q_ref:
                q_ref.append(in_node.id)

        # enqueue outputs of the current node, sorted by name for a stable order
        if node.kind == 'op':
            out_nodes = sorted_by_name([Node(graph, v) for v, _ in node.get_outputs()])
        else:
            out_nodes = sorted_by_name(node.out_nodes())
        for out_node in out_nodes:
            if out_node.id not in checked_nodes and out_node.id not in q:
                q.append(out_node.id)

        if node_ref.kind == 'op':
            out_nodes = sorted_by_name([Node(graph_ref, v) for v, _ in node_ref.get_outputs()])
        else:
            out_nodes = sorted_by_name(node_ref.out_nodes())
        for out_node in out_nodes:
            if out_node.id not in checked_nodes_ref and out_node.id not in q_ref:
                q_ref.append(out_node.id)

    return (False, stderr) if stderr else (True, [])