Example #1
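    # Assumes the enclosing module imports random and numpy and exposes a
    # project-level globals module whose parsed arguments include
    # 'max_loop_bound'.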
    def create_timing_data(self, redo=False):
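        """
        Attach randomly generated timing data to the control flow graph: a WCET
        for each basic block and a loop bound tuple for each loop header.
        Existing values are regenerated only when redo is True.
        """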
        for vertex in self._control_flow_graph:
            if is_basic_block(vertex.program_point):
                if vertex.program_point not in self._wcets or redo:
                    self._wcets[vertex.program_point] = random.randint(1, 20)
            else:
                self._wcets[vertex.program_point] = 0

        maximum_loop_bound = random.randint(1, globals.args['max_loop_bound'])

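        # A loop bound tuple holds one bound per iteration of the enclosing
        # loop: the dummy outermost level (level 0) executes exactly once, a
        # top-level loop gets a single random bound, and a nested loop gets one
        # random bound for every iteration of its parent loop.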
        def create_loop_bound_tuple_for_header(level, abstract_vertex):
            if level == 0:
                return numpy.array([1])
            elif level == 1:
                return numpy.array([random.randint(1, maximum_loop_bound)])
            else:
                parent_abstract_vertex = loop_nesting_tree.get_vertex(
                    abstract_vertex.get_ith_predecessor_edge(0).vertex_id)
                parent_bounds = self._loop_bounds[parent_abstract_vertex.program_point]
                loop_bound = []
                for number_of_iterations in parent_bounds:
                    for _ in range(number_of_iterations):
                        loop_bound.append(random.randint(1, maximum_loop_bound))
                return numpy.array(loop_bound)

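        # Walk the loop-nesting tree level by level and attach a bound tuple to
        # each loop header, regenerating existing tuples only when redo is set.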
        loop_nesting_tree = self._control_flow_graph.get_loop_nesting_tree()
        for level, tree_vertices in loop_nesting_tree.level_by_level_iterator(
                False, True):
            for abstract_vertex in tree_vertices:
                if is_basic_block(abstract_vertex.program_point):
                    header = self._control_flow_graph.get_vertex_for_program_point(
                        abstract_vertex.program_point)
                    if header.program_point not in self._loop_bounds or redo:
                        self._loop_bounds[header.program_point] = \
                            create_loop_bound_tuple_for_header(level, abstract_vertex)
Example #2
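# A minimal sketch of the imports this function relies on: Counter comes from
# the standard library, while DepthFirstSearch, is_basic_block, and the control
# flow graph classes are assumed to be provided by the surrounding project.
from collections import Counter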
def do_super_block_instrumentation(control_flow_graph,
                                   trace,
                                   actual_execution_counts):
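    """
    Select instrumentation points from the super block subgraphs of
    control_flow_graph, restrict the trace to those points, infer the execution
    count of every program point from the restricted trace, and print any
    program point whose inferred count disagrees with actual_execution_counts.
    """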
    instrumentation_points = set()
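    # Pass 1: pick instrumentation points for profiling, one super block
    # subgraph at a time.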
    loop_nesting_tree = control_flow_graph.get_loop_nesting_tree()
    for _, abstract_vertices in loop_nesting_tree.level_by_level_iterator(
            abstract_vertices_only=True):
        for abstract_vertex in abstract_vertices:
            if is_basic_block(abstract_vertex.program_point):
                subgraph = control_flow_graph.get_super_block_subgraph(abstract_vertex)
                subgraph.choose_instrumentation_points_for_profiling(instrumentation_points)
    
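    # Keep only the trace entries that hit an instrumentation point and count
    # how often each one was executed.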
    filtered_trace = [program_point for program_point in trace 
                      if program_point in instrumentation_points]
    inferred_execution_counts = Counter(filtered_trace)
    
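    # Pass 2: rebuild each super block subgraph and propagate the observed
    # counts to the super blocks that were not instrumented.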
    loop_nesting_tree = control_flow_graph.get_loop_nesting_tree()
    for _, abstract_vertices in loop_nesting_tree.level_by_level_iterator(
            abstract_vertices_only=True):
        for abstract_vertex in abstract_vertices:
            if is_basic_block(abstract_vertex.program_point):
                subgraph = control_flow_graph.get_super_block_subgraph(abstract_vertex,
                                                                       redo=True)
                
                # Put execution counts on super blocks.
                for super_vertex in subgraph:
                    super_vertex.instrumented = False
                    super_vertex.count = 0
                    for induced_vertex in super_vertex.vertices:
                        if induced_vertex.program_point in instrumentation_points:
                            super_vertex.count = inferred_execution_counts[induced_vertex.program_point]
                            super_vertex.instrumented = True
                
                # Now compute all super block execution counts.
                depth_first_search = DepthFirstSearch(subgraph, 
                                                      subgraph.root_vertex,
                                                      False)
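                # Work bottom up: at a merge, the single uninstrumented
                # predecessor receives whatever count is left over; at a
                # branch, the count is the sum over one partition of
                # successor edges.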
                for super_vertex in depth_first_search.post_order:
                    if super_vertex.number_of_predecessors() > 1:
                        pred_execution_count = 0
                        uninstrumented_pred_super_vertex = None
                        for pred_edge in super_vertex.predecessor_edge_iterator():
                            pred_super_vertex = subgraph.get_vertex(pred_edge.vertex_id)
                            if pred_super_vertex.instrumented:
                                pred_execution_count += pred_super_vertex.count
                            else:
                                assert uninstrumented_pred_super_vertex is None
                                uninstrumented_pred_super_vertex = pred_super_vertex
                        # If every predecessor is instrumented there is nothing left to infer.
                        if uninstrumented_pred_super_vertex is not None:
                            uninstrumented_pred_super_vertex.count = (super_vertex.count
                                                                      - pred_execution_count)
                    
                    if super_vertex.number_of_successors() > 1:
                        for _, successor_edges in super_vertex.successor_edge_partition_iterator():
                            for succ_edge in successor_edges:
                                succ_super_vertex = subgraph.get_vertex(succ_edge.vertex_id)
                                super_vertex.count += succ_super_vertex.count
                            if super_vertex.count > 0:
                                break
                
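                # Record the inferred count for every program point induced by
                # each super block.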
                for super_vertex in subgraph:
                    for induced_vertex in super_vertex.vertices:
                        inferred_execution_counts[induced_vertex.program_point] = super_vertex.count
    
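    # Sanity check: report every program point whose inferred count differs
    # from its actual execution count.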
    for key in actual_execution_counts:
        if inferred_execution_counts[key] != actual_execution_counts[key]:
            print(key,
                  actual_execution_counts[key],
                  inferred_execution_counts[key])