def testCost(self, serialize=False):
    """Checks graph cost before/after zeroing costs on and off the critical path.

    Args:
      serialize: (bool) when True, round-trips the graph through
        serialization before checking costs.
    """
    requests = self.trace.request_track.GetEvents()
    for index, request in enumerate(requests):
        request.timing = request_track.Timing.FromDevToolsDict(
            {'requestTime': index, 'receiveHeadersEnd': 10,
             'loadingFinished': 10})
    lens = request_dependencies_lens.RequestDependencyLens(self.trace)
    graph = dependency_graph.RequestDependencyGraph(requests, lens)
    if serialize:
        graph = self._SerializeDeserialize(graph)
    # First redirect -> Second redirect -> Redirected Request -> Request ->
    # JS Request 2
    self.assertEqual(7010, graph.Cost())
    # Not on the critical path
    graph.UpdateRequestsCost({TestRequests.JS_REQUEST.request_id: 0})
    self.assertEqual(7010, graph.Cost())
    graph.UpdateRequestsCost(
        {TestRequests.FIRST_REDIRECT_REQUEST.request_id: 0})
    self.assertEqual(7000, graph.Cost())
    graph.UpdateRequestsCost(
        {TestRequests.SECOND_REDIRECT_REQUEST.request_id: 0})
    self.assertEqual(6990, graph.Cost())
def _PageCore(prefix, graph_set_names, output):
    """Compute the page core over sets defined by graph_set_names.

    Args:
      prefix: trace filename prefix; files match '<prefix>-<name>-*.trace'.
      graph_set_names: non-empty iterable of set names to process.
      output: writable file-like object receiving the JSON result.
    """
    assert graph_set_names
    sack = resource_sack.GraphSack()
    graph_sets = []
    for set_name in graph_set_names:
        _Progress('Processing %s' % set_name)
        graphs_for_name = []
        pattern = '-'.join([prefix, set_name, '*.trace'])
        for trace_file in glob.iglob(pattern):
            _Progress('Reading %s' % trace_file)
            trace = loading_trace.LoadingTrace.FromJsonFile(trace_file)
            graph = dependency_graph.RequestDependencyGraph(
                trace.request_track.GetEvents(),
                request_dependencies_lens.RequestDependencyLens(trace))
            sack.ConsumeGraph(graph)
            graphs_for_name.append(graph)
        graph_sets.append(graphs_for_name)
    core = sack.CoreSet(*graph_sets)

    def _BagInfo(bag):
        # Serializes one bag as its label, name and node count.
        return {'label': bag.label, 'name': bag.name, 'count': bag.num_nodes}

    json.dump({'page_core': [_BagInfo(b) for b in core],
               'non_core': [_BagInfo(b) for b in sack.bags if b not in core],
               'threshold': sack.CORE_THRESHOLD},
              output, sort_keys=True, indent=2)
    output.write('\n')
def _AllCores(prefix, graph_set_names, output, threshold):
    """Compute all core sets (per-set and overall page core) for a site.

    Args:
      prefix: trace filename prefix; files match '<prefix>-<name>-*.trace'.
      graph_set_names: iterable of set names to process.
      output: writable file-like object receiving the JSON result.
      threshold: core threshold applied to each per-set sack.
    """
    _Progress('Using threshold %s' % threshold)
    big_sack = resource_sack.GraphSack()
    core_sets = []
    graph_sets = []
    for set_name in graph_set_names:
        _Progress('Finding core set for %s' % set_name)
        per_set_sack = resource_sack.GraphSack()
        # Overrides the class-level threshold on this instance only; big_sack
        # keeps the default CORE_THRESHOLD, as in the original flow.
        per_set_sack.CORE_THRESHOLD = threshold
        set_graphs = []
        for trace_file in glob.iglob('-'.join([prefix, set_name, '*.trace'])):
            _Progress('Reading %s' % trace_file)
            trace = loading_trace.LoadingTrace.FromJsonFile(trace_file)
            graph = dependency_graph.RequestDependencyGraph(
                trace.request_track.GetEvents(),
                request_dependencies_lens.RequestDependencyLens(trace))
            per_set_sack.ConsumeGraph(graph)
            big_sack.ConsumeGraph(graph)
            set_graphs.append(graph)
        core_sets.append({'set_name': set_name,
                          'core_set': list(per_set_sack.CoreSet())})
        graph_sets.append(set_graphs)
    json.dump({'core_sets': core_sets,
               'page_core': list(big_sack.CoreSet(*graph_sets))},
              output, sort_keys=True, indent=2)
def __init__(self, trace, dependencies_lens, user_lens):
    """Builds an annotated request dependency graph for a trace.

    Args:
      trace: (LoadingTrace) a loading trace, or None to leave the instance
        empty.  NOTE(review): presumably the None path supports later
        population (e.g. deserialization) — confirm against callers.
      dependencies_lens: (RequestDependencyLens) request dependencies.
      user_lens: (UserSatisfiedLens) provides critical request ids and the
        postload time.
    """
    self.postload_msec = None
    self.graph = None
    if trace is None:
        return
    requests = trace.request_track.GetEvents()
    critical_ids = user_lens.CriticalRequestIds()
    self.postload_msec = user_lens.PostloadTimeMsec()
    self.graph = dependency_graph.RequestDependencyGraph(
        requests, dependencies_lens, node_class=RequestNode)
    # Collect ids of requests considered preloaded, seeded from the first
    # request of the trace.
    preloaded_ids = [
        r.request_id
        for r in self.PreloadedRequests(requests[0], dependencies_lens, trace)]
    self._AnnotateNodes(self.graph.graph.Nodes(), preloaded_ids, critical_ids)
def testUpdateRequestCost(self):
    """UpdateRequestsCost overrides known request ids and ignores unknown ones."""
    requests = self.trace.request_track.GetEvents()
    requests[0].timing = request_track.TimingFromDict(
        {'requestTime': 12, 'loadingFinished': 10})
    lens = request_dependencies_lens.RequestDependencyLens(self.trace)
    graph = dependency_graph.RequestDependencyGraph(requests, lens)
    self.assertEqual(10, graph.Cost())
    first_id = requests[0].request_id
    graph.UpdateRequestsCost({first_id: 100})
    self.assertEqual(100, graph.Cost())
    # An id absent from the graph must leave costs untouched.
    graph.UpdateRequestsCost({'unrelated_id': 1000})
    self.assertEqual(100, graph.Cost())
def __init__(self, trace, dependencies_lens, user_lens):
    """Initializes an instance of PrefetchSimulationView.

    Args:
      trace: (LoadingTrace) a loading trace.
      dependencies_lens: (RequestDependencyLens) request dependencies.
      user_lens: (UserSatisfiedLens) Lens used to compute costs.

    Raises:
      AssertionError: if the trace contains no 'blink.net' resource events.
    """
    self.trace = trace
    self.dependencies_lens = dependencies_lens
    self._resource_events = self.trace.tracing_track.Filter(
        categories=set([u'blink.net']))
    # Fix: the assertion message previously carried a stray trailing double
    # quote ('...time?"').
    assert len(self._resource_events.GetEvents()) > 0, \
        'Was the "blink.net" category enabled at trace collection time?'
    self._user_lens = user_lens
    request_ids = self._user_lens.CriticalRequests()
    all_requests = self.trace.request_track.GetEvents()
    # NOTE(review): despite its name, this holds the first request's *id*,
    # not a graph node — confirm intent before renaming.
    self._first_request_node = all_requests[0].request_id
    # Restrict the graph to the user-critical subset of requests.
    requests = [r for r in all_requests if r.request_id in request_ids]
    self.graph = dependency_graph.RequestDependencyGraph(
        requests, self.dependencies_lens)
def _GraphFromRequests(cls, requests):
    """Builds a RequestDependencyGraph from a list of request events."""
    trace = test_utils.LoadingTraceFromEvents(requests)
    lens = test_utils.SimpleLens(trace)
    graph = dependency_graph.RequestDependencyGraph(requests, lens)
    return graph
def _BuildGraph(self):
    """Builds the request dependency graph, then annotates nodes and edges."""
    graph = dependency_graph.RequestDependencyGraph(
        self._requests, self._deps_lens, RequestNode, Edge)
    self._graph = graph
    self._AnnotateNodes()
    self._AnnotateEdges()