Example #1
import logging

import loading_trace
import request_dependencies_lens


def ExtractParserDiscoverableResources(loading_trace_path):
  """Extracts the parser discoverable resources from a loading trace.

  Args:
    loading_trace_path: The loading trace's path.

  Returns:
    A set of urls.
  """
  whitelisted_urls = set()
  logging.info('loading %s' % loading_trace_path)
  trace = loading_trace.LoadingTrace.FromJsonFile(loading_trace_path)
  requests_lens = request_dependencies_lens.RequestDependencyLens(trace)
  deps = requests_lens.GetRequestDependencies()

  main_resource_request = deps[0][0]
  logging.info('white-listing %s' % main_resource_request.url)
  whitelisted_urls.add(main_resource_request.url)
  for (first, second, reason) in deps:
    # Work around cases where the protocol may be None, for reasons that are
    # not yet clear.
    # TODO(gabadie): Follow up on this with the Clovis team and possibly
    #   remove this work-around.
    if not second.protocol:
      logging.info('ignoring %s (no protocol)' % second.url)
      continue
    # Ignore data protocols.
    if not second.protocol.startswith('http'):
      logging.info('ignoring %s (`%s` is not HTTP{,S} protocol)' % (
          second.url, second.protocol))
      continue
    if (first.request_id == main_resource_request.request_id and
        reason == 'parser' and second.url not in whitelisted_urls):
      logging.info('white-listing %s' % second.url)
      whitelisted_urls.add(second.url)
  return whitelisted_urls
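A minimal usage sketch, assuming the function above and its imports are available; the trace path is hypothetical:

import logging

logging.basicConfig(level=logging.INFO)
# 'trace.json' stands in for any JSON loading trace on disk.
for url in sorted(ExtractParserDiscoverableResources('trace.json')):
  print(url)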
Example #2
import glob
import json

import dependency_graph
import loading_trace
import request_dependencies_lens
import resource_sack

# Note: _Progress is a small progress-reporting helper defined elsewhere in
# this script.


def _AllCores(prefix, graph_set_names, output, threshold):
    """Compute all core sets (per-set and overall page core) for a site."""
    core_sets = []
    _Progress('Using threshold %s' % threshold)
    big_sack = resource_sack.GraphSack()
    graph_sets = []
    for name in graph_set_names:
        _Progress('Finding core set for %s' % name)
        sack = resource_sack.GraphSack()
        sack.CORE_THRESHOLD = threshold
        this_set = []
        for filename in glob.iglob('-'.join([prefix, name, '*.trace'])):
            _Progress('Reading %s' % filename)
            trace = loading_trace.LoadingTrace.FromJsonFile(filename)
            graph = dependency_graph.RequestDependencyGraph(
                trace.request_track.GetEvents(),
                request_dependencies_lens.RequestDependencyLens(trace))
            sack.ConsumeGraph(graph)
            big_sack.ConsumeGraph(graph)
            this_set.append(graph)
        core_sets.append({
            'set_name': name,
            'core_set': list(sack.CoreSet())
        })
        graph_sets.append(this_set)
    json.dump(
        {
            'core_sets': core_sets,
            'page_core': list(big_sack.CoreSet(*graph_sets))
        },
        output,
        sort_keys=True,
        indent=2)
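A hypothetical invocation, assuming trace files on disk whose names match the prefix-name-*.trace pattern globbed above; the prefix, set names, and threshold are all illustrative:

import sys

# Matches files such as 'example.com-cold-0.trace' and
# 'example.com-warm-1.trace' in the current directory.
_AllCores(prefix='example.com',
          graph_set_names=['cold', 'warm'],
          output=sys.stdout,
          threshold=0.5)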
Example #3
def _SetUp(self, added_trace_events=None):
  """Builds a PrefetchSimulationView over a small synthetic trace."""
  trace_events = [
      {'ts': 5, 'ph': 'X', 'dur': 10, 'pid': 2, 'tid': 1, 'cat': 'blink.net'}]
  if added_trace_events is not None:
    trace_events += added_trace_events
  self.trace = TestRequests.CreateLoadingTrace(trace_events)
  self.dependencies_lens = request_dependencies_lens.RequestDependencyLens(
      self.trace)
  self.user_satisfied_lens = test_utils.MockUserSatisfiedLens(self.trace)
  self.user_satisfied_lens._postload_msec = 12
  self.prefetch_view = PrefetchSimulationView(
      self.trace, self.dependencies_lens, self.user_satisfied_lens)
  # Pin every edge cost so simulated load times are deterministic.
  for e in self.prefetch_view.graph.graph.Edges():
    e.cost = 10
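For context, a sketch of how this fixture might be exercised from a test method in the same unittest.TestCase subclass; the extra trace event and the assertion are illustrative:

def testPrefetchViewBuilds(self):
  # Hypothetical extra event appended to the baseline event above.
  self._SetUp(added_trace_events=[
      {'ts': 20, 'ph': 'X', 'dur': 5, 'pid': 2, 'tid': 1,
       'cat': 'blink.net'}])
  # _SetUp pinned every edge cost to 10, so any cost-based assertion
  # here would be deterministic.
  self.assertIsNotNone(self.prefetch_view)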
Example #4
@classmethod
def FromTrace(cls, trace):
  """Create a graph from a trace with no additional annotation."""
  return cls(trace,
             request_dependencies_lens.RequestDependencyLens(trace))
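The enclosing class is not shown in this snippet; assuming FromTrace is a classmethod of a request-graph class (called RequestGraph here purely for illustration), a call would look like:

import loading_trace

# 'trace.json' is a hypothetical path to a JSON loading trace.
trace = loading_trace.LoadingTrace.FromJsonFile('trace.json')
graph = RequestGraph.FromTrace(trace)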