Example #1
import os

import isolate_common  # project-local helpers from the isolate tooling
import trace_inputs

# ExecutionError is defined in the enclosing module alongside this function.


def read(complete_state):
  """Reads a trace and returns the .isolate dictionary."""
  api = trace_inputs.get_api()
  logfile = complete_state.result_file + '.log'
  if not os.path.isfile(logfile):
    raise ExecutionError(
        'No log file \'%s\' to read, did you forget to \'trace\'?' % logfile)
  try:
    results = trace_inputs.load_trace(
        logfile, complete_state.root_dir, api, isolate_common.default_blacklist)
    tracked, touched = isolate_common.split_touched(results.existent)
    value = isolate_common.generate_isolate(
        tracked,
        [],
        touched,
        complete_state.root_dir,
        complete_state.saved_state.variables,
        complete_state.result.relative_cwd)
    return value
  except trace_inputs.TracingFailure, e:
    raise ExecutionError(
        'Reading traces failed for: %s\n%s' %
          (' '.join(complete_state.result.command), str(e)))
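A minimal caller sketch, assuming a complete_state object already built by the
surrounding isolate tooling; the output filename below is hypothetical. On
failure, ExecutionError carries a human-readable message.

# Hypothetical caller; complete_state is constructed elsewhere by the tooling.
try:
  value = read(complete_state)
  with open('result.isolate', 'wb') as f:
    isolate_common.pretty_print(value, f)
except ExecutionError, e:
  print 'Failed to read trace: %s' % e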
Example #2
import multiprocessing
import os
import sys
import time

import isolate_common  # project-local helpers from the isolate tooling
import trace_inputs
import worker_pool


def trace_test_cases(
    executable, root_dir, cwd_dir, variables, test_cases, jobs, output_file):
  """Traces test cases one by one."""
  assert not os.path.isabs(cwd_dir)
  assert os.path.isabs(root_dir) and os.path.isdir(root_dir)
  assert os.path.isfile(executable) and os.path.isabs(executable)

  if not test_cases:
    return 0

  # Resolve any symlinks in root_dir.
  root_dir = os.path.realpath(root_dir)
  full_cwd_dir = os.path.normpath(os.path.join(root_dir, cwd_dir))
  assert os.path.isdir(full_cwd_dir)
  logname = output_file + '.logs'

  progress = worker_pool.Progress(len(test_cases))
  with worker_pool.ThreadPool(jobs or multiprocessing.cpu_count()) as pool:
    api = trace_inputs.get_api()
    api.clean_trace(logname)
    with api.get_tracer(logname) as tracer:
      # Tracer is a helper class defined alongside this function; its map()
      # method runs a single test case under the tracer.
      function = Tracer(tracer, executable, full_cwd_dir, progress).map
      for test_case in test_cases:
        pool.add_task(function, test_case)

      values = pool.join(progress, 0.1)

  print ''
  print '%.1fs Done post-processing logs. Parsing logs.' % (
      time.time() - progress.start)
  results = api.parse_log(logname, isolate_common.default_blacklist)
  print '%.1fs Done parsing logs.' % (
      time.time() - progress.start)

  # Strip the results so all paths are relative to root_dir.
  results_processed = {}
  for item in results:
    if 'results' in item:
      item = item.copy()
      item['results'] = item['results'].strip_root(root_dir)
      results_processed[item['trace']] = item
    else:
      print >> sys.stderr, 'Got exception while tracing %s: %s' % (
          item['trace'], item['exception'])
  print '%.1fs Done stripping root.' % (
      time.time() - progress.start)

  # Flatten.
  flattened = {}
  for item_list in values:
    for item in item_list:
      if item['valid']:
        test_case = item['test_case']
        tracename = test_case.replace('/', '-')
        flattened[test_case] = results_processed[tracename].copy()
        item_results = flattened[test_case]['results']
        tracked, touched = isolate_common.split_touched(item_results.existent)
        flattened[test_case].update({
            'processes': len(list(item_results.process.all)),
            'results': item_results.flatten(),
            'duration': item['duration'],
            'returncode': item['returncode'],
            'valid': item['valid'],
            'variables':
              isolate_common.generate_simplified(
                  tracked,
                  [],
                  touched,
                  root_dir,
                  variables,
                  cwd_dir),
          })
        del flattened[test_case]['trace']
  print '%.1fs Done flattening.' % (
      time.time() - progress.start)

  # Make it dense if there are more than 20 results.
  trace_inputs.write_json(
      output_file,
      flattened,
      False)

  # Also write the .isolate file.
  # First, get all the files from all results. Use a map to remove dupes.
  files = {}
  for item in results_processed.itervalues():
    files.update((f.full_path, f) for f in item['results'].existent)
  # Convert back to a list, discard the keys.
  files = files.values()
  tracked, touched = isolate_common.split_touched(files)
  value = isolate_common.generate_isolate(
      tracked,
      [],
      touched,
      root_dir,
      variables,
      cwd_dir)
  with open('%s.isolate' % output_file, 'wb') as f:
    isolate_common.pretty_print(value, f)
  return 0
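A hypothetical invocation, illustrating the contract enforced by the asserts
above; every path and test-case name here is an assumption, not taken from the
source. executable and root_dir must be absolute and exist, cwd_dir is
relative to root_dir, and jobs=None falls back to one worker per CPU core. The
call writes the flattened results to output_file and a companion
output_file + '.isolate'.

# Hypothetical invocation; substitute a real checkout root and test binary.
root = os.path.realpath('/path/to/src')
trace_test_cases(
    executable=os.path.join(root, 'out', 'Release', 'unit_tests'),
    root_dir=root,
    cwd_dir='out/Release',
    variables={'DEPTH': '.'},
    test_cases=['Foo.Bar', 'Foo.Baz'],
    jobs=None,  # defaults to multiprocessing.cpu_count()
    output_file='unit_tests.test_cases')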