Example #1
0
def isolate_test_cases(cmd, test_cases, jobs, isolated_file, isolate_file,
                       root_dir, reldir, path_variables, config_variables,
                       extra_variables, trace_blacklist):
    assert os.path.isabs(root_dir) and os.path.isdir(root_dir), root_dir

    logname = isolated_file + '.log'
    basename = isolated_file.rsplit('.', 1)[0]
    cwd_dir = os.path.join(root_dir, reldir)
    # Do the actual tracing.
    results = trace_test_cases.trace_test_cases(cmd, cwd_dir, test_cases, jobs,
                                                logname)
    api = trace_inputs.get_api()
    blacklist = tools.gen_blacklist(trace_blacklist)
    logs = dict(
        (i.pop('trace'), i) for i in api.parse_log(logname, blacklist, None))
    exception = None
    try:
        inputs = []
        for items in results:
            item = items[-1]
            assert item['valid']
            # Load the results;
            log_dict = logs[item['tracename']]
            if log_dict.get('exception'):
                exception = exception or log_dict['exception']
                logging.error('Got exception: %s', exception)
                continue
            files = log_dict['results'].strip_root(root_dir).files
            tracked, touched = isolate.isolate_format.split_touched(files)
            value = isolate.generate_isolate(tracked, [], touched, root_dir,
                                             path_variables, config_variables,
                                             extra_variables, reldir,
                                             blacklist)
            # item['test_case'] could be an invalid file name.
            out = basename + '.' + item['tracename'] + '.isolate'
            with open(out, 'w') as f:
                isolate.isolate_format.pretty_print(value, f)
            inputs.append(out)

        # Merges back. Note that it is possible to blow up the command line
        # argument length here but writing the files is still useful. Convert to
        # importing the module instead if necessary.
        merge_cmd = [
            sys.executable,
            os.path.join(ROOT_DIR, 'isolate_merge.py'),
            isolate_file,
            '-o',
            isolate_file,
        ]
        merge_cmd.extend(inputs)
        logging.info(merge_cmd)
        proc = subprocess.Popen(merge_cmd)
        proc.communicate()
        return proc.returncode
    finally:
        if exception:
            raise exception[0], exception[1], exception[2]
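
The final raise in the finally block uses Python 2's three-argument form to re-raise the captured exception with its original traceback. For reference, a small sketch of the Python 3 equivalent, assuming exception holds a sys.exc_info()-style (type, value, traceback) triple as the code above implies:

def reraise_sketch(exc_info):
  # Re-raise a (type, value, traceback) triple while preserving the traceback;
  # this mirrors what 'raise exception[0], exception[1], exception[2]' does in
  # Python 2 (and what six.reraise provides).
  exc_type, exc_value, tb = exc_info
  if exc_value is None:
    exc_value = exc_type()
  raise exc_value.with_traceback(tb)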
Example #2
0
def main():
  """CLI frontend to validate arguments."""
  tools.disable_buffering()
  parser = run_test_cases.OptionParserTestCases(
      usage='%prog <options> [gtest]')
  parser.format_description = lambda *_: parser.description
  parser.add_option(
      '-o', '--out',
      help='output file, defaults to <executable>.test_cases')
  parser.add_option(
      '-r', '--root-dir',
      help='Root directory under which file access should be noted')
  parser.add_option(
      '--trace-blacklist', action='append', default=[],
      help='List of regexp to use as blacklist filter')
  # TODO(maruel): Add support for options.timeout.
  parser.remove_option('--timeout')
  options, args = parser.parse_args()

  if not args:
    parser.error(
        'Please provide the executable line to run. If you need fancy things '
        'like xvfb, start this script from *inside* xvfb; it\'ll be much '
        'faster.')

  cmd = tools.fix_python_path(args)
  cmd[0] = os.path.abspath(cmd[0])
  if not os.path.isfile(cmd[0]):
    parser.error('Tracing failed for: %s\nIt doesn\'t exist' % ' '.join(cmd))

  if not options.out:
    options.out = '%s.test_cases' % cmd[-1]
  options.out = os.path.abspath(options.out)
  if options.root_dir:
    options.root_dir = os.path.abspath(options.root_dir)
  logname = options.out + '.log'

  test_cases = parser.process_gtest_options(cmd, os.getcwd(), options)

  # Then run them.
  print('Tracing...')
  results = trace_test_cases(
      cmd,
      os.getcwd(),
      test_cases,
      options.jobs,
      logname)
  print('Reading trace logs...')
  blacklist = tools.gen_blacklist(options.trace_blacklist)
  write_details(logname, options.out, options.root_dir, blacklist, results)
  return 0
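
The test cases passed to trace_test_cases above come from parser.process_gtest_options. As a rough, hypothetical illustration of how a gtest binary's test list can be enumerated (the real helper also handles filtering and sharding, and its exact behavior is not shown here):

import subprocess

def list_gtest_cases_sketch(executable):
  # Hypothetical sketch, not the actual process_gtest_options: ask the binary
  # for its test list and rebuild 'Suite.Test' names from the indented output.
  out = subprocess.check_output([executable, '--gtest_list_tests'])
  cases = []
  suite = ''
  for line in out.decode().splitlines():
    if not line.strip() or line.startswith('Note:'):
      continue
    if not line.startswith(' '):
      suite = line.split('#')[0].strip()  # e.g. 'FooTest.'
    else:
      cases.append(suite + line.split('#')[0].strip())
  return cases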
Example #3
0
  def test_blacklist_chromium(self):
    ok = [
      '.run_test_cases',
      'testserver.log2',
    ]
    blocked = [
      'foo.run_test_cases',
      'testserver.log',
      os.path.join('foo', 'testserver.log'),
    ]
    blacklist = tools.gen_blacklist(isolate.isolateserver.DEFAULT_BLACKLIST)
    for i in ok:
      self.assertFalse(blacklist(i), i)
    for i in blocked:
      self.assertTrue(blacklist(i), i)
Example #4
0
  def test_blacklist_chromium(self):
    ok = [
      '.run_test_cases',
      'testserver.log2',
    ]
    blocked = [
      'foo.run_test_cases',
      'testserver.log',
      os.path.join('foo', 'testserver.log'),
    ]
    blacklist = tools.gen_blacklist(isolate.isolateserver.DEFAULT_BLACKLIST)
    for i in ok:
      self.assertFalse(blacklist(i), i)
    for i in blocked:
      self.assertTrue(blacklist(i), i)
Example #5
0
  def test_blacklist(self):
    ok = [
      '.git2',
      '.pyc',
      '.swp',
      'allo.git',
      'foo',
    ]
    blocked = [
      '.git',
      os.path.join('foo', '.git'),
      'foo.pyc',
      'bar.swp',
    ]
    blacklist = tools.gen_blacklist(isolate.isolateserver.DEFAULT_BLACKLIST)
    for i in ok:
      self.assertFalse(blacklist(i), i)
    for i in blocked:
      self.assertTrue(blacklist(i), i)
Example #6
0
  def test_blacklist(self):
    ok = [
      '.git2',
      '.pyc',
      '.swp',
      'allo.git',
      'foo',
    ]
    blocked = [
      '.git',
      os.path.join('foo', '.git'),
      'foo.pyc',
      'bar.swp',
    ]
    blacklist = tools.gen_blacklist(isolate.isolateserver.DEFAULT_BLACKLIST)
    for i in ok:
      self.assertFalse(blacklist(i), i)
    for i in blocked:
      self.assertTrue(blacklist(i), i)
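
The blacklist tests above only rely on tools.gen_blacklist returning a predicate built from regular expressions. A minimal sketch of such a predicate factory, assuming it simply compiles each pattern and matches paths against all of them (the real tools.gen_blacklist may differ in details):

import re

def gen_blacklist_sketch(regexes):
  # Hypothetical stand-in for tools.gen_blacklist: compile each regexp once
  # and return a predicate that reports whether a path matches any of them.
  compiled = [re.compile(r) for r in regexes if r]
  return lambda path: any(p.match(path) for p in compiled)

# Usage: block any *.pyc file while leaving other paths alone.
is_blacklisted = gen_blacklist_sketch([r'.+\.pyc$'])
assert is_blacklisted('foo.pyc')
assert not is_blacklisted('foo.py')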
Example #7
0
    def load_isolate(
        self, cwd, isolate_file, path_variables, config_variables, extra_variables, blacklist, ignore_broken_items
    ):
        """Updates self.isolated and self.saved_state with information loaded from a
    .isolate file.

    Processes the loaded data, deducing root_dir and relative_cwd.
    """
        # Make sure to not depend on os.getcwd().
        assert os.path.isabs(isolate_file), isolate_file
        isolate_file = file_path.get_native_path_case(isolate_file)
        logging.info(
            "CompleteState.load_isolate(%s, %s, %s, %s, %s, %s)",
            cwd,
            isolate_file,
            path_variables,
            config_variables,
            extra_variables,
            ignore_broken_items,
        )

        # Config variables are not affected by the paths and must be used to
        # retrieve the paths, so update them first.
        self.saved_state.update_config(config_variables)

        with open(isolate_file, "r") as f:
            # At that point, variables are not replaced yet in command and infiles.
            # infiles may contain directory entries and is in posix style.
            command, infiles, read_only, isolate_cmd_dir = isolate_format.load_isolate_for_config(
                os.path.dirname(isolate_file), f.read(), self.saved_state.config_variables
            )

        # Processes the variables with the newly found relative root. Note that 'cwd'
        # is used when path variables are used.
        path_variables = normalize_path_variables(cwd, path_variables, isolate_cmd_dir)
        # Update the rest of the saved state.
        self.saved_state.update(isolate_file, path_variables, extra_variables)

        total_variables = self.saved_state.path_variables.copy()
        total_variables.update(self.saved_state.config_variables)
        total_variables.update(self.saved_state.extra_variables)
        command = [isolate_format.eval_variables(i, total_variables) for i in command]

        total_variables = self.saved_state.path_variables.copy()
        total_variables.update(self.saved_state.extra_variables)
        infiles = [isolate_format.eval_variables(f, total_variables) for f in infiles]
        # root_dir is automatically determined by the deepest root accessed with the
        # form '../../foo/bar'. Note that path variables must be taken into
        # account too, so add them as if they were input files.
        self.saved_state.root_dir = isolate_format.determine_root_dir(
            isolate_cmd_dir, infiles + self.saved_state.path_variables.values()
        )
        # The relative directory is automatically determined by the relative path
        # between root_dir and the directory containing the .isolate file,
        # isolate_base_dir.
        relative_cwd = os.path.relpath(isolate_cmd_dir, self.saved_state.root_dir)
        # Now that we know where the root is, check that the path_variables point
        # inside it.
        for k, v in self.saved_state.path_variables.iteritems():
            dest = os.path.join(isolate_cmd_dir, relative_cwd, v)
            if not file_path.path_starts_with(self.saved_state.root_dir, dest):
                raise isolated_format.MappingError(
                    "Path variable %s=%r points outside the inferred root directory "
                    "%s; %s" % (k, v, self.saved_state.root_dir, dest)
                )
        # Normalize the files relative to self.saved_state.root_dir. It is
        # important to keep the trailing os.path.sep at this step.
        infiles = [
            file_path.relpath(file_path.normpath(os.path.join(isolate_cmd_dir, f)), self.saved_state.root_dir)
            for f in infiles
        ]
        follow_symlinks = sys.platform != "win32"
        # Expand the directories by listing each file inside. Up to now, trailing
        # os.path.sep must be kept.
        infiles = isolated_format.expand_directories_and_symlinks(
            self.saved_state.root_dir, infiles, tools.gen_blacklist(blacklist), follow_symlinks, ignore_broken_items
        )

        # Finally, update the new data to be able to generate the foo.isolated file,
        # the file that is used by run_isolated.py.
        self.saved_state.update_isolated(command, infiles, read_only, relative_cwd)
        logging.debug(self)
Example #8
0
    def load_isolate(self, cwd, isolate_file, path_variables, config_variables,
                     extra_variables, blacklist, ignore_broken_items,
                     collapse_symlinks):
        """Updates self.isolated and self.saved_state with information loaded from a
    .isolate file.

    Processes the loaded data, deducing root_dir and relative_cwd.
    """
        # Make sure to not depend on os.getcwd().
        assert os.path.isabs(isolate_file), isolate_file
        isolate_file = file_path.get_native_path_case(isolate_file)
        logging.info('CompleteState.load_isolate(%s, %s, %s, %s, %s, %s, %s)',
                     cwd, isolate_file, path_variables, config_variables,
                     extra_variables, ignore_broken_items, collapse_symlinks)

        # Config variables are not affected by the paths and must be used to
        # retrieve the paths, so update them first.
        self.saved_state.update_config(config_variables)

        with fs.open(isolate_file, 'r') as f:
            # At that point, variables are not replaced yet in command and infiles.
            # infiles may contain directory entries and is in posix style.
            command, infiles, read_only, isolate_cmd_dir = (
                isolate_format.load_isolate_for_config(
                    os.path.dirname(isolate_file), f.read(),
                    self.saved_state.config_variables))

        # Processes the variables with the newly found relative root. Note that 'cwd'
        # is used when path variables are used.
        path_variables = normalize_path_variables(cwd, path_variables,
                                                  isolate_cmd_dir)
        # Update the rest of the saved state.
        self.saved_state.update(isolate_file, path_variables, extra_variables)

        total_variables = self.saved_state.path_variables.copy()
        total_variables.update(self.saved_state.config_variables)
        total_variables.update(self.saved_state.extra_variables)
        command = [
            isolate_format.eval_variables(i, total_variables) for i in command
        ]

        total_variables = self.saved_state.path_variables.copy()
        total_variables.update(self.saved_state.extra_variables)
        infiles = [
            isolate_format.eval_variables(f, total_variables) for f in infiles
        ]
        # root_dir is automatically determined by the deepest root accessed with the
        # form '../../foo/bar'. Note that path variables must be taken into
        # account too, so add them as if they were input files.
        self.saved_state.root_dir = isolate_format.determine_root_dir(
            isolate_cmd_dir,
            infiles + self.saved_state.path_variables.values())
        # The relative directory is automatically determined by the relative path
        # between root_dir and the directory containing the .isolate file,
        # isolate_base_dir.
        relative_cwd = os.path.relpath(isolate_cmd_dir,
                                       self.saved_state.root_dir)
        # Now that we know where the root is, check that the path_variables point
        # inside it.
        for k, v in self.saved_state.path_variables.iteritems():
            dest = os.path.join(isolate_cmd_dir, relative_cwd, v)
            if not file_path.path_starts_with(self.saved_state.root_dir, dest):
                raise isolated_format.MappingError(
                    'Path variable %s=%r points outside the inferred root directory '
                    '%s; %s' % (k, v, self.saved_state.root_dir, dest))
        # Normalize the files relative to self.saved_state.root_dir. It is
        # important to keep the trailing os.path.sep at this step.
        infiles = [
            file_path.relpath(
                file_path.normpath(os.path.join(isolate_cmd_dir, f)),
                self.saved_state.root_dir) for f in infiles
        ]
        follow_symlinks = False
        if not collapse_symlinks:
            follow_symlinks = sys.platform != 'win32'
        # Expand the directories by listing each file inside. Up to now, trailing
        # os.path.sep must be kept.
        infiles = isolated_format.expand_directories_and_symlinks(
            self.saved_state.root_dir, infiles, tools.gen_blacklist(blacklist),
            follow_symlinks, ignore_broken_items)

        # Finally, update the new data to be able to generate the foo.isolated file,
        # the file that is used by run_isolated.py.
        self.saved_state.update_isolated(command, infiles, read_only,
                                         relative_cwd)
        logging.debug(self)
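
In both load_isolate variants above, isolate_format.determine_root_dir picks the root from the deepest '../..'-style path that is referenced. A hypothetical sketch of that idea, assuming the root is found by walking up one level per leading '..' component (the actual helper may behave differently):

import os

def determine_root_dir_sketch(base_dir, rel_paths):
  # Hypothetical illustration, not the real isolate_format.determine_root_dir:
  # count the leading '..' components of each relative path and walk up from
  # base_dir by the largest such count.
  deepest = 0
  for rel in rel_paths:
    parts = rel.replace(os.path.sep, '/').split('/')
    up = 0
    for part in parts:
      if part != '..':
        break
      up += 1
    deepest = max(deepest, up)
  root = base_dir
  for _ in range(deepest):
    root = os.path.dirname(root)
  return root

# Usage: with files reaching two levels above the .isolate directory, the
# inferred root is two directories up.
assert determine_root_dir_sketch('/a/b/c', ['../../x/y', 'z']) == '/a'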
Example #9
0
def isolate_test_cases(
    cmd, test_cases, jobs, isolated_file, isolate_file,
    root_dir, reldir, path_variables, config_variables, extra_variables,
    trace_blacklist):
  assert os.path.isabs(root_dir) and os.path.isdir(root_dir), root_dir

  logname = isolated_file + '.log'
  basename = isolated_file.rsplit('.', 1)[0]
  cwd_dir = os.path.join(root_dir, reldir)
  # Do the actual tracing.
  results = trace_test_cases.trace_test_cases(
      cmd, cwd_dir, test_cases, jobs, logname)
  api = trace_inputs.get_api()
  blacklist = tools.gen_blacklist(trace_blacklist)
  logs = dict(
      (i.pop('trace'), i) for i in api.parse_log(logname, blacklist, None))
  exception = None
  try:
    inputs = []
    for items in results:
      item = items[-1]
      assert item['valid']
      # Load the results;
      log_dict = logs[item['tracename']]
      if log_dict.get('exception'):
        exception = exception or log_dict['exception']
        logging.error('Got exception: %s', exception)
        continue
      files = log_dict['results'].strip_root(root_dir).files
      tracked, touched = isolate.isolate_format.split_touched(files)
      value = isolate.generate_isolate(
          tracked,
          [],
          touched,
          root_dir,
          path_variables,
          config_variables,
          extra_variables,
          reldir,
          blacklist)
      # item['test_case'] could be an invalid file name.
      out = basename + '.' + item['tracename'] + '.isolate'
      with open(out, 'w') as f:
        isolate.isolate_format.pretty_print(value, f)
      inputs.append(out)

    # Merges back. Note that it is possible to blow up the command line
    # argument length here but writing the files is still useful. Convert to
    # importing the module instead if necessary.
    merge_cmd = [
      sys.executable,
      os.path.join(ROOT_DIR, 'isolate_merge.py'),
      isolate_file,
      '-o', isolate_file,
    ]
    merge_cmd.extend(inputs)
    logging.info(merge_cmd)
    proc = subprocess.Popen(merge_cmd)
    proc.communicate()
    return proc.returncode
  finally:
    if exception:
      raise exception[0], exception[1], exception[2]