Example #1
    def _render_jar_tool_args(self, options):
        """Format the arguments to jar-tool.

        :param Options options:
        """
        args = []

        with temporary_dir() as manifest_stage_dir:
            # Relativize URLs in the canonical classpath. This also needs to be stable,
            # so do not follow symlinks, since symlink layouts vary from platform to platform.
            classpath = relativize_classpath(self.classpath,
                                             os.path.dirname(self._path),
                                             followlinks=False)

            def as_cli_entry(entry):
                src = entry.materialize(manifest_stage_dir)
                return '{}={}'.format(src, entry.dest) if entry.dest else src

            files = map(as_cli_entry, self._entries) if self._entries else []

            jars = self._jars or []

            with safe_args(classpath, options,
                           delimiter=',') as classpath_args:
                with safe_args(files, options, delimiter=',') as files_args:
                    with safe_args(jars, options, delimiter=',') as jars_args:

                        # If you specify --manifest to jar-tool you cannot specify --main.
                        if self._manifest_entry:
                            manifest_file = self._manifest_entry.materialize(
                                manifest_stage_dir)
                        else:
                            manifest_file = None

                        if self._main and manifest_file:
                            main_arg = None
                            with open(manifest_file, 'a') as f:
                                f.write("Main-Class: {}\n".format(self._main))
                        else:
                            main_arg = self._main

                        if main_arg:
                            args.append('-main={}'.format(self._main))

                        if classpath_args:
                            args.append('-classpath={}'.format(
                                ','.join(classpath_args)))

                        if manifest_file:
                            args.append('-manifest={}'.format(manifest_file))

                        if files_args:
                            args.append('-files={}'.format(
                                ','.join(files_args)))

                        if jars_args:
                            args.append('-jars={}'.format(','.join(jars_args)))

                        yield args
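
All of the snippets on this page lean on safe_args from binary_util, a context manager that guards against overly long command lines. The sketch below is a minimal, hypothetical re-implementation of that idea, not the Pants API: the max_args parameter and the @argfile convention are assumptions made for illustration. Short argument lists pass through unchanged; long ones are spilled to a temporary file and replaced by a single @<file> reference.

import os
import tempfile
from contextlib import contextmanager


@contextmanager
def safe_args_sketch(args, max_args=100, delimiter='\n'):
    """Yield `args` directly if short enough; otherwise spill them to an argfile.

    Illustrative stand-in for the binary_util.safe_args pattern used above;
    the signature and the '@argfile' convention are assumptions.
    """
    args = list(args)
    if len(args) <= max_args:
        yield args
        return
    fd, path = tempfile.mkstemp(suffix='.args')
    try:
        with os.fdopen(fd, 'w') as f:
            f.write(delimiter.join(args))
        # The consumer sees a single token pointing at the argfile.
        yield ['@{}'.format(path)]
    finally:
        os.unlink(path)

A caller would then do, for instance, with safe_args_sketch(classpath, delimiter=',') as cp_args: and join cp_args into the command line, which is exactly how the jar-tool examples use the real helper.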
Example #2
    def _render_jar_tool_args(self, options):
        """Format the arguments to jar-tool.

        :param Options options:
        """
        args = []

        with temporary_dir() as manifest_stage_dir:
            classpath = self._classpath or []

            def as_cli_entry(entry):
                src = entry.materialize(manifest_stage_dir)
                return '{}={}'.format(src, entry.dest) if entry.dest else src

            files = map(as_cli_entry, self._entries) if self._entries else []

            jars = self._jars or []

            with safe_args(classpath, options,
                           delimiter=',') as classpath_args:
                with safe_args(files, options, delimiter=',') as files_args:
                    with safe_args(jars, options, delimiter=',') as jars_args:

                        # If you specify --manifest to jar-tool you cannot specify --main.
                        if self._manifest_entry:
                            manifest_file = self._manifest_entry.materialize(
                                manifest_stage_dir)
                        else:
                            manifest_file = None

                        if self._main and manifest_file:
                            main_arg = None
                            with open(manifest_file, 'a') as f:
                                f.write("Main-Class: {}\n".format(self._main))
                        else:
                            main_arg = self._main

                        if main_arg:
                            args.append('-main={}'.format(self._main))

                        if classpath_args:
                            args.append('-classpath={}'.format(
                                ','.join(classpath_args)))

                        if manifest_file:
                            args.append('-manifest={}'.format(manifest_file))

                        if files_args:
                            args.append('-files={}'.format(
                                ','.join(files_args)))

                        if jars_args:
                            args.append('-jars={}'.format(','.join(jars_args)))

                        yield args
Example #3
  def _render_jar_tool_args(self, options):
    """Format the arguments to jar-tool.

    :param Options options:
    """
    args = []

    with temporary_dir() as manifest_stage_dir:
      # Relativize URLs in the canonical classpath. This also needs to be stable,
      # so do not follow symlinks, since symlink layouts vary from platform to platform.
      classpath = relativize_classpath(self.classpath,
                                       os.path.dirname(self._path),
                                       followlinks=False)

      def as_cli_entry(entry):
        src = entry.materialize(manifest_stage_dir)
        return '{}={}'.format(src, entry.dest) if entry.dest else src
      files = map(as_cli_entry, self._entries) if self._entries else []

      jars = self._jars or []

      with safe_args(classpath, options, delimiter=',') as classpath_args:
        with safe_args(files, options, delimiter=',') as files_args:
          with safe_args(jars, options, delimiter=',') as jars_args:

            # If you specify --manifest to jar-tool you cannot specify --main.
            if self._manifest_entry:
              manifest_file = self._manifest_entry.materialize(manifest_stage_dir)
            else:
              manifest_file = None

            if self._main and manifest_file:
              main_arg = None
              with open(manifest_file, 'a') as f:
                f.write("Main-Class: {}\n".format(self._main))
            else:
              main_arg = self._main

            if main_arg:
              args.append('-main={}'.format(self._main))

            if classpath_args:
              args.append('-classpath={}'.format(','.join(classpath_args)))

            if manifest_file:
              args.append('-manifest={}'.format(manifest_file))

            if files_args:
              args.append('-files={}'.format(','.join(files_args)))

            if jars_args:
              args.append('-jars={}'.format(','.join(jars_args)))

            yield args
Example #4
  def _render_jar_tool_args(self, options):
    """Format the arguments to jar-tool.

    :param Options options:
    """
    args = []

    with temporary_dir() as manifest_stage_dir:
      classpath = self._classpath or []

      def as_cli_entry(entry):
        src = entry.materialize(manifest_stage_dir)
        return '{}={}'.format(src, entry.dest) if entry.dest else src
      files = map(as_cli_entry, self._entries) if self._entries else []

      jars = self._jars or []

      with safe_args(classpath, options, delimiter=',') as classpath_args:
        with safe_args(files, options, delimiter=',') as files_args:
          with safe_args(jars, options, delimiter=',') as jars_args:

            # If you specify --manifest to jar-tool you cannot specify --main.
            if self._manifest_entry:
              manifest_file = self._manifest_entry.materialize(manifest_stage_dir)
            else:
              manifest_file = None

            if self._main and manifest_file:
              main_arg = None
              with open(manifest_file, 'a') as f:
                f.write("Main-Class: {}\n".format(self._main))
            else:
              main_arg = self._main

            if main_arg:
              args.append('-main={}'.format(self._main))

            if classpath_args:
              args.append('-classpath={}'.format(','.join(classpath_args)))

            if manifest_file:
              args.append('-manifest={}'.format(manifest_file))

            if files_args:
              args.append('-files={}'.format(','.join(files_args)))

            if jars_args:
              args.append('-jars={}'.format(','.join(jars_args)))

            yield args
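
All four jar-tool variants yield args from inside the nested with blocks rather than returning it. That matters because the staged manifest and any argfiles written by safe_args must still exist when the caller actually runs the jar tool; presumably the method is consumed as a context manager for exactly that reason (an assumption about the surrounding code). The toy below shows the same yield-inside-with shape with made-up entries.

import os
from contextlib import contextmanager
from tempfile import TemporaryDirectory


@contextmanager
def render_args_sketch(entries):
    # Yielding from inside the `with` keeps the staging directory alive
    # while the caller uses the rendered arguments; it is cleaned up only
    # after the caller's block finishes.
    with TemporaryDirectory() as staging_dir:
        staged = []
        for name, contents in entries.items():  # hypothetical name -> contents mapping
            path = os.path.join(staging_dir, name)
            with open(path, 'w') as f:
                f.write(contents)
            staged.append(path)
        yield ['-files={}'.format(','.join(staged))] if staged else []


# with render_args_sketch({'MANIFEST.MF': 'Main-Class: Foo\n'}) as args:
#     print(args)  # e.g. ['-files=/tmp/.../MANIFEST.MF']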
Example #5
 def instrument(self, targets, tests, compute_junit_classpath):
   junit_classpath = compute_junit_classpath()
   safe_mkdir(self._coverage_instrument_dir, clean=True)
   self._emma_classpath = self._task_exports.tool_classpath('emma')
   with binary_util.safe_args(self.get_coverage_patterns(targets),
                              self._task_exports.task_options) as patterns:
     args = [
       'instr',
       '-out', self._coverage_metadata_file,
       '-d', self._coverage_instrument_dir,
       '-cp', os.pathsep.join(junit_classpath),
       '-exit'
       ]
     for pattern in patterns:
       args.extend(['-filter', pattern])
     main = 'emma'
     execute_java = self.preferred_jvm_distribution_for_targets(targets).execute_java
     result = execute_java(classpath=self._emma_classpath,
                           main=main,
                           jvm_options=self._coverage_jvm_options,
                           args=args,
                           workunit_factory=self._context.new_workunit,
                           workunit_name='emma-instrument')
     if result != 0:
       raise TaskError("java {0} ... exited non-zero ({1})"
                       " 'failed to instrument'".format(main, result))
Example #6
 def instrument(self, targets, tests, compute_junit_classpath):
     junit_classpath = compute_junit_classpath()
     safe_mkdir(self._coverage_instrument_dir, clean=True)
     self._emma_classpath = self._task_exports.tool_classpath('emma')
     with binary_util.safe_args(
             self.get_coverage_patterns(targets),
             self._task_exports.task_options) as patterns:
         args = [
             'instr', '-out', self._coverage_metadata_file, '-d',
             self._coverage_instrument_dir, '-cp',
             os.pathsep.join(junit_classpath), '-exit'
         ]
         for pattern in patterns:
             args.extend(['-filter', pattern])
         main = 'emma'
         execute_java = self.preferred_jvm_distribution_for_targets(
             targets).execute_java
         result = execute_java(classpath=self._emma_classpath,
                               main=main,
                               jvm_options=self._coverage_jvm_options,
                               args=args,
                               workunit_factory=self._context.new_workunit,
                               workunit_name='emma-instrument')
         if result != 0:
             raise TaskError("java {0} ... exited non-zero ({1})"
                             " 'failed to instrument'".format(main, result))
Example #7
    def _run_tests(self,
                   tests_to_targets,
                   main,
                   extra_jvm_options=None,
                   classpath_prepend=(),
                   classpath_append=()):
        extra_jvm_options = extra_jvm_options or []

        tests_by_properties = self._tests_by_properties(
            tests_to_targets, self._infer_workdir,
            lambda target: target.test_platform)

        result = 0
        for (workdir, platform), tests in tests_by_properties.items():
            for batch in self._partition(tests):
                # Batches of test classes will likely exist within the same targets: dedupe them.
                relevant_targets = set(map(tests_to_targets.get, batch))
                classpath = self._task_exports.classpath(
                    relevant_targets,
                    cp=self._task_exports.tool_classpath('junit'))
                complete_classpath = OrderedSet()
                complete_classpath.update(classpath_prepend)
                complete_classpath.update(classpath)
                complete_classpath.update(classpath_append)
                distribution = self.preferred_jvm_distribution([platform])
                with binary_util.safe_args(
                        batch, self._task_exports.task_options) as batch_tests:
                    self._context.log.debug('CWD = {}'.format(workdir))
                    self._context.log.debug('platform = {}'.format(platform))
                    result += abs(
                        distribution.execute_java(
                            classpath=complete_classpath,
                            main=main,
                            jvm_options=self._task_exports.jvm_options +
                            extra_jvm_options,
                            args=self._args + batch_tests + [u'-xmlreport'],
                            workunit_factory=self._context.new_workunit,
                            workunit_name='run',
                            workunit_labels=[WorkUnitLabel.TEST],
                            cwd=workdir,
                        ))

                    if result != 0 and self._fail_fast:
                        break

        if result != 0:
            failed_targets = self._get_failed_targets(tests_to_targets)
            raise TestFailedTaskError(
                'java {0} ... exited non-zero ({1}); {2} failed targets.'.
                format(main, result, len(failed_targets)),
                failed_targets=failed_targets)
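
Every _run_tests variant follows the same shape: group tests by properties, split each group into batches, deduplicate the targets behind a batch, and assemble the classpath in a fixed order (prepend, tool plus target classpath, append). The sketch below shows just the classpath assembly, using a plain dict for ordered de-duplication instead of OrderedSet so it stays dependency-free; the entry names are invented.

def assemble_classpath(prepend, target_classpath, append):
    # dicts preserve insertion order in Python 3.7+, so this mimics
    # OrderedSet.update(): first occurrence wins, order is kept.
    ordered = {}
    for entry in list(prepend) + list(target_classpath) + list(append):
        ordered.setdefault(entry, None)
    return list(ordered)


# Example: a coverage agent jar is prepended, junit and the targets'
# classes follow, and a report helper is appended.
print(assemble_classpath(
    ['cobertura.jar'],
    ['junit.jar', 'classes/', 'junit.jar'],  # duplicate entry is dropped
    ['report-helper.jar'],
))
# ['cobertura.jar', 'junit.jar', 'classes/', 'report-helper.jar']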
Example #8
  def _run_tests(self, tests_to_targets, main, extra_jvm_options=None, classpath_prepend=(),
                 classpath_append=()):
    extra_jvm_options = extra_jvm_options or []

    tests_by_properties = self._tests_by_properties(tests_to_targets,
                                                    self._infer_workdir,
                                                    lambda target: target.test_platform)

    result = 0
    for (workdir, platform), tests in tests_by_properties.items():
      for batch in self._partition(tests):
        # Batches of test classes will likely exist within the same targets: dedupe them.
        relevant_targets = set(map(tests_to_targets.get, batch))
        classpath = self._task_exports.classpath(relevant_targets,
                                                 cp=self._task_exports.tool_classpath('junit'))
        complete_classpath = OrderedSet()
        complete_classpath.update(classpath_prepend)
        complete_classpath.update(classpath)
        complete_classpath.update(classpath_append)
        if self._strict_jvm_version:
          max_version = Revision(*(platform.target_level.components + [9999]))
          distribution = Distribution.cached(minimum_version=platform.target_level,
                                             maximum_version=max_version)
        else:
          distribution = Distribution.cached(minimum_version=platform.target_level)
        with binary_util.safe_args(batch, self._task_exports.task_options) as batch_tests:
          self._context.log.debug('CWD = {}'.format(workdir))
          self._context.log.debug('platform = {}'.format(platform))
          result += abs(execute_java(
            classpath=complete_classpath,
            main=main,
            jvm_options=self._task_exports.jvm_options + extra_jvm_options,
            args=self._args + batch_tests + [u'-xmlreport'],
            workunit_factory=self._context.new_workunit,
            workunit_name='run',
            workunit_labels=[WorkUnitLabel.TEST],
            cwd=workdir,
            distribution=distribution,
          ))

          if result != 0 and self._fail_fast:
            break

    if result != 0:
      failed_targets = self._get_failed_targets(tests_to_targets)
      raise TestFailedTaskError(
        'java {0} ... exited non-zero ({1}); {2} failed targets.'
        .format(main, result, len(failed_targets)),
        failed_targets=failed_targets
      )
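
Example #8 differs from Example #7 mainly in how it picks the JVM: when self._strict_jvm_version is set, it caps the acceptable distribution at the target level by appending a large patch component to form the maximum version. A tiny sketch of that bound construction using plain tuples (the real code builds Revision objects from platform.target_level.components):

def strict_version_bounds(target_level):
    """Given a target level like (1, 8), pick bounds that only admit 1.8.x JVMs.

    Appending a large patch component gives an effective upper bound,
    mirroring the Revision(*(components + [9999])) trick in Example #8.
    """
    minimum = tuple(target_level)
    maximum = tuple(target_level) + (9999,)
    return minimum, maximum


print(strict_version_bounds((1, 8)))  # ((1, 8), (1, 8, 9999))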
Example #9
  def _run_tests(self, tests_to_targets, extra_jvm_options=None,
                 classpath_prepend=(), classpath_append=()):
    extra_jvm_options = extra_jvm_options or []

    tests_by_properties = self._tests_by_properties(tests_to_targets,
                                                    self._infer_workdir,
                                                    lambda target: target.test_platform)

    # the below will be None if not set, and we'll default back to compile_classpath
    classpath_product = self._context.products.get_data('instrument_classpath')

    result = 0
    for (workdir, platform), tests in tests_by_properties.items():
      for batch in self._partition(tests):
        # Batches of test classes will likely exist within the same targets: dedupe them.
        relevant_targets = set(map(tests_to_targets.get, batch))
        classpath = self._task_exports.classpath(relevant_targets,
                                                 classpath_prefix=self._task_exports.tool_classpath('junit'),
                                                 classpath_product=classpath_product)
        complete_classpath = OrderedSet()
        complete_classpath.update(classpath_prepend)
        complete_classpath.update(classpath)
        complete_classpath.update(classpath_append)
        distribution = self.preferred_jvm_distribution([platform])
        with binary_util.safe_args(batch, self._task_exports.task_options) as batch_tests:
          self._context.log.debug('CWD = {}'.format(workdir))
          self._context.log.debug('platform = {}'.format(platform))
          result += abs(distribution.execute_java(
            classpath=complete_classpath,
            main=JUnitRun._MAIN,
            jvm_options=self._task_exports.jvm_options + extra_jvm_options,
            args=self._args + batch_tests + [u'-xmlreport'],
            workunit_factory=self._context.new_workunit,
            workunit_name='run',
            workunit_labels=[WorkUnitLabel.TEST],
            cwd=workdir,
          ))

          if result != 0 and self._fail_fast:
            break

    if result != 0:
      failed_targets = self._get_failed_targets(tests_to_targets)
      raise TestFailedTaskError(
        'java {0} ... exited non-zero ({1}); {2} failed targets.'
        .format(JUnitRun._MAIN, result, len(failed_targets)),
        failed_targets=failed_targets
      )
Example #10
  def _run_tests(self, tests_to_targets, main, extra_jvm_options=None, classpath_prepend=(),
                 classpath_append=()):
    extra_jvm_options = extra_jvm_options or []

    result = 0
    for workdir, tests in self._tests_by_workdir(tests_to_targets).items():
      for batch in self._partition(tests):
        # Batches of test classes will likely exist within the same targets: dedupe them.
        relevant_targets = set(map(tests_to_targets.get, batch))
        classpath = self._task_exports.classpath(relevant_targets,
                                                 cp=self._task_exports.tool_classpath('junit'))
        complete_classpath = OrderedSet()
        complete_classpath.update(classpath_prepend)
        complete_classpath.update(classpath)
        complete_classpath.update(classpath_append)
        with binary_util.safe_args(batch, self._task_exports.task_options) as batch_tests:
          self._context.log.debug('CWD = {}'.format(workdir))
          result += abs(execute_java(
            classpath=complete_classpath,
            main=main,
            jvm_options=self._task_exports.jvm_options + extra_jvm_options,
            args=self._args + batch_tests + [u'-xmlreport'],
            workunit_factory=self._context.new_workunit,
            workunit_name='run',
            workunit_labels=[WorkUnitLabel.TEST],
            cwd=workdir,
          ))

          if result != 0 and self._fail_fast:
            break

    if result != 0:
      failed_targets = self._get_failed_targets(tests_to_targets)
      raise TestFailedTaskError(
        'java {0} ... exited non-zero ({1}); {2} failed targets.'
        .format(main, result, len(failed_targets)),
        failed_targets=failed_targets
      )
Example #11
  def _run_tests(self, tests_to_targets):
    if self._coverage:
      extra_jvm_options = self._coverage.extra_jvm_options
      classpath_prepend = self._coverage.classpath_prepend
      classpath_append = self._coverage.classpath_append
    else:
      extra_jvm_options = []
      classpath_prepend = ()
      classpath_append = ()

    tests_by_properties = self._tests_by_properties(
      tests_to_targets,
      self._infer_workdir,
      lambda target: target.test_platform,
      lambda target: target.payload.extra_jvm_options,
      lambda target: target.payload.extra_env_vars,
      lambda target: target.concurrency,
      lambda target: target.threads
    )

    # the below will be None if not set, and we'll default back to runtime_classpath
    classpath_product = self.context.products.get_data('instrument_classpath')

    result = 0
    for properties, tests in tests_by_properties.items():
      (workdir, platform, target_jvm_options, target_env_vars, concurrency, threads) = properties
      for batch in self._partition(tests):
        # Batches of test classes will likely exist within the same targets: dedupe them.
        relevant_targets = set(map(tests_to_targets.get, batch))
        complete_classpath = OrderedSet()
        complete_classpath.update(classpath_prepend)
        complete_classpath.update(self.tool_classpath('junit'))
        complete_classpath.update(self.classpath(relevant_targets,
                                                 classpath_product=classpath_product))
        complete_classpath.update(classpath_append)
        distribution = JvmPlatform.preferred_jvm_distribution([platform], self._strict_jvm_version)

        # Override cmdline args with values from junit_test() target that specify concurrency:
        args = self._args + [u'-xmlreport']

        if concurrency is not None:
          args = remove_arg(args, '-default-parallel')
          if concurrency == junit_tests.CONCURRENCY_SERIAL:
            args = ensure_arg(args, '-default-concurrency', param='SERIAL')
          elif concurrency == junit_tests.CONCURRENCY_PARALLEL_CLASSES:
            args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES')
          elif concurrency == junit_tests.CONCURRENCY_PARALLEL_METHODS:
            args = ensure_arg(args, '-default-concurrency', param='PARALLEL_METHODS')
          elif concurrency == junit_tests.CONCURRENCY_PARALLEL_CLASSES_AND_METHODS:
            args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES_AND_METHODS')

        if threads is not None:
          args = remove_arg(args, '-parallel-threads', has_param=True)
          args += ['-parallel-threads', str(threads)]

        with binary_util.safe_args(batch, self.get_options()) as batch_tests:
          self.context.log.debug('CWD = {}'.format(workdir))
          self.context.log.debug('platform = {}'.format(platform))
          with environment_as(**dict(target_env_vars)):
            result += abs(self._spawn_and_wait(
              executor=SubprocessExecutor(distribution),
              distribution=distribution,
              classpath=complete_classpath,
              main=JUnitRun._MAIN,
              jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
              args=args + batch_tests,
              workunit_factory=self.context.new_workunit,
              workunit_name='run',
              workunit_labels=[WorkUnitLabel.TEST],
              cwd=workdir,
              synthetic_jar_dir=self.workdir,
              create_synthetic_jar=self.synthetic_classpath,
            ))

          if result != 0 and self._fail_fast:
            break

    if result != 0:
      failed_targets_and_tests = self._get_failed_targets(tests_to_targets)
      failed_targets = sorted(failed_targets_and_tests, key=lambda target: target.address.spec)
      error_message_lines = []
      if self._failure_summary:
        for target in failed_targets:
          error_message_lines.append('\n{0}{1}'.format(' '*4, target.address.spec))
          for test in sorted(failed_targets_and_tests[target]):
            error_message_lines.append('{0}{1}'.format(' '*8, test))
      error_message_lines.append(
        '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
          .format(main=JUnitRun._MAIN, code=result, failed=len(failed_targets),
                  targets=pluralize(len(failed_targets), 'target'))
      )
      raise TestFailedTaskError('\n'.join(error_message_lines), failed_targets=list(failed_targets))
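
Examples #11 and #12 rewrite the command line based on per-target concurrency settings using remove_arg and ensure_arg. The helpers below are minimal, hypothetical stand-ins for those utilities, written only to make the rewriting behaviour concrete; the real implementations live in the project's argument utilities and may differ.

def remove_arg(args, arg, has_param=False):
    """Return a copy of args with `arg` (and its parameter, if any) removed."""
    out = []
    skip_next = False
    for a in args:
        if skip_next:
            skip_next = False
            continue
        if a == arg:
            skip_next = has_param
            continue
        out.append(a)
    return out


def ensure_arg(args, arg, param=None):
    """Return a copy of args that contains `arg` (with `param`) exactly once."""
    out = remove_arg(args, arg, has_param=param is not None)
    return out + ([arg, param] if param is not None else [arg])


args = ['-default-parallel', '-parallel-threads', '4', '-xmlreport']
args = remove_arg(args, '-default-parallel')
args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES')
args = remove_arg(args, '-parallel-threads', has_param=True)
args += ['-parallel-threads', '8']
print(args)  # ['-xmlreport', '-default-concurrency', 'PARALLEL_CLASSES', '-parallel-threads', '8']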
Example #12
  def _run_tests(self, tests_to_targets):
    if self._coverage:
      extra_jvm_options = self._coverage.extra_jvm_options
      classpath_prepend = self._coverage.classpath_prepend
      classpath_append = self._coverage.classpath_append
    else:
      extra_jvm_options = []
      classpath_prepend = ()
      classpath_append = ()

    tests_by_properties = self._tests_by_properties(
      tests_to_targets,
      self._infer_workdir,
      lambda target: target.test_platform,
      lambda target: target.payload.extra_jvm_options,
      lambda target: target.payload.extra_env_vars,
      lambda target: target.concurrency,
      lambda target: target.threads
    )

    # the below will be None if not set, and we'll default back to runtime_classpath
    classpath_product = self.context.products.get_data('instrument_classpath')

    result = 0
    for properties, tests in tests_by_properties.items():
      (workdir, platform, target_jvm_options, target_env_vars, concurrency, threads) = properties
      for batch in self._partition(tests):
        # Batches of test classes will likely exist within the same targets: dedupe them.
        relevant_targets = set(map(tests_to_targets.get, batch))
        complete_classpath = OrderedSet()
        complete_classpath.update(classpath_prepend)
        complete_classpath.update(self.tool_classpath('junit'))
        complete_classpath.update(self.classpath(relevant_targets,
                                                 classpath_product=classpath_product))
        complete_classpath.update(classpath_append)
        distribution = JvmPlatform.preferred_jvm_distribution([platform], self._strict_jvm_version)

        # Override cmdline args with values from junit_test() target that specify concurrency:
        args = self._args + [u'-xmlreport']

        # TODO(zundel): Combine these together into a single -concurrency choices style argument
        if concurrency == junit_tests.CONCURRENCY_SERIAL:
          args = remove_arg(args, '-default-parallel')
        elif concurrency == junit_tests.CONCURRENCY_PARALLEL_CLASSES:
          args = ensure_arg(args, '-default-parallel')
        elif concurrency == junit_tests.CONCURRENCY_PARALLEL_METHODS:
          self.context.log.warn('Not implemented: parallel_methods')
        elif concurrency == junit_tests.CONCURRENCY_PARALLEL_BOTH:
          self.context.log.warn('specifying {} is experimental.'.format(concurrency))
          args = ensure_arg(args, '-default-parallel')
          args = ensure_arg(args, '-parallel-methods')
        if threads is not None:
          args = remove_arg(args, '-parallel-threads', has_param=True)
          args += ['-parallel-threads', str(threads)]

        with binary_util.safe_args(batch, self.get_options()) as batch_tests:
          self.context.log.debug('CWD = {}'.format(workdir))
          self.context.log.debug('platform = {}'.format(platform))
          with environment_as(**dict(target_env_vars)):
            result += abs(self._spawn_and_wait(
              executor=SubprocessExecutor(distribution),
              distribution=distribution,
              classpath=complete_classpath,
              main=JUnitRun._MAIN,
              jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
              args=args + batch_tests,
              workunit_factory=self.context.new_workunit,
              workunit_name='run',
              workunit_labels=[WorkUnitLabel.TEST],
              cwd=workdir,
              synthetic_jar_dir=self.workdir,
              create_synthetic_jar=self.synthetic_classpath,
            ))

          if result != 0 and self._fail_fast:
            break

    if result != 0:
      failed_targets_and_tests = self._get_failed_targets(tests_to_targets)
      failed_targets = sorted(failed_targets_and_tests, key=lambda target: target.address.spec)
      error_message_lines = []
      if self._failure_summary:
        for target in failed_targets:
          error_message_lines.append('\n{0}{1}'.format(' '*4, target.address.spec))
          for test in sorted(failed_targets_and_tests[target]):
            error_message_lines.append('{0}{1}'.format(' '*8, test))
      error_message_lines.append(
        '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
          .format(main=JUnitRun._MAIN, code=result, failed=len(failed_targets),
                  targets=pluralize(len(failed_targets), 'target'))
      )
      raise TestFailedTaskError('\n'.join(error_message_lines), failed_targets=list(failed_targets))
Example #13
    def _run_tests(self,
                   tests_to_targets,
                   extra_jvm_options=None,
                   classpath_prepend=(),
                   classpath_append=()):
        extra_jvm_options = extra_jvm_options or []

        tests_by_properties = self._tests_by_properties(
            tests_to_targets, self._infer_workdir,
            lambda target: target.test_platform)

        # the below will be None if not set, and we'll default back to runtime_classpath
        classpath_product = self._context.products.get_data(
            'instrument_classpath')

        result = 0
        for (workdir, platform), tests in tests_by_properties.items():
            for batch in self._partition(tests):
                # Batches of test classes will likely exist within the same targets: dedupe them.
                relevant_targets = set(map(tests_to_targets.get, batch))
                classpath = self._task_exports.classpath(
                    relevant_targets,
                    classpath_prefix=self._task_exports.tool_classpath(
                        'junit'),
                    classpath_product=classpath_product)
                complete_classpath = OrderedSet()
                complete_classpath.update(classpath_prepend)
                complete_classpath.update(classpath)
                complete_classpath.update(classpath_append)
                distribution = self.preferred_jvm_distribution([platform])
                with binary_util.safe_args(
                        batch, self._task_exports.task_options) as batch_tests:
                    self._context.log.debug('CWD = {}'.format(workdir))
                    self._context.log.debug('platform = {}'.format(platform))
                    result += abs(
                        distribution.execute_java(
                            classpath=complete_classpath,
                            main=JUnitRun._MAIN,
                            jvm_options=self._task_exports.jvm_options +
                            extra_jvm_options,
                            args=self._args + batch_tests + [u'-xmlreport'],
                            workunit_factory=self._context.new_workunit,
                            workunit_name='run',
                            workunit_labels=[WorkUnitLabel.TEST],
                            cwd=workdir,
                        ))

                    if result != 0 and self._fail_fast:
                        break

        if result != 0:
            failed_targets_and_tests = self._get_failed_targets(
                tests_to_targets)
            failed_targets = sorted(failed_targets_and_tests,
                                    key=lambda target: target.address.spec)
            error_message_lines = []
            if self._failure_summary:
                for target in failed_targets:
                    error_message_lines.append('\n{0}{1}'.format(
                        ' ' * 4, target.address.spec))
                    for test in sorted(failed_targets_and_tests[target]):
                        error_message_lines.append('{0}{1}'.format(
                            ' ' * 8, test))
            error_message_lines.append(
                '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
                .format(main=JUnitRun._MAIN,
                        code=result,
                        failed=len(failed_targets),
                        targets=pluralize(len(failed_targets), 'target')))
            raise TestFailedTaskError('\n'.join(error_message_lines),
                                      failed_targets=list(failed_targets))
Example #14
  def _run_tests(self, tests_to_targets):

    if self._coverage:
      extra_jvm_options = self._coverage.extra_jvm_options
      classpath_prepend = self._coverage.classpath_prepend
      classpath_append = self._coverage.classpath_append
    else:
      extra_jvm_options = []
      classpath_prepend = ()
      classpath_append = ()

    tests_by_properties = self._tests_by_properties(
      tests_to_targets,
      self._infer_workdir,
      lambda target: target.test_platform,
      lambda target: target.payload.extra_jvm_options,
      lambda target: target.payload.extra_env_vars,
    )

    # the below will be None if not set, and we'll default back to runtime_classpath
    classpath_product = self.context.products.get_data('instrument_classpath')

    result = 0
    for (workdir, platform, target_jvm_options, target_env_vars), tests in tests_by_properties.items():
      for batch in self._partition(tests):
        # Batches of test classes will likely exist within the same targets: dedupe them.
        relevant_targets = set(map(tests_to_targets.get, batch))
        complete_classpath = OrderedSet()
        complete_classpath.update(classpath_prepend)
        complete_classpath.update(self.tool_classpath('junit'))
        complete_classpath.update(self.classpath(relevant_targets,
                                                 classpath_product=classpath_product))
        complete_classpath.update(classpath_append)
        distribution = self.preferred_jvm_distribution([platform])
        with binary_util.safe_args(batch, self.get_options()) as batch_tests:
          self.context.log.debug('CWD = {}'.format(workdir))
          self.context.log.debug('platform = {}'.format(platform))
          with environment_as(**dict(target_env_vars)):
            result += abs(self._spawn_and_wait(
              executor=SubprocessExecutor(distribution),
              distribution=distribution,
              classpath=complete_classpath,
              main=JUnitRun._MAIN,
              jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
              args=self._args + batch_tests + [u'-xmlreport'],
              workunit_factory=self.context.new_workunit,
              workunit_name='run',
              workunit_labels=[WorkUnitLabel.TEST],
              cwd=workdir,
              synthetic_jar_dir=self.workdir,
            ))

          if result != 0 and self._fail_fast:
            break

    if result != 0:
      failed_targets_and_tests = self._get_failed_targets(tests_to_targets)
      failed_targets = sorted(failed_targets_and_tests, key=lambda target: target.address.spec)
      error_message_lines = []
      if self._failure_summary:
        for target in failed_targets:
          error_message_lines.append('\n{0}{1}'.format(' '*4, target.address.spec))
          for test in sorted(failed_targets_and_tests[target]):
            error_message_lines.append('{0}{1}'.format(' '*8, test))
      error_message_lines.append(
        '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
          .format(main=JUnitRun._MAIN, code=result, failed=len(failed_targets),
                  targets=pluralize(len(failed_targets), 'target'))
      )
      raise TestFailedTaskError('\n'.join(error_message_lines), failed_targets=list(failed_targets))
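
Examples #11, #12 and #14 run each batch under environment_as(**dict(target_env_vars)) so that per-target environment variables apply only while that batch executes. A minimal, hypothetical version of that context manager, assuming the usual save-set-restore semantics:

import os
from contextlib import contextmanager


@contextmanager
def environment_as_sketch(**env):
    """Temporarily set environment variables, restoring the originals on exit."""
    saved = {key: os.environ.get(key) for key in env}
    os.environ.update(env)
    try:
        yield
    finally:
        for key, old in saved.items():
            if old is None:
                os.environ.pop(key, None)
            else:
                os.environ[key] = old


# with environment_as_sketch(TEST_MODE='1'):
#     run_batch()  # hypothetical; sees TEST_MODE=1
# TEST_MODE is restored (or removed) afterwards.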
Example #15
    def _run_tests(self, tests_to_targets, extra_jvm_options=None, classpath_prepend=(), classpath_append=()):
        extra_jvm_options = extra_jvm_options or []

        tests_by_properties = self._tests_by_properties(
            tests_to_targets, self._infer_workdir, lambda target: target.test_platform
        )

        # the below will be None if not set, and we'll default back to runtime_classpath
        classpath_product = self._context.products.get_data("instrument_classpath")

        result = 0
        for (workdir, platform), tests in tests_by_properties.items():
            for batch in self._partition(tests):
                # Batches of test classes will likely exist within the same targets: dedupe them.
                relevant_targets = set(map(tests_to_targets.get, batch))
                classpath = self._task_exports.classpath(
                    relevant_targets,
                    classpath_prefix=self._task_exports.tool_classpath("junit"),
                    classpath_product=classpath_product,
                )
                complete_classpath = OrderedSet()
                complete_classpath.update(classpath_prepend)
                complete_classpath.update(classpath)
                complete_classpath.update(classpath_append)
                distribution = self.preferred_jvm_distribution([platform])
                with binary_util.safe_args(batch, self._task_exports.task_options) as batch_tests:
                    self._context.log.debug("CWD = {}".format(workdir))
                    self._context.log.debug("platform = {}".format(platform))
                    result += abs(
                        distribution.execute_java(
                            classpath=complete_classpath,
                            main=JUnitRun._MAIN,
                            jvm_options=self._task_exports.jvm_options + extra_jvm_options,
                            args=self._args + batch_tests + ["-xmlreport"],
                            workunit_factory=self._context.new_workunit,
                            workunit_name="run",
                            workunit_labels=[WorkUnitLabel.TEST],
                            cwd=workdir,
                        )
                    )

                    if result != 0 and self._fail_fast:
                        break

        if result != 0:
            failed_targets_and_tests = self._get_failed_targets(tests_to_targets)
            failed_targets = sorted(failed_targets_and_tests, key=lambda target: target.address.spec)
            error_message_lines = []
            if self._failure_summary:
                for target in failed_targets:
                    error_message_lines.append("\n{0}{1}".format(" " * 4, target.address.spec))
                    for test in sorted(failed_targets_and_tests[target]):
                        error_message_lines.append("{0}{1}".format(" " * 8, test))
            error_message_lines.append(
                "\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.".format(
                    main=JUnitRun._MAIN,
                    code=result,
                    failed=len(failed_targets),
                    targets=pluralize(len(failed_targets), "target"),
                )
            )
            raise TestFailedTaskError("\n".join(error_message_lines), failed_targets=list(failed_targets))