Code example #1
    def compile_chunk(self, invalidation_check, all_targets, relevant_targets,
                      invalid_targets, extra_compile_time_classpath_elements,
                      compile_vts, register_vts,
                      update_artifact_cache_vts_work):
        """Executes compilations for the invalid targets contained in a single chunk."""
        assert invalid_targets, "compile_chunk should only be invoked if there are invalid targets."
        # Get the classpath generated by upstream JVM tasks and our own prepare_compile().
        compile_classpaths = self.context.products.get_data(
            'compile_classpath')

        extra_compile_time_classpath = self._compute_extra_classpath(
            extra_compile_time_classpath_elements)

        compile_contexts = self._create_compile_contexts_for_targets(
            all_targets)

        # Now create compile jobs for each invalid target one by one.
        jobs = self._create_compile_jobs(
            compile_classpaths, compile_contexts, extra_compile_time_classpath,
            invalid_targets, invalidation_check.invalid_vts_partitioned,
            compile_vts, register_vts, update_artifact_cache_vts_work)

        exec_graph = ExecutionGraph(jobs)
        try:
            exec_graph.execute(self._worker_pool, self.context.log)
        except ExecutionFailure as e:
            raise TaskError("Compilation failure: {}".format(e))
Code example #2
File: jvm_compile.py Project: omerzach/pants
  def do_compile(self, invalidation_check, compile_contexts):
    """Executes compilations for the invalid targets contained in a single chunk."""

    invalid_targets = [vt.target for vt in invalidation_check.invalid_vts]
    assert invalid_targets, "compile_chunk should only be invoked if there are invalid targets."

    # This ensures the workunit for the worker pool is set before attempting to compile.
    with self.context.new_workunit('isolation-{}-pool-bootstrap'.format(self.name())) \
            as workunit:
      # This uses workunit.parent as the WorkerPool's parent so that child workunits
      # of different pools will show up in order in the html output. This way the current running
      # workunit is on the bottom of the page rather than possibly in the middle.
      worker_pool = WorkerPool(workunit.parent,
                               self.context.run_tracker,
                               self._worker_count)

    # Prepare the output directory for each invalid target, and confirm that analysis is valid.
    for target in invalid_targets:
      cc = self.select_runtime_context(compile_contexts[target])
      safe_mkdir(cc.classes_dir)

    # Now create compile jobs for each invalid target one by one, using the classpath
    # generated by upstream JVM tasks and our own prepare_compile().
    jobs = self._create_compile_jobs(compile_contexts,
                                     invalid_targets,
                                     invalidation_check.invalid_vts)

    exec_graph = ExecutionGraph(jobs, self.get_options().print_exception_stacktrace)
    try:
      exec_graph.execute(worker_pool, self.context.log)
    except ExecutionFailure as e:
      raise TaskError("Compilation failure: {}".format(e))
Code example #3
File: jvm_compile.py Project: cheister/pants
  def compile_chunk(self,
                    invalidation_check,
                    compile_contexts,
                    invalid_targets,
                    extra_compile_time_classpath_elements):
    """Executes compilations for the invalid targets contained in a single chunk."""
    assert invalid_targets, "compile_chunk should only be invoked if there are invalid targets."

    # Prepare the output directory for each invalid target, and confirm that analysis is valid.
    for target in invalid_targets:
      cc = compile_contexts[target]
      safe_mkdir(cc.classes_dir)
      self.validate_analysis(cc.analysis_file)

    # Get the classpath generated by upstream JVM tasks and our own prepare_compile().
    classpath_products = self.context.products.get_data('runtime_classpath')

    extra_compile_time_classpath = self._compute_extra_classpath(
        extra_compile_time_classpath_elements)

    # Now create compile jobs for each invalid target one by one.
    jobs = self._create_compile_jobs(classpath_products,
                                     compile_contexts,
                                     extra_compile_time_classpath,
                                     invalid_targets,
                                     invalidation_check.invalid_vts)

    exec_graph = ExecutionGraph(jobs)
    try:
      exec_graph.execute(self._worker_pool, self.context.log)
    except ExecutionFailure as e:
      raise TaskError("Compilation failure: {}".format(e))
Code example #4
File: jvm_compile.py Project: baroquebobcat/pants
  def do_compile(self, invalidation_check, compile_contexts):
    """Executes compilations for the invalid targets contained in a single chunk."""

    invalid_targets = [vt.target for vt in invalidation_check.invalid_vts]
    assert invalid_targets, "compile_chunk should only be invoked if there are invalid targets."

    # This ensures the workunit for the worker pool is set before attempting to compile.
    with self.context.new_workunit('isolation-{}-pool-bootstrap'.format(self.name())) \
            as workunit:
      # This uses workunit.parent as the WorkerPool's parent so that child workunits
      # of different pools will show up in order in the html output. This way the current running
      # workunit is on the bottom of the page rather than possibly in the middle.
      worker_pool = WorkerPool(workunit.parent,
                               self.context.run_tracker,
                               self._worker_count)

    # Prepare the output directory for each invalid target, and confirm that analysis is valid.
    for target in invalid_targets:
      cc = self.select_runtime_context(compile_contexts[target])
      safe_mkdir(cc.classes_dir)

    # Now create compile jobs for each invalid target one by one, using the classpath
    # generated by upstream JVM tasks and our own prepare_compile().
    jobs = self._create_compile_jobs(compile_contexts,
                                     invalid_targets,
                                     invalidation_check.invalid_vts)

    exec_graph = ExecutionGraph(jobs)
    try:
      exec_graph.execute(worker_pool, self.context.log)
    except ExecutionFailure as e:
      raise TaskError("Compilation failure: {}".format(e))
Code example #5
    def compile_chunk(self, invalidation_check, compile_contexts,
                      invalid_targets, extra_compile_time_classpath_elements):
        """Executes compilations for the invalid targets contained in a single chunk."""
        assert invalid_targets, "compile_chunk should only be invoked if there are invalid targets."

        # Prepare the output directory for each invalid target, and confirm that analysis is valid.
        for target in invalid_targets:
            cc = compile_contexts[target]
            safe_mkdir(cc.classes_dir)
            self.validate_analysis(cc.analysis_file)

        # Get the classpath generated by upstream JVM tasks and our own prepare_compile().
        classpath_products = self.context.products.get_data(
            'runtime_classpath')

        extra_compile_time_classpath = self._compute_extra_classpath(
            extra_compile_time_classpath_elements)

        # Now create compile jobs for each invalid target one by one.
        jobs = self._create_compile_jobs(
            classpath_products, compile_contexts, extra_compile_time_classpath,
            invalid_targets, invalidation_check.invalid_vts_partitioned)

        exec_graph = ExecutionGraph(jobs)
        try:
            exec_graph.execute(self._worker_pool, self.context.log)
        except ExecutionFailure as e:
            raise TaskError("Compilation failure: {}".format(e))
Code example #6
  def compile_chunk(self,
                    invalidation_check,
                    all_targets,
                    relevant_targets,
                    invalid_targets,
                    extra_compile_time_classpath_elements,
                    compile_vts,
                    register_vts,
                    update_artifact_cache_vts_work):
    """Executes compilations for the invalid targets contained in a single chunk."""
    assert invalid_targets, "compile_chunk should only be invoked if there are invalid targets."
    # Get the classpath generated by upstream JVM tasks and our own prepare_compile().
    compile_classpaths = self.context.products.get_data('compile_classpath')

    extra_compile_time_classpath = self._compute_extra_classpath(
        extra_compile_time_classpath_elements)

    compile_contexts = self._create_compile_contexts_for_targets(all_targets)

    # Now create compile jobs for each invalid target one by one.
    jobs = self._create_compile_jobs(compile_classpaths,
                                     compile_contexts,
                                     extra_compile_time_classpath,
                                     invalid_targets,
                                     invalidation_check.invalid_vts_partitioned,
                                     compile_vts,
                                     register_vts,
                                     update_artifact_cache_vts_work)

    exec_graph = ExecutionGraph(jobs)
    try:
      exec_graph.execute(self._worker_pool, self.context.log)
    except ExecutionFailure as e:
      raise TaskError("Compilation failure: {}".format(e))
Code example #7
File: jvm_compile.py Project: thoward/pants
    def do_compile(self, invalidation_check, compile_contexts,
                   classpath_product):
        """Executes compilations for the invalid targets contained in a single chunk."""

        invalid_targets = [vt.target for vt in invalidation_check.invalid_vts]
        valid_targets = [
            vt.target for vt in invalidation_check.all_vts if vt.valid
        ]

        if self.execution_strategy == self.HERMETIC:
            self._set_directory_digests_for_valid_target_classpath_directories(
                valid_targets, compile_contexts)

        for valid_target in valid_targets:
            cc = self.select_runtime_context(compile_contexts[valid_target])

            classpath_product.add_for_target(
                valid_target,
                [(conf, self._classpath_for_context(cc))
                 for conf in self._confs],
            )
        self.register_extra_products_from_contexts(valid_targets,
                                                   compile_contexts)

        if not invalid_targets:
            return

        # This ensures the workunit for the worker pool is set before attempting to compile.
        with self.context.new_workunit('isolation-{}-pool-bootstrap'.format(self.name())) \
                as workunit:
            # This uses workunit.parent as the WorkerPool's parent so that child workunits
            # of different pools will show up in order in the html output. This way the current running
            # workunit is on the bottom of the page rather than possibly in the middle.
            worker_pool = WorkerPool(workunit.parent, self.context.run_tracker,
                                     self._worker_count)

        # Prepare the output directory for each invalid target, and confirm that analysis is valid.
        for target in invalid_targets:
            cc = self.select_runtime_context(compile_contexts[target])
            safe_mkdir(cc.classes_dir.path)

        # Now create compile jobs for each invalid target one by one, using the classpath
        # generated by upstream JVM tasks and our own prepare_compile().
        jobs = self._create_compile_jobs(compile_contexts, invalid_targets,
                                         invalidation_check.invalid_vts,
                                         classpath_product)

        exec_graph = ExecutionGraph(
            jobs,
            self.get_options().print_exception_stacktrace)
        try:
            exec_graph.execute(worker_pool, self.context.log)
        except ExecutionFailure as e:
            raise TaskError("Compilation failure: {}".format(e))
Code example #8
File: test_execution_graph.py Project: wiwa/pants
 def test_dumps_stack_trace(self):
     graph = ExecutionGraph([self.job("A", raising_wrapper, [])], True)
     capturing_logger = CapturingLogger()
     with self.assertRaises(ExecutionFailure):
         graph.execute(ImmediatelyExecutingPool(), capturing_logger)
     error_logs = capturing_logger.log_entries["error"]
     self.assertEqual(2, len(error_logs), msg=f"Wanted one error log, got: {error_logs}")
     regex = re.compile("A failed: I'm an error.*")
     self.assertRegex(error_logs[0], regex)
     regex = re.compile(
         'Traceback:.*in raising_wrapper.*raise Exception\\("I\'m an error.*"\\)', re.DOTALL,
     )
     self.assertRegex(error_logs[1], regex)
Code example #9
 def test_dumps_stack_trace(self):
   graph = ExecutionGraph([self.job('A', raising_wrapper, [])], True)
   capturing_logger = CapturingLogger()
   with self.assertRaises(ExecutionFailure):
     graph.execute(ImmediatelyExecutingPool(), capturing_logger)
   error_logs = capturing_logger.log_entries['error']
   self.assertEquals(2, len(error_logs), msg='Wanted one error log, got: {}'.format(error_logs))
   self.assertEquals("A failed: I'm an error", error_logs[0])
   regex = re.compile(
     "Traceback:.*in raising_wrapper.*raise Exception\\(\"I'm an error\"\\)",
     re.DOTALL,
   )
   self.assertRegexpMatches(error_logs[1], regex)
Code example #10
File: test_execution_graph.py Project: thoward/pants
 def test_dumps_stack_trace(self):
   graph = ExecutionGraph([self.job('A', raising_wrapper, [])], True)
   capturing_logger = CapturingLogger()
   with self.assertRaises(ExecutionFailure):
     graph.execute(ImmediatelyExecutingPool(), capturing_logger)
   error_logs = capturing_logger.log_entries['error']
   self.assertEqual(2, len(error_logs), msg='Wanted one error log, got: {}'.format(error_logs))
   self.assertEqual("A failed: I'm an error", error_logs[0])
   regex = re.compile(
     "Traceback:.*in raising_wrapper.*raise Exception\\(\"I'm an error\"\\)",
     re.DOTALL,
   )
   assertRegex(self, error_logs[1], regex)
Code example #11
File: jvm_compile.py Project: jsirois/pants
  def do_compile(self, invalidation_check, compile_contexts, classpath_product):
    """Executes compilations for the invalid targets contained in a single chunk."""

    invalid_targets = [vt.target for vt in invalidation_check.invalid_vts]
    valid_targets = [vt.target for vt in invalidation_check.all_vts if vt.valid]

    if self.execution_strategy == self.HERMETIC:
      self._set_directory_digests_for_valid_target_classpath_directories(valid_targets, compile_contexts)

    for valid_target in valid_targets:
      cc = self.select_runtime_context(compile_contexts[valid_target])

      classpath_product.add_for_target(
        valid_target,
        [(conf, self._classpath_for_context(cc)) for conf in self._confs],
      )
    self.register_extra_products_from_contexts(valid_targets, compile_contexts)

    if not invalid_targets:
      return

    # This ensures the workunit for the worker pool is set before attempting to compile.
    with self.context.new_workunit('isolation-{}-pool-bootstrap'.format(self.name())) \
            as workunit:
      # This uses workunit.parent as the WorkerPool's parent so that child workunits
      # of different pools will show up in order in the html output. This way the current running
      # workunit is on the bottom of the page rather than possibly in the middle.
      worker_pool = WorkerPool(workunit.parent,
                               self.context.run_tracker,
                               self._worker_count)

    # Prepare the output directory for each invalid target, and confirm that analysis is valid.
    for target in invalid_targets:
      cc = self.select_runtime_context(compile_contexts[target])
      safe_mkdir(cc.classes_dir.path)

    # Now create compile jobs for each invalid target one by one, using the classpath
    # generated by upstream JVM tasks and our own prepare_compile().
    jobs = self._create_compile_jobs(compile_contexts,
                                     invalid_targets,
                                     invalidation_check.invalid_vts,
                                     classpath_product)

    exec_graph = ExecutionGraph(jobs, self.get_options().print_exception_stacktrace)
    try:
      exec_graph.execute(worker_pool, self.context.log)
    except ExecutionFailure as e:
      raise TaskError("Compilation failure: {}".format(e))
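Every example above follows the same pattern: build a list of jobs, hand them to an ExecutionGraph, and run the graph against a worker pool and a logger, converting ExecutionFailure into TaskError. The snippet below is a minimal sketch of that pattern outside of a Pants task, not a definitive usage: the Job(key, fn, dependencies) call, the two-argument ExecutionGraph constructor (matching examples #2, #7, and #8 through #11; examples #1 and #3 through #6 pass only the job list), and the pool's num_workers/submit_async_work interface are assumptions modeled on the ImmediatelyExecutingPool and CapturingLogger test helpers referenced in examples #8 through #10. InlinePool and SimpleLogger are hypothetical stand-ins for WorkerPool and self.context.log.

from pants.backend.jvm.tasks.jvm_compile.execution_graph import (
    ExecutionFailure, ExecutionGraph, Job)


class InlinePool:
    """Hypothetical stand-in for WorkerPool: runs submitted work on the calling thread."""
    num_workers = 1

    def submit_async_work(self, work):
        # Assumption: `work` carries a callable plus argument tuples, as in the
        # ImmediatelyExecutingPool helper used by the tests above.
        work.func(*work.args_tuples[0])


class SimpleLogger:
    """Hypothetical stand-in for self.context.log."""

    def debug(self, *msg): print('DEBUG', *msg)
    def info(self, *msg): print('INFO', *msg)
    def warn(self, *msg): print('WARN', *msg)
    def error(self, *msg): print('ERROR', *msg)


def compile_a():
    print('compiling A')


def compile_b():
    print('compiling B (depends on A)')


jobs = [
    Job('A', compile_a, []),     # no dependencies
    Job('B', compile_b, ['A']),  # scheduled only after 'A' succeeds
]

exec_graph = ExecutionGraph(jobs, True)  # True: print stack traces on failure
try:
    exec_graph.execute(InlinePool(), SimpleLogger())
except ExecutionFailure as e:
    print('Compilation failure: {}'.format(e))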