Example #1
  def relativize_analysis_file(self, src, dst):
    # Make an analysis cache portable. Work on a tmpfile, for safety.
    #
    # NOTE: We can't port references to deps on the Java home. This is because different JVM
    # implementations on different systems have different structures, and there's not
    # necessarily a 1-1 mapping between Java jars on different systems. Instead we simply
    # drop those references from the analysis file.
    #
    # In practice the JVM changes rarely, and it should be fine to require a full rebuild
    # in those rare cases.
    with temporary_dir() as tmp_analysis_dir:
      stripped_src = os.path.join(tmp_analysis_dir, 'analysis.nojava')
      tmp_analysis_file = os.path.join(tmp_analysis_dir, 'analysis.relativized')

      # Strip all lines containing self._java_home.
      with open(src, 'r') as infile:
        with open(stripped_src, 'w') as outfile:
          for line in infile:
            if self._java_home not in line:
              outfile.write(line)

      rebasings = [
        (self._ivy_home, ZincUtils.IVY_HOME_PLACEHOLDER),
        (self._pants_home, ZincUtils.PANTS_HOME_PLACEHOLDER),
      ]
      Analysis.rebase(stripped_src, tmp_analysis_file, rebasings)
      shutil.move(tmp_analysis_file, dst)
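
Analysis.rebase itself isn't shown in these examples. As a rough mental model it can be read as a prefix substitution over the analysis file; the sketch below is a hypothetical simplification (the real implementation may parse the analysis format rather than treating it as text):

def rebase_sketch(src_path, dst_path, rebasings):
  # Hypothetical stand-in for Analysis.rebase: rewrite src_path to dst_path,
  # applying each (old, new) substitution to every line.
  with open(src_path, 'r') as infile:
    with open(dst_path, 'w') as outfile:
      for line in infile:
        for old, new in rebasings:
          line = line.replace(old, new)
        outfile.write(line)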
Example #2
  def localize_analysis_file(self, src, dst):
    # Reverse of relativize_analysis_file: map the placeholders back to
    # this machine's local paths. Work on a tmpfile, for safety.
    with temporary_dir() as tmp_analysis_dir:
      tmp_analysis_file = os.path.join(tmp_analysis_dir, 'analysis')
      rebasings = [
        (ZincUtils.IVY_HOME_PLACEHOLDER, self._ivy_home),
        (ZincUtils.PANTS_HOME_PLACEHOLDER, self._pants_home),
      ]
      Analysis.rebase(src, tmp_analysis_file, rebasings)
      shutil.move(tmp_analysis_file, dst)
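
Examples #1 and #2 are inverses: relativize before writing an analysis to a shared cache, localize after fetching one. A hedged usage sketch (the zinc_utils instance and the paths are assumed, not taken from the source):

# Hypothetical round trip; zinc_utils is an already-constructed ZincUtils.
zinc_utils.relativize_analysis_file('analysis', 'analysis.portable')
# ... the portable file travels via the artifact cache to another machine ...
zinc_utils.localize_analysis_file('analysis.portable', 'analysis')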
Example #3
    def post_process_cached_vts(cached_vts):
      # Merge the localized analysis with the global one (if any).
      analyses_to_merge = []
      for vt in cached_vts:
        for target in vt.targets:
          analysis_file = ScalaCompile._analysis_for_target(self._analysis_tmpdir, target)
          portable_analysis_file = ScalaCompile._portable_analysis_for_target(self._analysis_tmpdir, target)
          if os.path.exists(portable_analysis_file):
            self._zinc_utils.localize_analysis_file(portable_analysis_file, analysis_file)
          if os.path.exists(analysis_file):
            analyses_to_merge.append(analysis_file)

      if analyses_to_merge:
        if os.path.exists(self._analysis_file):
          analyses_to_merge.append(self._analysis_file)
        with contextutil.temporary_dir() as tmpdir:
          tmp_analysis = os.path.join(tmpdir, 'analysis')
          Analysis.merge_from_paths(analyses_to_merge, tmp_analysis)
          shutil.move(tmp_analysis, self._analysis_file)
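
ScalaCompile._analysis_for_target and ._portable_analysis_for_target aren't shown here. A plausible sketch, assuming they derive per-target file names from the target's id (the naming scheme is hypothetical):

import os

def analysis_for_target(analysis_dir, target):
  # Hypothetical: one analysis file per target, keyed by the target's id.
  return os.path.join(analysis_dir, target.id + '.analysis')

def portable_analysis_for_target(analysis_dir, target):
  # Hypothetical: the portable variant sits alongside, with a suffix.
  return analysis_for_target(analysis_dir, target) + '.portable'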
Example #4
  def _compute_classes_by_source(self, analysis_file=None):
    """Compute src->classes."""
    if analysis_file is None:
      analysis_file = self._analysis_file

    if not os.path.exists(analysis_file):
      return {}
    buildroot = get_buildroot()
    products = Analysis.parse_products_from_path(analysis_file)
    classes_by_src = {}
    for src, classes in products.items():
      relsrc = os.path.relpath(src, buildroot)
      classes_by_src[relsrc] = [os.path.relpath(cls, self._classes_dir) for cls in classes]
    return classes_by_src
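
The returned mapping is relative on both sides: source paths relative to the buildroot, class file paths relative to the classes directory. An illustrative call (the paths in the comment are made up):

classes_by_src = self._compute_classes_by_source()
# Illustrative (made-up) shape of the result:
# {'src/scala/com/foo/Foo.scala': ['com/foo/Foo.class', 'com/foo/Foo$.class']}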
Example #5
  def _get_deleted_sources(self):
    """Returns the list of sources present in the last analysis that have since been deleted.

    This is a global list. We have no way of associating them to individual targets.
    """
    # We compute the list lazily.
    if self._deleted_sources is None:
      with self.context.new_workunit('find-deleted-sources'):
        if os.path.exists(self._analysis_file):
          products = Analysis.parse_products_from_path(self._analysis_file)
          buildroot = get_buildroot()
          old_sources = [os.path.relpath(src, buildroot) for src in products.keys()]
          self._deleted_sources = [src for src in old_sources if not os.path.exists(src)]
        else:
          self._deleted_sources = []
    return self._deleted_sources
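
The lazily-computed, memoized attribute here is a small pattern worth isolating: compute on first access, cache in an instance attribute, return the cached value thereafter. Reduced to its essentials (a standalone sketch, not a real Pants class):

class LazyExample(object):
  def __init__(self):
    self._deleted_sources = None  # Not yet computed.

  def deleted_sources(self):
    if self._deleted_sources is None:          # First access only.
      self._deleted_sources = self._compute()  # Expensive scan, run at most once.
    return self._deleted_sources

  def _compute(self):
    return []  # Placeholder for the real filesystem scan.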
Example #6
  def execute(self, targets):
    # TODO(benjy): Add a pre-execute phase for injecting deps into targets, so we
    # can inject a dep on the scala runtime library and still have it ivy-resolve.

    scala_targets = [t for t in targets if t.has_sources('.scala')]
    
    if not scala_targets:
      return

    # Get the exclusives group for the targets to compile.
    # The group guarantees that there'll be a single exclusives key for them.
    egroups = self.context.products.get_data('exclusives_groups')
    group_id = egroups.get_group_key_for_target(scala_targets[0])

    # Add resource dirs to the classpath for us and for downstream tasks.
    for conf in self._confs:
      egroups.update_compatible_classpaths(group_id, [(conf, self._resources_dir)])

    # Get the classpath generated by upstream JVM tasks (including previous calls to execute()).
    cp = egroups.get_classpath_for_group(group_id)

    # Add (only to the local copy) classpath entries necessary for our compiler plugins.
    for conf in self._confs:
      for jar in self._zinc_utils.plugin_jars():
        cp.insert(0, (conf, jar))

    # Invalidation check. Everything inside the with block must succeed for the
    # invalid targets to become valid.
    with self.invalidated(scala_targets, invalidate_dependents=True,
                          partition_size_hint=self._partition_size_hint) as invalidation_check:
      if invalidation_check.invalid_vts and not self.dry_run:
        invalid_targets = [vt.target for vt in invalidation_check.invalid_vts]
        # The analysis for invalid and deleted sources is no longer valid.
        invalid_sources_by_target = self._compute_sources_by_target(invalid_targets)
        invalid_sources = list(itertools.chain.from_iterable(invalid_sources_by_target.values()))
        deleted_sources = self._get_deleted_sources()

        # Work in a tmpdir so we don't stomp the main analysis files on error.
        # The tmpdir is cleaned up in a shutdown hook, because background work
        # may need to access files we create here even after this method returns.
        self._ensure_analysis_tmpdir()
        tmpdir = os.path.join(self._analysis_tmpdir, str(uuid.uuid4()))
        os.mkdir(tmpdir)
        valid_analysis_tmp = os.path.join(tmpdir, 'valid_analysis')
        newly_invalid_analysis_tmp = os.path.join(tmpdir, 'newly_invalid_analysis')
        invalid_analysis_tmp = os.path.join(tmpdir, 'invalid_analysis')
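        # Plan for these three tmp files: split the current valid analysis into
        # (still-valid, newly-invalid) parts, fold the newly-invalid part into any
        # existing invalid analysis, then move both results into place below.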
        if ZincUtils.is_nonempty_analysis(self._analysis_file):
          with self.context.new_workunit(name='prepare-analysis'):
            Analysis.split_to_paths(self._analysis_file,
                                    [(invalid_sources + deleted_sources, newly_invalid_analysis_tmp)],
                                    valid_analysis_tmp)
            if ZincUtils.is_nonempty_analysis(self._invalid_analysis_file):
              Analysis.merge_from_paths([self._invalid_analysis_file, newly_invalid_analysis_tmp],
                                        invalid_analysis_tmp)
            else:
              invalid_analysis_tmp = newly_invalid_analysis_tmp

            # Now it's OK to overwrite the main analysis files with the new state.
            shutil.move(valid_analysis_tmp, self._analysis_file)
            shutil.move(invalid_analysis_tmp, self._invalid_analysis_file)

        # Figure out the sources and analysis belonging to each partition.
        partitions = []  # Each element is a triple (vts, sources, analysis_file).
        for vts in invalidation_check.invalid_vts_partitioned:
          partition_tmpdir = os.path.join(tmpdir, Target.maybe_readable_identify(vts.targets))
          os.mkdir(partition_tmpdir)
          sources = list(itertools.chain.from_iterable(
            [invalid_sources_by_target.get(t, []) for t in vts.targets]))
          analysis_file = os.path.join(partition_tmpdir, 'analysis')
          partitions.append((vts, sources, analysis_file))

        # Split per-partition files out of the global invalid analysis.
        if ZincUtils.is_nonempty_analysis(self._invalid_analysis_file) and partitions:
          with self.context.new_workunit(name='partition-analysis'):
            splits = [(x[1], x[2]) for x in partitions]
            Analysis.split_to_paths(self._invalid_analysis_file, splits)

        # Now compile partitions one by one.
        for partition in partitions:
          (vts, sources, analysis_file) = partition
          self._process_target_partition(partition, cp)
          # No exception was thrown, therefore the compile succeeded and analysis_file is now valid.

          if os.path.exists(analysis_file):  # The compilation created an analysis.
            # Merge the newly-valid analysis with our global valid analysis.
            new_valid_analysis = analysis_file + '.valid.new'
            if ZincUtils.is_nonempty_analysis(self._analysis_file):
              with self.context.new_workunit(name='update-upstream-analysis'):
                Analysis.merge_from_paths([self._analysis_file, analysis_file], new_valid_analysis)
            else:  # We need to keep analysis_file around. Background tasks may need it.
              shutil.copy(analysis_file, new_valid_analysis)

            # Move the merged valid analysis to its proper location.
            # We do this before checking for missing dependencies, so that we can still
            # enjoy an incremental compile after fixing missing deps.
            shutil.move(new_valid_analysis, self._analysis_file)

            # Check for missing dependencies.
            actual_deps = Analysis.parse_deps_from_path(self._analysis_file)
            # TODO(benjy): Temporary hack until we inject a dep on the scala runtime jar.
            actual_deps_filtered = {}
            scalalib_re = re.compile(r'scala-library-\d+\.\d+\.\d+\.jar$')
            for src, deps in actual_deps.iteritems():
              actual_deps_filtered[src] = [dep for dep in deps if not scalalib_re.search(dep)]
            self.check_for_missing_dependencies(sources, actual_deps_filtered)

            # Kick off the background artifact cache write.
            if self.artifact_cache_writes_enabled():
              self._write_to_artifact_cache(analysis_file, vts, invalid_sources_by_target)

          if ZincUtils.is_nonempty_analysis(self._invalid_analysis_file):
            with self.context.new_workunit(name='trim-downstream-analysis'):
              # Trim out the newly-valid sources from our global invalid analysis.
              new_invalid_analysis = analysis_file + '.invalid.new'
              discarded_invalid_analysis = analysis_file + '.invalid.discard'
              Analysis.split_to_paths(self._invalid_analysis_file,
                                      [(sources, discarded_invalid_analysis)], new_invalid_analysis)
              shutil.move(new_invalid_analysis, self._invalid_analysis_file)

          # Now that all the analysis accounting is complete, and we have no missing deps,
          # we can safely mark the targets as valid.
          vts.update()

    # Provide the target->class and source->class mappings to downstream tasks if needed.
    if self.context.products.isrequired('classes'):
      sources_by_target = self._compute_sources_by_target(scala_targets)
      classes_by_source = self._compute_classes_by_source()
      self._add_all_products_to_genmap(sources_by_target, classes_by_source)

    # Update the classpath for downstream tasks.
    for conf in self._confs:
      egroups.update_compatible_classpaths(group_id, [(conf, self._classes_dir)])
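
Taken as a whole, the analysis bookkeeping in execute() is a split/compile/merge cycle: carve the invalid sources out of the valid analysis, compile partition by partition, and after each successful partition merge its fresh analysis into the valid file while trimming those sources from the invalid file. A toy model of that cycle using plain dicts in place of analysis files (illustrative only; the real files are opaque and handled via Analysis.split_to_paths and Analysis.merge_from_paths):

def split(analysis, sources):
  # Partition an analysis dict into (matching, remaining) by source key.
  matching = dict((s, c) for s, c in analysis.items() if s in sources)
  remaining = dict((s, c) for s, c in analysis.items() if s not in sources)
  return matching, remaining

def merge(analyses):
  merged = {}
  for a in analyses:
    merged.update(a)
  return merged

# Before compiling: move newly-invalid sources out of the valid analysis.
valid = {'A.scala': ['A.class'], 'B.scala': ['B.class']}
newly_invalid, valid = split(valid, set(['B.scala']))
invalid = merge([{'C.scala': ['C.class']}, newly_invalid])

# After a partition compiles: merge its fresh analysis back into the valid
# one, and trim the now-valid sources out of the invalid one.
fresh = {'B.scala': ['B.class']}
valid = merge([valid, fresh])
_, invalid = split(invalid, set(fresh))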