Example #1
  def _merge_artifact(self, versioned_target_set):
    """Merges artifacts representing the individual targets in a VersionedTargetSet into one artifact for that set.
    Creates an output classes dir, depfile and analysis file for the VersionedTargetSet.
    Note that the merged artifact may be incomplete (e.g., if we have no previous artifacts for some of the
    individual targets). That's OK: We run this right before we invoke zinc, which will fill in what's missing.
    This method is not required for correctness, only for efficiency: it can prevent zinc from doing superfluous work.

    NOTE: This method is reentrant.
    """
    if len(versioned_target_set.targets) <= 1:
      return  # Nothing to do.

    with temporary_dir() as tmpdir:
      dst_classes_dir, dst_depfile, dst_analysis_file = self._output_paths(versioned_target_set.targets)
      safe_rmtree(dst_classes_dir)
      safe_mkdir(dst_classes_dir)
      src_analysis_files = []

      # TODO: Do we actually need to merge deps? Zinc will stomp them anyway on success.
      dst_deps = Dependencies(dst_classes_dir)

      for target in versioned_target_set.targets:
        src_classes_dir, src_depfile, src_analysis_file = self._output_paths([target])
        if os.path.exists(src_depfile):
          src_deps = Dependencies(src_classes_dir)
          src_deps.load(src_depfile)
          dst_deps.merge(src_deps)

          classes_by_source = src_deps.findclasses([target]).get(target, {})
          for source, classes in classes_by_source.items():
            for cls in classes:
              src = os.path.join(src_classes_dir, cls)
              dst = os.path.join(dst_classes_dir, cls)
              # src may not exist if we aborted a build in the middle. That's OK: zinc will notice that
              # it's missing and rebuild it.
              # dst may already exist if we have overlapping targets. It's not a good idea
              # to have those, but until we enforce it, we must allow it here.
              if os.path.exists(src) and not os.path.exists(dst):
                # Copy the class file.
                safe_mkdir(os.path.dirname(dst))
                os.link(src, dst)

          # Rebase a copy of the per-target analysis files to reflect the merged classes dir.
          if os.path.exists(src_analysis_file):
            src_analysis_file_tmp = \
              os.path.join(tmpdir, os.path.relpath(src_analysis_file, self._analysis_files_base))
            shutil.copyfile(src_analysis_file, src_analysis_file_tmp)
            src_analysis_files.append(src_analysis_file_tmp)
            if self._zinc_utils.run_zinc_rebase(src_analysis_file_tmp, [(src_classes_dir, dst_classes_dir)]):
              self.context.log.warn('In merge_artifact: zinc failed to rebase analysis file %s. '\
                                    'Target may require a full rebuild.' %\
                                    src_analysis_file_tmp)

      dst_deps.save(dst_depfile)

      if self._zinc_utils.run_zinc_merge(src_analysis_files, dst_analysis_file):
        self.context.log.warn('zinc failed to merge analysis files %s to %s. '\
                              'Target may require a full rebuild.' %\
                             (':'.join(src_analysis_files), dst_analysis_file))
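The class-file copy in this and the later merge examples uses os.link, which only works within a single filesystem. Below is a minimal sketch of that pattern with a copy fallback; the fallback and the helper name are additions for illustration, not part of the original code.

import errno
import os
import shutil

def link_or_copy(src, dst):
  # Hard-link src into dst, as the merge examples do; fall back to a plain copy
  # if src and dst live on different devices (hypothetical extension).
  try:
    os.link(src, dst)
  except OSError as e:
    if e.errno == errno.EXDEV:
      shutil.copyfile(src, dst)
    else:
      raise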
Example #2
  def merge_artifact(self, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return

    with temporary_dir() as tmpdir:
      dst_output_dir, dst_depfile, dst_analysis_cache = self.create_output_paths(versioned_target_set.targets)
      safe_rmtree(dst_output_dir)
      safe_mkdir(dst_output_dir)
      src_analysis_caches = []

      # TODO: Do we actually need to merge deps? Zinc will stomp them anyway on success.
      dst_deps = Dependencies(dst_output_dir)

      for target in versioned_target_set.targets:
        src_output_dir, src_depfile, src_analysis_cache = self.create_output_paths([target])
        if os.path.exists(src_depfile):
          src_deps = Dependencies(src_output_dir)
          src_deps.load(src_depfile)
          dst_deps.merge(src_deps)

          classes_by_source = src_deps.findclasses([target]).get(target, {})
          for source, classes in classes_by_source.items():
            for cls in classes:
              src = os.path.join(src_output_dir, cls)
              dst = os.path.join(dst_output_dir, cls)
              # src may not exist if we aborted a build in the middle. That's OK: zinc will notice that
              # it's missing and rebuild it.
              # dst may already exist if we have overlapping targets. It's not a good idea
              # to have those, but until we enforce it, we must allow it here.
              if os.path.exists(src) and not os.path.exists(dst):
                # Copy the class file.
                safe_mkdir(os.path.dirname(dst))
                os.link(src, dst)

          # Rebase a copy of the per-target analysis files prior to merging.
          if os.path.exists(src_analysis_cache):
            src_analysis_cache_tmp = \
              os.path.join(tmpdir, os.path.relpath(src_analysis_cache, self._analysis_cache_dir))
            shutil.copyfile(src_analysis_cache, src_analysis_cache_tmp)
            src_analysis_caches.append(src_analysis_cache_tmp)
            if self._zinc_utils.run_zinc_rebase(cache=src_analysis_cache_tmp, rebasings=[(src_output_dir, dst_output_dir)]):
              self.context.log.warn('In merge_artifact: zinc failed to rebase analysis file %s. ' \
                                    'Target may require a full rebuild.' % \
                                    src_analysis_cache_tmp)

      dst_deps.save(dst_depfile)

      if self._zinc_utils.run_zinc_merge(src_caches=src_analysis_caches, dst_cache=dst_analysis_cache):
        self.context.log.warn('zinc failed to merge analysis files %s to %s. ' \
                              'Target may require a full rebuild.' % \
                              (':'.join(src_analysis_caches), dst_analysis_cache))
Example #3
  def merge_depfile(self, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return

    dst_depfile = self.create_depfile_path(versioned_target_set.targets)
    dst_deps = Dependencies(self._classes_dir)

    for target in versioned_target_set.targets:
      src_depfile = self.create_depfile_path([target])
      if os.path.exists(src_depfile):
        src_deps = Dependencies(self._classes_dir)
        src_deps.load(src_depfile)
        dst_deps.merge(src_deps)

    dst_deps.save(dst_depfile)
Example #4
  def post_process_cached_vts(cached_vts):
    # Merge the cached analyses into the existing global one.
    if cached_vts:
      with self.context.new_workunit(name='merge-dependencies'):
        global_deps = Dependencies(self._classes_dir)
        if os.path.exists(self._depfile):
          global_deps.load(self._depfile)
        for vt in cached_vts:
          for target in vt.targets:
            depfile = JavaCompile.create_depfile_path(self._depfile_tmpdir, [target])
            if os.path.exists(depfile):
              deps = Dependencies(self._classes_dir)
              deps.load(depfile)
              global_deps.merge(deps)
        global_deps.save(self._depfile)
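Example #4 reads like a nested helper that runs after artifacts have been fetched from the cache. A minimal sketch of how it might be wired into a check_artifact_cache override in the style of Example #5 follows; the enclosing method and call site are assumptions, not taken from the original.

  def check_artifact_cache(self, vts):
    # Hypothetical wiring: fetch cached artifacts first, then post-process them.
    cached_vts, uncached_vts = Task.check_artifact_cache(self, vts)

    def post_process_cached_vts(cached_vts):
      # Body as in Example #4: merge the cached per-target depfiles into the
      # existing global depfile under a 'merge-dependencies' workunit.
      pass

    post_process_cached_vts(cached_vts)
    return cached_vts, uncached_vts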
Example #5
class JavaCompile(NailgunTask):
  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    NailgunTask.setup_parser(option_group, args, mkflag)

    option_group.add_option(mkflag("warnings"), mkflag("warnings", negate=True),
                            dest="java_compile_warnings", default=True,
                            action="callback", callback=mkflag.set_bool,
                            help="[%default] Compile java code with all configured warnings "
                                 "enabled.")

    option_group.add_option(mkflag("args"), dest="java_compile_args", action="append",
                            help="Pass these extra args to javac.")

    option_group.add_option(mkflag("partition-size-hint"), dest="java_compile_partition_size_hint",
                            action="store", type="int", default=-1,
                            help="Roughly how many source files to attempt to compile together. Set"
                                 " to a large number to compile all sources together. Set this to 0"
                                 " to compile target-by-target. Default is set in pants.ini.")

  def __init__(self, context):
    NailgunTask.__init__(self, context, workdir=context.config.get('java-compile', 'nailgun_dir'))

    if context.options.java_compile_partition_size_hint != -1:
      self._partition_size_hint = context.options.java_compile_partition_size_hint
    else:
      self._partition_size_hint = context.config.getint('java-compile', 'partition_size_hint',
                                                        default=1000)

    workdir = context.config.get('java-compile', 'workdir')
    self._classes_dir = os.path.join(workdir, 'classes')
    self._resources_dir = os.path.join(workdir, 'resources')
    self._depfile_dir = os.path.join(workdir, 'depfiles')
    self._deps = Dependencies(self._classes_dir)

    self._jmake_profile = context.config.get('java-compile', 'jmake-profile')
    self._compiler_profile = context.config.get('java-compile', 'compiler-profile')

    self._opts = context.config.getlist('java-compile', 'args')
    self._jvm_args = context.config.getlist('java-compile', 'jvm_args')

    self._javac_opts = []
    if context.options.java_compile_args:
      for arg in context.options.java_compile_args:
        self._javac_opts.extend(shlex.split(arg))
    else:
      self._javac_opts.extend(context.config.getlist('java-compile', 'javac_args', default=[]))

    if context.options.java_compile_warnings:
      self._opts.extend(context.config.getlist('java-compile', 'warning_args'))
    else:
      self._opts.extend(context.config.getlist('java-compile', 'no_warning_args'))

    self._confs = context.config.getlist('java-compile', 'confs')

    # The artifact cache to read from/write to.
    artifact_cache_spec = context.config.getlist('java-compile', 'artifact_caches')
    self.setup_artifact_cache(artifact_cache_spec)

  def product_type(self):
    return 'classes'

  def can_dry_run(self):
    return True

  def execute(self, targets):
    java_targets = filter(_is_java, targets)
    if java_targets:
      safe_mkdir(self._classes_dir)
      safe_mkdir(self._depfile_dir)

      with self.context.state('classpath', []) as cp:
        for conf in self._confs:
          cp.insert(0, (conf, self._resources_dir))
          cp.insert(0, (conf, self._classes_dir))

      with self.invalidated(java_targets, invalidate_dependents=True,
                            partition_size_hint=self._partition_size_hint) as invalidation_check:
        for vt in invalidation_check.invalid_vts_partitioned:
          # Compile, using partitions for efficiency.
          self.execute_single_compilation(vt, cp)
          if not self.dry_run:
            vt.update()

        for vt in invalidation_check.all_vts:
          depfile = self.create_depfile_path(vt.targets)
          if not self.dry_run and os.path.exists(depfile):
            # Read in the deps created either just now or by a previous run on these targets.
            deps = Dependencies(self._classes_dir)
            deps.load(depfile)
            self._deps.merge(deps)

      if not self.dry_run:
        if self.context.products.isrequired('classes'):
          genmap = self.context.products.get('classes')
          # Map generated classes to the owning targets and sources.
          for target, classes_by_source in self._deps.findclasses(java_targets).items():
            for source, classes in classes_by_source.items():
              genmap.add(source, self._classes_dir, classes)
              genmap.add(target, self._classes_dir, classes)

          # TODO(John Sirois): Map target.resources in the same way
          # 'Map' (rewrite) annotation processor service info files to the owning targets.
          for target in java_targets:
            if is_apt(target) and target.processors:
              basedir = os.path.join(self._resources_dir, Target.maybe_readable_identify([target]))
              processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
              self.write_processor_info(processor_info_file, target.processors)
              genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])

        # Produce a monolithic apt processor service info file for further compilation rounds
        # and the unit test classpath.
        all_processors = set()
        for target in java_targets:
          if is_apt(target) and target.processors:
            all_processors.update(target.processors)
        processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
        if os.path.exists(processor_info_file):
          with safe_open(processor_info_file, 'r') as f:
            for processor in f:
              all_processors.add(processor.strip())
        self.write_processor_info(processor_info_file, all_processors)

  def execute_single_compilation(self, vt, cp):
    depfile = self.create_depfile_path(vt.targets)

    self.merge_depfile(vt)  # Get what we can from previous builds.
    sources_by_target, fingerprint = self.calculate_sources(vt.targets)
    if sources_by_target:
      sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
      if not sources:
        self.context.log.warn('Skipping java compile for targets with no sources:\n  %s' %
                              '\n  '.join(str(t) for t in sources_by_target.keys()))
      else:
        classpath = [jar for conf, jar in cp if conf in self._confs]
        result = self.compile(classpath, sources, fingerprint, depfile)
        if result != 0:
          default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
          raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))
        self.split_depfile(vt)

      all_artifact_files = [depfile]

      if self._artifact_cache and self.context.options.write_to_artifact_cache:
        deps = Dependencies(self._classes_dir)
        deps.load(depfile)
        vts_artifactfile_pairs = []
        for single_vt in vt.versioned_targets:
          per_target_depfile = self.create_depfile_path([single_vt.target])
          per_target_artifact_files = [per_target_depfile]
          for _, classes_by_source in deps.findclasses([single_vt.target]).items():
            for _, classes in classes_by_source.items():
              classfile_paths = [os.path.join(self._classes_dir, cls) for cls in classes]
              per_target_artifact_files.extend(classfile_paths)
              all_artifact_files.extend(classfile_paths)
            vts_artifactfile_pairs.append((single_vt, per_target_artifact_files))
        vts_artifactfile_pairs.append((vt, all_artifact_files))
        self.update_artifact_cache(vts_artifactfile_pairs)

  def create_depfile_path(self, targets):
    compilation_id = Target.maybe_readable_identify(targets)
    return os.path.join(self._depfile_dir, compilation_id) + '.dependencies'

  def calculate_sources(self, targets):
    sources = defaultdict(set)
    def collect_sources(target):
      src = (os.path.join(target.target_base, source)
             for source in target.sources if source.endswith('.java'))
      if src:
        sources[target].update(src)

    for target in targets:
      collect_sources(target)
    return sources, Target.identify(targets)

  def compile(self, classpath, sources, fingerprint, depfile):
    jmake_classpath = self.profile_classpath(self._jmake_profile)

    opts = [
      '-classpath', ':'.join(classpath),
      '-d', self._classes_dir,
      '-pdb', os.path.join(self._classes_dir, '%s.dependencies.pdb' % fingerprint),
    ]

    compiler_classpath = self.profile_classpath(self._compiler_profile)
    opts.extend([
      '-jcpath', ':'.join(compiler_classpath),
      '-jcmainclass', 'com.twitter.common.tools.Compiler',
      '-C-Tdependencyfile', '-C%s' % depfile,
    ])
    opts.extend(map(lambda arg: '-C%s' % arg, self._javac_opts))

    opts.extend(self._opts)
    return self.runjava_indivisible(_JMAKE_MAIN, classpath=jmake_classpath, opts=opts, args=sources,
                                    jvmargs=self._jvm_args, workunit_name='jmake')

  def check_artifact_cache(self, vts):
    # Special handling for java artifacts.
    cached_vts, uncached_vts = Task.check_artifact_cache(self, vts)

    if cached_vts:
      with self.context.new_workunit('split'):
        for vt in cached_vts:
          self.split_depfile(vt)
    return cached_vts, uncached_vts

  def split_depfile(self, vt):
    depfile = self.create_depfile_path(vt.targets)
    if len(vt.targets) <= 1 or not os.path.exists(depfile) or self.dry_run:
      return

    deps = Dependencies(self._classes_dir)
    deps.load(depfile)

    classes_by_source_by_target = deps.findclasses(vt.targets)
    for target in vt.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_depfile = self.create_depfile_path([target])
      dst_deps = Dependencies(self._classes_dir)
      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
      dst_deps.save(dst_depfile)

  # Merges individual target depfiles into a single one for all those targets.
  # Note that the merged depfile may be incomplete (e.g., if the previous build was aborted).
  # TODO: Is this even necessary? JMake will stomp these anyway on success.
  def merge_depfile(self, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return

    dst_depfile = self.create_depfile_path(versioned_target_set.targets)
    dst_deps = Dependencies(self._classes_dir)

    for target in versioned_target_set.targets:
      src_depfile = self.create_depfile_path([target])
      if os.path.exists(src_depfile):
        src_deps = Dependencies(self._classes_dir)
        src_deps.load(src_depfile)
        dst_deps.merge(src_deps)

    dst_deps.save(dst_depfile)

  def write_processor_info(self, processor_info_file, processors):
    with safe_open(processor_info_file, 'w') as f:
      for processor in processors:
        f.write('%s\n' % processor)
Example #6
  def merge_artifact(self, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return

    with temporary_dir() as tmpdir:
      dst_output_dir, dst_depfile, dst_analysis_cache = self.create_output_paths(versioned_target_set.targets)
      safe_rmtree(dst_output_dir)
      safe_mkdir(dst_output_dir)
      src_analysis_caches = []

      analysis_args = []
      analysis_args.extend(self._zinc_jar_args)
      analysis_args.extend([
        '-log-level', self.context.options.log_level or 'info',
        '-analysis',
        ])

      # TODO: Do we actually need to merge deps? Zinc will stomp them anyway on success.
      dst_deps = Dependencies(dst_output_dir)

      for target in versioned_target_set.targets:
        src_output_dir, src_depfile, src_analysis_cache = self.create_output_paths([target])
        if os.path.exists(src_depfile):
          src_deps = Dependencies(src_output_dir)
          src_deps.load(src_depfile)
          dst_deps.merge(src_deps)

          classes_by_source = src_deps.findclasses([target]).get(target, {})
          for source, classes in classes_by_source.items():
            for cls in classes:
              src = os.path.join(src_output_dir, cls)
              dst = os.path.join(dst_output_dir, cls)
              # src may not exist if we aborted a build in the middle. That's OK: zinc will notice that
              # it's missing and rebuild it.
              # dst may already exist if we have overlapping targets. It's not a good idea
              # to have those, but until we enforce it, we must allow it here.
              if os.path.exists(src) and not os.path.exists(dst):
                # Copy the class file.
                safe_mkdir(os.path.dirname(dst))
                os.link(src, dst)

          # Use zinc to rebase a copy of the per-target analysis files prior to merging.
          if os.path.exists(src_analysis_cache):
            src_analysis_cache_tmp = \
              os.path.join(tmpdir, os.path.relpath(src_analysis_cache, self._analysis_cache_dir))
            shutil.copyfile(src_analysis_cache, src_analysis_cache_tmp)
            src_analysis_caches.append(src_analysis_cache_tmp)
            rebase_args = analysis_args + [
              '-cache', src_analysis_cache_tmp,
              '-rebase', '%s:%s' % (src_output_dir, dst_output_dir),
              ]
            if self.runjava(self._main, classpath=self._zinc_classpath, args=rebase_args, jvmargs=self._jvm_args):
              self.context.log.warn('In merge_artifact: zinc failed to rebase analysis file %s. ' \
              'Target may require a full rebuild.' % src_analysis_cache_tmp)

      dst_deps.save(dst_depfile)

      # Use zinc to merge the analysis files.
      merge_args = analysis_args + [
        '-cache', dst_analysis_cache,
        '-merge', ':'.join(src_analysis_caches),
      ]
      if self.runjava(self._main, classpath=self._zinc_classpath, args=merge_args, jvmargs=self._jvm_args):
        raise TaskError('zinc failed to merge analysis files %s to %s' %
                        (':'.join(src_analysis_caches), dst_analysis_cache))
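Examples #1 and #2 call run_zinc_rebase and run_zinc_merge on a ZincUtils helper, while this example builds the zinc command line inline. Below is a minimal sketch of such a wrapper, derived from the argument layout above; the class and attribute names are hypothetical and the real ZincUtils may differ.

class ZincInvoker(object):
  # Hypothetical wrapper over the inline zinc invocations above. Each method
  # returns the zinc exit code, so a truthy result means failure, matching the
  # 'if self._zinc_utils.run_zinc_...' checks in Examples #1 and #2.
  def __init__(self, task, main, zinc_classpath, zinc_jar_args, log_level, jvm_args):
    self._task = task  # provides runjava()
    self._main = main
    self._classpath = zinc_classpath
    self._jvm_args = jvm_args
    self._analysis_args = zinc_jar_args + ['-log-level', log_level, '-analysis']

  def run_zinc_rebase(self, cache, rebasings):
    # Assumes multiple rebasings can be passed as repeated -rebase flags;
    # Example #6 only ever passes a single src:dst pair.
    args = list(self._analysis_args) + ['-cache', cache]
    for src_dir, dst_dir in rebasings:
      args.extend(['-rebase', '%s:%s' % (src_dir, dst_dir)])
    return self._task.runjava(self._main, classpath=self._classpath,
                              args=args, jvmargs=self._jvm_args)

  def run_zinc_merge(self, src_caches, dst_cache):
    args = list(self._analysis_args) + ['-cache', dst_cache, '-merge', ':'.join(src_caches)]
    return self._task.runjava(self._main, classpath=self._classpath,
                              args=args, jvmargs=self._jvm_args)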
Example #7
    def merge_artifact(self, versioned_target_set):
        if len(versioned_target_set.targets) <= 1:
            return

        with temporary_dir() as tmpdir:
            dst_output_dir, dst_depfile, dst_analysis_cache = self.create_output_paths(
                versioned_target_set.targets)
            safe_rmtree(dst_output_dir)
            safe_mkdir(dst_output_dir)
            src_analysis_caches = []

            analysis_args = []
            analysis_args.extend(self._zinc_jar_args)
            analysis_args.extend([
                '-log-level',
                self.context.options.log_level or 'info',
                '-analysis',
            ])

            # TODO: Do we actually need to merge deps? Zinc will stomp them anyway on success.
            dst_deps = Dependencies(dst_output_dir)

            for target in versioned_target_set.targets:
                src_output_dir, src_depfile, src_analysis_cache = self.create_output_paths(
                    [target])
                if os.path.exists(src_depfile):
                    src_deps = Dependencies(src_output_dir)
                    src_deps.load(src_depfile)
                    dst_deps.merge(src_deps)

                    classes_by_source = src_deps.findclasses([target]).get(
                        target, {})
                    for source, classes in classes_by_source.items():
                        for cls in classes:
                            src = os.path.join(src_output_dir, cls)
                            dst = os.path.join(dst_output_dir, cls)
                            # src may not exist if we aborted a build in the middle. That's OK: zinc will notice that
                            # it's missing and rebuild it.
                            # dst may already exist if we have overlapping targets. It's not a good idea
                            # to have those, but until we enforce it, we must allow it here.
                            if os.path.exists(src) and not os.path.exists(dst):
                                # Copy the class file.
                                safe_mkdir(os.path.dirname(dst))
                                os.link(src, dst)

                    # Use zinc to rebase a copy of the per-target analysis files prior to merging.
                    if os.path.exists(src_analysis_cache):
                        src_analysis_cache_tmp = \
                          os.path.join(tmpdir, os.path.relpath(src_analysis_cache, self._analysis_cache_dir))
                        shutil.copyfile(src_analysis_cache,
                                        src_analysis_cache_tmp)
                        src_analysis_caches.append(src_analysis_cache_tmp)
                        rebase_args = analysis_args + [
                            '-cache',
                            src_analysis_cache_tmp,
                            '-rebase',
                            '%s:%s' % (src_output_dir, dst_output_dir),
                        ]
                        if self.runjava(self._main,
                                        classpath=self._zinc_classpath,
                                        args=rebase_args,
                                        jvmargs=self._jvm_args):
                            self.context.log.warn('In merge_artifact: zinc failed to rebase analysis file %s. ' \
                            'Target may require a full rebuild.' % src_analysis_cache_tmp)

            dst_deps.save(dst_depfile)

            # Use zinc to merge the analysis files.
            merge_args = analysis_args + [
                '-cache',
                dst_analysis_cache,
                '-merge',
                ':'.join(src_analysis_caches),
            ]
            if self.runjava(self._main,
                            classpath=self._zinc_classpath,
                            args=merge_args,
                            jvmargs=self._jvm_args):
                raise TaskError('zinc failed to merge analysis files %s to %s' %
                                (':'.join(src_analysis_caches), dst_analysis_cache))
Example #8
class JavaCompile(NailgunTask):
  @staticmethod
  def _has_java_sources(target):
    return is_apt(target) or isinstance(target, JavaLibrary) or isinstance(target, JavaTests)

  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    NailgunTask.setup_parser(option_group, args, mkflag)

    option_group.add_option(mkflag("warnings"), mkflag("warnings", negate=True),
                            dest="java_compile_warnings", default=True,
                            action="callback", callback=mkflag.set_bool,
                            help="[%default] Compile java code with all configured warnings "
                                 "enabled.")

    option_group.add_option(mkflag("partition-size-hint"), dest="java_compile_partition_size_hint",
      action="store", type="int", default=-1,
      help="Roughly how many source files to attempt to compile together. Set to a large number to compile "\
           "all sources together. Set this to 0 to compile target-by-target. Default is set in pants.ini.")

  def __init__(self, context):
    NailgunTask.__init__(self, context, workdir=context.config.get('java-compile', 'nailgun_dir'))

    self._partition_size_hint = \
      context.options.java_compile_partition_size_hint \
      if context.options.java_compile_partition_size_hint != -1 \
      else context.config.getint('java-compile', 'partition_size_hint')

    workdir = context.config.get('java-compile', 'workdir')
    self._classes_dir = os.path.join(workdir, 'classes')
    self._resources_dir = os.path.join(workdir, 'resources')
    self._depfile_dir = os.path.join(workdir, 'depfiles')
    self._deps = Dependencies(self._classes_dir)

    self._jmake_profile = context.config.get('java-compile', 'jmake-profile')
    self._compiler_profile = context.config.get('java-compile', 'compiler-profile')

    self._args = context.config.getlist('java-compile', 'args')
    self._jvm_args = context.config.getlist('java-compile', 'jvm_args')

    if context.options.java_compile_warnings:
      self._args.extend(context.config.getlist('java-compile', 'warning_args'))
    else:
      self._args.extend(context.config.getlist('java-compile', 'no_warning_args'))

    self._confs = context.config.getlist('java-compile', 'confs')

    # The artifact cache to read from/write to.
    artifact_cache_spec = context.config.getlist('java-compile', 'artifact_caches')
    self.setup_artifact_cache(artifact_cache_spec)

  def product_type(self):
    return 'classes'

  def can_dry_run(self):
    return True

  def execute(self, targets):
    java_targets = filter(JavaCompile._has_java_sources, targets)
    if java_targets:
      safe_mkdir(self._classes_dir)
      safe_mkdir(self._depfile_dir)

      with self.context.state('classpath', []) as cp:
        for conf in self._confs:
          cp.insert(0, (conf, self._resources_dir))
          cp.insert(0, (conf, self._classes_dir))

      with self.invalidated(java_targets, invalidate_dependents=True,
          partition_size_hint=self._partition_size_hint) as invalidation_check:
        for vt in invalidation_check.all_vts:
          if vt.valid:  # Don't compile, just post-process.
            self.post_process(vt)
        for vt in invalidation_check.invalid_vts_partitioned:
          # Compile, using partitions for efficiency.
          self.execute_single_compilation(vt, cp)
          if not self.dry_run:
            vt.update()

      if not self.dry_run:
        if self.context.products.isrequired('classes'):
          genmap = self.context.products.get('classes')

          # Map generated classes to the owning targets and sources.
          for target, classes_by_source in self._deps.findclasses(java_targets).items():
            for source, classes in classes_by_source.items():
              genmap.add(source, self._classes_dir, classes)
              genmap.add(target, self._classes_dir, classes)

          # TODO(John Sirois): Map target.resources in the same way
          # 'Map' (rewrite) annotation processor service info files to the owning targets.
          for target in java_targets:
            if is_apt(target) and target.processors:
              basedir = os.path.join(self._resources_dir, target.id)
              processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
              self.write_processor_info(processor_info_file, target.processors)
              genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])

  def execute_single_compilation(self, vt, cp):
    depfile = self.create_depfile_path(vt.targets)

    self.merge_depfile(vt)  # Get what we can from previous builds.
    self.context.log.info('Compiling targets %s' % str(vt.targets))
    sources_by_target, processors, fingerprint = self.calculate_sources(vt.targets)
    if sources_by_target:
      sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
      if not sources:
        self.context.log.warn('Skipping java compile for targets with no sources:\n  %s' %
                              '\n  '.join(str(t) for t in sources_by_target.keys()))
      else:
        classpath = [jar for conf, jar in cp if conf in self._confs]
        result = self.compile(classpath, sources, fingerprint, depfile)
        if result != 0:
          default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
          raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

      # NOTE: Currently all classfiles go into one global classes_dir. If we compile in
      # multiple partitions the second one will cache all the classes of the first one.
      # This won't result in error, but is wasteful. Currently, however, Java compilation
      # is done in a single pass, so this won't occur in practice.
      # TODO: Handle this case better. Separate classes dirs for each partition, like for scala?
      artifact_files = [self._classes_dir, depfile]

      if processors and not self.dry_run:
        # Produce a monolithic apt processor service info file for further compilation rounds
        # and the unit test classpath.
        processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
        if os.path.exists(processor_info_file):
          with safe_open(processor_info_file, 'r') as f:
            for processor in f:
              processors.add(processor.strip())
        self.write_processor_info(processor_info_file, processors)
        artifact_files.append(processor_info_file)

      if self._artifact_cache and self.context.options.write_to_artifact_cache:
        self.update_artifact_cache(vt, artifact_files)

    self.post_process(vt)

  # Post-processing steps that must happen even for valid targets.
  def post_process(self, versioned_targets):
    depfile = self.create_depfile_path(versioned_targets.targets)
    if not self.dry_run and os.path.exists(depfile):
      # Read in the deps created either just now or by a previous compiler run on these targets.
      deps = Dependencies(self._classes_dir)
      deps.load(depfile)
      self.split_depfile(deps, versioned_targets)
      self._deps.merge(deps)

  def create_depfile_path(self, targets):
    compilation_id = Target.maybe_readable_identify(targets)
    return os.path.join(self._depfile_dir, compilation_id) + '.dependencies'

  def calculate_sources(self, targets):
    sources = defaultdict(set)
    processors = set()
    def collect_sources(target):
      src = (os.path.join(target.target_base, source)
             for source in target.sources if source.endswith('.java'))
      if src:
        sources[target].update(src)
        if is_apt(target) and target.processors:
          processors.update(target.processors)

    for target in targets:
      collect_sources(target)
    return sources, processors, Target.identify(targets)

  def compile(self, classpath, sources, fingerprint, depfile):
    jmake_classpath = nailgun_profile_classpath(self, self._jmake_profile)

    args = [
      '-classpath', ':'.join(classpath),
      '-d', self._classes_dir,
      '-pdb', os.path.join(self._classes_dir, '%s.dependencies.pdb' % fingerprint),
    ]

    compiler_classpath = nailgun_profile_classpath(self, self._compiler_profile)
    args.extend([
      '-jcpath', ':'.join(compiler_classpath),
      '-jcmainclass', 'com.twitter.common.tools.Compiler',
      '-C-Tdependencyfile', '-C%s' % depfile,
    ])

    args.extend(self._args)
    args.extend(sources)
    log.debug('Executing: %s %s' % (_JMAKE_MAIN, ' '.join(args)))
    return self.runjava(_JMAKE_MAIN, classpath=jmake_classpath, args=args, jvmargs=self._jvm_args)

  def split_depfile(self, deps, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return
    classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
    for target in versioned_target_set.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_depfile = self.create_depfile_path([target])
      dst_deps = Dependencies(self._classes_dir)
      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
      dst_deps.save(dst_depfile)

  # Merges individual target depfiles into a single one for all those targets.
  # Note that the merged depfile may be incomplete (e.g., if the previous build was aborted).
  # TODO: Is this even necessary? JMake will stomp these anyway on success.
  def merge_depfile(self, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return

    dst_depfile = self.create_depfile_path(versioned_target_set.targets)
    dst_deps = Dependencies(self._classes_dir)

    for target in versioned_target_set.targets:
      src_depfile = self.create_depfile_path([target])
      if os.path.exists(src_depfile):
        src_deps = Dependencies(self._classes_dir)
        src_deps.load(src_depfile)
        dst_deps.merge(src_deps)

    dst_deps.save(dst_depfile)

  def write_processor_info(self, processor_info_file, processors):
    with safe_open(processor_info_file, 'w') as f:
      for processor in processors:
        f.write('%s\n' % processor)
Example #9
class ScalaCompile(NailgunTask):
  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    NailgunTask.setup_parser(option_group, args, mkflag)

    option_group.add_option(mkflag("warnings"), mkflag("warnings", negate=True),
                            dest="scala_compile_warnings", default=True,
                            action="callback", callback=mkflag.set_bool,
                            help="[%default] Compile scala code with all configured warnings "
                                 "enabled.")

    option_group.add_option(mkflag("flatten"), mkflag("flatten", negate=True),
                            dest="scala_compile_flatten",
                            action="callback", callback=mkflag.set_bool,
                            help="[%default] Compile scala code for all dependencies in a "
                                 "single compilation.")

  def __init__(self, context):
    NailgunTask.__init__(self, context, workdir=context.config.get('scala-compile', 'nailgun_dir'))

    self._flatten = \
      context.options.scala_compile_flatten if context.options.scala_compile_flatten is not None else \
      context.config.getbool('scala-compile', 'default_to_flatten')

    self._compile_profile = context.config.get('scala-compile', 'compile-profile')  # The target scala version.
    self._zinc_profile = context.config.get('scala-compile', 'zinc-profile')
    self._depemitter_profile = context.config.get('scala-compile', 'dependencies-plugin-profile')

    # All scala targets implicitly depend on the selected scala runtime.
    scaladeps = []
    for spec in context.config.getlist('scala-compile', 'scaladeps'):
      scaladeps.extend(context.resolve(spec))
    for target in context.targets(is_scala):
      target.update_dependencies(scaladeps)

    workdir = context.config.get('scala-compile', 'workdir')
    self._incremental_classes_dir = os.path.join(workdir, 'incremental.classes')
    self._classes_dir = os.path.join(workdir, 'classes')
    self._analysis_cache_dir = os.path.join(workdir, 'analysis_cache')
    self._resources_dir = os.path.join(workdir, 'resources')

    self._main = context.config.get('scala-compile', 'main')

    self._args = context.config.getlist('scala-compile', 'args')
    self._jvm_args = context.config.getlist('scala-compile', 'jvm_args')
    if context.options.scala_compile_warnings:
      self._args.extend(context.config.getlist('scala-compile', 'warning_args'))
    else:
      self._args.extend(context.config.getlist('scala-compile', 'no_warning_args'))

    self._confs = context.config.getlist('scala-compile', 'confs')
    self._depfile_dir = os.path.join(workdir, 'depfiles')
    self._deps = Dependencies(self._classes_dir)

  def invalidate_for(self):
    return [self._flatten]

  def execute(self, targets):
    scala_targets = filter(is_scala, reversed(InternalTarget.sort_targets(targets)))
    if scala_targets:
      safe_mkdir(self._classes_dir)
      safe_mkdir(self._depfile_dir)

      with self.context.state('classpath', []) as cp:
        for conf in self._confs:
          cp.insert(0, (conf, self._resources_dir))
          # If we're not flattening, we don't want the classes dir on the classpath yet, as we want zinc to
          # see only the per-compilation output dirs, so it can map them to analysis caches.
          if self._flatten:
            cp.insert(0, (conf, self._classes_dir))

      if not self._flatten:
        upstream_analysis_caches = OrderedDict()  # output dir -> analysis cache file for the classes in that dir.
        for target in scala_targets:
          self.execute_single_compilation([target], cp, upstream_analysis_caches)
      else:
        self.execute_single_compilation(scala_targets, cp, {})

      if not self._flatten:
        # Now we can add the global output dir, so that subsequent goals can see it.
        with self.context.state('classpath', []) as cp:
          for conf in self._confs:
            cp.insert(0, (conf, self._classes_dir))

      if self.context.products.isrequired('classes'):
        genmap = self.context.products.get('classes')

        # Map generated classes to the owning targets and sources.
        for target, classes_by_source in self._deps.findclasses(scala_targets).items():
          for source, classes in classes_by_source.items():
            genmap.add(source, self._classes_dir, classes)
            genmap.add(target, self._classes_dir, classes)

        # TODO(John Sirois): Map target.resources in the same way
        # Create and Map scala plugin info files to the owning targets.
        for target in scala_targets:
          if is_scalac_plugin(target) and target.classname:
            basedir = self.write_plugin_info(target)
            genmap.add(target, basedir, [_PLUGIN_INFO_FILE])

  def execute_single_compilation(self, scala_targets, cp, upstream_analysis_caches):
    """Execute a single compilation, updating upstream_analysis_caches if needed."""
    self.context.log.info('Compiling targets %s' % str(scala_targets))

    compilation_id = self.context.maybe_readable_identify(scala_targets)

    if self._flatten:
      # If compiling in flat mode, we let all dependencies aggregate into a single well-known depfile. This
      # allows us to build different targets in different invocations without losing dependency information
      # from any of them.
      depfile = os.path.join(self._depfile_dir, 'dependencies.flat')
    else:
      # If not in flat mode, we let each compilation have its own depfile, to avoid quadratic behavior (each
      # compilation will read in the entire depfile, add its stuff to it and write it out again).
      depfile = os.path.join(self._depfile_dir, compilation_id) + '.dependencies'

    if self._flatten:
      output_dir = self._classes_dir
      analysis_cache = os.path.join(self._analysis_cache_dir, compilation_id) + '.flat'
    else:
      # When compiling with multiple compilations, each compilation must output to its own directory, so zinc
      # can then associate those with the analysis caches of previous compilations.
      # So we compile into a compilation-specific directory and then copy the results out to the real output dir.
      output_dir = os.path.join(self._incremental_classes_dir, compilation_id)
      analysis_cache = os.path.join(self._analysis_cache_dir, compilation_id)

    if self._flatten:
      # We must defer dependency analysis to zinc. If we exclude files from a repeat build, zinc will assume
      # the files were deleted and will nuke the corresponding class files.
      invalidate_globally = True
    else:
      invalidate_globally = False
    with self.changed(scala_targets, invalidate_dependants=True,
                      invalidate_globally=invalidate_globally) as changed_targets:
      sources_by_target = self.calculate_sources(changed_targets)
      if sources_by_target:
        sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
        if not sources:
          self.context.log.warn('Skipping scala compile for targets with no sources:\n  %s' %
                                '\n  '.join(str(t) for t in sources_by_target.keys()))
        else:
          classpath = [jar for conf, jar in cp if conf in self._confs]
          result = self.compile(classpath, sources, output_dir, analysis_cache, upstream_analysis_caches, depfile)
          if result != 0:
            raise TaskError('%s returned %d' % (self._main, result))
          if output_dir != self._classes_dir:
            # Link class files emitted in this compilation into the central classes dir.
            for (dirpath, dirnames, filenames) in os.walk(output_dir):
              for d in [os.path.join(dirpath, x) for x in dirnames]:
                dir = os.path.join(self._classes_dir, os.path.relpath(d, output_dir))
                if not os.path.isdir(dir):
                  os.mkdir(dir)
              for f in [os.path.join(dirpath, x) for x in filenames]:
                outfile = os.path.join(self._classes_dir, os.path.relpath(f, output_dir))
                if os.path.exists(outfile):
                  os.unlink(outfile)
                os.link(f, outfile)

    # Read in the deps created either just now or by a previous compiler run on these targets.
    self.context.log.debug('Reading dependencies from ' + depfile)
    deps = Dependencies(output_dir)
    deps.load(depfile)
    self._deps.merge(deps)

    if not self._flatten:
      upstream_analysis_caches[output_dir] = analysis_cache

  def calculate_sources(self, targets):
    sources = defaultdict(set)
    def collect_sources(target):
      src = (os.path.join(target.target_base, source)
             for source in target.sources if source.endswith('.scala'))
      if src:
        sources[target].update(src)

        if (isinstance(target, ScalaLibrary) or isinstance(target, ScalaTests)) and (
            target.java_sources):
          sources[target].update(resolve_target_sources(target.java_sources, '.java'))

    for target in targets:
      collect_sources(target)
    return sources

  def compile(self, classpath, sources, output_dir, analysis_cache, upstream_analysis_caches, depfile):
    safe_mkdir(output_dir)

    compiler_classpath = nailgun_profile_classpath(self, self._compile_profile)

    compiler_args = []

    # TODO(John Sirois): separate compiler profile from runtime profile
    compiler_args.extend([
      # Support for outputting a dependencies file of source -> class
      '-Xplugin:%s' % self.get_depemitter_plugin(),
      '-P:depemitter:file:%s' % depfile
    ])
    compiler_args.extend(self._args)

    # To pass options to scalac simply prefix with -S.
    args = ['-S' + x for x in compiler_args]

    if len(upstream_analysis_caches) > 0:
      args.extend([ '-analysis-map', ','.join(['%s:%s' % kv for kv in upstream_analysis_caches.items()]) ])
    upstream_jars = upstream_analysis_caches.keys()

    zinc_classpath = nailgun_profile_classpath(self, self._zinc_profile)
    zinc_jars = ScalaCompile.identify_zinc_jars(compiler_classpath, zinc_classpath)
    for (name, jarpath) in zinc_jars.items():  # The zinc jar names are also the flag names.
      args.extend(['-%s' % name, jarpath])

    args.extend([
      '-analysis-cache', analysis_cache,
      '-log-level', self.context.options.log_level or 'info',
      '-classpath', ':'.join(zinc_classpath + classpath + upstream_jars),
      '-d', output_dir
    ])

    args.extend(sources)

    self.context.log.debug('Executing: %s %s' % (self._main, ' '.join(args)))
    return self.runjava(self._main, classpath=zinc_classpath, args=args, jvmargs=self._jvm_args)

  def get_depemitter_plugin(self):
    depemitter_classpath = nailgun_profile_classpath(self, self._depemitter_profile)
    depemitter_jar = depemitter_classpath.pop()
    if depemitter_classpath:
      raise TaskError('Expected only 1 jar for the depemitter plugin, '
                      'found these extra: %s' % depemitter_classpath)
    return depemitter_jar

  def write_plugin_info(self, target):
    basedir = os.path.join(self._resources_dir, target.id)
    with safe_open(os.path.join(basedir, _PLUGIN_INFO_FILE), 'w') as f:
      f.write(textwrap.dedent('''
        <plugin>
          <name>%s</name>
          <classname>%s</classname>
        </plugin>
      ''' % (target.plugin, target.classname)).strip())
    return basedir

  # These are the names of the various jars zinc needs. They are, conveniently and non-coincidentally,
  # the names of the flags used to pass the jar locations to zinc.
  compiler_jar_names = [ 'scala-library', 'scala-compiler' ]  # Compiler version.
  zinc_jar_names = [ 'compiler-interface', 'sbt-interface' ]  # Other jars zinc needs to be pointed to.

  @staticmethod
  def identify_zinc_jars(compiler_classpath, zinc_classpath):
    """Find the named jars in the compiler and zinc classpaths.

    TODO: When profiles migrate to regular pants jar() deps instead of ivy.xml files we can make these
          mappings explicit instead of deriving them by jar name heuristics.
    """
    ret = OrderedDict()
    ret.update(ScalaCompile.identify_jars(ScalaCompile.compiler_jar_names, compiler_classpath))
    ret.update(ScalaCompile.identify_jars(ScalaCompile.zinc_jar_names, zinc_classpath))
    return ret

  @staticmethod
  def identify_jars(names, jars):
    jars_by_name = {}
    jars_and_filenames = [(x, os.path.basename(x)) for x in jars]

    for name in names:
      jar_for_name = None
      for jar, filename in jars_and_filenames:
        if filename.startswith(name):
          jar_for_name = jar
          break
      if jar_for_name is None:
        raise TaskError('Couldn\'t find jar named %s' % name)
      else:
        jars_by_name[name] = jar_for_name
    return jars_by_name
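The identify_jars heuristic above matches each required name against the basename of the classpath entries. A small illustrative call follows; the paths are made up.

# Hypothetical classpaths for illustration only.
compiler_cp = ['/profiles/scala/scala-library-2.9.2.jar',
               '/profiles/scala/scala-compiler-2.9.2.jar']
zinc_cp = ['/profiles/zinc/compiler-interface-0.12.0.jar',
           '/profiles/zinc/sbt-interface-0.12.0.jar',
           '/profiles/zinc/zinc-0.1.0.jar']

jars = ScalaCompile.identify_zinc_jars(compiler_cp, zinc_cp)
# jars now maps each required name to the first entry whose filename starts with
# it, e.g. 'scala-compiler' -> '/profiles/scala/scala-compiler-2.9.2.jar'.
# A name with no matching jar raises TaskError.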
Example #10
class JavaCompile(NailgunTask):
  @staticmethod
  def _is_java(target):
    return is_apt(target) or isinstance(target, JavaLibrary) or isinstance(target, JavaTests)

  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    NailgunTask.setup_parser(option_group, args, mkflag)

    option_group.add_option(mkflag("warnings"), mkflag("warnings", negate=True),
                            dest="java_compile_warnings", default=True,
                            action="callback", callback=mkflag.set_bool,
                            help="[%default] Compile java code with all configured warnings "
                                 "enabled.")

    option_group.add_option(mkflag("flatten"), mkflag("flatten", negate=True),
                            dest="java_compile_flatten",
                            action="callback", callback=mkflag.set_bool,
                            help="[%default] Compile java code for all dependencies in a "
                                 "single compilation.")

  def __init__(self, context):
    NailgunTask.__init__(self, context, workdir=context.config.get('java-compile', 'nailgun_dir'))

    self._flatten = \
      context.options.java_compile_flatten if context.options.java_compile_flatten is not None else \
      context.config.getbool('java-compile', 'default_to_flatten')

    workdir = context.config.get('java-compile', 'workdir')
    self._classes_dir = os.path.join(workdir, 'classes')
    self._resources_dir = os.path.join(workdir, 'resources')
    self._depfile_dir = os.path.join(workdir, 'depfiles')
    self._deps = Dependencies(self._classes_dir)

    self._jmake_profile = context.config.get('java-compile', 'jmake-profile')
    self._compiler_profile = context.config.get('java-compile', 'compiler-profile')

    self._args = context.config.getlist('java-compile', 'args')
    self._jvm_args = context.config.getlist('java-compile', 'jvm_args')

    if context.options.java_compile_warnings:
      self._args.extend(context.config.getlist('java-compile', 'warning_args'))
    else:
      self._args.extend(context.config.getlist('java-compile', 'no_warning_args'))

    self._confs = context.config.getlist('java-compile', 'confs')

  def execute(self, targets):
    java_targets = filter(JavaCompile._is_java, reversed(InternalTarget.sort_targets(targets)))
    if java_targets:
      safe_mkdir(self._classes_dir)
      safe_mkdir(self._depfile_dir)

      with self.context.state('classpath', []) as cp:
        for conf in self._confs:
          cp.insert(0, (conf, self._resources_dir))
          cp.insert(0, (conf, self._classes_dir))

      if not self._flatten:
        for target in java_targets:
          self.execute_single_compilation([target], cp)
      else:
        self.execute_single_compilation(java_targets, cp)

      if self.context.products.isrequired('classes'):
        genmap = self.context.products.get('classes')

        # Map generated classes to the owning targets and sources.
        for target, classes_by_source in self._deps.findclasses(java_targets).items():
          for source, classes in classes_by_source.items():
            genmap.add(source, self._classes_dir, classes)
            genmap.add(target, self._classes_dir, classes)

        # TODO(John Sirois): Map target.resources in the same way
        # 'Map' (rewrite) annotation processor service info files to the owning targets.
        for target in java_targets:
          if is_apt(target) and target.processors:
            basedir = os.path.join(self._resources_dir, target.id)
            processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
            self.write_processor_info(processor_info_file, target.processors)
            genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])

  def execute_single_compilation(self, java_targets, cp):
    self.context.log.info('Compiling targets %s' % str(java_targets))

    # Compute the id of this compilation. We try to make it human-readable.
    if len(java_targets) == 1:
      compilation_id = java_targets[0].id
    else:
      compilation_id = self.context.identify(java_targets)

    if self._flatten:
      # If compiling in flat mode, we let all dependencies aggregate into a single well-known depfile. This
      # allows us to build different targets in different invocations without losing dependency information
      # from any of them.
      depfile = os.path.join(self._depfile_dir, 'dependencies.flat')
    else:
      # If not in flat mode, we let each compilation have its own depfile, to avoid quadratic behavior (each
      # compilation will read in the entire depfile, add its stuff to it and write it out again).
      depfile = os.path.join(self._depfile_dir, compilation_id) + '.dependencies'

    with self.changed(java_targets, invalidate_dependants=True) as changed:
      sources_by_target, processors, fingerprint = self.calculate_sources(changed)
      if sources_by_target:
        sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
        if not sources:
          self.context.log.warn('Skipping java compile for targets with no sources:\n  %s' %
                                '\n  '.join(str(t) for t in sources_by_target.keys()))
        else:
          classpath = [jar for conf, jar in cp if conf in self._confs]
          result = self.compile(classpath, sources, fingerprint, depfile)
          if result != 0:
            default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
            raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

        if processors:
          # Produce a monolithic apt processor service info file for further compilation rounds
          # and the unit test classpath.
          processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
          if os.path.exists(processor_info_file):
            with safe_open(processor_info_file, 'r') as f:
              for processor in f:
                processors.add(processor.strip())
          self.write_processor_info(processor_info_file, processors)

    # Read in the deps created either just now or by a previous compiler run on these targets.
    deps = Dependencies(self._classes_dir)
    deps.load(depfile)
    self._deps.merge(deps)

  def calculate_sources(self, targets):
    sources = defaultdict(set)
    processors = set()
    def collect_sources(target):
      src = (os.path.join(target.target_base, source)
             for source in target.sources if source.endswith('.java'))
      if src:
        sources[target].update(src)
        if is_apt(target) and target.processors:
          processors.update(target.processors)

    for target in targets:
      collect_sources(target)
    return sources, processors, self.context.identify(targets)

  def compile(self, classpath, sources, fingerprint, depfile):
    jmake_classpath = nailgun_profile_classpath(self, self._jmake_profile)

    args = [
      '-classpath', ':'.join(classpath),
      '-d', self._classes_dir,
      '-pdb', os.path.join(self._classes_dir, '%s.dependencies.pdb' % fingerprint),
    ]

    compiler_classpath = nailgun_profile_classpath(self, self._compiler_profile)
    args.extend([
      '-jcpath', ':'.join(compiler_classpath),
      '-jcmainclass', 'com.twitter.common.tools.Compiler',
      '-C-Tdependencyfile', '-C%s' % depfile,
    ])

    args.extend(self._args)
    args.extend(sources)
    log.debug('Executing: %s %s' % (_JMAKE_MAIN, ' '.join(args)))
    return self.runjava(_JMAKE_MAIN, classpath=jmake_classpath, args=args, jvmargs=self._jvm_args)

  def write_processor_info(self, processor_info_file, processors):
    with safe_open(processor_info_file, 'w') as f:
      for processor in processors:
        f.write('%s\n' % processor)
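Examples #9 and #10 both repeat the flat-vs-per-compilation depfile selection inline. A small helper factoring out that choice is sketched below; the method name is hypothetical.

  def _select_depfile(self, compilation_id):
    # Flat mode aggregates all dependencies into one well-known file; otherwise
    # each compilation gets its own depfile, avoiding the quadratic cost of
    # re-reading and re-writing a single large depfile on every compilation.
    if self._flatten:
      return os.path.join(self._depfile_dir, 'dependencies.flat')
    return os.path.join(self._depfile_dir, compilation_id) + '.dependencies'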
Example #11
class JavaCompile(NailgunTask):
    @staticmethod
    def _has_java_sources(target):
        return is_apt(target) or isinstance(target, JavaLibrary) or isinstance(
            target, JavaTests)

    @classmethod
    def setup_parser(cls, option_group, args, mkflag):
        NailgunTask.setup_parser(option_group, args, mkflag)

        option_group.add_option(
            mkflag("warnings"),
            mkflag("warnings", negate=True),
            dest="java_compile_warnings",
            default=True,
            action="callback",
            callback=mkflag.set_bool,
            help="[%default] Compile java code with all configured warnings "
            "enabled.")

        option_group.add_option(
            mkflag("flatten"),
            mkflag("flatten", negate=True),
            dest="java_compile_flatten",
            action="callback",
            callback=mkflag.set_bool,
            help="[%default] Compile java code for all dependencies in a "
            "single compilation.")

    def __init__(self, context):
        NailgunTask.__init__(self,
                             context,
                             workdir=context.config.get(
                                 'java-compile', 'nailgun_dir'))

        self._flatten = (
            context.options.java_compile_flatten
            if context.options.java_compile_flatten is not None
            else context.config.getbool('java-compile', 'default_to_flatten'))

        workdir = context.config.get('java-compile', 'workdir')
        self._classes_dir = os.path.join(workdir, 'classes')
        self._resources_dir = os.path.join(workdir, 'resources')
        self._depfile_dir = os.path.join(workdir, 'depfiles')
        self._deps = Dependencies(self._classes_dir)

        self._jmake_profile = context.config.get('java-compile',
                                                 'jmake-profile')
        self._compiler_profile = context.config.get('java-compile',
                                                    'compiler-profile')

        self._args = context.config.getlist('java-compile', 'args')
        self._jvm_args = context.config.getlist('java-compile', 'jvm_args')

        if context.options.java_compile_warnings:
            self._args.extend(
                context.config.getlist('java-compile', 'warning_args'))
        else:
            self._args.extend(
                context.config.getlist('java-compile', 'no_warning_args'))

        self._confs = context.config.getlist('java-compile', 'confs')
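
        # Illustration only (hypothetical values): the keys read above correspond to a
        # config section along these lines:
        #
        #   [java-compile]
        #   nailgun_dir: .pants.d/ng/java-compile
        #   workdir: .pants.d/java-compile
        #   default_to_flatten: True
        #   jmake-profile: jmake
        #   compiler-profile: java-compiler
        #   args: []
        #   jvm_args: ['-Xmx1g']
        #   warning_args: ['-C-Xlint:all']
        #   no_warning_args: ['-C-nowarn']
        #   confs: ['default']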

    def product_type(self):
        return 'classes'

    def invalidate_for(self):
        return self._flatten

    def execute(self, targets):
        java_targets = filter(JavaCompile._has_java_sources, targets)
        if java_targets:
            safe_mkdir(self._classes_dir)
            safe_mkdir(self._depfile_dir)

            with self.context.state('classpath', []) as cp:
                for conf in self._confs:
                    cp.insert(0, (conf, self._resources_dir))
                    cp.insert(0, (conf, self._classes_dir))

            with self.invalidated(java_targets,
                                  invalidate_dependants=True) as invalidated:
                if self._flatten:
                    # The deps go to a single well-known file, so we need only pass in the invalid targets here.
                    self.execute_single_compilation(
                        invalidated.combined_invalid_versioned_targets(), cp)
                else:
                    # We must pass all targets, even valid ones, to execute_single_compilation(),
                    # so it can track the per-target deps correctly.
                    for vt in invalidated.all_versioned_targets():
                        self.execute_single_compilation(vt, cp)
                        invalidated.update_versioned_target(vt)

            if self.context.products.isrequired('classes'):
                genmap = self.context.products.get('classes')

                # Map generated classes to the owning targets and sources.
                for target, classes_by_source in self._deps.findclasses(
                        java_targets).items():
                    for source, classes in classes_by_source.items():
                        genmap.add(source, self._classes_dir, classes)
                        genmap.add(target, self._classes_dir, classes)

                # TODO(John Sirois): Map target.resources in the same way
                # 'Map' (rewrite) annotation processor service info files to the owning targets.
                for target in java_targets:
                    if is_apt(target) and target.processors:
                        basedir = os.path.join(self._resources_dir, target.id)
                        processor_info_file = os.path.join(
                            basedir, _PROCESSOR_INFO_FILE)
                        self.write_processor_info(processor_info_file,
                                                  target.processors)
                        genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])
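
                # Illustration only: genmap.add(source, self._classes_dir, classes) records that
                # the class files in `classes` (paths relative to the classes dir) were produced
                # from `source`; the same class list is also recorded against the owning target,
                # and apt targets additionally register their processor-info file under their
                # per-target resources dir.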

    def execute_single_compilation(self, versioned_targets, cp):
        compilation_id = Target.maybe_readable_identify(
            versioned_targets.targets)

        # TODO: Use the artifact cache. In flat mode we may want to look for the artifact for all targets,
        # not just the invalid ones, as it might be more likely to be present. Or we could look for both.

        if self._flatten:
            # If compiling in flat mode, we let all dependencies aggregate into a single well-known depfile. This
            # allows us to build different targets in different invocations without losing dependency information
            # from any of them.
            depfile = os.path.join(self._depfile_dir, 'dependencies.flat')
        else:
            # If not in flat mode, we let each compilation have its own depfile, to avoid quadratic behavior (each
            # compilation will read in the entire depfile, add its stuff to it and write it out again).
            depfile = os.path.join(self._depfile_dir,
                                   compilation_id) + '.dependencies'
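
        # Illustration only (hypothetical id): in flat mode every invocation appends to
        # <depfile_dir>/dependencies.flat, whereas per-compilation mode yields e.g.
        # <depfile_dir>/com.foo.bar.bar.dependencies, so each run only rereads and
        # rewrites its own, smaller depfile.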

        if not versioned_targets.valid:
            self.context.log.info('Compiling targets %s' %
                                  str(versioned_targets.targets))
            sources_by_target, processors, fingerprint = self.calculate_sources(
                versioned_targets.targets)
            if sources_by_target:
                sources = reduce(lambda all, sources: all.union(sources),
                                 sources_by_target.values())
                if not sources:
                    # Create an empty depfile, since downstream code may assume that one exists.
                    touch(depfile)
                    self.context.log.warn(
                        'Skipping java compile for targets with no sources:\n  %s'
                        %
                        '\n  '.join(str(t) for t in sources_by_target.keys()))
                else:
                    classpath = [
                        jar for conf, jar in cp if conf in self._confs
                    ]
                    result = self.compile(classpath, sources, fingerprint,
                                          depfile)
                    if result != 0:
                        default_message = 'Unexpected error - %s returned %d' % (
                            _JMAKE_MAIN, result)
                        raise TaskError(
                            _JMAKE_ERROR_CODES.get(result, default_message))

                if processors:
                    # Produce a monolithic apt processor service info file for further compilation rounds
                    # and the unit test classpath.
                    processor_info_file = os.path.join(self._classes_dir,
                                                       _PROCESSOR_INFO_FILE)
                    if os.path.exists(processor_info_file):
                        with safe_open(processor_info_file, 'r') as f:
                            for processor in f:
                                processors.add(processor.strip())
                    self.write_processor_info(processor_info_file, processors)

        # Read in the deps created either just now or by a previous compiler run on these targets.
        deps = Dependencies(self._classes_dir)
        deps.load(depfile)
        self._deps.merge(deps)

    def calculate_sources(self, targets):
        sources = defaultdict(set)
        processors = set()

        def collect_sources(target):
            # Materialize as a list so the emptiness check below is meaningful (a generator is always truthy).
            src = [os.path.join(target.target_base, source)
                   for source in target.sources if source.endswith('.java')]
            if src:
                sources[target].update(src)
                if is_apt(target) and target.processors:
                    processors.update(target.processors)

        for target in targets:
            collect_sources(target)
        return sources, processors, Target.identify(targets)

    def compile(self, classpath, sources, fingerprint, depfile):
        jmake_classpath = nailgun_profile_classpath(self, self._jmake_profile)

        args = [
            '-classpath',
            ':'.join(classpath),
            '-d',
            self._classes_dir,
            '-pdb',
            os.path.join(self._classes_dir,
                         '%s.dependencies.pdb' % fingerprint),
        ]

        compiler_classpath = nailgun_profile_classpath(self,
                                                       self._compiler_profile)
        args.extend([
            '-jcpath',
            ':'.join(compiler_classpath),
            '-jcmainclass',
            'com.twitter.common.tools.Compiler',
            '-C-Tdependencyfile',
            '-C%s' % depfile,
        ])

        args.extend(self._args)
        args.extend(sources)
        log.debug('Executing: %s %s' % (_JMAKE_MAIN, ' '.join(args)))
        return self.runjava(_JMAKE_MAIN,
                            classpath=jmake_classpath,
                            args=args,
                            jvmargs=self._jvm_args)

    def write_processor_info(self, processor_info_file, processors):
        with safe_open(processor_info_file, 'w') as f:
            for processor in processors:
                f.write('%s\n' % processor)
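
For reference, a minimal self-contained sketch (conf names and paths are hypothetical) of the
classpath handling above: execute() pushes (conf, dir) tuples onto the shared 'classpath' state,
and execute_single_compilation() keeps only the entries whose conf appears in the configured
'confs' list before joining them into the -classpath argument:

confs = ['default']  # hypothetical value of the 'confs' config entry

# (conf, path) tuples as registered on the shared 'classpath' context state.
cp = [
    ('default', '/tmp/java-compile/classes'),    # hypothetical paths
    ('default', '/tmp/java-compile/resources'),
    ('test', '/tmp/ivy/junit-4.12.jar'),
]

# Mirror the filtering step in execute_single_compilation().
classpath = [path for conf, path in cp if conf in confs]
print(':'.join(classpath))  # the value joined into jmake's -classpath argument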