def _post_process(self, target, cp):
    """Must be called on all targets, whether they needed compilation or not."""
    classes_dir, depfile, _ = self._output_paths([target])

    # Update the classpath, for the benefit of tasks downstream from us.
    if os.path.exists(classes_dir):
      for conf in self._confs:
        cp.insert(0, (conf, classes_dir))

    # Make note of the classes generated by this target.
    if os.path.exists(depfile) and self.context.products.isrequired('classes'):
      self.context.log.debug('Reading dependencies from ' + depfile)
      deps = Dependencies(classes_dir)
      deps.load(depfile)
      genmap = self.context.products.get('classes')
      for classes_by_source in deps.findclasses([target]).values():
        for source, classes in classes_by_source.items():
          genmap.add(source, classes_dir, classes)
          genmap.add(target, classes_dir, classes)

      # TODO(John Sirois): Map target.resources in the same way
      # Create and Map scala plugin info files to the owning targets.
      if is_scalac_plugin(target) and target.classname:
        basedir, plugin_info_file = self._zinc_utils.write_plugin_info(self._resources_dir, target)
        genmap.add(target, basedir, [plugin_info_file])
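The pattern above registers every product twice: once under the source path and once under the owning target, each against the classes directory that contains it. As a rough mental model only (SimpleProductMapping below is an invented stand-in, not pants' actual ProductMapping), the 'classes' product behaves like a nested key -> {basedir: [products]} dict:

from collections import defaultdict

class SimpleProductMapping(object):
  """Illustrative stand-in for the 'classes' product map populated above."""

  def __init__(self):
    self._by_key = defaultdict(lambda: defaultdict(list))

  def add(self, key, basedir, products):
    # key is a source path or a target; products are paths relative to basedir.
    self._by_key[key][basedir].extend(products)

  def get(self, key):
    return self._by_key.get(key)

genmap = SimpleProductMapping()
genmap.add('com/example/Foo.scala', '/tmp/classes', ['com/example/Foo.class'])
print(genmap.get('com/example/Foo.scala'))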
Example #2
    def execute(self, targets):
        if not self._flatten and len(targets) > 1:
            topologically_sorted_targets = filter(
                is_scala, reversed(InternalTarget.sort_targets(targets)))
            for target in topologically_sorted_targets:
                self.execute([target])
            return

        self.context.log.info('Compiling targets %s' % str(targets))

        scala_targets = filter(is_scala, targets)
        if scala_targets:
            with self.context.state('classpath', []) as cp:
                for conf in self._confs:
                    cp.insert(0, (conf, self._resources_dir))
                    cp.insert(0, (conf, self._classes_dir))

            with self.changed(scala_targets,
                              invalidate_dependants=True) as changed_targets:
                sources_by_target = self.calculate_sources(changed_targets)
                if sources_by_target:
                    sources = reduce(lambda all, sources: all.union(sources),
                                     sources_by_target.values())
                    if not sources:
                        self.context.log.warn(
                            'Skipping scala compile for targets with no sources:\n  %s'
                            % '\n  '.join(
                                str(t) for t in sources_by_target.keys()))
                    else:
                        classpath = [
                            jar for conf, jar in cp if conf in self._confs
                        ]
                        result = self.compile(classpath, sources)
                        if result != 0:
                            raise TaskError('%s returned %d' %
                                            (self._main, result))

            if self.context.products.isrequired('classes'):
                genmap = self.context.products.get('classes')

                # Map generated classes to the owning targets and sources.
                dependencies = Dependencies(self._classes_dir, self._depfile)
                for target, classes_by_source in dependencies.findclasses(
                        targets).items():
                    for source, classes in classes_by_source.items():
                        genmap.add(source, self._classes_dir, classes)
                        genmap.add(target, self._classes_dir, classes)

                # TODO(John Sirois): Map target.resources in the same way
                # Create and Map scala plugin info files to the owning targets.
                for target in targets:
                    if is_scalac_plugin(target) and target.classname:
                        basedir = self.write_plugin_info(target)
                        genmap.add(target, basedir, [_PLUGIN_INFO_FILE])
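When _flatten is off, this execute() compiles one target at a time in dependency order: InternalTarget.sort_targets returns dependents before their dependencies, hence the reversed(). A minimal sketch of the same ordering using only the standard library (graphlib needs Python 3.9+, and the target names are invented):

from graphlib import TopologicalSorter

# Each hypothetical target maps to the targets it depends on.
deps = {'app': {'lib'}, 'lib': {'core'}, 'core': set()}

# static_order() yields dependencies before dependents: core, lib, app.
for target in TopologicalSorter(deps).static_order():
  print(target)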
Example #3
 def _add_products_to_genmap(self, artifact, state):
     """Must be called on all targets, whether they needed compilation or not."""
     genmap = self.context.products.get("classes")
     for target, sources in artifact.sources_by_target.items():
         for source in sources:
             classes = state.classes_by_src.get(source, [])
             relsrc = os.path.relpath(source, target.target_base)
             genmap.add(relsrc, artifact.classes_dir, classes)
             genmap.add(target, artifact.classes_dir, classes)
         # TODO(John Sirois): Map target.resources in the same way
         # Create and Map scala plugin info files to the owning targets.
         if is_scalac_plugin(target) and target.classname:
             basedir, plugin_info_file = self._zinc_utils.write_plugin_info(self._resources_dir, target)
             genmap.add(target, basedir, [plugin_info_file])
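Unlike the first example, this variant keys the map on the source path made relative to the target's source root (target.target_base), so consumers see stable root-relative paths. With hypothetical paths:

import os

source = 'src/scala/com/example/Foo.scala'  # hypothetical source path
target_base = 'src/scala'                   # the target's source root
print(os.path.relpath(source, target_base))  # -> com/example/Foo.scala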
Example #4
 def _add_products_to_genmap(self, artifact, state):
   """Must be called on all targets, whether they needed compilation or not."""
   genmap = self.context.products.get('classes')
   for target, sources in artifact.sources_by_target.items():
     for source in sources:
       classes = state.classes_by_src.get(source, [])
       relsrc = os.path.relpath(source, target.target_base)
       genmap.add(relsrc, artifact.classes_dir, classes)
       genmap.add(target, artifact.classes_dir, classes)
     # TODO(John Sirois): Map target.resources in the same way
     # Create and Map scala plugin info files to the owning targets.
     if is_scalac_plugin(target) and target.classname:
       basedir, plugin_info_file = self._zinc_utils.write_plugin_info(self._resources_dir, target)
       genmap.add(target, basedir, [plugin_info_file])
Example #5
  def _add_all_products_to_genmap(self, sources_by_target, classes_by_source):
    # Map generated classes to the owning targets and sources.
    genmap = self.context.products.get('classes')
    for target, sources in sources_by_target.items():
      for source in sources:
        classes = classes_by_source.get(source, [])
        relsrc = os.path.relpath(source, target.target_base)
        genmap.add(relsrc, self._classes_dir, classes)
        genmap.add(target, self._classes_dir, classes)

      # TODO(John Sirois): Map target.resources in the same way
      # Create and Map scala plugin info files to the owning targets.
      if is_scalac_plugin(target) and target.classname:
        basedir, plugin_info_file = self._zinc_utils.write_plugin_info(self._resources_dir, target)
        genmap.add(target, basedir, [plugin_info_file])
Example #6
    def _add_all_products_to_genmap(self, sources_by_target,
                                    classes_by_source):
        # Map generated classes to the owning targets and sources.
        genmap = self.context.products.get('classes')
        for target, sources in sources_by_target.items():
            for source in sources:
                classes = classes_by_source.get(source, [])
                relsrc = os.path.relpath(source, target.target_base)
                genmap.add(relsrc, self._classes_dir, classes)
                genmap.add(target, self._classes_dir, classes)

            # TODO(John Sirois): Map target.resources in the same way
            # Create and Map scala plugin info files to the owning targets.
            if is_scalac_plugin(target) and target.classname:
                basedir, plugin_info_file = self._zinc_utils.write_plugin_info(
                    self._resources_dir, target)
                genmap.add(target, basedir, [plugin_info_file])
Example #7
  def execute(self, targets):
    if not self._flatten and len(targets) > 1:
      topologically_sorted_targets = filter(is_scala, reversed(InternalTarget.sort_targets(targets)))
      for target in topologically_sorted_targets:
        self.execute([target])
      return

    self.context.log.info('Compiling targets %s' % str(targets))

    scala_targets = filter(is_scala, targets)
    if scala_targets:
      with self.context.state('classpath', []) as cp:
        for conf in self._confs:
          cp.insert(0, (conf, self._resources_dir))
          cp.insert(0, (conf, self._classes_dir))

      with self.changed(scala_targets, invalidate_dependants=True) as changed_targets:
        sources_by_target = self.calculate_sources(changed_targets)
        if sources_by_target:
          sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
          if not sources:
            self.context.log.warn('Skipping scala compile for targets with no sources:\n  %s' %
                                  '\n  '.join(str(t) for t in sources_by_target.keys()))
          else:
            classpath = [jar for conf, jar in cp if conf in self._confs]
            result = self.compile(classpath, sources)
            if result != 0:
              raise TaskError('%s returned %d' % (self._main, result))

      if self.context.products.isrequired('classes'):
        genmap = self.context.products.get('classes')

        # Map generated classes to the owning targets and sources.
        dependencies = Dependencies(self._classes_dir, self._depfile)
        for target, classes_by_source in dependencies.findclasses(targets).items():
          for source, classes in classes_by_source.items():
            genmap.add(source, self._classes_dir, classes)
            genmap.add(target, self._classes_dir, classes)

        # TODO(John Sirois): Map target.resources in the same way
        # Create and Map scala plugin info files to the owning targets.
        for target in targets:
          if is_scalac_plugin(target) and target.classname:
            basedir = self.write_plugin_info(target)
            genmap.add(target, basedir, [_PLUGIN_INFO_FILE])
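A recurring idiom in these execute() variants is the shared classpath state: a list of (conf, path) pairs, prepended so the newest outputs take precedence, then filtered down to plain paths before invoking the compiler. A sketch with invented directories:

confs = ('default',)
cp = []
for conf in confs:
  cp.insert(0, (conf, '/workdir/resources'))  # hypothetical dirs
  cp.insert(0, (conf, '/workdir/classes'))

classpath = [path for conf, path in cp if conf in confs]
print(classpath)  # ['/workdir/classes', '/workdir/resources']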
Example #8
    def post_process(self, vt, upstream_analysis_caches, split_artifact):
        output_dir, depfile, analysis_cache = self.create_output_paths(
            vt.targets)
        if not self.dry_run:
            # Read in the deps created either just now or by a previous compiler run on these targets.
            if os.path.exists(depfile):
                self.context.log.debug('Reading dependencies from ' + depfile)
                deps = Dependencies(output_dir)
                deps.load(depfile)

                if split_artifact:
                    self.split_artifact(deps, vt)

                if self.context.products.isrequired('classes'):
                    genmap = self.context.products.get('classes')
                    for target, classes_by_source in deps.findclasses(
                            vt.targets).items():
                        for source, classes in classes_by_source.items():
                            genmap.add(source, output_dir, classes)
                            genmap.add(target, output_dir, classes)

                    # TODO(John Sirois): Map target.resources in the same way
                    # Create and Map scala plugin info files to the owning targets.
                    for target in vt.targets:
                        if is_scalac_plugin(target) and target.classname:
                            basedir = self.write_plugin_info(target)
                            genmap.add(target, basedir, [_PLUGIN_INFO_FILE])

        # Update the upstream analysis map.
        if os.path.exists(analysis_cache):
            analysis_cache_parts = os.path.split(analysis_cache)
            if not upstream_analysis_caches.has(output_dir):
                # A previous chunk might have already updated this. It is certainly possible for a later chunk to
                # independently depend on some target that a previous chunk already built.
                upstream_analysis_caches.add(output_dir,
                                             analysis_cache_parts[0],
                                             [analysis_cache_parts[1]])

        # Update the classpath.
        with self.context.state('classpath', []) as cp:
            for conf in self._confs:
                cp.insert(0, (conf, output_dir))
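The upstream-analysis bookkeeping above splits the cache path into its directory and filename and records them keyed by the output dir they describe, skipping dirs a previous chunk already registered. Roughly, with a plain dict standing in for the real map and invented paths:

import os

analysis_cache = '/workdir/analysis/flat.analysis_cache'  # hypothetical
output_dir = '/workdir/classes/flat'                      # hypothetical

upstream = {}  # output_dir -> (cache_dir, [cache_files])
if output_dir not in upstream:  # a previous chunk may have added it already
  cache_dir, cache_file = os.path.split(analysis_cache)
  upstream[output_dir] = (cache_dir, [cache_file])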
Example #9
  def execute(self, targets):
    scala_targets = filter(is_scala, reversed(InternalTarget.sort_targets(targets)))
    if scala_targets:
      safe_mkdir(self._classes_dir)
      safe_mkdir(self._depfile_dir)

      with self.context.state('classpath', []) as cp:
        for conf in self._confs:
          cp.insert(0, (conf, self._resources_dir))
          # If we're not flattening, we don't want the classes dir on the classpath yet, as we want zinc to
          # see only the per-compilation output dirs, so it can map them to analysis caches.
          if self._flatten:
            cp.insert(0, (conf, self._classes_dir))

      if not self._flatten:
        upstream_analysis_caches = OrderedDict()  # output dir -> analysis cache file for the classes in that dir.
        for target in scala_targets:
          self.execute_single_compilation([target], cp, upstream_analysis_caches)
      else:
        self.execute_single_compilation(scala_targets, cp, {})

      if not self._flatten:
        # Now we can add the global output dir, so that subsequent goals can see it.
        with self.context.state('classpath', []) as cp:
          for conf in self._confs:
            cp.insert(0, (conf, self._classes_dir))

      if self.context.products.isrequired('classes'):
        genmap = self.context.products.get('classes')

        # Map generated classes to the owning targets and sources.
        for target, classes_by_source in self._deps.findclasses(scala_targets).items():
          for source, classes in classes_by_source.items():
            genmap.add(source, self._classes_dir, classes)
            genmap.add(target, self._classes_dir, classes)

        # TODO(John Sirois): Map target.resources in the same way
        # Create and Map scala plugin info files to the owning targets.
        for target in scala_targets:
          if is_scalac_plugin(target) and target.classname:
            basedir = self.write_plugin_info(target)
            genmap.add(target, basedir, [_PLUGIN_INFO_FILE])
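The upstream cache map above is an OrderedDict because insertion order matters: each per-target compilation registers its output dir and analysis cache, and later compilations hand the accumulated map to zinc so it can resolve classes built earlier. Sketch with invented entries (the real object exposes has()/add() rather than plain item assignment):

from collections import OrderedDict

# output dir -> analysis cache file(s) for the classes in that dir.
upstream_analysis_caches = OrderedDict()
upstream_analysis_caches['/workdir/classes/core'] = ['core.analysis_cache']
upstream_analysis_caches['/workdir/classes/lib'] = ['lib.analysis_cache']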
Example #10
  def execute(self, targets):
    scala_targets = filter(is_scala, reversed(InternalTarget.sort_targets(targets)))
    if scala_targets:
      safe_mkdir(self._depfile_dir)
      safe_mkdir(self._analysis_cache_dir)

      # Map from output directory to { analysis_cache_dir, [ analysis_cache_file ]}
      upstream_analysis_caches = self.context.products.get('upstream')

      with self.context.state('classpath', []) as cp:
        for conf in self._confs:
          cp.insert(0, (conf, self._resources_dir))

      if self._flatten:
        self.execute_single_compilation(scala_targets, cp, upstream_analysis_caches)
      else:
        for target in scala_targets:
          self.execute_single_compilation([target], cp, upstream_analysis_caches)

      # Now we can add the global output dir, so that subsequent goals can see it.
      with self.context.state('classpath', []) as cp:
        for conf in self._confs:
          cp.insert(0, (conf, self._classes_dir))

      if self.context.products.isrequired('classes'):
        genmap = self.context.products.get('classes')

        # Map generated classes to the owning targets and sources.
        for target, classes_by_source in self._deps.findclasses(scala_targets).items():
          for source, classes in classes_by_source.items():
            genmap.add(source, self._classes_dir, classes)
            genmap.add(target, self._classes_dir, classes)

        # TODO(John Sirois): Map target.resources in the same way
        # Create and Map scala plugin info files to the owning targets.
        for target in scala_targets:
          if is_scalac_plugin(target) and target.classname:
            basedir = self.write_plugin_info(target)
            genmap.add(target, basedir, [_PLUGIN_INFO_FILE])
Example #11
  def post_process(self, vt, upstream_analysis_caches, split_artifact):
    output_dir, depfile, analysis_cache = self.create_output_paths(vt.targets)
    if not self.dry_run:
      # Read in the deps created either just now or by a previous compiler run on these targets.
      if os.path.exists(depfile):
        self.context.log.debug('Reading dependencies from ' + depfile)
        deps = Dependencies(output_dir)
        deps.load(depfile)

        if split_artifact:
          self.split_artifact(deps, vt)

        if self.context.products.isrequired('classes'):
          genmap = self.context.products.get('classes')
          for target, classes_by_source in deps.findclasses(vt.targets).items():
            for source, classes in classes_by_source.items():
              genmap.add(source, output_dir, classes)
              genmap.add(target, output_dir, classes)

          # TODO(John Sirois): Map target.resources in the same way
          # Create and Map scala plugin info files to the owning targets.
          for target in vt.targets:
            if is_scalac_plugin(target) and target.classname:
              basedir = self.write_plugin_info(target)
              genmap.add(target, basedir, [_PLUGIN_INFO_FILE])

    # Update the upstream analysis map.
    if os.path.exists(analysis_cache):
      analysis_cache_parts = os.path.split(analysis_cache)
      if not upstream_analysis_caches.has(output_dir):
        # A previous chunk might have already updated this. It is certainly possible for a later chunk to
        # independently depend on some target that a previous chunk already built.
        upstream_analysis_caches.add(output_dir, analysis_cache_parts[0], [analysis_cache_parts[1]])

    # Update the classpath.
    with self.context.state('classpath', []) as cp:
      for conf in self._confs:
        cp.insert(0, (conf, output_dir))
Example #12
  def execute_single_compilation(self, versioned_target_set, cp, upstream_analysis_caches):
    """Execute a single compilation, updating upstream_analysis_caches if needed."""
    if self._flatten:
      compilation_id = 'flat'
      output_dir = self._flat_classes_dir
    else:
      compilation_id = Target.maybe_readable_identify(versioned_target_set.targets)
      # Each compilation must output to its own directory, so zinc can then associate those with the appropriate
      # analysis caches of previous compilations. We then copy the results out to the real output dir.
      output_dir = os.path.join(self._incremental_classes_dir, compilation_id)

    depfile = os.path.join(self._depfile_dir, compilation_id) + '.dependencies'
    analysis_cache = os.path.join(self._analysis_cache_dir, compilation_id) + '.analysis_cache'

    safe_mkdir(output_dir)

    if not versioned_target_set.valid:
      with self.check_artifact_cache(versioned_target_set,
                                     build_artifacts=[output_dir, depfile, analysis_cache],
                                     artifact_root=self._workdir) as needs_building:
        if needs_building:
          self.context.log.info('Compiling targets %s' % versioned_target_set.targets)
          sources_by_target = self.calculate_sources(versioned_target_set.targets)
          if sources_by_target:
            sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
            if not sources:
              touch(depfile)  # Create an empty depfile, since downstream code may assume that one exists.
              self.context.log.warn('Skipping scala compile for targets with no sources:\n  %s' %
                                    '\n  '.join(str(t) for t in sources_by_target.keys()))
            else:
              classpath = [jar for conf, jar in cp if conf in self._confs]
              result = self.compile(classpath, sources, output_dir, analysis_cache, upstream_analysis_caches, depfile)
              if result != 0:
                raise TaskError('%s returned %d' % (self._main, result))

    # Note that the following post-processing steps must happen even for valid targets.

    # Read in the deps created either just now or by a previous compiler run on these targets.
    if self.context.products.isrequired('classes'):
      self.context.log.debug('Reading dependencies from ' + depfile)
      deps = Dependencies(output_dir)
      deps.load(depfile)

      genmap = self.context.products.get('classes')

      for target, classes_by_source in deps.findclasses(versioned_target_set.targets).items():
        for source, classes in classes_by_source.items():
          genmap.add(source, output_dir, classes)
          genmap.add(target, output_dir, classes)

      # TODO(John Sirois): Map target.resources in the same way
      # Create and Map scala plugin info files to the owning targets.
      for target in versioned_target_set.targets:
        if is_scalac_plugin(target) and target.classname:
          basedir = self.write_plugin_info(target)
          genmap.add(target, basedir, [_PLUGIN_INFO_FILE])

    # Update the upstream analysis map.
    analysis_cache_parts = os.path.split(analysis_cache)
    if not upstream_analysis_caches.has(output_dir):
      # A previous chunk might have already updated this. It is certainly possible for a later chunk to
      # independently depend on some target that a previous chunk already built.
      upstream_analysis_caches.add(output_dir, analysis_cache_parts[0], [analysis_cache_parts[1]])

    # Update the classpath.
    with self.context.state('classpath', []) as cp:
      for conf in self._confs:
        cp.insert(0, (conf, output_dir))
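Everything in execute_single_compilation hangs off the compilation_id: it names the per-compilation output dir, depfile, and analysis cache. The joins below mirror the method with invented directories; 'com.example.app' merely stands in for whatever Target.maybe_readable_identify returns:

import os

workdir = '/workdir'                 # hypothetical workdir
compilation_id = 'com.example.app'   # stand-in for the readable target identity

output_dir = os.path.join(workdir, 'incremental_classes', compilation_id)
depfile = os.path.join(workdir, 'depfiles', compilation_id) + '.dependencies'
analysis_cache = os.path.join(workdir, 'analysis', compilation_id) + '.analysis_cache'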
Example #13
    def execute_single_compilation(self, versioned_target_set, cp,
                                   upstream_analysis_caches):
        """Execute a single compilation, updating upstream_analysis_caches if needed."""
        if self._flatten:
            compilation_id = 'flat'
            output_dir = self._flat_classes_dir
        else:
            compilation_id = Target.maybe_readable_identify(
                versioned_target_set.targets)
            # Each compilation must output to its own directory, so zinc can then associate those with the appropriate
            # analysis caches of previous compilations. We then copy the results out to the real output dir.
            output_dir = os.path.join(self._incremental_classes_dir,
                                      compilation_id)

        depfile = os.path.join(self._depfile_dir,
                               compilation_id) + '.dependencies'
        analysis_cache = os.path.join(self._analysis_cache_dir,
                                      compilation_id) + '.analysis_cache'

        safe_mkdir(output_dir)

        if not versioned_target_set.valid:
            with self.check_artifact_cache(
                    versioned_target_set,
                    build_artifacts=[output_dir, depfile,
                                     analysis_cache]) as in_cache:
                if not in_cache:
                    self.context.log.info('Compiling targets %s' %
                                          versioned_target_set.targets)
                    sources_by_target = self.calculate_sources(
                        versioned_target_set.targets)
                    if sources_by_target:
                        sources = reduce(
                            lambda all, sources: all.union(sources),
                            sources_by_target.values())
                        if not sources:
                            # Create empty files, since downstream code may assume that these exist.
                            touch(depfile)
                            touch(analysis_cache)
                            self.context.log.warn(
                                'Skipping scala compile for targets with no sources:\n  %s'
                                % '\n  '.join(
                                    str(t) for t in sources_by_target.keys()))
                        else:
                            classpath = [
                                jar for conf, jar in cp if conf in self._confs
                            ]
                            result = self.compile(classpath, sources,
                                                  output_dir, analysis_cache,
                                                  upstream_analysis_caches,
                                                  depfile)
                            if result != 0:
                                raise TaskError('%s returned %d' %
                                                (self._main, result))

        # Note that the following post-processing steps must happen even for valid targets.

        # Read in the deps created either just now or by a previous compiler run on these targets.
        if self.context.products.isrequired('classes'):
            self.context.log.debug('Reading dependencies from ' + depfile)
            deps = Dependencies(output_dir)
            deps.load(depfile)

            genmap = self.context.products.get('classes')

            for target, classes_by_source in deps.findclasses(
                    versioned_target_set.targets).items():
                for source, classes in classes_by_source.items():
                    genmap.add(source, output_dir, classes)
                    genmap.add(target, output_dir, classes)

            # TODO(John Sirois): Map target.resources in the same way
            # Create and Map scala plugin info files to the owning targets.
            for target in versioned_target_set.targets:
                if is_scalac_plugin(target) and target.classname:
                    basedir = self.write_plugin_info(target)
                    genmap.add(target, basedir, [_PLUGIN_INFO_FILE])

        # Update the upstream analysis map.
        analysis_cache_parts = os.path.split(analysis_cache)
        if not upstream_analysis_caches.has(output_dir):
            # A previous chunk might have already updated this. It is certainly possible for a later chunk to
            # independently depend on some target that a previous chunk already built.
            upstream_analysis_caches.add(output_dir, analysis_cache_parts[0],
                                         [analysis_cache_parts[1]])

        # Update the classpath.
        with self.context.state('classpath', []) as cp:
            for conf in self._confs:
                cp.insert(0, (conf, output_dir))
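When a target set has no sources, this variant still touches the depfile and the analysis cache, since, as its comment notes, downstream code may assume both exist. A minimal stand-in for the touch helper (the real one lives in a pants utility module; this version is an assumption):

import os

def touch(path):
  # Create the file if missing and bump its mtime, like the Unix command.
  with open(path, 'a'):
    os.utime(path, None)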